##// END OF EJS Templates
bundle: add config option to include phases...
Martin von Zweigbergk -
r33031:e8c8d81e default
parent child Browse files
Show More
@@ -0,0 +1,259 b''
1 $ cat >> $HGRCPATH <<EOF
2 > [experimental]
3 > bundle-phases=yes
4 > [extensions]
5 > strip=
6 > drawdag=$TESTDIR/drawdag.py
7 > EOF
8
9 Set up repo with linear history
10 $ hg init linear
11 $ cd linear
12 $ hg debugdrawdag <<'EOF'
13 > E
14 > |
15 > D
16 > |
17 > C
18 > |
19 > B
20 > |
21 > A
22 > EOF
23 $ hg phase --public A
24 $ hg phase --force --secret D
25 $ hg log -G -T '{desc} {phase}\n'
26 o E secret
27 |
28 o D secret
29 |
30 o C draft
31 |
32 o B draft
33 |
34 o A public
35
36 Phases are restored when unbundling
37 $ hg bundle --base B -r E bundle
38 3 changesets found
39 $ hg debugbundle bundle
40 Stream params: sortdict([('Compression', 'BZ')])
41 changegroup -- "sortdict([('version', '02'), ('nbchanges', '3')])"
42 26805aba1e600a82e93661149f2313866a221a7b
43 f585351a92f85104bff7c284233c338b10eb1df7
44 9bc730a19041f9ec7cb33c626e811aa233efb18c
45 phase-heads -- 'sortdict()'
46 26805aba1e600a82e93661149f2313866a221a7b draft
47 $ hg strip --no-backup C
48 $ hg unbundle -q bundle
49 $ rm bundle
50 $ hg log -G -T '{desc} {phase}\n'
51 o E secret
52 |
53 o D secret
54 |
55 o C draft
56 |
57 o B draft
58 |
59 o A public
60
61 Root revision's phase is preserved
62 $ hg bundle -a bundle
63 5 changesets found
64 $ hg strip --no-backup A
65 $ hg unbundle -q bundle
66 $ rm bundle
67 $ hg log -G -T '{desc} {phase}\n'
68 o E secret
69 |
70 o D secret
71 |
72 o C draft
73 |
74 o B draft
75 |
76 o A public
77
78 Completely public history can be restored
79 $ hg phase --public E
80 $ hg bundle -a bundle
81 5 changesets found
82 $ hg strip --no-backup A
83 $ hg unbundle -q bundle
84 $ rm bundle
85 $ hg log -G -T '{desc} {phase}\n'
86 o E public
87 |
88 o D public
89 |
90 o C public
91 |
92 o B public
93 |
94 o A public
95
96 Direct transition from public to secret can be restored
97 $ hg phase --secret --force D
98 $ hg bundle -a bundle
99 5 changesets found
100 $ hg strip --no-backup A
101 $ hg unbundle -q bundle
102 $ rm bundle
103 $ hg log -G -T '{desc} {phase}\n'
104 o E secret
105 |
106 o D secret
107 |
108 o C public
109 |
110 o B public
111 |
112 o A public
113
114 Revisions within bundle preserve their phase even if parent changes its phase
115 $ hg phase --draft --force B
116 $ hg bundle --base B -r E bundle
117 3 changesets found
118 $ hg strip --no-backup C
119 $ hg phase --public B
120 $ hg unbundle -q bundle
121 $ rm bundle
122 $ hg log -G -T '{desc} {phase}\n'
123 o E secret
124 |
125 o D secret
126 |
127 o C draft
128 |
129 o B public
130 |
131 o A public
132
133 Phase of ancestors of stripped node get advanced to accommodate child
134 $ hg bundle --base B -r E bundle
135 3 changesets found
136 $ hg strip --no-backup C
137 $ hg phase --force --secret B
138 $ hg unbundle -q bundle
139 $ rm bundle
140 $ hg log -G -T '{desc} {phase}\n'
141 o E secret
142 |
143 o D secret
144 |
145 o C draft
146 |
147 o B draft
148 |
149 o A public
150
151 Unbundling advances phases of changesets even if they were already in the repo.
152 To test that, create a bundle of everything in draft phase and then unbundle
153 to see that secret becomes draft, but public remains public.
154 $ hg phase --draft --force A
155 $ hg phase --draft E
156 $ hg bundle -a bundle
157 5 changesets found
158 $ hg phase --public A
159 $ hg phase --secret --force E
160 $ hg unbundle -q bundle
161 $ rm bundle
162 $ hg log -G -T '{desc} {phase}\n'
163 o E draft
164 |
165 o D draft
166 |
167 o C draft
168 |
169 o B draft
170 |
171 o A public
172
173 $ cd ..
174
175 Set up repo with non-linear history
176 $ hg init non-linear
177 $ cd non-linear
178 $ hg debugdrawdag <<'EOF'
179 > D E
180 > |\|
181 > B C
182 > |/
183 > A
184 > EOF
185 $ hg phase --public C
186 $ hg phase --force --secret B
187 $ hg log -G -T '{node|short} {desc} {phase}\n'
188 o 03ca77807e91 E draft
189 |
190 | o 215e7b0814e1 D secret
191 |/|
192 o | dc0947a82db8 C public
193 | |
194 | o 112478962961 B secret
195 |/
196 o 426bada5c675 A public
197
198
199 Restore bundle of entire repo
200 $ hg bundle -a bundle
201 5 changesets found
202 $ hg debugbundle bundle
203 Stream params: sortdict([('Compression', 'BZ')])
204 changegroup -- "sortdict([('version', '02'), ('nbchanges', '5')])"
205 426bada5c67598ca65036d57d9e4b64b0c1ce7a0
206 112478962961147124edd43549aedd1a335e44bf
207 dc0947a82db884575bb76ea10ac97b08536bfa03
208 215e7b0814e1cac8e2614e7284f2a5dc266b4323
209 03ca77807e919db8807c3749086dc36fb478cac0
210 phase-heads -- 'sortdict()'
211 dc0947a82db884575bb76ea10ac97b08536bfa03 public
212 03ca77807e919db8807c3749086dc36fb478cac0 draft
213 $ hg strip --no-backup A
214 $ hg unbundle -q bundle
215 $ rm bundle
216 $ hg log -G -T '{node|short} {desc} {phase}\n'
217 o 03ca77807e91 E draft
218 |
219 | o 215e7b0814e1 D secret
220 |/|
221 o | dc0947a82db8 C public
222 | |
223 | o 112478962961 B secret
224 |/
225 o 426bada5c675 A public
226
227
228 $ hg bundle --base 'A + C' -r D bundle
229 2 changesets found
230 $ hg debugbundle bundle
231 Stream params: sortdict([('Compression', 'BZ')])
232 changegroup -- "sortdict([('version', '02'), ('nbchanges', '2')])"
233 112478962961147124edd43549aedd1a335e44bf
234 215e7b0814e1cac8e2614e7284f2a5dc266b4323
235 phase-heads -- 'sortdict()'
236 $ rm bundle
237
238 $ hg bundle --base A -r D bundle
239 3 changesets found
240 $ hg debugbundle bundle
241 Stream params: sortdict([('Compression', 'BZ')])
242 changegroup -- "sortdict([('version', '02'), ('nbchanges', '3')])"
243 112478962961147124edd43549aedd1a335e44bf
244 dc0947a82db884575bb76ea10ac97b08536bfa03
245 215e7b0814e1cac8e2614e7284f2a5dc266b4323
246 phase-heads -- 'sortdict()'
247 dc0947a82db884575bb76ea10ac97b08536bfa03 public
248 $ rm bundle
249
250 $ hg bundle --base 'B + C' -r 'D + E' bundle
251 2 changesets found
252 $ hg debugbundle bundle
253 Stream params: sortdict([('Compression', 'BZ')])
254 changegroup -- "sortdict([('version', '02'), ('nbchanges', '2')])"
255 215e7b0814e1cac8e2614e7284f2a5dc266b4323
256 03ca77807e919db8807c3749086dc36fb478cac0
257 phase-heads -- 'sortdict()'
258 03ca77807e919db8807c3749086dc36fb478cac0 draft
259 $ rm bundle
@@ -1,1784 +1,1818 b''
1 1 # bundle2.py - generic container format to transmit arbitrary data.
2 2 #
3 3 # Copyright 2013 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 """Handling of the new bundle2 format
8 8
9 9 The goal of bundle2 is to act as an atomically packet to transmit a set of
10 10 payloads in an application agnostic way. It consists of a sequence of "parts"
11 11 that will be handed to and processed by the application layer.
12 12
13 13
14 14 General format architecture
15 15 ===========================
16 16
17 17 The format is architected as follows
18 18
19 19 - magic string
20 20 - stream level parameters
21 21 - payload parts (any number)
22 22 - end of stream marker.
23 23
24 24 the Binary format
25 25 ============================
26 26
27 27 All numbers are unsigned and big-endian.
28 28
29 29 stream level parameters
30 30 ------------------------
31 31
32 32 Binary format is as follow
33 33
34 34 :params size: int32
35 35
36 36 The total number of Bytes used by the parameters
37 37
38 38 :params value: arbitrary number of Bytes
39 39
40 40 A blob of `params size` containing the serialized version of all stream level
41 41 parameters.
42 42
43 43 The blob contains a space separated list of parameters. Parameters with value
44 44 are stored in the form `<name>=<value>`. Both name and value are urlquoted.
45 45
46 46 Empty name are obviously forbidden.
47 47
48 48 Name MUST start with a letter. If this first letter is lower case, the
49 49 parameter is advisory and can be safely ignored. However when the first
50 50 letter is capital, the parameter is mandatory and the bundling process MUST
51 51 stop if it is not able to process it.
52 52
53 53 Stream parameters use a simple textual format for two main reasons:
54 54
55 55 - Stream level parameters should remain simple and we want to discourage any
56 56 crazy usage.
57 57 - Textual data allow easy human inspection of a bundle2 header in case of
58 58 troubles.
59 59
60 60 Any Applicative level options MUST go into a bundle2 part instead.
61 61
62 62 Payload part
63 63 ------------------------
64 64
65 65 Binary format is as follow
66 66
67 67 :header size: int32
68 68
69 69 The total number of Bytes used by the part header. When the header is empty
70 70 (size = 0) this is interpreted as the end of stream marker.
71 71
72 72 :header:
73 73
74 74 The header defines how to interpret the part. It contains two piece of
75 75 data: the part type, and the part parameters.
76 76
77 77 The part type is used to route an application level handler, that can
78 78 interpret payload.
79 79
80 80 Part parameters are passed to the application level handler. They are
81 81 meant to convey information that will help the application level object to
82 82 interpret the part payload.
83 83
84 84 The binary format of the header is has follow
85 85
86 86 :typesize: (one byte)
87 87
88 88 :parttype: alphanumerical part name (restricted to [a-zA-Z0-9_:-]*)
89 89
90 90 :partid: A 32bits integer (unique in the bundle) that can be used to refer
91 91 to this part.
92 92
93 93 :parameters:
94 94
95 95 Part's parameter may have arbitrary content, the binary structure is::
96 96
97 97 <mandatory-count><advisory-count><param-sizes><param-data>
98 98
99 99 :mandatory-count: 1 byte, number of mandatory parameters
100 100
101 101 :advisory-count: 1 byte, number of advisory parameters
102 102
103 103 :param-sizes:
104 104
105 105 N couple of bytes, where N is the total number of parameters. Each
106 106 couple contains (<size-of-key>, <size-of-value) for one parameter.
107 107
108 108 :param-data:
109 109
110 110 A blob of bytes from which each parameter key and value can be
111 111 retrieved using the list of size couples stored in the previous
112 112 field.
113 113
114 114 Mandatory parameters comes first, then the advisory ones.
115 115
116 116 Each parameter's key MUST be unique within the part.
117 117
118 118 :payload:
119 119
120 120 payload is a series of `<chunksize><chunkdata>`.
121 121
122 122 `chunksize` is an int32, `chunkdata` are plain bytes (as much as
123 123 `chunksize` says)` The payload part is concluded by a zero size chunk.
124 124
125 125 The current implementation always produces either zero or one chunk.
126 126 This is an implementation limitation that will ultimately be lifted.
127 127
128 128 `chunksize` can be negative to trigger special case processing. No such
129 129 processing is in place yet.
130 130
131 131 Bundle processing
132 132 ============================
133 133
134 134 Each part is processed in order using a "part handler". Handler are registered
135 135 for a certain part type.
136 136
137 137 The matching of a part to its handler is case insensitive. The case of the
138 138 part type is used to know if a part is mandatory or advisory. If the Part type
139 139 contains any uppercase char it is considered mandatory. When no handler is
140 140 known for a Mandatory part, the process is aborted and an exception is raised.
141 141 If the part is advisory and no handler is known, the part is ignored. When the
142 142 process is aborted, the full bundle is still read from the stream to keep the
143 143 channel usable. But none of the part read from an abort are processed. In the
144 144 future, dropping the stream may become an option for channel we do not care to
145 145 preserve.
146 146 """
147 147
148 148 from __future__ import absolute_import
149 149
150 150 import errno
151 151 import re
152 152 import string
153 153 import struct
154 154 import sys
155 155
156 156 from .i18n import _
157 157 from . import (
158 158 changegroup,
159 159 error,
160 160 obsolete,
161 phases,
161 162 pushkey,
162 163 pycompat,
163 164 tags,
164 165 url,
165 166 util,
166 167 )
167 168
urlerr = util.urlerr
urlreq = util.urlreq

# short aliases for the struct helpers used throughout this module
_pack = struct.pack
_unpack = struct.unpack

# binary layout of the bundle2 framing fields (all big-endian, per the
# module docstring above)
_fstreamparamsize = '>i'  # int32: size of the stream-parameter blob
_fpartheadersize = '>i'   # int32: part header size (0 == end-of-stream marker)
_fparttypesize = '>B'     # one byte: length of the part type name
_fpartid = '>I'           # uint32: part id, unique within the bundle
_fpayloadsize = '>i'      # int32: payload chunk size (negative reserved for
                          # special-case processing, none implemented yet)
_fpartparamcount = '>BB'  # two bytes: mandatory / advisory parameter counts

# one entry of a 'phase-heads' part: int32 + 20 bytes
# (presumably phase number + node hash — see the phase-heads handler)
_fphasesentry = '>i20s'

preferedchunksize = 4096

# characters forbidden in a part type name (see validateparttype)
_parttypeforbidden = re.compile('[^a-zA-Z0-9_:-]')
184 187
def outdebug(ui, message):
    """debug regarding output stream (bundling)

    Messages are only emitted when the 'devel.bundle2.debug' knob is set.
    """
    if not ui.configbool('devel', 'bundle2.debug', False):
        return
    ui.debug('bundle2-output: %s\n' % message)
189 192
def indebug(ui, message):
    """debug on input stream (unbundling)

    Messages are only emitted when the 'devel.bundle2.debug' knob is set.
    """
    if not ui.configbool('devel', 'bundle2.debug', False):
        return
    ui.debug('bundle2-input: %s\n' % message)
194 197
def validateparttype(parttype):
    """raise ValueError if a parttype contains invalid character

    Part type names are restricted to the character class [a-zA-Z0-9_:-].
    """
    if re.search('[^a-zA-Z0-9_:-]', parttype) is not None:
        raise ValueError(parttype)
199 202
200 203 def _makefpartparamsizes(nbparams):
201 204 """return a struct format to read part parameter sizes
202 205
203 206 The number parameters is variable so we need to build that format
204 207 dynamically.
205 208 """
206 209 return '>'+('BB'*nbparams)
207 210
# registry mapping lower-cased part type name -> handler function
parthandlermapping = {}

def parthandler(parttype, params=()):
    """decorator that register a function as a bundle2 part handler

    eg::

        @parthandler('myparttype', ('mandatory', 'param', 'handled'))
        def myparttypehandler(...):
            '''process a part of type "my part".'''
            ...
    """
    validateparttype(parttype)

    def _register(func):
        # part-to-handler matching is case insensitive, so register under
        # the lowered name and refuse duplicate registrations
        key = parttype.lower()
        assert key not in parthandlermapping
        parthandlermapping[key] = func
        func.params = frozenset(params)
        return func

    return _register
228 231
class unbundlerecords(object):
    """keep record of what happens during an unbundle

    New records are added using `records.add('cat', obj)`. Where 'cat' is a
    category of record and obj is an arbitrary object.

    `records['cat']` will return all entries of this category 'cat'.

    Iterating on the object itself will yield `('category', obj)` tuples
    for all entries.

    All iterations happens in chronological order.
    """

    def __init__(self):
        # per-category entry lists
        self._categories = {}
        # flat chronological log of (category, entry) pairs
        self._sequences = []
        # partid -> nested unbundlerecords holding reply records
        self._replies = {}

    def add(self, category, entry, inreplyto=None):
        """add a new record of a given category.

        The entry can then be retrieved in the list returned by
        self['category']."""
        bucket = self._categories.setdefault(category, [])
        bucket.append(entry)
        self._sequences.append((category, entry))
        if inreplyto is None:
            return
        # mirror the record into the per-part reply tracker
        self.getreplies(inreplyto).add(category, entry)

    def getreplies(self, partid):
        """get the records that are replies to a specific part"""
        return self._replies.setdefault(partid, unbundlerecords())

    def __getitem__(self, cat):
        entries = self._categories.get(cat, ())
        return tuple(entries)

    def __iter__(self):
        for pair in self._sequences:
            yield pair

    def __len__(self):
        return len(self._sequences)

    def __nonzero__(self):
        # Python 2 truthiness hook; aliased to __bool__ below for Python 3
        return len(self._sequences) > 0

    __bool__ = __nonzero__
275 278
class bundleoperation(object):
    """an object that represents a single bundling process

    Its purpose is to carry unbundle-related objects and states.

    A new object should be created at the beginning of each bundle processing.
    The object is to be returned by the processing function.

    The object has very little content now it will ultimately contain:
    * an access to the repo the bundle is applied to,
    * a ui object,
    * a way to retrieve a transaction to add changes to the repo,
    * a way to record the result of processing each part,
    * a way to construct a bundle response when applicable.
    """

    def __init__(self, repo, transactiongetter, captureoutput=True):
        self.repo = repo
        self.ui = repo.ui
        # chronological record of what each part handler did
        self.records = unbundlerecords()
        # reply bundler, set up by the caller when a response is expected
        self.reply = None
        self.gettransaction = transactiongetter
        self.captureoutput = captureoutput
299 302
class TransactionUnavailable(RuntimeError):
    """Raised when part processing asks for a transaction but none was set up.

    This is the error raised by ``_notransaction``, the default transaction
    getter installed when no transaction is expected.
    """
    pass
302 305
def _notransaction():
    """default method to get a transaction while processing a bundle

    Raises TransactionUnavailable to highlight the fact that no transaction
    was expected to be created while processing this bundle."""
    raise TransactionUnavailable()
309 312
def applybundle(repo, unbundler, tr, source=None, url=None, op=None):
    """Apply *unbundler* to *repo* inside the transaction *tr*.

    The hook arguments of *tr* are annotated with the bundle2 marker and,
    when provided and not already present, the source and url of the bundle.
    """
    # transform me into unbundler.apply() as soon as the freeze is lifted
    hookargs = tr.hookargs
    hookargs['bundle2'] = '1'
    for key, value in (('source', source), ('url', url)):
        if value is not None and key not in hookargs:
            hookargs[key] = value
    return processbundle(repo, unbundler, lambda: tr, op=op)
318 321
def processbundle(repo, unbundler, transactiongetter=None, op=None):
    """This function process a bundle, apply effect to/from a repo

    It iterates over each part then searches for and uses the proper handling
    code to process the part. Parts are processed in order.

    Unknown Mandatory part will abort the process.

    It is temporarily possible to provide a prebuilt bundleoperation to the
    function. This is used to ensure output is properly propagated in case of
    an error during the unbundling. This output capturing part will likely be
    reworked and this ability will probably go away in the process.
    """
    if op is None:
        if transactiongetter is None:
            transactiongetter = _notransaction
        op = bundleoperation(repo, transactiongetter)
    # todo:
    # - replace this is a init function soon.
    # - exception catching
    # touch the property so the stream-level parameters are read (and
    # validated) before any part is processed
    unbundler.params
    if repo.ui.debugflag:
        msg = ['bundle2-input-bundle:']
        if unbundler.params:
            # BUG FIX: the bare format string ' %i params' used to be
            # appended without interpolation, printing a literal '%i';
            # substitute the actual parameter count.
            msg.append(' %i params' % len(unbundler.params))
        if op.gettransaction is None or op.gettransaction is _notransaction:
            msg.append(' no-transaction')
        else:
            msg.append(' with-transaction')
        msg.append('\n')
        repo.ui.debug(''.join(msg))
    iterparts = enumerate(unbundler.iterparts())
    part = None
    nbpart = 0
    try:
        for nbpart, part in iterparts:
            _processpart(op, part)
    except Exception as exc:
        # Any exceptions seeking to the end of the bundle at this point are
        # almost certainly related to the underlying stream being bad.
        # And, chances are that the exception we're handling is related to
        # getting in that bad state. So, we swallow the seeking error and
        # re-raise the original error.
        seekerror = False
        try:
            for nbpart, part in iterparts:
                # consume the bundle content
                part.seek(0, 2)
        except Exception:
            seekerror = True

        # Small hack to let caller code distinguish exceptions from bundle2
        # processing from processing the old format. This is mostly
        # needed to handle different return codes to unbundle according to the
        # type of bundle. We should probably clean up or drop this return code
        # craziness in a future version.
        exc.duringunbundle2 = True
        salvaged = []
        replycaps = None
        if op.reply is not None:
            salvaged = op.reply.salvageoutput()
            replycaps = op.reply.capabilities
        exc._replycaps = replycaps
        exc._bundle2salvagedoutput = salvaged

        # Re-raising from a variable loses the original stack. So only use
        # that form if we need to.
        if seekerror:
            raise exc
        else:
            raise
    finally:
        repo.ui.debug('bundle2-input-bundle: %i parts total\n' % nbpart)

    return op
394 397
def _processpart(op, part):
    """process a single part from a bundle

    The part is guaranteed to have been fully consumed when the function exits
    (even if an exception is raised)."""
    status = 'unknown' # used by debug output
    hardabort = False
    try:
        try:
            handler = parthandlermapping.get(part.type)
            if handler is None:
                status = 'unsupported-type'
                raise error.BundleUnknownFeatureError(parttype=part.type)
            indebug(op.ui, 'found a handler for part %r' % part.type)
            unknownparams = part.mandatorykeys - handler.params
            if unknownparams:
                unknownparams = list(unknownparams)
                unknownparams.sort()
                status = 'unsupported-params (%s)' % unknownparams
                raise error.BundleUnknownFeatureError(parttype=part.type,
                                                     params=unknownparams)
            status = 'supported'
        except error.BundleUnknownFeatureError as exc:
            if part.mandatory: # mandatory parts
                raise
            indebug(op.ui, 'ignoring unsupported advisory part %s' % exc)
            return # skip to part processing
        finally:
            if op.ui.debugflag:
                msg = ['bundle2-input-part: "%s"' % part.type]
                if not part.mandatory:
                    msg.append(' (advisory)')
                nbmp = len(part.mandatorykeys)
                nbap = len(part.params) - nbmp
                if nbmp or nbap:
                    msg.append(' (params:')
                    if nbmp:
                        msg.append(' %i mandatory' % nbmp)
                    if nbap:
                        # BUG FIX: the advisory count used to interpolate
                        # nbmp (the mandatory count) by mistake
                        msg.append(' %i advisory' % nbap)
                    msg.append(')')
                msg.append(' %s\n' % status)
                op.ui.debug(''.join(msg))

        # handler is called outside the above try block so that we don't
        # risk catching KeyErrors from anything other than the
        # parthandlermapping lookup (any KeyError raised by handler()
        # itself represents a defect of a different variety).
        output = None
        if op.captureoutput and op.reply is not None:
            op.ui.pushbuffer(error=True, subproc=True)
            output = ''
        try:
            handler(op, part)
        finally:
            if output is not None:
                output = op.ui.popbuffer()
            if output:
                outpart = op.reply.newpart('output', data=output,
                                           mandatory=False)
                outpart.addparam('in-reply-to', str(part.id), mandatory=False)
    # If exiting or interrupted, do not attempt to seek the stream in the
    # finally block below. This makes abort faster.
    except (SystemExit, KeyboardInterrupt):
        hardabort = True
        raise
    finally:
        # consume the part content to not corrupt the stream.
        if not hardabort:
            part.seek(0, 2)
466 469
def decodecaps(blob):
    """decode a bundle2 caps bytes blob into a dictionary

    The blob is a list of capabilities (one per line)
    Capabilities may have values using a line of the form::

        capability=value1,value2,value3

    The values are always a list."""
    caps = {}
    for line in blob.splitlines():
        if not line:
            continue
        name, eq, rest = line.partition('=')
        # a capability without '=' has no values at all
        rawvals = rest.split(',') if eq else ()
        key = urlreq.unquote(name)
        caps[key] = [urlreq.unquote(v) for v in rawvals]
    return caps
489 492
def encodecaps(caps):
    """encode a bundle2 caps dictionary into a bytes blob"""
    chunks = []
    for name in sorted(caps):
        quoted = urlreq.quote(name)
        values = [urlreq.quote(v) for v in caps[name]]
        if values:
            chunks.append("%s=%s" % (quoted, ','.join(values)))
        else:
            chunks.append(quoted)
    return '\n'.join(chunks)
501 504
# Map of bundle type name -> (on-disk header magic, compression type) for the
# old-style bundle formats; "HG20" is special-cased since bundle2 carries its
# compression as a stream parameter (see bundle20.setcompression).
bundletypes = {
    "": ("", 'UN'), # only when using unbundle on ssh and old http servers
                    # since the unification ssh accepts a header but there
                    # is no capability signaling it.
    "HG20": (), # special-cased below
    "HG10UN": ("HG10UN", 'UN'),
    "HG10BZ": ("HG10", 'BZ'),
    "HG10GZ": ("HG10GZ", 'GZ'),
}

# hgweb uses this list to communicate its preferred type
bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']
514 517
class bundle20(object):
    """represent an outgoing bundle2 container

    Use the `addparam` method to add stream level parameters, and `newpart` to
    populate it. Then call `getchunks` to retrieve all the binary chunks of
    data that compose the bundle2 container."""

    _magicstring = 'HG20'

    def __init__(self, ui, capabilities=()):
        self.ui = ui
        self._params = []  # stream level (name, value) pairs, in order
        self._parts = []   # part objects, in emission order
        self.capabilities = dict(capabilities)
        # default to no compression; setcompression() may upgrade this
        self._compengine = util.compengines.forbundletype('UN')
        self._compopts = None

    def setcompression(self, alg, compopts=None):
        """setup core part compression to <alg>"""
        if alg in (None, 'UN'):
            return
        # compression may only be configured once per bundle
        assert not any(n.lower() == 'compression' for n, v in self._params)
        self.addparam('Compression', alg)
        self._compengine = util.compengines.forbundletype(alg)
        self._compopts = compopts

    @property
    def nbparts(self):
        """total number of parts added to the bundler"""
        return len(self._parts)

    # methods used to defines the bundle2 content
    def addparam(self, name, value=None):
        """add a stream level parameter

        Raises ValueError on an empty name or a name whose first character
        is not a letter (the case of that letter marks the parameter as
        advisory or mandatory — see the module docstring).
        """
        if not name:
            raise ValueError('empty parameter name')
        # BUG FIX: string.letters is locale dependent and no longer exists
        # on Python 3; the format only ever allowed ASCII letters here.
        if name[0] not in string.ascii_letters:
            raise ValueError('non letter first character: %r' % name)
        self._params.append((name, value))

    def addpart(self, part):
        """add a new part to the bundle2 container

        Parts contains the actual applicative payload."""
        assert part.id is None
        part.id = len(self._parts) # very cheap counter
        self._parts.append(part)

    def newpart(self, typeid, *args, **kwargs):
        """create a new part and add it to the containers

        As the part is directly added to the containers. For now, this means
        that any failure to properly initialize the part after calling
        ``newpart`` should result in a failure of the whole bundling process.

        You can still fall back to manually create and add if you need better
        control."""
        part = bundlepart(typeid, *args, **kwargs)
        self.addpart(part)
        return part

    # methods used to generate the bundle2 stream
    def getchunks(self):
        """generate the binary chunks composing the whole bundle2 stream"""
        if self.ui.debugflag:
            msg = ['bundle2-output-bundle: "%s",' % self._magicstring]
            if self._params:
                msg.append(' (%i params)' % len(self._params))
            msg.append(' %i parts total\n' % len(self._parts))
            self.ui.debug(''.join(msg))
        outdebug(self.ui, 'start emission of %s stream' % self._magicstring)
        yield self._magicstring
        param = self._paramchunk()
        outdebug(self.ui, 'bundle parameter: %s' % param)
        yield _pack(_fstreamparamsize, len(param))
        if param:
            yield param
        # everything after the stream parameters goes through the
        # configured compression engine
        for chunk in self._compengine.compressstream(self._getcorechunk(),
                                                     self._compopts):
            yield chunk

    def _paramchunk(self):
        """return a encoded version of all stream parameters"""
        blocks = []
        for par, value in self._params:
            par = urlreq.quote(par)
            if value is not None:
                value = urlreq.quote(value)
                par = '%s=%s' % (par, value)
            blocks.append(par)
        return ' '.join(blocks)

    def _getcorechunk(self):
        """yield chunk for the core part of the bundle

        (all but headers and parameters)"""
        outdebug(self.ui, 'start of parts')
        for part in self._parts:
            outdebug(self.ui, 'bundle part: "%s"' % part.type)
            for chunk in part.getchunks(ui=self.ui):
                yield chunk
        outdebug(self.ui, 'end of bundle')
        # a zero-size part header marks the end of the stream
        yield _pack(_fpartheadersize, 0)

    def salvageoutput(self):
        """return a list with a copy of all output parts in the bundle

        This is meant to be used during error handling to make sure we preserve
        server output"""
        salvaged = []
        for part in self._parts:
            if part.type.startswith('output'):
                salvaged.append(part.copy())
        return salvaged
629 632
630 633
class unpackermixin(object):
    """A mixin to extract bytes and struct data from a stream"""

    def __init__(self, fp):
        self._fp = fp

    def _unpack(self, format):
        """unpack this struct format from the stream

        This method is meant for internal usage by the bundle2 protocol only.
        They directly manipulate the low level stream including bundle2 level
        instruction.

        Do not use it to implement higher-level logic or methods."""
        nbbytes = struct.calcsize(format)
        return _unpack(format, self._readexact(nbbytes))

    def _readexact(self, size):
        """read exactly <size> bytes from the stream

        This method is meant for internal usage by the bundle2 protocol only.
        They directly manipulate the low level stream including bundle2 level
        instruction.

        Do not use it to implement higher-level logic or methods."""
        return changegroup.readexactly(self._fp, size)
657 660
def getunbundler(ui, fp, magicstring=None):
    """return a valid unbundler object for a given magicstring

    When *magicstring* is None, the four magic bytes are read from *fp*.
    """
    if magicstring is None:
        magicstring = changegroup.readexactly(fp, 4)
    magic = magicstring[0:2]
    version = magicstring[2:4]
    if magic != 'HG':
        raise error.Abort(_('not a Mercurial bundle'))
    cls = formatmap.get(version)
    if cls is None:
        raise error.Abort(_('unknown bundle version %s') % version)
    bundlestream = cls(ui, fp)
    indebug(ui, 'start processing of %s stream' % magicstring)
    return bundlestream
671 674
672 675 class unbundle20(unpackermixin):
673 676 """interpret a bundle2 stream
674 677
675 678 This class is fed with a binary stream and yields parts through its
676 679 `iterparts` methods."""
677 680
678 681 _magicstring = 'HG20'
679 682
    def __init__(self, ui, fp):
        """If header is specified, we do not read it out of the stream."""
        self.ui = ui
        # start with the identity ('UN') compression engine; it may be
        # replaced while processing stream parameters
        self._compengine = util.compengines.forbundletype('UN')
        self._compressed = None
        super(unbundle20, self).__init__(fp)
686 689
    @util.propertycache
    def params(self):
        """dictionary of stream level parameters

        Reading this property consumes the parameter section of the
        underlying stream (util.propertycache: computed once, then stored
        on the instance).
        """
        indebug(self.ui, 'reading bundle2 stream parameters')
        params = {}
        # int32 size prefix, then the space-separated urlquoted blob
        paramssize = self._unpack(_fstreamparamsize)[0]
        if paramssize < 0:
            raise error.BundleValueError('negative bundle param size: %i'
                                         % paramssize)
        if paramssize:
            params = self._readexact(paramssize)
            params = self._processallparams(params)
        return params
700 703
    def _processallparams(self, paramsblock):
        """parse a raw stream-parameter blob, apply and return the params

        *paramsblock* is the space-separated, urlquoted form produced by
        bundle20._paramchunk.  Each parameter's effect is applied via
        _processparam; the result is a util.sortdict mapping name -> value
        (value is None for flag-style parameters without '=').
        """
        params = util.sortdict()
        for p in paramsblock.split(' '):
            p = p.split('=', 1)
            p = [urlreq.unquote(i) for i in p]
            if len(p) < 2:
                p.append(None)
            self._processparam(*p)
            params[p[0]] = p[1]
        return params
712 715
713 716
714 717 def _processparam(self, name, value):
715 718 """process a parameter, applying its effect if needed
716 719
717 720 Parameter starting with a lower case letter are advisory and will be
718 721 ignored when unknown. Those starting with an upper case letter are
719 722 mandatory and will this function will raise a KeyError when unknown.
720 723
721 724 Note: no option are currently supported. Any input will be either
722 725 ignored or failing.
723 726 """
724 727 if not name:
725 728 raise ValueError('empty parameter name')
726 729 if name[0] not in string.letters:
727 730 raise ValueError('non letter first character: %r' % name)
728 731 try:
729 732 handler = b2streamparamsmap[name.lower()]
730 733 except KeyError:
731 734 if name[0].islower():
732 735 indebug(self.ui, "ignoring unknown parameter %r" % name)
733 736 else:
734 737 raise error.BundleUnknownFeatureError(params=(name,))
735 738 else:
736 739 handler(self, name, value)
737 740
738 741 def _forwardchunks(self):
739 742 """utility to transfer a bundle2 as binary
740 743
741 744 This is made necessary by the fact the 'getbundle' command over 'ssh'
742 745 have no way to know then the reply end, relying on the bundle to be
743 746 interpreted to know its end. This is terrible and we are sorry, but we
744 747 needed to move forward to get general delta enabled.
745 748 """
746 749 yield self._magicstring
747 750 assert 'params' not in vars(self)
748 751 paramssize = self._unpack(_fstreamparamsize)[0]
749 752 if paramssize < 0:
750 753 raise error.BundleValueError('negative bundle param size: %i'
751 754 % paramssize)
752 755 yield _pack(_fstreamparamsize, paramssize)
753 756 if paramssize:
754 757 params = self._readexact(paramssize)
755 758 self._processallparams(params)
756 759 yield params
757 760 assert self._compengine.bundletype == 'UN'
758 761 # From there, payload might need to be decompressed
759 762 self._fp = self._compengine.decompressorreader(self._fp)
760 763 emptycount = 0
761 764 while emptycount < 2:
762 765 # so we can brainlessly loop
763 766 assert _fpartheadersize == _fpayloadsize
764 767 size = self._unpack(_fpartheadersize)[0]
765 768 yield _pack(_fpartheadersize, size)
766 769 if size:
767 770 emptycount = 0
768 771 else:
769 772 emptycount += 1
770 773 continue
771 774 if size == flaginterrupt:
772 775 continue
773 776 elif size < 0:
774 777 raise error.BundleValueError('negative chunk size: %i')
775 778 yield self._readexact(size)
776 779
777 780
778 781 def iterparts(self):
779 782 """yield all parts contained in the stream"""
780 783 # make sure param have been loaded
781 784 self.params
782 785 # From there, payload need to be decompressed
783 786 self._fp = self._compengine.decompressorreader(self._fp)
784 787 indebug(self.ui, 'start extraction of bundle2 parts')
785 788 headerblock = self._readpartheader()
786 789 while headerblock is not None:
787 790 part = unbundlepart(self.ui, headerblock, self._fp)
788 791 yield part
789 792 part.seek(0, 2)
790 793 headerblock = self._readpartheader()
791 794 indebug(self.ui, 'end of bundle2 stream')
792 795
793 796 def _readpartheader(self):
794 797 """reads a part header size and return the bytes blob
795 798
796 799 returns None if empty"""
797 800 headersize = self._unpack(_fpartheadersize)[0]
798 801 if headersize < 0:
799 802 raise error.BundleValueError('negative part header size: %i'
800 803 % headersize)
801 804 indebug(self.ui, 'part header size: %i' % headersize)
802 805 if headersize:
803 806 return self._readexact(headersize)
804 807 return None
805 808
806 809 def compressed(self):
807 810 self.params # load params
808 811 return self._compressed
809 812
810 813 def close(self):
811 814 """close underlying file"""
812 815 if util.safehasattr(self._fp, 'close'):
813 816 return self._fp.close()
814 817
# map of bundle version (as found in the 'HGXX' magic string) to the
# unbundler class able to interpret that stream
formatmap = {'20': unbundle20}

# registry of stream level parameter handlers, keyed by lower-cased parameter
# name; populated through the @b2streamparamhandler decorator below
b2streamparamsmap = {}
818 821
def b2streamparamhandler(name):
    """register a handler for a stream level parameter

    The decorated function is stored in ``b2streamparamsmap`` under ``name``
    and will be invoked as ``handler(unbundler, name, value)`` when that
    stream parameter is encountered (see unbundle20._processparam).
    """
    def decorator(func):
        # fix: guard against double registration of the same parameter; the
        # assert previously checked 'formatmap' (the magic-string version
        # map), which never contains parameter names, so duplicates slipped
        # through silently
        assert name not in b2streamparamsmap
        b2streamparamsmap[name] = func
        return func
    return decorator
826 829
@b2streamparamhandler('compression')
def processcompression(unbundler, param, value):
    """read compression parameter and install payload decompression"""
    engines = util.compengines
    if value not in engines.supportedbundletypes:
        raise error.BundleUnknownFeatureError(params=(param,),
                                              values=(value,))
    unbundler._compengine = engines.forbundletype(value)
    if value is not None:
        unbundler._compressed = True
836 839
class bundlepart(object):
    """A bundle2 part contains application level payload

    The part `type` is used to route the part to the application level
    handler.

    The part payload is contained in ``part.data``. It could be raw bytes or a
    generator of byte chunks.

    You can add parameters to the part using the ``addparam`` method.
    Parameters can be either mandatory (default) or advisory. Remote side
    should be able to safely ignore the advisory ones.

    Both data and parameters cannot be modified after the generation has begun.
    """

    def __init__(self, parttype, mandatoryparams=(), advisoryparams=(),
                 data='', mandatory=True):
        validateparttype(parttype)
        self.id = None
        self.type = parttype
        self._data = data
        self._mandatoryparams = list(mandatoryparams)
        self._advisoryparams = list(advisoryparams)
        # checking for duplicated entries
        self._seenparams = set()
        for pname, __ in self._mandatoryparams + self._advisoryparams:
            if pname in self._seenparams:
                raise error.ProgrammingError('duplicated params: %s' % pname)
            self._seenparams.add(pname)
        # status of the part's generation:
        # - None: not started,
        # - False: currently generated,
        # - True: generation done.
        self._generated = None
        self.mandatory = mandatory

    def __repr__(self):
        cls = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
        return ('<%s object at %x; id: %s; type: %s; mandatory: %s>'
                % (cls, id(self), self.id, self.type, self.mandatory))

    def copy(self):
        """return a copy of the part

        The new part have the very same content but no partid assigned yet.
        Parts with generated data cannot be copied."""
        assert not util.safehasattr(self.data, 'next')
        return self.__class__(self.type, self._mandatoryparams,
                              self._advisoryparams, self._data, self.mandatory)

    # methods used to defines the part content
    @property
    def data(self):
        return self._data

    @data.setter
    def data(self, data):
        if self._generated is not None:
            raise error.ReadOnlyPartError('part is being generated')
        self._data = data

    @property
    def mandatoryparams(self):
        # make it an immutable tuple to force people through ``addparam``
        return tuple(self._mandatoryparams)

    @property
    def advisoryparams(self):
        # make it an immutable tuple to force people through ``addparam``
        return tuple(self._advisoryparams)

    def addparam(self, name, value='', mandatory=True):
        """add a parameter to the part

        If 'mandatory' is set to True, the remote handler must claim support
        for this parameter or the unbundling will be aborted.

        The 'name' and 'value' cannot exceed 255 bytes each.
        """
        if self._generated is not None:
            raise error.ReadOnlyPartError('part is being generated')
        if name in self._seenparams:
            raise ValueError('duplicated params: %s' % name)
        self._seenparams.add(name)
        params = self._advisoryparams
        if mandatory:
            params = self._mandatoryparams
        params.append((name, value))

    # methods used to generates the bundle2 stream
    def getchunks(self, ui):
        if self._generated is not None:
            raise error.ProgrammingError('part can only be consumed once')
        self._generated = False

        if ui.debugflag:
            msg = ['bundle2-output-part: "%s"' % self.type]
            if not self.mandatory:
                msg.append(' (advisory)')
            nbmp = len(self.mandatoryparams)
            nbap = len(self.advisoryparams)
            if nbmp or nbap:
                msg.append(' (params:')
                if nbmp:
                    msg.append(' %i mandatory' % nbmp)
                if nbap:
                    # fix: the advisory count was previously printed with the
                    # mandatory count (nbmp) by mistake
                    msg.append(' %i advisory' % nbap)
                msg.append(')')
            if not self.data:
                msg.append(' empty payload')
            elif util.safehasattr(self.data, 'next'):
                msg.append(' streamed payload')
            else:
                msg.append(' %i bytes payload' % len(self.data))
            msg.append('\n')
            ui.debug(''.join(msg))

        #### header
        if self.mandatory:
            parttype = self.type.upper()
        else:
            parttype = self.type.lower()
        outdebug(ui, 'part %s: "%s"' % (self.id, parttype))
        ## parttype
        header = [_pack(_fparttypesize, len(parttype)),
                  parttype, _pack(_fpartid, self.id),
                 ]
        ## parameters
        # count
        manpar = self.mandatoryparams
        advpar = self.advisoryparams
        header.append(_pack(_fpartparamcount, len(manpar), len(advpar)))
        # size
        parsizes = []
        for key, value in manpar:
            parsizes.append(len(key))
            parsizes.append(len(value))
        for key, value in advpar:
            parsizes.append(len(key))
            parsizes.append(len(value))
        paramsizes = _pack(_makefpartparamsizes(len(parsizes) / 2), *parsizes)
        header.append(paramsizes)
        # key, value
        for key, value in manpar:
            header.append(key)
            header.append(value)
        for key, value in advpar:
            header.append(key)
            header.append(value)
        ## finalize header
        headerchunk = ''.join(header)
        outdebug(ui, 'header chunk size: %i' % len(headerchunk))
        yield _pack(_fpartheadersize, len(headerchunk))
        yield headerchunk
        ## payload
        try:
            for chunk in self._payloadchunks():
                outdebug(ui, 'payload chunk size: %i' % len(chunk))
                yield _pack(_fpayloadsize, len(chunk))
                yield chunk
        except GeneratorExit:
            # GeneratorExit means that nobody is listening for our
            # results anyway, so just bail quickly rather than trying
            # to produce an error part.
            ui.debug('bundle2-generatorexit\n')
            raise
        except BaseException as exc:
            # backup exception data for later
            ui.debug('bundle2-input-stream-interrupt: encoding exception %s'
                     % exc)
            tb = sys.exc_info()[2]
            msg = 'unexpected error: %s' % exc
            interpart = bundlepart('error:abort', [('message', msg)],
                                   mandatory=False)
            interpart.id = 0
            yield _pack(_fpayloadsize, -1)
            for chunk in interpart.getchunks(ui=ui):
                yield chunk
            outdebug(ui, 'closing payload chunk')
            # abort current part payload
            yield _pack(_fpayloadsize, 0)
            pycompat.raisewithtb(exc, tb)
        # end of payload
        outdebug(ui, 'closing payload chunk')
        yield _pack(_fpayloadsize, 0)
        self._generated = True

    def _payloadchunks(self):
        """yield chunks of a the part payload

        Exists to handle the different methods to provide data to a part."""
        # we only support fixed size data now.
        # This will be improved in the future.
        if util.safehasattr(self.data, 'next'):
            buff = util.chunkbuffer(self.data)
            chunk = buff.read(preferedchunksize)
            while chunk:
                yield chunk
                chunk = buff.read(preferedchunksize)
        elif len(self.data):
            yield self.data
1039 1042
1040 1043
# special payload chunk size used on the wire to signal that an out of band
# interruption part follows (see interrupthandler)
flaginterrupt = -1
1042 1045
class interrupthandler(unpackermixin):
    """read one part and process it with restricted capability

    This allows to transmit exception raised on the producer size during part
    iteration while the consumer is reading a part.

    Part processed in this manner only have access to a ui object,"""

    def __init__(self, ui, fp):
        super(interrupthandler, self).__init__(fp)
        self.ui = ui

    def _readpartheader(self):
        """reads a part header size and return the bytes blob

        returns None if empty"""
        headersize = self._unpack(_fpartheadersize)[0]
        if headersize < 0:
            raise error.BundleValueError('negative part header size: %i'
                                         % headersize)
        indebug(self.ui, 'part header size: %i\n' % headersize)
        if headersize:
            return self._readexact(headersize)
        return None

    def __call__(self):
        # Read a single out of band part from the stream and process it with
        # a restricted operation (no repo, no transaction). A zero-size
        # header means the producer had nothing to transmit after all.
        self.ui.debug('bundle2-input-stream-interrupt:'
                      ' opening out of band context\n')
        indebug(self.ui, 'bundle2 stream interruption, looking for a part.')
        headerblock = self._readpartheader()
        if headerblock is None:
            indebug(self.ui, 'no part found during interruption.')
            return
        part = unbundlepart(self.ui, headerblock, self._fp)
        op = interruptoperation(self.ui)
        _processpart(op, part)
        self.ui.debug('bundle2-input-stream-interrupt:'
                      ' closing out of band context\n')
1082 1085
class interruptoperation(object):
    """Reduced operation object handed to part handlers during interruption.

    Only the ui object is available; any attempt to reach the repository or
    to open a transaction is rejected.
    """

    def __init__(self, ui):
        self.ui = ui
        # interruption parts never get a reply bundle and never capture output
        self.reply = None
        self.captureoutput = False

    def gettransaction(self):
        raise TransactionUnavailable('no repo access from stream interruption')

    @property
    def repo(self):
        raise error.ProgrammingError('no repo access from stream interruption')
1100 1103
class unbundlepart(unpackermixin):
    """a bundle part read from a bundle"""

    def __init__(self, ui, header, fp):
        super(unbundlepart, self).__init__(fp)
        # seeking requires both 'seek' and 'tell' on the underlying file;
        # _tellfp may later flip this to False when the fd turns out to be
        # a pipe (ESPIPE)
        self._seekable = (util.safehasattr(fp, 'seek') and
                          util.safehasattr(fp, 'tell'))
        self.ui = ui
        # unbundle state attr
        self._headerdata = header
        self._headeroffset = 0
        self._initialized = False
        self.consumed = False
        # part data
        self.id = None
        self.type = None
        self.mandatoryparams = None
        self.advisoryparams = None
        self.params = None
        self.mandatorykeys = ()
        self._payloadstream = None
        self._readheader()
        self._mandatory = None
        self._chunkindex = [] #(payload, file) position tuples for chunk starts
        self._pos = 0

    def _fromheader(self, size):
        """return the next <size> byte from the header"""
        offset = self._headeroffset
        data = self._headerdata[offset:(offset + size)]
        self._headeroffset = offset + size
        return data

    def _unpackheader(self, format):
        """read given format from header

        This automatically compute the size of the format to read."""
        data = self._fromheader(struct.calcsize(format))
        return _unpack(format, data)

    def _initparams(self, mandatoryparams, advisoryparams):
        """internal function to setup all logic related parameters"""
        # make it read only to prevent people touching it by mistake.
        self.mandatoryparams = tuple(mandatoryparams)
        self.advisoryparams = tuple(advisoryparams)
        # user friendly UI
        self.params = util.sortdict(self.mandatoryparams)
        self.params.update(self.advisoryparams)
        self.mandatorykeys = frozenset(p[0] for p in mandatoryparams)

    def _payloadchunks(self, chunknum=0):
        '''seek to specified chunk and start yielding data'''
        # _chunkindex records (payload offset, file offset) for each chunk
        # boundary seen so far, enabling later seeks back into the payload
        if len(self._chunkindex) == 0:
            assert chunknum == 0, 'Must start with chunk 0'
            self._chunkindex.append((0, self._tellfp()))
        else:
            assert chunknum < len(self._chunkindex), \
                   'Unknown chunk %d' % chunknum
            self._seekfp(self._chunkindex[chunknum][1])

        pos = self._chunkindex[chunknum][0]
        payloadsize = self._unpack(_fpayloadsize)[0]
        indebug(self.ui, 'payload chunk size: %i' % payloadsize)
        while payloadsize:
            if payloadsize == flaginterrupt:
                # interruption detection, the handler will now read a
                # single part and process it.
                interrupthandler(self.ui, self._fp)()
            elif payloadsize < 0:
                msg = 'negative payload chunk size: %i' %  payloadsize
                raise error.BundleValueError(msg)
            else:
                result = self._readexact(payloadsize)
                chunknum += 1
                pos += payloadsize
                if chunknum == len(self._chunkindex):
                    self._chunkindex.append((pos, self._tellfp()))
                yield result
            payloadsize = self._unpack(_fpayloadsize)[0]
            indebug(self.ui, 'payload chunk size: %i' % payloadsize)

    def _findchunk(self, pos):
        '''for a given payload position, return a chunk number and offset'''
        for chunk, (ppos, fpos) in enumerate(self._chunkindex):
            if ppos == pos:
                return chunk, 0
            elif ppos > pos:
                return chunk - 1, pos - self._chunkindex[chunk - 1][0]
        raise ValueError('Unknown chunk')

    def _readheader(self):
        """read the header and setup the object"""
        typesize = self._unpackheader(_fparttypesize)[0]
        self.type = self._fromheader(typesize)
        indebug(self.ui, 'part type: "%s"' % self.type)
        self.id = self._unpackheader(_fpartid)[0]
        indebug(self.ui, 'part id: "%s"' % self.id)
        # extract mandatory bit from type
        self.mandatory = (self.type != self.type.lower())
        self.type = self.type.lower()
        ## reading parameters
        # param count
        mancount, advcount = self._unpackheader(_fpartparamcount)
        indebug(self.ui, 'part parameters: %i' % (mancount + advcount))
        # param size
        fparamsizes = _makefpartparamsizes(mancount + advcount)
        paramsizes = self._unpackheader(fparamsizes)
        # make it a list of couple again
        paramsizes = zip(paramsizes[::2], paramsizes[1::2])
        # split mandatory from advisory
        mansizes = paramsizes[:mancount]
        advsizes = paramsizes[mancount:]
        # retrieve param value
        manparams = []
        for key, value in mansizes:
            manparams.append((self._fromheader(key), self._fromheader(value)))
        advparams = []
        for key, value in advsizes:
            advparams.append((self._fromheader(key), self._fromheader(value)))
        self._initparams(manparams, advparams)
        ## part payload
        self._payloadstream = util.chunkbuffer(self._payloadchunks())
        # we read the data, tell it
        self._initialized = True

    def read(self, size=None):
        """read payload data"""
        if not self._initialized:
            self._readheader()
        if size is None:
            data = self._payloadstream.read()
        else:
            data = self._payloadstream.read(size)
        self._pos += len(data)
        if size is None or len(data) < size:
            # a short read means the payload stream is exhausted
            if not self.consumed and self._pos:
                self.ui.debug('bundle2-input-part: total payload size %i\n'
                              % self._pos)
            self.consumed = True
        return data

    def tell(self):
        return self._pos

    def seek(self, offset, whence=0):
        # whence follows the file protocol: 0=absolute, 1=relative, 2=from end
        if whence == 0:
            newpos = offset
        elif whence == 1:
            newpos = self._pos + offset
        elif whence == 2:
            if not self.consumed:
                self.read()
            newpos = self._chunkindex[-1][0] - offset
        else:
            raise ValueError('Unknown whence value: %r' % (whence,))

        if newpos > self._chunkindex[-1][0] and not self.consumed:
            # target lies beyond the indexed chunks: consume the rest first
            self.read()
        if not 0 <= newpos <= self._chunkindex[-1][0]:
            raise ValueError('Offset out of range')

        if self._pos != newpos:
            chunk, internaloffset = self._findchunk(newpos)
            self._payloadstream = util.chunkbuffer(self._payloadchunks(chunk))
            adjust = self.read(internaloffset)
            if len(adjust) != internaloffset:
                raise error.Abort(_('Seek failed\n'))
            self._pos = newpos

    def _seekfp(self, offset, whence=0):
        """move the underlying file pointer

        This method is meant for internal usage by the bundle2 protocol only.
        They directly manipulate the low level stream including bundle2 level
        instruction.

        Do not use it to implement higher-level logic or methods."""
        if self._seekable:
            return self._fp.seek(offset, whence)
        else:
            raise NotImplementedError(_('File pointer is not seekable'))

    def _tellfp(self):
        """return the file offset, or None if file is not seekable

        This method is meant for internal usage by the bundle2 protocol only.
        They directly manipulate the low level stream including bundle2 level
        instruction.

        Do not use it to implement higher-level logic or methods."""
        if self._seekable:
            try:
                return self._fp.tell()
            except IOError as e:
                if e.errno == errno.ESPIPE:
                    self._seekable = False
                else:
                    raise
        return None
1300 1303
# These are only the static capabilities.
# Check the 'getrepocaps' function for the rest.
# Maps a capability name to the tuple of values advertised for it.
capabilities = {'HG20': (),
                'error': ('abort', 'unsupportedcontent', 'pushraced',
                          'pushkey'),
                'listkeys': (),
                'pushkey': (),
                'digests': tuple(sorted(util.DIGESTS.keys())),
                'remote-changegroup': ('http', 'https'),
                'hgtagsfnodes': (),
               }
1312 1315
def getrepocaps(repo, allowpushback=False):
    """return the bundle2 capabilities for a given repo

    Exists to allow extensions (like evolution) to mutate the capabilities.
    """
    caps = capabilities.copy()
    cgversions = changegroup.supportedincomingversions(repo)
    caps['changegroup'] = tuple(sorted(cgversions))
    if obsolete.isenabled(repo, obsolete.exchangeopt):
        caps['obsmarkers'] = tuple('V%i' % v for v in obsolete.formats)
    if allowpushback:
        caps['pushback'] = ()
    cpmode = repo.ui.config('server', 'concurrent-push-mode', 'strict')
    if cpmode == 'check-related':
        caps['checkheads'] = ('related',)
    return caps
1330 1333
def bundle2caps(remote):
    """return the bundle capabilities of a peer as dict

    An empty dict is returned when the peer does not advertise bundle2.
    """
    raw = remote.capable('bundle2')
    # capable() returns a false value when the capability is missing; an
    # empty string is a valid (empty) capability blob and must be kept
    if raw == '' or raw:
        capsblob = urlreq.unquote(remote.capable('bundle2'))
        return decodecaps(capsblob)
    return {}
1338 1341
def obsmarkersversion(caps):
    """extract the list of supported obsmarkers versions from a bundle2caps dict
    """
    versions = []
    # advertised formats look like 'V1', 'V2', ...; ignore anything else
    for cap in caps.get('obsmarkers', ()):
        if cap.startswith('V'):
            versions.append(int(cap[1:]))
    return versions
1344 1347
def writenewbundle(ui, repo, source, filename, bundletype, outgoing, opts,
                   vfs=None, compression=None, compopts=None):
    """write a bundle of type ``bundletype`` for ``outgoing`` to ``filename``

    HG10 bundles are delegated to ``writebundle``; anything else must be an
    HG20 flavor and goes through the bundle2 machinery.
    """
    if bundletype.startswith('HG10'):
        cg = changegroup.getchangegroup(repo, source, outgoing, version='01')
        return writebundle(ui, cg, filename, bundletype, vfs=vfs,
                           compression=compression, compopts=compopts)
    if not bundletype.startswith('HG20'):
        raise error.ProgrammingError('unknown bundle type: %s' % bundletype)

    caps = {}
    if 'obsolescence' in opts:
        # advertise obsmarker support so an obsmarkers part may be added
        caps['obsmarkers'] = ('V1',)
    bundler = bundle20(ui, caps)
    bundler.setcompression(compression, compopts)
    _addpartsfromopts(ui, repo, bundler, source, outgoing, opts)
    return changegroup.writechunks(ui, bundler.getchunks(), filename, vfs=vfs)
1363 1366
def _addpartsfromopts(ui, repo, bundler, source, outgoing, opts):
    # We should eventually reconcile this logic with the one behind
    # 'exchange.getbundle2partsgenerator'.
    #
    # The type of input from 'getbundle' and 'writenewbundle' are a bit
    # different right now. So we keep them separated for now for the sake of
    # simplicity.

    # a changegroup part is always wanted in such a bundle
    version = opts.get('cg.version')
    if version is None:
        version = changegroup.safeversion(repo)
    cg = changegroup.getchangegroup(repo, source, outgoing, version=version)
    part = bundler.newpart('changegroup', data=cg.getchunks())
    part.addparam('version', cg.version)
    if 'clcount' in cg.extras:
        part.addparam('nbchanges', str(cg.extras['clcount']),
                      mandatory=False)

    addparttagsfnodescache(repo, bundler, outgoing)

    if opts.get('obsolescence', False):
        obsmarkers = repo.obsstore.relevantmarkers(outgoing.missing)
        buildobsmarkerspart(bundler, obsmarkers)

    if opts.get('phases', False):
        # record the phase of the bundled heads so the receiving side can
        # restore phases on unbundle
        headsbyphase = phases.subsetphaseheads(repo, outgoing.missing)
        phasedata = [_pack(_fphasesentry, phase, head)
                     for phase in phases.allphases
                     for head in headsbyphase[phase]]
        bundler.newpart('phase-heads', data=''.join(phasedata))
1400
def addparttagsfnodescache(repo, bundler, outgoing):
    # include the tags fnode cache entries for the bundled changesets
    # (as an optional part)
    cache = tags.hgtagsfnodescache(repo.unfiltered())
    chunks = []

    # .hgtags fnodes are only relevant for head changesets. While we could
    # transfer values for all known nodes, there will likely be little to
    # no benefit.
    #
    # We don't bother using a generator to produce output data because
    # a) we only have 40 bytes per head and even esoteric numbers of heads
    # consume little memory (1M heads is 40MB) b) we don't want to send the
    # part if we don't have entries and knowing if we have entries requires
    # cache lookups.
    for node in outgoing.missingheads:
        # Don't compute missing, as this may slow down serving.
        fnode = cache.getfnode(node, computemissing=False)
        if fnode is None:
            continue
        chunks.append(node)
        chunks.append(fnode)

    if chunks:
        bundler.newpart('hgtagsfnodes', data=''.join(chunks))
1413 1424
def buildobsmarkerspart(bundler, markers):
    """append an 'obsmarkers' part carrying <markers> to the bundler

    Returns the new part, or None when markers is empty.
    Raises ValueError if the bundler doesn't support any known obsmarker
    format.
    """
    if not markers:
        return None

    supported = obsmarkersversion(bundler.capabilities)
    version = obsolete.commonversion(supported)
    if version is None:
        raise ValueError('bundler does not support common obsmarker format')
    stream = obsolete.encodemarkers(markers, True, version=version)
    return bundler.newpart('obsmarkers', data=stream)
1429 1440
def writebundle(ui, cg, filename, bundletype, vfs=None, compression=None,
                compopts=None):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
    """

    if bundletype == "HG20":
        bundler = bundle20(ui)
        bundler.setcompression(compression, compopts)
        part = bundler.newpart('changegroup', data=cg.getchunks())
        part.addparam('version', cg.version)
        if 'clcount' in cg.extras:
            part.addparam('nbchanges', str(cg.extras['clcount']),
                          mandatory=False)
        chunkiter = bundler.getchunks()
    else:
        # compression argument is only for the bundle2 case
        assert compression is None
        if cg.version != '01':
            raise error.Abort(_('old bundle types only supports v1 '
                                'changegroups'))
        header, comp = bundletypes[bundletype]
        if comp not in util.compengines.supportedbundletypes:
            raise error.Abort(_('unknown stream compression type: %s')
                              % comp)
        engine = util.compengines.forbundletype(comp)
        def _chunks():
            # legacy bundles are the magic header followed by the compressed
            # changegroup stream
            yield header
            for chunk in engine.compressstream(cg.getchunks(), compopts):
                yield chunk
        chunkiter = _chunks()

    # parse the changegroup data, otherwise we will block
    # in case of sshrepo because we don't know the end of the stream
    return changegroup.writechunks(ui, chunkiter, filename, vfs=vfs)
1469 1480
@parthandler('changegroup', ('version', 'nbchanges', 'treemanifest'))
def handlechangegroup(op, inpart):
    """apply a changegroup part on the repo

    This is a very early implementation that will massive rework before being
    inflicted to any end-user.
    """
    tr = op.gettransaction()
    unpackerversion = inpart.params.get('version', '01')
    # We should raise an appropriate exception here
    cg = changegroup.getunbundler(unpackerversion, inpart, None)
    # the source and url passed here are overwritten by the one contained in
    # the transaction.hookargs argument. So 'bundle2' is a placeholder
    nbchangesets = None
    if 'nbchanges' in inpart.params:
        nbchangesets = int(inpart.params.get('nbchanges'))
    # a bundle carrying tree manifests may only be applied to a repo that is
    # empty or already uses tree manifests; upgrade the empty repo in place
    if ('treemanifest' in inpart.params and
        'treemanifest' not in op.repo.requirements):
        if len(op.repo.changelog) != 0:
            raise error.Abort(_(
                "bundle contains tree manifests, but local repo is "
                "non-empty and does not use tree manifests"))
        op.repo.requirements.add('treemanifest')
        op.repo._applyopenerreqs()
        op.repo._writerequirements()
    ret, addednodes = cg.apply(op.repo, tr, 'bundle2', 'bundle2',
                               expectedtotal=nbchangesets)
    op.records.add('changegroup', {
        'return': ret,
        'addednodes': addednodes,
    })
    if op.reply is not None:
        # This is definitely not the final form of this
        # return. But one need to start somewhere.
        part = op.reply.newpart('reply:changegroup', mandatory=False)
        part.addparam('in-reply-to', str(inpart.id), mandatory=False)
        part.addparam('return', '%i' % ret, mandatory=False)
    # the changegroup unbundler must have consumed the whole part payload
    assert not inpart.read()
1508 1519
# Parameter names accepted by the 'remote-changegroup' part: the bundle
# location/size plus one 'digest:<type>' parameter per digest algorithm
# supported by util.DIGESTS.
_remotechangegroupparams = tuple(['url', 'size', 'digests'] +
    ['digest:%s' % k for k in util.DIGESTS.keys()])
@parthandler('remote-changegroup', _remotechangegroupparams)
def handleremotechangegroup(op, inpart):
    """apply a bundle10 on the repo, given an url and validation information

    All the information about the remote bundle to import are given as
    parameters. The parameters include:
      - url: the url to the bundle10.
      - size: the bundle10 file size. It is used to validate what was
        retrieved by the client matches the server knowledge about the bundle.
      - digests: a space separated list of the digest types provided as
        parameters.
      - digest:<digest-type>: the hexadecimal representation of the digest with
        that name. Like the size, it is used to validate what was retrieved by
        the client matches what the server knows about the bundle.

    When multiple digest types are given, all of them are checked.
    """
    try:
        raw_url = inpart.params['url']
    except KeyError:
        raise error.Abort(_('remote-changegroup: missing "%s" param') % 'url')
    parsed_url = util.url(raw_url)
    if parsed_url.scheme not in capabilities['remote-changegroup']:
        raise error.Abort(_('remote-changegroup does not support %s urls') %
                          parsed_url.scheme)

    try:
        size = int(inpart.params['size'])
    except ValueError:
        raise error.Abort(_('remote-changegroup: invalid value for param "%s"')
                          % 'size')
    except KeyError:
        raise error.Abort(_('remote-changegroup: missing "%s" param') % 'size')

    # Collect the expected digest values; each advertised type must come
    # with a matching 'digest:<type>' parameter.
    digests = {}
    for typ in inpart.params.get('digests', '').split():
        param = 'digest:%s' % typ
        try:
            value = inpart.params[param]
        except KeyError:
            raise error.Abort(_('remote-changegroup: missing "%s" param') %
                              param)
        digests[typ] = value

    # Wrap the remote stream so size and digests are verified as it is read.
    real_part = util.digestchecker(url.open(op.ui, raw_url), size, digests)

    tr = op.gettransaction()
    # delayed import to avoid a cycle at module load time
    from . import exchange
    cg = exchange.readbundle(op.repo.ui, real_part, raw_url)
    if not isinstance(cg, changegroup.cg1unpacker):
        raise error.Abort(_('%s: not a bundle version 1.0') %
                          util.hidepassword(raw_url))
    ret, addednodes = cg.apply(op.repo, tr, 'bundle2', 'bundle2')
    op.records.add('changegroup', {
        'return': ret,
        'addednodes': addednodes,
    })
    if op.reply is not None:
        # This is definitely not the final form of this
        # return. But one need to start somewhere.
        part = op.reply.newpart('reply:changegroup')
        part.addparam('in-reply-to', str(inpart.id), mandatory=False)
        part.addparam('return', '%i' % ret, mandatory=False)
    try:
        # validate() only raises once the whole stream has been consumed,
        # which cg.apply() did above.
        real_part.validate()
    except error.Abort as e:
        raise error.Abort(_('bundle at %s is corrupted:\n%s') %
                          (util.hidepassword(raw_url), str(e)))
    assert not inpart.read()
1580 1591
@parthandler('reply:changegroup', ('return', 'in-reply-to'))
def handlereplychangegroup(op, inpart):
    """record the server's response to a previously sent changegroup part"""
    params = inpart.params
    retcode = int(params['return'])
    inreplyto = int(params['in-reply-to'])
    op.records.add('changegroup', {'return': retcode}, inreplyto)
1586 1597
@parthandler('check:heads')
def handlecheckheads(op, inpart):
    """check that head of the repo did not change

    This is used to detect a push race when using unbundle.
    This replaces the "heads" argument of unbundle."""
    # The payload is a flat sequence of 20-byte binary nodes.
    heads = []
    while True:
        chunk = inpart.read(20)
        if len(chunk) < 20:
            assert not chunk
            break
        heads.append(chunk)
    # Trigger a transaction so that we are guaranteed to have the lock now.
    if op.ui.configbool('experimental', 'bundle2lazylocking'):
        op.gettransaction()
    if sorted(heads) != sorted(op.repo.heads()):
        raise error.PushRaced('repository changed while pushing - '
                              'please try again')
1605 1616
@parthandler('check:updated-heads')
def handlecheckupdatedheads(op, inpart):
    """check for race on the heads touched by a push

    This is similar to 'check:heads' but focus on the heads actually updated
    during the push. If other activities happen on unrelated heads, it is
    ignored.

    This allow server with high traffic to avoid push contention as long as
    unrelated parts of the graph are involved."""
    # The payload is a flat sequence of 20-byte binary nodes.
    heads = []
    while True:
        chunk = inpart.read(20)
        if len(chunk) < 20:
            assert not chunk
            break
        heads.append(chunk)
    # trigger a transaction so that we are guaranteed to have the lock now.
    if op.ui.configbool('experimental', 'bundle2lazylocking'):
        op.gettransaction()

    currentheads = set()
    for branchheads in op.repo.branchmap().itervalues():
        currentheads.update(branchheads)

    if any(node not in currentheads for node in heads):
        raise error.PushRaced('repository changed while pushing - '
                              'please try again')
1634 1645
@parthandler('output')
def handleoutput(op, inpart):
    """forward output captured on the server to the client"""
    payload = inpart.read()
    for outputline in payload.splitlines():
        op.ui.status(_('remote: %s\n') % outputline)
1640 1651
@parthandler('replycaps')
def handlereplycaps(op, inpart):
    """Notify that a reply bundle should be created

    The payload contains the capabilities information for the reply"""
    # Always consume the payload, even when a reply bundler already exists.
    replycaps = decodecaps(inpart.read())
    if op.reply is None:
        op.reply = bundle20(op.ui, replycaps)
1649 1660
# Distinguishes "a bundle2 part asked us to abort" from a locally raised
# Abort, so callers can react to remote-originated failures specifically.
class AbortFromPart(error.Abort):
    """Sub-class of Abort that denotes an error from a bundle2 part."""
1652 1663
@parthandler('error:abort', ('message', 'hint'))
def handleerrorabort(op, inpart):
    """Used to transmit abort error over the wire"""
    message = inpart.params['message']
    hint = inpart.params.get('hint')
    raise AbortFromPart(message, hint=hint)
1658 1669
@parthandler('error:pushkey', ('namespace', 'key', 'new', 'old', 'ret',
                               'in-reply-to'))
def handleerrorpushkey(op, inpart):
    """Used to transmit failure of a mandatory pushkey over the wire"""
    # Forward every pushkey detail that was actually transmitted.
    kwargs = dict((name, inpart.params[name])
                  for name in ('namespace', 'key', 'new', 'old', 'ret')
                  if inpart.params.get(name) is not None)
    raise error.PushkeyFailed(inpart.params['in-reply-to'], **kwargs)
1669 1680
@parthandler('error:unsupportedcontent', ('parttype', 'params'))
def handleerrorunsupportedcontent(op, inpart):
    """Used to transmit unknown content error over the wire"""
    kwargs = {}
    if inpart.params.get('parttype') is not None:
        kwargs['parttype'] = inpart.params['parttype']
    rawparams = inpart.params.get('params')
    if rawparams is not None:
        # parameter names are NUL-separated on the wire
        kwargs['params'] = rawparams.split('\0')

    raise error.BundleUnknownFeatureError(**kwargs)
1682 1693
@parthandler('error:pushraced', ('message',))
def handleerrorpushraced(op, inpart):
    """Used to transmit push race error over the wire"""
    message = inpart.params['message']
    raise error.ResponseError(_('push failed:'), message)
1687 1698
@parthandler('listkeys', ('namespace',))
def handlelistkeys(op, inpart):
    """retrieve pushkey namespace content stored in a bundle2"""
    decoded = pushkey.decodekeys(inpart.read())
    op.records.add('listkeys', (inpart.params['namespace'], decoded))
1694 1705
@parthandler('pushkey', ('namespace', 'key', 'old', 'new'))
def handlepushkey(op, inpart):
    """process a pushkey request"""
    dec = pushkey.decode
    namespace = dec(inpart.params['namespace'])
    key = dec(inpart.params['key'])
    old = dec(inpart.params['old'])
    new = dec(inpart.params['new'])
    # Grab the transaction to ensure that we have the lock before performing the
    # pushkey.
    if op.ui.configbool('experimental', 'bundle2lazylocking'):
        op.gettransaction()
    ret = op.repo.pushkey(namespace, key, old, new)
    record = {'namespace': namespace,
              'key': key,
              'old': old,
              'new': new}
    op.records.add('pushkey', record)
    if op.reply is not None:
        rpart = op.reply.newpart('reply:pushkey')
        rpart.addparam('in-reply-to', str(inpart.id), mandatory=False)
        rpart.addparam('return', '%i' % ret, mandatory=False)
    # A failed mandatory pushkey aborts the whole bundle application.
    if inpart.mandatory and not ret:
        kwargs = {}
        # NOTE(review): this loop variable shadows the decoded 'key' above;
        # harmless here since 'key' is no longer used, but worth renaming.
        for key in ('namespace', 'key', 'new', 'old', 'ret'):
            if key in inpart.params:
                kwargs[key] = inpart.params[key]
        raise error.PushkeyFailed(partid=str(inpart.id), **kwargs)
1723 1734
def _readphaseheads(inpart):
    """Decode a phase-heads payload into per-phase lists of head nodes.

    Returns a list indexed by phase number; each item is the list of head
    nodes announced for that phase. Aborts on a truncated entry.
    """
    entrysize = struct.calcsize(_fphasesentry)
    headsbyphase = [[] for _ in phases.allphases]
    entry = inpart.read(entrysize)
    while len(entry) == entrysize:
        phase, node = struct.unpack(_fphasesentry, entry)
        headsbyphase[phase].append(node)
        entry = inpart.read(entrysize)
    if entry:
        # trailing partial record: the part is corrupt
        raise error.Abort(_('bad phase-heads bundle part'))
    return headsbyphase
1747
@parthandler('phase-heads')
def handlephases(op, inpart):
    """apply phases from bundle part to repo"""
    headsbyphase = _readphaseheads(inpart)
    # Gather every node added by changegroup parts processed so far.
    addednodes = []
    for record in op.records['changegroup']:
        addednodes.extend(record['addednodes'])
    phases.updatephases(op.repo.unfiltered(), op.gettransaction(),
                        headsbyphase, addednodes)
1757
@parthandler('reply:pushkey', ('return', 'in-reply-to'))
def handlepushkeyreply(op, inpart):
    """retrieve the result of a pushkey request"""
    params = inpart.params
    op.records.add('pushkey', {'return': int(params['return'])},
                   int(params['in-reply-to']))
1730 1764
@parthandler('obsmarkers')
def handleobsmarker(op, inpart):
    """add a stream of obsmarkers to the repo"""
    tr = op.gettransaction()
    markerdata = inpart.read()
    if op.ui.config('experimental', 'obsmarkers-exchange-debug', False):
        op.ui.write(('obsmarker-exchange: %i bytes received\n')
                    % len(markerdata))
    # The mergemarkers call will crash if marker creation is not enabled.
    # we want to avoid this if the part is advisory.
    if not inpart.mandatory and op.repo.obsstore.readonly:
        # fix: debug messages are newline-terminated everywhere else in this
        # module; without '\n' this line runs into the next output.
        op.repo.ui.debug('ignoring obsolescence markers, feature not enabled\n')
        return
    new = op.repo.obsstore.mergemarkers(tr, markerdata)
    op.repo.invalidatevolatilesets()
    if new:
        op.repo.ui.status(_('%i new obsolescence markers\n') % new)
    op.records.add('obsmarkers', {'new': new})
    if op.reply is not None:
        rpart = op.reply.newpart('reply:obsmarkers')
        rpart.addparam('in-reply-to', str(inpart.id), mandatory=False)
        rpart.addparam('new', '%i' % new, mandatory=False)
1753 1787
1754 1788
@parthandler('reply:obsmarkers', ('new', 'in-reply-to'))
def handleobsmarkerreply(op, inpart):
    """retrieve the result of an obsmarkers request

    (The previous docstring said "pushkey request" — a copy/paste from
    handlepushkeyreply; this part replies to an 'obsmarkers' part.)
    """
    ret = int(inpart.params['new'])
    partid = int(inpart.params['in-reply-to'])
    op.records.add('obsmarkers', {'new': ret}, partid)
1761 1795
@parthandler('hgtagsfnodes')
def handlehgtagsfnodes(op, inpart):
    """Applies .hgtags fnodes cache entries to the local repo.

    Payload is pairs of 20 byte changeset nodes and filenodes.
    """
    # Grab the transaction so we ensure that we have the lock at this point.
    if op.ui.configbool('experimental', 'bundle2lazylocking'):
        op.gettransaction()
    cache = tags.hgtagsfnodescache(op.repo.unfiltered())

    applied = 0
    while True:
        node = inpart.read(20)
        fnode = inpart.read(20)
        if len(node) == 20 and len(fnode) == 20:
            cache.setfnode(node, fnode)
            applied += 1
            continue
        # short read: end of payload (also emitted at a clean boundary)
        op.ui.debug('ignoring incomplete received .hgtags fnodes data\n')
        break

    cache.write()
    op.ui.debug('applied %i hgtags fnodes cache entries\n' % applied)
@@ -1,5400 +1,5402 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import difflib
11 11 import errno
12 12 import os
13 13 import re
14 14 import sys
15 15
16 16 from .i18n import _
17 17 from .node import (
18 18 hex,
19 19 nullid,
20 20 nullrev,
21 21 short,
22 22 )
23 23 from . import (
24 24 archival,
25 25 bookmarks,
26 26 bundle2,
27 27 changegroup,
28 28 cmdutil,
29 29 copies,
30 30 debugcommands as debugcommandsmod,
31 31 destutil,
32 32 dirstateguard,
33 33 discovery,
34 34 encoding,
35 35 error,
36 36 exchange,
37 37 extensions,
38 38 formatter,
39 39 graphmod,
40 40 hbisect,
41 41 help,
42 42 hg,
43 43 lock as lockmod,
44 44 merge as mergemod,
45 45 obsolete,
46 46 patch,
47 47 phases,
48 48 pycompat,
49 49 rcutil,
50 50 registrar,
51 51 revsetlang,
52 52 scmutil,
53 53 server,
54 54 sshserver,
55 55 streamclone,
56 56 tags as tagsmod,
57 57 templatekw,
58 58 ui as uimod,
59 59 util,
60 60 )
61 61
# convenience alias used throughout this module for releasing locks
release = lockmod.release

# command table: start from the debug commands and register the rest here
table = {}
table.update(debugcommandsmod.command._table)

command = registrar.command(table)

# common command options

# options accepted by every command, in addition to its own flags
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or name of overlay bundle file'),
     _('REPO')),
    ('', 'cwd', '',
     _('change working directory'), _('DIR')),
    ('y', 'noninteractive', None,
     _('do not prompt, automatically pick the first choice for all prompts')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'color', '',
     # i18n: 'always', 'auto', 'never', and 'debug' are keywords
     # and should not be translated
     _("when to colorize (boolean, always, auto, never, or debug)"),
     _('TYPE')),
    ('', 'config', [],
     _('set/override config option (use \'section.name=value\')'),
     _('CONFIG')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', encoding.encoding, _('set the charset encoding'),
     _('ENCODE')),
    ('', 'encodingmode', encoding.encodingmode,
     _('set the charset encoding mode'), _('MODE')),
    ('', 'traceback', None, _('always print a traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
    ('', 'hidden', False, _('consider hidden changesets')),
    ('', 'pager', 'auto',
     _("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
]

# module-level aliases for the shared option groups defined in cmdutil
dryrunopts = cmdutil.dryrunopts
remoteopts = cmdutil.remoteopts
walkopts = cmdutil.walkopts
commitopts = cmdutil.commitopts
commitopts2 = cmdutil.commitopts2
formatteropts = cmdutil.formatteropts
templateopts = cmdutil.templateopts
logopts = cmdutil.logopts
diffopts = cmdutil.diffopts
diffwsopts = cmdutil.diffwsopts
diffopts2 = cmdutil.diffopts2
mergetoolopts = cmdutil.mergetoolopts
similarityopts = cmdutil.similarityopts
subrepoopts = cmdutil.subrepoopts
debugrevlogopts = cmdutil.debugrevlogopts
121 121 # Commands start here, listed alphabetically
122 122
@command('^add',
    walkopts + subrepoopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the
    repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see :hg:`forget`.

    If no names are given, add all files to the repository (except
    files matching ``.hgignore``).

    .. container:: verbose

       Examples:

         - New (unknown) files are added
           automatically by :hg:`add`::

             $ ls
             foo.c
             $ hg status
             ? foo.c
             $ hg add
             adding foo.c
             $ hg status
             A foo.c

         - Specific files to be added can be specified::

             $ ls
             bar.c foo.c
             $ hg status
             ? bar.c
             ? foo.c
             $ hg add bar.c
             $ hg status
             A bar.c
             ? foo.c

    Returns 0 if all files are successfully added.
    """

    matcher = scmutil.match(repo[None], pats, pycompat.byteskwargs(opts))
    badfiles = cmdutil.add(ui, repo, matcher, "", False, **opts)
    return 1 if badfiles else 0
173 173
@command('addremove',
    similarityopts + subrepoopts + walkopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    Unless names are given, new files are ignored if they match any of
    the patterns in ``.hgignore``. As with add, these changes take
    effect at the next commit.

    Use the -s/--similarity option to detect renamed files. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. With a parameter greater than 0,
    this compares every removed file with every added file and records
    those similar enough as renames. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed. If
    not specified, -s/--similarity defaults to 100 and only renames of
    identical files are detected.

    .. container:: verbose

       Examples:

         - A number of files (bar.c and foo.c) are new,
           while foobar.c has been removed (without using :hg:`remove`)
           from the repository::

             $ ls
             bar.c foo.c
             $ hg status
             ! foobar.c
             ? bar.c
             ? foo.c
             $ hg addremove
             adding bar.c
             adding foo.c
             removing foobar.c
             $ hg status
             A bar.c
             A foo.c
             R foobar.c

         - A file foobar.c was moved to foo.c without using :hg:`rename`.
           Afterwards, it was edited slightly::

             $ ls
             foo.c
             $ hg status
             ! foobar.c
             ? foo.c
             $ hg addremove --similarity 90
             removing foobar.c
             adding foo.c
             recording removal of foobar.c as rename to foo.c (94% similar)
             $ hg status -C
             A foo.c
               foobar.c
             R foobar.c

    Returns 0 if all files are successfully added.
    """
    opts = pycompat.byteskwargs(opts)
    try:
        similarity = float(opts.get('similarity') or 100)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    matcher = scmutil.match(repo[None], pats, opts)
    return scmutil.addremove(repo, matcher, "", opts,
                             similarity=similarity / 100.0)
249 249
@command('^annotate|blame',
    [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
    ('', 'follow', None,
     _('follow copies/renames and list the filename (DEPRECATED)')),
    ('', 'no-follow', None, _("don't follow copies and renames")),
    ('a', 'text', None, _('treat all files as text')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('f', 'file', None, _('list the filename')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ('n', 'number', None, _('list the revision number (default)')),
    ('c', 'changeset', None, _('list the changeset')),
    ('l', 'line-number', None, _('show line number at the first appearance')),
    ('', 'skip', [], _('revision to not display (EXPERIMENTAL)'), _('REV')),
    ] + diffwsopts + walkopts + formatteropts,
    _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
    inferrepo=True)
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line.

    This command is useful for discovering when a change was made and
    by whom.

    If you include --file, --user, or --date, the revision number is
    suppressed unless you also include --number.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    if not pats:
        raise error.Abort(_('at least one filename or pattern is required'))

    if opts.get('follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts['file'] = True

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    rootfm = ui.formatter('annotate', opts)
    if ui.quiet:
        datefunc = util.shortdate
    else:
        datefunc = util.datestr
    if ctx.rev() is None:
        # Annotating the working directory: revisions/hashes of lines coming
        # from uncommitted changes have no node yet, so render them with a
        # parent-based placeholder ('+' suffix in plain output).
        def hexfn(node):
            if node is None:
                return None
            else:
                return rootfm.hexfunc(node)
        if opts.get('changeset'):
            # omit "+" suffix which is appended to node hex
            def formatrev(rev):
                if rev is None:
                    return '%d' % ctx.p1().rev()
                else:
                    return '%d' % rev
        else:
            def formatrev(rev):
                if rev is None:
                    return '%d+' % ctx.p1().rev()
                else:
                    return '%d ' % rev
        def formathex(hex):
            if hex is None:
                return '%s+' % rootfm.hexfunc(ctx.p1().node())
            else:
                return '%s ' % hex
    else:
        hexfn = rootfm.hexfunc
        formatrev = formathex = str

    # (option name, separator, value extractor, plain-output formatter)
    opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
             ('number', ' ', lambda x: x[0].rev(), formatrev),
             ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
             ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
             ('file', ' ', lambda x: x[0].path(), str),
             ('line_number', ':', lambda x: x[1], str),
            ]
    # template field names differ from option names for these two
    fieldnamemap = {'number': 'rev', 'changeset': 'node'}

    if (not opts.get('user') and not opts.get('changeset')
        and not opts.get('date') and not opts.get('file')):
        opts['number'] = True

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise error.Abort(_('at least one of -n/-c is required for -l'))

    ui.pager('annotate')

    if rootfm.isplain():
        def makefunc(get, fmt):
            return lambda x: fmt(get(x))
    else:
        # non-plain formatters (json, templates) receive raw values
        def makefunc(get, fmt):
            return get
    funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
               if opts.get(op)]
    funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
    fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
                      if opts.get(op))

    def bad(x, y):
        raise error.Abort("%s: %s" % (x, y))

    m = scmutil.match(ctx, pats, opts, badfn=bad)

    follow = not opts.get('no_follow')
    diffopts = patch.difffeatureopts(ui, opts, section='annotate',
                                     whitespace=True)
    skiprevs = opts.get('skip')
    if skiprevs:
        skiprevs = scmutil.revrange(repo, skiprevs)

    for abs in ctx.walk(m):
        fctx = ctx[abs]
        rootfm.startitem()
        rootfm.data(abspath=abs, path=m.rel(abs))
        if not opts.get('text') and fctx.isbinary():
            rootfm.plain(_("%s: binary file\n")
                         % ((pats and m.rel(abs)) or abs))
            continue

        fm = rootfm.nested('lines')
        lines = fctx.annotate(follow=follow, linenumber=linenumber,
                              skiprevs=skiprevs, diffopts=diffopts)
        if not lines:
            fm.end()
            continue
        formats = []
        pieces = []

        # Compute each requested column for every line, right-aligning
        # plain-text columns to the widest value in the file.
        for f, sep in funcmap:
            l = [f(n) for n, dummy in lines]
            if fm.isplain():
                sizes = [encoding.colwidth(x) for x in l]
                ml = max(sizes)
                formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
            else:
                formats.append(['%s' for x in l])
            pieces.append(l)

        for f, p, l in zip(zip(*formats), zip(*pieces), lines):
            fm.startitem()
            fm.write(fields, "".join(f), *p)
            fm.write('line', ": %s", l[1])

        if not lines[-1][1].endswith('\n'):
            fm.plain('\n')
        fm.end()

    rootfm.end()
410 410
@command('archive',
    [('', 'no-decode', None, _('do not pass files through decoders')),
    ('p', 'prefix', '', _('directory prefix for files in archive'),
     _('PREFIX')),
    ('r', 'rev', '', _('revision to distribute'), _('REV')),
    ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... DEST'))
def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (to override, use -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``:   tar archive, uncompressed
    :``tbz2``:  tar archive, compressed using bzip2
    :``tgz``:   tar archive, compressed using gzip
    :``uzip``:  zip archive, uncompressed
    :``zip``:   zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    '''

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    if not ctx:
        raise error.Abort(_('no working directory: please specify a revision'))
    node = ctx.node()
    # expand format-string placeholders (e.g. %h) in the destination name
    dest = cmdutil.makefilename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise error.Abort(_('repository root cannot be destination'))

    archivekind = opts.get('type') or archival.guesskind(dest) or 'files'
    prefix = opts.get('prefix')

    # '-' means stream the archive to stdout
    if dest == '-':
        if archivekind == 'files':
            raise error.Abort(_('cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(repo, dest)
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'

    prefix = cmdutil.makefilename(repo, prefix, node)
    matchfn = scmutil.match(ctx, [], opts)
    archival.archive(repo, dest, node, archivekind,
                     not opts.get('no_decode'), matchfn, prefix,
                     subrepos=opts.get('subrepos'))
483 483
@command('backout',
    [('', 'merge', None, _('merge with old dirstate parent after backout')),
    ('', 'commit', None,
     _('commit if no conflicts were encountered (DEPRECATED)')),
    ('', 'no-commit', None, _('do not commit')),
    ('', 'parent', '',
     _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
    ('r', 'rev', '', _('revision to backout'), _('REV')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ] + mergetoolopts + walkopts + commitopts + commitopts2,
    _('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory. If no conflicts were encountered,
    it will be committed immediately.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically (unless --no-commit is specified).

    .. note::

       :hg:`backout` cannot be used to fix either an unwanted or
       incorrect merge.

    .. container:: verbose

      Examples:

      - Reverse the effect of the parent of the working directory.
        This backout will be committed immediately::

          hg backout -r .

      - Reverse the effect of previous bad revision 23::

          hg backout -r 23

      - Reverse the effect of previous bad revision 23 and
        leave changes uncommitted::

          hg backout -r 23 --no-commit
          hg commit -m "Backout revision 23"

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revert` for a way to restore files to the state
    of another revision.

    Returns 0 on success, 1 if nothing to backout or there are unresolved
    files.
    '''
    # Acquire the working-directory lock before the store lock (the standard
    # Mercurial lock order), do the work in _dobackout, and release in
    # reverse order.
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        return _dobackout(ui, repo, node, rev, **opts)
    finally:
        release(lock, wlock)
554 554
555 555 def _dobackout(ui, repo, node=None, rev=None, **opts):
# Worker for 'hg backout'; backout() above acquires wlock/lock before
# calling here.  Returns 0 on success, 1 when there are unresolved file
# merges or when the backout produced no changes.
556 556 opts = pycompat.byteskwargs(opts)
557 557 if opts.get('commit') and opts.get('no_commit'):
558 558 raise error.Abort(_("cannot use --commit with --no-commit"))
559 559 if opts.get('merge') and opts.get('no_commit'):
560 560 raise error.Abort(_("cannot use --merge with --no-commit"))
561 561
# The revision may arrive positionally (node) or via -r/--rev (rev);
# exactly one of the two must be given.
562 562 if rev and node:
563 563 raise error.Abort(_("please specify just one revision"))
564 564
565 565 if not rev:
566 566 rev = node
567 567
568 568 if not rev:
569 569 raise error.Abort(_("please specify a revision to backout"))
570 570
571 571 date = opts.get('date')
572 572 if date:
573 573 opts['date'] = util.parsedate(date)
574 574
575 575 cmdutil.checkunfinished(repo)
576 576 cmdutil.bailifchanged(repo)
577 577 node = scmutil.revsingle(repo, rev).node()
578 578
# A backout only makes sense for an ancestor of the working directory
# parent.
579 579 op1, op2 = repo.dirstate.parents()
580 580 if not repo.changelog.isancestor(node, op1):
581 581 raise error.Abort(_('cannot backout change that is not an ancestor'))
582 582
583 583 p1, p2 = repo.changelog.parents(node)
584 584 if p1 == nullid:
585 585 raise error.Abort(_('cannot backout a change with no parents'))
# Backing out a merge requires --parent to choose which of the two
# parents the backout is computed against.
586 586 if p2 != nullid:
587 587 if not opts.get('parent'):
588 588 raise error.Abort(_('cannot backout a merge changeset'))
589 589 p = repo.lookup(opts['parent'])
590 590 if p not in (p1, p2):
591 591 raise error.Abort(_('%s is not a parent of %s') %
592 592 (short(p), short(node)))
593 593 parent = p
594 594 else:
595 595 if opts.get('parent'):
596 596 raise error.Abort(_('cannot use --parent on non-merge changeset'))
597 597 parent = p1
598 598
599 599 # the backout should appear on the same branch
600 600 branch = repo.dirstate.branch()
601 601 bheads = repo.branchheads(branch)
602 602 rctx = scmutil.revsingle(repo, hex(parent))
# Default (no --merge) path when the backed-out revision is not the
# working-directory parent: reverse-merge the change into the working
# copy while keeping the original dirstate parents; the dirstateguard
# rolls the dirstate back if anything raises before close().
603 603 if not opts.get('merge') and op1 != node:
604 604 dsguard = dirstateguard.dirstateguard(repo, 'backout')
605 605 try:
606 606 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
607 607 'backout')
608 608 stats = mergemod.update(repo, parent, True, True, node, False)
609 609 repo.setparents(op1, op2)
610 610 dsguard.close()
611 611 hg._showstats(repo, stats)
612 612 if stats[3]:
613 613 repo.ui.status(_("use 'hg resolve' to retry unresolved "
614 614 "file merges\n"))
615 615 return 1
616 616 finally:
# Always clear the forced merge tool and release the guard.
617 617 ui.setconfig('ui', 'forcemerge', '', '')
618 618 lockmod.release(dsguard)
619 619 else:
620 620 hg.clean(repo, node, show_stats=False)
621 621 repo.dirstate.setbranch(branch)
622 622 cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
623 623
624 624 if opts.get('no_commit'):
625 625 msg = _("changeset %s backed out, "
626 626 "don't forget to commit.\n")
627 627 ui.status(msg % short(node))
628 628 return 0
629 629
# Commit the backout.  When no message was supplied, fall back to a
# default message and force the commit editor open.
630 630 def commitfunc(ui, repo, message, match, opts):
631 631 editform = 'backout'
632 632 e = cmdutil.getcommiteditor(editform=editform,
633 633 **pycompat.strkwargs(opts))
634 634 if not message:
635 635 # we don't translate commit messages
636 636 message = "Backed out changeset %s" % short(node)
637 637 e = cmdutil.getcommiteditor(edit=True, editform=editform)
638 638 return repo.commit(message, opts.get('user'), opts.get('date'),
639 639 match, editor=e)
640 640 newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
641 641 if not newnode:
642 642 ui.status(_("nothing changed\n"))
643 643 return 1
644 644 cmdutil.commitstatus(repo, newnode, branch, bheads)
645 645
646 646 def nice(node):
647 647 return '%d:%s' % (repo.changelog.rev(node), short(node))
648 648 ui.status(_('changeset %s backs out changeset %s\n') %
649 649 (nice(repo.changelog.tip()), nice(node)))
# --merge: update back to the old parent and merge with the freshly
# committed backout changeset (now at tip).
650 650 if opts.get('merge') and op1 != node:
651 651 hg.clean(repo, op1, show_stats=False)
652 652 ui.status(_('merging with changeset %s\n')
653 653 % nice(repo.changelog.tip()))
654 654 try:
655 655 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
656 656 'backout')
657 657 return hg.merge(repo, hex(repo.changelog.tip()))
658 658 finally:
659 659 ui.setconfig('ui', 'forcemerge', '', '')
660 660 return 0
661 661
662 662 @command('bisect',
663 663 [('r', 'reset', False, _('reset bisect state')),
664 664 ('g', 'good', False, _('mark changeset good')),
665 665 ('b', 'bad', False, _('mark changeset bad')),
666 666 ('s', 'skip', False, _('skip testing changeset')),
667 667 ('e', 'extend', False, _('extend the bisect range')),
668 668 ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
669 669 ('U', 'noupdate', False, _('do not update to target'))],
670 670 _("[-gbsr] [-U] [-c CMD] [REV]"))
671 671 def bisect(ui, repo, rev=None, extra=None, command=None,
672 672 reset=None, good=None, bad=None, skip=None, extend=None,
673 673 noupdate=None):
674 674 """subdivision search of changesets
675 675
676 676 This command helps to find changesets which introduce problems. To
677 677 use, mark the earliest changeset you know exhibits the problem as
678 678 bad, then mark the latest changeset which is free from the problem
679 679 as good. Bisect will update your working directory to a revision
680 680 for testing (unless the -U/--noupdate option is specified). Once
681 681 you have performed tests, mark the working directory as good or
682 682 bad, and bisect will either update to another candidate changeset
683 683 or announce that it has found the bad revision.
684 684
685 685 As a shortcut, you can also use the revision argument to mark a
686 686 revision as good or bad without checking it out first.
687 687
688 688 If you supply a command, it will be used for automatic bisection.
689 689 The environment variable HG_NODE will contain the ID of the
690 690 changeset being tested. The exit status of the command will be
691 691 used to mark revisions as good or bad: status 0 means good, 125
692 692 means to skip the revision, 127 (command not found) will abort the
693 693 bisection, and any other non-zero exit status means the revision
694 694 is bad.
695 695
696 696 .. container:: verbose
697 697
698 698 Some examples:
699 699
700 700 - start a bisection with known bad revision 34, and good revision 12::
701 701
702 702 hg bisect --bad 34
703 703 hg bisect --good 12
704 704
705 705 - advance the current bisection by marking current revision as good or
706 706 bad::
707 707
708 708 hg bisect --good
709 709 hg bisect --bad
710 710
711 711 - mark the current revision, or a known revision, to be skipped (e.g. if
712 712 that revision is not usable because of another issue)::
713 713
714 714 hg bisect --skip
715 715 hg bisect --skip 23
716 716
717 717 - skip all revisions that do not touch directories ``foo`` or ``bar``::
718 718
719 719 hg bisect --skip "!( file('path:foo') & file('path:bar') )"
720 720
721 721 - forget the current bisection::
722 722
723 723 hg bisect --reset
724 724
725 725 - use 'make && make tests' to automatically find the first broken
726 726 revision::
727 727
728 728 hg bisect --reset
729 729 hg bisect --bad 34
730 730 hg bisect --good 12
731 731 hg bisect --command "make && make tests"
732 732
733 733 - see all changesets whose states are already known in the current
734 734 bisection::
735 735
736 736 hg log -r "bisect(pruned)"
737 737
738 738 - see the changeset currently being bisected (especially useful
739 739 if running with -U/--noupdate)::
740 740
741 741 hg log -r "bisect(current)"
742 742
743 743 - see all changesets that took part in the current bisection::
744 744
745 745 hg log -r "bisect(range)"
746 746
747 747 - you can even get a nice graph::
748 748
749 749 hg log --graph -r "bisect(range)"
750 750
751 751 See :hg:`help revisions.bisect` for more about the `bisect()` predicate.
752 752
753 753 Returns 0 on success.
754 754 """
755 755 # backward compatibility
756 756 if rev in "good bad reset init".split():
757 757 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
758 758 cmd, rev, extra = rev, extra, None
759 759 if cmd == "good":
760 760 good = True
761 761 elif cmd == "bad":
762 762 bad = True
763 763 else:
764 764 reset = True
765 765 elif extra:
766 766 raise error.Abort(_('incompatible arguments'))
767 767
# At most one of the action flags may be given per invocation.
768 768 incompatibles = {
769 769 '--bad': bad,
770 770 '--command': bool(command),
771 771 '--extend': extend,
772 772 '--good': good,
773 773 '--reset': reset,
774 774 '--skip': skip,
775 775 }
776 776
777 777 enabled = [x for x in incompatibles if incompatibles[x]]
778 778
779 779 if len(enabled) > 1:
780 780 raise error.Abort(_('%s and %s are incompatible') %
781 781 tuple(sorted(enabled)[0:2]))
782 782
783 783 if reset:
784 784 hbisect.resetstate(repo)
785 785 return
786 786
787 787 state = hbisect.load_state(repo)
788 788
789 789 # update state
790 790 if good or bad or skip:
791 791 if rev:
792 792 nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
793 793 else:
794 794 nodes = [repo.lookup('.')]
795 795 if good:
796 796 state['good'] += nodes
797 797 elif bad:
798 798 state['bad'] += nodes
799 799 elif skip:
800 800 state['skip'] += nodes
801 801 hbisect.save_state(repo, state)
# Bisection cannot proceed until at least one good and one bad
# revision are known.
802 802 if not (state['good'] and state['bad']):
803 803 return
804 804
805 805 def mayupdate(repo, node, show_stats=True):
806 806 """common used update sequence"""
807 807 if noupdate:
808 808 return
809 809 cmdutil.checkunfinished(repo)
810 810 cmdutil.bailifchanged(repo)
811 811 return hg.clean(repo, node, show_stats=show_stats)
812 812
813 813 displayer = cmdutil.show_changeset(ui, repo, {})
814 814
# Automatic mode: repeatedly run the given command and feed its exit
# status back into the bisect state (0 = good, 125 = skip,
# 127/negative = abort, anything else = bad).
815 815 if command:
816 816 changesets = 1
817 817 if noupdate:
818 818 try:
819 819 node = state['current'][0]
820 820 except LookupError:
821 821 raise error.Abort(_('current bisect revision is unknown - '
822 822 'start a new bisect to fix'))
823 823 else:
824 824 node, p2 = repo.dirstate.parents()
825 825 if p2 != nullid:
826 826 raise error.Abort(_('current bisect revision is a merge'))
827 827 if rev:
828 828 node = repo[scmutil.revsingle(repo, rev, node)].node()
829 829 try:
830 830 while changesets:
831 831 # update state
832 832 state['current'] = [node]
833 833 hbisect.save_state(repo, state)
834 834 status = ui.system(command, environ={'HG_NODE': hex(node)},
835 835 blockedtag='bisect_check')
836 836 if status == 125:
837 837 transition = "skip"
838 838 elif status == 0:
839 839 transition = "good"
840 840 # status < 0 means process was killed
841 841 elif status == 127:
842 842 raise error.Abort(_("failed to execute %s") % command)
843 843 elif status < 0:
844 844 raise error.Abort(_("%s killed") % command)
845 845 else:
846 846 transition = "bad"
847 847 state[transition].append(node)
848 848 ctx = repo[node]
849 849 ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
850 850 hbisect.checkstate(state)
851 851 # bisect
852 852 nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
853 853 # update to next check
854 854 node = nodes[0]
855 855 mayupdate(repo, node, show_stats=False)
856 856 finally:
# Persist the last tested revision even if the loop aborted.
857 857 state['current'] = [node]
858 858 hbisect.save_state(repo, state)
859 859 hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
860 860 return
861 861
862 862 hbisect.checkstate(state)
863 863
864 864 # actually bisect
865 865 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
866 866 if extend:
867 867 if not changesets:
868 868 extendnode = hbisect.extendrange(repo, state, nodes, good)
869 869 if extendnode is not None:
870 870 ui.write(_("Extending search to changeset %d:%s\n")
871 871 % (extendnode.rev(), extendnode))
872 872 state['current'] = [extendnode.node()]
873 873 hbisect.save_state(repo, state)
874 874 return mayupdate(repo, extendnode.node())
875 875 raise error.Abort(_("nothing to extend"))
876 876
877 877 if changesets == 0:
878 878 hbisect.printresult(ui, repo, state, displayer, nodes, good)
879 879 else:
880 880 assert len(nodes) == 1 # only a single node can be tested next
881 881 node = nodes[0]
882 882 # compute the approximate number of remaining tests
883 883 tests, size = 0, 2
884 884 while size <= changesets:
885 885 tests, size = tests + 1, size * 2
886 886 rev = repo.changelog.rev(node)
887 887 ui.write(_("Testing changeset %d:%s "
888 888 "(%d changesets remaining, ~%d tests)\n")
889 889 % (rev, short(node), changesets, tests))
890 890 state['current'] = [node]
891 891 hbisect.save_state(repo, state)
892 892 return mayupdate(repo, node)
893 893
894 894 @command('bookmarks|bookmark',
895 895 [('f', 'force', False, _('force')),
896 896 ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
897 897 ('d', 'delete', False, _('delete a given bookmark')),
898 898 ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
899 899 ('i', 'inactive', False, _('mark a bookmark inactive')),
900 900 ] + formatteropts,
901 901 _('hg bookmarks [OPTIONS]... [NAME]...'))
902 902 def bookmark(ui, repo, *names, **opts):
903 903 '''create a new bookmark or list existing bookmarks
904 904
905 905 Bookmarks are labels on changesets to help track lines of development.
906 906 Bookmarks are unversioned and can be moved, renamed and deleted.
907 907 Deleting or moving a bookmark has no effect on the associated changesets.
908 908
909 909 Creating or updating to a bookmark causes it to be marked as 'active'.
910 910 The active bookmark is indicated with a '*'.
911 911 When a commit is made, the active bookmark will advance to the new commit.
912 912 A plain :hg:`update` will also advance an active bookmark, if possible.
913 913 Updating away from a bookmark will cause it to be deactivated.
914 914
915 915 Bookmarks can be pushed and pulled between repositories (see
916 916 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
917 917 diverged, a new 'divergent bookmark' of the form 'name@path' will
918 918 be created. Using :hg:`merge` will resolve the divergence.
919 919
920 920 A bookmark named '@' has the special property that :hg:`clone` will
921 921 check it out by default if it exists.
922 922
923 923 .. container:: verbose
924 924
925 925 Examples:
926 926
927 927 - create an active bookmark for a new line of development::
928 928
929 929 hg book new-feature
930 930
931 931 - create an inactive bookmark as a place marker::
932 932
933 933 hg book -i reviewed
934 934
935 935 - create an inactive bookmark on another changeset::
936 936
937 937 hg book -r .^ tested
938 938
939 939 - rename bookmark turkey to dinner::
940 940
941 941 hg book -m turkey dinner
942 942
943 943 - move the '@' bookmark from another branch::
944 944
945 945 hg book -f @
946 946 '''
947 947 opts = pycompat.byteskwargs(opts)
948 948 force = opts.get('force')
949 949 rev = opts.get('rev')
950 950 delete = opts.get('delete')
951 951 rename = opts.get('rename')
952 952 inactive = opts.get('inactive')
953 953
# Reject mutually exclusive option combinations up front.
954 954 if delete and rename:
955 955 raise error.Abort(_("--delete and --rename are incompatible"))
956 956 if delete and rev:
957 957 raise error.Abort(_("--rev is incompatible with --delete"))
958 958 if rename and rev:
959 959 raise error.Abort(_("--rev is incompatible with --rename"))
960 960 if not names and (delete or rev):
961 961 raise error.Abort(_("bookmark name required"))
962 962
# Any state-changing action needs both locks and a transaction;
# a bare 'hg bookmarks' just lists.
963 963 if delete or rename or names or inactive:
964 964 with repo.wlock(), repo.lock(), repo.transaction('bookmark') as tr:
965 965 if delete:
966 966 bookmarks.delete(repo, tr, names)
967 967 elif rename:
968 968 if not names:
969 969 raise error.Abort(_("new bookmark name required"))
970 970 elif len(names) > 1:
971 971 raise error.Abort(_("only one new bookmark name allowed"))
972 972 bookmarks.rename(repo, tr, rename, names[0], force, inactive)
973 973 elif names:
974 974 bookmarks.addbookmarks(repo, tr, names, rev, force, inactive)
975 975 elif inactive:
# -i with no names: deactivate the current bookmark, if any.
976 976 if len(repo._bookmarks) == 0:
977 977 ui.status(_("no bookmarks set\n"))
978 978 elif not repo._activebookmark:
979 979 ui.status(_("no active bookmark\n"))
980 980 else:
981 981 bookmarks.deactivate(repo)
982 982 else: # show bookmarks
983 983 bookmarks.printbookmarks(ui, repo, **opts)
984 984
985 985 @command('branch',
986 986 [('f', 'force', None,
987 987 _('set branch name even if it shadows an existing branch')),
988 988 ('C', 'clean', None, _('reset branch name to parent branch name'))],
989 989 _('[-fC] [NAME]'))
990 990 def branch(ui, repo, label=None, **opts):
991 991 """set or show the current branch name
992 992
993 993 .. note::
994 994
995 995 Branch names are permanent and global. Use :hg:`bookmark` to create a
996 996 light-weight bookmark instead. See :hg:`help glossary` for more
997 997 information about named branches and bookmarks.
998 998
999 999 With no argument, show the current branch name. With one argument,
1000 1000 set the working directory branch name (the branch will not exist
1001 1001 in the repository until the next commit). Standard practice
1002 1002 recommends that primary development take place on the 'default'
1003 1003 branch.
1004 1004
1005 1005 Unless -f/--force is specified, branch will not let you set a
1006 1006 branch name that already exists.
1007 1007
1008 1008 Use -C/--clean to reset the working directory branch to that of
1009 1009 the parent of the working directory, negating a previous branch
1010 1010 change.
1011 1011
1012 1012 Use the command :hg:`update` to switch to an existing branch. Use
1013 1013 :hg:`commit --close-branch` to mark this branch head as closed.
1014 1014 When all heads of a branch are closed, the branch will be
1015 1015 considered closed.
1016 1016
1017 1017 Returns 0 on success.
1018 1018 """
1019 1019 opts = pycompat.byteskwargs(opts)
1020 1020 if label:
1021 1021 label = label.strip()
1022 1022
# With no argument and no -C, just print the current branch name.
1023 1023 if not opts.get('clean') and not label:
1024 1024 ui.write("%s\n" % repo.dirstate.branch())
1025 1025 return
1026 1026
1027 1027 with repo.wlock():
1028 1028 if opts.get('clean'):
# -C: revert the working-directory branch to the first parent's.
1029 1029 label = repo[None].p1().branch()
1030 1030 repo.dirstate.setbranch(label)
1031 1031 ui.status(_('reset working directory to branch %s\n') % label)
1032 1032 elif label:
# Without --force, refuse to shadow an existing branch unless we
# are already on it (i.e. it is a parent's branch).
1033 1033 if not opts.get('force') and label in repo.branchmap():
1034 1034 if label not in [p.branch() for p in repo[None].parents()]:
1035 1035 raise error.Abort(_('a branch of the same name already'
1036 1036 ' exists'),
1037 1037 # i18n: "it" refers to an existing branch
1038 1038 hint=_("use 'hg update' to switch to it"))
1039 1039 scmutil.checknewlabel(repo, label, 'branch')
1040 1040 repo.dirstate.setbranch(label)
1041 1041 ui.status(_('marked working directory as branch %s\n') % label)
1042 1042
1043 1043 # find any open named branches aside from default
1044 1044 others = [n for n, h, t, c in repo.branchmap().iterbranches()
1045 1045 if n != "default" and not c]
1046 1046 if not others:
1047 1047 ui.status(_('(branches are permanent and global, '
1048 1048 'did you want a bookmark?)\n'))
1049 1049
1050 1050 @command('branches',
1051 1051 [('a', 'active', False,
1052 1052 _('show only branches that have unmerged heads (DEPRECATED)')),
1053 1053 ('c', 'closed', False, _('show normal and closed branches')),
1054 1054 ] + formatteropts,
1055 1055 _('[-c]'))
1056 1056 def branches(ui, repo, active=False, closed=False, **opts):
1057 1057 """list repository named branches
1058 1058
1059 1059 List the repository's named branches, indicating which ones are
1060 1060 inactive. If -c/--closed is specified, also list branches which have
1061 1061 been marked closed (see :hg:`commit --close-branch`).
1062 1062
1063 1063 Use the command :hg:`update` to switch to an existing branch.
1064 1064
1065 1065 Returns 0.
1066 1066 """
1067 1067
1068 1068 opts = pycompat.byteskwargs(opts)
1069 1069 ui.pager('branches')
1070 1070 fm = ui.formatter('branches', opts)
1071 1071 hexfunc = fm.hexfunc
1072 1072
# A branch is "active" when it is open and at least one of its heads
# is a repository head.
1073 1073 allheads = set(repo.heads())
1074 1074 branches = []
1075 1075 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1076 1076 isactive = not isclosed and bool(set(heads) & allheads)
1077 1077 branches.append((tag, repo[tip], isactive, not isclosed))
# Sort active branches first, then by descending tip revision.
1078 1078 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1079 1079 reverse=True)
1080 1080
1081 1081 for tag, ctx, isactive, isopen in branches:
1082 1082 if active and not isactive:
1083 1083 continue
# Pick the display label and the parenthesized status suffix.
1084 1084 if isactive:
1085 1085 label = 'branches.active'
1086 1086 notice = ''
1087 1087 elif not isopen:
1088 1088 if not closed:
1089 1089 continue
1090 1090 label = 'branches.closed'
1091 1091 notice = _(' (closed)')
1092 1092 else:
1093 1093 label = 'branches.inactive'
1094 1094 notice = _(' (inactive)')
1095 1095 current = (tag == repo.dirstate.branch())
1096 1096 if current:
1097 1097 label = 'branches.current'
1098 1098
1099 1099 fm.startitem()
1100 1100 fm.write('branch', '%s', tag, label=label)
1101 1101 rev = ctx.rev()
# Pad so the rev:node column lines up across rows.
1102 1102 padsize = max(31 - len(str(rev)) - encoding.colwidth(tag), 0)
1103 1103 fmt = ' ' * padsize + ' %d:%s'
1104 1104 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1105 1105 label='log.changeset changeset.%s' % ctx.phasestr())
1106 1106 fm.context(ctx=ctx)
1107 1107 fm.data(active=isactive, closed=not isopen, current=current)
1108 1108 if not ui.quiet:
1109 1109 fm.plain(notice)
1110 1110 fm.plain('\n')
1111 1111 fm.end()
1112 1112
1113 1113 @command('bundle',
1114 1114 [('f', 'force', None, _('run even when the destination is unrelated')),
1115 1115 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1116 1116 _('REV')),
1117 1117 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1118 1118 _('BRANCH')),
1119 1119 ('', 'base', [],
1120 1120 _('a base changeset assumed to be available at the destination'),
1121 1121 _('REV')),
1122 1122 ('a', 'all', None, _('bundle all changesets in the repository')),
1123 1123 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1124 1124 ] + remoteopts,
1125 1125 _('[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1126 1126 def bundle(ui, repo, fname, dest=None, **opts):
1127 1127 """create a bundle file
1128 1128
1129 1129 Generate a bundle file containing data to be added to a repository.
1130 1130
1131 1131 To create a bundle containing all changesets, use -a/--all
1132 1132 (or --base null). Otherwise, hg assumes the destination will have
1133 1133 all the nodes you specify with --base parameters. Otherwise, hg
1134 1134 will assume the repository has all the nodes in destination, or
1135 1135 default-push/default if no destination is specified.
1136 1136
1137 1137 You can change bundle format with the -t/--type option. See
1138 1138 :hg:`help bundlespec` for documentation on this format. By default,
1139 1139 the most appropriate format is used and compression defaults to
1140 1140 bzip2.
1141 1141
1142 1142 The bundle file can then be transferred using conventional means
1143 1143 and applied to another repository with the unbundle or pull
1144 1144 command. This is useful when direct push and pull are not
1145 1145 available or when exporting an entire repository is undesirable.
1146 1146
1147 1147 Applying bundles preserves all changeset contents including
1148 1148 permissions, copy/rename information, and revision history.
1149 1149
1150 1150 Returns 0 on success, 1 if no changes found.
1151 1151 """
1152 1152 opts = pycompat.byteskwargs(opts)
1153 1153 revs = None
1154 1154 if 'rev' in opts:
1155 1155 revstrings = opts['rev']
1156 1156 revs = scmutil.revrange(repo, revstrings)
1157 1157 if revstrings and not revs:
1158 1158 raise error.Abort(_('no commits to bundle'))
1159 1159
# Parse the -t/--type bundlespec into compression + changegroup version.
1160 1160 bundletype = opts.get('type', 'bzip2').lower()
1161 1161 try:
1162 1162 bcompression, cgversion, params = exchange.parsebundlespec(
1163 1163 repo, bundletype, strict=False)
1164 1164 except error.UnsupportedBundleSpecification as e:
1165 1165 raise error.Abort(str(e),
1166 1166 hint=_("see 'hg help bundlespec' for supported "
1167 1167 "values for --type"))
1168 1168
1169 1169 # Packed bundles are a pseudo bundle format for now.
1170 1170 if cgversion == 's1':
1171 1171 raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
1172 1172 hint=_("use 'hg debugcreatestreamclonebundle'"))
1173 1173
# --all is shorthand for --base null (everything is outgoing).
1174 1174 if opts.get('all'):
1175 1175 if dest:
1176 1176 raise error.Abort(_("--all is incompatible with specifying "
1177 1177 "a destination"))
1178 1178 if opts.get('base'):
1179 1179 ui.warn(_("ignoring --base because --all was specified\n"))
1180 1180 base = ['null']
1181 1181 else:
1182 1182 base = scmutil.revrange(repo, opts.get('base'))
1183 1183 if cgversion not in changegroup.supportedoutgoingversions(repo):
1184 1184 raise error.Abort(_("repository does not support bundle version %s") %
1185 1185 cgversion)
1186 1186
# With --base the outgoing set is computed locally; otherwise it is
# discovered against the destination peer.
1187 1187 if base:
1188 1188 if dest:
1189 1189 raise error.Abort(_("--base is incompatible with specifying "
1190 1190 "a destination"))
1191 1191 common = [repo.lookup(rev) for rev in base]
1192 1192 heads = revs and map(repo.lookup, revs) or None
1193 1193 outgoing = discovery.outgoing(repo, common, heads)
1194 1194 else:
1195 1195 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1196 1196 dest, branches = hg.parseurl(dest, opts.get('branch'))
1197 1197 other = hg.peer(repo, opts, dest)
1198 1198 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1199 1199 heads = revs and map(repo.lookup, revs) or revs
1200 1200 outgoing = discovery.findcommonoutgoing(repo, other,
1201 1201 onlyheads=heads,
1202 1202 force=opts.get('force'),
1203 1203 portable=True)
1204 1204
1205 1205 if not outgoing.missing:
1206 1206 scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
1207 1207 return 1
1208 1208
# Changegroup version 01 is carried by the bundle1 (HG10) container;
# versions 02/03 require the bundle2 (HG20) container.
1209 1209 if cgversion == '01': #bundle1
1210 1210 if bcompression is None:
1211 1211 bcompression = 'UN'
1212 1212 bversion = 'HG10' + bcompression
1213 1213 bcompression = None
1214 1214 elif cgversion in ('02', '03'):
1215 1215 bversion = 'HG20'
1216 1216 else:
1217 1217 raise error.ProgrammingError(
1218 1218 'bundle: unexpected changegroup version %s' % cgversion)
1219 1219
1220 1220 # TODO compression options should be derived from bundlespec parsing.
1221 1221 # This is a temporary hack to allow adjusting bundle compression
1222 1222 # level without a) formalizing the bundlespec changes to declare it
1223 1223 # b) introducing a command flag.
1224 1224 compopts = {}
1225 1225 complevel = ui.configint('experimental', 'bundlecomplevel')
1226 1226 if complevel is not None:
1227 1227 compopts['level'] = complevel
1228 1228
1229 1229
# Experimental knobs: optionally embed obsolescence markers and phase
# information ('experimental.bundle-phases') in the bundle so they are
# restored on unbundle.
1230 1230 contentopts = {'cg.version': cgversion}
1231 1231 if repo.ui.configbool('experimental', 'evolution.bundle-obsmarker', False):
1232 1232 contentopts['obsolescence'] = True
1233 if repo.ui.configbool('experimental', 'bundle-phases', False):
1234 contentopts['phases'] = True
1233 1235 bundle2.writenewbundle(ui, repo, 'bundle', fname, bversion, outgoing,
1234 1236 contentopts, compression=bcompression,
1235 1237 compopts=compopts)
1236 1238
1237 1239 @command('cat',
1238 1240 [('o', 'output', '',
1239 1241 _('print output to file with formatted name'), _('FORMAT')),
1240 1242 ('r', 'rev', '', _('print the given revision'), _('REV')),
1241 1243 ('', 'decode', None, _('apply any matching decode filter')),
1242 1244 ] + walkopts + formatteropts,
1243 1245 _('[OPTION]... FILE...'),
1244 1246 inferrepo=True)
1245 1247 def cat(ui, repo, file1, *pats, **opts):
1246 1248 """output the current or given revision of files
1247 1249
1248 1250 Print the specified files as they were at the given revision. If
1249 1251 no revision is given, the parent of the working directory is used.
1250 1252
1251 1253 Output may be to a file, in which case the name of the file is
1252 1254 given using a format string. The formatting rules as follows:
1253 1255
1254 1256 :``%%``: literal "%" character
1255 1257 :``%s``: basename of file being printed
1256 1258 :``%d``: dirname of file being printed, or '.' if in repository root
1257 1259 :``%p``: root-relative path name of file being printed
1258 1260 :``%H``: changeset hash (40 hexadecimal digits)
1259 1261 :``%R``: changeset revision number
1260 1262 :``%h``: short-form changeset hash (12 hexadecimal digits)
1261 1263 :``%r``: zero-padded changeset revision number
1262 1264 :``%b``: basename of the exporting repository
1263 1265
1264 1266 Returns 0 on success.
1265 1267 """
1266 1268 ctx = scmutil.revsingle(repo, opts.get('rev'))
1267 1269 m = scmutil.match(ctx, (file1,) + pats, opts)
1268 1270 fntemplate = opts.pop('output', '')
# An --output value naming stdio (presumably '-') is treated the same
# as no template at all, i.e. write to stdout.
1269 1271 if cmdutil.isstdiofilename(fntemplate):
1270 1272 fntemplate = ''
1271 1273
# With a filename template, suppress formatter output (data goes to
# the generated files); otherwise page and format to stdout.
1272 1274 if fntemplate:
1273 1275 fm = formatter.nullformatter(ui, 'cat')
1274 1276 else:
1275 1277 ui.pager('cat')
1276 1278 fm = ui.formatter('cat', opts)
1277 1279 with fm:
1278 1280 return cmdutil.cat(ui, repo, ctx, m, fm, fntemplate, '', **opts)
1279 1281
1280 1282 @command('^clone',
1281 1283 [('U', 'noupdate', None, _('the clone will include an empty working '
1282 1284 'directory (only a repository)')),
1283 1285 ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
1284 1286 _('REV')),
1285 1287 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1286 1288 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1287 1289 ('', 'pull', None, _('use pull protocol to copy metadata')),
1288 1290 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1289 1291 ] + remoteopts,
1290 1292 _('[OPTION]... SOURCE [DEST]'),
1291 1293 norepo=True)
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    ``.hg/hgrc`` file, as the default to be used for future pulls.

    Only local paths and ``ssh://`` URLs are supported as
    destinations. For ``ssh://`` destinations, no working directory or
    ``.hg/hgrc`` will be created on the remote side.

    If the source repository has a bookmark called '@' set, that
    revision will be checked out in the new repository by default.

    To check out a particular version, use -u/--update, or
    -U/--noupdate to create a clone with no working directory.

    To pull only a subset of changesets, specify one or more revisions
    identifiers with -r/--rev or branches with -b/--branch. The
    resulting clone will contain only the specified changesets and
    their ancestors. These options (or 'clone src#rev dest') imply
    --pull, even for local source repositories.

    .. note::

       Specifying a tag will include the tagged changeset but not the
       changeset containing the tag.

    .. container:: verbose

      For efficiency, hardlinks are used for cloning whenever the
      source and destination are on the same filesystem (note this
      applies only to the repository data, not to the working
      directory). Some filesystems, such as AFS, implement hardlinking
      incorrectly, but do not report errors. In these cases, use the
      --pull option to avoid hardlinking.

      In some cases, you can clone repositories and the working
      directory using full hardlinks with ::

        $ cp -al REPO REPOCLONE

      This is the fastest way to clone, but it is not always safe. The
      operation is not atomic (making sure REPO is not modified during
      the operation is up to you) and you have to make sure your
      editor breaks hardlinks (Emacs and most Linux Kernel tools do
      so). Also, this is not compatible with certain extensions that
      place their metadata under the .hg directory, such as mq.

      Mercurial will update the working directory to the first applicable
      revision from this list:

      a) null if -U or the source repository has no changesets
      b) if -u . and the source repository is local, the first parent of
         the source repository's working directory
      c) the changeset specified with -u (if a branch name, this means the
         latest head of that branch)
      d) the changeset specified with -r
      e) the tipmost head specified with -b
      f) the tipmost head specified with the url#branch source syntax
      g) the revision marked with the '@' bookmark, if present
      h) the tipmost head of the default branch
      i) tip

      When cloning from servers that support it, Mercurial may fetch
      pre-generated data from a server-advertised URL. When this is done,
      hooks operating on incoming changesets and changegroups may fire twice,
      once for the bundle fetched from the URL and another for any additional
      data not fetched from this URL. In addition, if an error occurs, the
      repository may be rolled back to a partial clone. This behavior may
      change in future releases. See :hg:`help -e clonebundles` for more.

      Examples:

      - clone a remote repository to a new directory named hg/::

          hg clone https://www.mercurial-scm.org/repo/hg/

      - create a lightweight local clone::

          hg clone project/ project-feature/

      - clone from an absolute path on an ssh server (note double-slash)::

          hg clone ssh://user@server//home/projects/alpha/

      - do a high-speed clone over a LAN while checking out a
        specified version::

          hg clone --uncompressed http://server/repo -u 1.5

      - create a repository without changesets after a particular revision::

          hg clone -r 04e544 experimental/ good/

      - clone (and track) a particular named branch::

          hg clone https://www.mercurial-scm.org/repo/hg/#stable

    See :hg:`help urls` for details on specifying URLs.

    Returns 0 on success.
    """
    # **opts arrives with str keys; normalize to bytes keys for internal use.
    opts = pycompat.byteskwargs(opts)
    # -U/--noupdate and -u/--updaterev are mutually exclusive by design.
    if opts.get('noupdate') and opts.get('updaterev'):
        raise error.Abort(_("cannot specify both --noupdate and --updaterev"))

    r = hg.clone(ui, opts, source, dest,
                 pull=opts.get('pull'),
                 stream=opts.get('uncompressed'),
                 rev=opts.get('rev'),
                 # default is to update the working directory unless -U given
                 update=opts.get('updaterev') or not opts.get('noupdate'),
                 branch=opts.get('branch'),
                 shareopts=opts.get('shareopts'))

    # NOTE(review): this maps a non-None result from hg.clone to exit status 0
    # (falsy) and None to 1 -- presumably hg.clone returns None on failure;
    # confirm against hg.clone's contract.
    return r is None
1412 1414
@command('^commit|ci',
    [('A', 'addremove', None,
     _('mark new/missing files as added/removed before committing')),
    ('', 'close-branch', None,
     _('mark a branch head as closed')),
    ('', 'amend', None, _('amend the parent of the working directory')),
    ('s', 'secret', None, _('use the secret phase for committing')),
    ('e', 'edit', None, _('invoke editor on commit messages')),
    ('i', 'interactive', None, _('use interactive mode')),
    ] + walkopts + commitopts + commitopts2 + subrepoopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository. Unlike a
    centralized SCM, this operation is a local operation. See
    :hg:`push` for a way to actively distribute your changes.

    If a list of files is omitted, all changes reported by :hg:`status`
    will be committed.

    If you are committing the result of a merge, do not provide any
    filenames or -I/-X filters.

    If no commit message is specified, Mercurial starts your
    configured editor where you can enter a message. In case your
    commit fails, you will find a backup of your message in
    ``.hg/last-message.txt``.

    The --close-branch flag can be used to mark the current branch
    head closed. When all heads of a branch are closed, the branch
    will be considered closed and no longer listed.

    The --amend flag can be used to amend the parent of the
    working directory with a new commit that contains the changes
    in the parent in addition to those currently reported by :hg:`status`,
    if there are any. The old commit is stored in a backup bundle in
    ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
    on how to restore it).

    Message, user and date are taken from the amended commit unless
    specified. When a message isn't specified on the command line,
    the editor will open with the message of the amended commit.

    It is not possible to amend public changesets (see :hg:`help phases`)
    or changesets that have children.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing changed.

    .. container:: verbose

      Examples:

      - commit all files ending in .py::

          hg commit --include "set:**.py"

      - commit all non-binary files::

          hg commit --exclude "set:binary()"

      - amend the current commit and set the date to now::

          hg commit --amend --date now
    """
    # Acquire the working-directory lock before the store lock (the order
    # used throughout this file), do the real work in _docommit, and release
    # both locks even if the commit raises.
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        return _docommit(ui, repo, *pats, **opts)
    finally:
        # release() tolerates None entries, so a failure acquiring the
        # second lock still releases the first.
        release(lock, wlock)
1488 1490
def _docommit(ui, repo, *pats, **opts):
    """Locked implementation of :hg:`commit`.

    Handles the interactive, amend and plain-commit flavors; returns 0 on
    success and 1 when nothing changed (the exit codes documented by the
    commit command).
    """
    # Interactive mode delegates entirely to the record machinery. Note the
    # r'' keys: **opts still has native-str keys at this point (byteskwargs
    # conversion only happens below, on the non-interactive path).
    if opts.get(r'interactive'):
        opts.pop(r'interactive')
        ret = cmdutil.dorecord(ui, repo, commit, None, False,
                               cmdutil.recordfilter, *pats,
                               **opts)
        # ret can be 0 (no changes to record) or the value returned by
        # commit(), 1 if nothing changed or None on success.
        return 1 if ret == 0 else ret

    opts = pycompat.byteskwargs(opts)
    if opts.get('subrepos'):
        if opts.get('amend'):
            raise error.Abort(_('cannot amend with --subrepos'))
        # Let --subrepos on the command line override config setting.
        ui.setconfig('ui', 'commitsubrepos', True, 'commit')

    cmdutil.checkunfinished(repo, commit=True)

    branch = repo[None].branch()
    bheads = repo.branchheads(branch)

    extra = {}
    if opts.get('close_branch'):
        extra['close'] = 1

        # Closing only makes sense on a branch head (or, when amending, when
        # one of the working directory's grandparents is on this branch).
        if not bheads:
            raise error.Abort(_('can only close branch heads'))
        elif opts.get('amend'):
            if repo[None].parents()[0].p1().branch() != branch and \
                    repo[None].parents()[0].p2().branch() != branch:
                raise error.Abort(_('can only close branch heads'))

    if opts.get('amend'):
        if ui.configbool('ui', 'commitsubrepos'):
            raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))

        old = repo['.']
        if not old.mutable():
            raise error.Abort(_('cannot amend public changesets'))
        if len(repo[None].parents()) > 1:
            raise error.Abort(_('cannot amend while merging'))
        allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
        if not allowunstable and old.children():
            raise error.Abort(_('cannot amend changeset with children'))

        # Currently histedit gets confused if an amend happens while histedit
        # is in progress. Since we have a checkunfinished command, we are
        # temporarily honoring it.
        #
        # Note: eventually this guard will be removed. Please do not expect
        # this behavior to remain.
        if not obsolete.isenabled(repo, obsolete.createmarkersopt):
            cmdutil.checkunfinished(repo)

        # commitfunc is used only for temporary amend commit by cmdutil.amend
        def commitfunc(ui, repo, message, match, opts):
            # user/date fall back to the amended changeset's values
            return repo.commit(message,
                               opts.get('user') or old.user(),
                               opts.get('date') or old.date(),
                               match,
                               extra=extra)

        node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
        if node == old.node():
            ui.status(_("nothing changed\n"))
            return 1
    else:
        def commitfunc(ui, repo, message, match, opts):
            # --secret is implemented as a scoped phases.new-commit override,
            # applied to both the repo's base ui and the command ui.
            overrides = {}
            if opts.get('secret'):
                overrides[('phases', 'new-commit')] = 'secret'

            baseui = repo.baseui
            with baseui.configoverride(overrides, 'commit'):
                with ui.configoverride(overrides, 'commit'):
                    editform = cmdutil.mergeeditform(repo[None],
                                                     'commit.normal')
                    editor = cmdutil.getcommiteditor(
                        editform=editform, **pycompat.strkwargs(opts))
                    return repo.commit(message,
                                       opts.get('user'),
                                       opts.get('date'),
                                       match,
                                       editor=editor,
                                       extra=extra)

        node = cmdutil.commit(ui, repo, commitfunc, pats, opts)

    if not node:
        # Nothing committed; explain why, mentioning missing (deleted but
        # still tracked) files when that is the likely cause.
        stat = cmdutil.postcommitstatus(repo, pats, opts)
        if stat[3]:
            ui.status(_("nothing changed (%d missing files, see "
                        "'hg status')\n") % len(stat[3]))
        else:
            ui.status(_("nothing changed\n"))
        return 1

    cmdutil.commitstatus(repo, node, branch, bheads, opts)
1588 1590
@command('config|showconfig|debugconfig',
    [('u', 'untrusted', None, _('show untrusted configuration options')),
     ('e', 'edit', None, _('edit user config')),
     ('l', 'local', None, _('edit repository config')),
     ('g', 'global', None, _('edit global config'))] + formatteropts,
    _('[-u] [NAME]...'),
    optionalrepo=True)
def config(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no arguments, print names and values of all config items.

    With one argument of the form section.name, print just the value
    of that config item.

    With multiple arguments, print names and values of all config
    items with matching section names.

    With --edit, start an editor on the user-level config file. With
    --global, edit the system-wide config file. With --local, edit the
    repository-level config file.

    With --debug, the source (filename and line number) is printed
    for each config item.

    See :hg:`help config` for more information about config files.

    Returns 0 on success, 1 if NAME does not exist.

    """

    opts = pycompat.byteskwargs(opts)
    if opts.get('edit') or opts.get('local') or opts.get('global'):
        if opts.get('local') and opts.get('global'):
            raise error.Abort(_("can't use --local and --global together"))

        # Pick the candidate config file(s) for the requested scope.
        if opts.get('local'):
            if not repo:
                raise error.Abort(_("can't use --local outside a repository"))
            paths = [repo.vfs.join('hgrc')]
        elif opts.get('global'):
            paths = rcutil.systemrcpath()
        else:
            paths = rcutil.userrcpath()

        # Edit the first existing file; if none exists yet, seed the first
        # candidate with a sample config for this scope.
        for f in paths:
            if os.path.exists(f):
                break
        else:
            if opts.get('global'):
                samplehgrc = uimod.samplehgrcs['global']
            elif opts.get('local'):
                samplehgrc = uimod.samplehgrcs['local']
            else:
                samplehgrc = uimod.samplehgrcs['user']

            f = paths[0]
            # Use a context manager so the file handle is closed even if the
            # write fails (the previous open/write/close leaked it on error).
            with open(f, "w") as fp:
                fp.write(samplehgrc)

        editor = ui.geteditor()
        ui.system("%s \"%s\"" % (editor, f),
                  onerr=error.Abort, errprefix=_("edit failed"),
                  blockedtag='config_edit')
        return
    ui.pager('config')
    fm = ui.formatter('config', opts)
    # With --debug, report which files/items contributed configuration.
    for t, f in rcutil.rccomponents():
        if t == 'path':
            ui.debug('read config from: %s\n' % f)
        elif t == 'items':
            for section, name, value, source in f:
                ui.debug('set config by: %s\n' % source)
        else:
            raise error.ProgrammingError('unknown rctype: %s' % t)
    untrusted = bool(opts.get('untrusted'))
    if values:
        # Arguments are either section names or single section.name items;
        # at most one item (and no mixing with sections) is allowed.
        sections = [v for v in values if '.' not in v]
        items = [v for v in values if '.' in v]
        if len(items) > 1 or items and sections:
            raise error.Abort(_('only one config item permitted'))
    matched = False
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        source = ui.configsource(section, name, untrusted)
        value = pycompat.bytestr(value)
        if fm.isplain():
            # Plain output: show embedded newlines escaped on one line.
            source = source or 'none'
            value = value.replace('\n', '\\n')
        entryname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    # Section match: print "name=value" for every item in it.
                    fm.startitem()
                    fm.condwrite(ui.debugflag, 'source', '%s: ', source)
                    fm.write('name value', '%s=%s\n', entryname, value)
                    matched = True
                elif v == entryname:
                    # Exact item match: print just the value.
                    fm.startitem()
                    fm.condwrite(ui.debugflag, 'source', '%s: ', source)
                    fm.write('value', '%s\n', value)
                    fm.data(name=entryname)
                    matched = True
        else:
            fm.startitem()
            fm.condwrite(ui.debugflag, 'source', '%s: ', source)
            fm.write('name value', '%s=%s\n', entryname, value)
            matched = True
    fm.end()
    # Exit 1 when a requested NAME matched nothing, as documented.
    if matched:
        return 0
    return 1
1701 1703
@command('copy|cp',
    [('A', 'after', None, _('record a copy that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [SOURCE]... DEST'))
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    the source must be a single file.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect with the next commit. To undo a copy
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    opts = pycompat.byteskwargs(opts)  # normalize **kwargs keys to bytes
    # All real work happens in cmdutil.copy, under the working-dir lock.
    # NOTE(review): wlock(False) presumably means "do not wait" for the
    # lock -- confirm against localrepo.wlock's signature.
    with repo.wlock(False):
        return cmdutil.copy(ui, repo, pats, opts)
1726 1728
@command('debugcommands', [], _('[COMMAND]'), norepo=True)
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    # Emit one line per command table entry, alphabetically:
    # "<primary name>: <comma-separated long option names>".
    for name, entry in sorted(table.iteritems()):
        primary = name.split('|')[0].strip('^')
        longopts = ', '.join(o[1] for o in entry[1])
        ui.write('%s: %s\n' % (primary, longopts))
1734 1736
@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'),
    norepo=True)
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        # -o: complete option names (global ones, plus the command's own
        # when a command was named), skipping deprecated options.
        opttables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, table, False)
            opttables.append(entry[1])
        flags = []
        for opttable in opttables:
            for opt in opttable:
                if "(DEPRECATED)" in opt[3]:
                    continue
                if opt[0]:
                    flags.append('-%s' % opt[0])
                flags.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(flags))
        return

    # Default: complete command names matching the given prefix.
    cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table)
    if ui.verbose:
        # Verbose mode shows every alias of each matching command.
        cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
    ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1762 1764
@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
    ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
    inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::

       :hg:`diff` may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its first parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353       # compare against first parent
          hg diff -r 9353^:9353 # same using revset syntax
          hg diff -r 9353^2:9353 # compare against the second parent

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    revs = opts.get('rev')
    change = opts.get('change')
    stat = opts.get('stat')
    reverse = opts.get('reverse')

    # Resolve the two endpoints of the diff: --change diffs a single
    # revision against its first parent; otherwise --rev (0, 1 or 2
    # revisions) is resolved by revpair.
    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise error.Abort(msg)
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    # --reverse simply swaps the endpoints.
    if reverse:
        node1, node2 = node2, node1

    diffopts = patch.diffallopts(ui, opts)
    m = scmutil.match(repo[node2], pats, opts)
    ui.pager('diff')
    cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
                           listsubrepos=opts.get('subrepos'),
                           root=opts.get('root'))
1851 1853
@command('^export',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('', 'switch-parent', None, _('diff against the second parent')),
    ('r', 'rev', [], _('revisions to export'), _('REV')),
    ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       :hg:`export` may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    # Revisions may be given positionally or via -r; merge both, and
    # default to the working directory's parent ('.') when none given.
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        changesets = ['.']
    revs = scmutil.revrange(repo, changesets)
    if not revs:
        raise error.Abort(_("export requires at least one changeset"))
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    ui.pager('export')
    cmdutil.export(repo, revs, fntemplate=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffallopts(ui, opts))
1934 1936
@command('files',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
     ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ] + walkopts + formatteropts + subrepoopts,
    _('[OPTION]... [FILE]...'))
def files(ui, repo, *pats, **opts):
    """list tracked files

    Print files under Mercurial control in the working directory or
    specified revision for given files (excluding removed files).
    Files can be specified as filenames or filesets.

    If no files are given to match, this command prints the names
    of all files under Mercurial control.

    .. container:: verbose

      Examples:

      - list all files under the current directory::

          hg files .

      - shows sizes and flags for current revision::

          hg files -vr .

      - list all files named README::

          hg files -I "**/README"

      - list all binary files::

          hg files "set:binary()"

      - find files containing a regular expression::

          hg files "set:grep('bob')"

      - search tracked file contents with xargs and grep::

          hg files -0 | xargs -0 grep foo

    See :hg:`help patterns` and :hg:`help filesets` for more information
    on specifying file patterns.

    Returns 0 if a match is found, 1 otherwise.

    """

    opts = pycompat.byteskwargs(opts)
    # -r picks the context to list from; None means the working directory.
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # -0 terminates each name with NUL instead of newline (for xargs -0).
    end = '\n'
    if opts.get('print0'):
        end = '\0'
    fmt = '%s' + end

    m = scmutil.match(ctx, pats, opts)
    ui.pager('files')
    with ui.formatter('files', opts) as fm:
        return cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))
1997 1999
@command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To delete the file from the working directory, see :hg:`remove`.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    if not pats:
        raise error.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    # cmdutil.forget's first result is the list of paths it refused to forget.
    rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
    # Exit 1 if anything was rejected, 0 otherwise. The conditional
    # expression replaces the old error-prone 'rejected and 1 or 0' idiom
    # (equivalent here, since 1 is truthy).
    return 1 if rejected else 0
2035 2037
@command(
    'graft',
    [('r', 'rev', [], _('revisions to graft'), _('REV')),
     ('c', 'continue', False, _('resume interrupted graft')),
     ('e', 'edit', False, _('invoke editor on commit messages')),
     ('', 'log', None, _('append graft info to log message')),
     ('f', 'force', False, _('force graft')),
     ('D', 'currentdate', False,
      _('record the current date as commit date')),
     ('U', 'currentuser', False,
      _('record the current user as committer'), _('DATE'))]
    + commitopts2 + mergetoolopts + dryrunopts,
    _('[OPTION]... [-r REV]... REV...'))
def graft(ui, repo, *revs, **opts):
    '''copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If --log is specified, log messages will have a comment appended
    of the form::

      (grafted from CHANGESETHASH)

    If --force is specified, revisions will be grafted even if they
    are already ancestors of or have been grafted to the destination.
    This is useful when the revisions have since been backed out.

    If a graft merge results in conflicts, the graft process is
    interrupted so that the current merge can be manually resolved.
    Once all conflicts are addressed, the graft process can be
    continued with the -c/--continue option.

    .. note::

       The -c/--continue option does not reapply earlier options, except
       for --force.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r .

      - show revisions sorted by date::

          hg log -r "sort(all(), date)"

    See :hg:`help revisions` for more about specifying revisions.

    Returns 0 on successful completion.
    '''
    # Thin wrapper: hold the working-directory lock for the whole graft and
    # delegate the actual work to _dograft.
    with repo.wlock():
        return _dograft(ui, repo, *revs, **opts)
2111 2113
def _dograft(ui, repo, *revs, **opts):
    """Implementation of 'hg graft'; invoked by graft() with the wlock held.

    Copies the requested changesets onto the current working-directory
    parent one at a time, preserving user, date and description unless
    overridden through opts. Progress for --continue is persisted in the
    'graftstate' file in the repo vfs. Returns 0 on completion, -1 when
    every requested revision ended up being skipped.
    """
    opts = pycompat.byteskwargs(opts)
    # Revisions may arrive both positionally and via --rev; mixing the two
    # gives an order the user may not expect, so warn about it.
    if revs and opts.get('rev'):
        ui.warn(_('warning: inconsistent use of --rev might give unexpected '
                  'revision ordering!\n'))

    revs = list(revs)
    revs.extend(opts.get('rev'))

    # --currentuser/--currentdate only apply when no explicit value is given.
    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % util.makedate()

    editor = cmdutil.getcommiteditor(editform='graft',
                                     **pycompat.strkwargs(opts))

    cont = False
    if opts.get('continue'):
        cont = True
        if revs:
            raise error.Abort(_("can't specify --continue and revisions"))
        # read in unfinished revisions
        try:
            nodes = repo.vfs.read('graftstate').splitlines()
            revs = [repo[node].rev() for node in nodes]
        except IOError as inst:
            # Only a missing graftstate file means "nothing to continue";
            # any other I/O error is re-raised.
            if inst.errno != errno.ENOENT:
                raise
            cmdutil.wrongtooltocontinue(repo, _('graft'))
    else:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        if not revs:
            raise error.Abort(_('no revisions specified'))
        revs = scmutil.revrange(repo, revs)

    skipped = set()
    # check for merges
    for rev in repo.revs('%ld and merge()', revs):
        ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
        skipped.add(rev)
    revs = [r for r in revs if r not in skipped]
    if not revs:
        return -1

    # Don't check in the --continue case, in effect retaining --force across
    # --continues. That's because without --force, any revisions we decided to
    # skip would have been filtered out here, so they wouldn't have made their
    # way to the graftstate. With --force, any revisions we would have otherwise
    # skipped would not have been filtered out, and if they hadn't been applied
    # already, they'd have been in the graftstate.
    if not (cont or opts.get('force')):
        # check for ancestors of dest branch
        crev = repo['.'].rev()
        ancestors = repo.changelog.ancestors([crev], inclusive=True)
        # XXX make this lazy in the future
        # don't mutate while iterating, create a copy
        for rev in list(revs):
            if rev in ancestors:
                ui.warn(_('skipping ancestor revision %d:%s\n') %
                        (rev, repo[rev]))
                # XXX remove on list is slow
                revs.remove(rev)
        if not revs:
            return -1

        # analyze revs for earlier grafts
        # Map both each rev's own hex and its recorded graft 'source' to its
        # rev number, so either identity can be recognized below.
        ids = {}
        for ctx in repo.set("%ld", revs):
            ids[ctx.hex()] = ctx.rev()
            n = ctx.extra().get('source')
            if n:
                ids[n] = ctx.rev()

        # check ancestors for earlier grafts
        ui.debug('scanning for duplicate grafts\n')

        # The only changesets we can be sure doesn't contain grafts of any
        # revs, are the ones that are common ancestors of *all* revs:
        for rev in repo.revs('only(%d,ancestor(%ld))', crev, revs):
            ctx = repo[rev]
            n = ctx.extra().get('source')
            if n in ids:
                try:
                    r = repo[n].rev()
                except error.RepoLookupError:
                    # the graft source may no longer exist locally
                    r = None
                if r in revs:
                    ui.warn(_('skipping revision %d:%s '
                              '(already grafted to %d:%s)\n')
                            % (r, repo[r], rev, ctx))
                    revs.remove(r)
                elif ids[n] in revs:
                    if r is None:
                        ui.warn(_('skipping already grafted revision %d:%s '
                                  '(%d:%s also has unknown origin %s)\n')
                                % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
                    else:
                        ui.warn(_('skipping already grafted revision %d:%s '
                                  '(%d:%s also has origin %d:%s)\n')
                                % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
                    revs.remove(ids[n])
            elif ctx.hex() in ids:
                r = ids[ctx.hex()]
                ui.warn(_('skipping already grafted revision %d:%s '
                          '(was grafted from %d:%s)\n') %
                        (r, repo[r], rev, ctx))
                revs.remove(r)
        if not revs:
            return -1

    for pos, ctx in enumerate(repo.set("%ld", revs)):
        desc = '%d:%s "%s"' % (ctx.rev(), ctx,
                               ctx.description().split('\n', 1)[0])
        names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
        if names:
            desc += ' (%s)' % ' '.join(names)
        ui.status(_('grafting %s\n') % desc)
        if opts.get('dry_run'):
            continue

        # Record graft provenance in the new commit's extras; chained grafts
        # keep the original 'source' and add 'intermediate-source'.
        source = ctx.extra().get('source')
        extra = {}
        if source:
            extra['source'] = source
            extra['intermediate-source'] = ctx.hex()
        else:
            extra['source'] = ctx.hex()
        user = ctx.user()
        if opts.get('user'):
            user = opts['user']
        date = ctx.date()
        if opts.get('date'):
            date = opts['date']
        message = ctx.description()
        if opts.get('log'):
            message += '\n(grafted from %s)' % ctx.hex()

        # we don't merge the first commit when continuing
        if not cont:
            # perform the graft merge with p1(rev) as 'ancestor'
            try:
                # ui.forcemerge is an internal variable, do not document
                repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                  'graft')
                stats = mergemod.graft(repo, ctx, ctx.p1(),
                                       ['local', 'graft'])
            finally:
                repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
            # report any conflicts
            if stats and stats[3] > 0:
                # write out state for --continue
                nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
                repo.vfs.write('graftstate', ''.join(nodelines))
                # NOTE: 'extra' is rebound here from the commit-extras dict
                # to a string of extra command-line flags for the hint.
                extra = ''
                if opts.get('user'):
                    extra += ' --user %s' % util.shellquote(opts['user'])
                if opts.get('date'):
                    extra += ' --date %s' % util.shellquote(opts['date'])
                if opts.get('log'):
                    extra += ' --log'
                hint=_("use 'hg resolve' and 'hg graft --continue%s'") % extra
                raise error.Abort(
                    _("unresolved conflicts, can't continue"),
                    hint=hint)
        else:
            cont = False

        # commit
        node = repo.commit(text=message, user=user,
                           date=date, extra=extra, editor=editor)
        if node is None:
            ui.warn(
                _('note: graft of %d:%s created no changes to commit\n') %
                (ctx.rev(), ctx))

    # remove state when we complete successfully
    if not opts.get('dry_run'):
        repo.vfs.unlinkpath('graftstate', ignoremissing=True)

    return 0
2294 2296
@command('grep',
    [('0', 'print0', None, _('end fields with NUL')),
    ('', 'all', None, _('print all revisions that match')),
    ('a', 'text', None, _('treat all files as text')),
    ('f', 'follow', None,
     _('follow changeset history,'
       ' or file history across copies and renames')),
    ('i', 'ignore-case', None, _('ignore case when matching')),
    ('l', 'files-with-matches', None,
     _('print only filenames and revisions that match')),
    ('n', 'line-number', None, _('print matching line numbers')),
    ('r', 'rev', [],
     _('only search files changed within revision range'), _('REV')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ] + formatteropts + walkopts,
    _('[OPTION]... PATTERN [FILE]...'),
    inferrepo=True)
def grep(ui, repo, pattern, *pats, **opts):
    """search revision history for a pattern in specified files

    Search revision history for a regular expression in the specified
    files or the entire project.

    By default, grep prints the most recent revision number for each
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that becomes
    a non-match, or "+" for a non-match that becomes a match), use the
    --all flag.

    PATTERN can be any Python (roughly Perl-compatible) regular
    expression.

    If no FILEs are specified (and -f/--follow isn't set), all files in
    the repository are searched, including those that don't exist in the
    current branch or have been deleted in a prior changeset.

    Returns 0 if a match is found, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    reflags = re.M
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = util.re.compile(pattern, reflags)
    except re.error as inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return 1
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    # cache filelog lookups; the same file is consulted for many revisions
    getfile = util.lrucachefunc(repo.file)

    def matchlines(body):
        """Yield (linenum, colstart, colend, line) for each regexp match."""
        begin = 0
        linenum = 0
        while begin < len(body):
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body) + 1
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        """A matched line; hashes on (linenum, line), compares on text only
        so difflinestates() can diff match sets between revisions."""
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

        def findpos(self):
            """Iterate all (start, end) indices of matches"""
            yield self.colstart, self.colend
            p = self.colend
            while p < len(self.line):
                m = regexp.search(self.line, p)
                if not m:
                    break
                yield m.span()
                p = m.end()

    # matches: rev -> filename -> [linestate]; copies: rev -> fn -> source fn
    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        # record every matching line of fn@rev
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        """Yield ('+'|'-', linestate) for match lines added/removed from a
        to b, in SequenceMatcher opcode order."""
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    def display(fm, fn, ctx, pstates, states):
        """Write the matches of fn@ctx to formatter fm; return True if any
        output was produced."""
        rev = ctx.rev()
        if fm.isplain():
            formatuser = ui.shortuser
        else:
            formatuser = str
        if ui.quiet:
            datefmt = '%Y-%m-%d'
        else:
            datefmt = '%a %b %d %H:%M:%S %Y %1%2'
        found = False
        @util.cachefunc
        def binary():
            flog = getfile(fn)
            return util.binary(flog.read(ctx.filenode(fn)))

        fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
        # renamed from 'iter' to avoid shadowing the builtin
        if opts.get('all'):
            matchiter = difflinestates(pstates, states)
        else:
            matchiter = [('', l) for l in states]
        for change, l in matchiter:
            fm.startitem()
            fm.data(node=fm.hexfunc(ctx.node()))
            cols = [
                ('filename', fn, True),
                ('rev', rev, True),
                ('linenumber', l.linenum, opts.get('line_number')),
            ]
            if opts.get('all'):
                cols.append(('change', change, True))
            cols.extend([
                ('user', formatuser(ctx.user()), opts.get('user')),
                ('date', fm.formatdate(ctx.date(), datefmt), opts.get('date')),
            ])
            # last enabled column gets no trailing separator
            lastcol = next(name for name, data, cond in reversed(cols) if cond)
            for name, data, cond in cols:
                field = fieldnamemap.get(name, name)
                fm.condwrite(cond, field, '%s', data, label='grep.%s' % name)
                if cond and name != lastcol:
                    fm.plain(sep, label='grep.sep')
            if not opts.get('files_with_matches'):
                fm.plain(sep, label='grep.sep')
                if not opts.get('text') and binary():
                    fm.plain(_(" Binary file matches"))
                else:
                    displaymatches(fm.nested('texts'), l)
            fm.plain(eol)
            found = True
            if opts.get('files_with_matches'):
                # -l: one line per file is enough
                break
        return found

    def displaymatches(fm, l):
        """Write one matched line, labelling matched spans 'grep.match'."""
        p = 0
        for s, e in l.findpos():
            if p < s:
                fm.startitem()
                fm.write('text', '%s', l.line[p:s])
                fm.data(matched=False)
            fm.startitem()
            fm.write('text', '%s', l.line[s:e], label='grep.match')
            fm.data(matched=True)
            p = e
        if p < len(l.line):
            fm.startitem()
            fm.write('text', '%s', l.line[p:])
            fm.data(matched=False)
        fm.end()

    skip = {}
    revfiles = {}
    matchfn = scmutil.match(repo[None], pats, opts)
    found = False
    follow = opts.get('follow')

    def prep(ctx, fns):
        """walkchangerevs hook: collect match states for ctx and its p1 so
        the main loop can diff them."""
        rev = ctx.rev()
        pctx = ctx.p1()
        parent = pctx.rev()
        matches.setdefault(rev, {})
        matches.setdefault(parent, {})
        files = revfiles.setdefault(rev, [])
        for fn in fns:
            flog = getfile(fn)
            try:
                fnode = ctx.filenode(fn)
            except error.LookupError:
                continue

            copied = flog.renamed(fnode)
            copy = follow and copied and copied[0]
            if copy:
                copies.setdefault(rev, {})[fn] = copy
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            files.append(fn)

            if fn not in matches[rev]:
                grepbody(fn, rev, flog.read(fnode))

            # also grep the parent (following the rename if any)
            pfn = copy or fn
            if pfn not in matches[parent]:
                try:
                    fnode = pctx.filenode(pfn)
                    grepbody(pfn, parent, flog.read(fnode))
                except error.LookupError:
                    pass

    ui.pager('grep')
    fm = ui.formatter('grep', opts)
    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        rev = ctx.rev()
        parent = ctx.p1().rev()
        for fn in sorted(revfiles.get(rev, [])):
            states = matches[rev][fn]
            copy = copies.get(rev, {}).get(fn)
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            pstates = matches.get(parent, {}).get(copy or fn, [])
            if pstates or states:
                r = display(fm, fn, ctx, pstates, states)
                found = found or r
                if r and not opts.get('all'):
                    # without --all, stop reporting this file once shown
                    skip[fn] = True
                    if copy:
                        skip[copy] = True
        # free per-revision state as we go to bound memory use
        del matches[rev]
        del revfiles[rev]
    fm.end()

    return not found
2547 2549
@command('heads',
    [('r', 'rev', '',
     _('show only heads which are descendants of STARTREV'), _('STARTREV')),
    ('t', 'topo', False, _('show topological heads only')),
    ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
    ('c', 'closed', False, _('show normal and closed branch heads')),
    ] + templateopts,
    _('[-ct] [-r STARTREV] [REV]...'))
def heads(ui, repo, *branchrevs, **opts):
    """show branch heads

    With no arguments, show all open branch heads in the repository.
    Branch heads are changesets that have no descendants on the
    same branch. They are where development generally takes place and
    are the usual targets for update and merge operations.

    If one or more REVs are given, only open branch heads on the
    branches associated with the specified changesets are shown. This
    means that you can use :hg:`heads .` to see the heads on the
    currently checked-out branch.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    topological heads (changesets with no children) will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    opts = pycompat.byteskwargs(opts)
    start = None
    if 'rev' in opts:
        start = scmutil.revsingle(repo, opts['rev'], None).node()

    # gather candidate heads, either topological or per named branch
    if opts.get('topo'):
        shown = [repo[n] for n in repo.heads(start)]
    else:
        nodes = []
        for branch in repo.branchmap():
            nodes += repo.branchheads(branch, start, opts.get('closed'))
        shown = [repo[n] for n in nodes]

    # restrict to the branches of the given revisions, if any
    if branchrevs:
        branches = set(repo[br].branch() for br in branchrevs)
        shown = [ctx for ctx in shown if ctx.branch() in branches]

    # --active (deprecated): keep only heads that are DAG heads too
    if opts.get('active') and branchrevs:
        dagheads = repo.heads(start)
        shown = [ctx for ctx in shown if ctx.node() in dagheads]

    # warn about requested branches that ended up with no heads at all
    if branchrevs:
        haveheads = set(ctx.branch() for ctx in shown)
        missing = branches - haveheads
        if missing:
            headless = ', '.join(b for b in missing)
            msg = _('no open branch heads found on branches %s')
            if opts.get('rev'):
                msg += _(' (started at %s)') % opts['rev']
            ui.warn((msg + '\n') % headless)

    if not shown:
        return 1

    ui.pager('heads')
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for ctx in sorted(shown, key=lambda c: -c.rev()):
        displayer.show(ctx)
    displayer.close()
2620 2622
@command('help',
    [('e', 'extension', None, _('show only help for extensions')),
     ('c', 'command', None, _('show only help for commands')),
     ('k', 'keyword', None, _('show topics matching keyword')),
     ('s', 'system', [], _('show help for specific platform(s)')),
     ],
    _('[-ecks] [TOPIC]'),
    norepo=True)
def help_(ui, name=None, **opts):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """

    # platform keywords select which ".. container:: <platform>" help
    # sections are kept; default to the current platform
    keep = opts.get(r'system') or []
    if not keep:
        platform = pycompat.sysplatform
        if platform.startswith('win'):
            keep.append('windows')
        elif platform == 'OpenVMS':
            keep.append('vms')
        elif platform == 'plan9':
            keep.append('plan9')
        else:
            keep.append('unix')
            keep.append(platform.lower())
    if ui.verbose:
        keep.append('verbose')

    # the help machinery introspects this module's command table
    thismodule = sys.modules[__name__]
    text = help.formattedhelp(ui, thismodule, name, keep=keep, **opts)
    ui.pager('help')
    ui.write(text)
2658 2660
2659 2661
@command('identify|id',
    [('r', 'rev', '',
     _('identify the specified revision'), _('REV')),
    ('n', 'num', None, _('show local revision number')),
    ('i', 'id', None, _('show global revision id')),
    ('b', 'branch', None, _('show branch')),
    ('t', 'tags', None, _('show tags')),
    ('B', 'bookmarks', None, _('show bookmarks')),
    ] + remoteopts,
    _('[-nibtB] [-r REV] [SOURCE]'),
    optionalrepo=True)
def identify(ui, repo, source=None, rev=None,
             num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
    """identify the working directory or specified revision

    Print a summary identifying the repository state at REV using one or
    two parent hash identifiers, followed by a "+" if the working
    directory has uncommitted changes, the branch name (if not default),
    a list of tags, and a list of bookmarks.

    When REV is not given, print a summary of the current state of the
    repository.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    .. container:: verbose

      Examples:

      - generate a build identifier for the working directory::

          hg id --id > build-id.dat

      - find the revision corresponding to a tag::

          hg id -n -r 1.3

      - check the most recent revision of a remote repository::

          hg id -r tip https://www.mercurial-scm.org/repo/hg/

    See :hg:`log` for generating more information about specific revisions,
    including full hash identifiers.

    Returns 0 if successful.
    """

    opts = pycompat.byteskwargs(opts)
    if not repo and not source:
        raise error.Abort(_("there is no Mercurial repository here "
                            "(.hg not found)"))

    # --debug shows full 40-char hashes, otherwise short ones
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    # no selector flag given: print the default id + branch/tags/bookmarks
    default = not (num or id or branch or tags or bookmarks)
    output = []
    revs = []

    if source:
        source, branches = hg.parseurl(ui.expandpath(source))
        peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
        repo = peer.local()
        revs, checkout = hg.addbranchrevs(repo, peer, branches, None)

    # 'not repo' here means the source is a remote peer (no local copy)
    if not repo:
        if num or branch or tags:
            raise error.Abort(
                _("can't query remote revision number, branch, or tags"))
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"

        remoterev = peer.lookup(rev)
        if default or id:
            output = [hexfunc(remoterev)]

        def getbms():
            # bookmarks on the remote that point at remoterev, sorted
            bms = []

            if 'bookmarks' in peer.listkeys('namespaces'):
                hexremoterev = hex(remoterev)
                bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
                       if bmr == hexremoterev]

            return sorted(bms)

        if bookmarks:
            output.extend(getbms())
        elif default and not ui.quiet:
            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(getbms())
            if bm:
                output.append(bm)
    else:
        ctx = scmutil.revsingle(repo, rev, None)

        # rev() is None for the working-directory context
        if ctx.rev() is None:
            ctx = repo[None]
            parents = ctx.parents()
            taglist = []
            for p in parents:
                taglist.extend(p.tags())

            # '+' suffix marks a dirty working directory (incl. subrepos)
            changed = ""
            if default or id or num:
                if (any(repo.status())
                    or any(ctx.sub(s).dirty() for s in ctx.substate)):
                    changed = '+'
            if default or id:
                output = ["%s%s" %
                  ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
            if num:
                output.append("%s%s" %
                  ('+'.join(["%d" % p.rev() for p in parents]), changed))
        else:
            if default or id:
                output = [hexfunc(ctx.node())]
            if num:
                output.append(pycompat.bytestr(ctx.rev()))
            taglist = ctx.tags()

        if default and not ui.quiet:
            b = ctx.branch()
            if b != 'default':
                output.append("(%s)" % b)

            # multiple tags for a single parent separated by '/'
            t = '/'.join(taglist)
            if t:
                output.append(t)

            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(ctx.bookmarks())
            if bm:
                output.append(bm)
        else:
            if branch:
                output.append(ctx.branch())

            if tags:
                output.extend(taglist)

            if bookmarks:
                output.extend(ctx.bookmarks())

    ui.write("%s\n" % ' '.join(output))
2810 2812
@command('import|patch',
    [('p', 'strip', 1,
     _('directory strip option for patch. This has the same '
       'meaning as the corresponding patch option'), _('NUM')),
    ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ('f', 'force', None,
     _('skip check for outstanding uncommitted changes (DEPRECATED)')),
    ('', 'no-commit', None,
     _("don't commit, just update the working directory")),
    ('', 'bypass', None,
     _("apply patch without touching the working directory")),
    ('', 'partial', None,
     _('commit even if some hunks fail')),
    ('', 'exact', None,
     _('abort if patch would apply lossily')),
    ('', 'prefix', '',
     _('apply patch to subdirectory'), _('DIR')),
    ('', 'import-branch', None,
     _('use any branch information in patch (implied by --exact)'))] +
    commitopts + commitopts2 + similarityopts,
    _('[OPTION]... PATCH...'))
def import_(ui, repo, patch1=None, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually (unless
    --no-commit is specified).

    To read a patch from standard input (stdin), use "-" as the patch
    name. If a URL is specified, the patch will be downloaded from
    there.

    Import first applies changes to the working directory (unless
    --bypass is specified), import will abort if there are outstanding
    changes.

    Use --bypass to apply and commit patches directly to the
    repository, without affecting the working directory. Without
    --exact, patches will be applied on top of the working directory
    parent revision.

    You can import a patch straight from a mail message. Even patches
    as attachments work (to use the body part, it must have type
    text/plain or text/x-patch). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to the commit
    message.

    If the imported patch was generated by :hg:`export`, user and
    description from patch override values from message headers and
    body. Values given on command line with -m/--message and -u/--user
    override these.

    If --exact is specified, import will set the working directory to
    the parent of each patch before applying it, and will abort if the
    resulting changeset has a different ID than the one recorded in
    the patch. This will guard against various ways that portable
    patch formats and mail systems might fail to transfer Mercurial
    data or metadata. See :hg:`bundle` for lossless transmission.

    Use --partial to ensure a changeset will be created from the patch
    even if some hunks fail to apply. Hunks that fail to apply will be
    written to a <target-file>.rej file. Conflicts can then be resolved
    by hand before :hg:`commit --amend` is run to update the created
    changeset. This flag exists to let people import patches that
    partially apply without losing the associated metadata (author,
    date, description, ...).

    .. note::

       When no hunks apply cleanly, :hg:`import --partial` will create
       an empty changeset, importing only the patch metadata.

    With -s/--similarity, hg will attempt to discover renames and
    copies in the patch in the same way as :hg:`addremove`.

    It is possible to use external patch programs to perform the patch
    by setting the ``ui.patch`` configuration option. For the default
    internal tool, the fuzz can also be configured via ``patch.fuzz``.
    See :hg:`help config` for more information about configuration
    files and how to use these options.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    .. container:: verbose

      Examples:

      - import a traditional patch from a website and detect renames::

          hg import -s 80 http://example.com/bugfix.patch

      - import a changeset from an hgweb server::

          hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa

      - import all the patches in an Unix-style mbox::

          hg import incoming-patches.mbox

      - import patches from stdin::

          hg import -

      - attempt to exactly restore an exported changeset (not always
        possible)::

          hg import --exact proposed-fix.patch

      - use an external tool to apply a patch which is too fuzzy for
        the default internal tool.

          hg import --config ui.patch="patch --merge" fuzzy.patch

      - change the default fuzzing from 2 to a less strict 7

          hg import --config ui.fuzz=7 fuzz.patch

    Returns 0 on success, 1 on partial success (see --partial).
    """

    opts = pycompat.byteskwargs(opts)
    if not patch1:
        raise error.Abort(_('need at least one patch to import'))

    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # validate mutually-exclusive/constrained option combinations up front
    exact = opts.get('exact')
    update = not opts.get('bypass')
    if not update and opts.get('no_commit'):
        raise error.Abort(_('cannot use --no-commit with --bypass'))
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    if sim and not update:
        raise error.Abort(_('cannot use --similarity with --bypass'))
    if exact:
        if opts.get('edit'):
            raise error.Abort(_('cannot use --exact with --edit'))
        if opts.get('prefix'):
            raise error.Abort(_('cannot use --exact with --prefix'))

    base = opts["base"]
    # wlock protects the working dir; lock+tr make committing atomic;
    # dsguard protects the dirstate in the --no-commit case
    wlock = dsguard = lock = tr = None
    msgs = []
    ret = 0


    try:
        wlock = repo.wlock()

        if update:
            cmdutil.checkunfinished(repo)
            if (exact or not opts.get('force')):
                cmdutil.bailifchanged(repo)

        if not opts.get('no_commit'):
            lock = repo.lock()
            tr = repo.transaction('import')
        else:
            dsguard = dirstateguard.dirstateguard(repo, 'import')
        parents = repo[None].parents()
        for patchurl in patches:
            if patchurl == '-':
                ui.status(_('applying patch from stdin\n'))
                patchfile = ui.fin
                patchurl = 'stdin'      # for error message
            else:
                patchurl = os.path.join(base, patchurl)
                ui.status(_('applying %s\n') % patchurl)
                patchfile = hg.openpath(ui, patchurl)

            haspatch = False
            # a single patch source may contain several patches (e.g. mbox)
            for hunk in patch.split(patchfile):
                (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
                                                        parents, opts,
                                                        msgs, hg.clean)
                if msg:
                    haspatch = True
                    ui.note(msg + '\n')
                # chain the next patch onto what we just applied
                if update or exact:
                    parents = repo[None].parents()
                else:
                    parents = [repo[node]]
                if rej:
                    ui.write_err(_("patch applied partially\n"))
                    ui.write_err(_("(fix the .rej files and run "
                                   "`hg commit --amend`)\n"))
                    ret = 1
                    break

            if not haspatch:
                raise error.Abort(_('%s: no diffs found') % patchurl)

        if tr:
            tr.close()
        if msgs:
            repo.savecommitmessage('\n* * *\n'.join(msgs))
        if dsguard:
            dsguard.close()
        return ret
    finally:
        # tr.release() aborts the transaction if close() was not reached
        if tr:
            tr.release()
        release(lock, dsguard, wlock)
3023 3025
@command('incoming|in',
    [('f', 'force', None,
      _('run even if remote repository is unrelated')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('', 'bundle', '',
     _('file to store the bundles into'), _('FILE')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmarks', False, _("compare bookmarks")),
    ('b', 'branch', [],
     _('a specific branch you would like to pull'), _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would have been pulled
    if a pull at the time you issued this command.

    See pull for valid source format details.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2               1234567890ab advanced
        BM3               234567890abc diverged
        BM4               34567890abcd changed

      The action taken locally when pulling depends on the
      status of each bookmark:

      :``added``: pull will create it
      :``advanced``: pull will update it
      :``diverged``: pull will create a divergent bookmark
      :``changed``: result depends on remote changesets

      From the point of view of pulling behavior, bookmark
      existing only in the remote repository are treated as ``added``,
      even if it is in fact locally deleted.

    .. container:: verbose

      For remote repository, using --bundle avoids downloading the
      changesets twice if the incoming is followed by a pull.

      Examples:

      - show incoming changes with patches and full description::

          hg incoming -vp

      - show incoming changes excluding merges, store a bundle::

          hg in -vpM --bundle incoming.hg
          hg pull incoming.hg

      - briefly list changes inside a bundle::

          hg in changes.hg -T "{desc|firstline}\\n"

    Returns 0 if there are incoming changes, 1 otherwise.
    """
    # Command-line **opts arrive as native (unicode on py3) keyword names;
    # normalize them to bytes for the rest of the code.
    opts = pycompat.byteskwargs(opts)
    if opts.get('graph'):
        # --graph: render the incoming changesets as an ASCII DAG.  The
        # actual fetching is delegated to hg._incoming, which calls our
        # display callback with the remote peer and the changeset list.
        cmdutil.checkunsupportedgraphflags([], opts)
        def display(other, chlist, displayer):
            revdag = cmdutil.graphrevs(other, chlist, opts)
            cmdutil.displaygraph(ui, repo, revdag, displayer,
                                 graphmod.asciiedges)

        hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
        return 0

    if opts.get('bundle') and opts.get('subrepos'):
        raise error.Abort(_('cannot combine --bundle and --subrepos'))

    if opts.get('bookmarks'):
        # -B/--bookmarks compares bookmarks only; no changesets are shown.
        source, branches = hg.parseurl(ui.expandpath(source),
                                       opts.get('branch'))
        other = hg.peer(repo, opts, source)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.pager('incoming')
        ui.status(_('comparing with %s\n') % util.hidepassword(source))
        return bookmarks.incoming(ui, repo, other)

    # _subtoppath tells subrepositories which remote to compare against;
    # it must be cleared again even if hg.incoming raises.
    repo._subtoppath = ui.expandpath(source)
    try:
        return hg.incoming(ui, repo, source, opts)
    finally:
        del repo._subtoppath
3120 3122
3121 3123
@command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
         norepo=True)
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it will be created.

    If no directory is given, the current directory is used.

    It is possible to specify an ``ssh://`` URL as the destination.
    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    # Normalize keyword arguments to byte strings for internal consumption.
    byteopts = pycompat.byteskwargs(opts)
    # Expanding the destination resolves [paths] aliases; create=True makes
    # hg.peer initialize the repository at that location (local or remote).
    target = ui.expandpath(dest)
    hg.peer(ui, byteopts, target, create=True)
3139 3141
@command('locate',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
    ] + walkopts,
    _('[OPTION]... [PATTERN]...'))
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns (DEPRECATED)

    Print files under Mercurial control in the working directory whose
    names match the given patterns.

    By default, this command searches all directories in the working
    directory. To search just the current directory and its
    subdirectories, use "--include .".

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain whitespace as multiple filenames.

    See :hg:`help files` for a more versatile command.

    Returns 0 if a match is found, 1 otherwise.
    """
    opts = pycompat.byteskwargs(opts)
    # NUL-terminate entries when --print0 is given, for use with xargs -0.
    terminator = '\0' if opts.get('print0') else '\n'

    node = scmutil.revsingle(repo, opts.get('rev'), None).node()
    ctx = repo[node]
    # badfn silences complaints about patterns that match nothing.
    matcher = scmutil.match(ctx, pats, opts, default='relglob',
                            badfn=lambda x, y: False)

    found = False
    ui.pager('locate')
    for abspath in ctx.matches(matcher):
        if opts.get('fullpath'):
            ui.write(repo.wjoin(abspath), terminator)
        else:
            # Print relative to the cwd when explicit patterns were given;
            # otherwise print the repo-relative path.
            ui.write(((pats and matcher.rel(abspath)) or abspath),
                     terminator)
        found = True

    return 0 if found else 1
3189 3191
@command('^log|history',
    [('f', 'follow', None,
     _('follow changeset history, or file history across copies and renames')),
    ('', 'follow-first', None,
     _('only follow the first parent of merge changesets (DEPRECATED)')),
    ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
    ('C', 'copies', None, _('show copied files')),
    ('k', 'keyword', [],
     _('do case-insensitive search for a given text'), _('TEXT')),
    ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
    ('', 'removed', None, _('include revisions where files were removed')),
    ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
    ('u', 'user', [], _('revisions committed by user'), _('USER')),
    ('', 'only-branch', [],
     _('show only changesets within the given named branch (DEPRECATED)'),
     _('BRANCH')),
    ('b', 'branch', [],
     _('show changesets within the given named branch'), _('BRANCH')),
    ('P', 'prune', [],
     _('do not display revision or any of its ancestors'), _('REV')),
    ] + logopts + walkopts,
    _('[OPTION]... [FILE]'),
    inferrepo=True)
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors or descendants of the starting revision.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    With --graph the revisions are shown as an ASCII art DAG with the most
    recent changeset at the top.
    'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
    and '+' represents a fork where the changeset from the lines below is a
    parent of the 'o' merge on the same line.
    Paths in the DAG are represented with '|', '/' and so forth. ':' in place
    of a '|' indicates one or more revisions in a path are omitted.

    .. note::

       :hg:`log --patch` may generate unexpected diff output for merge
       changesets, as it will only compare the merge changeset against
       its first parent. Also, only files different from BOTH parents
       will appear in files:.

    .. note::

       For performance reasons, :hg:`log FILE` may omit duplicate changes
       made on branches and will not show removals or mode changes. To
       see all such changes, use the --removed switch.

    .. container:: verbose

      Some examples:

      - changesets with full descriptions and file lists::

          hg log -v

      - changesets ancestral to the working directory::

          hg log -f

      - last 10 commits on the current branch::

          hg log -l 10 -b .

      - changesets showing all modifications of a file, including removals::

          hg log --removed file.c

      - all changesets that touch a directory, with diffs, excluding merges::

          hg log -Mp lib/

      - all revision numbers that match a keyword::

          hg log -k bug --template "{rev}\\n"

      - the full hash identifier of the working directory parent::

          hg log -r . --template "{node}\\n"

      - list available log templates::

          hg log -T list

      - check if a given changeset is included in a tagged release::

          hg log -r "a21ccf and ancestor(1.9)"

      - find all changesets by some user in a date range::

          hg log -k alice -d "may 2008 to jul 2008"

      - summary of all changesets after the last tag::

          hg log -r "last(tagged())::" --template "{desc|firstline}\\n"

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revisions` for more about specifying and ordering
    revisions.

    See :hg:`help templates` for more about pre-packaged styles and
    specifying custom templates.

    Returns 0 on success.

    """
    opts = pycompat.byteskwargs(opts)
    if opts.get('follow') and opts.get('rev'):
        # Combining --follow with -r means "ancestors of the given revs":
        # rewrite the revspec accordingly and drop the follow flag so the
        # rest of the machinery does not see both.
        opts['rev'] = [revsetlang.formatspec('reverse(::%lr)', opts.get('rev'))]
        del opts['follow']

    if opts.get('graph'):
        # --graph output is implemented entirely by cmdutil.graphlog.
        return cmdutil.graphlog(ui, repo, pats, opts)

    revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
    limit = cmdutil.loglimit(opts)  # -l/--limit, or no limit
    count = 0

    getrenamed = None
    if opts.get('copies'):
        # For -C/--copies, build a rename-lookup function.  endrev bounds
        # the walk when an explicit rev range was given.
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    ui.pager('log')
    displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
    for rev in revs:
        if count == limit:
            break
        ctx = repo[rev]
        copies = None
        # rev 0 has no parent to have been renamed from, hence `and rev`.
        if getrenamed is not None and rev:
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, rev)
                if rename:
                    copies.append((fn, rename[0]))
        if filematcher:
            revmatchfn = filematcher(ctx.rev())
        else:
            revmatchfn = None
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        # flush() reports whether anything was actually emitted; only
        # displayed changesets count against --limit.
        if displayer.flush(ctx):
            count += 1

    displayer.close()
3354 3356
@command('manifest',
    [('r', 'rev', '', _('revision to display'), _('REV')),
     ('', 'all', False, _("list files from all revisions"))]
    + formatteropts,
    _('[-r REV]'))
def manifest(ui, repo, node=None, rev=None, **opts):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    If option --all is specified, the list of all files from all revisions
    is printed. This includes deleted and renamed files.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    fm = ui.formatter('manifest', opts)

    if opts.get('all'):
        if rev or node:
            raise error.Abort(_("can't specify a revision with --all"))

        res = []
        # Every tracked file (ever) has a filelog stored as data/<path>.i;
        # recover the file name by stripping that prefix and suffix.
        prefix = "data/"
        suffix = ".i"
        plen = len(prefix)
        slen = len(suffix)
        with repo.lock():
            for fn, b, size in repo.store.datafiles():
                # size != 0 skips empty placeholder revlogs.
                if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
                    res.append(fn[plen:-slen])
        ui.pager('manifest')
        for f in res:
            fm.startitem()
            fm.write("path", '%s\n', f)
        fm.end()
        return

    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    if not node:
        node = rev

    # Manifest flags -> display character / permission string.
    char = {'l': '@', 'x': '*', '': ''}
    mode = {'l': '644', 'x': '755', '': '644'}
    ctx = scmutil.revsingle(repo, node)
    mf = ctx.manifest()
    ui.pager('manifest')
    for f in ctx:
        fm.startitem()
        fl = ctx[f].flags()
        # Hash only with --debug, mode/type only with -v, path always.
        fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
        fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
        fm.write('path', '%s\n', f)
    fm.end()
3416 3418
@command('^merge',
    [('f', 'force', None,
      _('force a merge including outstanding changes (DEPRECATED)')),
    ('r', 'rev', '', _('revision to merge'), _('REV')),
    ('P', 'preview', None,
     _('review revisions to merge (no merge is performed)'))
    ] + mergetoolopts,
    _('[-P] [[-r] REV]'))
def merge(ui, repo, node=None, **opts):
    """merge another revision into working directory

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files. See :hg:`help merge-tools` for options.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge with must be provided.

    See :hg:`help resolve` for information on handling file conflicts.

    To undo an uncommitted merge, use :hg:`update --clean .` which
    will check out a clean copy of the original merge parent, losing
    all changes.

    Returns 0 on success, 1 if there are unresolved files.
    """

    opts = pycompat.byteskwargs(opts)
    # The revision may come either positionally or via -r, but not both.
    if opts.get('rev') and node:
        raise error.Abort(_("please specify just one revision"))
    if not node:
        node = opts.get('rev')

    if node:
        node = scmutil.revsingle(repo, node).node()

    if not node:
        # No revision given: let destutil pick the default merge target
        # (the unique other head of the current branch, if any).
        node = repo[destutil.destmerge(repo)].node()

    if opts.get('preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo.lookup('.')
        p2 = repo.lookup(node)
        nodes = repo.changelog.findmissing(common=[p1], heads=[p2])

        displayer = cmdutil.show_changeset(ui, repo, opts)
        for node in nodes:
            displayer.show(repo[node])
        displayer.close()
        return 0

    try:
        # ui.forcemerge is an internal variable, do not document
        repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
        force = opts.get('force')
        labels = ['working copy', 'merge rev']
        return hg.merge(repo, node, force=force, mergeforce=force,
                        labels=labels)
    finally:
        # Always clear the temporary forcemerge setting, even on error.
        ui.setconfig('ui', 'forcemerge', '', 'merge')
3487 3489
@command('outgoing|out',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'), _('REV')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('B', 'bookmarks', False, _('compare bookmarks')),
    ('b', 'branch', [], _('a specific branch you would like to push'),
     _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2                            deleted
        BM3               234567890abc advanced
        BM4               34567890abcd diverged
        BM5               4567890abcde changed

      The action taken when pushing depends on the
      status of each bookmark:

      :``added``: push with ``-B`` will create it
      :``deleted``: push with ``-B`` will delete it
      :``advanced``: push will update it
      :``diverged``: push with ``-B`` will update it
      :``changed``: push with ``-B`` will update it

      From the point of view of pushing behavior, bookmarks
      existing only in the remote repository are treated as
      ``deleted``, even if it is in fact added remotely.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """
    # Normalize keyword arguments to byte strings.
    opts = pycompat.byteskwargs(opts)
    if opts.get('graph'):
        # --graph: render the outgoing changesets as an ASCII DAG.
        cmdutil.checkunsupportedgraphflags([], opts)
        o, other = hg._outgoing(ui, repo, dest, opts)
        if not o:
            # Nothing outgoing; still run the hooks before returning.
            cmdutil.outgoinghooks(ui, repo, other, opts, o)
            return

        revdag = cmdutil.graphrevs(repo, o, opts)
        ui.pager('outgoing')
        displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
        cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
        cmdutil.outgoinghooks(ui, repo, other, opts, o)
        return 0

    if opts.get('bookmarks'):
        # -B/--bookmarks compares bookmarks only; no changesets are shown.
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(dest))
        ui.pager('outgoing')
        return bookmarks.outgoing(ui, repo, other)

    # _subtoppath tells subrepositories which remote to compare against;
    # it must be cleared again even if hg.outgoing raises.
    repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
    try:
        return hg.outgoing(ui, repo, dest, opts)
    finally:
        del repo._subtoppath
3565 3567
@command('parents',
    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
    ] + templateopts,
    _('[-r REV] [FILE]'),
    inferrepo=True)
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision (DEPRECATED)

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    This command is equivalent to::

        hg log -r "p1()+p2()" or
        hg log -r "p1(REV)+p2(REV)" or
        hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
        hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"

    See :hg:`summary` and :hg:`help revsets` for related information.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    if file_:
        # With a FILE argument, report the revision(s) where that file
        # last changed, looked up through the filelog of each parent.
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise error.Abort(_('can only specify an explicit filename'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                # File not present in this parent's manifest; skip it.
                pass
        if not filenodes:
            raise error.Abort(_("'%s' not found in manifest!") % file_)
        p = []
        for fn in filenodes:
            # linkrev lookup: map the file revision back to a changeset.
            fctx = repo.filectx(file_, fileid=fn)
            p.append(fctx.node())
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        # Skip the null parent (root changesets or non-merges).
        if n != nullid:
            displayer.show(repo[n])
    displayer.close()
3622 3624
@command('paths', formatteropts, _('[NAME]'), optionalrepo=True)
def paths(ui, repo, search=None, **opts):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning.  When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both.  When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``.

    .. note::

       ``default`` and ``default-push`` apply to all inbound (e.g.
       :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
       and :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ui.pager('paths')
    # With a NAME argument, keep only the exactly-matching entry;
    # otherwise list every configured path, sorted by name.
    if search:
        entries = [(name, path) for name, path in ui.paths.iteritems()
                   if name == search]
    else:
        entries = sorted(ui.paths.iteritems())

    fm = ui.formatter('paths', opts)
    # Hide passwords only in plain (human-readable) output; structured
    # formats receive the raw location.
    cleanurl = util.hidepassword if fm.isplain() else str
    # Quiet mode prints bare names; otherwise "name = url" pairs.
    namefmt = '%s\n' if ui.quiet else '%s = '
    showsubopts = not search and not ui.quiet

    for name, path in entries:
        fm.startitem()
        fm.condwrite(not search, 'name', namefmt, name)
        fm.condwrite(not ui.quiet, 'url', '%s\n', cleanurl(path.rawloc))
        for subopt, value in sorted(path.suboptions.items()):
            assert subopt not in ('name', 'url')
            if showsubopts:
                fm.plain('%s:%s = ' % (name, subopt))
            fm.condwrite(showsubopts, subopt, '%s\n', value)

    fm.end()

    if search and not entries:
        if not ui.quiet:
            ui.warn(_("not found!\n"))
        return 1
    return 0
3693 3695
@command('phase',
    [('p', 'public', False, _('set changeset phase to public')),
     ('d', 'draft', False, _('set changeset phase to draft')),
     ('s', 'secret', False, _('set changeset phase to secret')),
     ('f', 'force', False, _('allow to move boundary backward')),
     ('r', 'rev', [], _('target revision'), _('REV')),
    ],
    _('[-p|-d|-s] [-f] [-r] [REV...]'))
def phase(ui, repo, *revs, **opts):
    """set or show the current phase name

    With no argument, show the phase name of the current revision(s).

    With one of -p/--public, -d/--draft or -s/--secret, change the
    phase value of the specified revisions.

    Unless -f/--force is specified, :hg:`phase` won't move changeset from a
    lower phase to an higher phase. Phases are ordered as follows::

        public < draft < secret

    Returns 0 on success, 1 if some phases could not be changed.

    (For more information about the phases concept, see :hg:`help phases`.)
    """
    opts = pycompat.byteskwargs(opts)
    # search for a unique phase argument; the option name doubles as an
    # index into phases.phasenames (public=0, draft=1, secret=2)
    targetphase = None
    for idx, name in enumerate(phases.phasenames):
        if opts[name]:
            if targetphase is not None:
                raise error.Abort(_('only one phase can be specified'))
            targetphase = idx

    # look for specified revision
    revs = list(revs)
    revs.extend(opts['rev'])
    if not revs:
        # display both parents as the second parent phase can influence
        # the phase of a merge commit
        revs = [c.rev() for c in repo[None].parents()]

    revs = scmutil.revrange(repo, revs)

    lock = None
    ret = 0
    if targetphase is None:
        # display only: no lock or transaction needed
        for r in revs:
            ctx = repo[r]
            ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
    else:
        tr = None
        lock = repo.lock()
        try:
            tr = repo.transaction("phase")
            # set phase
            if not revs:
                raise error.Abort(_('empty revision set'))
            nodes = [repo[r].node() for r in revs]
            # moving revision from public to draft may hide them
            # We have to check result on an unfiltered repository
            unfi = repo.unfiltered()
            getphase = unfi._phasecache.phase
            # snapshot of every revision's phase, to report what changed
            olddata = [getphase(unfi, r) for r in unfi]
            phases.advanceboundary(repo, tr, targetphase, nodes)
            if opts['force']:
                # --force also allows moving to a *higher* phase
                phases.retractboundary(repo, tr, targetphase, nodes)
            tr.close()
        finally:
            if tr is not None:
                tr.release()
            lock.release()
        # re-fetch: the phase cache may have been invalidated by the
        # transaction above (nodes/olddata/unfi survive the try block)
        getphase = unfi._phasecache.phase
        newdata = [getphase(unfi, r) for r in unfi]
        changes = sum(newdata[r] != olddata[r] for r in unfi)
        cl = unfi.changelog
        # requested nodes that ended below the target phase were rejected
        rejected = [n for n in nodes
                    if newdata[cl.rev(n)] < targetphase]
        if rejected:
            ui.warn(_('cannot move %i changesets to a higher '
                      'phase, use --force\n') % len(rejected))
            ret = 1
        if changes:
            msg = _('phase changed for %i changesets\n') % changes
            if ret:
                # partial success: make the message visible by default
                ui.status(msg)
            else:
                ui.note(msg)
        else:
            ui.warn(_('no phases changed\n'))
    return ret
3786 3788
def postincoming(ui, repo, modheads, optupdate, checkout, brev):
    """Run after a changegroup has been added via pull/unbundle

    This takes arguments below:

    :modheads: change of heads by pull/unbundle
    :optupdate: updating working directory is needed or not
    :checkout: update destination revision (or None to default destination)
    :brev: a name, which might be a bookmark to be activated after updating
    """
    if modheads == 0:
        return
    if optupdate:
        try:
            return hg.updatetotally(ui, repo, checkout, brev)
        except error.UpdateAbort as inst:
            # Re-raise with a friendlier "not updating" prefix, preserving
            # the original hint.
            raise error.UpdateAbort(_("not updating: %s") % str(inst),
                                    hint=inst.hint)
    if modheads <= 1:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
        return
    # Several heads arrived: suggest the most relevant follow-up command
    # based on how many heads the current branch now has.
    nbbranchheads = len(repo.branchheads())
    if nbbranchheads == modheads:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    elif nbbranchheads > 1:
        ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
                    "merge)\n"))
    else:
        ui.status(_("(run 'hg heads' to see heads)\n"))
3817 3819
@command('^pull',
    [('u', 'update', None,
     _('update to new branch head if changesets were pulled')),
    ('f', 'force', None, _('run even when remote repository is unrelated')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
    ('b', 'branch', [], _('a specific branch you would like to pull'),
     _('BRANCH')),
    ] + remoteopts,
    _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Specifying bookmark as ``.`` is equivalent to specifying the active
    bookmark's name.

    Returns 0 on success, 1 if an update had unresolved files.
    """

    opts = pycompat.byteskwargs(opts)
    if ui.configbool('commands', 'update.requiredest') and opts.get('update'):
        msg = _('update destination required by configuration')
        hint = _('use hg pull followed by hg update DEST')
        raise error.Abort(msg, hint=hint)

    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    other = hg.peer(repo, opts, source)
    try:
        revs, checkout = hg.addbranchrevs(repo, other, branches,
                                          opts.get('rev'))

        pullopargs = {}
        if opts.get('bookmark'):
            if not revs:
                revs = []
            # The list of bookmark used here is not the one used to actually
            # update the bookmark name. This can result in the revision pulled
            # not ending up with the name of the bookmark because of a race
            # condition on the server. (See issue 4689 for details)
            remotebookmarks = other.listkeys('bookmarks')
            pullopargs['remotebookmarks'] = remotebookmarks
            for b in opts['bookmark']:
                b = repo._bookmarks.expandname(b)
                if b not in remotebookmarks:
                    raise error.Abort(_('remote bookmark %s not found!') % b)
                revs.append(remotebookmarks[b])

        if revs:
            try:
                # When 'rev' is a bookmark name, we cannot guarantee that it
                # will be updated with that name because of a race condition
                # server side. (See issue 4689 for details)
                oldrevs = revs
                revs = [] # actually, nodes
                for r in oldrevs:
                    node = other.lookup(r)
                    revs.append(node)
                    if r == checkout:
                        checkout = node
            except error.CapabilityError:
                err = _("other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise error.Abort(err)

        pullopargs.update(opts.get('opargs', {}))
        modheads = exchange.pull(repo, other, heads=revs,
                                 force=opts.get('force'),
                                 bookmarks=opts.get('bookmark', ()),
                                 opargs=pullopargs).cgresult

        # brev is a name, which might be a bookmark to be activated at
        # the end of the update. In other words, it is an explicit
        # destination of the update
        brev = None

        if checkout:
            checkout = str(repo.changelog.rev(checkout))

            # order below depends on implementation of
            # hg.addbranchrevs(). opts['bookmark'] is ignored,
            # because 'checkout' is determined without it.
            if opts.get('rev'):
                brev = opts['rev'][0]
            elif opts.get('branch'):
                brev = opts['branch'][0]
            else:
                brev = branches[0]
        repo._subtoppath = source
        try:
            ret = postincoming(ui, repo, modheads, opts.get('update'),
                               checkout, brev)

        finally:
            del repo._subtoppath

    finally:
        other.close()
    return ret
3933 3935
@command('^push',
    [('f', 'force', None, _('force push')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'),
     _('REV')),
    ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
    ('b', 'branch', [],
     _('a specific branch you would like to push'), _('BRANCH')),
    ('', 'new-branch', False, _('allow pushing a new branch')),
    ] + remoteopts,
    _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    .. note::

       Extra care should be taken with the -f/--force option,
       which will push all new heads on all branches, an action which will
       almost always cause confusion for collaborators.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    If -B/--bookmark is used, the specified bookmarked revision, its
    ancestors, and the bookmark will be pushed to the remote
    repository. Specifying ``.`` is equivalent to specifying the active
    bookmark's name.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    Returns 0 if push was successful, 1 if nothing to push.
    """

    opts = pycompat.byteskwargs(opts)
    if opts.get('bookmark'):
        ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            b = repo._bookmarks.expandname(b)
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    path = ui.paths.getpath(dest, default=('default-push', 'default'))
    if not path:
        raise error.Abort(_('default repository not configured!'),
                          hint=_("see 'hg help config.paths'"))
    dest = path.pushloc or path.loc
    branches = (path.branch, opts.get('branch') or [])
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    other = hg.peer(repo, opts, dest)

    if revs:
        revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
        if not revs:
            raise error.Abort(_("specified revisions evaluate to an empty set"),
                              hint=_("use different revision arguments"))
    elif path.pushrev:
        # It doesn't make any sense to specify ancestor revisions. So limit
        # to DAG heads to make discovery simpler.
        expr = revsetlang.formatspec('heads(%r)', path.pushrev)
        revs = scmutil.revrange(repo, [expr])
        revs = [repo[rev].node() for rev in revs]
        if not revs:
            raise error.Abort(_('default push revset for path evaluates to an '
                                'empty set'))

    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            result = c.sub(s).push(opts)
            if result == 0:
                return not result
    finally:
        del repo._subtoppath
    pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
                           newbranch=opts.get('new_branch'),
                           bookmarks=opts.get('bookmark', ()),
                           opargs=opts.get('opargs'))

    result = not pushop.cgresult

    if pushop.bkresult is not None:
        if pushop.bkresult == 2:
            result = 2
        elif not result and pushop.bkresult:
            result = 2

    return result
4046 4048
@command('recover', [])
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1
4062 4064
@command('^remove|rm',
    [('A', 'after', None, _('record delete for missing files')),
    ('f', 'force', None,
     _('forget added files, delete modified files')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the current branch.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`. To undo added
    files, see :hg:`forget`.

    .. container:: verbose

      -A/--after can be used to remove only files that have already
      been deleted, -f/--force can be used to force deletion, and -Af
      can be used to remove files from the next revision without
      deleting them from the working directory.

      The following table details the behavior of remove for different
      file states (columns) and option combinations (rows). The file
      states are Added [A], Clean [C], Modified [M] and Missing [!]
      (as reported by :hg:`status`). The actions are Warn, Remove
      (from branch) and Delete (from disk):

      ========= == == == ==
      opt/state A  C  M  !
      ========= == == == ==
      none      W  RD W  R
      -f        R  RD RD R
      -A        W  W  W  R
      -Af       R  R  R  R
      ========= == == == ==

      .. note::

         :hg:`remove` never deletes files in Added [A] state from the
         working directory, not even if ``--force`` is specified.

    Returns 0 on success, 1 if any warnings encountered.
    """

    opts = pycompat.byteskwargs(opts)
    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise error.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    subrepos = opts.get('subrepos')
    return cmdutil.remove(ui, repo, m, "", after, force, subrepos)
4117 4119
@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    opts = pycompat.byteskwargs(opts)
    with repo.wlock(False):
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
4142 4144
@command('resolve',
    [('a', 'all', None, _('select all unresolved files')),
    ('l', 'list', None, _('list state of files needing merge')),
    ('m', 'mark', None, _('mark files as resolved')),
    ('u', 'unmark', None, _('mark files as unresolved')),
    ('n', 'no-status', None, _('hide status prefix'))]
    + mergetoolopts + walkopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def resolve(ui, repo, *pats, **opts):
    """redo merges or set/view the merge status of files

    Merges with unresolved conflicts are often the result of
    non-interactive merging using the ``internal:merge`` configuration
    setting, or a command-line merge tool like ``diff3``. The resolve
    command is used to manage the files involved in a merge, after
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents). See :hg:`help
    merge-tools` for information on configuring merge tools.

    The resolve command can be used in the following ways:

    - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
      files, discarding any previous merge attempts. Re-merging is not
      performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.
      You can use ``set:unresolved()`` or ``set:resolved()`` to filter
      the list. See :hg:`help filesets` for details.

    .. note::

       Mercurial will not let you commit files with unresolved merge
       conflicts. You must use :hg:`resolve -m ...` before you can
       commit after a conflicting merge.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

    opts = pycompat.byteskwargs(opts)
    flaglist = 'all mark unmark list no_status'.split()
    all, mark, unmark, show, nostatus = \
        [opts.get(o) for o in flaglist]

    if (show and (mark or unmark)) or (mark and unmark):
        raise error.Abort(_("too many options specified"))
    if pats and all:
        raise error.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise error.Abort(_('no files or directories specified'),
                          hint=('use --all to re-merge all unresolved files'))

    if show:
        ui.pager('resolve')
        fm = ui.formatter('resolve', opts)
        ms = mergemod.mergestate.read(repo)
        m = scmutil.match(repo[None], pats, opts)
        for f in ms:
            if not m(f):
                continue
            l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved',
                              'd': 'driverresolved'}[ms[f]]
            fm.startitem()
            fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
            fm.write('path', '%s\n', f, label=l)
        fm.end()
        return 0

    with repo.wlock():
        ms = mergemod.mergestate.read(repo)

        if not (ms.active() or repo.dirstate.p2() != nullid):
            raise error.Abort(
                _('resolve command not applicable when not merging'))

        wctx = repo[None]

        if ms.mergedriver and ms.mdstate() == 'u':
            proceed = mergemod.driverpreprocess(repo, ms, wctx)
            ms.commit()
            # allow mark and unmark to go through
            if not mark and not unmark and not proceed:
                return 1

        m = scmutil.match(wctx, pats, opts)
        ret = 0
        didwork = False
        runconclude = False

        tocomplete = []
        for f in ms:
            if not m(f):
                continue

            didwork = True

            # don't let driver-resolved files be marked, and run the conclude
            # step if asked to resolve
            if ms[f] == "d":
                exact = m.exact(f)
                if mark:
                    if exact:
                        ui.warn(_('not marking %s as it is driver-resolved\n')
                                % f)
                elif unmark:
                    if exact:
                        ui.warn(_('not unmarking %s as it is driver-resolved\n')
                                % f)
                else:
                    runconclude = True
                continue

            if mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                try:
                    util.copyfile(a, a + ".resolve")
                except (IOError, OSError) as inst:
                    if inst.errno != errno.ENOENT:
                        raise

                try:
                    # preresolve file
                    ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                 'resolve')
                    complete, r = ms.preresolve(f, wctx)
                    if not complete:
                        tocomplete.append(f)
                    elif r:
                        ret = 1
                finally:
                    ui.setconfig('ui', 'forcemerge', '', 'resolve')
                    ms.commit()

                # replace filemerge's .orig file with our resolve file, but only
                # for merges that are complete
                if complete:
                    try:
                        util.rename(a + ".resolve",
                                    scmutil.origpath(ui, repo, a))
                    except OSError as inst:
                        if inst.errno != errno.ENOENT:
                            raise

        for f in tocomplete:
            try:
                # resolve file
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                             'resolve')
                r = ms.resolve(f, wctx)
                if r:
                    ret = 1
            finally:
                ui.setconfig('ui', 'forcemerge', '', 'resolve')
                ms.commit()

            # replace filemerge's .orig file with our resolve file
            a = repo.wjoin(f)
            try:
                util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
            except OSError as inst:
                if inst.errno != errno.ENOENT:
                    raise

        ms.commit()
        ms.recordactions()

        if not didwork and pats:
            hint = None
            if not any([p for p in pats if p.find(':') >= 0]):
                pats = ['path:%s' % p for p in pats]
                m = scmutil.match(wctx, pats, opts)
                for f in ms:
                    if not m(f):
                        continue
                    flags = ''.join(['-%s ' % o[0] for o in flaglist
                                     if opts.get(o)])
                    hint = _("(try: hg resolve %s%s)\n") % (
                             flags,
                             ' '.join(pats))
                    break
            ui.warn(_("arguments do not match paths that need resolving\n"))
            if hint:
                ui.warn(hint)
        elif ms.mergedriver and ms.mdstate() != 's':
            # run conclude step when either a driver-resolved file is requested
            # or there are no driver-resolved files
            # we can't use 'ret' to determine whether any files are unresolved
            # because we might not have tried to resolve some
            if ((runconclude or not list(ms.driverresolved()))
                and not list(ms.unresolved())):
                proceed = mergemod.driverconclude(repo, ms, wctx)
                ms.commit()
                if not proceed:
                    return 1

    # Nudge users into finishing an unfinished operation
    unresolvedf = list(ms.unresolved())
    driverresolvedf = list(ms.driverresolved())
    if not unresolvedf and not driverresolvedf:
        ui.status(_('(no more unresolved files)\n'))
        cmdutil.checkafterresolved(repo)
    elif not unresolvedf:
        ui.status(_('(no more unresolved files -- '
                    'run "hg resolve --all" to conclude)\n'))

    return ret
4366 4368
@command('revert',
    [('a', 'all', None, _('revert all changes when no arguments given')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
    ('C', 'no-backup', None, _('do not save backup copies of files')),
    ('i', 'interactive', None,
            _('interactively select the changes (EXPERIMENTAL)')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [-r REV] [NAME]...'))
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::

       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel an uncommitted merge (and lose your changes),
       use :hg:`update --clean .`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup. It is possible to store
    the backup files in a custom directory relative to the root of the
    repository by setting the ``ui.origbackuppath`` configuration
    option.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help backout` for a way to reverse the effect of an
    earlier changeset.

    Returns 0 on success.
    """

    if opts.get("date"):
        if opts.get("rev"):
            raise error.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        # revert after merge is a trap for new users (issue2915)
        raise error.Abort(_('uncommitted merge with no revision specified'),
                          hint=_("use 'hg update' or see 'hg help revert'"))

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    if (not (pats or opts.get('include') or opts.get('exclude') or
             opts.get('all') or opts.get('interactive'))):
        msg = _("no files or directories specified")
        if p2 != nullid:
            hint = _("uncommitted merge, use --all to discard all changes,"
                     " or 'hg update -C .' to abort the merge")
            raise error.Abort(msg, hint=hint)
        dirty = any(repo.status())
        node = ctx.node()
        if node != parent:
            if dirty:
                hint = _("uncommitted changes, use --all to discard all"
                         " changes, or 'hg update %s' to update") % ctx.rev()
            else:
                hint = _("use --all to revert all files,"
                         " or 'hg update %s' to update") % ctx.rev()
        elif dirty:
            hint = _("uncommitted changes, use --all to discard all changes")
        else:
            hint = _("use --all to revert all files")
        raise error.Abort(msg, hint=hint)

    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
4449 4451
@command('rollback', dryrunopts +
         [('f', 'force', False, _('ignore safety measures'))])
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

      To avoid permanent data loss, rollback will refuse to rollback a
      commit transaction if it isn't checked out. Use --force to
      override this protection.

      The rollback command can be entirely disabled by setting the
      ``ui.rollback`` configuration setting to false. If you're here
      because you want to use rollback and it's disabled, you can
      re-enable the command by setting ``ui.rollback`` to true.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    if not ui.configbool('ui', 'rollback', True):
        raise error.Abort(_('rollback is disabled because it is unsafe'),
                          hint=('see `hg help -v rollback` for information'))
    return repo.rollback(dryrun=opts.get(r'dry_run'),
                         force=opts.get(r'force'))
4502 4504
@command('root', [])
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    Returns 0 on success.
    """
    ui.write(repo.root + "\n")
4512 4514
@command('^serve',
    [('A', 'accesslog', '', _('name of access log file to write to'),
     _('FILE')),
    ('d', 'daemon', None, _('run server in background')),
    ('', 'daemon-postexec', [], _('used internally by daemon mode')),
    ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
    # use string type, then we can check if something was passed
    ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
    ('a', 'address', '', _('address to listen on (default: all interfaces)'),
     _('ADDR')),
    ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
     _('PREFIX')),
    ('n', 'name', '',
     _('name to show in web pages (default: working directory)'), _('NAME')),
    ('', 'web-conf', '',
     _("name of the hgweb config file (see 'hg help hgweb')"), _('FILE')),
    ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
     _('FILE')),
    ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
    ('', 'stdio', None, _('for remote clients (ADVANCED)')),
    ('', 'cmdserver', '', _('for remote clients (ADVANCED)'), _('MODE')),
    ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
    ('', 'style', '', _('template style to use'), _('STYLE')),
    ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
    ('', 'certificate', '', _('SSL certificate file'), _('FILE'))]
    + subrepoopts,
    _('[OPTION]...'),
    optionalrepo=True)
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it by default. Set the ``web.allow_push``
    option to ``*`` to allow everybody to push to the server. You
    should use a real web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    if opts["stdio"] and opts["cmdserver"]:
        raise error.Abort(_("cannot use --stdio with --cmdserver"))

    if opts["stdio"]:
        if repo is None:
            raise error.RepoError(_("there is no Mercurial repository here"
                                    " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    service = server.createservice(ui, repo, opts)
    return server.runservice(opts, initfn=service.init, runfn=service.run)
4579 4581
@command('^status|st',
    [('A', 'all', None, _('show status of all files')),
    ('m', 'modified', None, _('show only modified files')),
    ('a', 'added', None, _('show only added files')),
    ('r', 'removed', None, _('show only removed files')),
    ('d', 'deleted', None, _('show only deleted (but tracked) files')),
    ('c', 'clean', None, _('show only files without changes')),
    ('u', 'unknown', None, _('show only unknown (not tracked) files')),
    ('i', 'ignored', None, _('show only ignored files')),
    ('n', 'no-status', None, _('hide status prefix')),
    ('C', 'copies', None, _('show source of copied files')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('', 'rev', [], _('show difference from revision'), _('REV')),
    ('', 'change', '', _('list the changed files of a revision'), _('REV')),
    ] + walkopts + subrepoopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation, are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
    Unless options described with "show only ..." are given, the
    options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    .. note::

       :hg:`status` may appear to disagree with diff if permissions have
       changed or a merge has occurred. The standard diff format does
       not report permission changes and diff only reports changes
       relative to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the differences between them are
    shown. The --change option can also be used as a shortcut to list
    the changed files of a revision from its first parent.

    The codes used to show the status of files are::

      M = modified
      A = added
      R = removed
      C = clean
      ! = missing (deleted by non-hg command, but still tracked)
      ? = not tracked
      I = ignored
        = origin of the previous file (with --copies)

    .. container:: verbose

      Examples:

      - show changes in the working directory relative to a
        changeset::

          hg status --rev 9353

      - show changes in the working directory relative to the
        current directory (see :hg:`help patterns` for more information)::

          hg status re:

      - show all changes including copies in an existing changeset::

          hg status --copies --change 9353

      - get a NUL separated list of added files, suitable for xargs::

          hg status -an0

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    revs = opts.get('rev')
    change = opts.get('change')

    # Pick the two nodes being compared: --change means "rev vs its first
    # parent"; otherwise --rev supplies zero, one or two revisions.
    if revs and change:
        raise error.Abort(
            _('cannot specify --rev and --change at the same time'))
    if change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    # Paths are printed relative to cwd when patterns were given or the
    # user opted in via commands.status.relative.
    relative = pats or ui.configbool('commands', 'status.relative')
    cwd = repo.getcwd() if relative else ''

    end = '\0' if opts.get('print0') else '\n'
    copy = {}
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        show += (states[:4] + ['clean']) if ui.quiet else states
    if not show:
        # default selection: -mard when quiet, -mardu otherwise
        show = states[:4] if ui.quiet else states[:5]

    m = scmutil.match(repo[node2], pats, opts)
    stat = repo.status(node1, node2, m,
                       'ignored' in show, 'clean' in show, 'unknown' in show,
                       opts.get('subrepos'))
    changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)

    wantcopies = (opts.get('all') or opts.get('copies')
                  or ui.configbool('ui', 'statuscopies'))
    if wantcopies and not opts.get('no_status'):
        copy = copies.pathcopies(repo[node1], repo[node2], m)

    ui.pager('status')
    fm = ui.formatter('status', opts)
    fmt = '%s' + end
    showchar = not opts.get('no_status')

    for state, char, files in changestates:
        if state not in show:
            continue
        label = 'status.' + state
        for f in files:
            fm.startitem()
            fm.condwrite(showchar, 'status', '%s ', char, label=label)
            fm.write('path', fmt, repo.pathto(f, cwd), label=label)
            if f in copy:
                fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
                         label='status.copied')
    fm.end()
4717 4719
@command('^summary|sum',
    [('', 'remote', None, _('check for push and pull'))], '[--remote]')
def summary(ui, repo, **opts):
    """summarize working directory state

    This generates a brief summary of the working directory state,
    including parents, branch, commit status, phase and available updates.

    With the --remote option, this will check the default paths for
    incoming and outgoing changes. This can be time-consuming.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ui.pager('summary')
    ctx = repo[None]
    parents = ctx.parents()
    pnode = parents[0].node()
    marks = []

    # The merge state may contain record types from a newer Mercurial;
    # degrade to a warning rather than aborting the whole summary.
    mergestate = None
    try:
        mergestate = mergemod.mergestate.read(repo)
    except error.UnsupportedMergeRecords as inst:
        kinds = ' '.join(inst.recordtypes)
        ui.warn(
            _('warning: merge state has unsupported record types: %s\n')
            % kinds)
        unresolved = 0
    else:
        unresolved = [f for f in mergestate if mergestate[f] == 'u']

    for p in parents:
        # label with log.changeset (instead of log.parent) since this
        # shows a working directory parent *changeset*:
        # i18n: column positioning for "hg summary"
        ui.write(_('parent: %d:%s ') % (p.rev(), p),
                 label=cmdutil._changesetlabels(p))
        ui.write(' '.join(p.tags()), label='log.tag')
        if p.bookmarks():
            marks.extend(p.bookmarks())
        if p.rev() == -1:
            if not len(repo):
                ui.write(_(' (empty repository)'))
            else:
                ui.write(_(' (no revision checked out)'))
        if p.obsolete():
            ui.write(_(' (obsolete)'))
        if p.troubled():
            ui.write(' ('
                     + ', '.join(ui.label(trouble, 'trouble.%s' % trouble)
                                 for trouble in p.troubles())
                     + ')')
        ui.write('\n')
        if p.description():
            ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
                      label='log.summary')

    branch = ctx.branch()
    bheads = repo.branchheads(branch)
    # i18n: column positioning for "hg summary"
    branchline = _('branch: %s\n') % branch
    # the default branch is only shown in verbose/normal mode (status),
    # anything else is always written
    if branch == 'default':
        ui.status(branchline, label='log.branch')
    else:
        ui.write(branchline, label='log.branch')

    if marks:
        active = repo._activebookmark
        # i18n: column positioning for "hg summary"
        ui.write(_('bookmarks:'), label='log.bookmark')
        if active is not None:
            if active in marks:
                ui.write(' *' + active, label=bookmarks.activebookmarklabel)
                marks.remove(active)
            else:
                ui.write(' [%s]' % active, label=bookmarks.activebookmarklabel)
        for mark in marks:
            ui.write(' ' + mark, label='log.bookmark')
        ui.write('\n', label='log.bookmark')

    status = repo.status(unknown=True)

    # Split dirstate copies into "renamed" (source was removed) and
    # "copied" (source still tracked); adjust the status buckets so each
    # file is only counted once.
    copymap = repo.dirstate.copies()
    copied, renamed = [], []
    for dst, src in copymap.iteritems():
        if src in status.removed:
            status.removed.remove(src)
            renamed.append(dst)
        else:
            copied.append(dst)
        if dst in status.added:
            status.added.remove(dst)

    subs = [s for s in ctx.substate if ctx.sub(s).dirty()]

    labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
              (ui.label(_('%d added'), 'status.added'), status.added),
              (ui.label(_('%d removed'), 'status.removed'), status.removed),
              (ui.label(_('%d renamed'), 'status.copied'), renamed),
              (ui.label(_('%d copied'), 'status.copied'), copied),
              (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
              (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
              (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
              (ui.label(_('%d subrepos'), 'status.modified'), subs)]
    pieces = [fmt % len(found) for fmt, found in labels if found]

    t = ', '.join(pieces)
    cleanworkdir = False

    if repo.vfs.exists('graftstate'):
        t += _(' (graft in progress)')
    if repo.vfs.exists('updatestate'):
        t += _(' (interrupted update)')
    elif len(parents) > 1:
        t += _(' (merge)')
    elif branch != parents[0].branch():
        t += _(' (new branch)')
    elif (parents[0].closesbranch() and
          pnode in repo.branchheads(branch, closed=True)):
        t += _(' (head closed)')
    elif not (status.modified or status.added or status.removed or renamed or
              copied or subs):
        t += _(' (clean)')
        cleanworkdir = True
    elif pnode not in bheads:
        t += _(' (new branch head)')

    if parents:
        pendingphase = max(p.phase() for p in parents)
    else:
        pendingphase = phases.public

    if pendingphase > phases.newcommitphase(ui):
        t += ' (%s)' % phases.phasenames[pendingphase]

    if cleanworkdir:
        # i18n: column positioning for "hg summary"
        ui.status(_('commit: %s\n') % t.strip())
    else:
        # i18n: column positioning for "hg summary"
        ui.write(_('commit: %s\n') % t.strip())

    # all ancestors of branch heads - all ancestors of parent = new csets
    new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
                                         bheads))

    if new == 0:
        # i18n: column positioning for "hg summary"
        ui.status(_('update: (current)\n'))
    elif pnode not in bheads:
        # i18n: column positioning for "hg summary"
        ui.write(_('update: %d new changesets (update)\n') % new)
    else:
        # i18n: column positioning for "hg summary"
        ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
                 (new, len(bheads)))

    phasebits = []
    draft = len(repo.revs('draft()'))
    if draft:
        phasebits.append(_('%d draft') % draft)
    secret = len(repo.revs('secret()'))
    if secret:
        phasebits.append(_('%d secret') % secret)

    if draft or secret:
        ui.status(_('phases: %s\n') % ', '.join(phasebits))

    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        for trouble in ("unstable", "divergent", "bumped"):
            numtrouble = len(repo.revs(trouble + "()"))
            # We write all the possibilities to ease translation
            troublemsg = {
                "unstable": _("unstable: %d changesets"),
                "divergent": _("divergent: %d changesets"),
                "bumped": _("bumped: %d changesets"),
            }
            if numtrouble > 0:
                ui.status(troublemsg[trouble] % numtrouble + "\n")

    cmdutil.summaryhooks(ui, repo)

    # Decide whether remote state is needed: --remote forces both sides;
    # otherwise registered remote hooks may request one of them.
    if opts.get('remote'):
        needsincoming, needsoutgoing = True, True
    else:
        needsincoming, needsoutgoing = False, False
        for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
            needsincoming = needsincoming or bool(i)
            needsoutgoing = needsoutgoing or bool(o)
        if not needsincoming and not needsoutgoing:
            return

    def getincoming():
        # locate the default pull source and compute common/incoming heads
        source, branches = hg.parseurl(ui.expandpath('default'))
        sbranch = branches[0]
        try:
            other = hg.peer(repo, {}, source)
        except error.RepoError:
            if opts.get('remote'):
                raise
            return source, sbranch, None, None, None
        revs, checkout = hg.addbranchrevs(repo, other, branches, None)
        if revs:
            revs = [other.lookup(rev) for rev in revs]
        ui.debug('comparing with %s\n' % util.hidepassword(source))
        repo.ui.pushbuffer()
        commoninc = discovery.findcommonincoming(repo, other, heads=revs)
        repo.ui.popbuffer()
        return source, sbranch, other, commoninc, commoninc[1]

    if needsincoming:
        source, sbranch, sother, commoninc, incoming = getincoming()
    else:
        source = sbranch = sother = commoninc = incoming = None

    def getoutgoing():
        # locate the default push destination and compute outgoing csets
        dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
        dbranch = branches[0]
        revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
        if source != dest:
            try:
                dother = hg.peer(repo, {}, dest)
            except error.RepoError:
                if opts.get('remote'):
                    raise
                return dest, dbranch, None, None
            ui.debug('comparing with %s\n' % util.hidepassword(dest))
        elif sother is None:
            # there is no explicit destination peer, but source one is invalid
            return dest, dbranch, None, None
        else:
            dother = sother
        if (source != dest or (sbranch is not None and sbranch != dbranch)):
            common = None
        else:
            common = commoninc
        if revs:
            revs = [repo.lookup(rev) for rev in revs]
        repo.ui.pushbuffer()
        outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
                                                commoninc=common)
        repo.ui.popbuffer()
        return dest, dbranch, dother, outgoing

    if needsoutgoing:
        dest, dbranch, dother, outgoing = getoutgoing()
    else:
        dest = dbranch = dother = outgoing = None

    if opts.get('remote'):
        t = []
        if incoming:
            t.append(_('1 or more incoming'))
        o = outgoing.missing
        if o:
            t.append(_('%d outgoing') % len(o))
        other = dother or sother
        if 'bookmarks' in other.listkeys('namespaces'):
            counts = bookmarks.summary(repo, other)
            if counts[0] > 0:
                t.append(_('%d incoming bookmarks') % counts[0])
            if counts[1] > 0:
                t.append(_('%d outgoing bookmarks') % counts[1])

        if t:
            # i18n: column positioning for "hg summary"
            ui.write(_('remote: %s\n') % (', '.join(t)))
        else:
            # i18n: column positioning for "hg summary"
            ui.status(_('remote: (synced)\n'))

    cmdutil.summaryremotehooks(ui, repo, opts,
                               ((source, sbranch, sother, commoninc),
                                (dest, dbranch, dother, outgoing)))
4998 5000
@command('tag',
    [('f', 'force', None, _('force tag')),
    ('l', 'local', None, _('make the tag local')),
    ('r', 'rev', '', _('revision to tag'), _('REV')),
    ('', 'remove', None, _('remove a tag')),
    # -l/--local is already there, commitopts cannot be used
    ('e', 'edit', None, _('invoke editor on commit messages')),
    ('m', 'message', '', _('use text as commit message'), _('TEXT')),
    ] + commitopts2,
    _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc. Changing
    an existing tag is normally disallowed; use -f/--force to override.

    If no revision is given, the parent of the working directory is
    used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed similarly
    to other project files and can be hand-edited if necessary. This
    also means that tagging creates a new commit. The file
    ".hg/localtags" is used for local tags (not shared among
    repositories).

    Tag commits are usually made at the head of a branch. If the parent
    of the working directory is not a branch head, :hg:`tag` aborts; use
    -f/--force to force the tag commit to be based on a non-head
    changeset.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Since tag names have priority over branch names during revision
    lookup, using an existing branch name as a tag name is discouraged.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        revspec = "."
        names = [raw.strip() for raw in (name1,) + names]
        if len(names) != len(set(names)):
            raise error.Abort(_('tag names must be unique'))
        for name in names:
            scmutil.checknewlabel(repo, name, 'tag')
            if not name:
                raise error.Abort(_('tag names cannot consist entirely of '
                                    'whitespace'))
        if opts.get('rev') and opts.get('remove'):
            raise error.Abort(_("--rev and --remove are incompatible"))
        if opts.get('rev'):
            revspec = opts['rev']
        message = opts.get('message')
        if opts.get('remove'):
            # Removing: every name must already exist with the matching
            # scope (local vs global); removal tags point at the null rev.
            expectedtype = 'local' if opts.get('local') else 'global'

            for name in names:
                if not repo.tagtype(name):
                    raise error.Abort(_("tag '%s' does not exist") % name)
                if repo.tagtype(name) != expectedtype:
                    if expectedtype == 'global':
                        raise error.Abort(
                            _("tag '%s' is not a global tag") % name)
                    else:
                        raise error.Abort(
                            _("tag '%s' is not a local tag") % name)
            revspec = 'null'
            if not message:
                # we don't translate commit messages
                message = 'Removed tag %s' % ', '.join(names)
        elif not opts.get('force'):
            for name in names:
                if name in repo.tags():
                    raise error.Abort(_("tag '%s' already exists "
                                        "(use -f to force)") % name)
        if not opts.get('local'):
            # A global tag commits to .hgtags, so enforce the usual
            # "clean, at a branch head" preconditions.
            p1, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise error.Abort(_('uncommitted merge'))
            bheads = repo.branchheads()
            if not opts.get('force') and bheads and p1 not in bheads:
                raise error.Abort(_('working directory is not at a branch head '
                                    '(use -f to force)'))
        tagnode = scmutil.revsingle(repo, revspec).node()

        if not message:
            # we don't translate commit messages
            message = ('Added tag %s for changeset %s' %
                       (', '.join(names), short(tagnode)))

        date = opts.get('date')
        if date:
            date = util.parsedate(date)

        editform = 'tag.remove' if opts.get('remove') else 'tag.add'
        editor = cmdutil.getcommiteditor(editform=editform,
                                         **pycompat.strkwargs(opts))

        # don't allow tagging the null rev
        if (not opts.get('remove') and
            scmutil.revsingle(repo, revspec).rev() == nullrev):
            raise error.Abort(_("cannot tag null revision"))

        tagsmod.tag(repo, names, tagnode, message, opts.get('local'),
                    opts.get('user'), date, editor=editor)
    finally:
        release(lock, wlock)
5118 5120
@command('tags', formatteropts, '')
def tags(ui, repo, **opts):
    """list repository tags

    This lists both regular and local tags. When the -v/--verbose
    switch is used, a third column "local" is printed for local tags.
    When the -q/--quiet switch is used, only the tag name is printed.

    Returns 0 on success.
    """

    opts = pycompat.byteskwargs(opts)
    ui.pager('tags')
    fm = ui.formatter('tags', opts)
    hexfunc = fm.hexfunc
    tagtype = ""

    # tagslist() is ordered oldest-first; reverse for newest-first output.
    for name, node in reversed(repo.tagslist()):
        hexnode = hexfunc(node)
        if repo.tagtype(name) == 'local':
            label = 'tags.local'
            tagtype = 'local'
        else:
            label = 'tags.normal'
            tagtype = ''

        fm.startitem()
        fm.write('tag', '%s', name, label=label)
        # pad the name to a 30-column field before "rev:node"
        fmt = " " * (30 - encoding.colwidth(name)) + ' %5d:%s'
        fm.condwrite(not ui.quiet, 'rev node', fmt,
                     repo.changelog.rev(node), hexnode, label=label)
        fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
                     tagtype, label=label)
        fm.plain('\n')
    fm.end()
5153 5155
@command('tip',
    [('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ] + templateopts,
    _('[-p] [-g]'))
def tip(ui, repo, **opts):
    """show the tip revision (DEPRECATED)

    The tip revision (usually just called the tip) is the changeset
    most recently added to the repository (and therefore the most
    recently changed head).

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.

    This command is deprecated, please use :hg:`heads` instead.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    # Render the single 'tip' changeset with the standard log displayer.
    printer = cmdutil.show_changeset(ui, repo, opts)
    printer.show(repo['tip'])
    printer.close()
5179 5181
@command('unbundle',
    [('u', 'update', None,
     _('update to new branch head if changesets were unbundled'))],
    _('[-u] FILE...'))
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more bundle files

    Apply one or more bundle files generated by :hg:`bundle`.

    Returns 0 on success, 1 if an update has unresolved files.
    """
    fnames = (fname1,) + fnames

    with repo.lock():
        for fname in fnames:
            fh = hg.openpath(ui, fname)
            bndl = exchange.readbundle(ui, fh, fname)
            # Stream clone bundles replace the whole store and have a
            # dedicated debug command; refuse them here.
            if isinstance(bndl, streamclone.streamcloneapplier):
                raise error.Abort(
                    _('packed bundles cannot be applied with '
                      '"hg unbundle"'),
                    hint=_('use "hg debugapplystreamclonebundle"'))
            url = 'bundle:' + fname
            if isinstance(bndl, bundle2.unbundle20):
                with repo.transaction('unbundle') as tr:
                    try:
                        op = bundle2.applybundle(repo, bndl, tr,
                                                 source='unbundle',
                                                 url=url)
                    except error.BundleUnknownFeatureError as exc:
                        raise error.Abort(
                            _('%s: unknown bundle feature, %s') % (fname, exc),
                            hint=_("see https://mercurial-scm.org/"
                                   "wiki/BundleFeature for more "
                                   "information"))
                    # combine per-changegroup return codes into one result
                    changes = [r.get('return', 0)
                               for r in op.records['changegroup']]
                    modheads = changegroup.combineresults(changes)
            else:
                txnname = 'unbundle\n%s' % util.hidepassword(url)
                with repo.transaction(txnname) as tr:
                    modheads, addednodes = bndl.apply(repo, tr, 'unbundle',
                                                      url)

    return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
5224 5226
@command('^update|up|checkout|co',
    [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
    ('c', 'check', None, _('require clean working directory')),
    ('m', 'merge', None, _('merge uncommitted changes')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revision'), _('REV'))
    ] + mergetoolopts,
    _('[-C|-c|-m] [-d DATE] [[-r] REV]'))
def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
           merge=None, tool=None):
    """update working directory (or switch revisions)

    Update the repository's working directory to the specified
    changeset. If no changeset is specified, update to the tip of the
    current named branch and move the active bookmark (see :hg:`help
    bookmarks`).

    Update sets the working directory's parent revision to the specified
    changeset (see :hg:`help parents`).

    If the changeset is not a descendant or ancestor of the working
    directory's parent and there are uncommitted changes, the update is
    aborted. With the -c/--check option, the working directory is checked
    for uncommitted changes; if none are found, the working directory is
    updated to the specified changeset.

    .. container:: verbose

      The -C/--clean, -c/--check, and -m/--merge options control what
      happens if the working directory contains uncommitted changes.
      At most of one of them can be specified.

      1. If no option is specified, and if
         the requested changeset is an ancestor or descendant of
         the working directory's parent, the uncommitted changes
         are merged into the requested changeset and the merged
         result is left uncommitted. If the requested changeset is
         not an ancestor or descendant (that is, it is on another
         branch), the update is aborted and the uncommitted changes
         are preserved.

      2. With the -m/--merge option, the update is allowed even if the
         requested changeset is not an ancestor or descendant of
         the working directory's parent.

      3. With the -c/--check option, the update is aborted and the
         uncommitted changes are preserved.

      4. With the -C/--clean option, uncommitted changes are discarded and
         the working directory is updated to the requested changeset.

    To cancel an uncommitted merge (and lose your changes), use
    :hg:`update --clean .`.

    Use null as the changeset to remove the working directory (like
    :hg:`clone -U`).

    If you want to revert just one file to an older revision, use
    :hg:`revert [-r REV] NAME`.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if there are unresolved files.
    """
    # The target may be given positionally (node) or via -r/--rev, but
    # not both.
    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    if ui.configbool('commands', 'update.requiredest'):
        if not node and not rev and not date:
            raise error.Abort(_('you must specify a destination'),
                              hint=_('for example: hg update ".::"'))

    if rev is None or rev == '':
        rev = node

    if date and rev is not None:
        raise error.Abort(_("you can't specify a revision and a date"))

    # --clean/--check/--merge are mutually exclusive dirty-workdir policies.
    # NOTE: the message previously read "-m/merge"; spell the long form
    # with a double dash like the other two options.
    if len([x for x in (clean, check, merge) if x]) > 1:
        raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
                            "or -m/--merge"))

    # Map the flags onto the updatecheck mode understood by
    # hg.updatetotally(); None means "use the configured default".
    updatecheck = None
    if check:
        updatecheck = 'abort'
    elif merge:
        updatecheck = 'none'

    with repo.wlock():
        cmdutil.clearunfinished(repo)

        if date:
            rev = cmdutil.finddate(ui, repo, date)

        # if we defined a bookmark, we have to remember the original name
        brev = rev
        rev = scmutil.revsingle(repo, rev, rev).rev()

        repo.ui.setconfig('ui', 'forcemerge', tool, 'update')

        return hg.updatetotally(ui, repo, rev, brev, clean=clean,
                                updatecheck=updatecheck)
5327 5329
@command('verify', [])
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.

    Please see https://mercurial-scm.org/wiki/RepositoryCorruption
    for more information about recovery from corruption of the
    repository.

    Returns 0 on success, 1 if errors are encountered.
    """
    # All of the actual checking lives in hg.verify(); pass its exit
    # status straight through as this command's return code.
    ret = hg.verify(repo)
    return ret
5346 5348
@command('version', [] + formatteropts, norepo=True)
def version_(ui, **opts):
    """output version and copyright information"""
    opts = pycompat.byteskwargs(opts)
    if ui.verbose:
        ui.pager('version')
    fm = ui.formatter("version", opts)
    fm.startitem()
    fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
             util.version())
    license = _(
        "(see https://mercurial-scm.org for more information)\n"
        "\nCopyright (C) 2005-2017 Matt Mackall and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    )
    if not ui.quiet:
        fm.plain(license)

    if ui.verbose:
        fm.plain(_("\nEnabled extensions:\n\n"))
    # collect (name, version, is-internal) for every loaded extension so
    # names and versions can be formatted into columns
    exts = []
    for name, module in extensions.extensions():
        exts.append((name,
                     extensions.moduleversion(module) or None,
                     extensions.ismoduleinternal(module)))
    fn = fm.nested("extensions")
    if exts:
        namefmt = " %%-%ds " % max(len(ext[0]) for ext in exts)
        places = [_("external"), _("internal")]
        for extname, extver, isinternal in exts:
            fn.startitem()
            fn.condwrite(ui.verbose, "name", namefmt, extname)
            if ui.verbose:
                fn.plain("%s " % places[isinternal])
            fn.data(bundled=isinternal)
            fn.condwrite(ui.verbose and extver, "ver", "%s", extver)
            if ui.verbose:
                fn.plain("\n")
    fn.end()
    fm.end()
5392 5394
def loadcmdtable(ui, name, cmdtable):
    """Load command functions from specified cmdtable
    """
    # Warn about commands the extension shadows before merging its table
    # into the global one.
    clashing = [cmd for cmd in cmdtable if cmd in table]
    if clashing:
        ui.warn(_("extension '%s' overrides commands: %s\n")
                % (name, " ".join(clashing)))
    table.update(cmdtable)
@@ -1,2205 +1,2216 b''
1 1 # debugcommands.py - command processing for debug* commands
2 2 #
3 3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import difflib
11 11 import errno
12 12 import operator
13 13 import os
14 14 import random
15 15 import socket
16 16 import string
17 17 import sys
18 18 import tempfile
19 19 import time
20 20
21 21 from .i18n import _
22 22 from .node import (
23 23 bin,
24 24 hex,
25 25 nullhex,
26 26 nullid,
27 27 nullrev,
28 28 short,
29 29 )
30 30 from . import (
31 31 bundle2,
32 32 changegroup,
33 33 cmdutil,
34 34 color,
35 35 context,
36 36 dagparser,
37 37 dagutil,
38 38 encoding,
39 39 error,
40 40 exchange,
41 41 extensions,
42 42 filemerge,
43 43 fileset,
44 44 formatter,
45 45 hg,
46 46 localrepo,
47 47 lock as lockmod,
48 48 merge as mergemod,
49 49 obsolete,
50 50 phases,
51 51 policy,
52 52 pvec,
53 53 pycompat,
54 54 registrar,
55 55 repair,
56 56 revlog,
57 57 revset,
58 58 revsetlang,
59 59 scmutil,
60 60 setdiscovery,
61 61 simplemerge,
62 62 smartset,
63 63 sslutil,
64 64 streamclone,
65 65 templater,
66 66 treediscovery,
67 67 upgrade,
68 68 util,
69 69 vfs as vfsmod,
70 70 )
71 71
72 72 release = lockmod.release
73 73
74 74 command = registrar.command()
75 75
76 76 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
77 77 def debugancestor(ui, repo, *args):
78 78 """find the ancestor revision of two revisions in a given index"""
79 79 if len(args) == 3:
80 80 index, rev1, rev2 = args
81 81 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
82 82 lookup = r.lookup
83 83 elif len(args) == 2:
84 84 if not repo:
85 85 raise error.Abort(_('there is no Mercurial repository here '
86 86 '(.hg not found)'))
87 87 rev1, rev2 = args
88 88 r = repo.changelog
89 89 lookup = repo.lookup
90 90 else:
91 91 raise error.Abort(_('either two or three arguments required'))
92 92 a = r.ancestor(lookup(rev1), lookup(rev2))
93 93 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
94 94
95 95 @command('debugapplystreamclonebundle', [], 'FILE')
96 96 def debugapplystreamclonebundle(ui, repo, fname):
97 97 """apply a stream clone bundle file"""
98 98 f = hg.openpath(ui, fname)
99 99 gen = exchange.readbundle(ui, f, fname)
100 100 gen.apply(repo)
101 101
102 102 @command('debugbuilddag',
103 103 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
104 104 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
105 105 ('n', 'new-file', None, _('add new file at each rev'))],
106 106 _('[OPTION]... [TEXT]'))
107 107 def debugbuilddag(ui, repo, text=None,
108 108 mergeable_file=False,
109 109 overwritten_file=False,
110 110 new_file=False):
111 111 """builds a repo with a given DAG from scratch in the current empty repo
112 112
113 113 The description of the DAG is read from stdin if not given on the
114 114 command line.
115 115
116 116 Elements:
117 117
118 118 - "+n" is a linear run of n nodes based on the current default parent
119 119 - "." is a single node based on the current default parent
120 120 - "$" resets the default parent to null (implied at the start);
121 121 otherwise the default parent is always the last node created
122 122 - "<p" sets the default parent to the backref p
123 123 - "*p" is a fork at parent p, which is a backref
124 124 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
125 125 - "/p2" is a merge of the preceding node and p2
126 126 - ":tag" defines a local tag for the preceding node
127 127 - "@branch" sets the named branch for subsequent nodes
128 128 - "#...\\n" is a comment up to the end of the line
129 129
130 130 Whitespace between the above elements is ignored.
131 131
132 132 A backref is either
133 133
134 134 - a number n, which references the node curr-n, where curr is the current
135 135 node, or
136 136 - the name of a local tag you placed earlier using ":tag", or
137 137 - empty to denote the default parent.
138 138
139 139 All string valued-elements are either strictly alphanumeric, or must
140 140 be enclosed in double quotes ("..."), with "\\" as escape character.
141 141 """
142 142
143 143 if text is None:
144 144 ui.status(_("reading DAG from stdin\n"))
145 145 text = ui.fin.read()
146 146
147 147 cl = repo.changelog
148 148 if len(cl) > 0:
149 149 raise error.Abort(_('repository is not empty'))
150 150
151 151 # determine number of revs in DAG
152 152 total = 0
153 153 for type, data in dagparser.parsedag(text):
154 154 if type == 'n':
155 155 total += 1
156 156
157 157 if mergeable_file:
158 158 linesperrev = 2
159 159 # make a file with k lines per rev
160 160 initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
161 161 initialmergedlines.append("")
162 162
163 163 tags = []
164 164
165 165 wlock = lock = tr = None
166 166 try:
167 167 wlock = repo.wlock()
168 168 lock = repo.lock()
169 169 tr = repo.transaction("builddag")
170 170
171 171 at = -1
172 172 atbranch = 'default'
173 173 nodeids = []
174 174 id = 0
175 175 ui.progress(_('building'), id, unit=_('revisions'), total=total)
176 176 for type, data in dagparser.parsedag(text):
177 177 if type == 'n':
178 178 ui.note(('node %s\n' % str(data)))
179 179 id, ps = data
180 180
181 181 files = []
182 182 fctxs = {}
183 183
184 184 p2 = None
185 185 if mergeable_file:
186 186 fn = "mf"
187 187 p1 = repo[ps[0]]
188 188 if len(ps) > 1:
189 189 p2 = repo[ps[1]]
190 190 pa = p1.ancestor(p2)
191 191 base, local, other = [x[fn].data() for x in (pa, p1,
192 192 p2)]
193 193 m3 = simplemerge.Merge3Text(base, local, other)
194 194 ml = [l.strip() for l in m3.merge_lines()]
195 195 ml.append("")
196 196 elif at > 0:
197 197 ml = p1[fn].data().split("\n")
198 198 else:
199 199 ml = initialmergedlines
200 200 ml[id * linesperrev] += " r%i" % id
201 201 mergedtext = "\n".join(ml)
202 202 files.append(fn)
203 203 fctxs[fn] = context.memfilectx(repo, fn, mergedtext)
204 204
205 205 if overwritten_file:
206 206 fn = "of"
207 207 files.append(fn)
208 208 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
209 209
210 210 if new_file:
211 211 fn = "nf%i" % id
212 212 files.append(fn)
213 213 fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
214 214 if len(ps) > 1:
215 215 if not p2:
216 216 p2 = repo[ps[1]]
217 217 for fn in p2:
218 218 if fn.startswith("nf"):
219 219 files.append(fn)
220 220 fctxs[fn] = p2[fn]
221 221
222 222 def fctxfn(repo, cx, path):
223 223 return fctxs.get(path)
224 224
225 225 if len(ps) == 0 or ps[0] < 0:
226 226 pars = [None, None]
227 227 elif len(ps) == 1:
228 228 pars = [nodeids[ps[0]], None]
229 229 else:
230 230 pars = [nodeids[p] for p in ps]
231 231 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
232 232 date=(id, 0),
233 233 user="debugbuilddag",
234 234 extra={'branch': atbranch})
235 235 nodeid = repo.commitctx(cx)
236 236 nodeids.append(nodeid)
237 237 at = id
238 238 elif type == 'l':
239 239 id, name = data
240 240 ui.note(('tag %s\n' % name))
241 241 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
242 242 elif type == 'a':
243 243 ui.note(('branch %s\n' % data))
244 244 atbranch = data
245 245 ui.progress(_('building'), id, unit=_('revisions'), total=total)
246 246 tr.close()
247 247
248 248 if tags:
249 249 repo.vfs.write("localtags", "".join(tags))
250 250 finally:
251 251 ui.progress(_('building'), None)
252 252 release(tr, lock, wlock)
253 253
254 254 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
255 255 indent_string = ' ' * indent
256 256 if all:
257 257 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
258 258 % indent_string)
259 259
260 260 def showchunks(named):
261 261 ui.write("\n%s%s\n" % (indent_string, named))
262 262 chain = None
263 263 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
264 264 node = chunkdata['node']
265 265 p1 = chunkdata['p1']
266 266 p2 = chunkdata['p2']
267 267 cs = chunkdata['cs']
268 268 deltabase = chunkdata['deltabase']
269 269 delta = chunkdata['delta']
270 270 ui.write("%s%s %s %s %s %s %s\n" %
271 271 (indent_string, hex(node), hex(p1), hex(p2),
272 272 hex(cs), hex(deltabase), len(delta)))
273 273 chain = node
274 274
275 275 chunkdata = gen.changelogheader()
276 276 showchunks("changelog")
277 277 chunkdata = gen.manifestheader()
278 278 showchunks("manifest")
279 279 for chunkdata in iter(gen.filelogheader, {}):
280 280 fname = chunkdata['filename']
281 281 showchunks(fname)
282 282 else:
283 283 if isinstance(gen, bundle2.unbundle20):
284 284 raise error.Abort(_('use debugbundle2 for this file'))
285 285 chunkdata = gen.changelogheader()
286 286 chain = None
287 287 for chunkdata in iter(lambda: gen.deltachunk(chain), {}):
288 288 node = chunkdata['node']
289 289 ui.write("%s%s\n" % (indent_string, hex(node)))
290 290 chain = node
291 291
292 292 def _debugobsmarkers(ui, part, indent=0, **opts):
293 293 """display version and markers contained in 'data'"""
294 294 data = part.read()
295 295 indent_string = ' ' * indent
296 296 try:
297 297 version, markers = obsolete._readmarkers(data)
298 298 except error.UnknownVersion as exc:
299 299 msg = "%sunsupported version: %s (%d bytes)\n"
300 300 msg %= indent_string, exc.version, len(data)
301 301 ui.write(msg)
302 302 else:
303 303 msg = "%sversion: %s (%d bytes)\n"
304 304 msg %= indent_string, version, len(data)
305 305 ui.write(msg)
306 306 fm = ui.formatter('debugobsolete', opts)
307 307 for rawmarker in sorted(markers):
308 308 m = obsolete.marker(None, rawmarker)
309 309 fm.startitem()
310 310 fm.plain(indent_string)
311 311 cmdutil.showmarker(fm, m)
312 312 fm.end()
313 313
314 def _debugphaseheads(ui, data, indent=0):
315 """display version and markers contained in 'data'"""
316 indent_string = ' ' * indent
317 headsbyphase = bundle2._readphaseheads(data)
318 for phase in phases.allphases:
319 for head in headsbyphase[phase]:
320 ui.write(indent_string)
321 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
322
314 323 def _debugbundle2(ui, gen, all=None, **opts):
315 324 """lists the contents of a bundle2"""
316 325 if not isinstance(gen, bundle2.unbundle20):
317 326 raise error.Abort(_('not a bundle2 file'))
318 327 ui.write(('Stream params: %s\n' % repr(gen.params)))
319 328 parttypes = opts.get('part_type', [])
320 329 for part in gen.iterparts():
321 330 if parttypes and part.type not in parttypes:
322 331 continue
323 332 ui.write('%s -- %r\n' % (part.type, repr(part.params)))
324 333 if part.type == 'changegroup':
325 334 version = part.params.get('version', '01')
326 335 cg = changegroup.getunbundler(version, part, 'UN')
327 336 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
328 337 if part.type == 'obsmarkers':
329 338 _debugobsmarkers(ui, part, indent=4, **opts)
339 if part.type == 'phase-heads':
340 _debugphaseheads(ui, part, indent=4)
330 341
331 342 @command('debugbundle',
332 343 [('a', 'all', None, _('show all details')),
333 344 ('', 'part-type', [], _('show only the named part type')),
334 345 ('', 'spec', None, _('print the bundlespec of the bundle'))],
335 346 _('FILE'),
336 347 norepo=True)
337 348 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
338 349 """lists the contents of a bundle"""
339 350 with hg.openpath(ui, bundlepath) as f:
340 351 if spec:
341 352 spec = exchange.getbundlespec(ui, f)
342 353 ui.write('%s\n' % spec)
343 354 return
344 355
345 356 gen = exchange.readbundle(ui, f, bundlepath)
346 357 if isinstance(gen, bundle2.unbundle20):
347 358 return _debugbundle2(ui, gen, all=all, **opts)
348 359 _debugchangegroup(ui, gen, all=all, **opts)
349 360
350 361 @command('debugcheckstate', [], '')
351 362 def debugcheckstate(ui, repo):
352 363 """validate the correctness of the current dirstate"""
353 364 parent1, parent2 = repo.dirstate.parents()
354 365 m1 = repo[parent1].manifest()
355 366 m2 = repo[parent2].manifest()
356 367 errors = 0
357 368 for f in repo.dirstate:
358 369 state = repo.dirstate[f]
359 370 if state in "nr" and f not in m1:
360 371 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
361 372 errors += 1
362 373 if state in "a" and f in m1:
363 374 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
364 375 errors += 1
365 376 if state in "m" and f not in m1 and f not in m2:
366 377 ui.warn(_("%s in state %s, but not in either manifest\n") %
367 378 (f, state))
368 379 errors += 1
369 380 for f in m1:
370 381 state = repo.dirstate[f]
371 382 if state not in "nrm":
372 383 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
373 384 errors += 1
374 385 if errors:
375 386 error = _(".hg/dirstate inconsistent with current parent's manifest")
376 387 raise error.Abort(error)
377 388
378 389 @command('debugcolor',
379 390 [('', 'style', None, _('show all configured styles'))],
380 391 'hg debugcolor')
381 392 def debugcolor(ui, repo, **opts):
382 393 """show available color, effects or style"""
383 394 ui.write(('color mode: %s\n') % ui._colormode)
384 395 if opts.get('style'):
385 396 return _debugdisplaystyle(ui)
386 397 else:
387 398 return _debugdisplaycolor(ui)
388 399
389 400 def _debugdisplaycolor(ui):
390 401 ui = ui.copy()
391 402 ui._styles.clear()
392 403 for effect in color._activeeffects(ui).keys():
393 404 ui._styles[effect] = effect
394 405 if ui._terminfoparams:
395 406 for k, v in ui.configitems('color'):
396 407 if k.startswith('color.'):
397 408 ui._styles[k] = k[6:]
398 409 elif k.startswith('terminfo.'):
399 410 ui._styles[k] = k[9:]
400 411 ui.write(_('available colors:\n'))
401 412 # sort label with a '_' after the other to group '_background' entry.
402 413 items = sorted(ui._styles.items(),
403 414 key=lambda i: ('_' in i[0], i[0], i[1]))
404 415 for colorname, label in items:
405 416 ui.write(('%s\n') % colorname, label=label)
406 417
407 418 def _debugdisplaystyle(ui):
408 419 ui.write(_('available style:\n'))
409 420 width = max(len(s) for s in ui._styles)
410 421 for label, effects in sorted(ui._styles.items()):
411 422 ui.write('%s' % label, label=label)
412 423 if effects:
413 424 # 50
414 425 ui.write(': ')
415 426 ui.write(' ' * (max(0, width - len(label))))
416 427 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
417 428 ui.write('\n')
418 429
419 430 @command('debugcreatestreamclonebundle', [], 'FILE')
420 431 def debugcreatestreamclonebundle(ui, repo, fname):
421 432 """create a stream clone bundle file
422 433
423 434 Stream bundles are special bundles that are essentially archives of
424 435 revlog files. They are commonly used for cloning very quickly.
425 436 """
426 437 # TODO we may want to turn this into an abort when this functionality
427 438 # is moved into `hg bundle`.
428 439 if phases.hassecret(repo):
429 440 ui.warn(_('(warning: stream clone bundle will contain secret '
430 441 'revisions)\n'))
431 442
432 443 requirements, gen = streamclone.generatebundlev1(repo)
433 444 changegroup.writechunks(ui, gen, fname)
434 445
435 446 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
436 447
437 448 @command('debugdag',
438 449 [('t', 'tags', None, _('use tags as labels')),
439 450 ('b', 'branches', None, _('annotate with branch names')),
440 451 ('', 'dots', None, _('use dots for runs')),
441 452 ('s', 'spaces', None, _('separate elements by spaces'))],
442 453 _('[OPTION]... [FILE [REV]...]'),
443 454 optionalrepo=True)
444 455 def debugdag(ui, repo, file_=None, *revs, **opts):
445 456 """format the changelog or an index DAG as a concise textual description
446 457
447 458 If you pass a revlog index, the revlog's DAG is emitted. If you list
448 459 revision numbers, they get labeled in the output as rN.
449 460
450 461 Otherwise, the changelog DAG of the current repo is emitted.
451 462 """
452 463 spaces = opts.get('spaces')
453 464 dots = opts.get('dots')
454 465 if file_:
455 466 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
456 467 file_)
457 468 revs = set((int(r) for r in revs))
458 469 def events():
459 470 for r in rlog:
460 471 yield 'n', (r, list(p for p in rlog.parentrevs(r)
461 472 if p != -1))
462 473 if r in revs:
463 474 yield 'l', (r, "r%i" % r)
464 475 elif repo:
465 476 cl = repo.changelog
466 477 tags = opts.get('tags')
467 478 branches = opts.get('branches')
468 479 if tags:
469 480 labels = {}
470 481 for l, n in repo.tags().items():
471 482 labels.setdefault(cl.rev(n), []).append(l)
472 483 def events():
473 484 b = "default"
474 485 for r in cl:
475 486 if branches:
476 487 newb = cl.read(cl.node(r))[5]['branch']
477 488 if newb != b:
478 489 yield 'a', newb
479 490 b = newb
480 491 yield 'n', (r, list(p for p in cl.parentrevs(r)
481 492 if p != -1))
482 493 if tags:
483 494 ls = labels.get(r)
484 495 if ls:
485 496 for l in ls:
486 497 yield 'l', (r, l)
487 498 else:
488 499 raise error.Abort(_('need repo for changelog dag'))
489 500
490 501 for line in dagparser.dagtextlines(events(),
491 502 addspaces=spaces,
492 503 wraplabels=True,
493 504 wrapannotations=True,
494 505 wrapnonlinear=dots,
495 506 usedots=dots,
496 507 maxlinewidth=70):
497 508 ui.write(line)
498 509 ui.write("\n")
499 510
500 511 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
501 512 def debugdata(ui, repo, file_, rev=None, **opts):
502 513 """dump the contents of a data file revision"""
503 514 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
504 515 if rev is not None:
505 516 raise error.CommandError('debugdata', _('invalid arguments'))
506 517 file_, rev = None, file_
507 518 elif rev is None:
508 519 raise error.CommandError('debugdata', _('invalid arguments'))
509 520 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
510 521 try:
511 522 ui.write(r.revision(r.lookup(rev), raw=True))
512 523 except KeyError:
513 524 raise error.Abort(_('invalid revision identifier %s') % rev)
514 525
515 526 @command('debugdate',
516 527 [('e', 'extended', None, _('try extended date formats'))],
517 528 _('[-e] DATE [RANGE]'),
518 529 norepo=True, optionalrepo=True)
519 530 def debugdate(ui, date, range=None, **opts):
520 531 """parse and display a date"""
521 532 if opts["extended"]:
522 533 d = util.parsedate(date, util.extendeddateformats)
523 534 else:
524 535 d = util.parsedate(date)
525 536 ui.write(("internal: %s %s\n") % d)
526 537 ui.write(("standard: %s\n") % util.datestr(d))
527 538 if range:
528 539 m = util.matchdate(range)
529 540 ui.write(("match: %s\n") % m(d[0]))
530 541
531 542 @command('debugdeltachain',
532 543 cmdutil.debugrevlogopts + cmdutil.formatteropts,
533 544 _('-c|-m|FILE'),
534 545 optionalrepo=True)
535 546 def debugdeltachain(ui, repo, file_=None, **opts):
536 547 """dump information about delta chains in a revlog
537 548
538 549 Output can be templatized. Available template keywords are:
539 550
540 551 :``rev``: revision number
541 552 :``chainid``: delta chain identifier (numbered by unique base)
542 553 :``chainlen``: delta chain length to this revision
543 554 :``prevrev``: previous revision in delta chain
544 555 :``deltatype``: role of delta / how it was computed
545 556 :``compsize``: compressed size of revision
546 557 :``uncompsize``: uncompressed size of revision
547 558 :``chainsize``: total size of compressed revisions in chain
548 559 :``chainratio``: total chain size divided by uncompressed revision size
549 560 (new delta chains typically start at ratio 2.00)
550 561 :``lindist``: linear distance from base revision in delta chain to end
551 562 of this revision
552 563 :``extradist``: total size of revisions not part of this delta chain from
553 564 base of delta chain to end of this revision; a measurement
554 565 of how much extra data we need to read/seek across to read
555 566 the delta chain for this revision
556 567 :``extraratio``: extradist divided by chainsize; another representation of
557 568 how much unrelated data is needed to load this delta chain
558 569 """
559 570 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
560 571 index = r.index
561 572 generaldelta = r.version & revlog.FLAG_GENERALDELTA
562 573
563 574 def revinfo(rev):
564 575 e = index[rev]
565 576 compsize = e[1]
566 577 uncompsize = e[2]
567 578 chainsize = 0
568 579
569 580 if generaldelta:
570 581 if e[3] == e[5]:
571 582 deltatype = 'p1'
572 583 elif e[3] == e[6]:
573 584 deltatype = 'p2'
574 585 elif e[3] == rev - 1:
575 586 deltatype = 'prev'
576 587 elif e[3] == rev:
577 588 deltatype = 'base'
578 589 else:
579 590 deltatype = 'other'
580 591 else:
581 592 if e[3] == rev:
582 593 deltatype = 'base'
583 594 else:
584 595 deltatype = 'prev'
585 596
586 597 chain = r._deltachain(rev)[0]
587 598 for iterrev in chain:
588 599 e = index[iterrev]
589 600 chainsize += e[1]
590 601
591 602 return compsize, uncompsize, deltatype, chain, chainsize
592 603
593 604 fm = ui.formatter('debugdeltachain', opts)
594 605
595 606 fm.plain(' rev chain# chainlen prev delta '
596 607 'size rawsize chainsize ratio lindist extradist '
597 608 'extraratio\n')
598 609
599 610 chainbases = {}
600 611 for rev in r:
601 612 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
602 613 chainbase = chain[0]
603 614 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
604 615 basestart = r.start(chainbase)
605 616 revstart = r.start(rev)
606 617 lineardist = revstart + comp - basestart
607 618 extradist = lineardist - chainsize
608 619 try:
609 620 prevrev = chain[-2]
610 621 except IndexError:
611 622 prevrev = -1
612 623
613 624 chainratio = float(chainsize) / float(uncomp)
614 625 extraratio = float(extradist) / float(chainsize)
615 626
616 627 fm.startitem()
617 628 fm.write('rev chainid chainlen prevrev deltatype compsize '
618 629 'uncompsize chainsize chainratio lindist extradist '
619 630 'extraratio',
620 631 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n',
621 632 rev, chainid, len(chain), prevrev, deltatype, comp,
622 633 uncomp, chainsize, chainratio, lineardist, extradist,
623 634 extraratio,
624 635 rev=rev, chainid=chainid, chainlen=len(chain),
625 636 prevrev=prevrev, deltatype=deltatype, compsize=comp,
626 637 uncompsize=uncomp, chainsize=chainsize,
627 638 chainratio=chainratio, lindist=lineardist,
628 639 extradist=extradist, extraratio=extraratio)
629 640
630 641 fm.end()
631 642
632 643 @command('debugdirstate|debugstate',
633 644 [('', 'nodates', None, _('do not display the saved mtime')),
634 645 ('', 'datesort', None, _('sort by saved mtime'))],
635 646 _('[OPTION]...'))
636 647 def debugstate(ui, repo, **opts):
637 648 """show the contents of the current dirstate"""
638 649
639 650 nodates = opts.get('nodates')
640 651 datesort = opts.get('datesort')
641 652
642 653 timestr = ""
643 654 if datesort:
644 655 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
645 656 else:
646 657 keyfunc = None # sort by filename
647 658 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
648 659 if ent[3] == -1:
649 660 timestr = 'unset '
650 661 elif nodates:
651 662 timestr = 'set '
652 663 else:
653 664 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
654 665 time.localtime(ent[3]))
655 666 if ent[1] & 0o20000:
656 667 mode = 'lnk'
657 668 else:
658 669 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
659 670 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
660 671 for f in repo.dirstate.copies():
661 672 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
662 673
663 674 @command('debugdiscovery',
664 675 [('', 'old', None, _('use old-style discovery')),
665 676 ('', 'nonheads', None,
666 677 _('use old-style discovery with non-heads included')),
667 678 ] + cmdutil.remoteopts,
668 679 _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
669 680 def debugdiscovery(ui, repo, remoteurl="default", **opts):
670 681 """runs the changeset discovery protocol in isolation"""
671 682 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
672 683 opts.get('branch'))
673 684 remote = hg.peer(repo, opts, remoteurl)
674 685 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
675 686
676 687 # make sure tests are repeatable
677 688 random.seed(12323)
678 689
679 690 def doit(localheads, remoteheads, remote=remote):
680 691 if opts.get('old'):
681 692 if localheads:
682 693 raise error.Abort('cannot use localheads with old style '
683 694 'discovery')
684 695 if not util.safehasattr(remote, 'branches'):
685 696 # enable in-client legacy support
686 697 remote = localrepo.locallegacypeer(remote.local())
687 698 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
688 699 force=True)
689 700 common = set(common)
690 701 if not opts.get('nonheads'):
691 702 ui.write(("unpruned common: %s\n") %
692 703 " ".join(sorted(short(n) for n in common)))
693 704 dag = dagutil.revlogdag(repo.changelog)
694 705 all = dag.ancestorset(dag.internalizeall(common))
695 706 common = dag.externalizeall(dag.headsetofconnecteds(all))
696 707 else:
697 708 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
698 709 common = set(common)
699 710 rheads = set(hds)
700 711 lheads = set(repo.heads())
701 712 ui.write(("common heads: %s\n") %
702 713 " ".join(sorted(short(n) for n in common)))
703 714 if lheads <= common:
704 715 ui.write(("local is subset\n"))
705 716 elif rheads <= common:
706 717 ui.write(("remote is subset\n"))
707 718
708 719 serverlogs = opts.get('serverlog')
709 720 if serverlogs:
710 721 for filename in serverlogs:
711 722 with open(filename, 'r') as logfile:
712 723 line = logfile.readline()
713 724 while line:
714 725 parts = line.strip().split(';')
715 726 op = parts[1]
716 727 if op == 'cg':
717 728 pass
718 729 elif op == 'cgss':
719 730 doit(parts[2].split(' '), parts[3].split(' '))
720 731 elif op == 'unb':
721 732 doit(parts[3].split(' '), parts[2].split(' '))
722 733 line = logfile.readline()
723 734 else:
724 735 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
725 736 opts.get('remote_head'))
726 737 localrevs = opts.get('local_head')
727 738 doit(localrevs, remoterevs)
728 739
729 740 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
730 741 def debugextensions(ui, **opts):
731 742 '''show information about active extensions'''
732 743 exts = extensions.extensions(ui)
733 744 hgver = util.version()
734 745 fm = ui.formatter('debugextensions', opts)
735 746 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
736 747 isinternal = extensions.ismoduleinternal(extmod)
737 748 extsource = pycompat.fsencode(extmod.__file__)
738 749 if isinternal:
739 750 exttestedwith = [] # never expose magic string to users
740 751 else:
741 752 exttestedwith = getattr(extmod, 'testedwith', '').split()
742 753 extbuglink = getattr(extmod, 'buglink', None)
743 754
744 755 fm.startitem()
745 756
746 757 if ui.quiet or ui.verbose:
747 758 fm.write('name', '%s\n', extname)
748 759 else:
749 760 fm.write('name', '%s', extname)
750 761 if isinternal or hgver in exttestedwith:
751 762 fm.plain('\n')
752 763 elif not exttestedwith:
753 764 fm.plain(_(' (untested!)\n'))
754 765 else:
755 766 lasttestedversion = exttestedwith[-1]
756 767 fm.plain(' (%s!)\n' % lasttestedversion)
757 768
758 769 fm.condwrite(ui.verbose and extsource, 'source',
759 770 _(' location: %s\n'), extsource or "")
760 771
761 772 if ui.verbose:
762 773 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
763 774 fm.data(bundled=isinternal)
764 775
765 776 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
766 777 _(' tested with: %s\n'),
767 778 fm.formatlist(exttestedwith, name='ver'))
768 779
769 780 fm.condwrite(ui.verbose and extbuglink, 'buglink',
770 781 _(' bug reporting: %s\n'), extbuglink or "")
771 782
772 783 fm.end()
773 784
774 785 @command('debugfileset',
775 786 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
776 787 _('[-r REV] FILESPEC'))
777 788 def debugfileset(ui, repo, expr, **opts):
778 789 '''parse and apply a fileset specification'''
779 790 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
780 791 if ui.verbose:
781 792 tree = fileset.parse(expr)
782 793 ui.note(fileset.prettyformat(tree), "\n")
783 794
784 795 for f in ctx.getfileset(expr):
785 796 ui.write("%s\n" % f)
786 797
787 798 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
788 799 def debugfsinfo(ui, path="."):
789 800 """show information detected about current filesystem"""
790 801 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
791 802 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
792 803 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
793 804 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
794 805 casesensitive = '(unknown)'
795 806 try:
796 807 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
797 808 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
798 809 except OSError:
799 810 pass
800 811 ui.write(('case-sensitive: %s\n') % casesensitive)
801 812
802 813 @command('debuggetbundle',
803 814 [('H', 'head', [], _('id of head node'), _('ID')),
804 815 ('C', 'common', [], _('id of common node'), _('ID')),
805 816 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
806 817 _('REPO FILE [-H|-C ID]...'),
807 818 norepo=True)
808 819 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
809 820 """retrieves a bundle from a repo
810 821
811 822 Every ID must be a full-length hex node id string. Saves the bundle to the
812 823 given file.
813 824 """
814 825 repo = hg.peer(ui, opts, repopath)
815 826 if not repo.capable('getbundle'):
816 827 raise error.Abort("getbundle() not supported by target repository")
817 828 args = {}
818 829 if common:
819 830 args['common'] = [bin(s) for s in common]
820 831 if head:
821 832 args['heads'] = [bin(s) for s in head]
822 833 # TODO: get desired bundlecaps from command line.
823 834 args['bundlecaps'] = None
824 835 bundle = repo.getbundle('debug', **args)
825 836
826 837 bundletype = opts.get('type', 'bzip2').lower()
827 838 btypes = {'none': 'HG10UN',
828 839 'bzip2': 'HG10BZ',
829 840 'gzip': 'HG10GZ',
830 841 'bundle2': 'HG20'}
831 842 bundletype = btypes.get(bundletype)
832 843 if bundletype not in bundle2.bundletypes:
833 844 raise error.Abort(_('unknown bundle type specified with --type'))
834 845 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
835 846
836 847 @command('debugignore', [], '[FILE]')
837 848 def debugignore(ui, repo, *files, **opts):
838 849 """display the combined ignore pattern and information about ignored files
839 850
840 851 With no argument display the combined ignore pattern.
841 852
842 853 Given space separated file names, shows if the given file is ignored and
843 854 if so, show the ignore rule (file and line number) that matched it.
844 855 """
845 856 ignore = repo.dirstate._ignore
846 857 if not files:
847 858 # Show all the patterns
848 859 ui.write("%s\n" % repr(ignore))
849 860 else:
850 861 for f in files:
851 862 nf = util.normpath(f)
852 863 ignored = None
853 864 ignoredata = None
854 865 if nf != '.':
855 866 if ignore(nf):
856 867 ignored = nf
857 868 ignoredata = repo.dirstate._ignorefileandline(nf)
858 869 else:
859 870 for p in util.finddirs(nf):
860 871 if ignore(p):
861 872 ignored = p
862 873 ignoredata = repo.dirstate._ignorefileandline(p)
863 874 break
864 875 if ignored:
865 876 if ignored == nf:
866 877 ui.write(_("%s is ignored\n") % f)
867 878 else:
868 879 ui.write(_("%s is ignored because of "
869 880 "containing folder %s\n")
870 881 % (f, ignored))
871 882 ignorefile, lineno, line = ignoredata
872 883 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
873 884 % (ignorefile, lineno, line))
874 885 else:
875 886 ui.write(_("%s is not ignored\n") % f)
876 887
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    # only the two historical index layouts (0 and 1) are supported
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # with generaldelta the "base" column actually holds the delta
    # parent, so relabel the header accordingly
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    # --debug shows full-length hashes, otherwise the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        ui.write(("   rev    offset  length " + basehdr + " linkrev"
                  " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write(("   rev flag   offset   length"
                  "     size " + basehdr + "   link     p1     p2"
                  " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # parents can fail on damaged revlogs; show null parents
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
932 943
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    rlog = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in rlog:
        # one edge per parent; the second parent is omitted when null
        p1, p2 = rlog.parents(rlog.node(rev))
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write("}\n")
946 957
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''

    def writetemp(contents):
        # helper: write contents to a temp file and return its path
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    # counts the number of failed checks; returned to the caller so a
    # non-zero exit code signals a broken install
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        encoding.fromlocal("test")
    except error.Abort as inst:
        err = inst
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_('  TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_('  SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # only verify C extensions when the policy permits loading them
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = inst
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = inst
                # p is reused as "templates are usable" flag below
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    fm.write('editor', _("checking commit editor... (%s)\n"), editor)
    cmdpath = util.findexe(pycompat.shlexsplit(editor)[0])
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editor)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = e
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1113 1124
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # ask the peer about all ids at once; answers come back positionally
    answers = peer.known([bin(s) for s in ids])
    ui.write("%s\n" % "".join("1" if known else "0" for known in answers))
1126 1137
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as an alias; delegates straight to debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1131 1142
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Returns 0 if no locks are held.

    """

    if opts.get('force_lock'):
        repo.svfs.unlink('lock')
    if opts.get('force_wlock'):
        repo.vfs.unlink('wlock')
    # Fix: the second operand used to re-test 'force_lock', so running
    # with --force-wlock alone fell through to the lock reporting below
    # instead of returning right after freeing the lock.
    if opts.get('force_lock') or opts.get('force_wlock'):
        return 0

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired the lock, so nobody else held it: release it
            # again and report it as free below
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        # reached when we could take the lock ourselves or when the lock
        # file does not exist at all
        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1203 1214
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the null hash as the literal string 'null'
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # dump either the v1 or v2 record list read below
        ui.write(('* version %s records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # merge driver record: driver name and its state, \0-separated
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file merge records: fields are \0-separated; v1 lacks
                # the "other node" field
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file extras: filename then alternating key/value pairs
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # conflict marker labels: local, other, optional base
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # known record types first (in 'order'); others sorted by payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1302 1313
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # collect names from every namespace except 'branches'; branch names
    # are added separately below so closed branches can be filtered out
    candidates = set()
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(name for name in candidates if name.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1322 1333
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        # parse a full hex node id, aborting on anything else
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # deletion mode: --delete takes marker indices, refuses to run inside
    # an open transaction
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record one marker precursor -> successors
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        # fix: error message read "cannot used"
                        raise error.Abort('cannot use --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally restricted to --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsolete.getmarkers(repo, nodes=nodes,
                                               exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsolete.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsolete.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1436 1447
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # return (files, dirs) completions for one spec; 'acceptable' is a
        # string of dirstate state characters to accept (e.g. 'nmar')
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # specs outside the repository cannot match anything
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # dirstate stores paths with '/'; translate on platforms where the
        # OS separator differs
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # without --full, stop at the next path separator so only
                # one additional segment is completed
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate states from the filter options;
    # no filter means accept everything ('nmar' below)
    acceptable = ''
    if opts['normal']:
        acceptable += 'nm'
    if opts['added']:
        acceptable += 'a'
    if opts['removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1501 1512
@command('debugpickmergetool',
        [('r', 'rev', '', _('check for files in this revision'), _('REV')),
         ('', 'changedelete', None, _('emulate merging change and delete')),
        ] + cmdutil.walkopts + cmdutil.mergetoolopts,
        _('[PATTERN]...'),
        inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    # --tool is applied as a temporary config override so _picktool sees it
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (opts['tool']))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (hgmerge))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (uimerge))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # buffer _picktool's own output unless --debug asked for it
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1579 1590
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode: dump every key/value pair in the namespace
        for key, value in sorted(peer.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(key),
                                   util.escapestr(value)))
        return
    # push mode: compare-and-set key from old to new
    key, old, new = keyinfo
    result = peer.pushkey(namespace, key, old, new)
    ui.status(str(result) + '\n')
    return not result
1600 1611
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    ctxa = scmutil.revsingle(repo, a)
    ctxb = scmutil.revsingle(repo, b)
    veca = pvec.ctxpvec(ctxa)
    vecb = pvec.ctxpvec(ctxb)
    # classify the relation between the two vectors; NOTE(review): 'rel'
    # stays unbound if none of these comparisons holds — presumably at
    # least one always does for valid pvecs, verify against pvec module
    if veca == vecb:
        rel = "="
    elif veca > vecb:
        rel = ">"
    elif veca < vecb:
        rel = "<"
    elif veca | vecb:
        rel = "|"
    ui.write(_("a: %s\n") % veca)
    ui.write(_("b: %s\n") % vecb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (veca._depth, vecb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(veca._depth - vecb._depth),
              pvec._hamming(veca._vec, vecb._vec),
              veca.distance(vecb), rel))
1621 1632
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # changedfiles=None makes rebuild() reset every file; with
        # --minimal we instead compute just the out-of-sync set (see the
        # command docstring)
        changedfiles = None
        if opts.get('minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # tracked in the manifest but unknown to the dirstate
            missing = inmanifest - indirstate
            # in the dirstate but not the manifest, excluding added files
            extra = set(f for f in indirstate - inmanifest
                        if dirstate[f] != 'a')
            changedfiles = missing | extra

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1659 1670
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # all of the work is delegated to the repair module
    repair.rebuildfncache(ui, repo)
1664 1675
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        relpath = matcher.rel(path)
        # renamed() yields (source path, source filenode) or False
        renamed = fctx.filelog().renamed(fctx.filenode())
        if not renamed:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, renamed[0], hex(renamed[1])))
1681 1692
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # --dump mode: one raw index row per revision, then exit.
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "no delta parent": the revision is stored full;
                # treat it as its own base for the output columns.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            # ts accumulates raw (uncompressed) size over all revs so far.
            ts = ts + rs
            # Maintain the current set of head revisions incrementally:
            # a rev stops being a head once it appears as a parent.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # Ratio of raw size to on-disk size so far; integer
                # division under Python 2, so this is a truncated ratio.
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # Statistics mode: decode the revlog version/flags header.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    # Counters for how each revision is stored and what its delta
    # parent is (previous rev, p1, p2, other, or full snapshot).
    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []

    # Each size accumulator is [min, max, total]; total is later divided
    # in place to become the average.
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold one sample into a [min, max, total] accumulator.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Stored as a full snapshot: chain starts here.
            chainlengths.append(0)
            numfull += 1
            addsize(size, fullsize)
        else:
            chainlengths.append(chainlengths[delta] + 1)
            addsize(size, deltasize)
            # Classify the delta parent relative to this revision.
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog. The first byte of
        # the raw segment identifies the compression engine used.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = segment[0]
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    # Derive averages and totals. Divisions are Python 2 integer
    # divisions, matching the historical output format.
    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Format-string builders: column width follows the largest value so
    # the report lines up.
    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # Return (value, percentage-of-total); 100% when total is zero.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render a chunk-type byte as a label; printable ASCII letters
        # get both hex and the character itself.
        if chunktype == 'empty':
            return '    %s     : ' % chunktype
        elif chunktype in string.ascii_letters:
            return '    0x%s (%s)  : ' % (hex(chunktype), chunktype)
        else:
            return '    0x%s      : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
    ui.write(('full revision size (min/max/avg)     : %d / %d / %d\n')
             % tuple(fullsize))
    ui.write(('delta size (min/max/avg)             : %d / %d / %d\n')
             % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev  : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write(('    where prev = p1  : ') + fmt2 % pcfmt(nump1prev,
                                                              numprev))
            ui.write(('    where prev = p2  : ') + fmt2 % pcfmt(nump2prev,
                                                              numprev))
            ui.write(('    other            : ') + fmt2 % pcfmt(numoprev,
                                                              numprev))
        if gdelta:
            ui.write(('deltas against p1    : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2    : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                             numdeltas))
1901 1912
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    # The pipeline of parse-tree transformations, applied in order.
    # Each stage maps the previous tree to a new one.
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(ui, tree)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # showalways: stages always printed; showchanged: printed only when
    # the stage actually modified the tree.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # Run every stage, keeping each intermediate tree for later
    # verification, and print the requested ones.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=repo.__contains__)
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate both the analyzed and the optimized tree and diff the
        # resulting revision lists; a non-empty diff is a bug in the
        # optimizer and yields exit status 1.
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the result set.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%s\n" % c)
2001 2012
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both revisions before taking the lock; the second parent
    # defaults to the null revision when omitted.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
2019 2030
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state (path, source URL, pinned revision)
    # recorded by the given changeset, in deterministic path order.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source   %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2030 2041
@command('debugsuccessorssets',
    [],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # In debug mode print full 40-char hashes instead of short forms.
    ctx2str = str
    node2str = short
    if ui.debug():
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        # One output line per successors set, members space-separated.
        for succsset in obsolete.successorssets(repo, ctx.node(), cache):
            if succsset:
                ui.write('    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')
2084 2095
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        # --rev requires a repository even though the command itself
        # declares optionalrepo.
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts['rev'])

    # Parse -D KEY=VALUE definitions into extra template properties;
    # 'ui' is reserved because it is injected below.
    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the parse tree, and the alias-expanded tree when it differs.
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        # Generic template: render once with the user-defined props.
        t = formatter.maketemplater(ui, tmpl)
        props['ui'] = ui
        ui.write(t.render(props))
    else:
        # Log template: render once per requested changeset.
        displayer = cmdutil.makelogtemplater(ui, repo, tmpl)
        for r in revs:
            displayer.show(repo[r], **props)
        displayer.close()
2133 2144
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take the wlock before the store lock, per Mercurial's locking order.
    with repo.wlock(), repo.lock():
        repo.updatecaches()
2140 2151
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # All upgrade planning and execution lives in the upgrade module.
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2165 2176
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    matcher = scmutil.match(repo[None], pats, opts)
    ui.write(('matcher: %r\n' % matcher))
    paths = list(repo[None].walk(matcher))
    if not paths:
        return
    # Honor ui.slash by normalizing path separators for display only.
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Column widths follow the longest absolute and relative path.
    abswidth = max(len(p) for p in paths)
    relwidth = max(len(matcher.rel(p)) for p in paths)
    fmt = 'f  %%-%ds  %%-%ds  %%s' % (abswidth, relwidth)
    for p in paths:
        flag = 'exact' if matcher.exact(p) else ''
        line = fmt % (p, display(matcher.rel(p)), flag)
        ui.write("%s\n" % line.rstrip())
2184 2195
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
     ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # Round-trip positional and keyword arguments through the wire
    # protocol's debugwireargs command to exercise argument passing.
    repo = hg.peer(ui, opts, repopath)
    # remoteopts (e.g. --ssh) configure the peer; don't forward them.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
@@ -1,493 +1,519 b''
1 1 """ Mercurial phases support code
2 2
3 3 ---
4 4
5 5 Copyright 2011 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
6 6 Logilab SA <contact@logilab.fr>
7 7 Augie Fackler <durin42@gmail.com>
8 8
9 9 This software may be used and distributed according to the terms
10 10 of the GNU General Public License version 2 or any later version.
11 11
12 12 ---
13 13
14 14 This module implements most phase logic in mercurial.
15 15
16 16
17 17 Basic Concept
18 18 =============
19 19
20 20 A 'changeset phase' is an indicator that tells us how a changeset is
21 21 manipulated and communicated. The details of each phase is described
22 22 below, here we describe the properties they have in common.
23 23
24 24 Like bookmarks, phases are not stored in history and thus are not
25 25 permanent and leave no audit trail.
26 26
27 27 First, no changeset can be in two phases at once. Phases are ordered,
28 28 so they can be considered from lowest to highest. The default, lowest
29 29 phase is 'public' - this is the normal phase of existing changesets. A
30 30 child changeset can not be in a lower phase than its parents.
31 31
32 32 These phases share a hierarchy of traits:
33 33
34 34 immutable shared
35 35 public: X X
36 36 draft: X
37 37 secret:
38 38
39 39 Local commits are draft by default.
40 40
41 41 Phase Movement and Exchange
42 42 ===========================
43 43
44 44 Phase data is exchanged by pushkey on pull and push. Some servers have
45 45 a publish option set, we call such a server a "publishing server".
46 46 Pushing a draft changeset to a publishing server changes the phase to
47 47 public.
48 48
49 49 A small list of fact/rules define the exchange of phase:
50 50
51 51 * old client never changes server states
52 52 * pull never changes server states
53 53 * publish and old server changesets are seen as public by client
54 54 * any secret changeset seen in another repository is lowered to at
55 55 least draft
56 56
57 57 Here is the final table summing up the 49 possible use cases of phase
58 58 exchange:
59 59
60 60 server
61 61 old publish non-publish
62 62 N X N D P N D P
63 63 old client
64 64 pull
65 65 N - X/X - X/D X/P - X/D X/P
66 66 X - X/X - X/D X/P - X/D X/P
67 67 push
68 68 X X/X X/X X/P X/P X/P X/D X/D X/P
69 69 new client
70 70 pull
71 71 N - P/X - P/D P/P - D/D P/P
72 72 D - P/X - P/D P/P - D/D P/P
73 73 P - P/X - P/D P/P - P/D P/P
74 74 push
75 75 D P/X P/X P/P P/P P/P D/D D/D P/P
76 76 P P/X P/X P/P P/P P/P P/P P/P P/P
77 77
78 78 Legend:
79 79
80 80 A/B = final state on client / state on server
81 81
82 82 * N = new/not present,
83 83 * P = public,
84 84 * D = draft,
85 85 * X = not tracked (i.e., the old client or server has no internal
86 86 way of recording the phase.)
87 87
88 88 passive = only pushes
89 89
90 90
91 91 A cell here can be read like this:
92 92
93 93 "When a new client pushes a draft changeset (D) to a publishing
94 94 server where it's not present (N), it's marked public on both
95 95 sides (P/P)."
96 96
97 97 Note: old client behave as a publishing server with draft only content
98 98 - other people see it as public
99 99 - content is pushed as draft
100 100
101 101 """
102 102
103 103 from __future__ import absolute_import
104 104
105 105 import errno
106 106
107 107 from .i18n import _
108 108 from .node import (
109 109 bin,
110 110 hex,
111 111 nullid,
112 112 nullrev,
113 113 short,
114 114 )
115 115 from . import (
116 116 error,
117 117 smartset,
118 118 txnutil,
119 119 util,
120 120 )
121 121
# Phase constants: public=0, draft=1, secret=2. The ordering matters:
# a numerically higher phase is "higher" (more private), and a child
# changeset may never be in a lower phase than its parents.
allphases = public, draft, secret = range(3)
# Phases whose roots are tracked on disk; public is the implicit default.
trackedphases = allphases[1:]
phasenames = ['public', 'draft', 'secret']
125 125
def _readroots(repo, phasedefaults=None):
    """Read phase roots from disk

    phasedefaults is a list of fn(repo, roots) callable, which are
    executed if the phase roots file does not exist. When phases are
    being initialized on an existing repository, this could be used to
    set selected changesets phase to something else than public.

    Return (roots, dirty) where dirty is true if roots differ from
    what is being stored.
    """
    repo = repo.unfiltered()
    dirty = False
    roots = [set() for i in allphases]
    try:
        # Prefer a pending (in-transaction) phaseroots file when one exists.
        f, pending = txnutil.trypending(repo.root, repo.svfs, 'phaseroots')
        try:
            for line in f:
                # Each line is "<phase-int> <hex-node>".
                phase, nh = line.split()
                roots[int(phase)].add(bin(nh))
        finally:
            f.close()
    except IOError as inst:
        if inst.errno != errno.ENOENT:
            raise
        # Missing file: fall back to the supplied defaults and mark the
        # roots dirty so they get persisted.
        if phasedefaults:
            for f in phasedefaults:
                roots = f(repo, roots)
        dirty = True
    return roots, dirty
156 156
class phasecache(object):
    """In-memory view of the repository's phase information.

    Holds the root nodes of each tracked phase ('phaseroots', a list of
    node sets indexed by phase) and lazily derives a per-revision phase
    table ('_phaserevs') plus, when the native helper is available,
    per-phase revision sets ('_phasesets').
    """

    def __init__(self, repo, phasedefaults, _load=True):
        # _load=False is used by copy() to build an empty shell that is
        # then populated attribute-by-attribute.
        if _load:
            # Cheap trick to allow shallow-copy without copy module
            self.phaseroots, self.dirty = _readroots(repo, phasedefaults)
            self._phaserevs = None
            self._phasesets = None
            self.filterunknown(repo)
            self.opener = repo.svfs

    def getrevset(self, repo, phases):
        """return a smartset for the given phases"""
        self.loadphaserevs(repo) # ensure phase's sets are loaded

        if self._phasesets and all(self._phasesets[p] is not None
                                   for p in phases):
            # fast path - use _phasesets
            revs = self._phasesets[phases[0]]
            if len(phases) > 1:
                revs = revs.copy() # only copy when needed
                for p in phases[1:]:
                    revs.update(self._phasesets[p])
            # Drop revisions hidden by the current repo filter.
            if repo.changelog.filteredrevs:
                revs = revs - repo.changelog.filteredrevs
            return smartset.baseset(revs)
        else:
            # slow path - enumerate all revisions
            phase = self.phase
            revs = (r for r in repo if phase(repo, r) in phases)
            return smartset.generatorset(revs, iterasc=True)

    def copy(self):
        # Shallow copy meant to ensure isolation in
        # advance/retractboundary(), nothing more.
        ph = self.__class__(None, None, _load=False)
        ph.phaseroots = self.phaseroots[:]
        ph.dirty = self.dirty
        ph.opener = self.opener
        ph._phaserevs = self._phaserevs
        ph._phasesets = self._phasesets
        return ph

    def replace(self, phcache):
        """replace all values in 'self' with content of phcache"""
        for a in ('phaseroots', 'dirty', 'opener', '_phaserevs', '_phasesets'):
            setattr(self, a, getattr(phcache, a))

    def _getphaserevsnative(self, repo):
        # Delegate phase computation to the C index implementation;
        # raises AttributeError when the changelog lacks computephases.
        repo = repo.unfiltered()
        nativeroots = []
        for phase in trackedphases:
            nativeroots.append(map(repo.changelog.rev, self.phaseroots[phase]))
        return repo.changelog.computephases(nativeroots)

    def _computephaserevspure(self, repo):
        # Pure-Python fallback: start everything public, then paint each
        # tracked phase over its roots and their descendants. Ascending
        # phase order ensures higher phases win where roots overlap.
        repo = repo.unfiltered()
        revs = [public] * len(repo.changelog)
        self._phaserevs = revs
        self._populatephaseroots(repo)
        for phase in trackedphases:
            roots = list(map(repo.changelog.rev, self.phaseroots[phase]))
            if roots:
                for rev in roots:
                    revs[rev] = phase
                for rev in repo.changelog.descendants(roots):
                    revs[rev] = phase

    def loadphaserevs(self, repo):
        """ensure phase information is loaded in the object"""
        if self._phaserevs is None:
            try:
                res = self._getphaserevsnative(repo)
                self._phaserevs, self._phasesets = res
            except AttributeError:
                # No native support; _phasesets stays None (slow path).
                self._computephaserevspure(repo)

    def invalidate(self):
        # Drop derived data; it will be recomputed on next access.
        self._phaserevs = None
        self._phasesets = None

    def _populatephaseroots(self, repo):
        """Fills the _phaserevs cache with phases for the roots.
        """
        cl = repo.changelog
        phaserevs = self._phaserevs
        for phase in trackedphases:
            roots = map(cl.rev, self.phaseroots[phase])
            for root in roots:
                phaserevs[root] = phase

    def phase(self, repo, rev):
        # We need a repo argument here to be able to build _phaserevs
        # if necessary. The repository instance is not stored in
        # phasecache to avoid reference cycles. The changelog instance
        # is not stored because it is a filecache() property and can
        # be replaced without us being notified.
        if rev == nullrev:
            return public
        if rev < nullrev:
            raise ValueError(_('cannot lookup negative revision'))
        if self._phaserevs is None or rev >= len(self._phaserevs):
            # Cache missing or stale (new revs appended): rebuild it.
            self.invalidate()
            self.loadphaserevs(repo)
        return self._phaserevs[rev]

    def write(self):
        # Persist phaseroots, but only when something changed.
        if not self.dirty:
            return
        f = self.opener('phaseroots', 'w', atomictemp=True, checkambig=True)
        try:
            self._write(f)
        finally:
            f.close()

    def _write(self, fp):
        # Serialize as one "<phase> <hexnode>" line per root.
        for phase, roots in enumerate(self.phaseroots):
            for h in roots:
                fp.write('%i %s\n' % (phase, hex(h)))
        self.dirty = False

    def _updateroots(self, phase, newroots, tr):
        self.phaseroots[phase] = newroots
        self.invalidate()
        self.dirty = True

        # Register the write with the transaction and flag the move for
        # hooks.
        tr.addfilegenerator('phase', ('phaseroots',), self._write)
        tr.hookargs['phases_moved'] = '1'

    def advanceboundary(self, repo, tr, targetphase, nodes):
        # Be careful to preserve shallow-copied values: do not update
        # phaseroots values, replace them.

        repo = repo.unfiltered()
        delroots = [] # set of root deleted by this path
        for phase in xrange(targetphase + 1, len(allphases)):
            # filter nodes that are not in a compatible phase already
            nodes = [n for n in nodes
                     if self.phase(repo, repo[n].rev()) >= phase]
            if not nodes:
                break # no roots to move anymore
            olds = self.phaseroots[phase]
            # Recompute this phase's roots with the moved nodes excluded.
            roots = set(ctx.node() for ctx in repo.set(
                    'roots((%ln::) - (%ln::%ln))', olds, olds, nodes))
            if olds != roots:
                self._updateroots(phase, roots, tr)
                # some roots may need to be declared for lower phases
                delroots.extend(olds - roots)
        # declare deleted root in the target phase
        if targetphase != 0:
            self.retractboundary(repo, tr, targetphase, delroots)
        repo.invalidatevolatilesets()

    def retractboundary(self, repo, tr, targetphase, nodes):
        # Be careful to preserve shallow-copied values: do not update
        # phaseroots values, replace them.

        repo = repo.unfiltered()
        currentroots = self.phaseroots[targetphase]
        newroots = [n for n in nodes
                    if self.phase(repo, repo[n].rev()) < targetphase]
        if newroots:
            if nullid in newroots:
                raise error.Abort(_('cannot change null revision phase'))
            currentroots = currentroots.copy()
            currentroots.update(newroots)

            # Only compute new roots for revs above the roots that are being
            # retracted.
            minnewroot = min(repo[n].rev() for n in newroots)
            aboveroots = [n for n in currentroots
                          if repo[n].rev() >= minnewroot]
            updatedroots = repo.set('roots(%ln::)', aboveroots)

            finalroots = set(n for n in currentroots if repo[n].rev() <
                             minnewroot)
            finalroots.update(ctx.node() for ctx in updatedroots)

            self._updateroots(targetphase, finalroots, tr)
        repo.invalidatevolatilesets()

    def filterunknown(self, repo):
        """remove unknown nodes from the phase boundary

        Nothing is lost as unknown nodes only hold data for their descendants.
        """
        filtered = False
        nodemap = repo.changelog.nodemap # to filter unknown nodes
        for phase, nodes in enumerate(self.phaseroots):
            missing = sorted(node for node in nodes if node not in nodemap)
            if missing:
                for mnode in missing:
                    repo.ui.debug(
                        'removing unknown node %s from %i-phase boundary\n'
                        % (short(mnode), phase))
                nodes.symmetric_difference_update(missing)
                filtered = True
        if filtered:
            self.dirty = True
        # filterunknown is called by repo.destroyed, we may have no changes in
        # root but phaserevs contents is certainly invalid (or at least we
        # have not proper way to check that). related to issue 3858.
        #
        # The other caller is __init__ that have no _phaserevs initialized
        # anyway. If this change we should consider adding a dedicated
        # "destroyed" function to phasecache or a proper cache key mechanism
        # (see branchmap one)
        self.invalidate()
364 364
def advanceboundary(repo, tr, targetphase, nodes):
    """Move 'nodes' *forward* into 'targetphase'.

    Every affected node ends up in the target phase or stays in a
    *lower* one; the phase boundary is simplified to roots only.
    Other nodes' phases are adjusted as needed.
    """
    cache = repo._phasecache.copy()
    cache.advanceboundary(repo, tr, targetphase, nodes)
    repo._phasecache.replace(cache)
375 375
def retractboundary(repo, tr, targetphase, nodes):
    """Move 'nodes' *backward* into 'targetphase'.

    Every affected node ends up in the target phase or stays in a
    *higher* one; the phase boundary is simplified to roots only.
    Other nodes' phases are adjusted as needed.
    """
    cache = repo._phasecache.copy()
    cache.retractboundary(repo, tr, targetphase, nodes)
    repo._phasecache.replace(cache)
387 387
def listphases(repo):
    """Return the draft phase roots (and publishing flag) for pushkey."""
    # a sorted dict keeps the serialized output deterministic
    keys = util.sortdict()
    draftstr = '%i' % draft
    for droot in repo._phasecache.phaseroots[draft]:
        keys[hex(droot)] = draftstr

    if repo.publishing():
        # Advertise that we are a publishing repo; a publishing repo
        # cannot simply pretend to be an old, pre-phase server.  Even
        # when pushing to a publishing repo the client still needs to
        # push phase boundaries: a push does not only carry changesets,
        # it also carries phase data, and new phase data may apply to
        # *common* changesets that are not pushed themselves.  A very
        # simple example:
        #
        # 1) repo A push changeset X as draft to repo B
        # 2) repo B make changeset X public
        # 3) repo B push to repo A. X is not pushed but the data that
        #    X is now public should be.
        #
        # The server cannot handle this on its own as it has no idea of
        # the client's phase data.
        keys['publishing'] = 'True'
    return keys
415 415
def pushphase(repo, nhex, oldphasestr, newphasestr):
    """Change the phase of node 'nhex' over pushkey.

    'oldphasestr' and 'newphasestr' are the expected current phase and
    the requested phase, as decimal strings.  Return True when the node
    ends up in the requested phase (either because the boundary was
    advanced here or because a concurrent update raced us to the same
    result), False otherwise.
    """
    repo = repo.unfiltered()
    with repo.lock():
        currentphase = repo[nhex].phase()
        newphase = abs(int(newphasestr)) # let's avoid negative index surprise
        oldphase = abs(int(oldphasestr)) # let's avoid negative index surprise
        if currentphase == oldphase and newphase < oldphase:
            # pushkey may only advance the boundary (lower the phase)
            with repo.transaction('pushkey-phase') as tr:
                advanceboundary(repo, tr, newphase, [bin(nhex)])
            return True
        elif currentphase == newphase:
            # raced, but got correct result
            return True
        else:
            return False
432 432
def subsetphaseheads(repo, subset):
    """Find the phase heads for a subset of a history.

    Return a list indexed by phase number; each entry is the list of
    head nodes for that phase within 'subset'.
    """
    tonode = repo.changelog.node

    result = [[] for _ in allphases]
    # The secret entry is deliberately left empty: any head of the
    # subset that is not mentioned in a lower phase is implicitly
    # secret, so there is no need to track it.
    for phase in allphases[:-1]:
        query = "heads(%%ln & %s())" % phasenames[phase]
        result[phase] = [tonode(r) for r in repo.revs(query, subset)]
    return result
448
def updatephases(repo, tr, headsbyphase, addednodes):
    """Apply the given phase heads to the repository."""
    # changegroup.apply() currently marks everything it adds as draft,
    # so first retract all added nodes back to secret...
    retractboundary(repo, tr, secret, addednodes)

    # ...then advance each non-secret phase boundary to its recorded
    # heads (public first, then draft).
    for phase in allphases[:-1]:
        advanceboundary(repo, tr, phase, headsbyphase[phase])
458
def analyzeremotephases(repo, subset, roots):
    """Compute phase heads and roots in a subset of nodes from a root dict.

    * subset is heads of the subset
    * roots is {<nodeid> => phase} mapping. key and value are string.

    Unknown elements in the input are tolerated and reported.
    """
    repo = repo.unfiltered()
    nodemap = repo.changelog.nodemap # to filter unknown nodes
    # turn the pushkey-style dictionary into a list of draft roots
    draftroots = []
    for nhex, phasestr in roots.iteritems():
        # 'publishing' is a capability flag, not a root (see listphases)
        if nhex == 'publishing':
            continue
        node = bin(nhex)
        phase = int(phasestr)
        if phase == public:
            # only the null revision may legitimately be a public root
            if node != nullid:
                repo.ui.warn(_('ignoring inconsistent public root'
                               ' from remote: %s\n') % nhex)
        elif phase == draft:
            if node in nodemap:
                draftroots.append(node)
        else:
            repo.ui.warn(_('ignoring unexpected root from remote: %i %s\n')
                         % (phase, nhex))
    # everything in 'subset' not descending from a draft root is public
    publicheads = newheads(repo, subset, draftroots)
    return publicheads, draftroots
463 489
def newheads(repo, heads, roots):
    """Compute the heads of one subset minus another.

    * `heads`: defines the first subset
    * `roots`: defines the second set, subtracted (with its descendants)
      from the first
    """
    repo = repo.unfiltered()
    ctxs = repo.set('heads((%ln + parents(%ln)) - (%ln::%ln))',
                    heads, roots, roots, heads)
    return [ctx.node() for ctx in ctxs]
473 499
474 500
def newcommitphase(ui):
    """Return the target phase for new commits.

    Handle every possible value of the phases.new-commit option: a
    symbolic phase name, a raw phase number, or (invalid) anything else,
    which raises ConfigError.
    """
    value = ui.config('phases', 'new-commit', draft)
    try:
        # symbolic name such as 'draft' or 'secret'
        return phasenames.index(value)
    except ValueError:
        pass
    try:
        # raw phase number
        return int(value)
    except ValueError:
        msg = _("phases.new-commit: not a valid phase name ('%s')")
        raise error.ConfigError(msg % value)
490 516
def hassecret(repo):
    """Return True if the repository contains any secret changeset."""
    secretroots = repo._phasecache.phaseroots[2]
    return bool(secretroots)
General Comments 0
You need to be logged in to leave comments. Login now