move opener from util to scmutil
Adrian Buehlmann
r13970:d1391335 default

The requested changes are too big and the content was truncated; the last file in this diff (mq.py) is cut off.
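The rename is mechanical: scmutil.opener takes the same arguments as the old util.opener and returns the same kind of opener object, so call sites only swap the module prefix. A minimal sketch of the pattern, based on the call sites visible in the hunks below (the paths and file names here are illustrative, not taken from the changeset):

    from mercurial import scmutil

    # before this changeset:  opener = util.opener('.', False)
    opener = scmutil.opener('.', False)       # non-auditing opener, as used by undumprevlog below
    wopener = scmutil.opener('/path/to/wc')   # opener rooted at a working copy (illustrative path)

    # an opener is called with a path relative to its root plus a mode...
    wopener('some/file', 'w').write('data')
    # ...and exposes helpers used elsewhere in this diff, e.g. symlink(src, linkname)
    wopener.symlink('link target', 'some/link')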

@@ -1,37 +1,37 @@
1 1 #!/usr/bin/env python
2 2 # Undump a dump from dumprevlog
3 3 # $ hg init
4 4 # $ undumprevlog < repo.dump
5 5
6 6 import sys
7 - from mercurial import revlog, node, util, transaction
7 + from mercurial import revlog, node, scmutil, util, transaction
8 8
9 9 for fp in (sys.stdin, sys.stdout, sys.stderr):
10 10 util.set_binary(fp)
11 11
12 - opener = util.opener('.', False)
12 + opener = scmutil.opener('.', False)
13 13 tr = transaction.transaction(sys.stderr.write, opener, "undump.journal")
14 14 while 1:
15 15 l = sys.stdin.readline()
16 16 if not l:
17 17 break
18 18 if l.startswith("file:"):
19 19 f = l[6:-1]
20 20 r = revlog.revlog(opener, f)
21 21 print f
22 22 elif l.startswith("node:"):
23 23 n = node.bin(l[6:-1])
24 24 elif l.startswith("linkrev:"):
25 25 lr = int(l[9:-1])
26 26 elif l.startswith("parents:"):
27 27 p = l[9:-1].split()
28 28 p1 = node.bin(p[0])
29 29 p2 = node.bin(p[1])
30 30 elif l.startswith("length:"):
31 31 length = int(l[8:-1])
32 32 sys.stdin.readline() # start marker
33 33 d = sys.stdin.read(length)
34 34 sys.stdin.readline() # end marker
35 35 r.addrevision(d, tr, lr, p1, p2)
36 36
37 37 tr.close()
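A script like undumprevlog that has to run against Mercurial releases from both before and after this move can fall back to the old location at import time; this shim is illustrative only and not part of the changeset:

    try:
        from mercurial.scmutil import opener    # Mercurial with this changeset and later
    except ImportError:
        from mercurial.util import opener       # older Mercurial: opener still lives in util

    op = opener('.', False)    # constructor arguments are unchanged either way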
@@ -1,1172 +1,1172 @@
1 1 # Subversion 1.4/1.5 Python API backend
2 2 #
3 3 # Copyright(C) 2007 Daniel Holth et al
4 4
5 5 import os
6 6 import re
7 7 import sys
8 8 import cPickle as pickle
9 9 import tempfile
10 10 import urllib
11 11 import urllib2
12 12
13 - from mercurial import strutil, util, encoding
13 + from mercurial import strutil, scmutil, util, encoding
14 14 from mercurial.i18n import _
15 15
16 16 # Subversion stuff. Works best with very recent Python SVN bindings
17 17 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
18 18 # these bindings.
19 19
20 20 from cStringIO import StringIO
21 21
22 22 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
23 23 from common import commandline, converter_source, converter_sink, mapfile
24 24
25 25 try:
26 26 from svn.core import SubversionException, Pool
27 27 import svn
28 28 import svn.client
29 29 import svn.core
30 30 import svn.ra
31 31 import svn.delta
32 32 import transport
33 33 import warnings
34 34 warnings.filterwarnings('ignore',
35 35 module='svn.core',
36 36 category=DeprecationWarning)
37 37
38 38 except ImportError:
39 39 svn = None
40 40
41 41 class SvnPathNotFound(Exception):
42 42 pass
43 43
44 44 def revsplit(rev):
45 45 """Parse a revision string and return (uuid, path, revnum)."""
46 46 url, revnum = rev.rsplit('@', 1)
47 47 parts = url.split('/', 1)
48 48 mod = ''
49 49 if len(parts) > 1:
50 50 mod = '/' + parts[1]
51 51 return parts[0][4:], mod, int(revnum)
52 52
53 53 def geturl(path):
54 54 try:
55 55 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
56 56 except SubversionException:
57 57 pass
58 58 if os.path.isdir(path):
59 59 path = os.path.normpath(os.path.abspath(path))
60 60 if os.name == 'nt':
61 61 path = '/' + util.normpath(path)
62 62 # Module URL is later compared with the repository URL returned
63 63 # by svn API, which is UTF-8.
64 64 path = encoding.tolocal(path)
65 65 return 'file://%s' % urllib.quote(path)
66 66 return path
67 67
68 68 def optrev(number):
69 69 optrev = svn.core.svn_opt_revision_t()
70 70 optrev.kind = svn.core.svn_opt_revision_number
71 71 optrev.value.number = number
72 72 return optrev
73 73
74 74 class changedpath(object):
75 75 def __init__(self, p):
76 76 self.copyfrom_path = p.copyfrom_path
77 77 self.copyfrom_rev = p.copyfrom_rev
78 78 self.action = p.action
79 79
80 80 def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
81 81 strict_node_history=False):
82 82 protocol = -1
83 83 def receiver(orig_paths, revnum, author, date, message, pool):
84 84 if orig_paths is not None:
85 85 for k, v in orig_paths.iteritems():
86 86 orig_paths[k] = changedpath(v)
87 87 pickle.dump((orig_paths, revnum, author, date, message),
88 88 fp, protocol)
89 89
90 90 try:
91 91 # Use an ra of our own so that our parent can consume
92 92 # our results without confusing the server.
93 93 t = transport.SvnRaTransport(url=url)
94 94 svn.ra.get_log(t.ra, paths, start, end, limit,
95 95 discover_changed_paths,
96 96 strict_node_history,
97 97 receiver)
98 98 except SubversionException, (inst, num):
99 99 pickle.dump(num, fp, protocol)
100 100 except IOError:
101 101 # Caller may interrupt the iteration
102 102 pickle.dump(None, fp, protocol)
103 103 else:
104 104 pickle.dump(None, fp, protocol)
105 105 fp.close()
106 106 # With large history, cleanup process goes crazy and suddenly
107 107 # consumes *huge* amount of memory. The output file being closed,
108 108 # there is no need for clean termination.
109 109 os._exit(0)
110 110
111 111 def debugsvnlog(ui, **opts):
112 112 """Fetch SVN log in a subprocess and channel them back to parent to
113 113 avoid memory collection issues.
114 114 """
115 115 util.set_binary(sys.stdin)
116 116 util.set_binary(sys.stdout)
117 117 args = decodeargs(sys.stdin.read())
118 118 get_log_child(sys.stdout, *args)
119 119
120 120 class logstream(object):
121 121 """Interruptible revision log iterator."""
122 122 def __init__(self, stdout):
123 123 self._stdout = stdout
124 124
125 125 def __iter__(self):
126 126 while True:
127 127 try:
128 128 entry = pickle.load(self._stdout)
129 129 except EOFError:
130 130 raise util.Abort(_('Mercurial failed to run itself, check'
131 131 ' hg executable is in PATH'))
132 132 try:
133 133 orig_paths, revnum, author, date, message = entry
134 134 except:
135 135 if entry is None:
136 136 break
137 137 raise SubversionException("child raised exception", entry)
138 138 yield entry
139 139
140 140 def close(self):
141 141 if self._stdout:
142 142 self._stdout.close()
143 143 self._stdout = None
144 144
145 145
146 146 # Check to see if the given path is a local Subversion repo. Verify this by
147 147 # looking for several svn-specific files and directories in the given
148 148 # directory.
149 149 def filecheck(ui, path, proto):
150 150 for x in ('locks', 'hooks', 'format', 'db'):
151 151 if not os.path.exists(os.path.join(path, x)):
152 152 return False
153 153 return True
154 154
155 155 # Check to see if a given path is the root of an svn repo over http. We verify
156 156 # this by requesting a version-controlled URL we know can't exist and looking
157 157 # for the svn-specific "not found" XML.
158 158 def httpcheck(ui, path, proto):
159 159 try:
160 160 opener = urllib2.build_opener()
161 161 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
162 162 data = rsp.read()
163 163 except urllib2.HTTPError, inst:
164 164 if inst.code != 404:
165 165 # Except for 404 we cannot know for sure this is not an svn repo
166 166 ui.warn(_('svn: cannot probe remote repository, assume it could '
167 167 'be a subversion repository. Use --source-type if you '
168 168 'know better.\n'))
169 169 return True
170 170 data = inst.fp.read()
171 171 except:
172 172 # Could be urllib2.URLError if the URL is invalid or anything else.
173 173 return False
174 174 return '<m:human-readable errcode="160013">' in data
175 175
176 176 protomap = {'http': httpcheck,
177 177 'https': httpcheck,
178 178 'file': filecheck,
179 179 }
180 180 def issvnurl(ui, url):
181 181 try:
182 182 proto, path = url.split('://', 1)
183 183 if proto == 'file':
184 184 path = urllib.url2pathname(path)
185 185 except ValueError:
186 186 proto = 'file'
187 187 path = os.path.abspath(url)
188 188 if proto == 'file':
189 189 path = path.replace(os.sep, '/')
190 190 check = protomap.get(proto, lambda *args: False)
191 191 while '/' in path:
192 192 if check(ui, path, proto):
193 193 return True
194 194 path = path.rsplit('/', 1)[0]
195 195 return False
196 196
197 197 # SVN conversion code stolen from bzr-svn and tailor
198 198 #
199 199 # Subversion looks like a versioned filesystem, branches structures
200 200 # are defined by conventions and not enforced by the tool. First,
201 201 # we define the potential branches (modules) as "trunk" and "branches"
202 202 # children directories. Revisions are then identified by their
203 203 # module and revision number (and a repository identifier).
204 204 #
205 205 # The revision graph is really a tree (or a forest). By default, a
206 206 # revision parent is the previous revision in the same module. If the
207 207 # module directory is copied/moved from another module then the
208 208 # revision is the module root and its parent the source revision in
209 209 # the parent module. A revision has at most one parent.
210 210 #
211 211 class svn_source(converter_source):
212 212 def __init__(self, ui, url, rev=None):
213 213 super(svn_source, self).__init__(ui, url, rev=rev)
214 214
215 215 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
216 216 (os.path.exists(url) and
217 217 os.path.exists(os.path.join(url, '.svn'))) or
218 218 issvnurl(ui, url)):
219 219 raise NoRepo(_("%s does not look like a Subversion repository")
220 220 % url)
221 221 if svn is None:
222 222 raise MissingTool(_('Could not load Subversion python bindings'))
223 223
224 224 try:
225 225 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
226 226 if version < (1, 4):
227 227 raise MissingTool(_('Subversion python bindings %d.%d found, '
228 228 '1.4 or later required') % version)
229 229 except AttributeError:
230 230 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
231 231 'or later required'))
232 232
233 233 self.lastrevs = {}
234 234
235 235 latest = None
236 236 try:
237 237 # Support file://path@rev syntax. Useful e.g. to convert
238 238 # deleted branches.
239 239 at = url.rfind('@')
240 240 if at >= 0:
241 241 latest = int(url[at + 1:])
242 242 url = url[:at]
243 243 except ValueError:
244 244 pass
245 245 self.url = geturl(url)
246 246 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
247 247 try:
248 248 self.transport = transport.SvnRaTransport(url=self.url)
249 249 self.ra = self.transport.ra
250 250 self.ctx = self.transport.client
251 251 self.baseurl = svn.ra.get_repos_root(self.ra)
252 252 # Module is either empty or a repository path starting with
253 253 # a slash and not ending with a slash.
254 254 self.module = urllib.unquote(self.url[len(self.baseurl):])
255 255 self.prevmodule = None
256 256 self.rootmodule = self.module
257 257 self.commits = {}
258 258 self.paths = {}
259 259 self.uuid = svn.ra.get_uuid(self.ra)
260 260 except SubversionException:
261 261 ui.traceback()
262 262 raise NoRepo(_("%s does not look like a Subversion repository")
263 263 % self.url)
264 264
265 265 if rev:
266 266 try:
267 267 latest = int(rev)
268 268 except ValueError:
269 269 raise util.Abort(_('svn: revision %s is not an integer') % rev)
270 270
271 271 self.trunkname = self.ui.config('convert', 'svn.trunk', 'trunk').strip('/')
272 272 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
273 273 try:
274 274 self.startrev = int(self.startrev)
275 275 if self.startrev < 0:
276 276 self.startrev = 0
277 277 except ValueError:
278 278 raise util.Abort(_('svn: start revision %s is not an integer')
279 279 % self.startrev)
280 280
281 281 self.head = self.latest(self.module, latest)
282 282 if not self.head:
283 283 raise util.Abort(_('no revision found in module %s')
284 284 % self.module)
285 285 self.last_changed = self.revnum(self.head)
286 286
287 287 self._changescache = None
288 288
289 289 if os.path.exists(os.path.join(url, '.svn/entries')):
290 290 self.wc = url
291 291 else:
292 292 self.wc = None
293 293 self.convertfp = None
294 294
295 295 def setrevmap(self, revmap):
296 296 lastrevs = {}
297 297 for revid in revmap.iterkeys():
298 298 uuid, module, revnum = revsplit(revid)
299 299 lastrevnum = lastrevs.setdefault(module, revnum)
300 300 if revnum > lastrevnum:
301 301 lastrevs[module] = revnum
302 302 self.lastrevs = lastrevs
303 303
304 304 def exists(self, path, optrev):
305 305 try:
306 306 svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
307 307 optrev, False, self.ctx)
308 308 return True
309 309 except SubversionException:
310 310 return False
311 311
312 312 def getheads(self):
313 313
314 314 def isdir(path, revnum):
315 315 kind = self._checkpath(path, revnum)
316 316 return kind == svn.core.svn_node_dir
317 317
318 318 def getcfgpath(name, rev):
319 319 cfgpath = self.ui.config('convert', 'svn.' + name)
320 320 if cfgpath is not None and cfgpath.strip() == '':
321 321 return None
322 322 path = (cfgpath or name).strip('/')
323 323 if not self.exists(path, rev):
324 324 if self.module.endswith(path) and name == 'trunk':
325 325 # we are converting from inside this directory
326 326 return None
327 327 if cfgpath:
328 328 raise util.Abort(_('expected %s to be at %r, but not found')
329 329 % (name, path))
330 330 return None
331 331 self.ui.note(_('found %s at %r\n') % (name, path))
332 332 return path
333 333
334 334 rev = optrev(self.last_changed)
335 335 oldmodule = ''
336 336 trunk = getcfgpath('trunk', rev)
337 337 self.tags = getcfgpath('tags', rev)
338 338 branches = getcfgpath('branches', rev)
339 339
340 340 # If the project has a trunk or branches, we will extract heads
341 341 # from them. We keep the project root otherwise.
342 342 if trunk:
343 343 oldmodule = self.module or ''
344 344 self.module += '/' + trunk
345 345 self.head = self.latest(self.module, self.last_changed)
346 346 if not self.head:
347 347 raise util.Abort(_('no revision found in module %s')
348 348 % self.module)
349 349
350 350 # First head in the list is the module's head
351 351 self.heads = [self.head]
352 352 if self.tags is not None:
353 353 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
354 354
355 355 # Check if branches bring a few more heads to the list
356 356 if branches:
357 357 rpath = self.url.strip('/')
358 358 branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
359 359 rev, False, self.ctx)
360 360 for branch in branchnames.keys():
361 361 module = '%s/%s/%s' % (oldmodule, branches, branch)
362 362 if not isdir(module, self.last_changed):
363 363 continue
364 364 brevid = self.latest(module, self.last_changed)
365 365 if not brevid:
366 366 self.ui.note(_('ignoring empty branch %s\n') % branch)
367 367 continue
368 368 self.ui.note(_('found branch %s at %d\n') %
369 369 (branch, self.revnum(brevid)))
370 370 self.heads.append(brevid)
371 371
372 372 if self.startrev and self.heads:
373 373 if len(self.heads) > 1:
374 374 raise util.Abort(_('svn: start revision is not supported '
375 375 'with more than one branch'))
376 376 revnum = self.revnum(self.heads[0])
377 377 if revnum < self.startrev:
378 378 raise util.Abort(
379 379 _('svn: no revision found after start revision %d')
380 380 % self.startrev)
381 381
382 382 return self.heads
383 383
384 384 def getchanges(self, rev):
385 385 if self._changescache and self._changescache[0] == rev:
386 386 return self._changescache[1]
387 387 self._changescache = None
388 388 (paths, parents) = self.paths[rev]
389 389 if parents:
390 390 files, self.removed, copies = self.expandpaths(rev, paths, parents)
391 391 else:
392 392 # Perform a full checkout on roots
393 393 uuid, module, revnum = revsplit(rev)
394 394 entries = svn.client.ls(self.baseurl + urllib.quote(module),
395 395 optrev(revnum), True, self.ctx)
396 396 files = [n for n, e in entries.iteritems()
397 397 if e.kind == svn.core.svn_node_file]
398 398 copies = {}
399 399 self.removed = set()
400 400
401 401 files.sort()
402 402 files = zip(files, [rev] * len(files))
403 403
404 404 # caller caches the result, so free it here to release memory
405 405 del self.paths[rev]
406 406 return (files, copies)
407 407
408 408 def getchangedfiles(self, rev, i):
409 409 changes = self.getchanges(rev)
410 410 self._changescache = (rev, changes)
411 411 return [f[0] for f in changes[0]]
412 412
413 413 def getcommit(self, rev):
414 414 if rev not in self.commits:
415 415 uuid, module, revnum = revsplit(rev)
416 416 self.module = module
417 417 self.reparent(module)
418 418 # We assume that:
419 419 # - requests for revisions after "stop" come from the
420 420 # revision graph backward traversal. Cache all of them
421 421 # down to stop, they will be used eventually.
422 422 # - requests for revisions before "stop" come to get
423 423 # isolated branches parents. Just fetch what is needed.
424 424 stop = self.lastrevs.get(module, 0)
425 425 if revnum < stop:
426 426 stop = revnum + 1
427 427 self._fetch_revisions(revnum, stop)
428 428 commit = self.commits[rev]
429 429 # caller caches the result, so free it here to release memory
430 430 del self.commits[rev]
431 431 return commit
432 432
433 433 def gettags(self):
434 434 tags = {}
435 435 if self.tags is None:
436 436 return tags
437 437
438 438 # svn tags are just a convention, project branches left in a
439 439 # 'tags' directory. There is no other relationship than
440 440 # ancestry, which is expensive to discover and makes them hard
441 441 # to update incrementally. Worse, past revisions may be
442 442 # referenced by tags far away in the future, requiring a deep
443 443 # history traversal on every calculation. Current code
444 444 # performs a single backward traversal, tracking moves within
445 445 # the tags directory (tag renaming) and recording a new tag
446 446 # every time a project is copied from outside the tags
447 447 # directory. It also lists deleted tags, this behaviour may
448 448 # change in the future.
449 449 pendings = []
450 450 tagspath = self.tags
451 451 start = svn.ra.get_latest_revnum(self.ra)
452 452 stream = self._getlog([self.tags], start, self.startrev)
453 453 try:
454 454 for entry in stream:
455 455 origpaths, revnum, author, date, message = entry
456 456 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
457 457 in origpaths.iteritems() if e.copyfrom_path]
458 458 # Apply moves/copies from more specific to general
459 459 copies.sort(reverse=True)
460 460
461 461 srctagspath = tagspath
462 462 if copies and copies[-1][2] == tagspath:
463 463 # Track tags directory moves
464 464 srctagspath = copies.pop()[0]
465 465
466 466 for source, sourcerev, dest in copies:
467 467 if not dest.startswith(tagspath + '/'):
468 468 continue
469 469 for tag in pendings:
470 470 if tag[0].startswith(dest):
471 471 tagpath = source + tag[0][len(dest):]
472 472 tag[:2] = [tagpath, sourcerev]
473 473 break
474 474 else:
475 475 pendings.append([source, sourcerev, dest])
476 476
477 477 # Filter out tags with children coming from different
478 478 # parts of the repository like:
479 479 # /tags/tag.1 (from /trunk:10)
480 480 # /tags/tag.1/foo (from /branches/foo:12)
481 481 # Here /tags/tag.1 is discarded, as well as its children.
482 482 # It happens with tools like cvs2svn. Such tags cannot
483 483 # be represented in mercurial.
484 484 addeds = dict((p, e.copyfrom_path) for p, e
485 485 in origpaths.iteritems()
486 486 if e.action == 'A' and e.copyfrom_path)
487 487 badroots = set()
488 488 for destroot in addeds:
489 489 for source, sourcerev, dest in pendings:
490 490 if (not dest.startswith(destroot + '/')
491 491 or source.startswith(addeds[destroot] + '/')):
492 492 continue
493 493 badroots.add(destroot)
494 494 break
495 495
496 496 for badroot in badroots:
497 497 pendings = [p for p in pendings if p[2] != badroot
498 498 and not p[2].startswith(badroot + '/')]
499 499
500 500 # Tell tag renamings from tag creations
501 501 remainings = []
502 502 for source, sourcerev, dest in pendings:
503 503 tagname = dest.split('/')[-1]
504 504 if source.startswith(srctagspath):
505 505 remainings.append([source, sourcerev, tagname])
506 506 continue
507 507 if tagname in tags:
508 508 # Keep the latest tag value
509 509 continue
510 510 # From revision may be fake, get one with changes
511 511 try:
512 512 tagid = self.latest(source, sourcerev)
513 513 if tagid and tagname not in tags:
514 514 tags[tagname] = tagid
515 515 except SvnPathNotFound:
516 516 # It happens when we are following directories
517 517 # we assumed were copied with their parents
518 518 # but were really created in the tag
519 519 # directory.
520 520 pass
521 521 pendings = remainings
522 522 tagspath = srctagspath
523 523 finally:
524 524 stream.close()
525 525 return tags
526 526
527 527 def converted(self, rev, destrev):
528 528 if not self.wc:
529 529 return
530 530 if self.convertfp is None:
531 531 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
532 532 'a')
533 533 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
534 534 self.convertfp.flush()
535 535
536 536 def revid(self, revnum, module=None):
537 537 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
538 538
539 539 def revnum(self, rev):
540 540 return int(rev.split('@')[-1])
541 541
542 542 def latest(self, path, stop=0):
543 543 """Find the latest revid affecting path, up to stop. It may return
544 544 a revision in a different module, since a branch may be moved without
545 545 a change being reported. Return None if computed module does not
546 546 belong to rootmodule subtree.
547 547 """
548 548 if not path.startswith(self.rootmodule):
549 549 # Requests on foreign branches may be forbidden at server level
550 550 self.ui.debug('ignoring foreign branch %r\n' % path)
551 551 return None
552 552
553 553 if not stop:
554 554 stop = svn.ra.get_latest_revnum(self.ra)
555 555 try:
556 556 prevmodule = self.reparent('')
557 557 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
558 558 self.reparent(prevmodule)
559 559 except SubversionException:
560 560 dirent = None
561 561 if not dirent:
562 562 raise SvnPathNotFound(_('%s not found up to revision %d')
563 563 % (path, stop))
564 564
565 565 # stat() gives us the previous revision on this line of
566 566 # development, but it might be in *another module*. Fetch the
567 567 # log and detect renames down to the latest revision.
568 568 stream = self._getlog([path], stop, dirent.created_rev)
569 569 try:
570 570 for entry in stream:
571 571 paths, revnum, author, date, message = entry
572 572 if revnum <= dirent.created_rev:
573 573 break
574 574
575 575 for p in paths:
576 576 if not path.startswith(p) or not paths[p].copyfrom_path:
577 577 continue
578 578 newpath = paths[p].copyfrom_path + path[len(p):]
579 579 self.ui.debug("branch renamed from %s to %s at %d\n" %
580 580 (path, newpath, revnum))
581 581 path = newpath
582 582 break
583 583 finally:
584 584 stream.close()
585 585
586 586 if not path.startswith(self.rootmodule):
587 587 self.ui.debug('ignoring foreign branch %r\n' % path)
588 588 return None
589 589 return self.revid(dirent.created_rev, path)
590 590
591 591 def reparent(self, module):
592 592 """Reparent the svn transport and return the previous parent."""
593 593 if self.prevmodule == module:
594 594 return module
595 595 svnurl = self.baseurl + urllib.quote(module)
596 596 prevmodule = self.prevmodule
597 597 if prevmodule is None:
598 598 prevmodule = ''
599 599 self.ui.debug("reparent to %s\n" % svnurl)
600 600 svn.ra.reparent(self.ra, svnurl)
601 601 self.prevmodule = module
602 602 return prevmodule
603 603
604 604 def expandpaths(self, rev, paths, parents):
605 605 changed, removed = set(), set()
606 606 copies = {}
607 607
608 608 new_module, revnum = revsplit(rev)[1:]
609 609 if new_module != self.module:
610 610 self.module = new_module
611 611 self.reparent(self.module)
612 612
613 613 for i, (path, ent) in enumerate(paths):
614 614 self.ui.progress(_('scanning paths'), i, item=path,
615 615 total=len(paths))
616 616 entrypath = self.getrelpath(path)
617 617
618 618 kind = self._checkpath(entrypath, revnum)
619 619 if kind == svn.core.svn_node_file:
620 620 changed.add(self.recode(entrypath))
621 621 if not ent.copyfrom_path or not parents:
622 622 continue
623 623 # Copy sources not in parent revisions cannot be
624 624 # represented, ignore their origin for now
625 625 pmodule, prevnum = revsplit(parents[0])[1:]
626 626 if ent.copyfrom_rev < prevnum:
627 627 continue
628 628 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
629 629 if not copyfrom_path:
630 630 continue
631 631 self.ui.debug("copied to %s from %s@%s\n" %
632 632 (entrypath, copyfrom_path, ent.copyfrom_rev))
633 633 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
634 634 elif kind == 0: # gone, but had better be a deleted *file*
635 635 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
636 636 pmodule, prevnum = revsplit(parents[0])[1:]
637 637 parentpath = pmodule + "/" + entrypath
638 638 fromkind = self._checkpath(entrypath, prevnum, pmodule)
639 639
640 640 if fromkind == svn.core.svn_node_file:
641 641 removed.add(self.recode(entrypath))
642 642 elif fromkind == svn.core.svn_node_dir:
643 643 oroot = parentpath.strip('/')
644 644 nroot = path.strip('/')
645 645 children = self._iterfiles(oroot, prevnum)
646 646 for childpath in children:
647 647 childpath = childpath.replace(oroot, nroot)
648 648 childpath = self.getrelpath("/" + childpath, pmodule)
649 649 if childpath:
650 650 removed.add(self.recode(childpath))
651 651 else:
652 652 self.ui.debug('unknown path in revision %d: %s\n' % \
653 653 (revnum, path))
654 654 elif kind == svn.core.svn_node_dir:
655 655 if ent.action == 'M':
656 656 # If the directory just had a prop change,
657 657 # then we shouldn't need to look for its children.
658 658 continue
659 659 if ent.action == 'R' and parents:
660 660 # If a directory is replacing a file, mark the previous
661 661 # file as deleted
662 662 pmodule, prevnum = revsplit(parents[0])[1:]
663 663 pkind = self._checkpath(entrypath, prevnum, pmodule)
664 664 if pkind == svn.core.svn_node_file:
665 665 removed.add(self.recode(entrypath))
666 666 elif pkind == svn.core.svn_node_dir:
667 667 # We do not know what files were kept or removed,
668 668 # mark them all as changed.
669 669 for childpath in self._iterfiles(pmodule, prevnum):
670 670 childpath = self.getrelpath("/" + childpath)
671 671 if childpath:
672 672 changed.add(self.recode(childpath))
673 673
674 674 for childpath in self._iterfiles(path, revnum):
675 675 childpath = self.getrelpath("/" + childpath)
676 676 if childpath:
677 677 changed.add(self.recode(childpath))
678 678
679 679 # Handle directory copies
680 680 if not ent.copyfrom_path or not parents:
681 681 continue
682 682 # Copy sources not in parent revisions cannot be
683 683 # represented, ignore their origin for now
684 684 pmodule, prevnum = revsplit(parents[0])[1:]
685 685 if ent.copyfrom_rev < prevnum:
686 686 continue
687 687 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
688 688 if not copyfrompath:
689 689 continue
690 690 self.ui.debug("mark %s came from %s:%d\n"
691 691 % (path, copyfrompath, ent.copyfrom_rev))
692 692 children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
693 693 for childpath in children:
694 694 childpath = self.getrelpath("/" + childpath, pmodule)
695 695 if not childpath:
696 696 continue
697 697 copytopath = path + childpath[len(copyfrompath):]
698 698 copytopath = self.getrelpath(copytopath)
699 699 copies[self.recode(copytopath)] = self.recode(childpath)
700 700
701 701 self.ui.progress(_('scanning paths'), None)
702 702 changed.update(removed)
703 703 return (list(changed), removed, copies)
704 704
705 705 def _fetch_revisions(self, from_revnum, to_revnum):
706 706 if from_revnum < to_revnum:
707 707 from_revnum, to_revnum = to_revnum, from_revnum
708 708
709 709 self.child_cset = None
710 710
711 711 def parselogentry(orig_paths, revnum, author, date, message):
712 712 """Return the parsed commit object or None, and True if
713 713 the revision is a branch root.
714 714 """
715 715 self.ui.debug("parsing revision %d (%d changes)\n" %
716 716 (revnum, len(orig_paths)))
717 717
718 718 branched = False
719 719 rev = self.revid(revnum)
720 720 # branch log might return entries for a parent we already have
721 721
722 722 if rev in self.commits or revnum < to_revnum:
723 723 return None, branched
724 724
725 725 parents = []
726 726 # check whether this revision is the start of a branch or part
727 727 # of a branch renaming
728 728 orig_paths = sorted(orig_paths.iteritems())
729 729 root_paths = [(p, e) for p, e in orig_paths
730 730 if self.module.startswith(p)]
731 731 if root_paths:
732 732 path, ent = root_paths[-1]
733 733 if ent.copyfrom_path:
734 734 branched = True
735 735 newpath = ent.copyfrom_path + self.module[len(path):]
736 736 # ent.copyfrom_rev may not be the actual last revision
737 737 previd = self.latest(newpath, ent.copyfrom_rev)
738 738 if previd is not None:
739 739 prevmodule, prevnum = revsplit(previd)[1:]
740 740 if prevnum >= self.startrev:
741 741 parents = [previd]
742 742 self.ui.note(
743 743 _('found parent of branch %s at %d: %s\n') %
744 744 (self.module, prevnum, prevmodule))
745 745 else:
746 746 self.ui.debug("no copyfrom path, don't know what to do.\n")
747 747
748 748 paths = []
749 749 # filter out unrelated paths
750 750 for path, ent in orig_paths:
751 751 if self.getrelpath(path) is None:
752 752 continue
753 753 paths.append((path, ent))
754 754
755 755 # Example SVN datetime. Includes microseconds.
756 756 # ISO-8601 conformant
757 757 # '2007-01-04T17:35:00.902377Z'
758 758 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
759 759
760 760 log = message and self.recode(message) or ''
761 761 author = author and self.recode(author) or ''
762 762 try:
763 763 branch = self.module.split("/")[-1]
764 764 if branch == self.trunkname:
765 765 branch = None
766 766 except IndexError:
767 767 branch = None
768 768
769 769 cset = commit(author=author,
770 770 date=util.datestr(date),
771 771 desc=log,
772 772 parents=parents,
773 773 branch=branch,
774 774 rev=rev)
775 775
776 776 self.commits[rev] = cset
777 777 # The parents list is *shared* among self.paths and the
778 778 # commit object. Both will be updated below.
779 779 self.paths[rev] = (paths, cset.parents)
780 780 if self.child_cset and not self.child_cset.parents:
781 781 self.child_cset.parents[:] = [rev]
782 782 self.child_cset = cset
783 783 return cset, branched
784 784
785 785 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
786 786 (self.module, from_revnum, to_revnum))
787 787
788 788 try:
789 789 firstcset = None
790 790 lastonbranch = False
791 791 stream = self._getlog([self.module], from_revnum, to_revnum)
792 792 try:
793 793 for entry in stream:
794 794 paths, revnum, author, date, message = entry
795 795 if revnum < self.startrev:
796 796 lastonbranch = True
797 797 break
798 798 if not paths:
799 799 self.ui.debug('revision %d has no entries\n' % revnum)
800 800 # If we ever leave the loop on an empty
801 801 # revision, do not try to get a parent branch
802 802 lastonbranch = lastonbranch or revnum == 0
803 803 continue
804 804 cset, lastonbranch = parselogentry(paths, revnum, author,
805 805 date, message)
806 806 if cset:
807 807 firstcset = cset
808 808 if lastonbranch:
809 809 break
810 810 finally:
811 811 stream.close()
812 812
813 813 if not lastonbranch and firstcset and not firstcset.parents:
814 814 # The first revision of the sequence (the last fetched one)
815 815 # has invalid parents if not a branch root. Find the parent
816 816 # revision now, if any.
817 817 try:
818 818 firstrevnum = self.revnum(firstcset.rev)
819 819 if firstrevnum > 1:
820 820 latest = self.latest(self.module, firstrevnum - 1)
821 821 if latest:
822 822 firstcset.parents.append(latest)
823 823 except SvnPathNotFound:
824 824 pass
825 825 except SubversionException, (inst, num):
826 826 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
827 827 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
828 828 raise
829 829
830 830 def getfile(self, file, rev):
831 831 # TODO: ra.get_file transmits the whole file instead of diffs.
832 832 if file in self.removed:
833 833 raise IOError()
834 834 mode = ''
835 835 try:
836 836 new_module, revnum = revsplit(rev)[1:]
837 837 if self.module != new_module:
838 838 self.module = new_module
839 839 self.reparent(self.module)
840 840 io = StringIO()
841 841 info = svn.ra.get_file(self.ra, file, revnum, io)
842 842 data = io.getvalue()
843 843 # ra.get_files() seems to keep a reference on the input buffer
844 844 # preventing collection. Release it explicitly.
845 845 io.close()
846 846 if isinstance(info, list):
847 847 info = info[-1]
848 848 mode = ("svn:executable" in info) and 'x' or ''
849 849 mode = ("svn:special" in info) and 'l' or mode
850 850 except SubversionException, e:
851 851 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
852 852 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
853 853 if e.apr_err in notfound: # File not found
854 854 raise IOError()
855 855 raise
856 856 if mode == 'l':
857 857 link_prefix = "link "
858 858 if data.startswith(link_prefix):
859 859 data = data[len(link_prefix):]
860 860 return data, mode
861 861
862 862 def _iterfiles(self, path, revnum):
863 863 """Enumerate all files in path at revnum, recursively."""
864 864 path = path.strip('/')
865 865 pool = Pool()
866 866 rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
867 867 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
868 868 if path:
869 869 path += '/'
870 870 return ((path + p) for p, e in entries.iteritems()
871 871 if e.kind == svn.core.svn_node_file)
872 872
873 873 def getrelpath(self, path, module=None):
874 874 if module is None:
875 875 module = self.module
876 876 # Given the repository url of this wc, say
877 877 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
878 878 # extract the "entry" portion (a relative path) from what
879 879 # svn log --xml says, ie
880 880 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
881 881 # that is to say "tests/PloneTestCase.py"
882 882 if path.startswith(module):
883 883 relative = path.rstrip('/')[len(module):]
884 884 if relative.startswith('/'):
885 885 return relative[1:]
886 886 elif relative == '':
887 887 return relative
888 888
889 889 # The path is outside our tracked tree...
890 890 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
891 891 return None
892 892
893 893 def _checkpath(self, path, revnum, module=None):
894 894 if module is not None:
895 895 prevmodule = self.reparent('')
896 896 path = module + '/' + path
897 897 try:
898 898 # ra.check_path does not like leading slashes very much, it leads
899 899 # to PROPFIND subversion errors
900 900 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
901 901 finally:
902 902 if module is not None:
903 903 self.reparent(prevmodule)
904 904
905 905 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
906 906 strict_node_history=False):
907 907 # Normalize path names, svn >= 1.5 only wants paths relative to
908 908 # supplied URL
909 909 relpaths = []
910 910 for p in paths:
911 911 if not p.startswith('/'):
912 912 p = self.module + '/' + p
913 913 relpaths.append(p.strip('/'))
914 914 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
915 915 strict_node_history]
916 916 arg = encodeargs(args)
917 917 hgexe = util.hgexecutable()
918 918 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
919 919 stdin, stdout = util.popen2(util.quotecommand(cmd))
920 920 stdin.write(arg)
921 921 try:
922 922 stdin.close()
923 923 except IOError:
924 924 raise util.Abort(_('Mercurial failed to run itself, check'
925 925 ' hg executable is in PATH'))
926 926 return logstream(stdout)
927 927
928 928 pre_revprop_change = '''#!/bin/sh
929 929
930 930 REPOS="$1"
931 931 REV="$2"
932 932 USER="$3"
933 933 PROPNAME="$4"
934 934 ACTION="$5"
935 935
936 936 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
937 937 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
938 938 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
939 939
940 940 echo "Changing prohibited revision property" >&2
941 941 exit 1
942 942 '''
943 943
944 944 class svn_sink(converter_sink, commandline):
945 945 commit_re = re.compile(r'Committed revision (\d+).', re.M)
946 946 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
947 947
948 948 def prerun(self):
949 949 if self.wc:
950 950 os.chdir(self.wc)
951 951
952 952 def postrun(self):
953 953 if self.wc:
954 954 os.chdir(self.cwd)
955 955
956 956 def join(self, name):
957 957 return os.path.join(self.wc, '.svn', name)
958 958
959 959 def revmapfile(self):
960 960 return self.join('hg-shamap')
961 961
962 962 def authorfile(self):
963 963 return self.join('hg-authormap')
964 964
965 965 def __init__(self, ui, path):
966 966
967 967 converter_sink.__init__(self, ui, path)
968 968 commandline.__init__(self, ui, 'svn')
969 969 self.delete = []
970 970 self.setexec = []
971 971 self.delexec = []
972 972 self.copies = []
973 973 self.wc = None
974 974 self.cwd = os.getcwd()
975 975
976 976 path = os.path.realpath(path)
977 977
978 978 created = False
979 979 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
980 980 self.wc = path
981 981 self.run0('update')
982 982 else:
983 983 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
984 984
985 985 if os.path.isdir(os.path.dirname(path)):
986 986 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
987 987 ui.status(_('initializing svn repository %r\n') %
988 988 os.path.basename(path))
989 989 commandline(ui, 'svnadmin').run0('create', path)
990 990 created = path
991 991 path = util.normpath(path)
992 992 if not path.startswith('/'):
993 993 path = '/' + path
994 994 path = 'file://' + path
995 995
996 996 ui.status(_('initializing svn working copy %r\n')
997 997 % os.path.basename(wcpath))
998 998 self.run0('checkout', path, wcpath)
999 999
1000 1000 self.wc = wcpath
1001 - self.opener = util.opener(self.wc)
1002 - self.wopener = util.opener(self.wc)
1001 + self.opener = scmutil.opener(self.wc)
1002 + self.wopener = scmutil.opener(self.wc)
1003 1003 self.childmap = mapfile(ui, self.join('hg-childmap'))
1004 1004 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
1005 1005
1006 1006 if created:
1007 1007 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1008 1008 fp = open(hook, 'w')
1009 1009 fp.write(pre_revprop_change)
1010 1010 fp.close()
1011 1011 util.set_flags(hook, False, True)
1012 1012
1013 1013 output = self.run0('info')
1014 1014 self.uuid = self.uuid_re.search(output).group(1).strip()
1015 1015
1016 1016 def wjoin(self, *names):
1017 1017 return os.path.join(self.wc, *names)
1018 1018
1019 1019 def putfile(self, filename, flags, data):
1020 1020 if 'l' in flags:
1021 1021 self.wopener.symlink(data, filename)
1022 1022 else:
1023 1023 try:
1024 1024 if os.path.islink(self.wjoin(filename)):
1025 1025 os.unlink(filename)
1026 1026 except OSError:
1027 1027 pass
1028 1028 self.wopener(filename, 'w').write(data)
1029 1029
1030 1030 if self.is_exec:
1031 1031 was_exec = self.is_exec(self.wjoin(filename))
1032 1032 else:
1033 1033 # On filesystems not supporting execute-bit, there is no way
1034 1034 # to know if it is set other than by asking Subversion. Setting it
1035 1035 # systematically is just as expensive and much simpler.
1036 1036 was_exec = 'x' not in flags
1037 1037
1038 1038 util.set_flags(self.wjoin(filename), False, 'x' in flags)
1039 1039 if was_exec:
1040 1040 if 'x' not in flags:
1041 1041 self.delexec.append(filename)
1042 1042 else:
1043 1043 if 'x' in flags:
1044 1044 self.setexec.append(filename)
1045 1045
1046 1046 def _copyfile(self, source, dest):
1047 1047 # SVN's copy command pukes if the destination file exists, but
1048 1048 # our copyfile method expects to record a copy that has
1049 1049 # already occurred. Cross the semantic gap.
1050 1050 wdest = self.wjoin(dest)
1051 1051 exists = os.path.lexists(wdest)
1052 1052 if exists:
1053 1053 fd, tempname = tempfile.mkstemp(
1054 1054 prefix='hg-copy-', dir=os.path.dirname(wdest))
1055 1055 os.close(fd)
1056 1056 os.unlink(tempname)
1057 1057 os.rename(wdest, tempname)
1058 1058 try:
1059 1059 self.run0('copy', source, dest)
1060 1060 finally:
1061 1061 if exists:
1062 1062 try:
1063 1063 os.unlink(wdest)
1064 1064 except OSError:
1065 1065 pass
1066 1066 os.rename(tempname, wdest)
1067 1067
1068 1068 def dirs_of(self, files):
1069 1069 dirs = set()
1070 1070 for f in files:
1071 1071 if os.path.isdir(self.wjoin(f)):
1072 1072 dirs.add(f)
1073 1073 for i in strutil.rfindall(f, '/'):
1074 1074 dirs.add(f[:i])
1075 1075 return dirs
1076 1076
1077 1077 def add_dirs(self, files):
1078 1078 add_dirs = [d for d in sorted(self.dirs_of(files))
1079 1079 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1080 1080 if add_dirs:
1081 1081 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1082 1082 return add_dirs
1083 1083
1084 1084 def add_files(self, files):
1085 1085 if files:
1086 1086 self.xargs(files, 'add', quiet=True)
1087 1087 return files
1088 1088
1089 1089 def tidy_dirs(self, names):
1090 1090 deleted = []
1091 1091 for d in sorted(self.dirs_of(names), reverse=True):
1092 1092 wd = self.wjoin(d)
1093 1093 if os.listdir(wd) == '.svn':
1094 1094 self.run0('delete', d)
1095 1095 deleted.append(d)
1096 1096 return deleted
1097 1097
1098 1098 def addchild(self, parent, child):
1099 1099 self.childmap[parent] = child
1100 1100
1101 1101 def revid(self, rev):
1102 1102 return u"svn:%s@%s" % (self.uuid, rev)
1103 1103
1104 1104 def putcommit(self, files, copies, parents, commit, source, revmap):
1105 1105 # Apply changes to working copy
1106 1106 for f, v in files:
1107 1107 try:
1108 1108 data, mode = source.getfile(f, v)
1109 1109 except IOError:
1110 1110 self.delete.append(f)
1111 1111 else:
1112 1112 self.putfile(f, mode, data)
1113 1113 if f in copies:
1114 1114 self.copies.append([copies[f], f])
1115 1115 files = [f[0] for f in files]
1116 1116
1117 1117 for parent in parents:
1118 1118 try:
1119 1119 return self.revid(self.childmap[parent])
1120 1120 except KeyError:
1121 1121 pass
1122 1122 entries = set(self.delete)
1123 1123 files = frozenset(files)
1124 1124 entries.update(self.add_dirs(files.difference(entries)))
1125 1125 if self.copies:
1126 1126 for s, d in self.copies:
1127 1127 self._copyfile(s, d)
1128 1128 self.copies = []
1129 1129 if self.delete:
1130 1130 self.xargs(self.delete, 'delete')
1131 1131 self.delete = []
1132 1132 entries.update(self.add_files(files.difference(entries)))
1133 1133 entries.update(self.tidy_dirs(entries))
1134 1134 if self.delexec:
1135 1135 self.xargs(self.delexec, 'propdel', 'svn:executable')
1136 1136 self.delexec = []
1137 1137 if self.setexec:
1138 1138 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1139 1139 self.setexec = []
1140 1140
1141 1141 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1142 1142 fp = os.fdopen(fd, 'w')
1143 1143 fp.write(commit.desc)
1144 1144 fp.close()
1145 1145 try:
1146 1146 output = self.run0('commit',
1147 1147 username=util.shortuser(commit.author),
1148 1148 file=messagefile,
1149 1149 encoding='utf-8')
1150 1150 try:
1151 1151 rev = self.commit_re.search(output).group(1)
1152 1152 except AttributeError:
1153 1153 if not files:
1154 1154 return parents[0]
1155 1155 self.ui.warn(_('unexpected svn output:\n'))
1156 1156 self.ui.warn(output)
1157 1157 raise util.Abort(_('unable to cope with svn output'))
1158 1158 if commit.rev:
1159 1159 self.run('propset', 'hg:convert-rev', commit.rev,
1160 1160 revprop=True, revision=rev)
1161 1161 if commit.branch and commit.branch != 'default':
1162 1162 self.run('propset', 'hg:convert-branch', commit.branch,
1163 1163 revprop=True, revision=rev)
1164 1164 for parent in parents:
1165 1165 self.addchild(parent, rev)
1166 1166 return self.revid(rev)
1167 1167 finally:
1168 1168 os.unlink(messagefile)
1169 1169
1170 1170 def puttags(self, tags):
1171 1171 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1172 1172 return None, None
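As an aside on the identifiers used throughout svn_source above: revid() builds strings of the form 'svn:<uuid><module>@<revnum>' and revsplit() parses them back into their parts. A small worked example (the uuid and module values are made up):

    from hgext.convert.subversion import revsplit

    # svn_source.revid(10, module='/trunk') on a source whose uuid is '0123-abcd'
    # produces the string below; revsplit() is its inverse:
    rev = 'svn:0123-abcd/trunk@10'
    assert revsplit(rev) == ('0123-abcd', '/trunk', 10)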
@@ -1,325 +1,325 @@
1 1 # extdiff.py - external diff program support for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to allow external programs to compare revisions
9 9
10 10 The extdiff Mercurial extension allows you to use external programs
11 11 to compare revisions, or a revision with the working directory. The external
12 12 diff programs are called with a configurable set of options and two
13 13 non-option arguments: paths to directories containing snapshots of
14 14 files to compare.
15 15
16 16 The extdiff extension also allows you to configure new diff commands, so
17 17 you do not need to type :hg:`extdiff -p kdiff3` always. ::
18 18
19 19 [extdiff]
20 20 # add new command that runs GNU diff(1) in 'context diff' mode
21 21 cdiff = gdiff -Nprc5
22 22 ## or the old way:
23 23 #cmd.cdiff = gdiff
24 24 #opts.cdiff = -Nprc5
25 25
26 26 # add new command called vdiff, runs kdiff3
27 27 vdiff = kdiff3
28 28
29 29 # add new command called meld, runs meld (no need to name twice)
30 30 meld =
31 31
32 32 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
33 33 # (see http://www.vim.org/scripts/script.php?script_id=102) Non
34 34 # English user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
35 35 # your .vimrc
36 36 vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)'
37 37
38 38 Tool arguments can include variables that are expanded at runtime::
39 39
40 40 $parent1, $plabel1 - filename, descriptive label of first parent
41 41 $child, $clabel - filename, descriptive label of child revision
42 42 $parent2, $plabel2 - filename, descriptive label of second parent
43 43 $parent is an alias for $parent1.
44 44
45 45 The extdiff extension will look in your [diff-tools] and [merge-tools]
46 46 sections for diff tool arguments, when none are specified in [extdiff].
47 47
48 48 ::
49 49
50 50 [extdiff]
51 51 kdiff3 =
52 52
53 53 [diff-tools]
54 54 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
55 55
56 56 You can use -I/-X and list of file or directory names like normal
57 57 :hg:`diff` command. The extdiff extension makes snapshots of only
58 58 needed files, so running the external diff program will actually be
59 59 pretty fast (at least faster than having to compare the entire tree).
60 60 '''
61 61
62 62 from mercurial.i18n import _
63 63 from mercurial.node import short, nullid
64 - from mercurial import cmdutil, util, commands, encoding
64 + from mercurial import cmdutil, scmutil, util, commands, encoding
65 65 import os, shlex, shutil, tempfile, re
66 66
67 67 def snapshot(ui, repo, files, node, tmproot):
68 68 '''snapshot files as of some revision
69 69 if not using snapshot, -I/-X does not work and recursive diff
70 70 in tools like kdiff3 and meld displays too many files.'''
71 71 dirname = os.path.basename(repo.root)
72 72 if dirname == "":
73 73 dirname = "root"
74 74 if node is not None:
75 75 dirname = '%s.%s' % (dirname, short(node))
76 76 base = os.path.join(tmproot, dirname)
77 77 os.mkdir(base)
78 78 if node is not None:
79 79 ui.note(_('making snapshot of %d files from rev %s\n') %
80 80 (len(files), short(node)))
81 81 else:
82 82 ui.note(_('making snapshot of %d files from working directory\n') %
83 83 (len(files)))
84 - wopener = util.opener(base)
84 + wopener = scmutil.opener(base)
85 85 fns_and_mtime = []
86 86 ctx = repo[node]
87 87 for fn in files:
88 88 wfn = util.pconvert(fn)
89 89 if not wfn in ctx:
90 90 # File doesn't exist; could be a bogus modify
91 91 continue
92 92 ui.note(' %s\n' % wfn)
93 93 dest = os.path.join(base, wfn)
94 94 fctx = ctx[wfn]
95 95 data = repo.wwritedata(wfn, fctx.data())
96 96 if 'l' in fctx.flags():
97 97 wopener.symlink(data, wfn)
98 98 else:
99 99 wopener(wfn, 'w').write(data)
100 100 if 'x' in fctx.flags():
101 101 util.set_flags(dest, False, True)
102 102 if node is None:
103 103 fns_and_mtime.append((dest, repo.wjoin(fn), os.path.getmtime(dest)))
104 104 return dirname, fns_and_mtime
105 105
106 106 def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
107 107 '''Do the actual diff:
108 108
109 109 - copy to a temp structure if diffing 2 internal revisions
110 110 - copy to a temp structure if diffing working revision with
111 111 another one and more than 1 file is changed
112 112 - just invoke the diff for a single file in the working dir
113 113 '''
114 114
115 115 revs = opts.get('rev')
116 116 change = opts.get('change')
117 117 args = ' '.join(diffopts)
118 118 do3way = '$parent2' in args
119 119
120 120 if revs and change:
121 121 msg = _('cannot specify --rev and --change at the same time')
122 122 raise util.Abort(msg)
123 123 elif change:
124 124 node2 = cmdutil.revsingle(repo, change, None).node()
125 125 node1a, node1b = repo.changelog.parents(node2)
126 126 else:
127 127 node1a, node2 = cmdutil.revpair(repo, revs)
128 128 if not revs:
129 129 node1b = repo.dirstate.p2()
130 130 else:
131 131 node1b = nullid
132 132
133 133 # Disable 3-way merge if there is only one parent
134 134 if do3way:
135 135 if node1b == nullid:
136 136 do3way = False
137 137
138 138 matcher = cmdutil.match(repo, pats, opts)
139 139 mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher)[:3])
140 140 if do3way:
141 141 mod_b, add_b, rem_b = map(set, repo.status(node1b, node2, matcher)[:3])
142 142 else:
143 143 mod_b, add_b, rem_b = set(), set(), set()
144 144 modadd = mod_a | add_a | mod_b | add_b
145 145 common = modadd | rem_a | rem_b
146 146 if not common:
147 147 return 0
148 148
149 149 tmproot = tempfile.mkdtemp(prefix='extdiff.')
150 150 try:
151 151 # Always make a copy of node1a (and node1b, if applicable)
152 152 dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
153 153 dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot)[0]
154 154 rev1a = '@%d' % repo[node1a].rev()
155 155 if do3way:
156 156 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
157 157 dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot)[0]
158 158 rev1b = '@%d' % repo[node1b].rev()
159 159 else:
160 160 dir1b = None
161 161 rev1b = ''
162 162
163 163 fns_and_mtime = []
164 164
165 165 # If node2 is not the wc or there is >1 change, copy it
166 166 dir2root = ''
167 167 rev2 = ''
168 168 if node2:
169 169 dir2 = snapshot(ui, repo, modadd, node2, tmproot)[0]
170 170 rev2 = '@%d' % repo[node2].rev()
171 171 elif len(common) > 1:
172 172 #we only actually need to get the files to copy back to
173 173 #the working dir in this case (because the other cases
174 174 #are: diffing 2 revisions or single file -- in which case
175 175 #the file is already directly passed to the diff tool).
176 176 dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot)
177 177 else:
178 178 # This lets the diff tool open the changed file directly
179 179 dir2 = ''
180 180 dir2root = repo.root
181 181
182 182 label1a = rev1a
183 183 label1b = rev1b
184 184 label2 = rev2
185 185
186 186 # If only one change, diff the files instead of the directories
187 187 # Handle bogus modifies correctly by checking if the files exist
188 188 if len(common) == 1:
189 189 common_file = util.localpath(common.pop())
190 190 dir1a = os.path.join(tmproot, dir1a, common_file)
191 191 label1a = common_file + rev1a
192 192 if not os.path.isfile(dir1a):
193 193 dir1a = os.devnull
194 194 if do3way:
195 195 dir1b = os.path.join(tmproot, dir1b, common_file)
196 196 label1b = common_file + rev1b
197 197 if not os.path.isfile(dir1b):
198 198 dir1b = os.devnull
199 199 dir2 = os.path.join(dir2root, dir2, common_file)
200 200 label2 = common_file + rev2
201 201
202 202 # Function to quote file/dir names in the argument string.
203 203 # When not operating in 3-way mode, an empty string is
204 204 # returned for parent2
205 205 replace = dict(parent=dir1a, parent1=dir1a, parent2=dir1b,
206 206 plabel1=label1a, plabel2=label1b,
207 207 clabel=label2, child=dir2)
208 208 def quote(match):
209 209 key = match.group()[1:]
210 210 if not do3way and key == 'parent2':
211 211 return ''
212 212 return util.shellquote(replace[key])
213 213
214 214 # Match parent2 first, so 'parent1?' will match both parent1 and parent
215 215 regex = '\$(parent2|parent1?|child|plabel1|plabel2|clabel)'
216 216 if not do3way and not re.search(regex, args):
217 217 args += ' $parent1 $child'
218 218 args = re.sub(regex, quote, args)
219 219 cmdline = util.shellquote(diffcmd) + ' ' + args
220 220
221 221 ui.debug('running %r in %s\n' % (cmdline, tmproot))
222 222 util.system(cmdline, cwd=tmproot)
223 223
224 224 for copy_fn, working_fn, mtime in fns_and_mtime:
225 225 if os.path.getmtime(copy_fn) != mtime:
226 226 ui.debug('file changed while diffing. '
227 227 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
228 228 util.copyfile(copy_fn, working_fn)
229 229
230 230 return 1
231 231 finally:
232 232 ui.note(_('cleaning up temp directory\n'))
233 233 shutil.rmtree(tmproot)
234 234
235 235 def extdiff(ui, repo, *pats, **opts):
236 236 '''use external program to diff repository (or selected files)
237 237
238 238 Show differences between revisions for the specified files, using
239 239 an external program. The default program used is diff, with
240 240 default options "-Npru".
241 241
242 242 To select a different program, use the -p/--program option. The
243 243 program will be passed the names of two directories to compare. To
244 244 pass additional options to the program, use -o/--option. These
245 245 will be passed before the names of the directories to compare.
246 246
247 247 When two revision arguments are given, then changes are shown
248 248 between those revisions. If only one revision is specified then
249 249 that revision is compared to the working directory, and, when no
250 250 revisions are specified, the working directory files are compared
251 251 to its parent.'''
252 252 program = opts.get('program')
253 253 option = opts.get('option')
254 254 if not program:
255 255 program = 'diff'
256 256 option = option or ['-Npru']
257 257 return dodiff(ui, repo, program, option, pats, opts)
258 258
259 259 cmdtable = {
260 260 "extdiff":
261 261 (extdiff,
262 262 [('p', 'program', '',
263 263 _('comparison program to run'), _('CMD')),
264 264 ('o', 'option', [],
265 265 _('pass option to comparison program'), _('OPT')),
266 266 ('r', 'rev', [],
267 267 _('revision'), _('REV')),
268 268 ('c', 'change', '',
269 269 _('change made by revision'), _('REV')),
270 270 ] + commands.walkopts,
271 271 _('hg extdiff [OPT]... [FILE]...')),
272 272 }
273 273
274 274 def uisetup(ui):
275 275 for cmd, path in ui.configitems('extdiff'):
276 276 if cmd.startswith('cmd.'):
277 277 cmd = cmd[4:]
278 278 if not path:
279 279 path = cmd
280 280 diffopts = ui.config('extdiff', 'opts.' + cmd, '')
281 281 diffopts = diffopts and [diffopts] or []
282 282 elif cmd.startswith('opts.'):
283 283 continue
284 284 else:
285 285 # command = path opts
286 286 if path:
287 287 diffopts = shlex.split(path)
288 288 path = diffopts.pop(0)
289 289 else:
290 290 path, diffopts = cmd, []
291 291 # look for diff arguments in [diff-tools] then [merge-tools]
292 292 if diffopts == []:
293 293 args = ui.config('diff-tools', cmd+'.diffargs') or \
294 294 ui.config('merge-tools', cmd+'.diffargs')
295 295 if args:
296 296 diffopts = shlex.split(args)
297 297 def save(cmd, path, diffopts):
298 298 '''use closure to save diff command to use'''
299 299 def mydiff(ui, repo, *pats, **opts):
300 300 return dodiff(ui, repo, path, diffopts + opts['option'],
301 301 pats, opts)
302 302 doc = _('''\
303 303 use %(path)s to diff repository (or selected files)
304 304
305 305 Show differences between revisions for the specified files, using
306 306 the %(path)s program.
307 307
308 308 When two revision arguments are given, then changes are shown
309 309 between those revisions. If only one revision is specified then
310 310 that revision is compared to the working directory, and, when no
311 311 revisions are specified, the working directory files are compared
312 312 to its parent.\
313 313 ''') % dict(path=util.uirepr(path))
314 314
315 315 # We must translate the docstring right away since it is
316 316 # used as a format string. The string will unfortunately
317 317 # be translated again in commands.helpcmd and this will
318 318 # fail when the docstring contains non-ASCII characters.
319 319 # Decoding the string to a Unicode string here (using the
320 320 # right encoding) prevents that.
321 321 mydiff.__doc__ = doc.decode(encoding.encoding)
322 322 return mydiff
323 323 cmdtable[cmd] = (save(cmd, path, diffopts),
324 324 cmdtable['extdiff'][1][1:],
325 325 _('hg %s [OPTION]... [FILE]...') % cmd)
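As a concrete illustration of the [extdiff] keys the uisetup() loop above consumes (the tool and command names here are illustrative, not taken from this changeset): a 'cmd.NAME' entry names the external program behind a new 'hg NAME' command, 'opts.NAME' supplies its default options, and a bare 'NAME = program opts' line is split with shlex.

    [extdiff]
    # 'hg mydiff' runs "diff -Npru" on the two snapshot directories
    cmd.mydiff = diff
    opts.mydiff = -Npru
    # bare form: 'hg meld' runs the meld program directly
    meld =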
@@ -1,3273 +1,3273
1 1 # mq.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''manage a stack of patches
9 9
10 10 This extension lets you work with a stack of patches in a Mercurial
11 11 repository. It manages two stacks of patches - all known patches, and
12 12 applied patches (subset of known patches).
13 13
14 14 Known patches are represented as patch files in the .hg/patches
15 15 directory. Applied patches are both patch files and changesets.
16 16
17 17 Common tasks (use :hg:`help command` for more details)::
18 18
19 19 create new patch qnew
20 20 import existing patch qimport
21 21
22 22 print patch series qseries
23 23 print applied patches qapplied
24 24
25 25 add known patch to applied stack qpush
26 26 remove patch from applied stack qpop
27 27 refresh contents of top applied patch qrefresh
28 28
29 29 By default, mq will automatically use git patches when required to
30 30 avoid losing file mode changes, copy records, binary files, or empty
31 31 file creations or deletions. This behaviour can be configured with::
32 32
33 33 [mq]
34 34 git = auto/keep/yes/no
35 35
36 36 If set to 'keep', mq will obey the [diff] section configuration while
37 37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 38 'no', mq will override the [diff] section and always generate git or
39 39 regular patches, possibly losing data in the second case.
40 40
41 41 You will by default be managing a patch queue named "patches". You can
42 42 create other, independent patch queues with the :hg:`qqueue` command.
43 43 '''
44 44
45 45 from mercurial.i18n import _
46 46 from mercurial.node import bin, hex, short, nullid, nullrev
47 47 from mercurial.lock import release
48 from mercurial import commands, cmdutil, hg, patch, util
48 from mercurial import commands, cmdutil, hg, patch, scmutil, util
49 49 from mercurial import repair, extensions, url, error
50 50 import os, sys, re, errno, shutil
51 51
52 52 commands.norepo += " qclone"
53 53
54 54 # Patch names look like unix-file names.
55 55 # They must be joinable with the queue directory and result in the patch path.
56 56 normname = util.normpath
57 57
58 58 class statusentry(object):
59 59 def __init__(self, node, name):
60 60 self.node, self.name = node, name
61 61 def __repr__(self):
62 62 return hex(self.node) + ':' + self.name
63 63
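As a quick illustration of the on-disk format statusentry round-trips (values here are invented), each line of the status file is simply 'hexnode:patchname', split on the first colon when it is read back by the applied property below:

    # Standalone sketch, not mq code: parse one status-file line.
    line = "0123456789abcdef0123456789abcdef01234567:first.patch"
    node_hex, name = line.split(':', 1)
    # node_hex -> 40-char changeset hash, name -> "first.patch"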
64 64 class patchheader(object):
65 65 def __init__(self, pf, plainmode=False):
66 66 def eatdiff(lines):
67 67 while lines:
68 68 l = lines[-1]
69 69 if (l.startswith("diff -") or
70 70 l.startswith("Index:") or
71 71 l.startswith("===========")):
72 72 del lines[-1]
73 73 else:
74 74 break
75 75 def eatempty(lines):
76 76 while lines:
77 77 if not lines[-1].strip():
78 78 del lines[-1]
79 79 else:
80 80 break
81 81
82 82 message = []
83 83 comments = []
84 84 user = None
85 85 date = None
86 86 parent = None
87 87 format = None
88 88 subject = None
89 89 branch = None
90 90 nodeid = None
91 91 diffstart = 0
92 92
93 93 for line in file(pf):
94 94 line = line.rstrip()
95 95 if (line.startswith('diff --git')
96 96 or (diffstart and line.startswith('+++ '))):
97 97 diffstart = 2
98 98 break
99 99 diffstart = 0 # reset
100 100 if line.startswith("--- "):
101 101 diffstart = 1
102 102 continue
103 103 elif format == "hgpatch":
104 104 # parse values when importing the result of an hg export
105 105 if line.startswith("# User "):
106 106 user = line[7:]
107 107 elif line.startswith("# Date "):
108 108 date = line[7:]
109 109 elif line.startswith("# Parent "):
110 110 parent = line[9:]
111 111 elif line.startswith("# Branch "):
112 112 branch = line[9:]
113 113 elif line.startswith("# Node ID "):
114 114 nodeid = line[10:]
115 115 elif not line.startswith("# ") and line:
116 116 message.append(line)
117 117 format = None
118 118 elif line == '# HG changeset patch':
119 119 message = []
120 120 format = "hgpatch"
121 121 elif (format != "tagdone" and (line.startswith("Subject: ") or
122 122 line.startswith("subject: "))):
123 123 subject = line[9:]
124 124 format = "tag"
125 125 elif (format != "tagdone" and (line.startswith("From: ") or
126 126 line.startswith("from: "))):
127 127 user = line[6:]
128 128 format = "tag"
129 129 elif (format != "tagdone" and (line.startswith("Date: ") or
130 130 line.startswith("date: "))):
131 131 date = line[6:]
132 132 format = "tag"
133 133 elif format == "tag" and line == "":
134 134 # when looking for tags (subject: from: etc) they
135 135 # end once you find a blank line in the source
136 136 format = "tagdone"
137 137 elif message or line:
138 138 message.append(line)
139 139 comments.append(line)
140 140
141 141 eatdiff(message)
142 142 eatdiff(comments)
143 143 # Remember the exact starting line of the patch diffs before consuming
144 144 # empty lines, for external use by TortoiseHg and others
145 145 self.diffstartline = len(comments)
146 146 eatempty(message)
147 147 eatempty(comments)
148 148
149 149 # make sure message isn't empty
150 150 if format and format.startswith("tag") and subject:
151 151 message.insert(0, "")
152 152 message.insert(0, subject)
153 153
154 154 self.message = message
155 155 self.comments = comments
156 156 self.user = user
157 157 self.date = date
158 158 self.parent = parent
159 159 # nodeid and branch are for external use by TortoiseHg and others
160 160 self.nodeid = nodeid
161 161 self.branch = branch
162 162 self.haspatch = diffstart > 1
163 163 self.plainmode = plainmode
164 164
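For orientation, a hypothetical patch in the 'hgpatch' format recognised above could look like the sample below: the '# User', '# Date' and '# Parent' lines fill the corresponding fields, the remaining non-'# ' lines become the message, and diffstart flips once the diff itself begins. All values are invented.

    # Illustrative input only; not part of this changeset.
    SAMPLE_PATCH = """\
    # HG changeset patch
    # User Jane Doe <jane@example.com>
    # Date 1300000000 0
    # Parent 0123456789abcdef0123456789abcdef01234567
    fix the frobnicator

    diff --git a/frob.py b/frob.py
    """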
165 165 def setuser(self, user):
166 166 if not self.updateheader(['From: ', '# User '], user):
167 167 try:
168 168 patchheaderat = self.comments.index('# HG changeset patch')
169 169 self.comments.insert(patchheaderat + 1, '# User ' + user)
170 170 except ValueError:
171 171 if self.plainmode or self._hasheader(['Date: ']):
172 172 self.comments = ['From: ' + user] + self.comments
173 173 else:
174 174 tmp = ['# HG changeset patch', '# User ' + user, '']
175 175 self.comments = tmp + self.comments
176 176 self.user = user
177 177
178 178 def setdate(self, date):
179 179 if not self.updateheader(['Date: ', '# Date '], date):
180 180 try:
181 181 patchheaderat = self.comments.index('# HG changeset patch')
182 182 self.comments.insert(patchheaderat + 1, '# Date ' + date)
183 183 except ValueError:
184 184 if self.plainmode or self._hasheader(['From: ']):
185 185 self.comments = ['Date: ' + date] + self.comments
186 186 else:
187 187 tmp = ['# HG changeset patch', '# Date ' + date, '']
188 188 self.comments = tmp + self.comments
189 189 self.date = date
190 190
191 191 def setparent(self, parent):
192 192 if not self.updateheader(['# Parent '], parent):
193 193 try:
194 194 patchheaderat = self.comments.index('# HG changeset patch')
195 195 self.comments.insert(patchheaderat + 1, '# Parent ' + parent)
196 196 except ValueError:
197 197 pass
198 198 self.parent = parent
199 199
200 200 def setmessage(self, message):
201 201 if self.comments:
202 202 self._delmsg()
203 203 self.message = [message]
204 204 self.comments += self.message
205 205
206 206 def updateheader(self, prefixes, new):
207 207 '''Update all references to a field in the patch header.
208 208 Return whether the field is present.'''
209 209 res = False
210 210 for prefix in prefixes:
211 211 for i in xrange(len(self.comments)):
212 212 if self.comments[i].startswith(prefix):
213 213 self.comments[i] = prefix + new
214 214 res = True
215 215 break
216 216 return res
217 217
218 218 def _hasheader(self, prefixes):
219 219 '''Check if a header starts with any of the given prefixes.'''
220 220 for prefix in prefixes:
221 221 for comment in self.comments:
222 222 if comment.startswith(prefix):
223 223 return True
224 224 return False
225 225
226 226 def __str__(self):
227 227 if not self.comments:
228 228 return ''
229 229 return '\n'.join(self.comments) + '\n\n'
230 230
231 231 def _delmsg(self):
232 232 '''Remove existing message, keeping the rest of the comment fields.
233 233 If comments contains a 'subject: ' field, the message is assumed to
234 234 begin with that subject and a blank line, which are removed as well.'''
235 235 if self.message:
236 236 subj = 'subject: ' + self.message[0].lower()
237 237 for i in xrange(len(self.comments)):
238 238 if subj == self.comments[i].lower():
239 239 del self.comments[i]
240 240 self.message = self.message[2:]
241 241 break
242 242 ci = 0
243 243 for mi in self.message:
244 244 while mi != self.comments[ci]:
245 245 ci += 1
246 246 del self.comments[ci]
247 247
248 248 class queue(object):
249 249 def __init__(self, ui, path, patchdir=None):
250 250 self.basepath = path
251 251 try:
252 252 fh = open(os.path.join(path, 'patches.queue'))
253 253 cur = fh.read().rstrip()
254 254 fh.close()
255 255 if not cur:
256 256 curpath = os.path.join(path, 'patches')
257 257 else:
258 258 curpath = os.path.join(path, 'patches-' + cur)
259 259 except IOError:
260 260 curpath = os.path.join(path, 'patches')
261 261 self.path = patchdir or curpath
262 self.opener = util.opener(self.path)
262 self.opener = scmutil.opener(self.path)
263 263 self.ui = ui
264 264 self.applied_dirty = 0
265 265 self.series_dirty = 0
266 266 self.added = []
267 267 self.series_path = "series"
268 268 self.status_path = "status"
269 269 self.guards_path = "guards"
270 270 self.active_guards = None
271 271 self.guards_dirty = False
272 272 # Handle mq.git as a bool with extended values
273 273 try:
274 274 gitmode = ui.configbool('mq', 'git', None)
275 275 if gitmode is None:
276 276 raise error.ConfigError()
277 277 self.gitmode = gitmode and 'yes' or 'no'
278 278 except error.ConfigError:
279 279 self.gitmode = ui.config('mq', 'git', 'auto').lower()
280 280 self.plainmode = ui.configbool('mq', 'plain', False)
281 281
282 282 @util.propertycache
283 283 def applied(self):
284 284 if os.path.exists(self.join(self.status_path)):
285 285 def parselines(lines):
286 286 for l in lines:
287 287 entry = l.split(':', 1)
288 288 if len(entry) > 1:
289 289 n, name = entry
290 290 yield statusentry(bin(n), name)
291 291 elif l.strip():
292 292 self.ui.warn(_('malformed mq status line: %s\n') % entry)
293 293 # else we ignore empty lines
294 294 lines = self.opener(self.status_path).read().splitlines()
295 295 return list(parselines(lines))
296 296 return []
297 297
298 298 @util.propertycache
299 299 def full_series(self):
300 300 if os.path.exists(self.join(self.series_path)):
301 301 return self.opener(self.series_path).read().splitlines()
302 302 return []
303 303
304 304 @util.propertycache
305 305 def series(self):
306 306 self.parse_series()
307 307 return self.series
308 308
309 309 @util.propertycache
310 310 def series_guards(self):
311 311 self.parse_series()
312 312 return self.series_guards
313 313
314 314 def invalidate(self):
315 315 for a in 'applied full_series series series_guards'.split():
316 316 if a in self.__dict__:
317 317 delattr(self, a)
318 318 self.applied_dirty = 0
319 319 self.series_dirty = 0
320 320 self.guards_dirty = False
321 321 self.active_guards = None
322 322
323 323 def diffopts(self, opts={}, patchfn=None):
324 324 diffopts = patch.diffopts(self.ui, opts)
325 325 if self.gitmode == 'auto':
326 326 diffopts.upgrade = True
327 327 elif self.gitmode == 'keep':
328 328 pass
329 329 elif self.gitmode in ('yes', 'no'):
330 330 diffopts.git = self.gitmode == 'yes'
331 331 else:
332 332 raise util.Abort(_('mq.git option can be auto/keep/yes/no'
333 333 ' got %s') % self.gitmode)
334 334 if patchfn:
335 335 diffopts = self.patchopts(diffopts, patchfn)
336 336 return diffopts
337 337
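The mq.git handling above reduces to a small mapping; a standalone sketch of the same decision (not mq's own helper):

    # Sketch of queue.diffopts()'s mq.git handling.
    def gitflags(gitmode):
        if gitmode == 'auto':
            return {'upgrade': True}            # upgrade to git patches when needed
        if gitmode == 'keep':
            return {}                           # leave existing patches as they are
        if gitmode in ('yes', 'no'):
            return {'git': gitmode == 'yes'}    # force git or plain patches
        raise ValueError('mq.git can be auto/keep/yes/no, got %s' % gitmode)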
338 338 def patchopts(self, diffopts, *patches):
339 339 """Return a copy of input diff options with git set to true if
340 340 referenced patch is a git patch and should be preserved as such.
341 341 """
342 342 diffopts = diffopts.copy()
343 343 if not diffopts.git and self.gitmode == 'keep':
344 344 for patchfn in patches:
345 345 patchf = self.opener(patchfn, 'r')
346 346 # if the patch was a git patch, refresh it as a git patch
347 347 for line in patchf:
348 348 if line.startswith('diff --git'):
349 349 diffopts.git = True
350 350 break
351 351 patchf.close()
352 352 return diffopts
353 353
354 354 def join(self, *p):
355 355 return os.path.join(self.path, *p)
356 356
357 357 def find_series(self, patch):
358 358 def matchpatch(l):
359 359 l = l.split('#', 1)[0]
360 360 return l.strip() == patch
361 361 for index, l in enumerate(self.full_series):
362 362 if matchpatch(l):
363 363 return index
364 364 return None
365 365
366 366 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
367 367
368 368 def parse_series(self):
369 369 self.series = []
370 370 self.series_guards = []
371 371 for l in self.full_series:
372 372 h = l.find('#')
373 373 if h == -1:
374 374 patch = l
375 375 comment = ''
376 376 elif h == 0:
377 377 continue
378 378 else:
379 379 patch = l[:h]
380 380 comment = l[h:]
381 381 patch = patch.strip()
382 382 if patch:
383 383 if patch in self.series:
384 384 raise util.Abort(_('%s appears more than once in %s') %
385 385 (patch, self.join(self.series_path)))
386 386 self.series.append(patch)
387 387 self.series_guards.append(self.guard_re.findall(comment))
388 388
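A standalone sketch of how parse_series() above splits a series line with guards: the patch name is everything before the first '#', and the guards are pulled out of the comment with guard_re (the regex is copied from the hunk above; the series line is invented):

    import re

    guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
    line = "feature-x.patch #+experimental #-stable"
    h = line.find('#')
    patch, comment = line[:h].strip(), line[h:]
    print(patch)                      # feature-x.patch
    print(guard_re.findall(comment))  # ['+experimental', '-stable']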
389 389 def check_guard(self, guard):
390 390 if not guard:
391 391 return _('guard cannot be an empty string')
392 392 bad_chars = '# \t\r\n\f'
393 393 first = guard[0]
394 394 if first in '-+':
395 395 return (_('guard %r starts with invalid character: %r') %
396 396 (guard, first))
397 397 for c in bad_chars:
398 398 if c in guard:
399 399 return _('invalid character in guard %r: %r') % (guard, c)
400 400
401 401 def set_active(self, guards):
402 402 for guard in guards:
403 403 bad = self.check_guard(guard)
404 404 if bad:
405 405 raise util.Abort(bad)
406 406 guards = sorted(set(guards))
407 407 self.ui.debug('active guards: %s\n' % ' '.join(guards))
408 408 self.active_guards = guards
409 409 self.guards_dirty = True
410 410
411 411 def active(self):
412 412 if self.active_guards is None:
413 413 self.active_guards = []
414 414 try:
415 415 guards = self.opener(self.guards_path).read().split()
416 416 except IOError, err:
417 417 if err.errno != errno.ENOENT:
418 418 raise
419 419 guards = []
420 420 for i, guard in enumerate(guards):
421 421 bad = self.check_guard(guard)
422 422 if bad:
423 423 self.ui.warn('%s:%d: %s\n' %
424 424 (self.join(self.guards_path), i + 1, bad))
425 425 else:
426 426 self.active_guards.append(guard)
427 427 return self.active_guards
428 428
429 429 def set_guards(self, idx, guards):
430 430 for g in guards:
431 431 if len(g) < 2:
432 432 raise util.Abort(_('guard %r too short') % g)
433 433 if g[0] not in '-+':
434 434 raise util.Abort(_('guard %r starts with invalid char') % g)
435 435 bad = self.check_guard(g[1:])
436 436 if bad:
437 437 raise util.Abort(bad)
438 438 drop = self.guard_re.sub('', self.full_series[idx])
439 439 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
440 440 self.parse_series()
441 441 self.series_dirty = True
442 442
443 443 def pushable(self, idx):
444 444 if isinstance(idx, str):
445 445 idx = self.series.index(idx)
446 446 patchguards = self.series_guards[idx]
447 447 if not patchguards:
448 448 return True, None
449 449 guards = self.active()
450 450 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
451 451 if exactneg:
452 452 return False, exactneg[0]
453 453 pos = [g for g in patchguards if g[0] == '+']
454 454 exactpos = [g for g in pos if g[1:] in guards]
455 455 if pos:
456 456 if exactpos:
457 457 return True, exactpos[0]
458 458 return False, pos
459 459 return True, ''
460 460
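The decision implemented by pushable() above can be summarised in a few lines; a simplified standalone sketch that returns only the boolean:

    # Sketch of the guard rules in queue.pushable().
    def is_pushable(patchguards, active):
        if not patchguards:
            return True
        if any(g[0] == '-' and g[1:] in active for g in patchguards):
            return False                              # active negative guard blocks it
        pos = [g for g in patchguards if g[0] == '+']
        if pos:
            return any(g[1:] in active for g in pos)  # need one matching positive guard
        return True

    print(is_pushable(['+experimental'], {'experimental'}))  # True
    print(is_pushable(['-stable'], {'stable'}))              # False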
461 461 def explain_pushable(self, idx, all_patches=False):
462 462 write = all_patches and self.ui.write or self.ui.warn
463 463 if all_patches or self.ui.verbose:
464 464 if isinstance(idx, str):
465 465 idx = self.series.index(idx)
466 466 pushable, why = self.pushable(idx)
467 467 if all_patches and pushable:
468 468 if why is None:
469 469 write(_('allowing %s - no guards in effect\n') %
470 470 self.series[idx])
471 471 else:
472 472 if not why:
473 473 write(_('allowing %s - no matching negative guards\n') %
474 474 self.series[idx])
475 475 else:
476 476 write(_('allowing %s - guarded by %r\n') %
477 477 (self.series[idx], why))
478 478 if not pushable:
479 479 if why:
480 480 write(_('skipping %s - guarded by %r\n') %
481 481 (self.series[idx], why))
482 482 else:
483 483 write(_('skipping %s - no matching guards\n') %
484 484 self.series[idx])
485 485
486 486 def save_dirty(self):
487 487 def write_list(items, path):
488 488 fp = self.opener(path, 'w')
489 489 for i in items:
490 490 fp.write("%s\n" % i)
491 491 fp.close()
492 492 if self.applied_dirty:
493 493 write_list(map(str, self.applied), self.status_path)
494 494 if self.series_dirty:
495 495 write_list(self.full_series, self.series_path)
496 496 if self.guards_dirty:
497 497 write_list(self.active_guards, self.guards_path)
498 498 if self.added:
499 499 qrepo = self.qrepo()
500 500 if qrepo:
501 501 qrepo[None].add(f for f in self.added if f not in qrepo[None])
502 502 self.added = []
503 503
504 504 def removeundo(self, repo):
505 505 undo = repo.sjoin('undo')
506 506 if not os.path.exists(undo):
507 507 return
508 508 try:
509 509 os.unlink(undo)
510 510 except OSError, inst:
511 511 self.ui.warn(_('error removing undo: %s\n') % str(inst))
512 512
513 513 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
514 514 fp=None, changes=None, opts={}):
515 515 stat = opts.get('stat')
516 516 m = cmdutil.match(repo, files, opts)
517 517 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
518 518 changes, stat, fp)
519 519
520 520 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
521 521 # first try just applying the patch
522 522 (err, n) = self.apply(repo, [patch], update_status=False,
523 523 strict=True, merge=rev)
524 524
525 525 if err == 0:
526 526 return (err, n)
527 527
528 528 if n is None:
529 529 raise util.Abort(_("apply failed for patch %s") % patch)
530 530
531 531 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
532 532
533 533 # apply failed, strip away that rev and merge.
534 534 hg.clean(repo, head)
535 535 self.strip(repo, [n], update=False, backup='strip')
536 536
537 537 ctx = repo[rev]
538 538 ret = hg.merge(repo, rev)
539 539 if ret:
540 540 raise util.Abort(_("update returned %d") % ret)
541 541 n = repo.commit(ctx.description(), ctx.user(), force=True)
542 542 if n is None:
543 543 raise util.Abort(_("repo commit failed"))
544 544 try:
545 545 ph = patchheader(mergeq.join(patch), self.plainmode)
546 546 except:
547 547 raise util.Abort(_("unable to read %s") % patch)
548 548
549 549 diffopts = self.patchopts(diffopts, patch)
550 550 patchf = self.opener(patch, "w")
551 551 comments = str(ph)
552 552 if comments:
553 553 patchf.write(comments)
554 554 self.printdiff(repo, diffopts, head, n, fp=patchf)
555 555 patchf.close()
556 556 self.removeundo(repo)
557 557 return (0, n)
558 558
559 559 def qparents(self, repo, rev=None):
560 560 if rev is None:
561 561 (p1, p2) = repo.dirstate.parents()
562 562 if p2 == nullid:
563 563 return p1
564 564 if not self.applied:
565 565 return None
566 566 return self.applied[-1].node
567 567 p1, p2 = repo.changelog.parents(rev)
568 568 if p2 != nullid and p2 in [x.node for x in self.applied]:
569 569 return p2
570 570 return p1
571 571
572 572 def mergepatch(self, repo, mergeq, series, diffopts):
573 573 if not self.applied:
574 574 # each of the patches merged in will have two parents. This
575 575 # can confuse the qrefresh, qdiff, and strip code because it
576 576 # needs to know which parent is actually in the patch queue.
577 577 # So, we insert a merge marker with only one parent. This way
578 578 # the first patch in the queue is never a merge patch
579 579 #
580 580 pname = ".hg.patches.merge.marker"
581 581 n = repo.commit('[mq]: merge marker', force=True)
582 582 self.removeundo(repo)
583 583 self.applied.append(statusentry(n, pname))
584 584 self.applied_dirty = 1
585 585
586 586 head = self.qparents(repo)
587 587
588 588 for patch in series:
589 589 patch = mergeq.lookup(patch, strict=True)
590 590 if not patch:
591 591 self.ui.warn(_("patch %s does not exist\n") % patch)
592 592 return (1, None)
593 593 pushable, reason = self.pushable(patch)
594 594 if not pushable:
595 595 self.explain_pushable(patch, all_patches=True)
596 596 continue
597 597 info = mergeq.isapplied(patch)
598 598 if not info:
599 599 self.ui.warn(_("patch %s is not applied\n") % patch)
600 600 return (1, None)
601 601 rev = info[1]
602 602 err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
603 603 if head:
604 604 self.applied.append(statusentry(head, patch))
605 605 self.applied_dirty = 1
606 606 if err:
607 607 return (err, head)
608 608 self.save_dirty()
609 609 return (0, head)
610 610
611 611 def patch(self, repo, patchfile):
612 612 '''Apply patchfile to the working directory.
613 613 patchfile: name of patch file'''
614 614 files = {}
615 615 try:
616 616 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
617 617 files=files, eolmode=None)
618 618 except Exception, inst:
619 619 self.ui.note(str(inst) + '\n')
620 620 if not self.ui.verbose:
621 621 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
622 622 return (False, files, False)
623 623
624 624 return (True, files, fuzz)
625 625
626 626 def apply(self, repo, series, list=False, update_status=True,
627 627 strict=False, patchdir=None, merge=None, all_files=None):
628 628 wlock = lock = tr = None
629 629 try:
630 630 wlock = repo.wlock()
631 631 lock = repo.lock()
632 632 tr = repo.transaction("qpush")
633 633 try:
634 634 ret = self._apply(repo, series, list, update_status,
635 635 strict, patchdir, merge, all_files=all_files)
636 636 tr.close()
637 637 self.save_dirty()
638 638 return ret
639 639 except:
640 640 try:
641 641 tr.abort()
642 642 finally:
643 643 repo.invalidate()
644 644 repo.dirstate.invalidate()
645 645 raise
646 646 finally:
647 647 release(tr, lock, wlock)
648 648 self.removeundo(repo)
649 649
650 650 def _apply(self, repo, series, list=False, update_status=True,
651 651 strict=False, patchdir=None, merge=None, all_files=None):
652 652 '''returns (error, hash)
653 653 error = 1 for unable to read, 2 for patch failed, 3 for patch fuzz'''
654 654 # TODO unify with commands.py
655 655 if not patchdir:
656 656 patchdir = self.path
657 657 err = 0
658 658 n = None
659 659 for patchname in series:
660 660 pushable, reason = self.pushable(patchname)
661 661 if not pushable:
662 662 self.explain_pushable(patchname, all_patches=True)
663 663 continue
664 664 self.ui.status(_("applying %s\n") % patchname)
665 665 pf = os.path.join(patchdir, patchname)
666 666
667 667 try:
668 668 ph = patchheader(self.join(patchname), self.plainmode)
669 669 except:
670 670 self.ui.warn(_("unable to read %s\n") % patchname)
671 671 err = 1
672 672 break
673 673
674 674 message = ph.message
675 675 if not message:
676 676 # The commit message should not be translated
677 677 message = "imported patch %s\n" % patchname
678 678 else:
679 679 if list:
680 680 # The commit message should not be translated
681 681 message.append("\nimported patch %s" % patchname)
682 682 message = '\n'.join(message)
683 683
684 684 if ph.haspatch:
685 685 (patcherr, files, fuzz) = self.patch(repo, pf)
686 686 if all_files is not None:
687 687 all_files.update(files)
688 688 patcherr = not patcherr
689 689 else:
690 690 self.ui.warn(_("patch %s is empty\n") % patchname)
691 691 patcherr, files, fuzz = 0, [], 0
692 692
693 693 if merge and files:
694 694 # Mark as removed/merged and update dirstate parent info
695 695 removed = []
696 696 merged = []
697 697 for f in files:
698 698 if os.path.lexists(repo.wjoin(f)):
699 699 merged.append(f)
700 700 else:
701 701 removed.append(f)
702 702 for f in removed:
703 703 repo.dirstate.remove(f)
704 704 for f in merged:
705 705 repo.dirstate.merge(f)
706 706 p1, p2 = repo.dirstate.parents()
707 707 repo.dirstate.setparents(p1, merge)
708 708
709 709 files = cmdutil.updatedir(self.ui, repo, files)
710 710 match = cmdutil.matchfiles(repo, files or [])
711 711 n = repo.commit(message, ph.user, ph.date, match=match, force=True)
712 712
713 713 if n is None:
714 714 raise util.Abort(_("repository commit failed"))
715 715
716 716 if update_status:
717 717 self.applied.append(statusentry(n, patchname))
718 718
719 719 if patcherr:
720 720 self.ui.warn(_("patch failed, rejects left in working dir\n"))
721 721 err = 2
722 722 break
723 723
724 724 if fuzz and strict:
725 725 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
726 726 err = 3
727 727 break
728 728 return (err, n)
729 729
730 730 def _cleanup(self, patches, numrevs, keep=False):
731 731 if not keep:
732 732 r = self.qrepo()
733 733 if r:
734 734 r[None].remove(patches, True)
735 735 else:
736 736 for p in patches:
737 737 os.unlink(self.join(p))
738 738
739 739 if numrevs:
740 740 del self.applied[:numrevs]
741 741 self.applied_dirty = 1
742 742
743 743 for i in sorted([self.find_series(p) for p in patches], reverse=True):
744 744 del self.full_series[i]
745 745 self.parse_series()
746 746 self.series_dirty = 1
747 747
748 748 def _revpatches(self, repo, revs):
749 749 firstrev = repo[self.applied[0].node].rev()
750 750 patches = []
751 751 for i, rev in enumerate(revs):
752 752
753 753 if rev < firstrev:
754 754 raise util.Abort(_('revision %d is not managed') % rev)
755 755
756 756 ctx = repo[rev]
757 757 base = self.applied[i].node
758 758 if ctx.node() != base:
759 759 msg = _('cannot delete revision %d above applied patches')
760 760 raise util.Abort(msg % rev)
761 761
762 762 patch = self.applied[i].name
763 763 for fmt in ('[mq]: %s', 'imported patch %s'):
764 764 if ctx.description() == fmt % patch:
765 765 msg = _('patch %s finalized without changeset message\n')
766 766 repo.ui.status(msg % patch)
767 767 break
768 768
769 769 patches.append(patch)
770 770 return patches
771 771
772 772 def finish(self, repo, revs):
773 773 patches = self._revpatches(repo, sorted(revs))
774 774 self._cleanup(patches, len(patches))
775 775
776 776 def delete(self, repo, patches, opts):
777 777 if not patches and not opts.get('rev'):
778 778 raise util.Abort(_('qdelete requires at least one revision or '
779 779 'patch name'))
780 780
781 781 realpatches = []
782 782 for patch in patches:
783 783 patch = self.lookup(patch, strict=True)
784 784 info = self.isapplied(patch)
785 785 if info:
786 786 raise util.Abort(_("cannot delete applied patch %s") % patch)
787 787 if patch not in self.series:
788 788 raise util.Abort(_("patch %s not in series file") % patch)
789 789 if patch not in realpatches:
790 790 realpatches.append(patch)
791 791
792 792 numrevs = 0
793 793 if opts.get('rev'):
794 794 if not self.applied:
795 795 raise util.Abort(_('no patches applied'))
796 796 revs = cmdutil.revrange(repo, opts.get('rev'))
797 797 if len(revs) > 1 and revs[0] > revs[1]:
798 798 revs.reverse()
799 799 revpatches = self._revpatches(repo, revs)
800 800 realpatches += revpatches
801 801 numrevs = len(revpatches)
802 802
803 803 self._cleanup(realpatches, numrevs, opts.get('keep'))
804 804
805 805 def check_toppatch(self, repo):
806 806 if self.applied:
807 807 top = self.applied[-1].node
808 808 patch = self.applied[-1].name
809 809 pp = repo.dirstate.parents()
810 810 if top not in pp:
811 811 raise util.Abort(_("working directory revision is not qtip"))
812 812 return top, patch
813 813 return None, None
814 814
815 815 def check_substate(self, repo):
816 816 '''return list of subrepos at a different revision than substate.
817 817 Abort if any subrepos have uncommitted changes.'''
818 818 inclsubs = []
819 819 wctx = repo[None]
820 820 for s in wctx.substate:
821 821 if wctx.sub(s).dirty(True):
822 822 raise util.Abort(
823 823 _("uncommitted changes in subrepository %s") % s)
824 824 elif wctx.sub(s).dirty():
825 825 inclsubs.append(s)
826 826 return inclsubs
827 827
828 828 def check_localchanges(self, repo, force=False, refresh=True):
829 829 m, a, r, d = repo.status()[:4]
830 830 if (m or a or r or d) and not force:
831 831 if refresh:
832 832 raise util.Abort(_("local changes found, refresh first"))
833 833 else:
834 834 raise util.Abort(_("local changes found"))
835 835 return m, a, r, d
836 836
837 837 _reserved = ('series', 'status', 'guards')
838 838 def check_reserved_name(self, name):
839 839 if (name in self._reserved or name.startswith('.hg')
840 840 or name.startswith('.mq') or '#' in name or ':' in name):
841 841 raise util.Abort(_('"%s" cannot be used as the name of a patch')
842 842 % name)
843 843
844 844 def new(self, repo, patchfn, *pats, **opts):
845 845 """options:
846 846 msg: a string or a no-argument function returning a string
847 847 """
848 848 msg = opts.get('msg')
849 849 user = opts.get('user')
850 850 date = opts.get('date')
851 851 if date:
852 852 date = util.parsedate(date)
853 853 diffopts = self.diffopts({'git': opts.get('git')})
854 854 self.check_reserved_name(patchfn)
855 855 if os.path.exists(self.join(patchfn)):
856 856 if os.path.isdir(self.join(patchfn)):
857 857 raise util.Abort(_('"%s" already exists as a directory')
858 858 % patchfn)
859 859 else:
860 860 raise util.Abort(_('patch "%s" already exists') % patchfn)
861 861
862 862 inclsubs = self.check_substate(repo)
863 863 if inclsubs:
864 864 inclsubs.append('.hgsubstate')
865 865 if opts.get('include') or opts.get('exclude') or pats:
866 866 if inclsubs:
867 867 pats = list(pats or []) + inclsubs
868 868 match = cmdutil.match(repo, pats, opts)
869 869 # detect missing files in pats
870 870 def badfn(f, msg):
871 871 if f != '.hgsubstate': # .hgsubstate is auto-created
872 872 raise util.Abort('%s: %s' % (f, msg))
873 873 match.bad = badfn
874 874 m, a, r, d = repo.status(match=match)[:4]
875 875 else:
876 876 m, a, r, d = self.check_localchanges(repo, force=True)
877 877 match = cmdutil.matchfiles(repo, m + a + r + inclsubs)
878 878 if len(repo[None].parents()) > 1:
879 879 raise util.Abort(_('cannot manage merge changesets'))
880 880 commitfiles = m + a + r
881 881 self.check_toppatch(repo)
882 882 insert = self.full_series_end()
883 883 wlock = repo.wlock()
884 884 try:
885 885 try:
886 886 # if patch file write fails, abort early
887 887 p = self.opener(patchfn, "w")
888 888 except IOError, e:
889 889 raise util.Abort(_('cannot write patch "%s": %s')
890 890 % (patchfn, e.strerror))
891 891 try:
892 892 if self.plainmode:
893 893 if user:
894 894 p.write("From: " + user + "\n")
895 895 if not date:
896 896 p.write("\n")
897 897 if date:
898 898 p.write("Date: %d %d\n\n" % date)
899 899 else:
900 900 p.write("# HG changeset patch\n")
901 901 p.write("# Parent "
902 902 + hex(repo[None].p1().node()) + "\n")
903 903 if user:
904 904 p.write("# User " + user + "\n")
905 905 if date:
906 906 p.write("# Date %s %s\n\n" % date)
907 907 if hasattr(msg, '__call__'):
908 908 msg = msg()
909 909 commitmsg = msg and msg or ("[mq]: %s" % patchfn)
910 910 n = repo.commit(commitmsg, user, date, match=match, force=True)
911 911 if n is None:
912 912 raise util.Abort(_("repo commit failed"))
913 913 try:
914 914 self.full_series[insert:insert] = [patchfn]
915 915 self.applied.append(statusentry(n, patchfn))
916 916 self.parse_series()
917 917 self.series_dirty = 1
918 918 self.applied_dirty = 1
919 919 if msg:
920 920 msg = msg + "\n\n"
921 921 p.write(msg)
922 922 if commitfiles:
923 923 parent = self.qparents(repo, n)
924 924 chunks = patch.diff(repo, node1=parent, node2=n,
925 925 match=match, opts=diffopts)
926 926 for chunk in chunks:
927 927 p.write(chunk)
928 928 p.close()
929 929 wlock.release()
930 930 wlock = None
931 931 r = self.qrepo()
932 932 if r:
933 933 r[None].add([patchfn])
934 934 except:
935 935 repo.rollback()
936 936 raise
937 937 except Exception:
938 938 patchpath = self.join(patchfn)
939 939 try:
940 940 os.unlink(patchpath)
941 941 except:
942 942 self.ui.warn(_('error unlinking %s\n') % patchpath)
943 943 raise
944 944 self.removeundo(repo)
945 945 finally:
946 946 release(wlock)
947 947
948 948 def strip(self, repo, revs, update=True, backup="all", force=None):
949 949 wlock = lock = None
950 950 try:
951 951 wlock = repo.wlock()
952 952 lock = repo.lock()
953 953
954 954 if update:
955 955 self.check_localchanges(repo, force=force, refresh=False)
956 956 urev = self.qparents(repo, revs[0])
957 957 hg.clean(repo, urev)
958 958 repo.dirstate.write()
959 959
960 960 self.removeundo(repo)
961 961 for rev in revs:
962 962 repair.strip(self.ui, repo, rev, backup)
963 963 # strip may have unbundled a set of backed up revisions after
964 964 # the actual strip
965 965 self.removeundo(repo)
966 966 finally:
967 967 release(lock, wlock)
968 968
969 969 def isapplied(self, patch):
970 970 """returns (index, rev, patch)"""
971 971 for i, a in enumerate(self.applied):
972 972 if a.name == patch:
973 973 return (i, a.node, a.name)
974 974 return None
975 975
976 976 # if the exact patch name does not exist, we try a few
977 977 # variations. If strict is passed, we try only #1
978 978 #
979 979 # 1) a number to indicate an offset in the series file
980 980 # 2) a unique substring of the patch name was given
981 981 # 3) patchname[-+]num to indicate an offset in the series file
982 982 def lookup(self, patch, strict=False):
983 983 patch = patch and str(patch)
984 984
985 985 def partial_name(s):
986 986 if s in self.series:
987 987 return s
988 988 matches = [x for x in self.series if s in x]
989 989 if len(matches) > 1:
990 990 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
991 991 for m in matches:
992 992 self.ui.warn(' %s\n' % m)
993 993 return None
994 994 if matches:
995 995 return matches[0]
996 996 if self.series and self.applied:
997 997 if s == 'qtip':
998 998 return self.series[self.series_end(True)-1]
999 999 if s == 'qbase':
1000 1000 return self.series[0]
1001 1001 return None
1002 1002
1003 1003 if patch is None:
1004 1004 return None
1005 1005 if patch in self.series:
1006 1006 return patch
1007 1007
1008 1008 if not os.path.isfile(self.join(patch)):
1009 1009 try:
1010 1010 sno = int(patch)
1011 1011 except (ValueError, OverflowError):
1012 1012 pass
1013 1013 else:
1014 1014 if -len(self.series) <= sno < len(self.series):
1015 1015 return self.series[sno]
1016 1016
1017 1017 if not strict:
1018 1018 res = partial_name(patch)
1019 1019 if res:
1020 1020 return res
1021 1021 minus = patch.rfind('-')
1022 1022 if minus >= 0:
1023 1023 res = partial_name(patch[:minus])
1024 1024 if res:
1025 1025 i = self.series.index(res)
1026 1026 try:
1027 1027 off = int(patch[minus + 1:] or 1)
1028 1028 except (ValueError, OverflowError):
1029 1029 pass
1030 1030 else:
1031 1031 if i - off >= 0:
1032 1032 return self.series[i - off]
1033 1033 plus = patch.rfind('+')
1034 1034 if plus >= 0:
1035 1035 res = partial_name(patch[:plus])
1036 1036 if res:
1037 1037 i = self.series.index(res)
1038 1038 try:
1039 1039 off = int(patch[plus + 1:] or 1)
1040 1040 except (ValueError, OverflowError):
1041 1041 pass
1042 1042 else:
1043 1043 if i + off < len(self.series):
1044 1044 return self.series[i + off]
1045 1045 raise util.Abort(_("patch %s not in series") % patch)
1046 1046
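The 'patchname[-+]num' offset form handled by lookup() above resolves to a position in the series; a standalone sketch with an invented series (lookup() additionally checks that the offset stays within bounds):

    series = ['a.patch', 'b.patch', 'c.patch']
    name = 'c.patch-2'
    minus = name.rfind('-')
    base, off = name[:minus], int(name[minus + 1:] or 1)
    print(series[series.index(base) - off])   # a.patch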
1047 1047 def push(self, repo, patch=None, force=False, list=False,
1048 1048 mergeq=None, all=False, move=False, exact=False):
1049 1049 diffopts = self.diffopts()
1050 1050 wlock = repo.wlock()
1051 1051 try:
1052 1052 heads = []
1053 1053 for b, ls in repo.branchmap().iteritems():
1054 1054 heads += ls
1055 1055 if not heads:
1056 1056 heads = [nullid]
1057 1057 if repo.dirstate.p1() not in heads and not exact:
1058 1058 self.ui.status(_("(working directory not at a head)\n"))
1059 1059
1060 1060 if not self.series:
1061 1061 self.ui.warn(_('no patches in series\n'))
1062 1062 return 0
1063 1063
1064 1064 patch = self.lookup(patch)
1065 1065 # Suppose our series file is: A B C and the current 'top'
1066 1066 # patch is B. qpush C should be performed (moving forward)
1067 1067 # qpush B is a NOP (no change) qpush A is an error (can't
1068 1068 # go backwards with qpush)
1069 1069 if patch:
1070 1070 info = self.isapplied(patch)
1071 1071 if info and info[0] >= len(self.applied) - 1:
1072 1072 self.ui.warn(
1073 1073 _('qpush: %s is already at the top\n') % patch)
1074 1074 return 0
1075 1075
1076 1076 pushable, reason = self.pushable(patch)
1077 1077 if pushable:
1078 1078 if self.series.index(patch) < self.series_end():
1079 1079 raise util.Abort(
1080 1080 _("cannot push to a previous patch: %s") % patch)
1081 1081 else:
1082 1082 if reason:
1083 1083 reason = _('guarded by %r') % reason
1084 1084 else:
1085 1085 reason = _('no matching guards')
1086 1086 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1087 1087 return 1
1088 1088 elif all:
1089 1089 patch = self.series[-1]
1090 1090 if self.isapplied(patch):
1091 1091 self.ui.warn(_('all patches are currently applied\n'))
1092 1092 return 0
1093 1093
1094 1094 # Following the above example, starting at 'top' of B:
1095 1095 # qpush should be performed (pushes C), but a subsequent
1096 1096 # qpush without an argument is an error (nothing to
1097 1097 # apply). This allows a loop of "...while hg qpush..." to
1098 1098 # work as it detects an error when done
1099 1099 start = self.series_end()
1100 1100 if start == len(self.series):
1101 1101 self.ui.warn(_('patch series already fully applied\n'))
1102 1102 return 1
1103 1103 if not force:
1104 1104 self.check_localchanges(repo)
1105 1105
1106 1106 if exact:
1107 1107 if move:
1108 1108 raise util.Abort(_("cannot use --exact and --move together"))
1109 1109 if self.applied:
1110 1110 raise util.Abort(_("cannot push --exact with applied patches"))
1111 1111 root = self.series[start]
1112 1112 target = patchheader(self.join(root), self.plainmode).parent
1113 1113 if not target:
1114 1114 raise util.Abort(_("%s does not have a parent recorded") % root)
1115 1115 if not repo[target] == repo['.']:
1116 1116 hg.update(repo, target)
1117 1117
1118 1118 if move:
1119 1119 if not patch:
1120 1120 raise util.Abort(_("please specify the patch to move"))
1121 1121 for i, rpn in enumerate(self.full_series[start:]):
1122 1122 # strip markers for patch guards
1123 1123 if self.guard_re.split(rpn, 1)[0] == patch:
1124 1124 break
1125 1125 index = start + i
1126 1126 assert index < len(self.full_series)
1127 1127 fullpatch = self.full_series[index]
1128 1128 del self.full_series[index]
1129 1129 self.full_series.insert(start, fullpatch)
1130 1130 self.parse_series()
1131 1131 self.series_dirty = 1
1132 1132
1133 1133 self.applied_dirty = 1
1134 1134 if start > 0:
1135 1135 self.check_toppatch(repo)
1136 1136 if not patch:
1137 1137 patch = self.series[start]
1138 1138 end = start + 1
1139 1139 else:
1140 1140 end = self.series.index(patch, start) + 1
1141 1141
1142 1142 s = self.series[start:end]
1143 1143 all_files = set()
1144 1144 try:
1145 1145 if mergeq:
1146 1146 ret = self.mergepatch(repo, mergeq, s, diffopts)
1147 1147 else:
1148 1148 ret = self.apply(repo, s, list, all_files=all_files)
1149 1149 except:
1150 1150 self.ui.warn(_('cleaning up working directory...'))
1151 1151 node = repo.dirstate.p1()
1152 1152 hg.revert(repo, node, None)
1153 1153 # only remove unknown files that we know we touched or
1154 1154 # created while patching
1155 1155 for f in all_files:
1156 1156 if f not in repo.dirstate:
1157 1157 try:
1158 1158 util.unlinkpath(repo.wjoin(f))
1159 1159 except OSError, inst:
1160 1160 if inst.errno != errno.ENOENT:
1161 1161 raise
1162 1162 self.ui.warn(_('done\n'))
1163 1163 raise
1164 1164
1165 1165 if not self.applied:
1166 1166 return ret[0]
1167 1167 top = self.applied[-1].name
1168 1168 if ret[0] and ret[0] > 1:
1169 1169 msg = _("errors during apply, please fix and refresh %s\n")
1170 1170 self.ui.write(msg % top)
1171 1171 else:
1172 1172 self.ui.write(_("now at: %s\n") % top)
1173 1173 return ret[0]
1174 1174
1175 1175 finally:
1176 1176 wlock.release()
1177 1177
1178 1178 def pop(self, repo, patch=None, force=False, update=True, all=False):
1179 1179 wlock = repo.wlock()
1180 1180 try:
1181 1181 if patch:
1182 1182 # index, rev, patch
1183 1183 info = self.isapplied(patch)
1184 1184 if not info:
1185 1185 patch = self.lookup(patch)
1186 1186 info = self.isapplied(patch)
1187 1187 if not info:
1188 1188 raise util.Abort(_("patch %s is not applied") % patch)
1189 1189
1190 1190 if not self.applied:
1191 1191 # Allow qpop -a to work repeatedly,
1192 1192 # but not qpop without an argument
1193 1193 self.ui.warn(_("no patches applied\n"))
1194 1194 return not all
1195 1195
1196 1196 if all:
1197 1197 start = 0
1198 1198 elif patch:
1199 1199 start = info[0] + 1
1200 1200 else:
1201 1201 start = len(self.applied) - 1
1202 1202
1203 1203 if start >= len(self.applied):
1204 1204 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1205 1205 return
1206 1206
1207 1207 if not update:
1208 1208 parents = repo.dirstate.parents()
1209 1209 rr = [x.node for x in self.applied]
1210 1210 for p in parents:
1211 1211 if p in rr:
1212 1212 self.ui.warn(_("qpop: forcing dirstate update\n"))
1213 1213 update = True
1214 1214 else:
1215 1215 parents = [p.node() for p in repo[None].parents()]
1216 1216 needupdate = False
1217 1217 for entry in self.applied[start:]:
1218 1218 if entry.node in parents:
1219 1219 needupdate = True
1220 1220 break
1221 1221 update = needupdate
1222 1222
1223 1223 if not force and update:
1224 1224 self.check_localchanges(repo)
1225 1225
1226 1226 self.applied_dirty = 1
1227 1227 end = len(self.applied)
1228 1228 rev = self.applied[start].node
1229 1229 if update:
1230 1230 top = self.check_toppatch(repo)[0]
1231 1231
1232 1232 try:
1233 1233 heads = repo.changelog.heads(rev)
1234 1234 except error.LookupError:
1235 1235 node = short(rev)
1236 1236 raise util.Abort(_('trying to pop unknown node %s') % node)
1237 1237
1238 1238 if heads != [self.applied[-1].node]:
1239 1239 raise util.Abort(_("popping would remove a revision not "
1240 1240 "managed by this patch queue"))
1241 1241
1242 1242 # we know there are no local changes, so we can make a simplified
1243 1243 # form of hg.update.
1244 1244 if update:
1245 1245 qp = self.qparents(repo, rev)
1246 1246 ctx = repo[qp]
1247 1247 m, a, r, d = repo.status(qp, top)[:4]
1248 1248 if d:
1249 1249 raise util.Abort(_("deletions found between repo revs"))
1250 1250 for f in a:
1251 1251 try:
1252 1252 util.unlinkpath(repo.wjoin(f))
1253 1253 except OSError, e:
1254 1254 if e.errno != errno.ENOENT:
1255 1255 raise
1256 1256 repo.dirstate.forget(f)
1257 1257 for f in m + r:
1258 1258 fctx = ctx[f]
1259 1259 repo.wwrite(f, fctx.data(), fctx.flags())
1260 1260 repo.dirstate.normal(f)
1261 1261 repo.dirstate.setparents(qp, nullid)
1262 1262 for patch in reversed(self.applied[start:end]):
1263 1263 self.ui.status(_("popping %s\n") % patch.name)
1264 1264 del self.applied[start:end]
1265 1265 self.strip(repo, [rev], update=False, backup='strip')
1266 1266 if self.applied:
1267 1267 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1268 1268 else:
1269 1269 self.ui.write(_("patch queue now empty\n"))
1270 1270 finally:
1271 1271 wlock.release()
1272 1272
1273 1273 def diff(self, repo, pats, opts):
1274 1274 top, patch = self.check_toppatch(repo)
1275 1275 if not top:
1276 1276 self.ui.write(_("no patches applied\n"))
1277 1277 return
1278 1278 qp = self.qparents(repo, top)
1279 1279 if opts.get('reverse'):
1280 1280 node1, node2 = None, qp
1281 1281 else:
1282 1282 node1, node2 = qp, None
1283 1283 diffopts = self.diffopts(opts, patch)
1284 1284 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1285 1285
1286 1286 def refresh(self, repo, pats=None, **opts):
1287 1287 if not self.applied:
1288 1288 self.ui.write(_("no patches applied\n"))
1289 1289 return 1
1290 1290 msg = opts.get('msg', '').rstrip()
1291 1291 newuser = opts.get('user')
1292 1292 newdate = opts.get('date')
1293 1293 if newdate:
1294 1294 newdate = '%d %d' % util.parsedate(newdate)
1295 1295 wlock = repo.wlock()
1296 1296
1297 1297 try:
1298 1298 self.check_toppatch(repo)
1299 1299 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1300 1300 if repo.changelog.heads(top) != [top]:
1301 1301 raise util.Abort(_("cannot refresh a revision with children"))
1302 1302
1303 1303 inclsubs = self.check_substate(repo)
1304 1304
1305 1305 cparents = repo.changelog.parents(top)
1306 1306 patchparent = self.qparents(repo, top)
1307 1307 ph = patchheader(self.join(patchfn), self.plainmode)
1308 1308 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1309 1309 if msg:
1310 1310 ph.setmessage(msg)
1311 1311 if newuser:
1312 1312 ph.setuser(newuser)
1313 1313 if newdate:
1314 1314 ph.setdate(newdate)
1315 1315 ph.setparent(hex(patchparent))
1316 1316
1317 1317 # only commit new patch when write is complete
1318 1318 patchf = self.opener(patchfn, 'w', atomictemp=True)
1319 1319
1320 1320 comments = str(ph)
1321 1321 if comments:
1322 1322 patchf.write(comments)
1323 1323
1324 1324 # update the dirstate in place, strip off the qtip commit
1325 1325 # and then commit.
1326 1326 #
1327 1327 # this should really read:
1328 1328 # mm, dd, aa = repo.status(top, patchparent)[:3]
1329 1329 # but we do it backwards to take advantage of manifest/chlog
1330 1330 # caching against the next repo.status call
1331 1331 mm, aa, dd = repo.status(patchparent, top)[:3]
1332 1332 changes = repo.changelog.read(top)
1333 1333 man = repo.manifest.read(changes[0])
1334 1334 aaa = aa[:]
1335 1335 matchfn = cmdutil.match(repo, pats, opts)
1336 1336 # in short mode, we only diff the files included in the
1337 1337 # patch already plus specified files
1338 1338 if opts.get('short'):
1339 1339 # if amending a patch, we start with existing
1340 1340 # files plus specified files - unfiltered
1341 1341 match = cmdutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1342 1342 # filter with inc/exl options
1343 1343 matchfn = cmdutil.match(repo, opts=opts)
1344 1344 else:
1345 1345 match = cmdutil.matchall(repo)
1346 1346 m, a, r, d = repo.status(match=match)[:4]
1347 1347 mm = set(mm)
1348 1348 aa = set(aa)
1349 1349 dd = set(dd)
1350 1350
1351 1351 # we might end up with files that were added between
1352 1352 # qtip and the dirstate parent, but then changed in the
1353 1353 # local dirstate. in this case, we want them to only
1354 1354 # show up in the added section
1355 1355 for x in m:
1356 1356 if x not in aa:
1357 1357 mm.add(x)
1358 1358 # we might end up with files added by the local dirstate that
1359 1359 # were deleted by the patch. In this case, they should only
1360 1360 # show up in the changed section.
1361 1361 for x in a:
1362 1362 if x in dd:
1363 1363 dd.remove(x)
1364 1364 mm.add(x)
1365 1365 else:
1366 1366 aa.add(x)
1367 1367 # make sure any files deleted in the local dirstate
1368 1368 # are not in the add or change column of the patch
1369 1369 forget = []
1370 1370 for x in d + r:
1371 1371 if x in aa:
1372 1372 aa.remove(x)
1373 1373 forget.append(x)
1374 1374 continue
1375 1375 else:
1376 1376 mm.discard(x)
1377 1377 dd.add(x)
1378 1378
1379 1379 m = list(mm)
1380 1380 r = list(dd)
1381 1381 a = list(aa)
1382 1382 c = [filter(matchfn, l) for l in (m, a, r)]
1383 1383 match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2] + inclsubs))
1384 1384 chunks = patch.diff(repo, patchparent, match=match,
1385 1385 changes=c, opts=diffopts)
1386 1386 for chunk in chunks:
1387 1387 patchf.write(chunk)
1388 1388
1389 1389 try:
1390 1390 if diffopts.git or diffopts.upgrade:
1391 1391 copies = {}
1392 1392 for dst in a:
1393 1393 src = repo.dirstate.copied(dst)
1394 1394 # during qfold, the source file for copies may
1395 1395 # be removed. Treat this as a simple add.
1396 1396 if src is not None and src in repo.dirstate:
1397 1397 copies.setdefault(src, []).append(dst)
1398 1398 repo.dirstate.add(dst)
1399 1399 # remember the copies between patchparent and qtip
1400 1400 for dst in aaa:
1401 1401 f = repo.file(dst)
1402 1402 src = f.renamed(man[dst])
1403 1403 if src:
1404 1404 copies.setdefault(src[0], []).extend(
1405 1405 copies.get(dst, []))
1406 1406 if dst in a:
1407 1407 copies[src[0]].append(dst)
1408 1408 # we can't copy a file created by the patch itself
1409 1409 if dst in copies:
1410 1410 del copies[dst]
1411 1411 for src, dsts in copies.iteritems():
1412 1412 for dst in dsts:
1413 1413 repo.dirstate.copy(src, dst)
1414 1414 else:
1415 1415 for dst in a:
1416 1416 repo.dirstate.add(dst)
1417 1417 # Drop useless copy information
1418 1418 for f in list(repo.dirstate.copies()):
1419 1419 repo.dirstate.copy(None, f)
1420 1420 for f in r:
1421 1421 repo.dirstate.remove(f)
1422 1422 # if the patch excludes a modified file, mark that
1423 1423 # file with mtime=0 so status can see it.
1424 1424 mm = []
1425 1425 for i in xrange(len(m)-1, -1, -1):
1426 1426 if not matchfn(m[i]):
1427 1427 mm.append(m[i])
1428 1428 del m[i]
1429 1429 for f in m:
1430 1430 repo.dirstate.normal(f)
1431 1431 for f in mm:
1432 1432 repo.dirstate.normallookup(f)
1433 1433 for f in forget:
1434 1434 repo.dirstate.forget(f)
1435 1435
1436 1436 if not msg:
1437 1437 if not ph.message:
1438 1438 message = "[mq]: %s\n" % patchfn
1439 1439 else:
1440 1440 message = "\n".join(ph.message)
1441 1441 else:
1442 1442 message = msg
1443 1443
1444 1444 user = ph.user or changes[1]
1445 1445
1446 1446 # assumes strip can roll itself back if interrupted
1447 1447 repo.dirstate.setparents(*cparents)
1448 1448 self.applied.pop()
1449 1449 self.applied_dirty = 1
1450 1450 self.strip(repo, [top], update=False,
1451 1451 backup='strip')
1452 1452 except:
1453 1453 repo.dirstate.invalidate()
1454 1454 raise
1455 1455
1456 1456 try:
1457 1457 # might be nice to attempt to roll back strip after this
1458 1458 n = repo.commit(message, user, ph.date, match=match,
1459 1459 force=True)
1460 1460 # only write patch after a successful commit
1461 1461 patchf.rename()
1462 1462 self.applied.append(statusentry(n, patchfn))
1463 1463 except:
1464 1464 ctx = repo[cparents[0]]
1465 1465 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1466 1466 self.save_dirty()
1467 1467 self.ui.warn(_('refresh interrupted while patch was popped! '
1468 1468 '(revert --all, qpush to recover)\n'))
1469 1469 raise
1470 1470 finally:
1471 1471 wlock.release()
1472 1472 self.removeundo(repo)
1473 1473
1474 1474 def init(self, repo, create=False):
1475 1475 if not create and os.path.isdir(self.path):
1476 1476 raise util.Abort(_("patch queue directory already exists"))
1477 1477 try:
1478 1478 os.mkdir(self.path)
1479 1479 except OSError, inst:
1480 1480 if inst.errno != errno.EEXIST or not create:
1481 1481 raise
1482 1482 if create:
1483 1483 return self.qrepo(create=True)
1484 1484
1485 1485 def unapplied(self, repo, patch=None):
1486 1486 if patch and patch not in self.series:
1487 1487 raise util.Abort(_("patch %s is not in series file") % patch)
1488 1488 if not patch:
1489 1489 start = self.series_end()
1490 1490 else:
1491 1491 start = self.series.index(patch) + 1
1492 1492 unapplied = []
1493 1493 for i in xrange(start, len(self.series)):
1494 1494 pushable, reason = self.pushable(i)
1495 1495 if pushable:
1496 1496 unapplied.append((i, self.series[i]))
1497 1497 self.explain_pushable(i)
1498 1498 return unapplied
1499 1499
1500 1500 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1501 1501 summary=False):
1502 1502 def displayname(pfx, patchname, state):
1503 1503 if pfx:
1504 1504 self.ui.write(pfx)
1505 1505 if summary:
1506 1506 ph = patchheader(self.join(patchname), self.plainmode)
1507 1507 msg = ph.message and ph.message[0] or ''
1508 1508 if self.ui.formatted():
1509 1509 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
1510 1510 if width > 0:
1511 1511 msg = util.ellipsis(msg, width)
1512 1512 else:
1513 1513 msg = ''
1514 1514 self.ui.write(patchname, label='qseries.' + state)
1515 1515 self.ui.write(': ')
1516 1516 self.ui.write(msg, label='qseries.message.' + state)
1517 1517 else:
1518 1518 self.ui.write(patchname, label='qseries.' + state)
1519 1519 self.ui.write('\n')
1520 1520
1521 1521 applied = set([p.name for p in self.applied])
1522 1522 if length is None:
1523 1523 length = len(self.series) - start
1524 1524 if not missing:
1525 1525 if self.ui.verbose:
1526 1526 idxwidth = len(str(start + length - 1))
1527 1527 for i in xrange(start, start + length):
1528 1528 patch = self.series[i]
1529 1529 if patch in applied:
1530 1530 char, state = 'A', 'applied'
1531 1531 elif self.pushable(i)[0]:
1532 1532 char, state = 'U', 'unapplied'
1533 1533 else:
1534 1534 char, state = 'G', 'guarded'
1535 1535 pfx = ''
1536 1536 if self.ui.verbose:
1537 1537 pfx = '%*d %s ' % (idxwidth, i, char)
1538 1538 elif status and status != char:
1539 1539 continue
1540 1540 displayname(pfx, patch, state)
1541 1541 else:
1542 1542 msng_list = []
1543 1543 for root, dirs, files in os.walk(self.path):
1544 1544 d = root[len(self.path) + 1:]
1545 1545 for f in files:
1546 1546 fl = os.path.join(d, f)
1547 1547 if (fl not in self.series and
1548 1548 fl not in (self.status_path, self.series_path,
1549 1549 self.guards_path)
1550 1550 and not fl.startswith('.')):
1551 1551 msng_list.append(fl)
1552 1552 for x in sorted(msng_list):
1553 1553 pfx = self.ui.verbose and ('D ') or ''
1554 1554 displayname(pfx, x, 'missing')
1555 1555
1556 1556 def issaveline(self, l):
1557 1557 if l.name == '.hg.patches.save.line':
1558 1558 return True
1559 1559
1560 1560 def qrepo(self, create=False):
1561 1561 ui = self.ui.copy()
1562 1562 ui.setconfig('paths', 'default', '', overlay=False)
1563 1563 ui.setconfig('paths', 'default-push', '', overlay=False)
1564 1564 if create or os.path.isdir(self.join(".hg")):
1565 1565 return hg.repository(ui, path=self.path, create=create)
1566 1566
1567 1567 def restore(self, repo, rev, delete=None, qupdate=None):
1568 1568 desc = repo[rev].description().strip()
1569 1569 lines = desc.splitlines()
1570 1570 i = 0
1571 1571 datastart = None
1572 1572 series = []
1573 1573 applied = []
1574 1574 qpp = None
1575 1575 for i, line in enumerate(lines):
1576 1576 if line == 'Patch Data:':
1577 1577 datastart = i + 1
1578 1578 elif line.startswith('Dirstate:'):
1579 1579 l = line.rstrip()
1580 1580 l = l[10:].split(' ')
1581 1581 qpp = [bin(x) for x in l]
1582 1582 elif datastart is not None:
1583 1583 l = line.rstrip()
1584 1584 n, name = l.split(':', 1)
1585 1585 if n:
1586 1586 applied.append(statusentry(bin(n), name))
1587 1587 else:
1588 1588 series.append(l)
1589 1589 if datastart is None:
1590 1590 self.ui.warn(_("No saved patch data found\n"))
1591 1591 return 1
1592 1592 self.ui.warn(_("restoring status: %s\n") % lines[0])
1593 1593 self.full_series = series
1594 1594 self.applied = applied
1595 1595 self.parse_series()
1596 1596 self.series_dirty = 1
1597 1597 self.applied_dirty = 1
1598 1598 heads = repo.changelog.heads()
1599 1599 if delete:
1600 1600 if rev not in heads:
1601 1601 self.ui.warn(_("save entry has children, leaving it alone\n"))
1602 1602 else:
1603 1603 self.ui.warn(_("removing save entry %s\n") % short(rev))
1604 1604 pp = repo.dirstate.parents()
1605 1605 if rev in pp:
1606 1606 update = True
1607 1607 else:
1608 1608 update = False
1609 1609 self.strip(repo, [rev], update=update, backup='strip')
1610 1610 if qpp:
1611 1611 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1612 1612 (short(qpp[0]), short(qpp[1])))
1613 1613 if qupdate:
1614 1614 self.ui.status(_("updating queue directory\n"))
1615 1615 r = self.qrepo()
1616 1616 if not r:
1617 1617 self.ui.warn(_("Unable to load queue repository\n"))
1618 1618 return 1
1619 1619 hg.clean(r, qpp[0])
1620 1620
1621 1621 def save(self, repo, msg=None):
1622 1622 if not self.applied:
1623 1623 self.ui.warn(_("save: no patches applied, exiting\n"))
1624 1624 return 1
1625 1625 if self.issaveline(self.applied[-1]):
1626 1626 self.ui.warn(_("status is already saved\n"))
1627 1627 return 1
1628 1628
1629 1629 if not msg:
1630 1630 msg = _("hg patches saved state")
1631 1631 else:
1632 1632 msg = "hg patches: " + msg.rstrip('\r\n')
1633 1633 r = self.qrepo()
1634 1634 if r:
1635 1635 pp = r.dirstate.parents()
1636 1636 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1637 1637 msg += "\n\nPatch Data:\n"
1638 1638 msg += ''.join('%s\n' % x for x in self.applied)
1639 1639 msg += ''.join(':%s\n' % x for x in self.full_series)
1640 1640 n = repo.commit(msg, force=True)
1641 1641 if not n:
1642 1642 self.ui.warn(_("repo commit failed\n"))
1643 1643 return 1
1644 1644 self.applied.append(statusentry(n, '.hg.patches.save.line'))
1645 1645 self.applied_dirty = 1
1646 1646 self.removeundo(repo)
1647 1647
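# Editor's note (added for clarity, not part of the original file): save()
# above and restore() earlier are the two halves of the deprecated
# qsave/qrestore round trip. save() commits a changeset whose description
# embeds the queue state (a "Dirstate: <p1> <p2>" line plus a "Patch Data:"
# block listing applied entries and series lines), and restore() parses that
# exact layout back into self.applied and self.full_series.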
1648 1648 def full_series_end(self):
1649 1649 if self.applied:
1650 1650 p = self.applied[-1].name
1651 1651 end = self.find_series(p)
1652 1652 if end is None:
1653 1653 return len(self.full_series)
1654 1654 return end + 1
1655 1655 return 0
1656 1656
1657 1657 def series_end(self, all_patches=False):
1658 1658 """If all_patches is False, return the index of the next pushable patch
1659 1659 in the series, or the series length. If all_patches is True, return the
1660 1660 index of the first patch past the last applied one.
1661 1661 """
1662 1662 end = 0
1663 1663 def next(start):
1664 1664 if all_patches or start >= len(self.series):
1665 1665 return start
1666 1666 for i in xrange(start, len(self.series)):
1667 1667 p, reason = self.pushable(i)
1668 1668 if p:
1669 1669 break
1670 1670 self.explain_pushable(i)
1671 1671 return i
1672 1672 if self.applied:
1673 1673 p = self.applied[-1].name
1674 1674 try:
1675 1675 end = self.series.index(p)
1676 1676 except ValueError:
1677 1677 return 0
1678 1678 return next(end + 1)
1679 1679 return next(end)
1680 1680
1681 1681 def appliedname(self, index):
1682 1682 pname = self.applied[index].name
1683 1683 if not self.ui.verbose:
1684 1684 p = pname
1685 1685 else:
1686 1686 p = str(self.series.index(pname)) + " " + pname
1687 1687 return p
1688 1688
1689 1689 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1690 1690 force=None, git=False):
1691 1691 def checkseries(patchname):
1692 1692 if patchname in self.series:
1693 1693 raise util.Abort(_('patch %s is already in the series file')
1694 1694 % patchname)
1695 1695 def checkfile(patchname):
1696 1696 if not force and os.path.exists(self.join(patchname)):
1697 1697 raise util.Abort(_('patch "%s" already exists')
1698 1698 % patchname)
1699 1699
1700 1700 if rev:
1701 1701 if files:
1702 1702 raise util.Abort(_('option "-r" not valid when importing '
1703 1703 'files'))
1704 1704 rev = cmdutil.revrange(repo, rev)
1705 1705 rev.sort(reverse=True)
1706 1706 if (len(files) > 1 or len(rev) > 1) and patchname:
1707 1707 raise util.Abort(_('option "-n" not valid when importing multiple '
1708 1708 'patches'))
1709 1709 if rev:
1710 1710 # If mq patches are applied, we can only import revisions
1711 1711 # that form a linear path to qbase.
1712 1712 # Otherwise, they should form a linear path to a head.
1713 1713 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1714 1714 if len(heads) > 1:
1715 1715 raise util.Abort(_('revision %d is the root of more than one '
1716 1716 'branch') % rev[-1])
1717 1717 if self.applied:
1718 1718 base = repo.changelog.node(rev[0])
1719 1719 if base in [n.node for n in self.applied]:
1720 1720 raise util.Abort(_('revision %d is already managed')
1721 1721 % rev[0])
1722 1722 if heads != [self.applied[-1].node]:
1723 1723 raise util.Abort(_('revision %d is not the parent of '
1724 1724 'the queue') % rev[0])
1725 1725 base = repo.changelog.rev(self.applied[0].node)
1726 1726 lastparent = repo.changelog.parentrevs(base)[0]
1727 1727 else:
1728 1728 if heads != [repo.changelog.node(rev[0])]:
1729 1729 raise util.Abort(_('revision %d has unmanaged children')
1730 1730 % rev[0])
1731 1731 lastparent = None
1732 1732
1733 1733 diffopts = self.diffopts({'git': git})
1734 1734 for r in rev:
1735 1735 p1, p2 = repo.changelog.parentrevs(r)
1736 1736 n = repo.changelog.node(r)
1737 1737 if p2 != nullrev:
1738 1738 raise util.Abort(_('cannot import merge revision %d') % r)
1739 1739 if lastparent and lastparent != r:
1740 1740 raise util.Abort(_('revision %d is not the parent of %d')
1741 1741 % (r, lastparent))
1742 1742 lastparent = p1
1743 1743
1744 1744 if not patchname:
1745 1745 patchname = normname('%d.diff' % r)
1746 1746 self.check_reserved_name(patchname)
1747 1747 checkseries(patchname)
1748 1748 checkfile(patchname)
1749 1749 self.full_series.insert(0, patchname)
1750 1750
1751 1751 patchf = self.opener(patchname, "w")
1752 1752 cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
1753 1753 patchf.close()
1754 1754
1755 1755 se = statusentry(n, patchname)
1756 1756 self.applied.insert(0, se)
1757 1757
1758 1758 self.added.append(patchname)
1759 1759 patchname = None
1760 1760 self.parse_series()
1761 1761 self.applied_dirty = 1
1762 1762 self.series_dirty = True
1763 1763
1764 1764 for i, filename in enumerate(files):
1765 1765 if existing:
1766 1766 if filename == '-':
1767 1767 raise util.Abort(_('-e is incompatible with import from -'))
1768 1768 filename = normname(filename)
1769 1769 self.check_reserved_name(filename)
1770 1770 originpath = self.join(filename)
1771 1771 if not os.path.isfile(originpath):
1772 1772 raise util.Abort(_("patch %s does not exist") % filename)
1773 1773
1774 1774 if patchname:
1775 1775 self.check_reserved_name(patchname)
1776 1776 checkfile(patchname)
1777 1777
1778 1778 self.ui.write(_('renaming %s to %s\n')
1779 1779 % (filename, patchname))
1780 1780 util.rename(originpath, self.join(patchname))
1781 1781 else:
1782 1782 patchname = filename
1783 1783
1784 1784 else:
1785 1785 try:
1786 1786 if filename == '-':
1787 1787 if not patchname:
1788 1788 raise util.Abort(
1789 1789 _('need --name to import a patch from -'))
1790 1790 text = sys.stdin.read()
1791 1791 else:
1792 1792 fp = url.open(self.ui, filename)
1793 1793 text = fp.read()
1794 1794 fp.close()
1795 1795 except (OSError, IOError):
1796 1796 raise util.Abort(_("unable to read file %s") % filename)
1797 1797 if not patchname:
1798 1798 patchname = normname(os.path.basename(filename))
1799 1799 self.check_reserved_name(patchname)
1800 1800 checkfile(patchname)
1801 1801 patchf = self.opener(patchname, "w")
1802 1802 patchf.write(text)
1803 1803 patchf.close()
1804 1804 if not force:
1805 1805 checkseries(patchname)
1806 1806 if patchname not in self.series:
1807 1807 index = self.full_series_end() + i
1808 1808 self.full_series[index:index] = [patchname]
1809 1809 self.parse_series()
1810 1810 self.series_dirty = True
1811 1811 self.ui.warn(_("adding %s to series file\n") % patchname)
1812 1812 self.added.append(patchname)
1813 1813 patchname = None
1814 1814
1815 1815 self.removeundo(repo)
1816 1816
1817 1817 def delete(ui, repo, *patches, **opts):
1818 1818 """remove patches from queue
1819 1819
1820 1820 The patches must not be applied, and at least one patch is required. With
1821 1821 -k/--keep, the patch files are preserved in the patch directory.
1822 1822
1823 1823 To stop managing a patch and move it into permanent history,
1824 1824 use the :hg:`qfinish` command."""
1825 1825 q = repo.mq
1826 1826 q.delete(repo, patches, opts)
1827 1827 q.save_dirty()
1828 1828 return 0
1829 1829
1830 1830 def applied(ui, repo, patch=None, **opts):
1831 1831 """print the patches already applied
1832 1832
1833 1833 Returns 0 on success."""
1834 1834
1835 1835 q = repo.mq
1836 1836
1837 1837 if patch:
1838 1838 if patch not in q.series:
1839 1839 raise util.Abort(_("patch %s is not in series file") % patch)
1840 1840 end = q.series.index(patch) + 1
1841 1841 else:
1842 1842 end = q.series_end(True)
1843 1843
1844 1844 if opts.get('last') and not end:
1845 1845 ui.write(_("no patches applied\n"))
1846 1846 return 1
1847 1847 elif opts.get('last') and end == 1:
1848 1848 ui.write(_("only one patch applied\n"))
1849 1849 return 1
1850 1850 elif opts.get('last'):
1851 1851 start = end - 2
1852 1852 end = 1
1853 1853 else:
1854 1854 start = 0
1855 1855
1856 1856 q.qseries(repo, length=end, start=start, status='A',
1857 1857 summary=opts.get('summary'))
1858 1858
1859 1859
1860 1860 def unapplied(ui, repo, patch=None, **opts):
1861 1861 """print the patches not yet applied
1862 1862
1863 1863 Returns 0 on success."""
1864 1864
1865 1865 q = repo.mq
1866 1866 if patch:
1867 1867 if patch not in q.series:
1868 1868 raise util.Abort(_("patch %s is not in series file") % patch)
1869 1869 start = q.series.index(patch) + 1
1870 1870 else:
1871 1871 start = q.series_end(True)
1872 1872
1873 1873 if start == len(q.series) and opts.get('first'):
1874 1874 ui.write(_("all patches applied\n"))
1875 1875 return 1
1876 1876
1877 1877 length = opts.get('first') and 1 or None
1878 1878 q.qseries(repo, start=start, length=length, status='U',
1879 1879 summary=opts.get('summary'))
1880 1880
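# Editor's note (illustrative usage, not part of the original file):
#
#   $ hg qapplied -s          # applied patches plus the first header line of each
#   $ hg qapplied -1          # only the most recently applied patch
#   $ hg qunapplied --first   # only the next patch that would be pushed
#                             # (long flag assumed from opts.get('first') above)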
1881 1881 def qimport(ui, repo, *filename, **opts):
1882 1882 """import a patch
1883 1883
1884 1884 The patch is inserted into the series after the last applied
1885 1885 patch. If no patches have been applied, qimport prepends the patch
1886 1886 to the series.
1887 1887
1888 1888 The patch will have the same name as its source file unless you
1889 1889 give it a new one with -n/--name.
1890 1890
1891 1891 You can register an existing patch inside the patch directory with
1892 1892 the -e/--existing flag.
1893 1893
1894 1894 With -f/--force, an existing patch of the same name will be
1895 1895 overwritten.
1896 1896
1897 1897 An existing changeset may be placed under mq control with -r/--rev
1898 1898 (e.g. qimport --rev tip -n patch will place tip under mq control).
1899 1899 With -g/--git, patches imported with --rev will use the git diff
1900 1900 format. See the diffs help topic for information on why this is
1901 1901 important for preserving rename/copy information and permission
1902 1902 changes. Use :hg:`qfinish` to remove changesets from mq control.
1903 1903
1904 1904 To import a patch from standard input, pass - as the patch file.
1905 1905 When importing from standard input, a patch name must be specified
1906 1906 using the --name flag.
1907 1907
1908 1908 To import an existing patch while renaming it::
1909 1909
1910 1910 hg qimport -e existing-patch -n new-name
1911 1911
1912 1912 Returns 0 if import succeeded.
1913 1913 """
1914 1914 q = repo.mq
1915 1915 try:
1916 1916 q.qimport(repo, filename, patchname=opts.get('name'),
1917 1917 existing=opts.get('existing'), force=opts.get('force'),
1918 1918 rev=opts.get('rev'), git=opts.get('git'))
1919 1919 finally:
1920 1920 q.save_dirty()
1921 1921
1922 1922 if opts.get('push') and not opts.get('rev'):
1923 1923 return q.push(repo, None)
1924 1924 return 0
1925 1925
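# Editor's note (illustrative, not part of the original file): typical ways to
# drive the command above, using options visible in its cmdtable entry below:
#
#   $ hg qimport ../fix.diff -n fix.patch    # copy a patch file into the queue
#   $ hg qimport --rev tip -n tip.patch -g   # place tip under mq control (git diff)
#   $ hg qimport -P hotfix.patch             # import, then immediately qpush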
1926 1926 def qinit(ui, repo, create):
1927 1927 """initialize a new queue repository
1928 1928
1929 1929 This command also creates a series file for ordering patches, and
1930 1930 an mq-specific .hgignore file in the queue repository, to exclude
1931 1931 the status and guards files (these contain mostly transient state).
1932 1932
1933 1933 Returns 0 if initialization succeeded."""
1934 1934 q = repo.mq
1935 1935 r = q.init(repo, create)
1936 1936 q.save_dirty()
1937 1937 if r:
1938 1938 if not os.path.exists(r.wjoin('.hgignore')):
1939 1939 fp = r.wopener('.hgignore', 'w')
1940 1940 fp.write('^\\.hg\n')
1941 1941 fp.write('^\\.mq\n')
1942 1942 fp.write('syntax: glob\n')
1943 1943 fp.write('status\n')
1944 1944 fp.write('guards\n')
1945 1945 fp.close()
1946 1946 if not os.path.exists(r.wjoin('series')):
1947 1947 r.wopener('series', 'w').close()
1948 1948 r[None].add(['.hgignore', 'series'])
1949 1949 commands.add(ui, r)
1950 1950 return 0
1951 1951
1952 1952 def init(ui, repo, **opts):
1953 1953 """init a new queue repository (DEPRECATED)
1954 1954
1955 1955 The queue repository is unversioned by default. If
1956 1956 -c/--create-repo is specified, qinit will create a separate nested
1957 1957 repository for patches (qinit -c may also be run later to convert
1958 1958 an unversioned patch repository into a versioned one). You can use
1959 1959 qcommit to commit changes to this queue repository.
1960 1960
1961 1961 This command is deprecated. Without -c, it's implied by other relevant
1962 1962 commands. With -c, use :hg:`init --mq` instead."""
1963 1963 return qinit(ui, repo, create=opts.get('create_repo'))
1964 1964
1965 1965 def clone(ui, source, dest=None, **opts):
1966 1966 '''clone main and patch repository at same time
1967 1967
1968 1968 If source is local, destination will have no patches applied. If
1969 1969 source is remote, this command cannot check whether patches are
1970 1970 applied in source, so it cannot guarantee that patches are not
1971 1971 applied in destination. If you clone a remote repository, make
1972 1972 sure beforehand that it has no patches applied.
1973 1973
1974 1974 Source patch repository is looked for in <src>/.hg/patches by
1975 1975 default. Use -p <url> to change.
1976 1976
1977 1977 The patch directory must be a nested Mercurial repository, as
1978 1978 would be created by :hg:`init --mq`.
1979 1979
1980 1980 Return 0 on success.
1981 1981 '''
1982 1982 def patchdir(repo):
1983 1983 url = repo.url()
1984 1984 if url.endswith('/'):
1985 1985 url = url[:-1]
1986 1986 return url + '/.hg/patches'
1987 1987 if dest is None:
1988 1988 dest = hg.defaultdest(source)
1989 1989 sr = hg.repository(hg.remoteui(ui, opts), ui.expandpath(source))
1990 1990 if opts.get('patches'):
1991 1991 patchespath = ui.expandpath(opts.get('patches'))
1992 1992 else:
1993 1993 patchespath = patchdir(sr)
1994 1994 try:
1995 1995 hg.repository(ui, patchespath)
1996 1996 except error.RepoError:
1997 1997 raise util.Abort(_('versioned patch repository not found'
1998 1998 ' (see init --mq)'))
1999 1999 qbase, destrev = None, None
2000 2000 if sr.local():
2001 2001 if sr.mq.applied:
2002 2002 qbase = sr.mq.applied[0].node
2003 2003 if not hg.islocal(dest):
2004 2004 heads = set(sr.heads())
2005 2005 destrev = list(heads.difference(sr.heads(qbase)))
2006 2006 destrev.append(sr.changelog.parents(qbase)[0])
2007 2007 elif sr.capable('lookup'):
2008 2008 try:
2009 2009 qbase = sr.lookup('qbase')
2010 2010 except error.RepoError:
2011 2011 pass
2012 2012 ui.note(_('cloning main repository\n'))
2013 2013 sr, dr = hg.clone(ui, sr.url(), dest,
2014 2014 pull=opts.get('pull'),
2015 2015 rev=destrev,
2016 2016 update=False,
2017 2017 stream=opts.get('uncompressed'))
2018 2018 ui.note(_('cloning patch repository\n'))
2019 2019 hg.clone(ui, opts.get('patches') or patchdir(sr), patchdir(dr),
2020 2020 pull=opts.get('pull'), update=not opts.get('noupdate'),
2021 2021 stream=opts.get('uncompressed'))
2022 2022 if dr.local():
2023 2023 if qbase:
2024 2024 ui.note(_('stripping applied patches from destination '
2025 2025 'repository\n'))
2026 2026 dr.mq.strip(dr, [qbase], update=False, backup=None)
2027 2027 if not opts.get('noupdate'):
2028 2028 ui.note(_('updating destination repository\n'))
2029 2029 hg.update(dr, dr.changelog.tip())
2030 2030
2031 2031 def commit(ui, repo, *pats, **opts):
2032 2032 """commit changes in the queue repository (DEPRECATED)
2033 2033
2034 2034 This command is deprecated; use :hg:`commit --mq` instead."""
2035 2035 q = repo.mq
2036 2036 r = q.qrepo()
2037 2037 if not r:
2038 2038 raise util.Abort('no queue repository')
2039 2039 commands.commit(r.ui, r, *pats, **opts)
2040 2040
2041 2041 def series(ui, repo, **opts):
2042 2042 """print the entire series file
2043 2043
2044 2044 Returns 0 on success."""
2045 2045 repo.mq.qseries(repo, missing=opts.get('missing'), summary=opts.get('summary'))
2046 2046 return 0
2047 2047
2048 2048 def top(ui, repo, **opts):
2049 2049 """print the name of the current patch
2050 2050
2051 2051 Returns 0 on success."""
2052 2052 q = repo.mq
2053 2053 t = q.applied and q.series_end(True) or 0
2054 2054 if t:
2055 2055 q.qseries(repo, start=t - 1, length=1, status='A',
2056 2056 summary=opts.get('summary'))
2057 2057 else:
2058 2058 ui.write(_("no patches applied\n"))
2059 2059 return 1
2060 2060
2061 2061 def next(ui, repo, **opts):
2062 2062 """print the name of the next patch
2063 2063
2064 2064 Returns 0 on success."""
2065 2065 q = repo.mq
2066 2066 end = q.series_end()
2067 2067 if end == len(q.series):
2068 2068 ui.write(_("all patches applied\n"))
2069 2069 return 1
2070 2070 q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
2071 2071
2072 2072 def prev(ui, repo, **opts):
2073 2073 """print the name of the previous patch
2074 2074
2075 2075 Returns 0 on success."""
2076 2076 q = repo.mq
2077 2077 l = len(q.applied)
2078 2078 if l == 1:
2079 2079 ui.write(_("only one patch applied\n"))
2080 2080 return 1
2081 2081 if not l:
2082 2082 ui.write(_("no patches applied\n"))
2083 2083 return 1
2084 2084 q.qseries(repo, start=l - 2, length=1, status='A',
2085 2085 summary=opts.get('summary'))
2086 2086
2087 2087 def setupheaderopts(ui, opts):
2088 2088 if not opts.get('user') and opts.get('currentuser'):
2089 2089 opts['user'] = ui.username()
2090 2090 if not opts.get('date') and opts.get('currentdate'):
2091 2091 opts['date'] = "%d %d" % util.makedate()
2092 2092
2093 2093 def new(ui, repo, patch, *args, **opts):
2094 2094 """create a new patch
2095 2095
2096 2096 qnew creates a new patch on top of the currently-applied patch (if
2097 2097 any). The patch will be initialized with any outstanding changes
2098 2098 in the working directory. You may also use -I/--include,
2099 2099 -X/--exclude, and/or a list of files after the patch name to add
2100 2100 only changes to matching files to the new patch, leaving the rest
2101 2101 as uncommitted modifications.
2102 2102
2103 2103 -u/--user and -d/--date can be used to set the (given) user and
2104 2104 date, respectively. -U/--currentuser and -D/--currentdate set user
2105 2105 to current user and date to current date.
2106 2106
2107 2107 -e/--edit, -m/--message or -l/--logfile set the patch header as
2108 2108 well as the commit message. If none is specified, the header is
2109 2109 empty and the commit message is '[mq]: PATCH'.
2110 2110
2111 2111 Use the -g/--git option to keep the patch in the git extended diff
2112 2112 format. Read the diffs help topic for more information on why this
2113 2113 is important for preserving permission changes and copy/rename
2114 2114 information.
2115 2115
2116 2116 Returns 0 on successful creation of a new patch.
2117 2117 """
2118 2118 msg = cmdutil.logmessage(opts)
2119 2119 def getmsg():
2120 2120 return ui.edit(msg, opts.get('user') or ui.username())
2121 2121 q = repo.mq
2122 2122 opts['msg'] = msg
2123 2123 if opts.get('edit'):
2124 2124 opts['msg'] = getmsg
2125 2125 else:
2126 2126 opts['msg'] = msg
2127 2127 setupheaderopts(ui, opts)
2128 2128 q.new(repo, patch, *args, **opts)
2129 2129 q.save_dirty()
2130 2130 return 0
2131 2131
2132 2132 def refresh(ui, repo, *pats, **opts):
2133 2133 """update the current patch
2134 2134
2135 2135 If any file patterns are provided, the refreshed patch will
2136 2136 contain only the modifications that match those patterns; the
2137 2137 remaining modifications will remain in the working directory.
2138 2138
2139 2139 If -s/--short is specified, files currently included in the patch
2140 2140 will be refreshed just like matched files and remain in the patch.
2141 2141
2142 2142 If -e/--edit is specified, Mercurial will start your configured editor for
2143 2143 you to enter a message. In case qrefresh fails, you will find a backup of
2144 2144 your message in ``.hg/last-message.txt``.
2145 2145
2146 2146 hg add/remove/copy/rename work as usual, though you might want to
2147 2147 use git-style patches (-g/--git or [diff] git=1) to track copies
2148 2148 and renames. See the diffs help topic for more information on the
2149 2149 git diff format.
2150 2150
2151 2151 Returns 0 on success.
2152 2152 """
2153 2153 q = repo.mq
2154 2154 message = cmdutil.logmessage(opts)
2155 2155 if opts.get('edit'):
2156 2156 if not q.applied:
2157 2157 ui.write(_("no patches applied\n"))
2158 2158 return 1
2159 2159 if message:
2160 2160 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
2161 2161 patch = q.applied[-1].name
2162 2162 ph = patchheader(q.join(patch), q.plainmode)
2163 2163 message = ui.edit('\n'.join(ph.message), ph.user or ui.username())
2164 2164 # We don't want to lose the patch message if qrefresh fails (issue2062)
2165 2165 msgfile = repo.opener('last-message.txt', 'wb')
2166 2166 msgfile.write(message)
2167 2167 msgfile.close()
2168 2168 setupheaderopts(ui, opts)
2169 2169 ret = q.refresh(repo, pats, msg=message, **opts)
2170 2170 q.save_dirty()
2171 2171 return ret
2172 2172
2173 2173 def diff(ui, repo, *pats, **opts):
2174 2174 """diff of the current patch and subsequent modifications
2175 2175
2176 2176 Shows a diff which includes the current patch as well as any
2177 2177 changes which have been made in the working directory since the
2178 2178 last refresh (thus showing what the current patch would become
2179 2179 after a qrefresh).
2180 2180
2181 2181 Use :hg:`diff` if you only want to see the changes made since the
2182 2182 last qrefresh, or :hg:`export qtip` if you want to see changes
2183 2183 made by the current patch without including changes made since the
2184 2184 qrefresh.
2185 2185
2186 2186 Returns 0 on success.
2187 2187 """
2188 2188 repo.mq.diff(repo, pats, opts)
2189 2189 return 0
2190 2190
2191 2191 def fold(ui, repo, *files, **opts):
2192 2192 """fold the named patches into the current patch
2193 2193
2194 2194 Patches must not yet be applied. Each patch will be successively
2195 2195 applied to the current patch in the order given. If all the
2196 2196 patches apply successfully, the current patch will be refreshed
2197 2197 with the new cumulative patch, and the folded patches will be
2198 2198 deleted. With -k/--keep, the folded patch files will not be
2199 2199 removed afterwards.
2200 2200
2201 2201 The header for each folded patch will be concatenated with the
2202 2202 current patch header, separated by a line of ``* * *``.
2203 2203
2204 2204 Returns 0 on success."""
2205 2205
2206 2206 q = repo.mq
2207 2207
2208 2208 if not files:
2209 2209 raise util.Abort(_('qfold requires at least one patch name'))
2210 2210 if not q.check_toppatch(repo)[0]:
2211 2211 raise util.Abort(_('no patches applied'))
2212 2212 q.check_localchanges(repo)
2213 2213
2214 2214 message = cmdutil.logmessage(opts)
2215 2215 if opts.get('edit'):
2216 2216 if message:
2217 2217 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
2218 2218
2219 2219 parent = q.lookup('qtip')
2220 2220 patches = []
2221 2221 messages = []
2222 2222 for f in files:
2223 2223 p = q.lookup(f)
2224 2224 if p in patches or p == parent:
2225 2225 ui.warn(_('Skipping already folded patch %s\n') % p)
2226 2226 if q.isapplied(p):
2227 2227 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
2228 2228 patches.append(p)
2229 2229
2230 2230 for p in patches:
2231 2231 if not message:
2232 2232 ph = patchheader(q.join(p), q.plainmode)
2233 2233 if ph.message:
2234 2234 messages.append(ph.message)
2235 2235 pf = q.join(p)
2236 2236 (patchsuccess, files, fuzz) = q.patch(repo, pf)
2237 2237 if not patchsuccess:
2238 2238 raise util.Abort(_('error folding patch %s') % p)
2239 2239 cmdutil.updatedir(ui, repo, files)
2240 2240
2241 2241 if not message:
2242 2242 ph = patchheader(q.join(parent), q.plainmode)
2243 2243 message, user = ph.message, ph.user
2244 2244 for msg in messages:
2245 2245 message.append('* * *')
2246 2246 message.extend(msg)
2247 2247 message = '\n'.join(message)
2248 2248
2249 2249 if opts.get('edit'):
2250 2250 message = ui.edit(message, user or ui.username())
2251 2251
2252 2252 diffopts = q.patchopts(q.diffopts(), *patches)
2253 2253 q.refresh(repo, msg=message, git=diffopts.git)
2254 2254 q.delete(repo, patches, opts)
2255 2255 q.save_dirty()
2256 2256
2257 2257 def goto(ui, repo, patch, **opts):
2258 2258 '''push or pop patches until named patch is at top of stack
2259 2259
2260 2260 Returns 0 on success.'''
2261 2261 q = repo.mq
2262 2262 patch = q.lookup(patch)
2263 2263 if q.isapplied(patch):
2264 2264 ret = q.pop(repo, patch, force=opts.get('force'))
2265 2265 else:
2266 2266 ret = q.push(repo, patch, force=opts.get('force'))
2267 2267 q.save_dirty()
2268 2268 return ret
2269 2269
2270 2270 def guard(ui, repo, *args, **opts):
2271 2271 '''set or print guards for a patch
2272 2272
2273 2273 Guards control whether a patch can be pushed. A patch with no
2274 2274 guards is always pushed. A patch with a positive guard ("+foo") is
2275 2275 pushed only if the :hg:`qselect` command has activated it. A patch with
2276 2276 a negative guard ("-foo") is never pushed if the :hg:`qselect` command
2277 2277 has activated it.
2278 2278
2279 2279 With no arguments, print the currently active guards.
2280 2280 With arguments, set guards for the named patch.
2281 2281
2282 2282 .. note::
2283 2283 Specifying negative guards now requires '--'.
2284 2284
2285 2285 To set guards on another patch::
2286 2286
2287 2287 hg qguard other.patch -- +2.6.17 -stable
2288 2288
2289 2289 Returns 0 on success.
2290 2290 '''
2291 2291 def status(idx):
2292 2292 guards = q.series_guards[idx] or ['unguarded']
2293 2293 if q.series[idx] in applied:
2294 2294 state = 'applied'
2295 2295 elif q.pushable(idx)[0]:
2296 2296 state = 'unapplied'
2297 2297 else:
2298 2298 state = 'guarded'
2299 2299 label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
2300 2300 ui.write('%s: ' % ui.label(q.series[idx], label))
2301 2301
2302 2302 for i, guard in enumerate(guards):
2303 2303 if guard.startswith('+'):
2304 2304 ui.write(guard, label='qguard.positive')
2305 2305 elif guard.startswith('-'):
2306 2306 ui.write(guard, label='qguard.negative')
2307 2307 else:
2308 2308 ui.write(guard, label='qguard.unguarded')
2309 2309 if i != len(guards) - 1:
2310 2310 ui.write(' ')
2311 2311 ui.write('\n')
2312 2312 q = repo.mq
2313 2313 applied = set(p.name for p in q.applied)
2314 2314 patch = None
2315 2315 args = list(args)
2316 2316 if opts.get('list'):
2317 2317 if args or opts.get('none'):
2318 2318 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
2319 2319 for i in xrange(len(q.series)):
2320 2320 status(i)
2321 2321 return
2322 2322 if not args or args[0][0:1] in '-+':
2323 2323 if not q.applied:
2324 2324 raise util.Abort(_('no patches applied'))
2325 2325 patch = q.applied[-1].name
2326 2326 if patch is None and args[0][0:1] not in '-+':
2327 2327 patch = args.pop(0)
2328 2328 if patch is None:
2329 2329 raise util.Abort(_('no patch to work with'))
2330 2330 if args or opts.get('none'):
2331 2331 idx = q.find_series(patch)
2332 2332 if idx is None:
2333 2333 raise util.Abort(_('no patch named %s') % patch)
2334 2334 q.set_guards(idx, args)
2335 2335 q.save_dirty()
2336 2336 else:
2337 2337 status(q.series.index(q.lookup(patch)))
2338 2338
2339 2339 def header(ui, repo, patch=None):
2340 2340 """print the header of the topmost or specified patch
2341 2341
2342 2342 Returns 0 on success."""
2343 2343 q = repo.mq
2344 2344
2345 2345 if patch:
2346 2346 patch = q.lookup(patch)
2347 2347 else:
2348 2348 if not q.applied:
2349 2349 ui.write(_('no patches applied\n'))
2350 2350 return 1
2351 2351 patch = q.lookup('qtip')
2352 2352 ph = patchheader(q.join(patch), q.plainmode)
2353 2353
2354 2354 ui.write('\n'.join(ph.message) + '\n')
2355 2355
2356 2356 def lastsavename(path):
2357 2357 (directory, base) = os.path.split(path)
2358 2358 names = os.listdir(directory)
2359 2359 namere = re.compile("%s.([0-9]+)" % base)
2360 2360 maxindex = None
2361 2361 maxname = None
2362 2362 for f in names:
2363 2363 m = namere.match(f)
2364 2364 if m:
2365 2365 index = int(m.group(1))
2366 2366 if maxindex is None or index > maxindex:
2367 2367 maxindex = index
2368 2368 maxname = f
2369 2369 if maxname:
2370 2370 return (os.path.join(directory, maxname), maxindex)
2371 2371 return (None, None)
2372 2372
2373 2373 def savename(path):
2374 2374 (last, index) = lastsavename(path)
2375 2375 if last is None:
2376 2376 index = 0
2377 2377 newpath = path + ".%d" % (index + 1)
2378 2378 return newpath
2379 2379
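# Editor's note (illustrative sketch, not part of the original file): given a
# queue at '.hg/patches' with earlier saves '.hg/patches.1' and '.hg/patches.2'
# on disk, the helpers above yield
#
#   lastsavename('.hg/patches')  ->  ('.hg/patches.2', 2)
#   savename('.hg/patches')      ->  '.hg/patches.3'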
2380 2380 def push(ui, repo, patch=None, **opts):
2381 2381 """push the next patch onto the stack
2382 2382
2383 2383 When -f/--force is applied, all local changes in patched files
2384 2384 will be lost.
2385 2385
2386 2386 Return 0 on success.
2387 2387 """
2388 2388 q = repo.mq
2389 2389 mergeq = None
2390 2390
2391 2391 if opts.get('merge'):
2392 2392 if opts.get('name'):
2393 2393 newpath = repo.join(opts.get('name'))
2394 2394 else:
2395 2395 newpath, i = lastsavename(q.path)
2396 2396 if not newpath:
2397 2397 ui.warn(_("no saved queues found, please use -n\n"))
2398 2398 return 1
2399 2399 mergeq = queue(ui, repo.join(""), newpath)
2400 2400 ui.warn(_("merging with queue at: %s\n") % mergeq.path)
2401 2401 ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
2402 2402 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
2403 2403 exact=opts.get('exact'))
2404 2404 return ret
2405 2405
2406 2406 def pop(ui, repo, patch=None, **opts):
2407 2407 """pop the current patch off the stack
2408 2408
2409 2409 By default, pops off the top of the patch stack. If given a patch
2410 2410 name, keeps popping off patches until the named patch is at the
2411 2411 top of the stack.
2412 2412
2413 2413 Return 0 on success.
2414 2414 """
2415 2415 localupdate = True
2416 2416 if opts.get('name'):
2417 2417 q = queue(ui, repo.join(""), repo.join(opts.get('name')))
2418 2418 ui.warn(_('using patch queue: %s\n') % q.path)
2419 2419 localupdate = False
2420 2420 else:
2421 2421 q = repo.mq
2422 2422 ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
2423 2423 all=opts.get('all'))
2424 2424 q.save_dirty()
2425 2425 return ret
2426 2426
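# Editor's note (illustrative, not part of the original file): the push/pop
# commands above are normally driven as
#
#   $ hg qpush                       # apply the next patch in the series
#   $ hg qpush --move some.patch     # reorder the series, apply only that patch
#   $ hg qpop                        # pop the topmost applied patch
#   $ hg qpop -a                     # pop the whole stack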
2427 2427 def rename(ui, repo, patch, name=None, **opts):
2428 2428 """rename a patch
2429 2429
2430 2430 With one argument, renames the current patch to PATCH1.
2431 2431 With two arguments, renames PATCH1 to PATCH2.
2432 2432
2433 2433 Returns 0 on success."""
2434 2434
2435 2435 q = repo.mq
2436 2436
2437 2437 if not name:
2438 2438 name = patch
2439 2439 patch = None
2440 2440
2441 2441 if patch:
2442 2442 patch = q.lookup(patch)
2443 2443 else:
2444 2444 if not q.applied:
2445 2445 ui.write(_('no patches applied\n'))
2446 2446 return
2447 2447 patch = q.lookup('qtip')
2448 2448 absdest = q.join(name)
2449 2449 if os.path.isdir(absdest):
2450 2450 name = normname(os.path.join(name, os.path.basename(patch)))
2451 2451 absdest = q.join(name)
2452 2452 if os.path.exists(absdest):
2453 2453 raise util.Abort(_('%s already exists') % absdest)
2454 2454
2455 2455 if name in q.series:
2456 2456 raise util.Abort(
2457 2457 _('A patch named %s already exists in the series file') % name)
2458 2458
2459 2459 ui.note(_('renaming %s to %s\n') % (patch, name))
2460 2460 i = q.find_series(patch)
2461 2461 guards = q.guard_re.findall(q.full_series[i])
2462 2462 q.full_series[i] = name + ''.join([' #' + g for g in guards])
2463 2463 q.parse_series()
2464 2464 q.series_dirty = 1
2465 2465
2466 2466 info = q.isapplied(patch)
2467 2467 if info:
2468 2468 q.applied[info[0]] = statusentry(info[1], name)
2469 2469 q.applied_dirty = 1
2470 2470
2471 2471 destdir = os.path.dirname(absdest)
2472 2472 if not os.path.isdir(destdir):
2473 2473 os.makedirs(destdir)
2474 2474 util.rename(q.join(patch), absdest)
2475 2475 r = q.qrepo()
2476 2476 if r and patch in r.dirstate:
2477 2477 wctx = r[None]
2478 2478 wlock = r.wlock()
2479 2479 try:
2480 2480 if r.dirstate[patch] == 'a':
2481 2481 r.dirstate.forget(patch)
2482 2482 r.dirstate.add(name)
2483 2483 else:
2484 2484 if r.dirstate[name] == 'r':
2485 2485 wctx.undelete([name])
2486 2486 wctx.copy(patch, name)
2487 2487 wctx.remove([patch], False)
2488 2488 finally:
2489 2489 wlock.release()
2490 2490
2491 2491 q.save_dirty()
2492 2492
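# Editor's note (illustrative, not part of the original file):
#
#   $ hg qmv old-name.patch new-name.patch   # rename a specific patch
#   $ hg qmv better-name.patch               # rename the topmost applied patch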
2493 2493 def restore(ui, repo, rev, **opts):
2494 2494 """restore the queue state saved by a revision (DEPRECATED)
2495 2495
2496 2496 This command is deprecated, use :hg:`rebase` instead."""
2497 2497 rev = repo.lookup(rev)
2498 2498 q = repo.mq
2499 2499 q.restore(repo, rev, delete=opts.get('delete'),
2500 2500 qupdate=opts.get('update'))
2501 2501 q.save_dirty()
2502 2502 return 0
2503 2503
2504 2504 def save(ui, repo, **opts):
2505 2505 """save current queue state (DEPRECATED)
2506 2506
2507 2507 This command is deprecated, use :hg:`rebase` instead."""
2508 2508 q = repo.mq
2509 2509 message = cmdutil.logmessage(opts)
2510 2510 ret = q.save(repo, msg=message)
2511 2511 if ret:
2512 2512 return ret
2513 2513 q.save_dirty()
2514 2514 if opts.get('copy'):
2515 2515 path = q.path
2516 2516 if opts.get('name'):
2517 2517 newpath = os.path.join(q.basepath, opts.get('name'))
2518 2518 if os.path.exists(newpath):
2519 2519 if not os.path.isdir(newpath):
2520 2520 raise util.Abort(_('destination %s exists and is not '
2521 2521 'a directory') % newpath)
2522 2522 if not opts.get('force'):
2523 2523 raise util.Abort(_('destination %s exists, '
2524 2524 'use -f to force') % newpath)
2525 2525 else:
2526 2526 newpath = savename(path)
2527 2527 ui.warn(_("copy %s to %s\n") % (path, newpath))
2528 2528 util.copyfiles(path, newpath)
2529 2529 if opts.get('empty'):
2530 2530 try:
2531 2531 os.unlink(q.join(q.status_path))
2532 2532 except OSError: # tolerate a missing status file only
2533 2533 pass
2534 2534 return 0
2535 2535
2536 2536 def strip(ui, repo, *revs, **opts):
2537 2537 """strip changesets and all their descendants from the repository
2538 2538
2539 2539 The strip command removes the specified changesets and all their
2540 2540 descendants. If the working directory has uncommitted changes,
2541 2541 the operation is aborted unless the --force flag is supplied.
2542 2542
2543 2543 If a parent of the working directory is stripped, then the working
2544 2544 directory will automatically be updated to the most recent
2545 2545 available ancestor of the stripped parent after the operation
2546 2546 completes.
2547 2547
2548 2548 Any stripped changesets are stored in ``.hg/strip-backup`` as a
2549 2549 bundle (see :hg:`help bundle` and :hg:`help unbundle`). They can
2550 2550 be restored by running :hg:`unbundle .hg/strip-backup/BUNDLE`,
2551 2551 where BUNDLE is the bundle file created by the strip. Note that
2552 2552 the local revision numbers will in general be different after the
2553 2553 restore.
2554 2554
2555 2555 Use the --no-backup option to discard the backup bundle once the
2556 2556 operation completes.
2557 2557
2558 2558 Return 0 on success.
2559 2559 """
2560 2560 backup = 'all'
2561 2561 if opts.get('backup'):
2562 2562 backup = 'strip'
2563 2563 elif opts.get('no_backup') or opts.get('nobackup'):
2564 2564 backup = 'none'
2565 2565
2566 2566 cl = repo.changelog
2567 2567 revs = set(cmdutil.revrange(repo, revs))
2568 2568 if not revs:
2569 2569 raise util.Abort(_('empty revision set'))
2570 2570
2571 2571 descendants = set(cl.descendants(*revs))
2572 2572 strippedrevs = revs.union(descendants)
2573 2573 roots = revs.difference(descendants)
2574 2574
2575 2575 update = False
2576 2576 # if one of the wdir parent is stripped we'll need
2577 2577 # to update away to an earlier revision
2578 2578 for p in repo.dirstate.parents():
2579 2579 if p != nullid and cl.rev(p) in strippedrevs:
2580 2580 update = True
2581 2581 break
2582 2582
2583 2583 rootnodes = set(cl.node(r) for r in roots)
2584 2584
2585 2585 q = repo.mq
2586 2586 if q.applied:
2587 2587 # refresh queue state if we're about to strip
2588 2588 # applied patches
2589 2589 if cl.rev(repo.lookup('qtip')) in strippedrevs:
2590 2590 q.applied_dirty = True
2591 2591 start = 0
2592 2592 end = len(q.applied)
2593 2593 for i, statusentry in enumerate(q.applied):
2594 2594 if statusentry.node in rootnodes:
2595 2595 # if one of the stripped roots is an applied
2596 2596 # patch, only part of the queue is stripped
2597 2597 start = i
2598 2598 break
2599 2599 del q.applied[start:end]
2600 2600 q.save_dirty()
2601 2601
2602 2602 revs = list(rootnodes)
2603 2603 if update and opts.get('keep'):
2604 2604 wlock = repo.wlock()
2605 2605 try:
2606 2606 urev = repo.mq.qparents(repo, revs[0])
2607 2607 repo.dirstate.rebuild(urev, repo[urev].manifest())
2608 2608 repo.dirstate.write()
2609 2609 update = False
2610 2610 finally:
2611 2611 wlock.release()
2612 2612
2613 2613 repo.mq.strip(repo, revs, backup=backup, update=update,
2614 2614 force=opts.get('force'))
2615 2615 return 0
2616 2616
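# Editor's note (illustrative, not part of the original file): common ways to
# invoke the strip command above:
#
#   $ hg strip 1234               # remove rev 1234 and descendants, keep a backup
#   $ hg strip --no-backup 1234   # same, but discard the backup bundle
#   $ hg strip -k 1234            # leave the working copy contents untouched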
2617 2617 def select(ui, repo, *args, **opts):
2618 2618 '''set or print guarded patches to push
2619 2619
2620 2620 Use the :hg:`qguard` command to set or print guards on a patch, then use
2621 2621 qselect to tell mq which guards to use. A patch will be pushed if
2622 2622 it has no guards or any positive guards match the currently
2623 2623 selected guard, but will not be pushed if any negative guards
2624 2624 match the current guard. For example::
2625 2625
2626 2626 qguard foo.patch -- -stable (negative guard)
2627 2627 qguard bar.patch +stable (positive guard)
2628 2628 qselect stable
2629 2629
2630 2630 This activates the "stable" guard. mq will skip foo.patch (because
2631 2631 it has a negative match) but push bar.patch (because it has a
2632 2632 positive match).
2633 2633
2634 2634 With no arguments, prints the currently active guards.
2635 2635 With one argument, sets the active guard.
2636 2636
2637 2637 Use -n/--none to deactivate guards (no other arguments needed).
2638 2638 When no guards are active, patches with positive guards are
2639 2639 skipped and patches with negative guards are pushed.
2640 2640
2641 2641 qselect can change the guards on applied patches. It does not pop
2642 2642 guarded patches by default. Use --pop to pop back to the last
2643 2643 applied patch that is not guarded. Use --reapply (which implies
2644 2644 --pop) to push back to the current patch afterwards, but skip
2645 2645 guarded patches.
2646 2646
2647 2647 Use -s/--series to print a list of all guards in the series file
2648 2648 (no other arguments needed). Use -v for more information.
2649 2649
2650 2650 Returns 0 on success.'''
2651 2651
2652 2652 q = repo.mq
2653 2653 guards = q.active()
2654 2654 if args or opts.get('none'):
2655 2655 old_unapplied = q.unapplied(repo)
2656 2656 old_guarded = [i for i in xrange(len(q.applied)) if
2657 2657 not q.pushable(i)[0]]
2658 2658 q.set_active(args)
2659 2659 q.save_dirty()
2660 2660 if not args:
2661 2661 ui.status(_('guards deactivated\n'))
2662 2662 if not opts.get('pop') and not opts.get('reapply'):
2663 2663 unapplied = q.unapplied(repo)
2664 2664 guarded = [i for i in xrange(len(q.applied))
2665 2665 if not q.pushable(i)[0]]
2666 2666 if len(unapplied) != len(old_unapplied):
2667 2667 ui.status(_('number of unguarded, unapplied patches has '
2668 2668 'changed from %d to %d\n') %
2669 2669 (len(old_unapplied), len(unapplied)))
2670 2670 if len(guarded) != len(old_guarded):
2671 2671 ui.status(_('number of guarded, applied patches has changed '
2672 2672 'from %d to %d\n') %
2673 2673 (len(old_guarded), len(guarded)))
2674 2674 elif opts.get('series'):
2675 2675 guards = {}
2676 2676 noguards = 0
2677 2677 for gs in q.series_guards:
2678 2678 if not gs:
2679 2679 noguards += 1
2680 2680 for g in gs:
2681 2681 guards.setdefault(g, 0)
2682 2682 guards[g] += 1
2683 2683 if ui.verbose:
2684 2684 guards['NONE'] = noguards
2685 2685 guards = guards.items()
2686 2686 guards.sort(key=lambda x: x[0][1:])
2687 2687 if guards:
2688 2688 ui.note(_('guards in series file:\n'))
2689 2689 for guard, count in guards:
2690 2690 ui.note('%2d ' % count)
2691 2691 ui.write(guard, '\n')
2692 2692 else:
2693 2693 ui.note(_('no guards in series file\n'))
2694 2694 else:
2695 2695 if guards:
2696 2696 ui.note(_('active guards:\n'))
2697 2697 for g in guards:
2698 2698 ui.write(g, '\n')
2699 2699 else:
2700 2700 ui.write(_('no active guards\n'))
2701 2701 reapply = opts.get('reapply') and q.applied and q.appliedname(-1)
2702 2702 popped = False
2703 2703 if opts.get('pop') or opts.get('reapply'):
2704 2704 for i in xrange(len(q.applied)):
2705 2705 pushable, reason = q.pushable(i)
2706 2706 if not pushable:
2707 2707 ui.status(_('popping guarded patches\n'))
2708 2708 popped = True
2709 2709 if i == 0:
2710 2710 q.pop(repo, all=True)
2711 2711 else:
2712 2712 q.pop(repo, i - 1)
2713 2713 break
2714 2714 if popped:
2715 2715 try:
2716 2716 if reapply:
2717 2717 ui.status(_('reapplying unguarded patches\n'))
2718 2718 q.push(repo, reapply)
2719 2719 finally:
2720 2720 q.save_dirty()
2721 2721
2722 2722 def finish(ui, repo, *revrange, **opts):
2723 2723 """move applied patches into repository history
2724 2724
2725 2725 Finishes the specified revisions (corresponding to applied
2726 2726 patches) by moving them out of mq control into regular repository
2727 2727 history.
2728 2728
2729 2729 Accepts a revision range or the -a/--applied option. If --applied
2730 2730 is specified, all applied mq revisions are removed from mq
2731 2731 control. Otherwise, the given revisions must be at the base of the
2732 2732 stack of applied patches.
2733 2733
2734 2734 This can be especially useful if your changes have been applied to
2735 2735 an upstream repository, or if you are about to push your changes
2736 2736 to upstream.
2737 2737
2738 2738 Returns 0 on success.
2739 2739 """
2740 2740 if not opts.get('applied') and not revrange:
2741 2741 raise util.Abort(_('no revisions specified'))
2742 2742 elif opts.get('applied'):
2743 2743 revrange = ('qbase::qtip',) + revrange
2744 2744
2745 2745 q = repo.mq
2746 2746 if not q.applied:
2747 2747 ui.status(_('no patches applied\n'))
2748 2748 return 0
2749 2749
2750 2750 revs = cmdutil.revrange(repo, revrange)
2751 2751 q.finish(repo, revs)
2752 2752 q.save_dirty()
2753 2753 return 0
2754 2754
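# Editor's note (illustrative, not part of the original file):
#
#   $ hg qfinish -a               # move every applied mq patch into history
#   $ hg qfinish qbase::qtip      # the explicit revision-range equivalent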
2755 2755 def qqueue(ui, repo, name=None, **opts):
2756 2756 '''manage multiple patch queues
2757 2757
2758 2758 Supports switching between different patch queues, as well as creating
2759 2759 new patch queues and deleting existing ones.
2760 2760
2761 2761 Omitting a queue name or specifying -l/--list will show you the registered
2762 2762 queues - by default the "normal" patches queue is registered. The currently
2763 2763 active queue will be marked with "(active)".
2764 2764
2765 2765 To create a new queue, use -c/--create. The queue is automatically made
2766 2766 active, unless patches from the currently active queue are applied in
2767 2767 the repository; in that case the queue is only created, and switching
2768 2768 to it fails.
2769 2769
2770 2770 To delete an existing queue, use --delete. You cannot delete the currently
2771 2771 active queue.
2772 2772
2773 2773 Returns 0 on success.
2774 2774 '''
2775 2775
2776 2776 q = repo.mq
2777 2777
2778 2778 _defaultqueue = 'patches'
2779 2779 _allqueues = 'patches.queues'
2780 2780 _activequeue = 'patches.queue'
2781 2781
2782 2782 def _getcurrent():
2783 2783 cur = os.path.basename(q.path)
2784 2784 if cur.startswith('patches-'):
2785 2785 cur = cur[8:]
2786 2786 return cur
2787 2787
2788 2788 def _noqueues():
2789 2789 try:
2790 2790 fh = repo.opener(_allqueues, 'r')
2791 2791 fh.close()
2792 2792 except IOError:
2793 2793 return True
2794 2794
2795 2795 return False
2796 2796
2797 2797 def _getqueues():
2798 2798 current = _getcurrent()
2799 2799
2800 2800 try:
2801 2801 fh = repo.opener(_allqueues, 'r')
2802 2802 queues = [queue.strip() for queue in fh if queue.strip()]
2803 2803 fh.close()
2804 2804 if current not in queues:
2805 2805 queues.append(current)
2806 2806 except IOError:
2807 2807 queues = [_defaultqueue]
2808 2808
2809 2809 return sorted(queues)
2810 2810
2811 2811 def _setactive(name):
2812 2812 if q.applied:
2813 2813 raise util.Abort(_('patches applied - cannot set new queue active'))
2814 2814 _setactivenocheck(name)
2815 2815
2816 2816 def _setactivenocheck(name):
2817 2817 fh = repo.opener(_activequeue, 'w')
2818 2818 if name != 'patches':
2819 2819 fh.write(name)
2820 2820 fh.close()
2821 2821
2822 2822 def _addqueue(name):
2823 2823 fh = repo.opener(_allqueues, 'a')
2824 2824 fh.write('%s\n' % (name,))
2825 2825 fh.close()
2826 2826
2827 2827 def _queuedir(name):
2828 2828 if name == 'patches':
2829 2829 return repo.join('patches')
2830 2830 else:
2831 2831 return repo.join('patches-' + name)
2832 2832
2833 2833 def _validname(name):
2834 2834 for n in name:
2835 2835 if n in ':\\/.':
2836 2836 return False
2837 2837 return True
2838 2838
2839 2839 def _delete(name):
2840 2840 if name not in existing:
2841 2841 raise util.Abort(_('cannot delete queue that does not exist'))
2842 2842
2843 2843 current = _getcurrent()
2844 2844
2845 2845 if name == current:
2846 2846 raise util.Abort(_('cannot delete currently active queue'))
2847 2847
2848 2848 fh = repo.opener('patches.queues.new', 'w')
2849 2849 for queue in existing:
2850 2850 if queue == name:
2851 2851 continue
2852 2852 fh.write('%s\n' % (queue,))
2853 2853 fh.close()
2854 2854 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
2855 2855
2856 2856 if not name or opts.get('list'):
2857 2857 current = _getcurrent()
2858 2858 for queue in _getqueues():
2859 2859 ui.write('%s' % (queue,))
2860 2860 if queue == current and not ui.quiet:
2861 2861 ui.write(_(' (active)\n'))
2862 2862 else:
2863 2863 ui.write('\n')
2864 2864 return
2865 2865
2866 2866 if not _validname(name):
2867 2867 raise util.Abort(
2868 2868 _('invalid queue name, may not contain the characters ":\\/."'))
2869 2869
2870 2870 existing = _getqueues()
2871 2871
2872 2872 if opts.get('create'):
2873 2873 if name in existing:
2874 2874 raise util.Abort(_('queue "%s" already exists') % name)
2875 2875 if _noqueues():
2876 2876 _addqueue(_defaultqueue)
2877 2877 _addqueue(name)
2878 2878 _setactive(name)
2879 2879 elif opts.get('rename'):
2880 2880 current = _getcurrent()
2881 2881 if name == current:
2882 2882 raise util.Abort(_('can\'t rename "%s" to its current name') % name)
2883 2883 if name in existing:
2884 2884 raise util.Abort(_('queue "%s" already exists') % name)
2885 2885
2886 2886 olddir = _queuedir(current)
2887 2887 newdir = _queuedir(name)
2888 2888
2889 2889 if os.path.exists(newdir):
2890 2890 raise util.Abort(_('non-queue directory "%s" already exists') %
2891 2891 newdir)
2892 2892
2893 2893 fh = repo.opener('patches.queues.new', 'w')
2894 2894 for queue in existing:
2895 2895 if queue == current:
2896 2896 fh.write('%s\n' % (name,))
2897 2897 if os.path.exists(olddir):
2898 2898 util.rename(olddir, newdir)
2899 2899 else:
2900 2900 fh.write('%s\n' % (queue,))
2901 2901 fh.close()
2902 2902 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
2903 2903 _setactivenocheck(name)
2904 2904 elif opts.get('delete'):
2905 2905 _delete(name)
2906 2906 elif opts.get('purge'):
2907 2907 if name in existing:
2908 2908 _delete(name)
2909 2909 qdir = _queuedir(name)
2910 2910 if os.path.exists(qdir):
2911 2911 shutil.rmtree(qdir)
2912 2912 else:
2913 2913 if name not in existing:
2914 2914 raise util.Abort(_('use --create to create a new queue'))
2915 2915 _setactive(name)
2916 2916
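# Editor's note (illustrative, not part of the original file): a typical
# session with the queue-management command above:
#
#   $ hg qqueue --create feature-x   # create a new queue and switch to it
#   $ hg qqueue --list               # list queues; the active one is marked
#   $ hg qqueue patches              # switch back to the default queue
#   $ hg qqueue --purge feature-x    # drop the queue and its patch directory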
2917 2917 def reposetup(ui, repo):
2918 2918 class mqrepo(repo.__class__):
2919 2919 @util.propertycache
2920 2920 def mq(self):
2921 2921 return queue(self.ui, self.join(""))
2922 2922
2923 2923 def abort_if_wdir_patched(self, errmsg, force=False):
2924 2924 if self.mq.applied and not force:
2925 2925 parents = self.dirstate.parents()
2926 2926 patches = [s.node for s in self.mq.applied]
2927 2927 if parents[0] in patches or parents[1] in patches:
2928 2928 raise util.Abort(errmsg)
2929 2929
2930 2930 def commit(self, text="", user=None, date=None, match=None,
2931 2931 force=False, editor=False, extra={}):
2932 2932 self.abort_if_wdir_patched(
2933 2933 _('cannot commit over an applied mq patch'),
2934 2934 force)
2935 2935
2936 2936 return super(mqrepo, self).commit(text, user, date, match, force,
2937 2937 editor, extra)
2938 2938
2939 2939 def checkpush(self, force, revs):
2940 2940 if self.mq.applied and not force:
2941 2941 haspatches = True
2942 2942 if revs:
2943 2943 # Assume applied patches have no non-patch descendants
2944 2944 # and are not on remote already. If they appear in the
2945 2945 # set of resolved 'revs', bail out.
2946 2946 applied = set(e.node for e in self.mq.applied)
2947 2947 haspatches = bool([n for n in revs if n in applied])
2948 2948 if haspatches:
2949 2949 raise util.Abort(_('source has mq patches applied'))
2950 2950 super(mqrepo, self).checkpush(force, revs)
2951 2951
2952 2952 def _findtags(self):
2953 2953 '''augment tags from base class with patch tags'''
2954 2954 result = super(mqrepo, self)._findtags()
2955 2955
2956 2956 q = self.mq
2957 2957 if not q.applied:
2958 2958 return result
2959 2959
2960 2960 mqtags = [(patch.node, patch.name) for patch in q.applied]
2961 2961
2962 2962 try:
2963 2963 r = self.changelog.rev(mqtags[-1][0])
2964 2964 except error.RepoLookupError:
2965 2965 self.ui.warn(_('mq status file refers to unknown node %s\n')
2966 2966 % short(mqtags[-1][0]))
2967 2967 return result
2968 2968
2969 2969 mqtags.append((mqtags[-1][0], 'qtip'))
2970 2970 mqtags.append((mqtags[0][0], 'qbase'))
2971 2971 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2972 2972 tags = result[0]
2973 2973 for patch in mqtags:
2974 2974 if patch[1] in tags:
2975 2975 self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
2976 2976 % patch[1])
2977 2977 else:
2978 2978 tags[patch[1]] = patch[0]
2979 2979
2980 2980 return result
2981 2981
2982 2982 def _branchtags(self, partial, lrev):
2983 2983 q = self.mq
2984 2984 if not q.applied:
2985 2985 return super(mqrepo, self)._branchtags(partial, lrev)
2986 2986
2987 2987 cl = self.changelog
2988 2988 qbasenode = q.applied[0].node
2989 2989 try:
2990 2990 qbase = cl.rev(qbasenode)
2991 2991 except error.LookupError:
2992 2992 self.ui.warn(_('mq status file refers to unknown node %s\n')
2993 2993 % short(qbasenode))
2994 2994 return super(mqrepo, self)._branchtags(partial, lrev)
2995 2995
2996 2996 start = lrev + 1
2997 2997 if start < qbase:
2998 2998 # update the cache (excluding the patches) and save it
2999 2999 ctxgen = (self[r] for r in xrange(lrev + 1, qbase))
3000 3000 self._updatebranchcache(partial, ctxgen)
3001 3001 self._writebranchcache(partial, cl.node(qbase - 1), qbase - 1)
3002 3002 start = qbase
3003 3003 # if start = qbase, the cache is as updated as it should be.
3004 3004 # if start > qbase, the cache includes (part of) the patches.
3005 3005 # we might as well use it, but we won't save it.
3006 3006
3007 3007 # update the cache up to the tip
3008 3008 ctxgen = (self[r] for r in xrange(start, len(cl)))
3009 3009 self._updatebranchcache(partial, ctxgen)
3010 3010
3011 3011 return partial
3012 3012
3013 3013 if repo.local():
3014 3014 repo.__class__ = mqrepo
3015 3015
3016 3016 def mqimport(orig, ui, repo, *args, **kwargs):
3017 3017 if (hasattr(repo, 'abort_if_wdir_patched')
3018 3018 and not kwargs.get('no_commit', False)):
3019 3019 repo.abort_if_wdir_patched(_('cannot import over an applied patch'),
3020 3020 kwargs.get('force'))
3021 3021 return orig(ui, repo, *args, **kwargs)
3022 3022
3023 3023 def mqinit(orig, ui, *args, **kwargs):
3024 3024 mq = kwargs.pop('mq', None)
3025 3025
3026 3026 if not mq:
3027 3027 return orig(ui, *args, **kwargs)
3028 3028
3029 3029 if args:
3030 3030 repopath = args[0]
3031 3031 if not hg.islocal(repopath):
3032 3032 raise util.Abort(_('only a local queue repository '
3033 3033 'may be initialized'))
3034 3034 else:
3035 3035 repopath = cmdutil.findrepo(os.getcwd())
3036 3036 if not repopath:
3037 3037 raise util.Abort(_('there is no Mercurial repository here '
3038 3038 '(.hg not found)'))
3039 3039 repo = hg.repository(ui, repopath)
3040 3040 return qinit(ui, repo, True)
3041 3041
3042 3042 def mqcommand(orig, ui, repo, *args, **kwargs):
3043 3043 """Add --mq option to operate on patch repository instead of main"""
3044 3044
3045 3045 # some commands do not like getting unknown options
3046 3046 mq = kwargs.pop('mq', None)
3047 3047
3048 3048 if not mq:
3049 3049 return orig(ui, repo, *args, **kwargs)
3050 3050
3051 3051 q = repo.mq
3052 3052 r = q.qrepo()
3053 3053 if not r:
3054 3054 raise util.Abort(_('no queue repository'))
3055 3055 return orig(r.ui, r, *args, **kwargs)
3056 3056
3057 3057 def summary(orig, ui, repo, *args, **kwargs):
3058 3058 r = orig(ui, repo, *args, **kwargs)
3059 3059 q = repo.mq
3060 3060 m = []
3061 3061 a, u = len(q.applied), len(q.unapplied(repo))
3062 3062 if a:
3063 3063 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3064 3064 if u:
3065 3065 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3066 3066 if m:
3067 3067 ui.write("mq: %s\n" % ', '.join(m))
3068 3068 else:
3069 3069 ui.note(_("mq: (empty queue)\n"))
3070 3070 return r
3071 3071
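# Editor's note (illustrative, not part of the original file): with three
# patches applied and two unapplied, the wrapper above adds a line like
#
#   mq: 3 applied, 2 unapplied
#
# to 'hg summary' output; with an empty queue it prints "mq: (empty queue)"
# only in verbose mode.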
3072 3072 def uisetup(ui):
3073 3073 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3074 3074
3075 3075 extensions.wrapcommand(commands.table, 'import', mqimport)
3076 3076 extensions.wrapcommand(commands.table, 'summary', summary)
3077 3077
3078 3078 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3079 3079 entry[1].extend(mqopt)
3080 3080
3081 3081 nowrap = set(commands.norepo.split(" ") + ['qrecord'])
3082 3082
3083 3083 def dotable(cmdtable):
3084 3084 for cmd in cmdtable.keys():
3085 3085 cmd = cmdutil.parsealiases(cmd)[0]
3086 3086 if cmd in nowrap:
3087 3087 continue
3088 3088 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3089 3089 entry[1].extend(mqopt)
3090 3090
3091 3091 dotable(commands.table)
3092 3092
3093 3093 for extname, extmodule in extensions.extensions():
3094 3094 if extmodule.__file__ != __file__:
3095 3095 dotable(getattr(extmodule, 'cmdtable', {}))
3096 3096
3097 3097 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
3098 3098
3099 3099 cmdtable = {
3100 3100 "qapplied":
3101 3101 (applied,
3102 3102 [('1', 'last', None, _('show only the last patch'))] + seriesopts,
3103 3103 _('hg qapplied [-1] [-s] [PATCH]')),
3104 3104 "qclone":
3105 3105 (clone,
3106 3106 [('', 'pull', None, _('use pull protocol to copy metadata')),
3107 3107 ('U', 'noupdate', None, _('do not update the new working directories')),
3108 3108 ('', 'uncompressed', None,
3109 3109 _('use uncompressed transfer (fast over LAN)')),
3110 3110 ('p', 'patches', '',
3111 3111 _('location of source patch repository'), _('REPO')),
3112 3112 ] + commands.remoteopts,
3113 3113 _('hg qclone [OPTION]... SOURCE [DEST]')),
3114 3114 "qcommit|qci":
3115 3115 (commit,
3116 3116 commands.table["^commit|ci"][1],
3117 3117 _('hg qcommit [OPTION]... [FILE]...')),
3118 3118 "^qdiff":
3119 3119 (diff,
3120 3120 commands.diffopts + commands.diffopts2 + commands.walkopts,
3121 3121 _('hg qdiff [OPTION]... [FILE]...')),
3122 3122 "qdelete|qremove|qrm":
3123 3123 (delete,
3124 3124 [('k', 'keep', None, _('keep patch file')),
3125 3125 ('r', 'rev', [],
3126 3126 _('stop managing a revision (DEPRECATED)'), _('REV'))],
3127 3127 _('hg qdelete [-k] [PATCH]...')),
3128 3128 'qfold':
3129 3129 (fold,
3130 3130 [('e', 'edit', None, _('edit patch header')),
3131 3131 ('k', 'keep', None, _('keep folded patch files')),
3132 3132 ] + commands.commitopts,
3133 3133 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
3134 3134 'qgoto':
3135 3135 (goto,
3136 3136 [('f', 'force', None, _('overwrite any local changes'))],
3137 3137 _('hg qgoto [OPTION]... PATCH')),
3138 3138 'qguard':
3139 3139 (guard,
3140 3140 [('l', 'list', None, _('list all patches and guards')),
3141 3141 ('n', 'none', None, _('drop all guards'))],
3142 3142 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]')),
3143 3143 'qheader': (header, [], _('hg qheader [PATCH]')),
3144 3144 "qimport":
3145 3145 (qimport,
3146 3146 [('e', 'existing', None, _('import file in patch directory')),
3147 3147 ('n', 'name', '',
3148 3148 _('name of patch file'), _('NAME')),
3149 3149 ('f', 'force', None, _('overwrite existing files')),
3150 3150 ('r', 'rev', [],
3151 3151 _('place existing revisions under mq control'), _('REV')),
3152 3152 ('g', 'git', None, _('use git extended diff format')),
3153 3153 ('P', 'push', None, _('qpush after importing'))],
3154 3154 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...')),
3155 3155 "^qinit":
3156 3156 (init,
3157 3157 [('c', 'create-repo', None, _('create queue repository'))],
3158 3158 _('hg qinit [-c]')),
3159 3159 "^qnew":
3160 3160 (new,
3161 3161 [('e', 'edit', None, _('edit commit message')),
3162 3162 ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
3163 3163 ('g', 'git', None, _('use git extended diff format')),
3164 3164 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
3165 3165 ('u', 'user', '',
3166 3166 _('add "From: <USER>" to patch'), _('USER')),
3167 3167 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
3168 3168 ('d', 'date', '',
3169 3169 _('add "Date: <DATE>" to patch'), _('DATE'))
3170 3170 ] + commands.walkopts + commands.commitopts,
3171 3171 _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...')),
3172 3172 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
3173 3173 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
3174 3174 "^qpop":
3175 3175 (pop,
3176 3176 [('a', 'all', None, _('pop all patches')),
3177 3177 ('n', 'name', '',
3178 3178 _('queue name to pop (DEPRECATED)'), _('NAME')),
3179 3179 ('f', 'force', None, _('forget any local changes to patched files'))],
3180 3180 _('hg qpop [-a] [-f] [PATCH | INDEX]')),
3181 3181 "^qpush":
3182 3182 (push,
3183 3183 [('f', 'force', None, _('apply on top of local changes')),
3184 3184 ('e', 'exact', None, _('apply the target patch to its recorded parent')),
3185 3185 ('l', 'list', None, _('list patch name in commit text')),
3186 3186 ('a', 'all', None, _('apply all patches')),
3187 3187 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
3188 3188 ('n', 'name', '',
3189 3189 _('merge queue name (DEPRECATED)'), _('NAME')),
3190 3190 ('', 'move', None, _('reorder patch series and apply only the patch'))],
3191 3191 _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]')),
3192 3192 "^qrefresh":
3193 3193 (refresh,
3194 3194 [('e', 'edit', None, _('edit commit message')),
3195 3195 ('g', 'git', None, _('use git extended diff format')),
3196 3196 ('s', 'short', None,
3197 3197 _('refresh only files already in the patch and specified files')),
3198 3198 ('U', 'currentuser', None,
3199 3199 _('add/update author field in patch with current user')),
3200 3200 ('u', 'user', '',
3201 3201 _('add/update author field in patch with given user'), _('USER')),
3202 3202 ('D', 'currentdate', None,
3203 3203 _('add/update date field in patch with current date')),
3204 3204 ('d', 'date', '',
3205 3205 _('add/update date field in patch with given date'), _('DATE'))
3206 3206 ] + commands.walkopts + commands.commitopts,
3207 3207 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
3208 3208 'qrename|qmv':
3209 3209 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
3210 3210 "qrestore":
3211 3211 (restore,
3212 3212 [('d', 'delete', None, _('delete save entry')),
3213 3213 ('u', 'update', None, _('update queue working directory'))],
3214 3214 _('hg qrestore [-d] [-u] REV')),
3215 3215 "qsave":
3216 3216 (save,
3217 3217 [('c', 'copy', None, _('copy patch directory')),
3218 3218 ('n', 'name', '',
3219 3219 _('copy directory name'), _('NAME')),
3220 3220 ('e', 'empty', None, _('clear queue status file')),
3221 3221 ('f', 'force', None, _('force copy'))] + commands.commitopts,
3222 3222 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
3223 3223 "qselect":
3224 3224 (select,
3225 3225 [('n', 'none', None, _('disable all guards')),
3226 3226 ('s', 'series', None, _('list all guards in series file')),
3227 3227 ('', 'pop', None, _('pop to before first guarded applied patch')),
3228 3228 ('', 'reapply', None, _('pop, then reapply patches'))],
3229 3229 _('hg qselect [OPTION]... [GUARD]...')),
3230 3230 "qseries":
3231 3231 (series,
3232 3232 [('m', 'missing', None, _('print patches not in series')),
3233 3233 ] + seriesopts,
3234 3234 _('hg qseries [-ms]')),
3235 3235 "strip":
3236 3236 (strip,
3237 3237 [('f', 'force', None, _('force removal of changesets even if the '
3238 3238 'working directory has uncommitted changes')),
3239 3239 ('b', 'backup', None, _('bundle only changesets with local revision'
3240 3240 ' number greater than REV which are not'
3241 3241 ' descendants of REV (DEPRECATED)')),
3242 3242 ('n', 'no-backup', None, _('no backups')),
3243 3243 ('', 'nobackup', None, _('no backups (DEPRECATED)')),
3244 3244 ('k', 'keep', None, _("do not modify working copy during strip"))],
3245 3245 _('hg strip [-k] [-f] [-n] REV...')),
3246 3246 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
3247 3247 "qunapplied":
3248 3248 (unapplied,
3249 3249 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
3250 3250 _('hg qunapplied [-1] [-s] [PATCH]')),
3251 3251 "qfinish":
3252 3252 (finish,
3253 3253 [('a', 'applied', None, _('finish all applied changesets'))],
3254 3254 _('hg qfinish [-a] [REV]...')),
3255 3255 'qqueue':
3256 3256 (qqueue,
3257 3257 [
3258 3258 ('l', 'list', False, _('list all available queues')),
3259 3259 ('c', 'create', False, _('create new queue')),
3260 3260 ('', 'rename', False, _('rename active queue')),
3261 3261 ('', 'delete', False, _('delete reference to queue')),
3262 3262 ('', 'purge', False, _('delete queue, and remove patch dir')),
3263 3263 ],
3264 3264 _('[OPTION] [QUEUE]')),
3265 3265 }
3266 3266
3267 3267 colortable = {'qguard.negative': 'red',
3268 3268 'qguard.positive': 'yellow',
3269 3269 'qguard.unguarded': 'green',
3270 3270 'qseries.applied': 'blue bold underline',
3271 3271 'qseries.guarded': 'black bold',
3272 3272 'qseries.missing': 'red bold',
3273 3273 'qseries.unapplied': 'black bold'}
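
For context on the registration table above: each cmdtable entry maps a command name (aliases separated by '|', a leading '^' marking it for the short help listing) to a (function, options, synopsis) tuple, and each option is a (shortname, longname, default, help[, valuename]) tuple. The sketch below shows a minimal extension in this same pre-decorator style; the extension, its command name and its option are illustrative only and are not part of this changeset.

# minimal illustrative extension in the same cmdtable style (hypothetical)
from mercurial.i18n import _

def hello(ui, repo, *pats, **opts):
    """print a greeting and the repository root"""
    name = opts.get('name') or 'world'
    ui.write("hello %s: %s\n" % (name, repo.root))

cmdtable = {
    # a leading "^" would list the command in the short help
    "hello|hi":
        (hello,
         [('n', 'name', '', _('name to greet'), _('NAME'))],
         _('hg hello [-n NAME]')),
}
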
@@ -1,647 +1,647 @@
1 1 # Patch transplanting extension for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to transplant changesets from another branch
9 9
10 10 This extension allows you to transplant patches from another branch.
11 11
12 12 Transplanted patches are recorded in .hg/transplant/transplants, as a
13 13 map from a changeset hash to its hash in the source repository.
14 14 '''
15 15
16 16 from mercurial.i18n import _
17 17 import os, tempfile
18 18 from mercurial import bundlerepo, cmdutil, hg, merge, match
19 from mercurial import patch, revlog, util, error
19 from mercurial import patch, revlog, scmutil, util, error
20 20 from mercurial import revset, templatekw
21 21
22 22 class transplantentry(object):
23 23 def __init__(self, lnode, rnode):
24 24 self.lnode = lnode
25 25 self.rnode = rnode
26 26
27 27 class transplants(object):
28 28 def __init__(self, path=None, transplantfile=None, opener=None):
29 29 self.path = path
30 30 self.transplantfile = transplantfile
31 31 self.opener = opener
32 32
33 33 if not opener:
34 self.opener = util.opener(self.path)
34 self.opener = scmutil.opener(self.path)
35 35 self.transplants = {}
36 36 self.dirty = False
37 37 self.read()
38 38
39 39 def read(self):
40 40 abspath = os.path.join(self.path, self.transplantfile)
41 41 if self.transplantfile and os.path.exists(abspath):
42 42 for line in self.opener(self.transplantfile).read().splitlines():
43 43 lnode, rnode = map(revlog.bin, line.split(':'))
44 44 list = self.transplants.setdefault(rnode, [])
45 45 list.append(transplantentry(lnode, rnode))
46 46
47 47 def write(self):
48 48 if self.dirty and self.transplantfile:
49 49 if not os.path.isdir(self.path):
50 50 os.mkdir(self.path)
51 51 fp = self.opener(self.transplantfile, 'w')
52 52 for list in self.transplants.itervalues():
53 53 for t in list:
54 54 l, r = map(revlog.hex, (t.lnode, t.rnode))
55 55 fp.write(l + ':' + r + '\n')
56 56 fp.close()
57 57 self.dirty = False
58 58
59 59 def get(self, rnode):
60 60 return self.transplants.get(rnode) or []
61 61
62 62 def set(self, lnode, rnode):
63 63 list = self.transplants.setdefault(rnode, [])
64 64 list.append(transplantentry(lnode, rnode))
65 65 self.dirty = True
66 66
67 67 def remove(self, transplant):
68 68 list = self.transplants.get(transplant.rnode)
69 69 if list:
70 70 del list[list.index(transplant)]
71 71 self.dirty = True
72 72
73 73 class transplanter(object):
74 74 def __init__(self, ui, repo):
75 75 self.ui = ui
76 76 self.path = repo.join('transplant')
77 self.opener = util.opener(self.path)
77 self.opener = scmutil.opener(self.path)
78 78 self.transplants = transplants(self.path, 'transplants',
79 79 opener=self.opener)
80 80
81 81 def applied(self, repo, node, parent):
82 82 '''returns True if a node is already an ancestor of parent
83 83 or has already been transplanted'''
84 84 if hasnode(repo, node):
85 85 if node in repo.changelog.reachable(parent, stop=node):
86 86 return True
87 87 for t in self.transplants.get(node):
88 88 # it might have been stripped
89 89 if not hasnode(repo, t.lnode):
90 90 self.transplants.remove(t)
91 91 return False
92 92 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
93 93 return True
94 94 return False
95 95
96 96 def apply(self, repo, source, revmap, merges, opts={}):
97 97 '''apply the revisions in revmap one by one in revision order'''
98 98 revs = sorted(revmap)
99 99 p1, p2 = repo.dirstate.parents()
100 100 pulls = []
101 101 diffopts = patch.diffopts(self.ui, opts)
102 102 diffopts.git = True
103 103
104 104 lock = wlock = None
105 105 try:
106 106 wlock = repo.wlock()
107 107 lock = repo.lock()
108 108 for rev in revs:
109 109 node = revmap[rev]
110 110 revstr = '%s:%s' % (rev, revlog.short(node))
111 111
112 112 if self.applied(repo, node, p1):
113 113 self.ui.warn(_('skipping already applied revision %s\n') %
114 114 revstr)
115 115 continue
116 116
117 117 parents = source.changelog.parents(node)
118 118 if not opts.get('filter'):
119 119 # If the changeset parent is the same as the
120 120 # wdir's parent, just pull it.
121 121 if parents[0] == p1:
122 122 pulls.append(node)
123 123 p1 = node
124 124 continue
125 125 if pulls:
126 126 if source != repo:
127 127 repo.pull(source, heads=pulls)
128 128 merge.update(repo, pulls[-1], False, False, None)
129 129 p1, p2 = repo.dirstate.parents()
130 130 pulls = []
131 131
132 132 domerge = False
133 133 if node in merges:
134 134 # pulling all the merge revs at once would mean we
135 135 # couldn't transplant after the latest even if
136 136 # transplants before them fail.
137 137 domerge = True
138 138 if not hasnode(repo, node):
139 139 repo.pull(source, heads=[node])
140 140
141 141 if parents[1] != revlog.nullid:
142 142 self.ui.note(_('skipping merge changeset %s:%s\n')
143 143 % (rev, revlog.short(node)))
144 144 patchfile = None
145 145 else:
146 146 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
147 147 fp = os.fdopen(fd, 'w')
148 148 gen = patch.diff(source, parents[0], node, opts=diffopts)
149 149 for chunk in gen:
150 150 fp.write(chunk)
151 151 fp.close()
152 152
153 153 del revmap[rev]
154 154 if patchfile or domerge:
155 155 try:
156 156 n = self.applyone(repo, node,
157 157 source.changelog.read(node),
158 158 patchfile, merge=domerge,
159 159 log=opts.get('log'),
160 160 filter=opts.get('filter'))
161 161 if n and domerge:
162 162 self.ui.status(_('%s merged at %s\n') % (revstr,
163 163 revlog.short(n)))
164 164 elif n:
165 165 self.ui.status(_('%s transplanted to %s\n')
166 166 % (revlog.short(node),
167 167 revlog.short(n)))
168 168 finally:
169 169 if patchfile:
170 170 os.unlink(patchfile)
171 171 if pulls:
172 172 repo.pull(source, heads=pulls)
173 173 merge.update(repo, pulls[-1], False, False, None)
174 174 finally:
175 175 self.saveseries(revmap, merges)
176 176 self.transplants.write()
177 177 lock.release()
178 178 wlock.release()
179 179
180 180 def filter(self, filter, node, changelog, patchfile):
181 181 '''arbitrarily rewrite changeset before applying it'''
182 182
183 183 self.ui.status(_('filtering %s\n') % patchfile)
184 184 user, date, msg = (changelog[1], changelog[2], changelog[4])
185 185 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
186 186 fp = os.fdopen(fd, 'w')
187 187 fp.write("# HG changeset patch\n")
188 188 fp.write("# User %s\n" % user)
189 189 fp.write("# Date %d %d\n" % date)
190 190 fp.write(msg + '\n')
191 191 fp.close()
192 192
193 193 try:
194 194 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
195 195 util.shellquote(patchfile)),
196 196 environ={'HGUSER': changelog[1],
197 197 'HGREVISION': revlog.hex(node),
198 198 },
199 199 onerr=util.Abort, errprefix=_('filter failed'))
200 200 user, date, msg = self.parselog(file(headerfile))[1:4]
201 201 finally:
202 202 os.unlink(headerfile)
203 203
204 204 return (user, date, msg)
205 205
206 206 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
207 207 filter=None):
208 208 '''apply the patch in patchfile to the repository as a transplant'''
209 209 (manifest, user, (time, timezone), files, message) = cl[:5]
210 210 date = "%d %d" % (time, timezone)
211 211 extra = {'transplant_source': node}
212 212 if filter:
213 213 (user, date, message) = self.filter(filter, node, cl, patchfile)
214 214
215 215 if log:
216 216 # we don't translate messages inserted into commits
217 217 message += '\n(transplanted from %s)' % revlog.hex(node)
218 218
219 219 self.ui.status(_('applying %s\n') % revlog.short(node))
220 220 self.ui.note('%s %s\n%s\n' % (user, date, message))
221 221
222 222 if not patchfile and not merge:
223 223 raise util.Abort(_('can only omit patchfile if merging'))
224 224 if patchfile:
225 225 try:
226 226 files = {}
227 227 try:
228 228 patch.patch(patchfile, self.ui, cwd=repo.root,
229 229 files=files, eolmode=None)
230 230 if not files:
231 231 self.ui.warn(_('%s: empty changeset')
232 232 % revlog.hex(node))
233 233 return None
234 234 finally:
235 235 files = cmdutil.updatedir(self.ui, repo, files)
236 236 except Exception, inst:
237 237 seriespath = os.path.join(self.path, 'series')
238 238 if os.path.exists(seriespath):
239 239 os.unlink(seriespath)
240 240 p1 = repo.dirstate.p1()
241 241 p2 = node
242 242 self.log(user, date, message, p1, p2, merge=merge)
243 243 self.ui.write(str(inst) + '\n')
244 244 raise util.Abort(_('fix up the merge and run '
245 245 'hg transplant --continue'))
246 246 else:
247 247 files = None
248 248 if merge:
249 249 p1, p2 = repo.dirstate.parents()
250 250 repo.dirstate.setparents(p1, node)
251 251 m = match.always(repo.root, '')
252 252 else:
253 253 m = match.exact(repo.root, '', files)
254 254
255 255 n = repo.commit(message, user, date, extra=extra, match=m)
256 256 if not n:
257 257 # Crash here to prevent an unclear crash later, in
258 258 # transplants.write(). This can happen if patch.patch()
259 259 # does nothing but claims success or if repo.status() fails
260 260 # to report changes done by patch.patch(). These both
261 261 # appear to be bugs in other parts of Mercurial, but dying
262 262 # here, as soon as we can detect the problem, is preferable
263 263 # to silently dropping changesets on the floor.
264 264 raise RuntimeError('nothing committed after transplant')
265 265 if not merge:
266 266 self.transplants.set(n, node)
267 267
268 268 return n
269 269
270 270 def resume(self, repo, source, opts=None):
271 271 '''recover last transaction and apply remaining changesets'''
272 272 if os.path.exists(os.path.join(self.path, 'journal')):
273 273 n, node = self.recover(repo)
274 274 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
275 275 revlog.short(n)))
276 276 seriespath = os.path.join(self.path, 'series')
277 277 if not os.path.exists(seriespath):
278 278 self.transplants.write()
279 279 return
280 280 nodes, merges = self.readseries()
281 281 revmap = {}
282 282 for n in nodes:
283 283 revmap[source.changelog.rev(n)] = n
284 284 os.unlink(seriespath)
285 285
286 286 self.apply(repo, source, revmap, merges, opts)
287 287
288 288 def recover(self, repo):
289 289 '''commit working directory using journal metadata'''
290 290 node, user, date, message, parents = self.readlog()
291 291 merge = len(parents) == 2
292 292
293 293 if not user or not date or not message or not parents[0]:
294 294 raise util.Abort(_('transplant log file is corrupt'))
295 295
296 296 extra = {'transplant_source': node}
297 297 wlock = repo.wlock()
298 298 try:
299 299 p1, p2 = repo.dirstate.parents()
300 300 if p1 != parents[0]:
301 301 raise util.Abort(
302 302 _('working dir not at transplant parent %s') %
303 303 revlog.hex(parents[0]))
304 304 if merge:
305 305 repo.dirstate.setparents(p1, parents[1])
306 306 n = repo.commit(message, user, date, extra=extra)
307 307 if not n:
308 308 raise util.Abort(_('commit failed'))
309 309 if not merge:
310 310 self.transplants.set(n, node)
311 311 self.unlog()
312 312
313 313 return n, node
314 314 finally:
315 315 wlock.release()
316 316
317 317 def readseries(self):
318 318 nodes = []
319 319 merges = []
320 320 cur = nodes
321 321 for line in self.opener('series').read().splitlines():
322 322 if line.startswith('# Merges'):
323 323 cur = merges
324 324 continue
325 325 cur.append(revlog.bin(line))
326 326
327 327 return (nodes, merges)
328 328
329 329 def saveseries(self, revmap, merges):
330 330 if not revmap:
331 331 return
332 332
333 333 if not os.path.isdir(self.path):
334 334 os.mkdir(self.path)
335 335 series = self.opener('series', 'w')
336 336 for rev in sorted(revmap):
337 337 series.write(revlog.hex(revmap[rev]) + '\n')
338 338 if merges:
339 339 series.write('# Merges\n')
340 340 for m in merges:
341 341 series.write(revlog.hex(m) + '\n')
342 342 series.close()
343 343
344 344 def parselog(self, fp):
345 345 parents = []
346 346 message = []
347 347 node = revlog.nullid
348 348 inmsg = False
349 349 user = None
350 350 date = None
351 351 for line in fp.read().splitlines():
352 352 if inmsg:
353 353 message.append(line)
354 354 elif line.startswith('# User '):
355 355 user = line[7:]
356 356 elif line.startswith('# Date '):
357 357 date = line[7:]
358 358 elif line.startswith('# Node ID '):
359 359 node = revlog.bin(line[10:])
360 360 elif line.startswith('# Parent '):
361 361 parents.append(revlog.bin(line[9:]))
362 362 elif not line.startswith('# '):
363 363 inmsg = True
364 364 message.append(line)
365 365 if None in (user, date):
366 366 raise util.Abort(_("filter corrupted changeset (no user or date)"))
367 367 return (node, user, date, '\n'.join(message), parents)
368 368
369 369 def log(self, user, date, message, p1, p2, merge=False):
370 370 '''journal changelog metadata for later recover'''
371 371
372 372 if not os.path.isdir(self.path):
373 373 os.mkdir(self.path)
374 374 fp = self.opener('journal', 'w')
375 375 fp.write('# User %s\n' % user)
376 376 fp.write('# Date %s\n' % date)
377 377 fp.write('# Node ID %s\n' % revlog.hex(p2))
378 378 fp.write('# Parent ' + revlog.hex(p1) + '\n')
379 379 if merge:
380 380 fp.write('# Parent ' + revlog.hex(p2) + '\n')
381 381 fp.write(message.rstrip() + '\n')
382 382 fp.close()
383 383
384 384 def readlog(self):
385 385 return self.parselog(self.opener('journal'))
386 386
387 387 def unlog(self):
388 388 '''remove changelog journal'''
389 389 absdst = os.path.join(self.path, 'journal')
390 390 if os.path.exists(absdst):
391 391 os.unlink(absdst)
392 392
393 393 def transplantfilter(self, repo, source, root):
394 394 def matchfn(node):
395 395 if self.applied(repo, node, root):
396 396 return False
397 397 if source.changelog.parents(node)[1] != revlog.nullid:
398 398 return False
399 399 extra = source.changelog.read(node)[5]
400 400 cnode = extra.get('transplant_source')
401 401 if cnode and self.applied(repo, cnode, root):
402 402 return False
403 403 return True
404 404
405 405 return matchfn
406 406
407 407 def hasnode(repo, node):
408 408 try:
409 409 return repo.changelog.rev(node) is not None
410 410 except error.RevlogError:
411 411 return False
412 412
413 413 def browserevs(ui, repo, nodes, opts):
414 414 '''interactively transplant changesets'''
415 415 def browsehelp(ui):
416 416 ui.write(_('y: transplant this changeset\n'
417 417 'n: skip this changeset\n'
418 418 'm: merge at this changeset\n'
419 419 'p: show patch\n'
420 420 'c: commit selected changesets\n'
421 421 'q: cancel transplant\n'
422 422 '?: show this help\n'))
423 423
424 424 displayer = cmdutil.show_changeset(ui, repo, opts)
425 425 transplants = []
426 426 merges = []
427 427 for node in nodes:
428 428 displayer.show(repo[node])
429 429 action = None
430 430 while not action:
431 431 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
432 432 if action == '?':
433 433 browsehelp(ui)
434 434 action = None
435 435 elif action == 'p':
436 436 parent = repo.changelog.parents(node)[0]
437 437 for chunk in patch.diff(repo, parent, node):
438 438 ui.write(chunk)
439 439 action = None
440 440 elif action not in ('y', 'n', 'm', 'c', 'q'):
441 441 ui.write(_('no such option\n'))
442 442 action = None
443 443 if action == 'y':
444 444 transplants.append(node)
445 445 elif action == 'm':
446 446 merges.append(node)
447 447 elif action == 'c':
448 448 break
449 449 elif action == 'q':
450 450 transplants = ()
451 451 merges = ()
452 452 break
453 453 displayer.close()
454 454 return (transplants, merges)
455 455
456 456 def transplant(ui, repo, *revs, **opts):
457 457 '''transplant changesets from another branch
458 458
459 459 Selected changesets will be applied on top of the current working
460 460 directory with the log of the original changeset. The changesets
461 461 are copied and will thus appear twice in the history. Use the
462 462 rebase extension instead if you want to move a whole branch of
463 463 unpublished changesets.
464 464
465 465 If --log is specified, log messages will have a comment appended
466 466 of the form::
467 467
468 468 (transplanted from CHANGESETHASH)
469 469
470 470 You can rewrite the changelog message with the --filter option.
471 471 Its argument will be invoked with the current changelog message as
472 472 $1 and the patch as $2.
473 473
474 474 If --source/-s is specified, selects changesets from the named
475 475 repository. If --branch/-b is specified, selects changesets from
476 476 the branch holding the named revision, up to that revision. If
477 477 --all/-a is specified, all changesets on the branch will be
478 478 transplanted, otherwise you will be prompted to select the
479 479 changesets you want.
480 480
481 481 :hg:`transplant --branch REVISION --all` will transplant the
482 482 selected branch (up to the named revision) onto your current
483 483 working directory.
484 484
485 485 You can optionally mark selected transplanted changesets as merge
486 486 changesets. You will not be prompted to transplant any ancestors
487 487 of a merged transplant, and you can merge descendants of them
488 488 normally instead of transplanting them.
489 489
490 490 If no merges or revisions are provided, :hg:`transplant` will
491 491 start an interactive changeset browser.
492 492
493 493 If a changeset application fails, you can fix the merge by hand
494 494 and then resume where you left off by calling :hg:`transplant
495 495 --continue/-c`.
496 496 '''
497 497 def incwalk(repo, incoming, branches, match=util.always):
498 498 if not branches:
499 499 branches = None
500 500 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
501 501 if match(node):
502 502 yield node
503 503
504 504 def transplantwalk(repo, root, branches, match=util.always):
505 505 if not branches:
506 506 branches = repo.heads()
507 507 ancestors = []
508 508 for branch in branches:
509 509 ancestors.append(repo.changelog.ancestor(root, branch))
510 510 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
511 511 if match(node):
512 512 yield node
513 513
514 514 def checkopts(opts, revs):
515 515 if opts.get('continue'):
516 516 if opts.get('branch') or opts.get('all') or opts.get('merge'):
517 517 raise util.Abort(_('--continue is incompatible with '
518 518 'branch, all or merge'))
519 519 return
520 520 if not (opts.get('source') or revs or
521 521 opts.get('merge') or opts.get('branch')):
522 522 raise util.Abort(_('no source URL, branch tag or revision '
523 523 'list provided'))
524 524 if opts.get('all'):
525 525 if not opts.get('branch'):
526 526 raise util.Abort(_('--all requires a branch revision'))
527 527 if revs:
528 528 raise util.Abort(_('--all is incompatible with a '
529 529 'revision list'))
530 530
531 531 checkopts(opts, revs)
532 532
533 533 if not opts.get('log'):
534 534 opts['log'] = ui.config('transplant', 'log')
535 535 if not opts.get('filter'):
536 536 opts['filter'] = ui.config('transplant', 'filter')
537 537
538 538 tp = transplanter(ui, repo)
539 539
540 540 p1, p2 = repo.dirstate.parents()
541 541 if len(repo) > 0 and p1 == revlog.nullid:
542 542 raise util.Abort(_('no revision checked out'))
543 543 if not opts.get('continue'):
544 544 if p2 != revlog.nullid:
545 545 raise util.Abort(_('outstanding uncommitted merges'))
546 546 m, a, r, d = repo.status()[:4]
547 547 if m or a or r or d:
548 548 raise util.Abort(_('outstanding local changes'))
549 549
550 550 bundle = None
551 551 source = opts.get('source')
552 552 if source:
553 553 sourcerepo = ui.expandpath(source)
554 554 source = hg.repository(ui, sourcerepo)
555 555 source, common, incoming, bundle = bundlerepo.getremotechanges(ui, repo,
556 556 source, force=True)
557 557 else:
558 558 source = repo
559 559
560 560 try:
561 561 if opts.get('continue'):
562 562 tp.resume(repo, source, opts)
563 563 return
564 564
565 565 tf = tp.transplantfilter(repo, source, p1)
566 566 if opts.get('prune'):
567 567 prune = [source.lookup(r)
568 568 for r in cmdutil.revrange(source, opts.get('prune'))]
569 569 matchfn = lambda x: tf(x) and x not in prune
570 570 else:
571 571 matchfn = tf
572 572 branches = map(source.lookup, opts.get('branch', ()))
573 573 merges = map(source.lookup, opts.get('merge', ()))
574 574 revmap = {}
575 575 if revs:
576 576 for r in cmdutil.revrange(source, revs):
577 577 revmap[int(r)] = source.lookup(r)
578 578 elif opts.get('all') or not merges:
579 579 if source != repo:
580 580 alltransplants = incwalk(source, incoming, branches,
581 581 match=matchfn)
582 582 else:
583 583 alltransplants = transplantwalk(source, p1, branches,
584 584 match=matchfn)
585 585 if opts.get('all'):
586 586 revs = alltransplants
587 587 else:
588 588 revs, newmerges = browserevs(ui, source, alltransplants, opts)
589 589 merges.extend(newmerges)
590 590 for r in revs:
591 591 revmap[source.changelog.rev(r)] = r
592 592 for r in merges:
593 593 revmap[source.changelog.rev(r)] = r
594 594
595 595 tp.apply(repo, source, revmap, merges, opts)
596 596 finally:
597 597 if bundle:
598 598 source.close()
599 599 os.unlink(bundle)
600 600
601 601 def revsettransplanted(repo, subset, x):
602 602 """``transplanted(set)``
603 603 Transplanted changesets in set.
604 604 """
605 605 if x:
606 606 s = revset.getset(repo, subset, x)
607 607 else:
608 608 s = subset
609 609 cs = set()
610 610 for r in xrange(0, len(repo)):
611 611 if repo[r].extra().get('transplant_source'):
612 612 cs.add(r)
613 613 return [r for r in s if r in cs]
614 614
615 615 def kwtransplanted(repo, ctx, **args):
616 616 """:transplanted: String. The node identifier of the transplanted
617 617 changeset if any."""
618 618 n = ctx.extra().get('transplant_source')
619 619 return n and revlog.hex(n) or ''
620 620
621 621 def extsetup(ui):
622 622 revset.symbols['transplanted'] = revsettransplanted
623 623 templatekw.keywords['transplanted'] = kwtransplanted
624 624
625 625 cmdtable = {
626 626 "transplant":
627 627 (transplant,
628 628 [('s', 'source', '',
629 629 _('pull patches from REPO'), _('REPO')),
630 630 ('b', 'branch', [],
631 631 _('pull patches from branch BRANCH'), _('BRANCH')),
632 632 ('a', 'all', None, _('pull all changesets up to BRANCH')),
633 633 ('p', 'prune', [],
634 634 _('skip over REV'), _('REV')),
635 635 ('m', 'merge', [],
636 636 _('merge at REV'), _('REV')),
637 637 ('', 'log', None, _('append transplant info to log message')),
638 638 ('c', 'continue', None, _('continue last transplant session '
639 639 'after repair')),
640 640 ('', 'filter', '',
641 641 _('filter changesets through command'), _('CMD'))],
642 642 _('hg transplant [-s REPO] [-b BRANCH [-a]] [-p REV] '
643 643 '[-m REV] [REV]...'))
644 644 }
645 645
646 646 # tell hggettext to extract docstrings from these functions:
647 647 i18nfunctions = [revsettransplanted, kwtransplanted]
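
The substantive change in this file is mechanical: the opener for the .hg/transplant directory now comes from scmutil instead of util. Below is a minimal sketch of that opener pattern, assuming the 1.8-era call signature visible in the hunks above (a base-relative file opener returning ordinary file objects); the demo directory and file contents are made up.

# illustrative use of the relocated opener, mirroring the calls made by
# transplants.write() and transplanter.readlog() above; the path is hypothetical
import os
from mercurial import scmutil

base = '/tmp/transplant-demo'
if not os.path.isdir(base):      # as in the code above, the caller creates the base dir
    os.mkdir(base)

opener = scmutil.opener(base)    # previously util.opener(base)
fp = opener('journal', 'w')      # opens <base>/journal for writing
fp.write('# User test\n')
fp.close()
print opener('journal').read()   # read it back through the same opener
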
@@ -1,284 +1,284 @@
1 1 # archival.py - revision archival for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 from node import hex
10 10 import cmdutil
11 import util, encoding
11 import scmutil, util, encoding
12 12 import cStringIO, os, tarfile, time, zipfile
13 13 import zlib, gzip
14 14
15 15 def tidyprefix(dest, kind, prefix):
16 16 '''choose prefix to use for names in archive. make sure prefix is
17 17 safe for consumers.'''
18 18
19 19 if prefix:
20 20 prefix = util.normpath(prefix)
21 21 else:
22 22 if not isinstance(dest, str):
23 23 raise ValueError('dest must be string if no prefix')
24 24 prefix = os.path.basename(dest)
25 25 lower = prefix.lower()
26 26 for sfx in exts.get(kind, []):
27 27 if lower.endswith(sfx):
28 28 prefix = prefix[:-len(sfx)]
29 29 break
30 30 lpfx = os.path.normpath(util.localpath(prefix))
31 31 prefix = util.pconvert(lpfx)
32 32 if not prefix.endswith('/'):
33 33 prefix += '/'
34 34 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
35 35 raise util.Abort(_('archive prefix contains illegal components'))
36 36 return prefix
37 37
38 38 exts = {
39 39 'tar': ['.tar'],
40 40 'tbz2': ['.tbz2', '.tar.bz2'],
41 41 'tgz': ['.tgz', '.tar.gz'],
42 42 'zip': ['.zip'],
43 43 }
44 44
45 45 def guesskind(dest):
46 46 for kind, extensions in exts.iteritems():
47 47 if util.any(dest.endswith(ext) for ext in extensions):
48 48 return kind
49 49 return None
50 50
51 51
52 52 class tarit(object):
53 53 '''write archive to tar file or stream. can write uncompressed,
54 54 or compress with gzip or bzip2.'''
55 55
56 56 class GzipFileWithTime(gzip.GzipFile):
57 57
58 58 def __init__(self, *args, **kw):
59 59 timestamp = None
60 60 if 'timestamp' in kw:
61 61 timestamp = kw.pop('timestamp')
62 62 if timestamp is None:
63 63 self.timestamp = time.time()
64 64 else:
65 65 self.timestamp = timestamp
66 66 gzip.GzipFile.__init__(self, *args, **kw)
67 67
68 68 def _write_gzip_header(self):
69 69 self.fileobj.write('\037\213') # magic header
70 70 self.fileobj.write('\010') # compression method
71 71 # Python 2.6 deprecates self.filename
72 72 fname = getattr(self, 'name', None) or self.filename
73 73 if fname and fname.endswith('.gz'):
74 74 fname = fname[:-3]
75 75 flags = 0
76 76 if fname:
77 77 flags = gzip.FNAME
78 78 self.fileobj.write(chr(flags))
79 79 gzip.write32u(self.fileobj, long(self.timestamp))
80 80 self.fileobj.write('\002')
81 81 self.fileobj.write('\377')
82 82 if fname:
83 83 self.fileobj.write(fname + '\000')
84 84
85 85 def __init__(self, dest, mtime, kind=''):
86 86 self.mtime = mtime
87 87 self.fileobj = None
88 88
89 89 def taropen(name, mode, fileobj=None):
90 90 if kind == 'gz':
91 91 mode = mode[0]
92 92 if not fileobj:
93 93 fileobj = open(name, mode + 'b')
94 94 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
95 95 zlib.Z_BEST_COMPRESSION,
96 96 fileobj, timestamp=mtime)
97 97 self.fileobj = gzfileobj
98 98 return tarfile.TarFile.taropen(name, mode, gzfileobj)
99 99 else:
100 100 self.fileobj = fileobj
101 101 return tarfile.open(name, mode + kind, fileobj)
102 102
103 103 if isinstance(dest, str):
104 104 self.z = taropen(dest, mode='w:')
105 105 else:
106 106 # Python 2.5-2.5.1 have a regression that requires a name arg
107 107 self.z = taropen(name='', mode='w|', fileobj=dest)
108 108
109 109 def addfile(self, name, mode, islink, data):
110 110 i = tarfile.TarInfo(name)
111 111 i.mtime = self.mtime
112 112 i.size = len(data)
113 113 if islink:
114 114 i.type = tarfile.SYMTYPE
115 115 i.mode = 0777
116 116 i.linkname = data
117 117 data = None
118 118 i.size = 0
119 119 else:
120 120 i.mode = mode
121 121 data = cStringIO.StringIO(data)
122 122 self.z.addfile(i, data)
123 123
124 124 def done(self):
125 125 self.z.close()
126 126 if self.fileobj:
127 127 self.fileobj.close()
128 128
129 129 class tellable(object):
130 130 '''provide tell method for zipfile.ZipFile when writing to http
131 131 response file object.'''
132 132
133 133 def __init__(self, fp):
134 134 self.fp = fp
135 135 self.offset = 0
136 136
137 137 def __getattr__(self, key):
138 138 return getattr(self.fp, key)
139 139
140 140 def write(self, s):
141 141 self.fp.write(s)
142 142 self.offset += len(s)
143 143
144 144 def tell(self):
145 145 return self.offset
146 146
147 147 class zipit(object):
148 148 '''write archive to zip file or stream. can write uncompressed,
149 149 or compressed with deflate.'''
150 150
151 151 def __init__(self, dest, mtime, compress=True):
152 152 if not isinstance(dest, str):
153 153 try:
154 154 dest.tell()
155 155 except (AttributeError, IOError):
156 156 dest = tellable(dest)
157 157 self.z = zipfile.ZipFile(dest, 'w',
158 158 compress and zipfile.ZIP_DEFLATED or
159 159 zipfile.ZIP_STORED)
160 160
161 161 # Python's zipfile module emits deprecation warnings if we try
162 162 # to store files with a date before 1980.
163 163 epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
164 164 if mtime < epoch:
165 165 mtime = epoch
166 166
167 167 self.date_time = time.gmtime(mtime)[:6]
168 168
169 169 def addfile(self, name, mode, islink, data):
170 170 i = zipfile.ZipInfo(name, self.date_time)
171 171 i.compress_type = self.z.compression
172 172 # unzip will not honor unix file modes unless file creator is
173 173 # set to unix (id 3).
174 174 i.create_system = 3
175 175 ftype = 0x8000 # UNX_IFREG in unzip source code
176 176 if islink:
177 177 mode = 0777
178 178 ftype = 0xa000 # UNX_IFLNK in unzip source code
179 179 i.external_attr = (mode | ftype) << 16L
180 180 self.z.writestr(i, data)
181 181
182 182 def done(self):
183 183 self.z.close()
184 184
185 185 class fileit(object):
186 186 '''write archive as files in directory.'''
187 187
188 188 def __init__(self, name, mtime):
189 189 self.basedir = name
190 self.opener = util.opener(self.basedir)
190 self.opener = scmutil.opener(self.basedir)
191 191
192 192 def addfile(self, name, mode, islink, data):
193 193 if islink:
194 194 self.opener.symlink(data, name)
195 195 return
196 196 f = self.opener(name, "w", atomictemp=True)
197 197 f.write(data)
198 198 f.rename()
199 199 destfile = os.path.join(self.basedir, name)
200 200 os.chmod(destfile, mode)
201 201
202 202 def done(self):
203 203 pass
204 204
205 205 archivers = {
206 206 'files': fileit,
207 207 'tar': tarit,
208 208 'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
209 209 'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
210 210 'uzip': lambda name, mtime: zipit(name, mtime, False),
211 211 'zip': zipit,
212 212 }
213 213
214 214 def archive(repo, dest, node, kind, decode=True, matchfn=None,
215 215 prefix=None, mtime=None, subrepos=False):
216 216 '''create archive of repo as it was at node.
217 217
218 218 dest can be name of directory, name of archive file, or file
219 219 object to write archive to.
220 220
221 221 kind is type of archive to create.
222 222
223 223 decode tells whether to put files through decode filters from
224 224 hgrc.
225 225
226 226 matchfn is function to filter names of files to write to archive.
227 227
228 228 prefix is name of path to put before every archive member.'''
229 229
230 230 if kind == 'files':
231 231 if prefix:
232 232 raise util.Abort(_('cannot give prefix when archiving to files'))
233 233 else:
234 234 prefix = tidyprefix(dest, kind, prefix)
235 235
236 236 def write(name, mode, islink, getdata):
237 237 if matchfn and not matchfn(name):
238 238 return
239 239 data = getdata()
240 240 if decode:
241 241 data = repo.wwritedata(name, data)
242 242 archiver.addfile(prefix + name, mode, islink, data)
243 243
244 244 if kind not in archivers:
245 245 raise util.Abort(_("unknown archive type '%s'") % kind)
246 246
247 247 ctx = repo[node]
248 248 archiver = archivers[kind](dest, mtime or ctx.date()[0])
249 249
250 250 if repo.ui.configbool("ui", "archivemeta", True):
251 251 def metadata():
252 252 base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
253 253 repo[0].hex(), hex(node), encoding.fromlocal(ctx.branch()))
254 254
255 255 tags = ''.join('tag: %s\n' % t for t in ctx.tags()
256 256 if repo.tagtype(t) == 'global')
257 257 if not tags:
258 258 repo.ui.pushbuffer()
259 259 opts = {'template': '{latesttag}\n{latesttagdistance}',
260 260 'style': '', 'patch': None, 'git': None}
261 261 cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
262 262 ltags, dist = repo.ui.popbuffer().split('\n')
263 263 tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
264 264 tags += 'latesttagdistance: %s\n' % dist
265 265
266 266 return base + tags
267 267
268 268 write('.hg_archival.txt', 0644, False, metadata)
269 269
270 270 total = len(ctx.manifest())
271 271 repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
272 272 for i, f in enumerate(ctx):
273 273 ff = ctx.flags(f)
274 274 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data)
275 275 repo.ui.progress(_('archiving'), i + 1, item=f,
276 276 unit=_('files'), total=total)
277 277 repo.ui.progress(_('archiving'), None)
278 278
279 279 if subrepos:
280 280 for subpath in ctx.substate:
281 281 sub = ctx.sub(subpath)
282 282 sub.archive(repo.ui, archiver, prefix)
283 283
284 284 archiver.done()
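
With the fileit writer now built on scmutil.opener, the module's entry point is unchanged: archive() picks a writer class from the archivers table and drives it through addfile()/done(). The sketch below shows a programmatic call using the archive() signature shown above; the repository path and output name are hypothetical.

# illustrative programmatic call into archival.archive(); the repo path is made up
from mercurial import ui as uimod, hg
from mercurial import archival

u = uimod.ui()
repo = hg.repository(u, '/path/to/repo')
node = repo['tip'].node()
# 'tgz' selects tarit(name, mtime, 'gz') from the archivers table above
archival.archive(repo, 'snapshot.tar.gz', node, 'tgz', prefix='snapshot')
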
@@ -1,4899 +1,4900 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _, gettext
11 11 import os, re, sys, difflib, time, tempfile
12 import hg, util, revlog, extensions, copies, error, bookmarks
12 import hg, scmutil, util, revlog, extensions, copies, error, bookmarks
13 13 import patch, help, mdiff, url, encoding, templatekw, discovery
14 14 import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server
15 15 import merge as mergemod
16 16 import minirst, revset, templatefilters
17 17 import dagparser
18 18
19 19 # Commands start here, listed alphabetically
20 20
21 21 def add(ui, repo, *pats, **opts):
22 22 """add the specified files on the next commit
23 23
24 24 Schedule files to be version controlled and added to the
25 25 repository.
26 26
27 27 The files will be added to the repository at the next commit. To
28 28 undo an add before that, see :hg:`forget`.
29 29
30 30 If no names are given, add all files to the repository.
31 31
32 32 .. container:: verbose
33 33
34 34 An example showing how new (unknown) files are added
35 35 automatically by :hg:`add`::
36 36
37 37 $ ls
38 38 foo.c
39 39 $ hg status
40 40 ? foo.c
41 41 $ hg add
42 42 adding foo.c
43 43 $ hg status
44 44 A foo.c
45 45
46 46 Returns 0 if all files are successfully added.
47 47 """
48 48
49 49 m = cmdutil.match(repo, pats, opts)
50 50 rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
51 51 opts.get('subrepos'), prefix="")
52 52 return rejected and 1 or 0
53 53
54 54 def addremove(ui, repo, *pats, **opts):
55 55 """add all new files, delete all missing files
56 56
57 57 Add all new files and remove all missing files from the
58 58 repository.
59 59
60 60 New files are ignored if they match any of the patterns in
61 61 ``.hgignore``. As with add, these changes take effect at the next
62 62 commit.
63 63
64 64 Use the -s/--similarity option to detect renamed files. With a
65 65 parameter greater than 0, this compares every removed file with
66 66 every added file and records those similar enough as renames. This
67 67 option takes a percentage between 0 (disabled) and 100 (files must
68 68 be identical) as its parameter. Detecting renamed files this way
69 69 can be expensive. After using this option, :hg:`status -C` can be
70 70 used to check which files were identified as moved or renamed.
71 71
72 72 Returns 0 if all files are successfully added.
73 73 """
74 74 try:
75 75 sim = float(opts.get('similarity') or 100)
76 76 except ValueError:
77 77 raise util.Abort(_('similarity must be a number'))
78 78 if sim < 0 or sim > 100:
79 79 raise util.Abort(_('similarity must be between 0 and 100'))
80 80 return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
81 81
82 82 def annotate(ui, repo, *pats, **opts):
83 83 """show changeset information by line for each file
84 84
85 85 List changes in files, showing the revision id responsible for
86 86 each line
87 87
88 88 This command is useful for discovering when a change was made and
89 89 by whom.
90 90
91 91 Without the -a/--text option, annotate will avoid processing files
92 92 it detects as binary. With -a, annotate will annotate the file
93 93 anyway, although the results will probably be neither useful
94 94 nor desirable.
95 95
96 96 Returns 0 on success.
97 97 """
98 98 if opts.get('follow'):
99 99 # --follow is deprecated and now just an alias for -f/--file
100 100 # to mimic the behavior of Mercurial before version 1.5
101 101 opts['file'] = 1
102 102
103 103 datefunc = ui.quiet and util.shortdate or util.datestr
104 104 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
105 105
106 106 if not pats:
107 107 raise util.Abort(_('at least one filename or pattern is required'))
108 108
109 109 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
110 110 ('number', lambda x: str(x[0].rev())),
111 111 ('changeset', lambda x: short(x[0].node())),
112 112 ('date', getdate),
113 113 ('file', lambda x: x[0].path()),
114 114 ]
115 115
116 116 if (not opts.get('user') and not opts.get('changeset')
117 117 and not opts.get('date') and not opts.get('file')):
118 118 opts['number'] = 1
119 119
120 120 linenumber = opts.get('line_number') is not None
121 121 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
122 122 raise util.Abort(_('at least one of -n/-c is required for -l'))
123 123
124 124 funcmap = [func for op, func in opmap if opts.get(op)]
125 125 if linenumber:
126 126 lastfunc = funcmap[-1]
127 127 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
128 128
129 129 def bad(x, y):
130 130 raise util.Abort("%s: %s" % (x, y))
131 131
132 132 ctx = cmdutil.revsingle(repo, opts.get('rev'))
133 133 m = cmdutil.match(repo, pats, opts)
134 134 m.bad = bad
135 135 follow = not opts.get('no_follow')
136 136 for abs in ctx.walk(m):
137 137 fctx = ctx[abs]
138 138 if not opts.get('text') and util.binary(fctx.data()):
139 139 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
140 140 continue
141 141
142 142 lines = fctx.annotate(follow=follow, linenumber=linenumber)
143 143 pieces = []
144 144
145 145 for f in funcmap:
146 146 l = [f(n) for n, dummy in lines]
147 147 if l:
148 148 sized = [(x, encoding.colwidth(x)) for x in l]
149 149 ml = max([w for x, w in sized])
150 150 pieces.append(["%s%s" % (' ' * (ml - w), x) for x, w in sized])
151 151
152 152 if pieces:
153 153 for p, l in zip(zip(*pieces), lines):
154 154 ui.write("%s: %s" % (" ".join(p), l[1]))
155 155
156 156 def archive(ui, repo, dest, **opts):
157 157 '''create an unversioned archive of a repository revision
158 158
159 159 By default, the revision used is the parent of the working
160 160 directory; use -r/--rev to specify a different revision.
161 161
162 162 The archive type is automatically detected based on file
163 163 extension (or override using -t/--type).
164 164
165 165 Valid types are:
166 166
167 167 :``files``: a directory full of files (default)
168 168 :``tar``: tar archive, uncompressed
169 169 :``tbz2``: tar archive, compressed using bzip2
170 170 :``tgz``: tar archive, compressed using gzip
171 171 :``uzip``: zip archive, uncompressed
172 172 :``zip``: zip archive, compressed using deflate
173 173
174 174 The exact name of the destination archive or directory is given
175 175 using a format string; see :hg:`help export` for details.
176 176
177 177 Each member added to an archive file has a directory prefix
178 178 prepended. Use -p/--prefix to specify a format string for the
179 179 prefix. The default is the basename of the archive, with suffixes
180 180 removed.
181 181
182 182 Returns 0 on success.
183 183 '''
184 184
185 185 ctx = cmdutil.revsingle(repo, opts.get('rev'))
186 186 if not ctx:
187 187 raise util.Abort(_('no working directory: please specify a revision'))
188 188 node = ctx.node()
189 189 dest = cmdutil.make_filename(repo, dest, node)
190 190 if os.path.realpath(dest) == repo.root:
191 191 raise util.Abort(_('repository root cannot be destination'))
192 192
193 193 kind = opts.get('type') or archival.guesskind(dest) or 'files'
194 194 prefix = opts.get('prefix')
195 195
196 196 if dest == '-':
197 197 if kind == 'files':
198 198 raise util.Abort(_('cannot archive plain files to stdout'))
199 199 dest = sys.stdout
200 200 if not prefix:
201 201 prefix = os.path.basename(repo.root) + '-%h'
202 202
203 203 prefix = cmdutil.make_filename(repo, prefix, node)
204 204 matchfn = cmdutil.match(repo, [], opts)
205 205 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
206 206 matchfn, prefix, subrepos=opts.get('subrepos'))
207 207
208 208 def backout(ui, repo, node=None, rev=None, **opts):
209 209 '''reverse effect of earlier changeset
210 210
211 211 Prepare a new changeset with the effect of REV undone in the
212 212 current working directory.
213 213
214 214 If REV is the parent of the working directory, then this new changeset
215 215 is committed automatically. Otherwise, hg needs to merge the
216 216 changes and the merged result is left uncommitted.
217 217
218 218 By default, the pending changeset will have one parent,
219 219 maintaining a linear history. With --merge, the pending changeset
220 220 will instead have two parents: the old parent of the working
221 221 directory and a new child of REV that simply undoes REV.
222 222
223 223 Before version 1.7, the behavior without --merge was equivalent to
224 224 specifying --merge followed by :hg:`update --clean .` to cancel
225 225 the merge and leave the child of REV as a head to be merged
226 226 separately.
227 227
228 228 See :hg:`help dates` for a list of formats valid for -d/--date.
229 229
230 230 Returns 0 on success.
231 231 '''
232 232 if rev and node:
233 233 raise util.Abort(_("please specify just one revision"))
234 234
235 235 if not rev:
236 236 rev = node
237 237
238 238 if not rev:
239 239 raise util.Abort(_("please specify a revision to backout"))
240 240
241 241 date = opts.get('date')
242 242 if date:
243 243 opts['date'] = util.parsedate(date)
244 244
245 245 cmdutil.bail_if_changed(repo)
246 246 node = cmdutil.revsingle(repo, rev).node()
247 247
248 248 op1, op2 = repo.dirstate.parents()
249 249 a = repo.changelog.ancestor(op1, node)
250 250 if a != node:
251 251 raise util.Abort(_('cannot backout change on a different branch'))
252 252
253 253 p1, p2 = repo.changelog.parents(node)
254 254 if p1 == nullid:
255 255 raise util.Abort(_('cannot backout a change with no parents'))
256 256 if p2 != nullid:
257 257 if not opts.get('parent'):
258 258 raise util.Abort(_('cannot backout a merge changeset without '
259 259 '--parent'))
260 260 p = repo.lookup(opts['parent'])
261 261 if p not in (p1, p2):
262 262 raise util.Abort(_('%s is not a parent of %s') %
263 263 (short(p), short(node)))
264 264 parent = p
265 265 else:
266 266 if opts.get('parent'):
267 267 raise util.Abort(_('cannot use --parent on non-merge changeset'))
268 268 parent = p1
269 269
270 270 # the backout should appear on the same branch
271 271 branch = repo.dirstate.branch()
272 272 hg.clean(repo, node, show_stats=False)
273 273 repo.dirstate.setbranch(branch)
274 274 revert_opts = opts.copy()
275 275 revert_opts['date'] = None
276 276 revert_opts['all'] = True
277 277 revert_opts['rev'] = hex(parent)
278 278 revert_opts['no_backup'] = None
279 279 revert(ui, repo, **revert_opts)
280 280 if not opts.get('merge') and op1 != node:
281 281 try:
282 282 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
283 283 return hg.update(repo, op1)
284 284 finally:
285 285 ui.setconfig('ui', 'forcemerge', '')
286 286
287 287 commit_opts = opts.copy()
288 288 commit_opts['addremove'] = False
289 289 if not commit_opts['message'] and not commit_opts['logfile']:
290 290 # we don't translate commit messages
291 291 commit_opts['message'] = "Backed out changeset %s" % short(node)
292 292 commit_opts['force_editor'] = True
293 293 commit(ui, repo, **commit_opts)
294 294 def nice(node):
295 295 return '%d:%s' % (repo.changelog.rev(node), short(node))
296 296 ui.status(_('changeset %s backs out changeset %s\n') %
297 297 (nice(repo.changelog.tip()), nice(node)))
298 298 if opts.get('merge') and op1 != node:
299 299 hg.clean(repo, op1, show_stats=False)
300 300 ui.status(_('merging with changeset %s\n')
301 301 % nice(repo.changelog.tip()))
302 302 try:
303 303 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
304 304 return hg.merge(repo, hex(repo.changelog.tip()))
305 305 finally:
306 306 ui.setconfig('ui', 'forcemerge', '')
307 307 return 0
308 308
309 309 def bisect(ui, repo, rev=None, extra=None, command=None,
310 310 reset=None, good=None, bad=None, skip=None, extend=None,
311 311 noupdate=None):
312 312 """subdivision search of changesets
313 313
314 314 This command helps to find changesets which introduce problems. To
315 315 use, mark the earliest changeset you know exhibits the problem as
316 316 bad, then mark the latest changeset which is free from the problem
317 317 as good. Bisect will update your working directory to a revision
318 318 for testing (unless the -U/--noupdate option is specified). Once
319 319 you have performed tests, mark the working directory as good or
320 320 bad, and bisect will either update to another candidate changeset
321 321 or announce that it has found the bad revision.
322 322
323 323 As a shortcut, you can also use the revision argument to mark a
324 324 revision as good or bad without checking it out first.
325 325
326 326 If you supply a command, it will be used for automatic bisection.
327 327 Its exit status will be used to mark revisions as good or bad:
328 328 status 0 means good, 125 means to skip the revision, 127
329 329 (command not found) will abort the bisection, and any other
330 330 non-zero exit status means the revision is bad.
331 331
332 332 Returns 0 on success.
333 333 """
334 334 def extendbisectrange(nodes, good):
335 335 # bisect is incomplete when it ends on a merge node and
336 336 # one of the parent was not checked.
337 337 parents = repo[nodes[0]].parents()
338 338 if len(parents) > 1:
339 339 side = good and state['bad'] or state['good']
340 340 num = len(set(i.node() for i in parents) & set(side))
341 341 if num == 1:
342 342 return parents[0].ancestor(parents[1])
343 343 return None
344 344
345 345 def print_result(nodes, good):
346 346 displayer = cmdutil.show_changeset(ui, repo, {})
347 347 if len(nodes) == 1:
348 348 # narrowed it down to a single revision
349 349 if good:
350 350 ui.write(_("The first good revision is:\n"))
351 351 else:
352 352 ui.write(_("The first bad revision is:\n"))
353 353 displayer.show(repo[nodes[0]])
354 354 parents = repo[nodes[0]].parents()
355 355 extendnode = extendbisectrange(nodes, good)
356 356 if extendnode is not None:
357 357 ui.write(_('Not all ancestors of this changeset have been'
358 358 ' checked.\nUse bisect --extend to continue the '
359 359 'bisection from\nthe common ancestor, %s.\n')
360 360 % short(extendnode.node()))
361 361 else:
362 362 # multiple possible revisions
363 363 if good:
364 364 ui.write(_("Due to skipped revisions, the first "
365 365 "good revision could be any of:\n"))
366 366 else:
367 367 ui.write(_("Due to skipped revisions, the first "
368 368 "bad revision could be any of:\n"))
369 369 for n in nodes:
370 370 displayer.show(repo[n])
371 371 displayer.close()
372 372
373 373 def check_state(state, interactive=True):
374 374 if not state['good'] or not state['bad']:
375 375 if (good or bad or skip or reset) and interactive:
376 376 return
377 377 if not state['good']:
378 378 raise util.Abort(_('cannot bisect (no known good revisions)'))
379 379 else:
380 380 raise util.Abort(_('cannot bisect (no known bad revisions)'))
381 381 return True
382 382
383 383 # backward compatibility
384 384 if rev in "good bad reset init".split():
385 385 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
386 386 cmd, rev, extra = rev, extra, None
387 387 if cmd == "good":
388 388 good = True
389 389 elif cmd == "bad":
390 390 bad = True
391 391 else:
392 392 reset = True
393 393 elif extra or good + bad + skip + reset + extend + bool(command) > 1:
394 394 raise util.Abort(_('incompatible arguments'))
395 395
396 396 if reset:
397 397 p = repo.join("bisect.state")
398 398 if os.path.exists(p):
399 399 os.unlink(p)
400 400 return
401 401
402 402 state = hbisect.load_state(repo)
403 403
404 404 if command:
405 405 changesets = 1
406 406 try:
407 407 while changesets:
408 408 # update state
409 409 status = util.system(command)
410 410 if status == 125:
411 411 transition = "skip"
412 412 elif status == 0:
413 413 transition = "good"
414 414 # status < 0 means process was killed
415 415 elif status == 127:
416 416 raise util.Abort(_("failed to execute %s") % command)
417 417 elif status < 0:
418 418 raise util.Abort(_("%s killed") % command)
419 419 else:
420 420 transition = "bad"
421 421 ctx = cmdutil.revsingle(repo, rev)
422 422 rev = None # clear for future iterations
423 423 state[transition].append(ctx.node())
424 424 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
425 425 check_state(state, interactive=False)
426 426 # bisect
427 427 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
428 428 # update to next check
429 429 cmdutil.bail_if_changed(repo)
430 430 hg.clean(repo, nodes[0], show_stats=False)
431 431 finally:
432 432 hbisect.save_state(repo, state)
433 433 print_result(nodes, good)
434 434 return
435 435
436 436 # update state
437 437
438 438 if rev:
439 439 nodes = [repo.lookup(i) for i in cmdutil.revrange(repo, [rev])]
440 440 else:
441 441 nodes = [repo.lookup('.')]
442 442
443 443 if good or bad or skip:
444 444 if good:
445 445 state['good'] += nodes
446 446 elif bad:
447 447 state['bad'] += nodes
448 448 elif skip:
449 449 state['skip'] += nodes
450 450 hbisect.save_state(repo, state)
451 451
452 452 if not check_state(state):
453 453 return
454 454
455 455 # actually bisect
456 456 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
457 457 if extend:
458 458 if not changesets:
459 459 extendnode = extendbisectrange(nodes, good)
460 460 if extendnode is not None:
461 461 ui.write(_("Extending search to changeset %d:%s\n"
462 462 % (extendnode.rev(), short(extendnode.node()))))
463 463 if noupdate:
464 464 return
465 465 cmdutil.bail_if_changed(repo)
466 466 return hg.clean(repo, extendnode.node())
467 467 raise util.Abort(_("nothing to extend"))
468 468
469 469 if changesets == 0:
470 470 print_result(nodes, good)
471 471 else:
472 472 assert len(nodes) == 1 # only a single node can be tested next
473 473 node = nodes[0]
474 474 # compute the approximate number of remaining tests
475 475 tests, size = 0, 2
476 476 while size <= changesets:
477 477 tests, size = tests + 1, size * 2
478 478 rev = repo.changelog.rev(node)
479 479 ui.write(_("Testing changeset %d:%s "
480 480 "(%d changesets remaining, ~%d tests)\n")
481 481 % (rev, short(node), changesets, tests))
482 482 if not noupdate:
483 483 cmdutil.bail_if_changed(repo)
484 484 return hg.clean(repo, node)
485 485
486 486 def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None):
487 487 '''track a line of development with movable markers
488 488
489 489 Bookmarks are pointers to certain commits that move when
490 490 committing. Bookmarks are local. They can be renamed, copied and
491 491 deleted. It is possible to use bookmark names in :hg:`merge` and
492 492 :hg:`update` to merge and update respectively to a given bookmark.
493 493
494 494 You can use :hg:`bookmark NAME` to set a bookmark on the working
495 495 directory's parent revision with the given name. If you specify
496 496 a revision using -r REV (where REV may be an existing bookmark),
497 497 the bookmark is assigned to that revision.
498 498
499 499 Bookmarks can be pushed and pulled between repositories (see :hg:`help
500 500 push` and :hg:`help pull`). This requires both the local and remote
501 501 repositories to support bookmarks. For versions prior to 1.8, this means
502 502 the bookmarks extension must be enabled.
503 503 '''
504 504 hexfn = ui.debugflag and hex or short
505 505 marks = repo._bookmarks
506 506 cur = repo.changectx('.').node()
507 507
508 508 if rename:
509 509 if rename not in marks:
510 510 raise util.Abort(_("bookmark '%s' does not exist") % rename)
511 511 if mark in marks and not force:
512 512 raise util.Abort(_("bookmark '%s' already exists "
513 513 "(use -f to force)") % mark)
514 514 if mark is None:
515 515 raise util.Abort(_("new bookmark name required"))
516 516 marks[mark] = marks[rename]
517 517 if repo._bookmarkcurrent == rename:
518 518 bookmarks.setcurrent(repo, mark)
519 519 del marks[rename]
520 520 bookmarks.write(repo)
521 521 return
522 522
523 523 if delete:
524 524 if mark is None:
525 525 raise util.Abort(_("bookmark name required"))
526 526 if mark not in marks:
527 527 raise util.Abort(_("bookmark '%s' does not exist") % mark)
528 528 if mark == repo._bookmarkcurrent:
529 529 bookmarks.setcurrent(repo, None)
530 530 del marks[mark]
531 531 bookmarks.write(repo)
532 532 return
533 533
534 534 if mark is not None:
535 535 if "\n" in mark:
536 536 raise util.Abort(_("bookmark name cannot contain newlines"))
537 537 mark = mark.strip()
538 538 if not mark:
539 539 raise util.Abort(_("bookmark names cannot consist entirely of "
540 540 "whitespace"))
541 541 if mark in marks and not force:
542 542 raise util.Abort(_("bookmark '%s' already exists "
543 543 "(use -f to force)") % mark)
544 544 if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
545 545 and not force):
546 546 raise util.Abort(
547 547 _("a bookmark cannot have the name of an existing branch"))
548 548 if rev:
549 549 marks[mark] = repo.lookup(rev)
550 550 else:
551 551 marks[mark] = repo.changectx('.').node()
552 552 if repo.changectx('.').node() == marks[mark]:
553 553 bookmarks.setcurrent(repo, mark)
554 554 bookmarks.write(repo)
555 555 return
556 556
557 557 if mark is None:
558 558 if rev:
559 559 raise util.Abort(_("bookmark name required"))
560 560 if len(marks) == 0:
561 561 ui.status(_("no bookmarks set\n"))
562 562 else:
563 563 for bmark, n in sorted(marks.iteritems()):
564 564 current = repo._bookmarkcurrent
565 565 if bmark == current and n == cur:
566 566 prefix, label = '*', 'bookmarks.current'
567 567 else:
568 568 prefix, label = ' ', ''
569 569
570 570 if ui.quiet:
571 571 ui.write("%s\n" % bmark, label=label)
572 572 else:
573 573 ui.write(" %s %-25s %d:%s\n" % (
574 574 prefix, bmark, repo.changelog.rev(n), hexfn(n)),
575 575 label=label)
576 576 return
577 577
578 578 def branch(ui, repo, label=None, **opts):
579 579 """set or show the current branch name
580 580
581 581 With no argument, show the current branch name. With one argument,
582 582 set the working directory branch name (the branch will not exist
583 583 in the repository until the next commit). Standard practice
584 584 recommends that primary development take place on the 'default'
585 585 branch.
586 586
587 587 Unless -f/--force is specified, branch will not let you set a
588 588 branch name that already exists, even if it's inactive.
589 589
590 590 Use -C/--clean to reset the working directory branch to that of
591 591 the parent of the working directory, negating a previous branch
592 592 change.
593 593
594 594 Use the command :hg:`update` to switch to an existing branch. Use
595 595 :hg:`commit --close-branch` to mark this branch as closed.
596 596
597 597 Returns 0 on success.
598 598 """
599 599
600 600 if opts.get('clean'):
601 601 label = repo[None].p1().branch()
602 602 repo.dirstate.setbranch(label)
603 603 ui.status(_('reset working directory to branch %s\n') % label)
604 604 elif label:
605 605 if not opts.get('force') and label in repo.branchtags():
606 606 if label not in [p.branch() for p in repo.parents()]:
607 607 raise util.Abort(_('a branch of the same name already exists'
608 608 " (use 'hg update' to switch to it)"))
609 609 repo.dirstate.setbranch(label)
610 610 ui.status(_('marked working directory as branch %s\n') % label)
611 611 else:
612 612 ui.write("%s\n" % repo.dirstate.branch())
613 613
614 614 def branches(ui, repo, active=False, closed=False):
615 615 """list repository named branches
616 616
617 617 List the repository's named branches, indicating which ones are
618 618 inactive. If -c/--closed is specified, also list branches which have
619 619 been marked closed (see :hg:`commit --close-branch`).
620 620
621 621 If -a/--active is specified, only show active branches. A branch
622 622 is considered active if it contains repository heads.
623 623
624 624 Use the command :hg:`update` to switch to an existing branch.
625 625
626 626 Returns 0.
627 627 """
628 628
629 629 hexfunc = ui.debugflag and hex or short
630 630 activebranches = [repo[n].branch() for n in repo.heads()]
631 631 def testactive(tag, node):
632 632 realhead = tag in activebranches
633 633 open = node in repo.branchheads(tag, closed=False)
634 634 return realhead and open
635 635 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
636 636 for tag, node in repo.branchtags().items()],
637 637 reverse=True)
638 638
639 639 for isactive, node, tag in branches:
640 640 if (not active) or isactive:
641 641 if ui.quiet:
642 642 ui.write("%s\n" % tag)
643 643 else:
644 644 hn = repo.lookup(node)
645 645 if isactive:
646 646 label = 'branches.active'
647 647 notice = ''
648 648 elif hn not in repo.branchheads(tag, closed=False):
649 649 if not closed:
650 650 continue
651 651 label = 'branches.closed'
652 652 notice = _(' (closed)')
653 653 else:
654 654 label = 'branches.inactive'
655 655 notice = _(' (inactive)')
656 656 if tag == repo.dirstate.branch():
657 657 label = 'branches.current'
658 658 rev = str(node).rjust(31 - encoding.colwidth(tag))
659 659 rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
660 660 tag = ui.label(tag, label)
661 661 ui.write("%s %s%s\n" % (tag, rev, notice))
662 662
663 663 def bundle(ui, repo, fname, dest=None, **opts):
664 664 """create a changegroup file
665 665
666 666 Generate a compressed changegroup file collecting changesets not
667 667 known to be in another repository.
668 668
669 669 If you omit the destination repository, then hg assumes the
670 670 destination will have all the nodes you specify with --base
671 671 parameters. To create a bundle containing all changesets, use
672 672 -a/--all (or --base null).
673 673
674 674 You can change compression method with the -t/--type option.
675 675 The available compression methods are: none, bzip2, and
676 676 gzip (by default, bundles are compressed using bzip2).
677 677
678 678 The bundle file can then be transferred using conventional means
679 679 and applied to another repository with the unbundle or pull
680 680 command. This is useful when direct push and pull are not
681 681 available or when exporting an entire repository is undesirable.
682 682
683 683 Applying bundles preserves all changeset contents including
684 684 permissions, copy/rename information, and revision history.
685 685
686 686 Returns 0 on success, 1 if no changes found.
687 687 """
688 688 revs = None
689 689 if 'rev' in opts:
690 690 revs = cmdutil.revrange(repo, opts['rev'])
691 691
692 692 if opts.get('all'):
693 693 base = ['null']
694 694 else:
695 695 base = cmdutil.revrange(repo, opts.get('base'))
696 696 if base:
697 697 if dest:
698 698 raise util.Abort(_("--base is incompatible with specifying "
699 699 "a destination"))
700 700 base = [repo.lookup(rev) for rev in base]
701 701 # create the right base
702 702 # XXX: nodesbetween / changegroup* should be "fixed" instead
703 703 o = []
704 704 has = set((nullid,))
705 705 for n in base:
706 706 has.update(repo.changelog.reachable(n))
707 707 if revs:
708 708 revs = [repo.lookup(rev) for rev in revs]
709 709 visit = revs[:]
710 710 has.difference_update(visit)
711 711 else:
712 712 visit = repo.changelog.heads()
713 713 seen = {}
714 714 while visit:
715 715 n = visit.pop(0)
716 716 parents = [p for p in repo.changelog.parents(n) if p not in has]
717 717 if len(parents) == 0:
718 718 if n not in has:
719 719 o.append(n)
720 720 else:
721 721 for p in parents:
722 722 if p not in seen:
723 723 seen[p] = 1
724 724 visit.append(p)
725 725 else:
726 726 dest = ui.expandpath(dest or 'default-push', dest or 'default')
727 727 dest, branches = hg.parseurl(dest, opts.get('branch'))
728 728 other = hg.repository(hg.remoteui(repo, opts), dest)
729 729 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
730 730 if revs:
731 731 revs = [repo.lookup(rev) for rev in revs]
732 732 o = discovery.findoutgoing(repo, other, force=opts.get('force'))
733 733
734 734 if not o:
735 735 ui.status(_("no changes found\n"))
736 736 return 1
737 737
738 738 if revs:
739 739 cg = repo.changegroupsubset(o, revs, 'bundle')
740 740 else:
741 741 cg = repo.changegroup(o, 'bundle')
742 742
743 743 bundletype = opts.get('type', 'bzip2').lower()
744 744 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
745 745 bundletype = btypes.get(bundletype)
746 746 if bundletype not in changegroup.bundletypes:
747 747 raise util.Abort(_('unknown bundle type specified with --type'))
748 748
749 749 changegroup.writebundle(cg, fname, bundletype)
750 750
751 751 def cat(ui, repo, file1, *pats, **opts):
752 752 """output the current or given revision of files
753 753
754 754 Print the specified files as they were at the given revision. If
755 755 no revision is given, the parent of the working directory is used,
756 756 or tip if no revision is checked out.
757 757
758 758 Output may be to a file, in which case the name of the file is
759 759 given using a format string. The formatting rules are the same as
760 760 for the export command, with the following additions:
761 761
762 762 :``%s``: basename of file being printed
763 763 :``%d``: dirname of file being printed, or '.' if in repository root
764 764 :``%p``: root-relative path name of file being printed
765 765
766 766 Returns 0 on success.
767 767 """
768 768 ctx = cmdutil.revsingle(repo, opts.get('rev'))
769 769 err = 1
770 770 m = cmdutil.match(repo, (file1,) + pats, opts)
771 771 for abs in ctx.walk(m):
772 772 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
773 773 data = ctx[abs].data()
774 774 if opts.get('decode'):
775 775 data = repo.wwritedata(abs, data)
776 776 fp.write(data)
777 777 fp.close()
778 778 err = 0
779 779 return err
780 780
781 781 def clone(ui, source, dest=None, **opts):
782 782 """make a copy of an existing repository
783 783
784 784 Create a copy of an existing repository in a new directory.
785 785
786 786 If no destination directory name is specified, it defaults to the
787 787 basename of the source.
788 788
789 789 The location of the source is added to the new repository's
790 790 ``.hg/hgrc`` file, as the default to be used for future pulls.
791 791
792 792 See :hg:`help urls` for valid source format details.
793 793
794 794 It is possible to specify an ``ssh://`` URL as the destination, but no
795 795 ``.hg/hgrc`` and working directory will be created on the remote side.
796 796 Please see :hg:`help urls` for important details about ``ssh://`` URLs.
797 797
798 798 A set of changesets (tags, or branch names) to pull may be specified
799 799 by listing each changeset (tag, or branch name) with -r/--rev.
800 800 If -r/--rev is used, the cloned repository will contain only a subset
801 801 of the changesets of the source repository. Only the set of changesets
802 802 defined by all -r/--rev options (including all their ancestors)
803 803 will be pulled into the destination repository.
804 804 No subsequent changesets (including subsequent tags) will be present
805 805 in the destination.
806 806
807 807 Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
808 808 local source repositories.
809 809
810 810 For efficiency, hardlinks are used for cloning whenever the source
811 811 and destination are on the same filesystem (note this applies only
812 812 to the repository data, not to the working directory). Some
813 813 filesystems, such as AFS, implement hardlinking incorrectly, but
814 814 do not report errors. In these cases, use the --pull option to
815 815 avoid hardlinking.
816 816
817 817 In some cases, you can clone repositories and the working directory
818 818 using full hardlinks with ::
819 819
820 820 $ cp -al REPO REPOCLONE
821 821
822 822 This is the fastest way to clone, but it is not always safe. The
823 823 operation is not atomic (making sure REPO is not modified during
824 824 the operation is up to you) and you have to make sure your editor
825 825 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
826 826 this is not compatible with certain extensions that place their
827 827 metadata under the .hg directory, such as mq.
828 828
829 829 Mercurial will update the working directory to the first applicable
830 830 revision from this list:
831 831
832 832 a) null if -U or the source repository has no changesets
833 833 b) if -u . and the source repository is local, the first parent of
834 834 the source repository's working directory
835 835 c) the changeset specified with -u (if a branch name, this means the
836 836 latest head of that branch)
837 837 d) the changeset specified with -r
838 838 e) the tipmost head specified with -b
839 839 f) the tipmost head specified with the url#branch source syntax
840 840 g) the tipmost head of the default branch
841 841 h) tip
842 842
843 843 Returns 0 on success.
844 844 """
845 845 if opts.get('noupdate') and opts.get('updaterev'):
846 846 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
847 847
848 848 r = hg.clone(hg.remoteui(ui, opts), source, dest,
849 849 pull=opts.get('pull'),
850 850 stream=opts.get('uncompressed'),
851 851 rev=opts.get('rev'),
852 852 update=opts.get('updaterev') or not opts.get('noupdate'),
853 853 branch=opts.get('branch'))
854 854
855 855 return r is None
856 856
857 857 def commit(ui, repo, *pats, **opts):
858 858 """commit the specified files or all outstanding changes
859 859
860 860 Commit changes to the given files into the repository. Unlike a
861 861 centralized SCM, this operation is a local operation. See
862 862 :hg:`push` for a way to actively distribute your changes.
863 863
864 864 If a list of files is omitted, all changes reported by :hg:`status`
865 865 will be committed.
866 866
867 867 If you are committing the result of a merge, do not provide any
868 868 filenames or -I/-X filters.
869 869
870 870 If no commit message is specified, Mercurial starts your
871 871 configured editor where you can enter a message. In case your
872 872 commit fails, you will find a backup of your message in
873 873 ``.hg/last-message.txt``.
874 874
875 875 See :hg:`help dates` for a list of formats valid for -d/--date.
876 876
877 877 Returns 0 on success, 1 if nothing changed.
878 878 """
879 879 extra = {}
880 880 if opts.get('close_branch'):
881 881 if repo['.'].node() not in repo.branchheads():
882 882 # The topo heads set is included in the branch heads set of the
883 883 # current branch, so it's sufficient to test branchheads
884 884 raise util.Abort(_('can only close branch heads'))
885 885 extra['close'] = 1
886 886 e = cmdutil.commiteditor
887 887 if opts.get('force_editor'):
888 888 e = cmdutil.commitforceeditor
889 889
890 890 def commitfunc(ui, repo, message, match, opts):
891 891 return repo.commit(message, opts.get('user'), opts.get('date'), match,
892 892 editor=e, extra=extra)
893 893
894 894 branch = repo[None].branch()
895 895 bheads = repo.branchheads(branch)
896 896
897 897 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
898 898 if not node:
899 899 stat = repo.status(match=cmdutil.match(repo, pats, opts))
900 900 if stat[3]:
901 901 ui.status(_("nothing changed (%d missing files, see 'hg status')\n")
902 902 % len(stat[3]))
903 903 else:
904 904 ui.status(_("nothing changed\n"))
905 905 return 1
906 906
907 907 ctx = repo[node]
908 908 parents = ctx.parents()
909 909
910 910 if bheads and not [x for x in parents
911 911 if x.node() in bheads and x.branch() == branch]:
912 912 ui.status(_('created new head\n'))
913 913 # The message is not printed for initial roots. For the other
914 914 # changesets, it is printed in the following situations:
915 915 #
916 916 # Par column: for the 2 parents with ...
917 917 # N: null or no parent
918 918 # B: parent is on another named branch
919 919 # C: parent is a regular non head changeset
920 920 # H: parent was a branch head of the current branch
921 921 # Msg column: whether we print "created new head" message
922 922 # In the following, it is assumed that there already exists some
923 923 # initial branch heads of the current branch, otherwise nothing is
924 924 # printed anyway.
925 925 #
926 926 # Par Msg Comment
927 927 # NN y additional topo root
928 928 #
929 929 # BN y additional branch root
930 930 # CN y additional topo head
931 931 # HN n usual case
932 932 #
933 933 # BB y weird additional branch root
934 934 # CB y branch merge
935 935 # HB n merge with named branch
936 936 #
937 937 # CC y additional head from merge
938 938 # CH n merge with a head
939 939 #
940 940 # HH n head merge: head count decreases
941 941
942 942 if not opts.get('close_branch'):
943 943 for r in parents:
944 944 if r.extra().get('close') and r.branch() == branch:
945 945 ui.status(_('reopening closed branch head %d\n') % r)
946 946
947 947 if ui.debugflag:
948 948 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
949 949 elif ui.verbose:
950 950 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
951 951
952 952 def copy(ui, repo, *pats, **opts):
953 953 """mark files as copied for the next commit
954 954
955 955 Mark dest as having copies of source files. If dest is a
956 956 directory, copies are put in that directory. If dest is a file,
957 957 the source must be a single file.
958 958
959 959 By default, this command copies the contents of files as they
960 960 exist in the working directory. If invoked with -A/--after, the
961 961 operation is recorded, but no copying is performed.
962 962
963 963 This command takes effect with the next commit. To undo a copy
964 964 before that, see :hg:`revert`.
965 965
966 966 Returns 0 on success, 1 if errors are encountered.
967 967 """
968 968 wlock = repo.wlock(False)
969 969 try:
970 970 return cmdutil.copy(ui, repo, pats, opts)
971 971 finally:
972 972 wlock.release()
973 973
974 974 def debugancestor(ui, repo, *args):
975 975 """find the ancestor revision of two revisions in a given index"""
976 976 if len(args) == 3:
977 977 index, rev1, rev2 = args
978 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
978 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
979 979 lookup = r.lookup
980 980 elif len(args) == 2:
981 981 if not repo:
982 982 raise util.Abort(_("there is no Mercurial repository here "
983 983 "(.hg not found)"))
984 984 rev1, rev2 = args
985 985 r = repo.changelog
986 986 lookup = repo.lookup
987 987 else:
988 988 raise util.Abort(_('either two or three arguments required'))
989 989 a = r.ancestor(lookup(rev1), lookup(rev2))
990 990 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
991 991
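# Added note on the util.opener -> scmutil.opener change above (a minimal
# sketch, not part of this changeset): the opener class itself is unchanged
# by the move, so call sites only swap the module name, e.g.
#
#   from mercurial import scmutil
#   op = scmutil.opener(os.getcwd(), audit=False)  # opener rooted at cwd
#   fp = op("somefile", "w")                       # path relative to root
#   fp.write("data\n")
#   fp.close()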
992 992 def debugbuilddag(ui, repo, text,
993 993 mergeable_file=False,
994 994 appended_file=False,
995 995 overwritten_file=False,
996 996 new_file=False):
997 997 """builds a repo with a given dag from scratch in the current empty repo
998 998
999 999 Elements:
1000 1000
1001 1001 - "+n" is a linear run of n nodes based on the current default parent
1002 1002 - "." is a single node based on the current default parent
1003 1003 - "$" resets the default parent to null (implied at the start);
1004 1004 otherwise the default parent is always the last node created
1005 1005 - "<p" sets the default parent to the backref p
1006 1006 - "*p" is a fork at parent p, which is a backref
1007 1007 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
1008 1008 - "/p2" is a merge of the preceding node and p2
1009 1009 - ":tag" defines a local tag for the preceding node
1010 1010 - "@branch" sets the named branch for subsequent nodes
1011 1011 - "!command" runs the command using your shell
1012 1012 - "!!my command\\n" is like "!", but to the end of the line
1013 1013 - "#...\\n" is a comment up to the end of the line
1014 1014
1015 1015 Whitespace between the above elements is ignored.
1016 1016
1017 1017 A backref is either
1018 1018
1019 1019 - a number n, which references the node curr-n, where curr is the current
1020 1020 node, or
1021 1021 - the name of a local tag you placed earlier using ":tag", or
1022 1022 - empty to denote the default parent.
1023 1023
1024 1024 All string-valued elements are either strictly alphanumeric, or must
1025 1025 be enclosed in double quotes ("..."), with "\\" as escape character.
1026 1026
1027 1027 Note that the --overwritten-file and --appended-file options imply the
1028 1028 use of "HGMERGE=internal:local" during DAG buildup.
1029 1029 """
1030 1030
1031 1031 if not (mergeable_file or appended_file or overwritten_file or new_file):
1032 1032 raise util.Abort(_('need at least one of -m, -a, -o, -n'))
1033 1033
1034 1034 if len(repo.changelog) > 0:
1035 1035 raise util.Abort(_('repository is not empty'))
1036 1036
1037 1037 if overwritten_file or appended_file:
1038 1038 # we don't want to fail in merges during buildup
1039 1039 os.environ['HGMERGE'] = 'internal:local'
1040 1040
1041 1041 def writefile(fname, text, fmode="wb"):
1042 1042 f = open(fname, fmode)
1043 1043 try:
1044 1044 f.write(text)
1045 1045 finally:
1046 1046 f.close()
1047 1047
1048 1048 if mergeable_file:
1049 1049 linesperrev = 2
1050 1050 # determine number of revs in DAG
1051 1051 n = 0
1052 1052 for type, data in dagparser.parsedag(text):
1053 1053 if type == 'n':
1054 1054 n += 1
1055 1055 # make a file with k lines per rev
1056 1056 writefile("mf", "\n".join(str(i) for i in xrange(0, n * linesperrev))
1057 1057 + "\n")
1058 1058
1059 1059 at = -1
1060 1060 atbranch = 'default'
1061 1061 for type, data in dagparser.parsedag(text):
1062 1062 if type == 'n':
1063 1063 ui.status('node %s\n' % str(data))
1064 1064 id, ps = data
1065 1065 p1 = ps[0]
1066 1066 if p1 != at:
1067 1067 update(ui, repo, node=str(p1), clean=True)
1068 1068 at = p1
1069 1069 if repo.dirstate.branch() != atbranch:
1070 1070 branch(ui, repo, atbranch, force=True)
1071 1071 if len(ps) > 1:
1072 1072 p2 = ps[1]
1073 1073 merge(ui, repo, node=p2)
1074 1074
1075 1075 if mergeable_file:
1076 1076 f = open("mf", "rb+")
1077 1077 try:
1078 1078 lines = f.read().split("\n")
1079 1079 lines[id * linesperrev] += " r%i" % id
1080 1080 f.seek(0)
1081 1081 f.write("\n".join(lines))
1082 1082 finally:
1083 1083 f.close()
1084 1084
1085 1085 if appended_file:
1086 1086 writefile("af", "r%i\n" % id, "ab")
1087 1087
1088 1088 if overwritten_file:
1089 1089 writefile("of", "r%i\n" % id)
1090 1090
1091 1091 if new_file:
1092 1092 writefile("nf%i" % id, "r%i\n" % id)
1093 1093
1094 1094 commit(ui, repo, addremove=True, message="r%i" % id, date=(id, 0))
1095 1095 at = id
1096 1096 elif type == 'l':
1097 1097 id, name = data
1098 1098 ui.status('tag %s\n' % name)
1099 1099 tag(ui, repo, name, local=True)
1100 1100 elif type == 'a':
1101 1101 ui.status('branch %s\n' % data)
1102 1102 atbranch = data
1103 1103 elif type in 'cC':
1104 1104 r = util.system(data, cwd=repo.root)
1105 1105 if r:
1106 1106 desc, r = util.explain_exit(r)
1107 1107 raise util.Abort(_('%s command %s') % (data, desc))
1108 1108
1109 1109 def debugcommands(ui, cmd='', *args):
1110 1110 """list all available commands and options"""
1111 1111 for cmd, vals in sorted(table.iteritems()):
1112 1112 cmd = cmd.split('|')[0].strip('^')
1113 1113 opts = ', '.join([i[1] for i in vals[1]])
1114 1114 ui.write('%s: %s\n' % (cmd, opts))
1115 1115
1116 1116 def debugcomplete(ui, cmd='', **opts):
1117 1117 """returns the completion list associated with the given command"""
1118 1118
1119 1119 if opts.get('options'):
1120 1120 options = []
1121 1121 otables = [globalopts]
1122 1122 if cmd:
1123 1123 aliases, entry = cmdutil.findcmd(cmd, table, False)
1124 1124 otables.append(entry[1])
1125 1125 for t in otables:
1126 1126 for o in t:
1127 1127 if "(DEPRECATED)" in o[3]:
1128 1128 continue
1129 1129 if o[0]:
1130 1130 options.append('-%s' % o[0])
1131 1131 options.append('--%s' % o[1])
1132 1132 ui.write("%s\n" % "\n".join(options))
1133 1133 return
1134 1134
1135 1135 cmdlist = cmdutil.findpossible(cmd, table)
1136 1136 if ui.verbose:
1137 1137 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
1138 1138 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1139 1139
1140 1140 def debugfsinfo(ui, path = "."):
1141 1141 """show information detected about current filesystem"""
1142 1142 open('.debugfsinfo', 'w').write('')
1143 1143 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
1144 1144 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
1145 1145 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
1146 1146 and 'yes' or 'no'))
1147 1147 os.unlink('.debugfsinfo')
1148 1148
1149 1149 def debugrebuildstate(ui, repo, rev="tip"):
1150 1150 """rebuild the dirstate as it would look like for the given revision"""
1151 1151 ctx = cmdutil.revsingle(repo, rev)
1152 1152 wlock = repo.wlock()
1153 1153 try:
1154 1154 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1155 1155 finally:
1156 1156 wlock.release()
1157 1157
1158 1158 def debugcheckstate(ui, repo):
1159 1159 """validate the correctness of the current dirstate"""
1160 1160 parent1, parent2 = repo.dirstate.parents()
1161 1161 m1 = repo[parent1].manifest()
1162 1162 m2 = repo[parent2].manifest()
1163 1163 errors = 0
1164 1164 for f in repo.dirstate:
1165 1165 state = repo.dirstate[f]
1166 1166 if state in "nr" and f not in m1:
1167 1167 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1168 1168 errors += 1
1169 1169 if state in "a" and f in m1:
1170 1170 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1171 1171 errors += 1
1172 1172 if state in "m" and f not in m1 and f not in m2:
1173 1173 ui.warn(_("%s in state %s, but not in either manifest\n") %
1174 1174 (f, state))
1175 1175 errors += 1
1176 1176 for f in m1:
1177 1177 state = repo.dirstate[f]
1178 1178 if state not in "nrm":
1179 1179 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1180 1180 errors += 1
1181 1181 if errors:
1182 1182 error = _(".hg/dirstate inconsistent with current parent's manifest")
1183 1183 raise util.Abort(error)
1184 1184
1185 1185 def showconfig(ui, repo, *values, **opts):
1186 1186 """show combined config settings from all hgrc files
1187 1187
1188 1188 With no arguments, print names and values of all config items.
1189 1189
1190 1190 With one argument of the form section.name, print just the value
1191 1191 of that config item.
1192 1192
1193 1193 With multiple arguments, print names and values of all config
1194 1194 items with matching section names.
1195 1195
1196 1196 With --debug, the source (filename and line number) is printed
1197 1197 for each config item.
1198 1198
1199 1199 Returns 0 on success.
1200 1200 """
1201 1201
1202 1202 for f in util.rcpath():
1203 1203 ui.debug(_('read config from: %s\n') % f)
1204 1204 untrusted = bool(opts.get('untrusted'))
1205 1205 if values:
1206 1206 sections = [v for v in values if '.' not in v]
1207 1207 items = [v for v in values if '.' in v]
1208 1208 if len(items) > 1 or (items and sections):
1209 1209 raise util.Abort(_('only one config item permitted'))
1210 1210 for section, name, value in ui.walkconfig(untrusted=untrusted):
1211 1211 value = str(value).replace('\n', '\\n')
1212 1212 sectname = section + '.' + name
1213 1213 if values:
1214 1214 for v in values:
1215 1215 if v == section:
1216 1216 ui.debug('%s: ' %
1217 1217 ui.configsource(section, name, untrusted))
1218 1218 ui.write('%s=%s\n' % (sectname, value))
1219 1219 elif v == sectname:
1220 1220 ui.debug('%s: ' %
1221 1221 ui.configsource(section, name, untrusted))
1222 1222 ui.write(value, '\n')
1223 1223 else:
1224 1224 ui.debug('%s: ' %
1225 1225 ui.configsource(section, name, untrusted))
1226 1226 ui.write('%s=%s\n' % (sectname, value))
1227 1227
1228 1228 def debugknown(ui, repopath, *ids, **opts):
1229 1229 """test whether node ids are known to a repo
1230 1230
1231 1231 Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
1232 1232 indicating unknown/known.
1233 1233 """
1234 1234 repo = hg.repository(ui, repopath)
1235 1235 if not repo.capable('known'):
1236 1236 raise util.Abort("known() not supported by target repository")
1237 1237 flags = repo.known([bin(s) for s in ids])
1238 1238 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1239 1239
1240 1240 def debugbundle(ui, bundlepath, all=None, **opts):
1241 1241 """lists the contents of a bundle"""
1242 1242 f = url.open(ui, bundlepath)
1243 1243 try:
1244 1244 gen = changegroup.readbundle(f, bundlepath)
1245 1245 if all:
1246 1246 ui.write("format: id, p1, p2, cset, len(delta)\n")
1247 1247
1248 1248 def showchunks(named):
1249 1249 ui.write("\n%s\n" % named)
1250 1250 while 1:
1251 1251 chunkdata = gen.parsechunk()
1252 1252 if not chunkdata:
1253 1253 break
1254 1254 node = chunkdata['node']
1255 1255 p1 = chunkdata['p1']
1256 1256 p2 = chunkdata['p2']
1257 1257 cs = chunkdata['cs']
1258 1258 delta = chunkdata['data']
1259 1259 ui.write("%s %s %s %s %s\n" %
1260 1260 (hex(node), hex(p1), hex(p2),
1261 1261 hex(cs), len(delta)))
1262 1262
1263 1263 showchunks("changelog")
1264 1264 showchunks("manifest")
1265 1265 while 1:
1266 1266 fname = gen.chunk()
1267 1267 if not fname:
1268 1268 break
1269 1269 showchunks(fname)
1270 1270 else:
1271 1271 while 1:
1272 1272 chunkdata = gen.parsechunk()
1273 1273 if not chunkdata:
1274 1274 break
1275 1275 node = chunkdata['node']
1276 1276 ui.write("%s\n" % hex(node))
1277 1277 finally:
1278 1278 f.close()
1279 1279
1280 1280 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1281 1281 """retrieves a bundle from a repo
1282 1282
1283 1283 Every ID must be a full-length hex node id string. Saves the bundle to the
1284 1284 given file.
1285 1285 """
1286 1286 repo = hg.repository(ui, repopath)
1287 1287 if not repo.capable('getbundle'):
1288 1288 raise util.Abort("getbundle() not supported by target repository")
1289 1289 args = {}
1290 1290 if common:
1291 1291 args['common'] = [bin(s) for s in common]
1292 1292 if head:
1293 1293 args['heads'] = [bin(s) for s in head]
1294 1294 bundle = repo.getbundle('debug', **args)
1295 1295
1296 1296 bundletype = opts.get('type', 'bzip2').lower()
1297 1297 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
1298 1298 bundletype = btypes.get(bundletype)
1299 1299 if bundletype not in changegroup.bundletypes:
1300 1300 raise util.Abort(_('unknown bundle type specified with --type'))
1301 1301 changegroup.writebundle(bundle, bundlepath, bundletype)
1302 1302
1303 1303 def debugpushkey(ui, repopath, namespace, *keyinfo):
1304 1304 '''access the pushkey key/value protocol
1305 1305
1306 1306 With two args, list the keys in the given namespace.
1307 1307
1308 1308 With five args, set a key to new if it currently is set to old.
1309 1309 Reports success or failure.
1310 1310 '''
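# Illustrative usage only (added annotation; assumes the target repository
# exposes a "bookmarks" pushkey namespace):
#
#   hg debugpushkey /path/to/repo bookmarks
#
# lists bookmark name/node pairs via the pushkey protocol; with key, old
# and new also given, it attempts the corresponding update instead.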
1311 1311
1312 1312 target = hg.repository(ui, repopath)
1313 1313 if keyinfo:
1314 1314 key, old, new = keyinfo
1315 1315 r = target.pushkey(namespace, key, old, new)
1316 1316 ui.status(str(r) + '\n')
1317 1317 return not r
1318 1318 else:
1319 1319 for k, v in target.listkeys(namespace).iteritems():
1320 1320 ui.write("%s\t%s\n" % (k.encode('string-escape'),
1321 1321 v.encode('string-escape')))
1322 1322
1323 1323 def debugrevspec(ui, repo, expr):
1324 1324 '''parse and apply a revision specification'''
1325 1325 if ui.verbose:
1326 1326 tree = revset.parse(expr)[0]
1327 1327 ui.note(tree, "\n")
1328 1328 func = revset.match(expr)
1329 1329 for c in func(repo, range(len(repo))):
1330 1330 ui.write("%s\n" % c)
1331 1331
1332 1332 def debugsetparents(ui, repo, rev1, rev2=None):
1333 1333 """manually set the parents of the current working directory
1334 1334
1335 1335 This is useful for writing repository conversion tools, but should
1336 1336 be used with care.
1337 1337
1338 1338 Returns 0 on success.
1339 1339 """
1340 1340
1341 1341 r1 = cmdutil.revsingle(repo, rev1).node()
1342 1342 r2 = cmdutil.revsingle(repo, rev2, 'null').node()
1343 1343
1344 1344 wlock = repo.wlock()
1345 1345 try:
1346 1346 repo.dirstate.setparents(r1, r2)
1347 1347 finally:
1348 1348 wlock.release()
1349 1349
1350 1350 def debugstate(ui, repo, nodates=None, datesort=None):
1351 1351 """show the contents of the current dirstate"""
1352 1352 timestr = ""
1353 1353 showdate = not nodates
1354 1354 if datesort:
1355 1355 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
1356 1356 else:
1357 1357 keyfunc = None # sort by filename
1358 1358 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
1359 1359 if showdate:
1360 1360 if ent[3] == -1:
1361 1361 # Pad or slice to locale representation
1362 1362 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
1363 1363 time.localtime(0)))
1364 1364 timestr = 'unset'
1365 1365 timestr = (timestr[:locale_len] +
1366 1366 ' ' * (locale_len - len(timestr)))
1367 1367 else:
1368 1368 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
1369 1369 time.localtime(ent[3]))
1370 1370 if ent[1] & 020000:
1371 1371 mode = 'lnk'
1372 1372 else:
1373 1373 mode = '%3o' % (ent[1] & 0777)
1374 1374 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
1375 1375 for f in repo.dirstate.copies():
1376 1376 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1377 1377
1378 1378 def debugsub(ui, repo, rev=None):
1379 1379 ctx = cmdutil.revsingle(repo, rev, None)
1380 1380 for k, v in sorted(ctx.substate.items()):
1381 1381 ui.write('path %s\n' % k)
1382 1382 ui.write(' source %s\n' % v[0])
1383 1383 ui.write(' revision %s\n' % v[1])
1384 1384
1385 1385 def debugdag(ui, repo, file_=None, *revs, **opts):
1386 1386 """format the changelog or an index DAG as a concise textual description
1387 1387
1388 1388 If you pass a revlog index, the revlog's DAG is emitted. If you list
1389 1389 revision numbers, they get labelled in the output as rN.
1390 1390
1391 1391 Otherwise, the changelog DAG of the current repo is emitted.
1392 1392 """
1393 1393 spaces = opts.get('spaces')
1394 1394 dots = opts.get('dots')
1395 1395 if file_:
1396 rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1396 rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1397 1397 revs = set((int(r) for r in revs))
1398 1398 def events():
1399 1399 for r in rlog:
1400 1400 yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
1401 1401 if r in revs:
1402 1402 yield 'l', (r, "r%i" % r)
1403 1403 elif repo:
1404 1404 cl = repo.changelog
1405 1405 tags = opts.get('tags')
1406 1406 branches = opts.get('branches')
1407 1407 if tags:
1408 1408 labels = {}
1409 1409 for l, n in repo.tags().items():
1410 1410 labels.setdefault(cl.rev(n), []).append(l)
1411 1411 def events():
1412 1412 b = "default"
1413 1413 for r in cl:
1414 1414 if branches:
1415 1415 newb = cl.read(cl.node(r))[5]['branch']
1416 1416 if newb != b:
1417 1417 yield 'a', newb
1418 1418 b = newb
1419 1419 yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
1420 1420 if tags:
1421 1421 ls = labels.get(r)
1422 1422 if ls:
1423 1423 for l in ls:
1424 1424 yield 'l', (r, l)
1425 1425 else:
1426 1426 raise util.Abort(_('need repo for changelog dag'))
1427 1427
1428 1428 for line in dagparser.dagtextlines(events(),
1429 1429 addspaces=spaces,
1430 1430 wraplabels=True,
1431 1431 wrapannotations=True,
1432 1432 wrapnonlinear=dots,
1433 1433 usedots=dots,
1434 1434 maxlinewidth=70):
1435 1435 ui.write(line)
1436 1436 ui.write("\n")
1437 1437
1438 1438 def debugdata(ui, repo, file_, rev):
1439 1439 """dump the contents of a data file revision"""
1440 1440 r = None
1441 1441 if repo:
1442 1442 filelog = repo.file(file_)
1443 1443 if len(filelog):
1444 1444 r = filelog
1445 1445 if not r:
1446 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
1446 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False),
1447 file_[:-2] + ".i")
1447 1448 try:
1448 1449 ui.write(r.revision(r.lookup(rev)))
1449 1450 except KeyError:
1450 1451 raise util.Abort(_('invalid revision identifier %s') % rev)
1451 1452
1452 1453 def debugdate(ui, date, range=None, **opts):
1453 1454 """parse and display a date"""
1454 1455 if opts["extended"]:
1455 1456 d = util.parsedate(date, util.extendeddateformats)
1456 1457 else:
1457 1458 d = util.parsedate(date)
1458 1459 ui.write("internal: %s %s\n" % d)
1459 1460 ui.write("standard: %s\n" % util.datestr(d))
1460 1461 if range:
1461 1462 m = util.matchdate(range)
1462 1463 ui.write("match: %s\n" % m(d[0]))
1463 1464
1464 1465 def debugignore(ui, repo, *values, **opts):
1465 1466 """display the combined ignore pattern"""
1466 1467 ignore = repo.dirstate._ignore
1467 1468 if hasattr(ignore, 'includepat'):
1468 1469 ui.write("%s\n" % ignore.includepat)
1469 1470 else:
1470 1471 raise util.Abort(_("no ignore patterns found"))
1471 1472
1472 1473 def debugindex(ui, repo, file_, **opts):
1473 1474 """dump the contents of an index file"""
1474 1475 r = None
1475 1476 if repo:
1476 1477 filelog = repo.file(file_)
1477 1478 if len(filelog):
1478 1479 r = filelog
1479 1480
1480 1481 format = opts.get('format', 0)
1481 1482 if format not in (0, 1):
1482 1483 raise util.Abort(_("unknown format %d") % format)
1483 1484
1484 1485 if not r:
1485 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1486 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1486 1487
1487 1488 if format == 0:
1488 1489 ui.write(" rev offset length base linkrev"
1489 1490 " nodeid p1 p2\n")
1490 1491 elif format == 1:
1491 1492 ui.write(" rev flag offset length"
1492 1493 " size base link p1 p2 nodeid\n")
1493 1494
1494 1495 for i in r:
1495 1496 node = r.node(i)
1496 1497 if format == 0:
1497 1498 try:
1498 1499 pp = r.parents(node)
1499 1500 except:
1500 1501 pp = [nullid, nullid]
1501 1502 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1502 1503 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
1503 1504 short(node), short(pp[0]), short(pp[1])))
1504 1505 elif format == 1:
1505 1506 pr = r.parentrevs(i)
1506 1507 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
1507 1508 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1508 1509 r.base(i), r.linkrev(i), pr[0], pr[1], short(node)))
1509 1510
1510 1511 def debugindexdot(ui, repo, file_):
1511 1512 """dump an index DAG as a graphviz dot file"""
1512 1513 r = None
1513 1514 if repo:
1514 1515 filelog = repo.file(file_)
1515 1516 if len(filelog):
1516 1517 r = filelog
1517 1518 if not r:
1518 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1519 r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
1519 1520 ui.write("digraph G {\n")
1520 1521 for i in r:
1521 1522 node = r.node(i)
1522 1523 pp = r.parents(node)
1523 1524 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1524 1525 if pp[1] != nullid:
1525 1526 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1526 1527 ui.write("}\n")
1527 1528
1528 1529 def debuginstall(ui):
1529 1530 '''test Mercurial installation
1530 1531
1531 1532 Returns 0 on success.
1532 1533 '''
1533 1534
1534 1535 def writetemp(contents):
1535 1536 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1536 1537 f = os.fdopen(fd, "wb")
1537 1538 f.write(contents)
1538 1539 f.close()
1539 1540 return name
1540 1541
1541 1542 problems = 0
1542 1543
1543 1544 # encoding
1544 1545 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
1545 1546 try:
1546 1547 encoding.fromlocal("test")
1547 1548 except util.Abort, inst:
1548 1549 ui.write(" %s\n" % inst)
1549 1550 ui.write(_(" (check that your locale is properly set)\n"))
1550 1551 problems += 1
1551 1552
1552 1553 # compiled modules
1553 1554 ui.status(_("Checking installed modules (%s)...\n")
1554 1555 % os.path.dirname(__file__))
1555 1556 try:
1556 1557 import bdiff, mpatch, base85, osutil
1557 1558 except Exception, inst:
1558 1559 ui.write(" %s\n" % inst)
1559 1560 ui.write(_(" One or more extensions could not be found"))
1560 1561 ui.write(_(" (check that you compiled the extensions)\n"))
1561 1562 problems += 1
1562 1563
1563 1564 # templates
1564 1565 ui.status(_("Checking templates...\n"))
1565 1566 try:
1566 1567 import templater
1567 1568 templater.templater(templater.templatepath("map-cmdline.default"))
1568 1569 except Exception, inst:
1569 1570 ui.write(" %s\n" % inst)
1570 1571 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
1571 1572 problems += 1
1572 1573
1573 1574 # editor
1574 1575 ui.status(_("Checking commit editor...\n"))
1575 1576 editor = ui.geteditor()
1576 1577 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
1577 1578 if not cmdpath:
1578 1579 if editor == 'vi':
1579 1580 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1580 1581 ui.write(_(" (specify a commit editor in your configuration"
1581 1582 " file)\n"))
1582 1583 else:
1583 1584 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1584 1585 ui.write(_(" (specify a commit editor in your configuration"
1585 1586 " file)\n"))
1586 1587 problems += 1
1587 1588
1588 1589 # check username
1589 1590 ui.status(_("Checking username...\n"))
1590 1591 try:
1591 1592 ui.username()
1592 1593 except util.Abort, e:
1593 1594 ui.write(" %s\n" % e)
1594 1595 ui.write(_(" (specify a username in your configuration file)\n"))
1595 1596 problems += 1
1596 1597
1597 1598 if not problems:
1598 1599 ui.status(_("No problems detected\n"))
1599 1600 else:
1600 1601 ui.write(_("%s problems detected,"
1601 1602 " please check your install!\n") % problems)
1602 1603
1603 1604 return problems
1604 1605
1605 1606 def debugrename(ui, repo, file1, *pats, **opts):
1606 1607 """dump rename information"""
1607 1608
1608 1609 ctx = cmdutil.revsingle(repo, opts.get('rev'))
1609 1610 m = cmdutil.match(repo, (file1,) + pats, opts)
1610 1611 for abs in ctx.walk(m):
1611 1612 fctx = ctx[abs]
1612 1613 o = fctx.filelog().renamed(fctx.filenode())
1613 1614 rel = m.rel(abs)
1614 1615 if o:
1615 1616 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1616 1617 else:
1617 1618 ui.write(_("%s not renamed\n") % rel)
1618 1619
1619 1620 def debugwalk(ui, repo, *pats, **opts):
1620 1621 """show how files match on given patterns"""
1621 1622 m = cmdutil.match(repo, pats, opts)
1622 1623 items = list(repo.walk(m))
1623 1624 if not items:
1624 1625 return
1625 1626 fmt = 'f %%-%ds %%-%ds %%s' % (
1626 1627 max([len(abs) for abs in items]),
1627 1628 max([len(m.rel(abs)) for abs in items]))
1628 1629 for abs in items:
1629 1630 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1630 1631 ui.write("%s\n" % line.rstrip())
1631 1632
1632 1633 def debugwireargs(ui, repopath, *vals, **opts):
1633 1634 repo = hg.repository(hg.remoteui(ui, opts), repopath)
1634 1635 for opt in remoteopts:
1635 1636 del opts[opt[1]]
1636 1637 args = {}
1637 1638 for k, v in opts.iteritems():
1638 1639 if v:
1639 1640 args[k] = v
1640 1641 # run twice to check that we don't mess up the stream for the next command
1641 1642 res1 = repo.debugwireargs(*vals, **args)
1642 1643 res2 = repo.debugwireargs(*vals, **args)
1643 1644 ui.write("%s\n" % res1)
1644 1645 if res1 != res2:
1645 1646 ui.warn("%s\n" % res2)
1646 1647
1647 1648 def diff(ui, repo, *pats, **opts):
1648 1649 """diff repository (or selected files)
1649 1650
1650 1651 Show differences between revisions for the specified files.
1651 1652
1652 1653 Differences between files are shown using the unified diff format.
1653 1654
1654 1655 .. note::
1655 1656 diff may generate unexpected results for merges, as it will
1656 1657 default to comparing against the working directory's first
1657 1658 parent changeset if no revisions are specified.
1658 1659
1659 1660 When two revision arguments are given, then changes are shown
1660 1661 between those revisions. If only one revision is specified then
1661 1662 that revision is compared to the working directory, and, when no
1662 1663 revisions are specified, the working directory files are compared
1663 1664 to its parent.
1664 1665
1665 1666 Alternatively you can specify -c/--change with a revision to see
1666 1667 the changes in that changeset relative to its first parent.
1667 1668
1668 1669 Without the -a/--text option, diff will avoid generating diffs of
1669 1670 files it detects as binary. With -a, diff will generate a diff
1670 1671 anyway, probably with undesirable results.
1671 1672
1672 1673 Use the -g/--git option to generate diffs in the git extended diff
1673 1674 format. For more information, read :hg:`help diffs`.
1674 1675
1675 1676 Returns 0 on success.
1676 1677 """
1677 1678
1678 1679 revs = opts.get('rev')
1679 1680 change = opts.get('change')
1680 1681 stat = opts.get('stat')
1681 1682 reverse = opts.get('reverse')
1682 1683
1683 1684 if revs and change:
1684 1685 msg = _('cannot specify --rev and --change at the same time')
1685 1686 raise util.Abort(msg)
1686 1687 elif change:
1687 1688 node2 = cmdutil.revsingle(repo, change, None).node()
1688 1689 node1 = repo[node2].p1().node()
1689 1690 else:
1690 1691 node1, node2 = cmdutil.revpair(repo, revs)
1691 1692
1692 1693 if reverse:
1693 1694 node1, node2 = node2, node1
1694 1695
1695 1696 diffopts = patch.diffopts(ui, opts)
1696 1697 m = cmdutil.match(repo, pats, opts)
1697 1698 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
1698 1699 listsubrepos=opts.get('subrepos'))
1699 1700
1700 1701 def export(ui, repo, *changesets, **opts):
1701 1702 """dump the header and diffs for one or more changesets
1702 1703
1703 1704 Print the changeset header and diffs for one or more revisions.
1704 1705
1705 1706 The information shown in the changeset header is: author, date,
1706 1707 branch name (if non-default), changeset hash, parent(s) and commit
1707 1708 comment.
1708 1709
1709 1710 .. note::
1710 1711 export may generate unexpected diff output for merge
1711 1712 changesets, as it will compare the merge changeset against its
1712 1713 first parent only.
1713 1714
1714 1715 Output may be to a file, in which case the name of the file is
1715 1716 given using a format string. The formatting rules are as follows:
1716 1717
1717 1718 :``%%``: literal "%" character
1718 1719 :``%H``: changeset hash (40 hexadecimal digits)
1719 1720 :``%N``: number of patches being generated
1720 1721 :``%R``: changeset revision number
1721 1722 :``%b``: basename of the exporting repository
1722 1723 :``%h``: short-form changeset hash (12 hexadecimal digits)
1723 1724 :``%n``: zero-padded sequence number, starting at 1
1724 1725 :``%r``: zero-padded changeset revision number
1725 1726
1726 1727 Without the -a/--text option, export will avoid generating diffs
1727 1728 of files it detects as binary. With -a, export will generate a
1728 1729 diff anyway, probably with undesirable results.
1729 1730
1730 1731 Use the -g/--git option to generate diffs in the git extended diff
1731 1732 format. See :hg:`help diffs` for more information.
1732 1733
1733 1734 With the --switch-parent option, the diff will be against the
1734 1735 second parent. It can be useful to review a merge.
1735 1736
1736 1737 Returns 0 on success.
1737 1738 """
1738 1739 changesets += tuple(opts.get('rev', []))
1739 1740 if not changesets:
1740 1741 raise util.Abort(_("export requires at least one changeset"))
1741 1742 revs = cmdutil.revrange(repo, changesets)
1742 1743 if len(revs) > 1:
1743 1744 ui.note(_('exporting patches:\n'))
1744 1745 else:
1745 1746 ui.note(_('exporting patch:\n'))
1746 1747 cmdutil.export(repo, revs, template=opts.get('output'),
1747 1748 switch_parent=opts.get('switch_parent'),
1748 1749 opts=patch.diffopts(ui, opts))
1749 1750
1750 1751 def forget(ui, repo, *pats, **opts):
1751 1752 """forget the specified files on the next commit
1752 1753
1753 1754 Mark the specified files so they will no longer be tracked
1754 1755 after the next commit.
1755 1756
1756 1757 This only removes files from the current branch, not from the
1757 1758 entire project history, and it does not delete them from the
1758 1759 working directory.
1759 1760
1760 1761 To undo a forget before the next commit, see :hg:`add`.
1761 1762
1762 1763 Returns 0 on success.
1763 1764 """
1764 1765
1765 1766 if not pats:
1766 1767 raise util.Abort(_('no files specified'))
1767 1768
1768 1769 m = cmdutil.match(repo, pats, opts)
1769 1770 s = repo.status(match=m, clean=True)
1770 1771 forget = sorted(s[0] + s[1] + s[3] + s[6])
1771 1772 errs = 0
1772 1773
1773 1774 for f in m.files():
1774 1775 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
1775 1776 ui.warn(_('not removing %s: file is already untracked\n')
1776 1777 % m.rel(f))
1777 1778 errs = 1
1778 1779
1779 1780 for f in forget:
1780 1781 if ui.verbose or not m.exact(f):
1781 1782 ui.status(_('removing %s\n') % m.rel(f))
1782 1783
1783 1784 repo[None].remove(forget, unlink=False)
1784 1785 return errs
1785 1786
1786 1787 def grep(ui, repo, pattern, *pats, **opts):
1787 1788 """search for a pattern in specified files and revisions
1788 1789
1789 1790 Search revisions of files for a regular expression.
1790 1791
1791 1792 This command behaves differently than Unix grep. It only accepts
1792 1793 Python/Perl regexps. It searches repository history, not the
1793 1794 working directory. It always prints the revision number in which a
1794 1795 match appears.
1795 1796
1796 1797 By default, grep only prints output for the first revision of a
1797 1798 file in which it finds a match. To get it to print every revision
1798 1799 that contains a change in match status ("-" for a match that
1799 1800 becomes a non-match, or "+" for a non-match that becomes a match),
1800 1801 use the --all flag.
1801 1802
1802 1803 Returns 0 if a match is found, 1 otherwise.
1803 1804 """
1804 1805 reflags = 0
1805 1806 if opts.get('ignore_case'):
1806 1807 reflags |= re.I
1807 1808 try:
1808 1809 regexp = re.compile(pattern, reflags)
1809 1810 except re.error, inst:
1810 1811 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1811 1812 return 1
1812 1813 sep, eol = ':', '\n'
1813 1814 if opts.get('print0'):
1814 1815 sep = eol = '\0'
1815 1816
1816 1817 getfile = util.lrucachefunc(repo.file)
1817 1818
1818 1819 def matchlines(body):
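# added note: matchlines() yields (linenum, colstart, colend, line) for
# every match, e.g. for body "foo\nbar foo\n" and pattern "foo" it yields
# (1, 0, 3, 'foo') and (2, 4, 7, 'bar foo').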
1819 1820 begin = 0
1820 1821 linenum = 0
1821 1822 while True:
1822 1823 match = regexp.search(body, begin)
1823 1824 if not match:
1824 1825 break
1825 1826 mstart, mend = match.span()
1826 1827 linenum += body.count('\n', begin, mstart) + 1
1827 1828 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1828 1829 begin = body.find('\n', mend) + 1 or len(body)
1829 1830 lend = begin - 1
1830 1831 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1831 1832
1832 1833 class linestate(object):
1833 1834 def __init__(self, line, linenum, colstart, colend):
1834 1835 self.line = line
1835 1836 self.linenum = linenum
1836 1837 self.colstart = colstart
1837 1838 self.colend = colend
1838 1839
1839 1840 def __hash__(self):
1840 1841 return hash((self.linenum, self.line))
1841 1842
1842 1843 def __eq__(self, other):
1843 1844 return self.line == other.line
1844 1845
1845 1846 matches = {}
1846 1847 copies = {}
1847 1848 def grepbody(fn, rev, body):
1848 1849 matches[rev].setdefault(fn, [])
1849 1850 m = matches[rev][fn]
1850 1851 for lnum, cstart, cend, line in matchlines(body):
1851 1852 s = linestate(line, lnum, cstart, cend)
1852 1853 m.append(s)
1853 1854
1854 1855 def difflinestates(a, b):
1855 1856 sm = difflib.SequenceMatcher(None, a, b)
1856 1857 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1857 1858 if tag == 'insert':
1858 1859 for i in xrange(blo, bhi):
1859 1860 yield ('+', b[i])
1860 1861 elif tag == 'delete':
1861 1862 for i in xrange(alo, ahi):
1862 1863 yield ('-', a[i])
1863 1864 elif tag == 'replace':
1864 1865 for i in xrange(alo, ahi):
1865 1866 yield ('-', a[i])
1866 1867 for i in xrange(blo, bhi):
1867 1868 yield ('+', b[i])
1868 1869
1869 1870 def display(fn, ctx, pstates, states):
1870 1871 rev = ctx.rev()
1871 1872 datefunc = ui.quiet and util.shortdate or util.datestr
1872 1873 found = False
1873 1874 filerevmatches = {}
1874 1875 def binary():
1875 1876 flog = getfile(fn)
1876 1877 return util.binary(flog.read(ctx.filenode(fn)))
1877 1878
1878 1879 if opts.get('all'):
1879 1880 iter = difflinestates(pstates, states)
1880 1881 else:
1881 1882 iter = [('', l) for l in states]
1882 1883 for change, l in iter:
1883 1884 cols = [fn, str(rev)]
1884 1885 before, match, after = None, None, None
1885 1886 if opts.get('line_number'):
1886 1887 cols.append(str(l.linenum))
1887 1888 if opts.get('all'):
1888 1889 cols.append(change)
1889 1890 if opts.get('user'):
1890 1891 cols.append(ui.shortuser(ctx.user()))
1891 1892 if opts.get('date'):
1892 1893 cols.append(datefunc(ctx.date()))
1893 1894 if opts.get('files_with_matches'):
1894 1895 c = (fn, rev)
1895 1896 if c in filerevmatches:
1896 1897 continue
1897 1898 filerevmatches[c] = 1
1898 1899 else:
1899 1900 before = l.line[:l.colstart]
1900 1901 match = l.line[l.colstart:l.colend]
1901 1902 after = l.line[l.colend:]
1902 1903 ui.write(sep.join(cols))
1903 1904 if before is not None:
1904 1905 if not opts.get('text') and binary():
1905 1906 ui.write(sep + " Binary file matches")
1906 1907 else:
1907 1908 ui.write(sep + before)
1908 1909 ui.write(match, label='grep.match')
1909 1910 ui.write(after)
1910 1911 ui.write(eol)
1911 1912 found = True
1912 1913 return found
1913 1914
1914 1915 skip = {}
1915 1916 revfiles = {}
1916 1917 matchfn = cmdutil.match(repo, pats, opts)
1917 1918 found = False
1918 1919 follow = opts.get('follow')
1919 1920
1920 1921 def prep(ctx, fns):
1921 1922 rev = ctx.rev()
1922 1923 pctx = ctx.p1()
1923 1924 parent = pctx.rev()
1924 1925 matches.setdefault(rev, {})
1925 1926 matches.setdefault(parent, {})
1926 1927 files = revfiles.setdefault(rev, [])
1927 1928 for fn in fns:
1928 1929 flog = getfile(fn)
1929 1930 try:
1930 1931 fnode = ctx.filenode(fn)
1931 1932 except error.LookupError:
1932 1933 continue
1933 1934
1934 1935 copied = flog.renamed(fnode)
1935 1936 copy = follow and copied and copied[0]
1936 1937 if copy:
1937 1938 copies.setdefault(rev, {})[fn] = copy
1938 1939 if fn in skip:
1939 1940 if copy:
1940 1941 skip[copy] = True
1941 1942 continue
1942 1943 files.append(fn)
1943 1944
1944 1945 if fn not in matches[rev]:
1945 1946 grepbody(fn, rev, flog.read(fnode))
1946 1947
1947 1948 pfn = copy or fn
1948 1949 if pfn not in matches[parent]:
1949 1950 try:
1950 1951 fnode = pctx.filenode(pfn)
1951 1952 grepbody(pfn, parent, flog.read(fnode))
1952 1953 except error.LookupError:
1953 1954 pass
1954 1955
1955 1956 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
1956 1957 rev = ctx.rev()
1957 1958 parent = ctx.p1().rev()
1958 1959 for fn in sorted(revfiles.get(rev, [])):
1959 1960 states = matches[rev][fn]
1960 1961 copy = copies.get(rev, {}).get(fn)
1961 1962 if fn in skip:
1962 1963 if copy:
1963 1964 skip[copy] = True
1964 1965 continue
1965 1966 pstates = matches.get(parent, {}).get(copy or fn, [])
1966 1967 if pstates or states:
1967 1968 r = display(fn, ctx, pstates, states)
1968 1969 found = found or r
1969 1970 if r and not opts.get('all'):
1970 1971 skip[fn] = True
1971 1972 if copy:
1972 1973 skip[copy] = True
1973 1974 del matches[rev]
1974 1975 del revfiles[rev]
1975 1976
1976 1977 return not found
1977 1978
1978 1979 def heads(ui, repo, *branchrevs, **opts):
1979 1980 """show current repository heads or show branch heads
1980 1981
1981 1982 With no arguments, show all repository branch heads.
1982 1983
1983 1984 Repository "heads" are changesets with no child changesets. They are
1984 1985 where development generally takes place and are the usual targets
1985 1986 for update and merge operations. Branch heads are changesets that have
1986 1987 no child changeset on the same branch.
1987 1988
1988 1989 If one or more REVs are given, only branch heads on the branches
1989 1990 associated with the specified changesets are shown.
1990 1991
1991 1992 If -c/--closed is specified, also show branch heads marked closed
1992 1993 (see :hg:`commit --close-branch`).
1993 1994
1994 1995 If STARTREV is specified, only those heads that are descendants of
1995 1996 STARTREV will be displayed.
1996 1997
1997 1998 If -t/--topo is specified, named branch mechanics will be ignored and only
1998 1999 changesets without children will be shown.
1999 2000
2000 2001 Returns 0 if matching heads are found, 1 if not.
2001 2002 """
2002 2003
2003 2004 start = None
2004 2005 if 'rev' in opts:
2005 2006 start = cmdutil.revsingle(repo, opts['rev'], None).node()
2006 2007
2007 2008 if opts.get('topo'):
2008 2009 heads = [repo[h] for h in repo.heads(start)]
2009 2010 else:
2010 2011 heads = []
2011 2012 for b, ls in repo.branchmap().iteritems():
2012 2013 if start is None:
2013 2014 heads += [repo[h] for h in ls]
2014 2015 continue
2015 2016 startrev = repo.changelog.rev(start)
2016 2017 descendants = set(repo.changelog.descendants(startrev))
2017 2018 descendants.add(startrev)
2018 2019 rev = repo.changelog.rev
2019 2020 heads += [repo[h] for h in ls if rev(h) in descendants]
2020 2021
2021 2022 if branchrevs:
2022 2023 branches = set(repo[br].branch() for br in branchrevs)
2023 2024 heads = [h for h in heads if h.branch() in branches]
2024 2025
2025 2026 if not opts.get('closed'):
2026 2027 heads = [h for h in heads if not h.extra().get('close')]
2027 2028
2028 2029 if opts.get('active') and branchrevs:
2029 2030 dagheads = repo.heads(start)
2030 2031 heads = [h for h in heads if h.node() in dagheads]
2031 2032
2032 2033 if branchrevs:
2033 2034 haveheads = set(h.branch() for h in heads)
2034 2035 if branches - haveheads:
2035 2036 headless = ', '.join(b for b in branches - haveheads)
2036 2037 msg = _('no open branch heads found on branches %s')
2037 2038 if opts.get('rev'):
2038 2039 msg += _(' (started at %s)') % opts['rev']
2039 2040 ui.warn((msg + '\n') % headless)
2040 2041
2041 2042 if not heads:
2042 2043 return 1
2043 2044
2044 2045 heads = sorted(heads, key=lambda x: -x.rev())
2045 2046 displayer = cmdutil.show_changeset(ui, repo, opts)
2046 2047 for ctx in heads:
2047 2048 displayer.show(ctx)
2048 2049 displayer.close()
2049 2050
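# A minimal sketch of how branch heads are restricted to descendants of a
# start revision, mirroring the branchmap loop in heads() above. It relies
# only on the repo/changelog calls already used there; `descendantheads` is a
# hypothetical helper name, not Mercurial API.
def descendantheads(repo, start):
    startrev = repo.changelog.rev(start)
    descendants = set(repo.changelog.descendants(startrev))
    descendants.add(startrev)            # a rev counts as its own descendant here
    heads = []
    for branch, nodes in repo.branchmap().iteritems():
        heads += [repo[n] for n in nodes
                  if repo.changelog.rev(n) in descendants]
    return heads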
2050 2051 def help_(ui, name=None, with_version=False, unknowncmd=False, full=True):
2051 2052 """show help for a given topic or a help overview
2052 2053
2053 2054 With no arguments, print a list of commands with short help messages.
2054 2055
2055 2056 Given a topic, extension, or command name, print help for that
2056 2057 topic.
2057 2058
2058 2059 Returns 0 if successful.
2059 2060 """
2060 2061 option_lists = []
2061 2062 textwidth = min(ui.termwidth(), 80) - 2
2062 2063
2063 2064 def addglobalopts(aliases):
2064 2065 if ui.verbose:
2065 2066 option_lists.append((_("global options:"), globalopts))
2066 2067 if name == 'shortlist':
2067 2068 option_lists.append((_('use "hg help" for the full list '
2068 2069 'of commands'), ()))
2069 2070 else:
2070 2071 if name == 'shortlist':
2071 2072 msg = _('use "hg help" for the full list of commands '
2072 2073 'or "hg -v" for details')
2073 2074 elif name and not full:
2074 2075 msg = _('use "hg help %s" to show the full help text') % name
2075 2076 elif aliases:
2076 2077 msg = _('use "hg -v help%s" to show builtin aliases and '
2077 2078 'global options') % (name and " " + name or "")
2078 2079 else:
2079 2080 msg = _('use "hg -v help %s" to show global options') % name
2080 2081 option_lists.append((msg, ()))
2081 2082
2082 2083 def helpcmd(name):
2083 2084 if with_version:
2084 2085 version_(ui)
2085 2086 ui.write('\n')
2086 2087
2087 2088 try:
2088 2089 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
2089 2090 except error.AmbiguousCommand, inst:
2090 2091 # py3k fix: except vars can't be used outside the scope of the
2091 2092 # except block, nor can they be used inside a lambda. python issue4617
2092 2093 prefix = inst.args[0]
2093 2094 select = lambda c: c.lstrip('^').startswith(prefix)
2094 2095 helplist(_('list of commands:\n\n'), select)
2095 2096 return
2096 2097
2097 2098 # check if it's an invalid alias and display its error if it is
2098 2099 if getattr(entry[0], 'badalias', False):
2099 2100 if not unknowncmd:
2100 2101 entry[0](ui)
2101 2102 return
2102 2103
2103 2104 # synopsis
2104 2105 if len(entry) > 2:
2105 2106 if entry[2].startswith('hg'):
2106 2107 ui.write("%s\n" % entry[2])
2107 2108 else:
2108 2109 ui.write('hg %s %s\n' % (aliases[0], entry[2]))
2109 2110 else:
2110 2111 ui.write('hg %s\n' % aliases[0])
2111 2112
2112 2113 # aliases
2113 2114 if full and not ui.quiet and len(aliases) > 1:
2114 2115 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
2115 2116
2116 2117 # description
2117 2118 doc = gettext(entry[0].__doc__)
2118 2119 if not doc:
2119 2120 doc = _("(no help text available)")
2120 2121 if hasattr(entry[0], 'definition'): # aliased command
2121 2122 if entry[0].definition.startswith('!'): # shell alias
2122 2123 doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
2123 2124 else:
2124 2125 doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
2125 2126 if ui.quiet or not full:
2126 2127 doc = doc.splitlines()[0]
2127 2128 keep = ui.verbose and ['verbose'] or []
2128 2129 formatted, pruned = minirst.format(doc, textwidth, keep=keep)
2129 2130 ui.write("\n%s\n" % formatted)
2130 2131 if pruned:
2131 2132 ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name)
2132 2133
2133 2134 if not ui.quiet:
2134 2135 # options
2135 2136 if entry[1]:
2136 2137 option_lists.append((_("options:\n"), entry[1]))
2137 2138
2138 2139 addglobalopts(False)
2139 2140
2140 2141 def helplist(header, select=None):
2141 2142 h = {}
2142 2143 cmds = {}
2143 2144 for c, e in table.iteritems():
2144 2145 f = c.split("|", 1)[0]
2145 2146 if select and not select(f):
2146 2147 continue
2147 2148 if (not select and name != 'shortlist' and
2148 2149 e[0].__module__ != __name__):
2149 2150 continue
2150 2151 if name == "shortlist" and not f.startswith("^"):
2151 2152 continue
2152 2153 f = f.lstrip("^")
2153 2154 if not ui.debugflag and f.startswith("debug"):
2154 2155 continue
2155 2156 doc = e[0].__doc__
2156 2157 if doc and 'DEPRECATED' in doc and not ui.verbose:
2157 2158 continue
2158 2159 doc = gettext(doc)
2159 2160 if not doc:
2160 2161 doc = _("(no help text available)")
2161 2162 h[f] = doc.splitlines()[0].rstrip()
2162 2163 cmds[f] = c.lstrip("^")
2163 2164
2164 2165 if not h:
2165 2166 ui.status(_('no commands defined\n'))
2166 2167 return
2167 2168
2168 2169 ui.status(header)
2169 2170 fns = sorted(h)
2170 2171 m = max(map(len, fns))
2171 2172 for f in fns:
2172 2173 if ui.verbose:
2173 2174 commands = cmds[f].replace("|",", ")
2174 2175 ui.write(" %s:\n %s\n"%(commands, h[f]))
2175 2176 else:
2176 2177 ui.write('%s\n' % (util.wrap(h[f], textwidth,
2177 2178 initindent=' %-*s ' % (m, f),
2178 2179 hangindent=' ' * (m + 4))))
2179 2180
2180 2181 if not ui.quiet:
2181 2182 addglobalopts(True)
2182 2183
2183 2184 def helptopic(name):
2184 2185 for names, header, doc in help.helptable:
2185 2186 if name in names:
2186 2187 break
2187 2188 else:
2188 2189 raise error.UnknownCommand(name)
2189 2190
2190 2191 # description
2191 2192 if not doc:
2192 2193 doc = _("(no help text available)")
2193 2194 if hasattr(doc, '__call__'):
2194 2195 doc = doc()
2195 2196
2196 2197 ui.write("%s\n\n" % header)
2197 2198 ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))
2198 2199
2199 2200 def helpext(name):
2200 2201 try:
2201 2202 mod = extensions.find(name)
2202 2203 doc = gettext(mod.__doc__) or _('no help text available')
2203 2204 except KeyError:
2204 2205 mod = None
2205 2206 doc = extensions.disabledext(name)
2206 2207 if not doc:
2207 2208 raise error.UnknownCommand(name)
2208 2209
2209 2210 if '\n' not in doc:
2210 2211 head, tail = doc, ""
2211 2212 else:
2212 2213 head, tail = doc.split('\n', 1)
2213 2214 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
2214 2215 if tail:
2215 2216 ui.write(minirst.format(tail, textwidth))
2216 2217 ui.status('\n\n')
2217 2218
2218 2219 if mod:
2219 2220 try:
2220 2221 ct = mod.cmdtable
2221 2222 except AttributeError:
2222 2223 ct = {}
2223 2224 modcmds = set([c.split('|', 1)[0] for c in ct])
2224 2225 helplist(_('list of commands:\n\n'), modcmds.__contains__)
2225 2226 else:
2226 2227 ui.write(_('use "hg help extensions" for information on enabling '
2227 2228 'extensions\n'))
2228 2229
2229 2230 def helpextcmd(name):
2230 2231 cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict'))
2231 2232 doc = gettext(mod.__doc__).splitlines()[0]
2232 2233
2233 2234 msg = help.listexts(_("'%s' is provided by the following "
2234 2235 "extension:") % cmd, {ext: doc}, len(ext),
2235 2236 indent=4)
2236 2237 ui.write(minirst.format(msg, textwidth))
2237 2238 ui.write('\n\n')
2238 2239 ui.write(_('use "hg help extensions" for information on enabling '
2239 2240 'extensions\n'))
2240 2241
2241 2242 help.addtopichook('revsets', revset.makedoc)
2242 2243 help.addtopichook('templates', templatekw.makedoc)
2243 2244 help.addtopichook('templates', templatefilters.makedoc)
2244 2245
2245 2246 if name and name != 'shortlist':
2246 2247 i = None
2247 2248 if unknowncmd:
2248 2249 queries = (helpextcmd,)
2249 2250 else:
2250 2251 queries = (helptopic, helpcmd, helpext, helpextcmd)
2251 2252 for f in queries:
2252 2253 try:
2253 2254 f(name)
2254 2255 i = None
2255 2256 break
2256 2257 except error.UnknownCommand, inst:
2257 2258 i = inst
2258 2259 if i:
2259 2260 raise i
2260 2261
2261 2262 else:
2262 2263 # program name
2263 2264 if ui.verbose or with_version:
2264 2265 version_(ui)
2265 2266 else:
2266 2267 ui.status(_("Mercurial Distributed SCM\n"))
2267 2268 ui.status('\n')
2268 2269
2269 2270 # list of commands
2270 2271 if name == "shortlist":
2271 2272 header = _('basic commands:\n\n')
2272 2273 else:
2273 2274 header = _('list of commands:\n\n')
2274 2275
2275 2276 helplist(header)
2276 2277 if name != 'shortlist':
2277 2278 exts, maxlength = extensions.enabled()
2278 2279 text = help.listexts(_('enabled extensions:'), exts, maxlength)
2279 2280 if text:
2280 2281 ui.write("\n%s\n" % minirst.format(text, textwidth))
2281 2282
2282 2283 # list all option lists
2283 2284 opt_output = []
2284 2285 multioccur = False
2285 2286 for title, options in option_lists:
2286 2287 opt_output.append(("\n%s" % title, None))
2287 2288 for option in options:
2288 2289 if len(option) == 5:
2289 2290 shortopt, longopt, default, desc, optlabel = option
2290 2291 else:
2291 2292 shortopt, longopt, default, desc = option
2292 2293 optlabel = _("VALUE") # default label
2293 2294
2294 2295 if _("DEPRECATED") in desc and not ui.verbose:
2295 2296 continue
2296 2297 if isinstance(default, list):
2297 2298 numqualifier = " %s [+]" % optlabel
2298 2299 multioccur = True
2299 2300 elif (default is not None) and not isinstance(default, bool):
2300 2301 numqualifier = " %s" % optlabel
2301 2302 else:
2302 2303 numqualifier = ""
2303 2304 opt_output.append(("%2s%s" %
2304 2305 (shortopt and "-%s" % shortopt,
2305 2306 longopt and " --%s%s" %
2306 2307 (longopt, numqualifier)),
2307 2308 "%s%s" % (desc,
2308 2309 default
2309 2310 and _(" (default: %s)") % default
2310 2311 or "")))
2311 2312 if multioccur:
2312 2313 msg = _("\n[+] marked option can be specified multiple times")
2313 2314 if ui.verbose and name != 'shortlist':
2314 2315 opt_output.append((msg, None))
2315 2316 else:
2316 2317 opt_output.insert(-1, (msg, None))
2317 2318
2318 2319 if not name:
2319 2320 ui.write(_("\nadditional help topics:\n\n"))
2320 2321 topics = []
2321 2322 for names, header, doc in help.helptable:
2322 2323 topics.append((sorted(names, key=len, reverse=True)[0], header))
2323 2324 topics_len = max([len(s[0]) for s in topics])
2324 2325 for t, desc in topics:
2325 2326 ui.write(" %-*s %s\n" % (topics_len, t, desc))
2326 2327
2327 2328 if opt_output:
2328 2329 colwidth = encoding.colwidth
2329 2330 # normalize: (opt or message, desc or None, width of opt)
2330 2331 entries = [desc and (opt, desc, colwidth(opt)) or (opt, None, 0)
2331 2332 for opt, desc in opt_output]
2332 2333 hanging = max([e[2] for e in entries])
2333 2334 for opt, desc, width in entries:
2334 2335 if desc:
2335 2336 initindent = ' %s%s ' % (opt, ' ' * (hanging - width))
2336 2337 hangindent = ' ' * (hanging + 3)
2337 2338 ui.write('%s\n' % (util.wrap(desc, textwidth,
2338 2339 initindent=initindent,
2339 2340 hangindent=hangindent)))
2340 2341 else:
2341 2342 ui.write("%s\n" % opt)
2342 2343
2343 2344 def identify(ui, repo, source=None, rev=None,
2344 2345 num=None, id=None, branch=None, tags=None, bookmarks=None):
2345 2346 """identify the working copy or specified revision
2346 2347
2347 2348 Print a summary identifying the repository state at REV using one or
2348 2349 two parent hash identifiers, followed by a "+" if the working
2349 2350 directory has uncommitted changes, the branch name (if not default),
2350 2351 a list of tags, and a list of bookmarks.
2351 2352
2352 2353 When REV is not given, print a summary of the current state of the
2353 2354 repository.
2354 2355
2355 2356 Specifying a path to a repository root or Mercurial bundle will
2356 2357 cause lookup to operate on that repository/bundle.
2357 2358
2358 2359 Returns 0 if successful.
2359 2360 """
2360 2361
2361 2362 if not repo and not source:
2362 2363 raise util.Abort(_("there is no Mercurial repository here "
2363 2364 "(.hg not found)"))
2364 2365
2365 2366 hexfunc = ui.debugflag and hex or short
2366 2367 default = not (num or id or branch or tags or bookmarks)
2367 2368 output = []
2368 2369 revs = []
2369 2370
2370 2371 if source:
2371 2372 source, branches = hg.parseurl(ui.expandpath(source))
2372 2373 repo = hg.repository(ui, source)
2373 2374 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
2374 2375
2375 2376 if not repo.local():
2376 2377 if num or branch or tags:
2377 2378 raise util.Abort(
2378 2379 _("can't query remote revision number, branch, or tags"))
2379 2380 if not rev and revs:
2380 2381 rev = revs[0]
2381 2382 if not rev:
2382 2383 rev = "tip"
2383 2384
2384 2385 remoterev = repo.lookup(rev)
2385 2386 if default or id:
2386 2387 output = [hexfunc(remoterev)]
2387 2388
2388 2389 def getbms():
2389 2390 bms = []
2390 2391
2391 2392 if 'bookmarks' in repo.listkeys('namespaces'):
2392 2393 hexremoterev = hex(remoterev)
2393 2394 bms = [bm for bm, bmr in repo.listkeys('bookmarks').iteritems()
2394 2395 if bmr == hexremoterev]
2395 2396
2396 2397 return bms
2397 2398
2398 2399 if bookmarks:
2399 2400 output.extend(getbms())
2400 2401 elif default and not ui.quiet:
2401 2402 # multiple bookmarks for a single parent separated by '/'
2402 2403 bm = '/'.join(getbms())
2403 2404 if bm:
2404 2405 output.append(bm)
2405 2406 else:
2406 2407 if not rev:
2407 2408 ctx = repo[None]
2408 2409 parents = ctx.parents()
2409 2410 changed = ""
2410 2411 if default or id or num:
2411 2412 changed = util.any(repo.status()) and "+" or ""
2412 2413 if default or id:
2413 2414 output = ["%s%s" %
2414 2415 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
2415 2416 if num:
2416 2417 output.append("%s%s" %
2417 2418 ('+'.join([str(p.rev()) for p in parents]), changed))
2418 2419 else:
2419 2420 ctx = cmdutil.revsingle(repo, rev)
2420 2421 if default or id:
2421 2422 output = [hexfunc(ctx.node())]
2422 2423 if num:
2423 2424 output.append(str(ctx.rev()))
2424 2425
2425 2426 if default and not ui.quiet:
2426 2427 b = ctx.branch()
2427 2428 if b != 'default':
2428 2429 output.append("(%s)" % b)
2429 2430
2430 2431 # multiple tags for a single parent separated by '/'
2431 2432 t = '/'.join(ctx.tags())
2432 2433 if t:
2433 2434 output.append(t)
2434 2435
2435 2436 # multiple bookmarks for a single parent separated by '/'
2436 2437 bm = '/'.join(ctx.bookmarks())
2437 2438 if bm:
2438 2439 output.append(bm)
2439 2440 else:
2440 2441 if branch:
2441 2442 output.append(ctx.branch())
2442 2443
2443 2444 if tags:
2444 2445 output.extend(ctx.tags())
2445 2446
2446 2447 if bookmarks:
2447 2448 output.extend(ctx.bookmarks())
2448 2449
2449 2450 ui.write("%s\n" % ' '.join(output))
2450 2451
2451 2452 def import_(ui, repo, patch1, *patches, **opts):
2452 2453 """import an ordered set of patches
2453 2454
2454 2455 Import a list of patches and commit them individually (unless
2455 2456 --no-commit is specified).
2456 2457
2457 2458 If there are outstanding changes in the working directory, import
2458 2459 will abort unless given the -f/--force flag.
2459 2460
2460 2461 You can import a patch straight from a mail message. Even patches
2461 2462 as attachments work (to use the body part, it must have type
2462 2463 text/plain or text/x-patch). The From and Subject headers of the
2463 2464 email message are used as the default committer and commit message.
2464 2465 All text/plain body parts before the first diff are added to the
2465 2466 commit message.
2466 2467
2467 2468 If the imported patch was generated by :hg:`export`, user and
2468 2469 description from patch override values from message headers and
2469 2470 body. Values given on command line with -m/--message and -u/--user
2470 2471 override these.
2471 2472
2472 2473 If --exact is specified, import will set the working directory to
2473 2474 the parent of each patch before applying it, and will abort if the
2474 2475 resulting changeset has a different ID than the one recorded in
2475 2476 the patch. This may happen due to character set problems or other
2476 2477 deficiencies in the text patch format.
2477 2478
2478 2479 With -s/--similarity, hg will attempt to discover renames and
2479 2480 copies in the patch in the same way as 'addremove'.
2480 2481
2481 2482 To read a patch from standard input, use "-" as the patch name. If
2482 2483 a URL is specified, the patch will be downloaded from it.
2483 2484 See :hg:`help dates` for a list of formats valid for -d/--date.
2484 2485
2485 2486 Returns 0 on success.
2486 2487 """
2487 2488 patches = (patch1,) + patches
2488 2489
2489 2490 date = opts.get('date')
2490 2491 if date:
2491 2492 opts['date'] = util.parsedate(date)
2492 2493
2493 2494 try:
2494 2495 sim = float(opts.get('similarity') or 0)
2495 2496 except ValueError:
2496 2497 raise util.Abort(_('similarity must be a number'))
2497 2498 if sim < 0 or sim > 100:
2498 2499 raise util.Abort(_('similarity must be between 0 and 100'))
2499 2500
2500 2501 if opts.get('exact') or not opts.get('force'):
2501 2502 cmdutil.bail_if_changed(repo)
2502 2503
2503 2504 d = opts["base"]
2504 2505 strip = opts["strip"]
2505 2506 wlock = lock = None
2506 2507 msgs = []
2507 2508
2508 2509 def tryone(ui, hunk):
2509 2510 tmpname, message, user, date, branch, nodeid, p1, p2 = \
2510 2511 patch.extract(ui, hunk)
2511 2512
2512 2513 if not tmpname:
2513 2514 return None
2514 2515 commitid = _('to working directory')
2515 2516
2516 2517 try:
2517 2518 cmdline_message = cmdutil.logmessage(opts)
2518 2519 if cmdline_message:
2519 2520 # pickup the cmdline msg
2520 2521 message = cmdline_message
2521 2522 elif message:
2522 2523 # pickup the patch msg
2523 2524 message = message.strip()
2524 2525 else:
2525 2526 # launch the editor
2526 2527 message = None
2527 2528 ui.debug('message:\n%s\n' % message)
2528 2529
2529 2530 wp = repo.parents()
2530 2531 if opts.get('exact'):
2531 2532 if not nodeid or not p1:
2532 2533 raise util.Abort(_('not a Mercurial patch'))
2533 2534 p1 = repo.lookup(p1)
2534 2535 p2 = repo.lookup(p2 or hex(nullid))
2535 2536
2536 2537 if p1 != wp[0].node():
2537 2538 hg.clean(repo, p1)
2538 2539 repo.dirstate.setparents(p1, p2)
2539 2540 elif p2:
2540 2541 try:
2541 2542 p1 = repo.lookup(p1)
2542 2543 p2 = repo.lookup(p2)
2543 2544 if p1 == wp[0].node():
2544 2545 repo.dirstate.setparents(p1, p2)
2545 2546 except error.RepoError:
2546 2547 pass
2547 2548 if opts.get('exact') or opts.get('import_branch'):
2548 2549 repo.dirstate.setbranch(branch or 'default')
2549 2550
2550 2551 files = {}
2551 2552 try:
2552 2553 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
2553 2554 files=files, eolmode=None)
2554 2555 finally:
2555 2556 files = cmdutil.updatedir(ui, repo, files,
2556 2557 similarity=sim / 100.0)
2557 2558 if opts.get('no_commit'):
2558 2559 if message:
2559 2560 msgs.append(message)
2560 2561 else:
2561 2562 if opts.get('exact'):
2562 2563 m = None
2563 2564 else:
2564 2565 m = cmdutil.matchfiles(repo, files or [])
2565 2566 n = repo.commit(message, opts.get('user') or user,
2566 2567 opts.get('date') or date, match=m,
2567 2568 editor=cmdutil.commiteditor)
2568 2569 if opts.get('exact'):
2569 2570 if hex(n) != nodeid:
2570 2571 repo.rollback()
2571 2572 raise util.Abort(_('patch is damaged'
2572 2573 ' or loses information'))
2573 2574 # Force a dirstate write so that the next transaction
2574 2575 # backs up an up-to-date file.
2575 2576 repo.dirstate.write()
2576 2577 if n:
2577 2578 commitid = short(n)
2578 2579
2579 2580 return commitid
2580 2581 finally:
2581 2582 os.unlink(tmpname)
2582 2583
2583 2584 try:
2584 2585 wlock = repo.wlock()
2585 2586 lock = repo.lock()
2586 2587 lastcommit = None
2587 2588 for p in patches:
2588 2589 pf = os.path.join(d, p)
2589 2590
2590 2591 if pf == '-':
2591 2592 ui.status(_("applying patch from stdin\n"))
2592 2593 pf = sys.stdin
2593 2594 else:
2594 2595 ui.status(_("applying %s\n") % p)
2595 2596 pf = url.open(ui, pf)
2596 2597
2597 2598 haspatch = False
2598 2599 for hunk in patch.split(pf):
2599 2600 commitid = tryone(ui, hunk)
2600 2601 if commitid:
2601 2602 haspatch = True
2602 2603 if lastcommit:
2603 2604 ui.status(_('applied %s\n') % lastcommit)
2604 2605 lastcommit = commitid
2605 2606
2606 2607 if not haspatch:
2607 2608 raise util.Abort(_('no diffs found'))
2608 2609
2609 2610 if msgs:
2610 2611 repo.opener('last-message.txt', 'wb').write('\n* * *\n'.join(msgs))
2611 2612 finally:
2612 2613 release(lock, wlock)
2613 2614
2614 2615 def incoming(ui, repo, source="default", **opts):
2615 2616 """show new changesets found in source
2616 2617
2617 2618 Show new changesets found in the specified path/URL or the default
2618 2619 pull location. These are the changesets that would have been pulled
2619 2620 if a pull was requested at the time you issued this command.
2620 2621
2621 2622 For a remote repository, using --bundle avoids downloading the
2622 2623 changesets twice if the incoming is followed by a pull.
2623 2624
2624 2625 See pull for valid source format details.
2625 2626
2626 2627 Returns 0 if there are incoming changes, 1 otherwise.
2627 2628 """
2628 2629 if opts.get('bundle') and opts.get('subrepos'):
2629 2630 raise util.Abort(_('cannot combine --bundle and --subrepos'))
2630 2631
2631 2632 if opts.get('bookmarks'):
2632 2633 source, branches = hg.parseurl(ui.expandpath(source),
2633 2634 opts.get('branch'))
2634 2635 other = hg.repository(hg.remoteui(repo, opts), source)
2635 2636 if 'bookmarks' not in other.listkeys('namespaces'):
2636 2637 ui.warn(_("remote doesn't support bookmarks\n"))
2637 2638 return 0
2638 2639 ui.status(_('comparing with %s\n') % url.hidepassword(source))
2639 2640 return bookmarks.diff(ui, repo, other)
2640 2641
2641 2642 ret = hg.incoming(ui, repo, source, opts)
2642 2643 return ret
2643 2644
2644 2645 def init(ui, dest=".", **opts):
2645 2646 """create a new repository in the given directory
2646 2647
2647 2648 Initialize a new repository in the given directory. If the given
2648 2649 directory does not exist, it will be created.
2649 2650
2650 2651 If no directory is given, the current directory is used.
2651 2652
2652 2653 It is possible to specify an ``ssh://`` URL as the destination.
2653 2654 See :hg:`help urls` for more information.
2654 2655
2655 2656 Returns 0 on success.
2656 2657 """
2657 2658 hg.repository(hg.remoteui(ui, opts), ui.expandpath(dest), create=1)
2658 2659
2659 2660 def locate(ui, repo, *pats, **opts):
2660 2661 """locate files matching specific patterns
2661 2662
2662 2663 Print files under Mercurial control in the working directory whose
2663 2664 names match the given patterns.
2664 2665
2665 2666 By default, this command searches all directories in the working
2666 2667 directory. To search just the current directory and its
2667 2668 subdirectories, use "--include .".
2668 2669
2669 2670 If no patterns are given to match, this command prints the names
2670 2671 of all files under Mercurial control in the working directory.
2671 2672
2672 2673 If you want to feed the output of this command into the "xargs"
2673 2674 command, use the -0 option to both this command and "xargs". This
2674 2675 will avoid the problem of "xargs" treating single filenames that
2675 2676 contain whitespace as multiple filenames.
2676 2677
2677 2678 Returns 0 if a match is found, 1 otherwise.
2678 2679 """
2679 2680 end = opts.get('print0') and '\0' or '\n'
2680 2681 rev = cmdutil.revsingle(repo, opts.get('rev'), None).node()
2681 2682
2682 2683 ret = 1
2683 2684 m = cmdutil.match(repo, pats, opts, default='relglob')
2684 2685 m.bad = lambda x, y: False
2685 2686 for abs in repo[rev].walk(m):
2686 2687 if not rev and abs not in repo.dirstate:
2687 2688 continue
2688 2689 if opts.get('fullpath'):
2689 2690 ui.write(repo.wjoin(abs), end)
2690 2691 else:
2691 2692 ui.write(((pats and m.rel(abs)) or abs), end)
2692 2693 ret = 0
2693 2694
2694 2695 return ret
2695 2696
2696 2697 def log(ui, repo, *pats, **opts):
2697 2698 """show revision history of entire repository or files
2698 2699
2699 2700 Print the revision history of the specified files or the entire
2700 2701 project.
2701 2702
2702 2703 File history is shown without following rename or copy history of
2703 2704 files. Use -f/--follow with a filename to follow history across
2704 2705 renames and copies. --follow without a filename will only show
2705 2706 ancestors or descendants of the starting revision. --follow-first
2706 2707 only follows the first parent of merge revisions.
2707 2708
2708 2709 If no revision range is specified, the default is ``tip:0`` unless
2709 2710 --follow is set, in which case the working directory parent is
2710 2711 used as the starting revision. You can specify a revision set for
2711 2712 log; see :hg:`help revsets` for more information.
2712 2713
2713 2714 See :hg:`help dates` for a list of formats valid for -d/--date.
2714 2715
2715 2716 By default this command prints revision number and changeset id,
2716 2717 tags, non-trivial parents, user, date and time, and a summary for
2717 2718 each commit. When the -v/--verbose switch is used, the list of
2718 2719 changed files and full commit message are shown.
2719 2720
2720 2721 .. note::
2721 2722 log -p/--patch may generate unexpected diff output for merge
2722 2723 changesets, as it will only compare the merge changeset against
2723 2724 its first parent. Also, only files different from BOTH parents
2724 2725 will appear in files:.
2725 2726
2726 2727 Returns 0 on success.
2727 2728 """
2728 2729
2729 2730 matchfn = cmdutil.match(repo, pats, opts)
2730 2731 limit = cmdutil.loglimit(opts)
2731 2732 count = 0
2732 2733
2733 2734 endrev = None
2734 2735 if opts.get('copies') and opts.get('rev'):
2735 2736 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2736 2737
2737 2738 df = False
2738 2739 if opts["date"]:
2739 2740 df = util.matchdate(opts["date"])
2740 2741
2741 2742 branches = opts.get('branch', []) + opts.get('only_branch', [])
2742 2743 opts['branch'] = [repo.lookupbranch(b) for b in branches]
2743 2744
2744 2745 displayer = cmdutil.show_changeset(ui, repo, opts, True)
2745 2746 def prep(ctx, fns):
2746 2747 rev = ctx.rev()
2747 2748 parents = [p for p in repo.changelog.parentrevs(rev)
2748 2749 if p != nullrev]
2749 2750 if opts.get('no_merges') and len(parents) == 2:
2750 2751 return
2751 2752 if opts.get('only_merges') and len(parents) != 2:
2752 2753 return
2753 2754 if opts.get('branch') and ctx.branch() not in opts['branch']:
2754 2755 return
2755 2756 if df and not df(ctx.date()[0]):
2756 2757 return
2757 2758 if opts['user'] and not [k for k in opts['user']
2758 2759 if k.lower() in ctx.user().lower()]:
2759 2760 return
2760 2761 if opts.get('keyword'):
2761 2762 for k in [kw.lower() for kw in opts['keyword']]:
2762 2763 if (k in ctx.user().lower() or
2763 2764 k in ctx.description().lower() or
2764 2765 k in " ".join(ctx.files()).lower()):
2765 2766 break
2766 2767 else:
2767 2768 return
2768 2769
2769 2770 copies = None
2770 2771 if opts.get('copies') and rev:
2771 2772 copies = []
2772 2773 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2773 2774 for fn in ctx.files():
2774 2775 rename = getrenamed(fn, rev)
2775 2776 if rename:
2776 2777 copies.append((fn, rename[0]))
2777 2778
2778 2779 revmatchfn = None
2779 2780 if opts.get('patch') or opts.get('stat'):
2780 2781 if opts.get('follow') or opts.get('follow_first'):
2781 2782 # note: this might be wrong when following through merges
2782 2783 revmatchfn = cmdutil.match(repo, fns, default='path')
2783 2784 else:
2784 2785 revmatchfn = matchfn
2785 2786
2786 2787 displayer.show(ctx, copies=copies, matchfn=revmatchfn)
2787 2788
2788 2789 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2789 2790 if count == limit:
2790 2791 break
2791 2792 if displayer.flush(ctx.rev()):
2792 2793 count += 1
2793 2794 displayer.close()
2794 2795
2795 2796 def manifest(ui, repo, node=None, rev=None):
2796 2797 """output the current or given revision of the project manifest
2797 2798
2798 2799 Print a list of version controlled files for the given revision.
2799 2800 If no revision is given, the first parent of the working directory
2800 2801 is used, or the null revision if no revision is checked out.
2801 2802
2802 2803 With -v, print file permissions, symlink and executable bits.
2803 2804 With --debug, print file revision hashes.
2804 2805
2805 2806 Returns 0 on success.
2806 2807 """
2807 2808
2808 2809 if rev and node:
2809 2810 raise util.Abort(_("please specify just one revision"))
2810 2811
2811 2812 if not node:
2812 2813 node = rev
2813 2814
2814 2815 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2815 2816 ctx = cmdutil.revsingle(repo, node)
2816 2817 for f in ctx:
2817 2818 if ui.debugflag:
2818 2819 ui.write("%40s " % hex(ctx.manifest()[f]))
2819 2820 if ui.verbose:
2820 2821 ui.write(decor[ctx.flags(f)])
2821 2822 ui.write("%s\n" % f)
2822 2823
2823 2824 def merge(ui, repo, node=None, **opts):
2824 2825 """merge working directory with another revision
2825 2826
2826 2827 The current working directory is updated with all changes made in
2827 2828 the requested revision since the last common predecessor revision.
2828 2829
2829 2830 Files that changed between either parent are marked as changed for
2830 2831 the next commit and a commit must be performed before any further
2831 2832 updates to the repository are allowed. The next commit will have
2832 2833 two parents.
2833 2834
2834 2835 ``--tool`` can be used to specify the merge tool used for file
2835 2836 merges. It overrides the HGMERGE environment variable and your
2836 2837 configuration files. See :hg:`help merge-tools` for options.
2837 2838
2838 2839 If no revision is specified, the working directory's parent is a
2839 2840 head revision, and the current branch contains exactly one other
2840 2841 head, the other head is merged with by default. Otherwise, an
2841 2842 explicit revision with which to merge must be provided.
2842 2843
2843 2844 :hg:`resolve` must be used to resolve unresolved files.
2844 2845
2845 2846 To undo an uncommitted merge, use :hg:`update --clean .` which
2846 2847 will check out a clean copy of the original merge parent, losing
2847 2848 all changes.
2848 2849
2849 2850 Returns 0 on success, 1 if there are unresolved files.
2850 2851 """
2851 2852
2852 2853 if opts.get('rev') and node:
2853 2854 raise util.Abort(_("please specify just one revision"))
2854 2855 if not node:
2855 2856 node = opts.get('rev')
2856 2857
2857 2858 if not node:
2858 2859 branch = repo[None].branch()
2859 2860 bheads = repo.branchheads(branch)
2860 2861 if len(bheads) > 2:
2861 2862 raise util.Abort(_(
2862 2863 'branch \'%s\' has %d heads - '
2863 2864 'please merge with an explicit rev\n'
2864 2865 '(run \'hg heads .\' to see heads)')
2865 2866 % (branch, len(bheads)))
2866 2867
2867 2868 parent = repo.dirstate.p1()
2868 2869 if len(bheads) == 1:
2869 2870 if len(repo.heads()) > 1:
2870 2871 raise util.Abort(_(
2871 2872 'branch \'%s\' has one head - '
2872 2873 'please merge with an explicit rev\n'
2873 2874 '(run \'hg heads\' to see all heads)')
2874 2875 % branch)
2875 2876 msg = _('there is nothing to merge')
2876 2877 if parent != repo.lookup(repo[None].branch()):
2877 2878 msg = _('%s - use "hg update" instead') % msg
2878 2879 raise util.Abort(msg)
2879 2880
2880 2881 if parent not in bheads:
2881 2882 raise util.Abort(_('working dir not at a head rev - '
2882 2883 'use "hg update" or merge with an explicit rev'))
2883 2884 node = parent == bheads[0] and bheads[-1] or bheads[0]
2884 2885 else:
2885 2886 node = cmdutil.revsingle(repo, node).node()
2886 2887
2887 2888 if opts.get('preview'):
2888 2889 # find nodes that are ancestors of p2 but not of p1
2889 2890 p1 = repo.lookup('.')
2890 2891 p2 = repo.lookup(node)
2891 2892 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
2892 2893
2893 2894 displayer = cmdutil.show_changeset(ui, repo, opts)
2894 2895 for node in nodes:
2895 2896 displayer.show(repo[node])
2896 2897 displayer.close()
2897 2898 return 0
2898 2899
2899 2900 try:
2900 2901 # ui.forcemerge is an internal variable, do not document
2901 2902 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
2902 2903 return hg.merge(repo, node, force=opts.get('force'))
2903 2904 finally:
2904 2905 ui.setconfig('ui', 'forcemerge', '')
2905 2906
2906 2907 def outgoing(ui, repo, dest=None, **opts):
2907 2908 """show changesets not found in the destination
2908 2909
2909 2910 Show changesets not found in the specified destination repository
2910 2911 or the default push location. These are the changesets that would
2911 2912 be pushed if a push was requested.
2912 2913
2913 2914 See pull for details of valid destination formats.
2914 2915
2915 2916 Returns 0 if there are outgoing changes, 1 otherwise.
2916 2917 """
2917 2918
2918 2919 if opts.get('bookmarks'):
2919 2920 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2920 2921 dest, branches = hg.parseurl(dest, opts.get('branch'))
2921 2922 other = hg.repository(hg.remoteui(repo, opts), dest)
2922 2923 if 'bookmarks' not in other.listkeys('namespaces'):
2923 2924 ui.warn(_("remote doesn't support bookmarks\n"))
2924 2925 return 0
2925 2926 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2926 2927 return bookmarks.diff(ui, other, repo)
2927 2928
2928 2929 ret = hg.outgoing(ui, repo, dest, opts)
2929 2930 return ret
2930 2931
2931 2932 def parents(ui, repo, file_=None, **opts):
2932 2933 """show the parents of the working directory or revision
2933 2934
2934 2935 Print the working directory's parent revisions. If a revision is
2935 2936 given via -r/--rev, the parent of that revision will be printed.
2936 2937 If a file argument is given, the revision in which the file was
2937 2938 last changed (before the working directory revision or the
2938 2939 argument to --rev if given) is printed.
2939 2940
2940 2941 Returns 0 on success.
2941 2942 """
2942 2943
2943 2944 ctx = cmdutil.revsingle(repo, opts.get('rev'), None)
2944 2945
2945 2946 if file_:
2946 2947 m = cmdutil.match(repo, (file_,), opts)
2947 2948 if m.anypats() or len(m.files()) != 1:
2948 2949 raise util.Abort(_('can only specify an explicit filename'))
2949 2950 file_ = m.files()[0]
2950 2951 filenodes = []
2951 2952 for cp in ctx.parents():
2952 2953 if not cp:
2953 2954 continue
2954 2955 try:
2955 2956 filenodes.append(cp.filenode(file_))
2956 2957 except error.LookupError:
2957 2958 pass
2958 2959 if not filenodes:
2959 2960 raise util.Abort(_("'%s' not found in manifest!") % file_)
2960 2961 fl = repo.file(file_)
2961 2962 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2962 2963 else:
2963 2964 p = [cp.node() for cp in ctx.parents()]
2964 2965
2965 2966 displayer = cmdutil.show_changeset(ui, repo, opts)
2966 2967 for n in p:
2967 2968 if n != nullid:
2968 2969 displayer.show(repo[n])
2969 2970 displayer.close()
2970 2971
2971 2972 def paths(ui, repo, search=None):
2972 2973 """show aliases for remote repositories
2973 2974
2974 2975 Show definition of symbolic path name NAME. If no name is given,
2975 2976 show definition of all available names.
2976 2977
2977 2978 Path names are defined in the [paths] section of your
2978 2979 configuration file and in ``/etc/mercurial/hgrc``. If run inside a
2979 2980 repository, ``.hg/hgrc`` is used, too.
2980 2981
2981 2982 The path names ``default`` and ``default-push`` have a special
2982 2983 meaning. When performing a push or pull operation, they are used
2983 2984 as fallbacks if no location is specified on the command-line.
2984 2985 When ``default-push`` is set, it will be used for push and
2985 2986 ``default`` will be used for pull; otherwise ``default`` is used
2986 2987 as the fallback for both. When cloning a repository, the clone
2987 2988 source is written as ``default`` in ``.hg/hgrc``. Note that
2988 2989 ``default`` and ``default-push`` apply to all inbound (e.g.
2989 2990 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
2990 2991 :hg:`bundle`) operations.
2991 2992
2992 2993 See :hg:`help urls` for more information.
2993 2994
2994 2995 Returns 0 on success.
2995 2996 """
2996 2997 if search:
2997 2998 for name, path in ui.configitems("paths"):
2998 2999 if name == search:
2999 3000 ui.write("%s\n" % url.hidepassword(path))
3000 3001 return
3001 3002 ui.warn(_("not found!\n"))
3002 3003 return 1
3003 3004 else:
3004 3005 for name, path in ui.configitems("paths"):
3005 3006 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
3006 3007
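# A minimal sketch of the ``default``/``default-push`` fallback described in
# the paths help above; the [paths] values are hypothetical examples:
#
#   [paths]
#   default      = http://example.com/hg/repo
#   default-push = ssh://example.com//hg/repo
#
# push() and outgoing() resolve their destination with
#     dest = ui.expandpath(dest or 'default-push', dest or 'default')
# so ``default-push`` wins when it is set and ``default`` is the fallback,
# while pull() and incoming() expand only ``default``.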
3007 3008 def postincoming(ui, repo, modheads, optupdate, checkout):
3008 3009 if modheads == 0:
3009 3010 return
3010 3011 if optupdate:
3011 3012 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
3012 3013 return hg.update(repo, checkout)
3013 3014 else:
3014 3015 ui.status(_("not updating, since new heads added\n"))
3015 3016 if modheads > 1:
3016 3017 currentbranchheads = len(repo.branchheads())
3017 3018 if currentbranchheads == modheads:
3018 3019 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
3019 3020 elif currentbranchheads > 1:
3020 3021 ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
3021 3022 else:
3022 3023 ui.status(_("(run 'hg heads' to see heads)\n"))
3023 3024 else:
3024 3025 ui.status(_("(run 'hg update' to get a working copy)\n"))
3025 3026
3026 3027 def pull(ui, repo, source="default", **opts):
3027 3028 """pull changes from the specified source
3028 3029
3029 3030 Pull changes from a remote repository to a local one.
3030 3031
3031 3032 This finds all changes from the repository at the specified path
3032 3033 or URL and adds them to a local repository (the current one unless
3033 3034 -R is specified). By default, this does not update the copy of the
3034 3035 project in the working directory.
3035 3036
3036 3037 Use :hg:`incoming` if you want to see what would have been added
3037 3038 by a pull at the time you issued this command. If you then decide
3038 3039 to add those changes to the repository, you should use :hg:`pull
3039 3040 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
3040 3041
3041 3042 If SOURCE is omitted, the 'default' path will be used.
3042 3043 See :hg:`help urls` for more information.
3043 3044
3044 3045 Returns 0 on success, 1 if an update had unresolved files.
3045 3046 """
3046 3047 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
3047 3048 other = hg.repository(hg.remoteui(repo, opts), source)
3048 3049 ui.status(_('pulling from %s\n') % url.hidepassword(source))
3049 3050 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3050 3051
3051 3052 if opts.get('bookmark'):
3052 3053 if not revs:
3053 3054 revs = []
3054 3055 rb = other.listkeys('bookmarks')
3055 3056 for b in opts['bookmark']:
3056 3057 if b not in rb:
3057 3058 raise util.Abort(_('remote bookmark %s not found!') % b)
3058 3059 revs.append(rb[b])
3059 3060
3060 3061 if revs:
3061 3062 try:
3062 3063 revs = [other.lookup(rev) for rev in revs]
3063 3064 except error.CapabilityError:
3064 3065 err = _("other repository doesn't support revision lookup, "
3065 3066 "so a rev cannot be specified.")
3066 3067 raise util.Abort(err)
3067 3068
3068 3069 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
3069 3070 bookmarks.updatefromremote(ui, repo, other)
3070 3071 if checkout:
3071 3072 checkout = str(repo.changelog.rev(other.lookup(checkout)))
3072 3073 repo._subtoppath = source
3073 3074 try:
3074 3075 ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)
3075 3076
3076 3077 finally:
3077 3078 del repo._subtoppath
3078 3079
3079 3080 # update specified bookmarks
3080 3081 if opts.get('bookmark'):
3081 3082 for b in opts['bookmark']:
3082 3083 # explicit pull overrides local bookmark if any
3083 3084 ui.status(_("importing bookmark %s\n") % b)
3084 3085 repo._bookmarks[b] = repo[rb[b]].node()
3085 3086 bookmarks.write(repo)
3086 3087
3087 3088 return ret
3088 3089
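# A minimal sketch of how pull --bookmark resolves names, following the code
# above: remote bookmarks live in the 'bookmarks' pushkey namespace and are
# turned into ordinary revisions to pull. `other` is an already-open remote
# peer, `wanted` a list of bookmark names; `bookmarkrevs` is a hypothetical
# helper using the module-level util and _ imports of commands.py.
def bookmarkrevs(other, wanted):
    rb = other.listkeys('bookmarks')        # {bookmark name: hex node}
    revs = []
    for b in wanted:
        if b not in rb:
            raise util.Abort(_('remote bookmark %s not found!') % b)
        revs.append(rb[b])                  # pulled like any other revision
    return revs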
3089 3090 def push(ui, repo, dest=None, **opts):
3090 3091 """push changes to the specified destination
3091 3092
3092 3093 Push changesets from the local repository to the specified
3093 3094 destination.
3094 3095
3095 3096 This operation is symmetrical to pull: it is identical to a pull
3096 3097 in the destination repository from the current one.
3097 3098
3098 3099 By default, push will not allow creation of new heads at the
3099 3100 destination, since multiple heads would make it unclear which head
3100 3101 to use. In this situation, it is recommended to pull and merge
3101 3102 before pushing.
3102 3103
3103 3104 Use --new-branch if you want to allow push to create a new named
3104 3105 branch that is not present at the destination. This allows you to
3105 3106 only create a new branch without forcing other changes.
3106 3107
3107 3108 Use -f/--force to override the default behavior and push all
3108 3109 changesets on all branches.
3109 3110
3110 3111 If -r/--rev is used, the specified revision and all its ancestors
3111 3112 will be pushed to the remote repository.
3112 3113
3113 3114 Please see :hg:`help urls` for important details about ``ssh://``
3114 3115 URLs. If DESTINATION is omitted, a default path will be used.
3115 3116
3116 3117 Returns 0 if push was successful, 1 if nothing to push.
3117 3118 """
3118 3119
3119 3120 if opts.get('bookmark'):
3120 3121 for b in opts['bookmark']:
3121 3122 # translate -B options to -r so changesets get pushed
3122 3123 if b in repo._bookmarks:
3123 3124 opts.setdefault('rev', []).append(b)
3124 3125 else:
3125 3126 # if we try to push a deleted bookmark, translate it to null
3126 3127 # this lets simultaneous -r, -b options continue working
3127 3128 opts.setdefault('rev', []).append("null")
3128 3129
3129 3130 dest = ui.expandpath(dest or 'default-push', dest or 'default')
3130 3131 dest, branches = hg.parseurl(dest, opts.get('branch'))
3131 3132 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
3132 3133 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
3133 3134 other = hg.repository(hg.remoteui(repo, opts), dest)
3134 3135 if revs:
3135 3136 revs = [repo.lookup(rev) for rev in revs]
3136 3137
3137 3138 repo._subtoppath = dest
3138 3139 try:
3139 3140 # push subrepos depth-first for coherent ordering
3140 3141 c = repo['']
3141 3142 subs = c.substate # only repos that are committed
3142 3143 for s in sorted(subs):
3143 3144 if not c.sub(s).push(opts.get('force')):
3144 3145 return False
3145 3146 finally:
3146 3147 del repo._subtoppath
3147 3148 result = repo.push(other, opts.get('force'), revs=revs,
3148 3149 newbranch=opts.get('new_branch'))
3149 3150
3150 3151 result = (result == 0)
3151 3152
3152 3153 if opts.get('bookmark'):
3153 3154 rb = other.listkeys('bookmarks')
3154 3155 for b in opts['bookmark']:
3155 3156 # explicit push overrides remote bookmark if any
3156 3157 if b in repo._bookmarks:
3157 3158 ui.status(_("exporting bookmark %s\n") % b)
3158 3159 new = repo[b].hex()
3159 3160 elif b in rb:
3160 3161 ui.status(_("deleting remote bookmark %s\n") % b)
3161 3162 new = '' # delete
3162 3163 else:
3163 3164 ui.warn(_('bookmark %s does not exist on the local '
3164 3165 'or remote repository!\n') % b)
3165 3166 return 2
3166 3167 old = rb.get(b, '')
3167 3168 r = other.pushkey('bookmarks', b, old, new)
3168 3169 if not r:
3169 3170 ui.warn(_('updating bookmark %s failed!\n') % b)
3170 3171 if not result:
3171 3172 result = 2
3172 3173
3173 3174 return result
3174 3175
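# A minimal sketch of the bookmark pushkey exchange performed above: the old
# remote value is passed along so the remote can reject a racy update, and an
# empty new value deletes the bookmark. `other` is the remote peer, `b` a
# bookmark name, `new` the local hex node ('' to delete); `pushbookmark` is a
# hypothetical helper name.
def pushbookmark(other, b, new):
    old = other.listkeys('bookmarks').get(b, '')
    return other.pushkey('bookmarks', b, old, new)   # False if the remote refused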
3175 3176 def recover(ui, repo):
3176 3177 """roll back an interrupted transaction
3177 3178
3178 3179 Recover from an interrupted commit or pull.
3179 3180
3180 3181 This command tries to fix the repository status after an
3181 3182 interrupted operation. It should only be necessary when Mercurial
3182 3183 suggests it.
3183 3184
3184 3185 Returns 0 if successful, 1 if nothing to recover or verify fails.
3185 3186 """
3186 3187 if repo.recover():
3187 3188 return hg.verify(repo)
3188 3189 return 1
3189 3190
3190 3191 def remove(ui, repo, *pats, **opts):
3191 3192 """remove the specified files on the next commit
3192 3193
3193 3194 Schedule the indicated files for removal from the repository.
3194 3195
3195 3196 This only removes files from the current branch, not from the
3196 3197 entire project history. -A/--after can be used to remove only
3197 3198 files that have already been deleted, -f/--force can be used to
3198 3199 force deletion, and -Af can be used to remove files from the next
3199 3200 revision without deleting them from the working directory.
3200 3201
3201 3202 The following table details the behavior of remove for different
3202 3203 file states (columns) and option combinations (rows). The file
3203 3204 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
3204 3205 reported by :hg:`status`). The actions are Warn, Remove (from
3205 3206 branch) and Delete (from disk)::
3206 3207
3207 3208 A C M !
3208 3209 none W RD W R
3209 3210 -f R RD RD R
3210 3211 -A W W W R
3211 3212 -Af R R R R
3212 3213
3213 3214 This command schedules the files to be removed at the next commit.
3214 3215 To undo a remove before that, see :hg:`revert`.
3215 3216
3216 3217 Returns 0 on success, 1 if any warnings encountered.
3217 3218 """
3218 3219
3219 3220 ret = 0
3220 3221 after, force = opts.get('after'), opts.get('force')
3221 3222 if not pats and not after:
3222 3223 raise util.Abort(_('no files specified'))
3223 3224
3224 3225 m = cmdutil.match(repo, pats, opts)
3225 3226 s = repo.status(match=m, clean=True)
3226 3227 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
3227 3228
3228 3229 for f in m.files():
3229 3230 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
3230 3231 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
3231 3232 ret = 1
3232 3233
3233 3234 if force:
3234 3235 remove, forget = modified + deleted + clean, added
3235 3236 elif after:
3236 3237 remove, forget = deleted, []
3237 3238 for f in modified + added + clean:
3238 3239 ui.warn(_('not removing %s: file still exists (use -f'
3239 3240 ' to force removal)\n') % m.rel(f))
3240 3241 ret = 1
3241 3242 else:
3242 3243 remove, forget = deleted + clean, []
3243 3244 for f in modified:
3244 3245 ui.warn(_('not removing %s: file is modified (use -f'
3245 3246 ' to force removal)\n') % m.rel(f))
3246 3247 ret = 1
3247 3248 for f in added:
3248 3249 ui.warn(_('not removing %s: file has been marked for add (use -f'
3249 3250 ' to force removal)\n') % m.rel(f))
3250 3251 ret = 1
3251 3252
3252 3253 for f in sorted(remove + forget):
3253 3254 if ui.verbose or not m.exact(f):
3254 3255 ui.status(_('removing %s\n') % m.rel(f))
3255 3256
3256 3257 repo[None].forget(forget)
3257 3258 repo[None].remove(remove, unlink=not after)
3258 3259 return ret
3259 3260
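# A minimal sketch of how the A/C/M/! option table in the remove help maps
# file states onto the two action lists built above; `modified`, `added`,
# `deleted` and `clean` are the status buckets from repo.status(), and
# `removelists` is a hypothetical helper name.
def removelists(modified, added, deleted, clean, after, force):
    if force:                          # -f: remove everything, forget adds
        return modified + deleted + clean, added
    if after:                          # -A: only files already missing on disk
        return deleted, []
    return deleted + clean, []         # default: modified/added only warn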
3260 3261 def rename(ui, repo, *pats, **opts):
3261 3262 """rename files; equivalent of copy + remove
3262 3263
3263 3264 Mark dest as copies of sources; mark sources for deletion. If dest
3264 3265 is a directory, copies are put in that directory. If dest is a
3265 3266 file, there can only be one source.
3266 3267
3267 3268 By default, this command copies the contents of files as they
3268 3269 exist in the working directory. If invoked with -A/--after, the
3269 3270 operation is recorded, but no copying is performed.
3270 3271
3271 3272 This command takes effect at the next commit. To undo a rename
3272 3273 before that, see :hg:`revert`.
3273 3274
3274 3275 Returns 0 on success, 1 if errors are encountered.
3275 3276 """
3276 3277 wlock = repo.wlock(False)
3277 3278 try:
3278 3279 return cmdutil.copy(ui, repo, pats, opts, rename=True)
3279 3280 finally:
3280 3281 wlock.release()
3281 3282
3282 3283 def resolve(ui, repo, *pats, **opts):
3283 3284 """redo merges or set/view the merge status of files
3284 3285
3285 3286 Merges with unresolved conflicts are often the result of
3286 3287 non-interactive merging using the ``internal:merge`` configuration
3287 3288 setting, or a command-line merge tool like ``diff3``. The resolve
3288 3289 command is used to manage the files involved in a merge, after
3289 3290 :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
3290 3291 working directory must have two parents).
3291 3292
3292 3293 The resolve command can be used in the following ways:
3293 3294
3294 3295 - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
3295 3296 files, discarding any previous merge attempts. Re-merging is not
3296 3297 performed for files already marked as resolved. Use ``--all/-a``
3297 3298 to select all unresolved files. ``--tool`` can be used to specify
3298 3299 the merge tool used for the given files. It overrides the HGMERGE
3299 3300 environment variable and your configuration files.
3300 3301
3301 3302 - :hg:`resolve -m [FILE]`: mark a file as having been resolved
3302 3303 (e.g. after having manually fixed-up the files). The default is
3303 3304 to mark all unresolved files.
3304 3305
3305 3306 - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
3306 3307 default is to mark all resolved files.
3307 3308
3308 3309 - :hg:`resolve -l`: list files which had or still have conflicts.
3309 3310 In the printed list, ``U`` = unresolved and ``R`` = resolved.
3310 3311
3311 3312 Note that Mercurial will not let you commit files with unresolved
3312 3313 merge conflicts. You must use :hg:`resolve -m ...` before you can
3313 3314 commit after a conflicting merge.
3314 3315
3315 3316 Returns 0 on success, 1 if any files fail a resolve attempt.
3316 3317 """
3317 3318
3318 3319 all, mark, unmark, show, nostatus = \
3319 3320 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
3320 3321
3321 3322 if (show and (mark or unmark)) or (mark and unmark):
3322 3323 raise util.Abort(_("too many options specified"))
3323 3324 if pats and all:
3324 3325 raise util.Abort(_("can't specify --all and patterns"))
3325 3326 if not (all or pats or show or mark or unmark):
3326 3327 raise util.Abort(_('no files or directories specified; '
3327 3328 'use --all to remerge all files'))
3328 3329
3329 3330 ms = mergemod.mergestate(repo)
3330 3331 m = cmdutil.match(repo, pats, opts)
3331 3332 ret = 0
3332 3333
3333 3334 for f in ms:
3334 3335 if m(f):
3335 3336 if show:
3336 3337 if nostatus:
3337 3338 ui.write("%s\n" % f)
3338 3339 else:
3339 3340 ui.write("%s %s\n" % (ms[f].upper(), f),
3340 3341 label='resolve.' +
3341 3342 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
3342 3343 elif mark:
3343 3344 ms.mark(f, "r")
3344 3345 elif unmark:
3345 3346 ms.mark(f, "u")
3346 3347 else:
3347 3348 wctx = repo[None]
3348 3349 mctx = wctx.parents()[-1]
3349 3350
3350 3351 # backup pre-resolve (merge uses .orig for its own purposes)
3351 3352 a = repo.wjoin(f)
3352 3353 util.copyfile(a, a + ".resolve")
3353 3354
3354 3355 try:
3355 3356 # resolve file
3356 3357 ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
3357 3358 if ms.resolve(f, wctx, mctx):
3358 3359 ret = 1
3359 3360 finally:
3360 3361 ui.setconfig('ui', 'forcemerge', '')
3361 3362
3362 3363 # replace filemerge's .orig file with our resolve file
3363 3364 util.rename(a + ".resolve", a + ".orig")
3364 3365
3365 3366 ms.commit()
3366 3367 return ret
3367 3368
3368 3369 def revert(ui, repo, *pats, **opts):
3369 3370 """restore individual files or directories to an earlier state
3370 3371
3371 3372 .. note::
3372 3373 This command is most likely not what you are looking for.
3373 3374 Revert will partially overwrite content in the working
3374 3375 directory without changing the working directory parents. Use
3375 3376 :hg:`update -r rev` to check out earlier revisions, or
3376 3377 :hg:`update --clean .` to undo a merge which has added another
3377 3378 parent.
3378 3379
3379 3380 With no revision specified, revert the named files or directories
3380 3381 to the contents they had in the parent of the working directory.
3381 3382 This restores the contents of the affected files to an unmodified
3382 3383 state and unschedules adds, removes, copies, and renames. If the
3383 3384 working directory has two parents, you must explicitly specify a
3384 3385 revision.
3385 3386
3386 3387 Using the -r/--rev option, revert the given files or directories
3387 3388 to their contents as of a specific revision. This can be helpful
3388 3389 to "roll back" some or all of an earlier change. See :hg:`help
3389 3390 dates` for a list of formats valid for -d/--date.
3390 3391
3391 3392 Revert modifies the working directory. It does not commit any
3392 3393 changes, or change the parent of the working directory. If you
3393 3394 revert to a revision other than the parent of the working
3394 3395 directory, the reverted files will thus appear modified
3395 3396 afterwards.
3396 3397
3397 3398 If a file has been deleted, it is restored. Files scheduled for
3398 3399 addition are just unscheduled and left as they are. If the
3399 3400 executable mode of a file was changed, it is reset.
3400 3401
3401 3402 If names are given, all files matching the names are reverted.
3402 3403 If no arguments are given, no files are reverted.
3403 3404
3404 3405 Modified files are saved with a .orig suffix before reverting.
3405 3406 To disable these backups, use --no-backup.
3406 3407
3407 3408 Returns 0 on success.
3408 3409 """
3409 3410
3410 3411 if opts.get("date"):
3411 3412 if opts.get("rev"):
3412 3413 raise util.Abort(_("you can't specify a revision and a date"))
3413 3414 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
3414 3415
3415 3416 parent, p2 = repo.dirstate.parents()
3416 3417 if not opts.get('rev') and p2 != nullid:
3417 3418 raise util.Abort(_('uncommitted merge - '
3418 3419 'use "hg update", see "hg help revert"'))
3419 3420
3420 3421 if not pats and not opts.get('all'):
3421 3422 raise util.Abort(_('no files or directories specified; '
3422 3423 'use --all to revert the whole repo'))
3423 3424
3424 3425 ctx = cmdutil.revsingle(repo, opts.get('rev'))
3425 3426 node = ctx.node()
3426 3427 mf = ctx.manifest()
3427 3428 if node == parent:
3428 3429 pmf = mf
3429 3430 else:
3430 3431 pmf = None
3431 3432
3432 3433 # need all matching names in dirstate and manifest of target rev,
3433 3434 # so have to walk both. do not print errors if files exist in one
3434 3435 # but not other.
3435 3436
3436 3437 names = {}
3437 3438
3438 3439 wlock = repo.wlock()
3439 3440 try:
3440 3441 # walk dirstate.
3441 3442
3442 3443 m = cmdutil.match(repo, pats, opts)
3443 3444 m.bad = lambda x, y: False
3444 3445 for abs in repo.walk(m):
3445 3446 names[abs] = m.rel(abs), m.exact(abs)
3446 3447
3447 3448 # walk target manifest.
3448 3449
3449 3450 def badfn(path, msg):
3450 3451 if path in names:
3451 3452 return
3452 3453 path_ = path + '/'
3453 3454 for f in names:
3454 3455 if f.startswith(path_):
3455 3456 return
3456 3457 ui.warn("%s: %s\n" % (m.rel(path), msg))
3457 3458
3458 3459 m = cmdutil.match(repo, pats, opts)
3459 3460 m.bad = badfn
3460 3461 for abs in repo[node].walk(m):
3461 3462 if abs not in names:
3462 3463 names[abs] = m.rel(abs), m.exact(abs)
3463 3464
3464 3465 m = cmdutil.matchfiles(repo, names)
3465 3466 changes = repo.status(match=m)[:4]
3466 3467 modified, added, removed, deleted = map(set, changes)
3467 3468
3468 3469 # if f is a rename, also revert the source
3469 3470 cwd = repo.getcwd()
3470 3471 for f in added:
3471 3472 src = repo.dirstate.copied(f)
3472 3473 if src and src not in names and repo.dirstate[src] == 'r':
3473 3474 removed.add(src)
3474 3475 names[src] = (repo.pathto(src, cwd), True)
3475 3476
3476 3477 def removeforget(abs):
3477 3478 if repo.dirstate[abs] == 'a':
3478 3479 return _('forgetting %s\n')
3479 3480 return _('removing %s\n')
3480 3481
3481 3482 revert = ([], _('reverting %s\n'))
3482 3483 add = ([], _('adding %s\n'))
3483 3484 remove = ([], removeforget)
3484 3485 undelete = ([], _('undeleting %s\n'))
3485 3486
3486 3487 disptable = (
3487 3488 # dispatch table:
3488 3489 # file state
3489 3490 # action if in target manifest
3490 3491 # action if not in target manifest
3491 3492 # make backup if in target manifest
3492 3493 # make backup if not in target manifest
3493 3494 (modified, revert, remove, True, True),
3494 3495 (added, revert, remove, True, False),
3495 3496 (removed, undelete, None, False, False),
3496 3497 (deleted, revert, remove, False, False),
3497 3498 )
3498 3499
3499 3500 for abs, (rel, exact) in sorted(names.items()):
3500 3501 mfentry = mf.get(abs)
3501 3502 target = repo.wjoin(abs)
3502 3503 def handle(xlist, dobackup):
3503 3504 xlist[0].append(abs)
3504 3505 if (dobackup and not opts.get('no_backup') and
3505 3506 os.path.lexists(target)):
3506 3507 bakname = "%s.orig" % rel
3507 3508 ui.note(_('saving current version of %s as %s\n') %
3508 3509 (rel, bakname))
3509 3510 if not opts.get('dry_run'):
3510 3511 util.rename(target, bakname)
3511 3512 if ui.verbose or not exact:
3512 3513 msg = xlist[1]
3513 3514 if not isinstance(msg, basestring):
3514 3515 msg = msg(abs)
3515 3516 ui.status(msg % rel)
3516 3517 for table, hitlist, misslist, backuphit, backupmiss in disptable:
3517 3518 if abs not in table:
3518 3519 continue
3519 3520 # file has changed in dirstate
3520 3521 if mfentry:
3521 3522 handle(hitlist, backuphit)
3522 3523 elif misslist is not None:
3523 3524 handle(misslist, backupmiss)
3524 3525 break
3525 3526 else:
3526 3527 if abs not in repo.dirstate:
3527 3528 if mfentry:
3528 3529 handle(add, True)
3529 3530 elif exact:
3530 3531 ui.warn(_('file not managed: %s\n') % rel)
3531 3532 continue
3532 3533 # file has not changed in dirstate
3533 3534 if node == parent:
3534 3535 if exact:
3535 3536 ui.warn(_('no changes needed to %s\n') % rel)
3536 3537 continue
3537 3538 if pmf is None:
3538 3539 # only need parent manifest in this unlikely case,
3539 3540 # so do not read by default
3540 3541 pmf = repo[parent].manifest()
3541 3542 if abs in pmf:
3542 3543 if mfentry:
3543 3544 # if version of file is same in parent and target
3544 3545 # manifests, do nothing
3545 3546 if (pmf[abs] != mfentry or
3546 3547 pmf.flags(abs) != mf.flags(abs)):
3547 3548 handle(revert, False)
3548 3549 else:
3549 3550 handle(remove, False)
3550 3551
3551 3552 if not opts.get('dry_run'):
3552 3553 def checkout(f):
3553 3554 fc = ctx[f]
3554 3555 repo.wwrite(f, fc.data(), fc.flags())
3555 3556
3556 3557 audit_path = util.path_auditor(repo.root)
3557 3558 for f in remove[0]:
3558 3559 if repo.dirstate[f] == 'a':
3559 3560 repo.dirstate.forget(f)
3560 3561 continue
3561 3562 audit_path(f)
3562 3563 try:
3563 3564 util.unlinkpath(repo.wjoin(f))
3564 3565 except OSError:
3565 3566 pass
3566 3567 repo.dirstate.remove(f)
3567 3568
3568 3569 normal = None
3569 3570 if node == parent:
3570 3571 # We're reverting to our parent. If possible, we'd like status
3571 3572 # to report the file as clean. We have to use normallookup for
3572 3573 # merges to avoid losing information about merged/dirty files.
3573 3574 if p2 != nullid:
3574 3575 normal = repo.dirstate.normallookup
3575 3576 else:
3576 3577 normal = repo.dirstate.normal
3577 3578 for f in revert[0]:
3578 3579 checkout(f)
3579 3580 if normal:
3580 3581 normal(f)
3581 3582
3582 3583 for f in add[0]:
3583 3584 checkout(f)
3584 3585 repo.dirstate.add(f)
3585 3586
3586 3587 normal = repo.dirstate.normallookup
3587 3588 if node == parent and p2 == nullid:
3588 3589 normal = repo.dirstate.normal
3589 3590 for f in undelete[0]:
3590 3591 checkout(f)
3591 3592 normal(f)
3592 3593
3593 3594 finally:
3594 3595 wlock.release()
3595 3596
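The dispatch table inside revert above pairs each status set (modified, added, removed, deleted) with the action to take when the file is or is not present in the target manifest, plus two backup flags. A minimal standalone sketch of that decision logic, using plain strings instead of the real dirstate and manifest objects (names are illustrative, not Mercurial API):

# Toy model of revert's dispatch table: for each file status, pick an
# action depending on whether the file exists in the target manifest.
# The two booleans mirror "make backup if in / not in target manifest".
disptable = {
    'modified': ('revert',   'remove', True,  True),
    'added':    ('revert',   'remove', True,  False),
    'removed':  ('undelete', None,     False, False),
    'deleted':  ('revert',   'remove', False, False),
}

def choose_action(status, in_target_manifest):
    hit, miss, backuphit, backupmiss = disptable[status]
    if in_target_manifest:
        return hit, backuphit
    return miss, backupmiss

print(choose_action('modified', True))   # ('revert', True)
print(choose_action('added', False))     # ('remove', False)
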
3596 3597 def rollback(ui, repo, **opts):
3597 3598 """roll back the last transaction (dangerous)
3598 3599
3599 3600 This command should be used with care. There is only one level of
3600 3601 rollback, and there is no way to undo a rollback. It will also
3601 3602 restore the dirstate at the time of the last transaction, losing
3602 3603 any dirstate changes since that time. This command does not alter
3603 3604 the working directory.
3604 3605
3605 3606 Transactions are used to encapsulate the effects of all commands
3606 3607 that create new changesets or propagate existing changesets into a
3607 3608 repository. For example, the following commands are transactional,
3608 3609 and their effects can be rolled back:
3609 3610
3610 3611 - commit
3611 3612 - import
3612 3613 - pull
3613 3614 - push (with this repository as the destination)
3614 3615 - unbundle
3615 3616
3616 3617 This command is not intended for use on public repositories. Once
3617 3618 changes are visible for pull by other users, rolling a transaction
3618 3619 back locally is ineffective (someone else may already have pulled
3619 3620 the changes). Furthermore, a race is possible with readers of the
3620 3621 repository; for example, an in-progress pull from the repository
3621 3622 may fail if a rollback is performed.
3622 3623
3623 3624 Returns 0 on success, 1 if no rollback data is available.
3624 3625 """
3625 3626 return repo.rollback(opts.get('dry_run'))
3626 3627
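Because rollback returns 0 on success and 1 when no rollback data is available, a script can probe for undo information before deciding what to do. A hedged sketch using the standard subprocess module, assuming the -n/--dry-run flag from dryrunopts reports the same exit codes without rolling anything back:

import subprocess

def has_rollback_data(repo_path):
    # "hg rollback -n" is a dry run; exit code 0 is assumed to mean a
    # transaction could be rolled back, 1 that no undo data is available.
    return subprocess.call(['hg', '-R', repo_path, 'rollback', '-n']) == 0

print(has_rollback_data('.'))   # True right after a commit, pull, push or unbundle
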
3627 3628 def root(ui, repo):
3628 3629 """print the root (top) of the current working directory
3629 3630
3630 3631 Print the root directory of the current repository.
3631 3632
3632 3633 Returns 0 on success.
3633 3634 """
3634 3635 ui.write(repo.root + "\n")
3635 3636
3636 3637 def serve(ui, repo, **opts):
3637 3638 """start stand-alone webserver
3638 3639
3639 3640 Start a local HTTP repository browser and pull server. You can use
3640 3641 this for ad-hoc sharing and browsing of repositories. It is
3641 3642 recommended to use a real web server to serve a repository for
3642 3643 longer periods of time.
3643 3644
3644 3645 Please note that the server does not implement access control.
3645 3646 This means that, by default, anybody can read from the server and
3646 3647 nobody can write to it. Set the ``web.allow_push``
3647 3648 option to ``*`` to allow everybody to push to the server. You
3648 3649 should use a real web server if you need to authenticate users.
3649 3650
3650 3651 By default, the server logs accesses to stdout and errors to
3651 3652 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
3652 3653 files.
3653 3654
3654 3655 To have the server choose a free port number to listen on, specify
3655 3656 a port number of 0; in this case, the server will print the port
3656 3657 number it uses.
3657 3658
3658 3659 Returns 0 on success.
3659 3660 """
3660 3661
3661 3662 if opts["stdio"]:
3662 3663 if repo is None:
3663 3664 raise error.RepoError(_("There is no Mercurial repository here"
3664 3665 " (.hg not found)"))
3665 3666 s = sshserver.sshserver(ui, repo)
3666 3667 s.serve_forever()
3667 3668
3668 3669 # this way we can check if something was given in the command-line
3669 3670 if opts.get('port'):
3670 3671 opts['port'] = util.getport(opts.get('port'))
3671 3672
3672 3673 baseui = repo and repo.baseui or ui
3673 3674 optlist = ("name templates style address port prefix ipv6"
3674 3675 " accesslog errorlog certificate encoding")
3675 3676 for o in optlist.split():
3676 3677 val = opts.get(o, '')
3677 3678 if val in (None, ''): # should check against default options instead
3678 3679 continue
3679 3680 baseui.setconfig("web", o, val)
3680 3681 if repo and repo.ui != baseui:
3681 3682 repo.ui.setconfig("web", o, val)
3682 3683
3683 3684 o = opts.get('web_conf') or opts.get('webdir_conf')
3684 3685 if not o:
3685 3686 if not repo:
3686 3687 raise error.RepoError(_("There is no Mercurial repository"
3687 3688 " here (.hg not found)"))
3688 3689 o = repo.root
3689 3690
3690 3691 app = hgweb.hgweb(o, baseui=ui)
3691 3692
3692 3693 class service(object):
3693 3694 def init(self):
3694 3695 util.set_signal_handler()
3695 3696 self.httpd = hgweb.server.create_server(ui, app)
3696 3697
3697 3698 if opts['port'] and not ui.verbose:
3698 3699 return
3699 3700
3700 3701 if self.httpd.prefix:
3701 3702 prefix = self.httpd.prefix.strip('/') + '/'
3702 3703 else:
3703 3704 prefix = ''
3704 3705
3705 3706 port = ':%d' % self.httpd.port
3706 3707 if port == ':80':
3707 3708 port = ''
3708 3709
3709 3710 bindaddr = self.httpd.addr
3710 3711 if bindaddr == '0.0.0.0':
3711 3712 bindaddr = '*'
3712 3713 elif ':' in bindaddr: # IPv6
3713 3714 bindaddr = '[%s]' % bindaddr
3714 3715
3715 3716 fqaddr = self.httpd.fqaddr
3716 3717 if ':' in fqaddr:
3717 3718 fqaddr = '[%s]' % fqaddr
3718 3719 if opts['port']:
3719 3720 write = ui.status
3720 3721 else:
3721 3722 write = ui.write
3722 3723 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
3723 3724 (fqaddr, port, prefix, bindaddr, self.httpd.port))
3724 3725
3725 3726 def run(self):
3726 3727 self.httpd.serve_forever()
3727 3728
3728 3729 service = service()
3729 3730
3730 3731 cmdutil.service(opts, initfn=service.init, runfn=service.run)
3731 3732
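A usage sketch for the serve options described above: run the server in the background with access and error logs, then shut it down via the pid file. The port, file names and POSIX-style kill are placeholders, not requirements of the command:

import os
import signal
import subprocess

# Start "hg serve" detached (-d), logging accesses and errors to files,
# and have it record its process ID so we can stop it later.
subprocess.check_call(['hg', 'serve', '-d', '-p', '8000',
                       '--pid-file', 'hg.pid',
                       '-A', 'access.log', '-E', 'error.log'])

# ... clone or pull from http://localhost:8000/ ...

pid = int(open('hg.pid').read().strip())
os.kill(pid, signal.SIGTERM)      # stop the ad-hoc server
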
3732 3733 def status(ui, repo, *pats, **opts):
3733 3734 """show changed files in the working directory
3734 3735
3735 3736 Show status of files in the repository. If names are given, only
3736 3737 files that match are shown. Files that are clean or ignored or
3737 3738 the source of a copy/move operation are not listed unless
3738 3739 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
3739 3740 Unless options described with "show only ..." are given, the
3740 3741 options -mardu are used.
3741 3742
3742 3743 Option -q/--quiet hides untracked (unknown and ignored) files
3743 3744 unless explicitly requested with -u/--unknown or -i/--ignored.
3744 3745
3745 3746 .. note::
3746 3747 status may appear to disagree with diff if permissions have
3747 3748 changed or a merge has occurred. The standard diff format does
3748 3749 not report permission changes and diff only reports changes
3749 3750 relative to one merge parent.
3750 3751
3751 3752 If one revision is given, it is used as the base revision.
3752 3753 If two revisions are given, the differences between them are
3753 3754 shown. The --change option can also be used as a shortcut to list
3754 3755 the changed files of a revision from its first parent.
3755 3756
3756 3757 The codes used to show the status of files are::
3757 3758
3758 3759 M = modified
3759 3760 A = added
3760 3761 R = removed
3761 3762 C = clean
3762 3763 ! = missing (deleted by non-hg command, but still tracked)
3763 3764 ? = not tracked
3764 3765 I = ignored
3765 3766 = origin of the previous file listed as A (added)
3766 3767
3767 3768 Returns 0 on success.
3768 3769 """
3769 3770
3770 3771 revs = opts.get('rev')
3771 3772 change = opts.get('change')
3772 3773
3773 3774 if revs and change:
3774 3775 msg = _('cannot specify --rev and --change at the same time')
3775 3776 raise util.Abort(msg)
3776 3777 elif change:
3777 3778 node2 = repo.lookup(change)
3778 3779 node1 = repo[node2].p1().node()
3779 3780 else:
3780 3781 node1, node2 = cmdutil.revpair(repo, revs)
3781 3782
3782 3783 cwd = (pats and repo.getcwd()) or ''
3783 3784 end = opts.get('print0') and '\0' or '\n'
3784 3785 copy = {}
3785 3786 states = 'modified added removed deleted unknown ignored clean'.split()
3786 3787 show = [k for k in states if opts.get(k)]
3787 3788 if opts.get('all'):
3788 3789 show += ui.quiet and (states[:4] + ['clean']) or states
3789 3790 if not show:
3790 3791 show = ui.quiet and states[:4] or states[:5]
3791 3792
3792 3793 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
3793 3794 'ignored' in show, 'clean' in show, 'unknown' in show,
3794 3795 opts.get('subrepos'))
3795 3796 changestates = zip(states, 'MAR!?IC', stat)
3796 3797
3797 3798 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
3798 3799 ctxn = repo[nullid]
3799 3800 ctx1 = repo[node1]
3800 3801 ctx2 = repo[node2]
3801 3802 added = stat[1]
3802 3803 if node2 is None:
3803 3804 added = stat[0] + stat[1] # merged?
3804 3805
3805 3806 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
3806 3807 if k in added:
3807 3808 copy[k] = v
3808 3809 elif v in added:
3809 3810 copy[v] = k
3810 3811
3811 3812 for state, char, files in changestates:
3812 3813 if state in show:
3813 3814 format = "%s %%s%s" % (char, end)
3814 3815 if opts.get('no_status'):
3815 3816 format = "%%s%s" % end
3816 3817
3817 3818 for f in files:
3818 3819 ui.write(format % repo.pathto(f, cwd),
3819 3820 label='status.' + state)
3820 3821 if f in copy:
3821 3822 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
3822 3823 label='status.copied')
3823 3824
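The one-letter codes listed in the status docstring make the output easy to consume from scripts. A small sketch that groups plain "hg status -A" output by those codes (it assumes the default one-line-per-file format; with -0/--print0 the split would be on NUL instead):

import subprocess

# Each "hg status" line looks like "M path/to/file"; group paths by code.
out = subprocess.check_output(['hg', 'status', '-A'])
bystate = {}
for line in out.splitlines():
    code, path = line[:1], line[2:]
    bystate.setdefault(code, []).append(path)

for code in 'MARC!?I':
    print('%s: %d file(s)' % (code, len(bystate.get(code, []))))
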
3824 3825 def summary(ui, repo, **opts):
3825 3826 """summarize working directory state
3826 3827
3827 3828 This generates a brief summary of the working directory state,
3828 3829 including parents, branch, commit status, and available updates.
3829 3830
3830 3831 With the --remote option, this will check the default paths for
3831 3832 incoming and outgoing changes. This can be time-consuming.
3832 3833
3833 3834 Returns 0 on success.
3834 3835 """
3835 3836
3836 3837 ctx = repo[None]
3837 3838 parents = ctx.parents()
3838 3839 pnode = parents[0].node()
3839 3840
3840 3841 for p in parents:
3841 3842 # label with log.changeset (instead of log.parent) since this
3842 3843 # shows a working directory parent *changeset*:
3843 3844 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
3844 3845 label='log.changeset')
3845 3846 ui.write(' '.join(p.tags()), label='log.tag')
3846 3847 if p.bookmarks():
3847 3848 ui.write(' ' + ' '.join(p.bookmarks()), label='log.bookmark')
3848 3849 if p.rev() == -1:
3849 3850 if not len(repo):
3850 3851 ui.write(_(' (empty repository)'))
3851 3852 else:
3852 3853 ui.write(_(' (no revision checked out)'))
3853 3854 ui.write('\n')
3854 3855 if p.description():
3855 3856 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
3856 3857 label='log.summary')
3857 3858
3858 3859 branch = ctx.branch()
3859 3860 bheads = repo.branchheads(branch)
3860 3861 m = _('branch: %s\n') % branch
3861 3862 if branch != 'default':
3862 3863 ui.write(m, label='log.branch')
3863 3864 else:
3864 3865 ui.status(m, label='log.branch')
3865 3866
3866 3867 st = list(repo.status(unknown=True))[:6]
3867 3868
3868 3869 c = repo.dirstate.copies()
3869 3870 copied, renamed = [], []
3870 3871 for d, s in c.iteritems():
3871 3872 if s in st[2]:
3872 3873 st[2].remove(s)
3873 3874 renamed.append(d)
3874 3875 else:
3875 3876 copied.append(d)
3876 3877 if d in st[1]:
3877 3878 st[1].remove(d)
3878 3879 st.insert(3, renamed)
3879 3880 st.insert(4, copied)
3880 3881
3881 3882 ms = mergemod.mergestate(repo)
3882 3883 st.append([f for f in ms if ms[f] == 'u'])
3883 3884
3884 3885 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
3885 3886 st.append(subs)
3886 3887
3887 3888 labels = [ui.label(_('%d modified'), 'status.modified'),
3888 3889 ui.label(_('%d added'), 'status.added'),
3889 3890 ui.label(_('%d removed'), 'status.removed'),
3890 3891 ui.label(_('%d renamed'), 'status.copied'),
3891 3892 ui.label(_('%d copied'), 'status.copied'),
3892 3893 ui.label(_('%d deleted'), 'status.deleted'),
3893 3894 ui.label(_('%d unknown'), 'status.unknown'),
3894 3895 ui.label(_('%d ignored'), 'status.ignored'),
3895 3896 ui.label(_('%d unresolved'), 'resolve.unresolved'),
3896 3897 ui.label(_('%d subrepos'), 'status.modified')]
3897 3898 t = []
3898 3899 for s, l in zip(st, labels):
3899 3900 if s:
3900 3901 t.append(l % len(s))
3901 3902
3902 3903 t = ', '.join(t)
3903 3904 cleanworkdir = False
3904 3905
3905 3906 if len(parents) > 1:
3906 3907 t += _(' (merge)')
3907 3908 elif branch != parents[0].branch():
3908 3909 t += _(' (new branch)')
3909 3910 elif (parents[0].extra().get('close') and
3910 3911 pnode in repo.branchheads(branch, closed=True)):
3911 3912 t += _(' (head closed)')
3912 3913 elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
3913 3914 t += _(' (clean)')
3914 3915 cleanworkdir = True
3915 3916 elif pnode not in bheads:
3916 3917 t += _(' (new branch head)')
3917 3918
3918 3919 if cleanworkdir:
3919 3920 ui.status(_('commit: %s\n') % t.strip())
3920 3921 else:
3921 3922 ui.write(_('commit: %s\n') % t.strip())
3922 3923
3923 3924 # all ancestors of branch heads - all ancestors of parent = new csets
3924 3925 new = [0] * len(repo)
3925 3926 cl = repo.changelog
3926 3927 for a in [cl.rev(n) for n in bheads]:
3927 3928 new[a] = 1
3928 3929 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3929 3930 new[a] = 1
3930 3931 for a in [p.rev() for p in parents]:
3931 3932 if a >= 0:
3932 3933 new[a] = 0
3933 3934 for a in cl.ancestors(*[p.rev() for p in parents]):
3934 3935 new[a] = 0
3935 3936 new = sum(new)
3936 3937
3937 3938 if new == 0:
3938 3939 ui.status(_('update: (current)\n'))
3939 3940 elif pnode not in bheads:
3940 3941 ui.write(_('update: %d new changesets (update)\n') % new)
3941 3942 else:
3942 3943 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
3943 3944 (new, len(bheads)))
3944 3945
3945 3946 if opts.get('remote'):
3946 3947 t = []
3947 3948 source, branches = hg.parseurl(ui.expandpath('default'))
3948 3949 other = hg.repository(hg.remoteui(repo, {}), source)
3949 3950 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3950 3951 ui.debug('comparing with %s\n' % url.hidepassword(source))
3951 3952 repo.ui.pushbuffer()
3952 3953 common, incoming, rheads = discovery.findcommonincoming(repo, other)
3953 3954 repo.ui.popbuffer()
3954 3955 if incoming:
3955 3956 t.append(_('1 or more incoming'))
3956 3957
3957 3958 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
3958 3959 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3959 3960 other = hg.repository(hg.remoteui(repo, {}), dest)
3960 3961 ui.debug('comparing with %s\n' % url.hidepassword(dest))
3961 3962 repo.ui.pushbuffer()
3962 3963 o = discovery.findoutgoing(repo, other)
3963 3964 repo.ui.popbuffer()
3964 3965 o = repo.changelog.nodesbetween(o, None)[0]
3965 3966 if o:
3966 3967 t.append(_('%d outgoing') % len(o))
3967 3968 if 'bookmarks' in other.listkeys('namespaces'):
3968 3969 lmarks = repo.listkeys('bookmarks')
3969 3970 rmarks = other.listkeys('bookmarks')
3970 3971 diff = set(rmarks) - set(lmarks)
3971 3972 if len(diff) > 0:
3972 3973 t.append(_('%d incoming bookmarks') % len(diff))
3973 3974 diff = set(lmarks) - set(rmarks)
3974 3975 if len(diff) > 0:
3975 3976 t.append(_('%d outgoing bookmarks') % len(diff))
3976 3977
3977 3978 if t:
3978 3979 ui.write(_('remote: %s\n') % (', '.join(t)))
3979 3980 else:
3980 3981 ui.status(_('remote: (synced)\n'))
3981 3982
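The "update:" line in summary comes from the set arithmetic noted in the comment above: everything reachable from a branch head, minus everything reachable from the working directory parents, is what an update or merge would bring in. A toy model of that computation on a five-revision DAG (illustrative data, not the real changelog API):

# Revisions reachable from a branch head (inclusive) but not from the
# working directory parent (inclusive) are what the summary counts as new.
parents = {0: [], 1: [0], 2: [1], 3: [1], 4: [3]}   # tiny DAG; 2 and 4 are heads

def ancestors(rev):
    seen, stack = set(), [rev]
    while stack:
        for p in parents[stack.pop()]:
            if p not in seen:
                seen.add(p)
                stack.append(p)
    return seen

bheads, wdparent = [2, 4], 2
new = set()
for h in bheads:
    new.update(ancestors(h) | set([h]))
new -= ancestors(wdparent) | set([wdparent])
print(sorted(new))   # [3, 4] -> "update: 2 new changesets"
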
3982 3983 def tag(ui, repo, name1, *names, **opts):
3983 3984 """add one or more tags for the current or given revision
3984 3985
3985 3986 Name a particular revision using <name>.
3986 3987
3987 3988 Tags are used to name particular revisions of the repository and are
3988 3989 very useful for comparing different revisions, going back to significant
3989 3990 earlier versions, or marking branch points as releases. Changing
3990 3991 an existing tag is normally disallowed; use -f/--force to override.
3991 3992
3992 3993 If no revision is given, the parent of the working directory is
3993 3994 used, or tip if no revision is checked out.
3994 3995
3995 3996 To facilitate version control, distribution, and merging of tags,
3996 3997 they are stored as a file named ".hgtags" which is managed similarly
3997 3998 to other project files and can be hand-edited if necessary. This
3998 3999 also means that tagging creates a new commit. The file
3999 4000 ".hg/localtags" is used for local tags (not shared among
4000 4001 repositories).
4001 4002
4002 4003 Tag commits are usually made at the head of a branch. If the parent
4003 4004 of the working directory is not a branch head, :hg:`tag` aborts; use
4004 4005 -f/--force to force the tag commit to be based on a non-head
4005 4006 changeset.
4006 4007
4007 4008 See :hg:`help dates` for a list of formats valid for -d/--date.
4008 4009
4009 4010 Since tag names have priority over branch names during revision
4010 4011 lookup, using an existing branch name as a tag name is discouraged.
4011 4012
4012 4013 Returns 0 on success.
4013 4014 """
4014 4015
4015 4016 rev_ = "."
4016 4017 names = [t.strip() for t in (name1,) + names]
4017 4018 if len(names) != len(set(names)):
4018 4019 raise util.Abort(_('tag names must be unique'))
4019 4020 for n in names:
4020 4021 if n in ['tip', '.', 'null']:
4021 4022 raise util.Abort(_('the name \'%s\' is reserved') % n)
4022 4023 if not n:
4023 4024 raise util.Abort(_('tag names cannot consist entirely of whitespace'))
4024 4025 if opts.get('rev') and opts.get('remove'):
4025 4026 raise util.Abort(_("--rev and --remove are incompatible"))
4026 4027 if opts.get('rev'):
4027 4028 rev_ = opts['rev']
4028 4029 message = opts.get('message')
4029 4030 if opts.get('remove'):
4030 4031 expectedtype = opts.get('local') and 'local' or 'global'
4031 4032 for n in names:
4032 4033 if not repo.tagtype(n):
4033 4034 raise util.Abort(_('tag \'%s\' does not exist') % n)
4034 4035 if repo.tagtype(n) != expectedtype:
4035 4036 if expectedtype == 'global':
4036 4037 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
4037 4038 else:
4038 4039 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
4039 4040 rev_ = nullid
4040 4041 if not message:
4041 4042 # we don't translate commit messages
4042 4043 message = 'Removed tag %s' % ', '.join(names)
4043 4044 elif not opts.get('force'):
4044 4045 for n in names:
4045 4046 if n in repo.tags():
4046 4047 raise util.Abort(_('tag \'%s\' already exists '
4047 4048 '(use -f to force)') % n)
4048 4049 if not opts.get('local'):
4049 4050 p1, p2 = repo.dirstate.parents()
4050 4051 if p2 != nullid:
4051 4052 raise util.Abort(_('uncommitted merge'))
4052 4053 bheads = repo.branchheads()
4053 4054 if not opts.get('force') and bheads and p1 not in bheads:
4054 4055 raise util.Abort(_('not at a branch head (use -f to force)'))
4055 4056 r = cmdutil.revsingle(repo, rev_).node()
4056 4057
4057 4058 if not message:
4058 4059 # we don't translate commit messages
4059 4060 message = ('Added tag %s for changeset %s' %
4060 4061 (', '.join(names), short(r)))
4061 4062
4062 4063 date = opts.get('date')
4063 4064 if date:
4064 4065 date = util.parsedate(date)
4065 4066
4066 4067 if opts.get('edit'):
4067 4068 message = ui.edit(message, ui.username())
4068 4069
4069 4070 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
4070 4071
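A quick usage sketch for the tagging behaviour described above; the revision number and tag names are placeholders:

import subprocess

# Tag revision 3 as "v0.1"; this records the tag in .hgtags and creates a commit.
subprocess.check_call(['hg', 'tag', '-r', '3', '-m', 'Added tag v0.1 for release', 'v0.1'])

# A local tag goes to .hg/localtags instead and is not shared with other repositories.
subprocess.check_call(['hg', 'tag', '--local', 'wip'])

# With -v, "hg tags" adds a third "local" column for local tags.
print(subprocess.check_output(['hg', 'tags', '-v']))
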
4071 4072 def tags(ui, repo):
4072 4073 """list repository tags
4073 4074
4074 4075 This lists both regular and local tags. When the -v/--verbose
4075 4076 switch is used, a third column "local" is printed for local tags.
4076 4077
4077 4078 Returns 0 on success.
4078 4079 """
4079 4080
4080 4081 hexfunc = ui.debugflag and hex or short
4081 4082 tagtype = ""
4082 4083
4083 4084 for t, n in reversed(repo.tagslist()):
4084 4085 if ui.quiet:
4085 4086 ui.write("%s\n" % t)
4086 4087 continue
4087 4088
4088 4089 hn = hexfunc(n)
4089 4090 r = "%5d:%s" % (repo.changelog.rev(n), hn)
4090 4091 spaces = " " * (30 - encoding.colwidth(t))
4091 4092
4092 4093 if ui.verbose:
4093 4094 if repo.tagtype(t) == 'local':
4094 4095 tagtype = " local"
4095 4096 else:
4096 4097 tagtype = ""
4097 4098 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
4098 4099
4099 4100 def tip(ui, repo, **opts):
4100 4101 """show the tip revision
4101 4102
4102 4103 The tip revision (usually just called the tip) is the changeset
4103 4104 most recently added to the repository (and therefore the most
4104 4105 recently changed head).
4105 4106
4106 4107 If you have just made a commit, that commit will be the tip. If
4107 4108 you have just pulled changes from another repository, the tip of
4108 4109 that repository becomes the current tip. The "tip" tag is special
4109 4110 and cannot be renamed or assigned to a different changeset.
4110 4111
4111 4112 Returns 0 on success.
4112 4113 """
4113 4114 displayer = cmdutil.show_changeset(ui, repo, opts)
4114 4115 displayer.show(repo[len(repo) - 1])
4115 4116 displayer.close()
4116 4117
4117 4118 def unbundle(ui, repo, fname1, *fnames, **opts):
4118 4119 """apply one or more changegroup files
4119 4120
4120 4121 Apply one or more compressed changegroup files generated by the
4121 4122 bundle command.
4122 4123
4123 4124 Returns 0 on success, 1 if an update has unresolved files.
4124 4125 """
4125 4126 fnames = (fname1,) + fnames
4126 4127
4127 4128 lock = repo.lock()
4128 4129 wc = repo['.']
4129 4130 try:
4130 4131 for fname in fnames:
4131 4132 f = url.open(ui, fname)
4132 4133 gen = changegroup.readbundle(f, fname)
4133 4134 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname,
4134 4135 lock=lock)
4135 4136 bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
4136 4137 finally:
4137 4138 lock.release()
4138 4139 return postincoming(ui, repo, modheads, opts.get('update'), None)
4139 4140
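A minimal round trip for the bundle/unbundle pair (the paths are placeholders): write every changeset of the current repository to a changegroup file, then apply it to another clone and update its working directory:

import subprocess

# "hg bundle --all FILE" writes a compressed changegroup of all changesets;
# "hg unbundle -u FILE" applies it elsewhere and updates to the new branch head.
subprocess.check_call(['hg', 'bundle', '--all', 'changes.hg'])
subprocess.check_call(['hg', '-R', '../other-repo', 'unbundle', '-u', 'changes.hg'])
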
4140 4141 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
4141 4142 """update working directory (or switch revisions)
4142 4143
4143 4144 Update the repository's working directory to the specified
4144 4145 changeset. If no changeset is specified, update to the tip of the
4145 4146 current named branch.
4146 4147
4147 4148 If the changeset is not a descendant of the working directory's
4148 4149 parent, the update is aborted. With the -c/--check option, the
4149 4150 working directory is checked for uncommitted changes; if none are
4150 4151 found, the working directory is updated to the specified
4151 4152 changeset.
4152 4153
4153 4154 The following rules apply when the working directory contains
4154 4155 uncommitted changes:
4155 4156
4156 4157 1. If neither -c/--check nor -C/--clean is specified, and if
4157 4158 the requested changeset is an ancestor or descendant of
4158 4159 the working directory's parent, the uncommitted changes
4159 4160 are merged into the requested changeset and the merged
4160 4161 result is left uncommitted. If the requested changeset is
4161 4162 not an ancestor or descendant (that is, it is on another
4162 4163 branch), the update is aborted and the uncommitted changes
4163 4164 are preserved.
4164 4165
4165 4166 2. With the -c/--check option, the update is aborted and the
4166 4167 uncommitted changes are preserved.
4167 4168
4168 4169 3. With the -C/--clean option, uncommitted changes are discarded and
4169 4170 the working directory is updated to the requested changeset.
4170 4171
4171 4172 Use null as the changeset to remove the working directory (like
4172 4173 :hg:`clone -U`).
4173 4174
4174 4175 If you want to update just one file to an older changeset, use
4175 4176 :hg:`revert`.
4176 4177
4177 4178 See :hg:`help dates` for a list of formats valid for -d/--date.
4178 4179
4179 4180 Returns 0 on success, 1 if there are unresolved files.
4180 4181 """
4181 4182 if rev and node:
4182 4183 raise util.Abort(_("please specify just one revision"))
4183 4184
4184 4185 if rev is None or rev == '':
4185 4186 rev = node
4186 4187
4187 4188 # if we defined a bookmark, we have to remember the original bookmark name
4188 4189 brev = rev
4189 4190 rev = cmdutil.revsingle(repo, rev, rev).rev()
4190 4191
4191 4192 if check and clean:
4192 4193 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
4193 4194
4194 4195 if check:
4195 4196 # we could use dirty() but we can ignore merge and branch trivia
4196 4197 c = repo[None]
4197 4198 if c.modified() or c.added() or c.removed():
4198 4199 raise util.Abort(_("uncommitted local changes"))
4199 4200
4200 4201 if date:
4201 4202 if rev is not None:
4202 4203 raise util.Abort(_("you can't specify a revision and a date"))
4203 4204 rev = cmdutil.finddate(ui, repo, date)
4204 4205
4205 4206 if clean or check:
4206 4207 ret = hg.clean(repo, rev)
4207 4208 else:
4208 4209 ret = hg.update(repo, rev)
4209 4210
4210 4211 if brev in repo._bookmarks:
4211 4212 bookmarks.setcurrent(repo, brev)
4212 4213
4213 4214 return ret
4214 4215
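The numbered rules in the update docstring reduce to a small decision over four booleans: whether -C/--clean or -c/--check was given, whether the working directory is dirty, and whether the target is linear (an ancestor or descendant of the working directory parent). A compact standalone sketch of that decision, not the real merge machinery:

# Minimal model of the rules described in the "update" docstring.
def update_outcome(clean, check, dirty, linear):
    if clean and check:
        return 'abort: cannot specify both -c/--check and -C/--clean'
    if clean:
        return 'discard uncommitted changes, update to target'
    if check and dirty:
        return 'abort: uncommitted local changes'
    if dirty and not linear:
        return 'abort: target is on another branch, changes preserved'
    if dirty:
        return 'merge uncommitted changes into target, leave result uncommitted'
    return 'update to target'

print(update_outcome(clean=False, check=False, dirty=True, linear=True))
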
4215 4216 def verify(ui, repo):
4216 4217 """verify the integrity of the repository
4217 4218
4218 4219 Verify the integrity of the current repository.
4219 4220
4220 4221 This will perform an extensive check of the repository's
4221 4222 integrity, validating the hashes and checksums of each entry in
4222 4223 the changelog, manifest, and tracked files, as well as the
4223 4224 integrity of their crosslinks and indices.
4224 4225
4225 4226 Returns 0 on success, 1 if errors are encountered.
4226 4227 """
4227 4228 return hg.verify(repo)
4228 4229
4229 4230 def version_(ui):
4230 4231 """output version and copyright information"""
4231 4232 ui.write(_("Mercurial Distributed SCM (version %s)\n")
4232 4233 % util.version())
4233 4234 ui.status(_(
4234 4235 "(see http://mercurial.selenic.com for more information)\n"
4235 4236 "\nCopyright (C) 2005-2011 Matt Mackall and others\n"
4236 4237 "This is free software; see the source for copying conditions. "
4237 4238 "There is NO\nwarranty; "
4238 4239 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
4239 4240 ))
4240 4241
4241 4242 # Command options and aliases are listed here, alphabetically
4242 4243
4243 4244 globalopts = [
4244 4245 ('R', 'repository', '',
4245 4246 _('repository root directory or name of overlay bundle file'),
4246 4247 _('REPO')),
4247 4248 ('', 'cwd', '',
4248 4249 _('change working directory'), _('DIR')),
4249 4250 ('y', 'noninteractive', None,
4250 4251 _('do not prompt, assume \'yes\' for any required answers')),
4251 4252 ('q', 'quiet', None, _('suppress output')),
4252 4253 ('v', 'verbose', None, _('enable additional output')),
4253 4254 ('', 'config', [],
4254 4255 _('set/override config option (use \'section.name=value\')'),
4255 4256 _('CONFIG')),
4256 4257 ('', 'debug', None, _('enable debugging output')),
4257 4258 ('', 'debugger', None, _('start debugger')),
4258 4259 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
4259 4260 _('ENCODE')),
4260 4261 ('', 'encodingmode', encoding.encodingmode,
4261 4262 _('set the charset encoding mode'), _('MODE')),
4262 4263 ('', 'traceback', None, _('always print a traceback on exception')),
4263 4264 ('', 'time', None, _('time how long the command takes')),
4264 4265 ('', 'profile', None, _('print command execution profile')),
4265 4266 ('', 'version', None, _('output version information and exit')),
4266 4267 ('h', 'help', None, _('display help and exit')),
4267 4268 ]
4268 4269
4269 4270 dryrunopts = [('n', 'dry-run', None,
4270 4271 _('do not perform actions, just print output'))]
4271 4272
4272 4273 remoteopts = [
4273 4274 ('e', 'ssh', '',
4274 4275 _('specify ssh command to use'), _('CMD')),
4275 4276 ('', 'remotecmd', '',
4276 4277 _('specify hg command to run on the remote side'), _('CMD')),
4277 4278 ('', 'insecure', None,
4278 4279 _('do not verify server certificate (ignoring web.cacerts config)')),
4279 4280 ]
4280 4281
4281 4282 walkopts = [
4282 4283 ('I', 'include', [],
4283 4284 _('include names matching the given patterns'), _('PATTERN')),
4284 4285 ('X', 'exclude', [],
4285 4286 _('exclude names matching the given patterns'), _('PATTERN')),
4286 4287 ]
4287 4288
4288 4289 commitopts = [
4289 4290 ('m', 'message', '',
4290 4291 _('use text as commit message'), _('TEXT')),
4291 4292 ('l', 'logfile', '',
4292 4293 _('read commit message from file'), _('FILE')),
4293 4294 ]
4294 4295
4295 4296 commitopts2 = [
4296 4297 ('d', 'date', '',
4297 4298 _('record datecode as commit date'), _('DATE')),
4298 4299 ('u', 'user', '',
4299 4300 _('record the specified user as committer'), _('USER')),
4300 4301 ]
4301 4302
4302 4303 templateopts = [
4303 4304 ('', 'style', '',
4304 4305 _('display using template map file'), _('STYLE')),
4305 4306 ('', 'template', '',
4306 4307 _('display with template'), _('TEMPLATE')),
4307 4308 ]
4308 4309
4309 4310 logopts = [
4310 4311 ('p', 'patch', None, _('show patch')),
4311 4312 ('g', 'git', None, _('use git extended diff format')),
4312 4313 ('l', 'limit', '',
4313 4314 _('limit number of changes displayed'), _('NUM')),
4314 4315 ('M', 'no-merges', None, _('do not show merges')),
4315 4316 ('', 'stat', None, _('output diffstat-style summary of changes')),
4316 4317 ] + templateopts
4317 4318
4318 4319 diffopts = [
4319 4320 ('a', 'text', None, _('treat all files as text')),
4320 4321 ('g', 'git', None, _('use git extended diff format')),
4321 4322 ('', 'nodates', None, _('omit dates from diff headers'))
4322 4323 ]
4323 4324
4324 4325 diffopts2 = [
4325 4326 ('p', 'show-function', None, _('show which function each change is in')),
4326 4327 ('', 'reverse', None, _('produce a diff that undoes the changes')),
4327 4328 ('w', 'ignore-all-space', None,
4328 4329 _('ignore white space when comparing lines')),
4329 4330 ('b', 'ignore-space-change', None,
4330 4331 _('ignore changes in the amount of white space')),
4331 4332 ('B', 'ignore-blank-lines', None,
4332 4333 _('ignore changes whose lines are all blank')),
4333 4334 ('U', 'unified', '',
4334 4335 _('number of lines of context to show'), _('NUM')),
4335 4336 ('', 'stat', None, _('output diffstat-style summary of changes')),
4336 4337 ]
4337 4338
4338 4339 similarityopts = [
4339 4340 ('s', 'similarity', '',
4340 4341 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
4341 4342 ]
4342 4343
4343 4344 subrepoopts = [
4344 4345 ('S', 'subrepos', None,
4345 4346 _('recurse into subrepositories'))
4346 4347 ]
4347 4348
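Each entry in the option lists above is a tuple of (short flag, long name, default, help text) with an optional fifth metavar element, and the command table below concatenates these lists per command. A tiny sketch rendering such a list the way a help formatter might (purely illustrative formatting):

# Render option tuples (short, long, default, help[, metavar]) as help lines.
def formatopts(optlist):
    lines = []
    for opt in optlist:
        short, longname, default, helptext = opt[:4]
        metavar = opt[4] if len(opt) > 4 else ''
        flag = ('-%s/--%s' % (short, longname)) if short else '--%s' % longname
        lines.append('%-28s %s' % ((flag + ' ' + metavar).strip(), helptext))
    return '\n'.join(lines)

dryrun = [('n', 'dry-run', None, 'do not perform actions, just print output')]
print(formatopts(dryrun))
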
4348 4349 table = {
4349 4350 "^add": (add, walkopts + subrepoopts + dryrunopts,
4350 4351 _('[OPTION]... [FILE]...')),
4351 4352 "addremove":
4352 4353 (addremove, similarityopts + walkopts + dryrunopts,
4353 4354 _('[OPTION]... [FILE]...')),
4354 4355 "^annotate|blame":
4355 4356 (annotate,
4356 4357 [('r', 'rev', '',
4357 4358 _('annotate the specified revision'), _('REV')),
4358 4359 ('', 'follow', None,
4359 4360 _('follow copies/renames and list the filename (DEPRECATED)')),
4360 4361 ('', 'no-follow', None, _("don't follow copies and renames")),
4361 4362 ('a', 'text', None, _('treat all files as text')),
4362 4363 ('u', 'user', None, _('list the author (long with -v)')),
4363 4364 ('f', 'file', None, _('list the filename')),
4364 4365 ('d', 'date', None, _('list the date (short with -q)')),
4365 4366 ('n', 'number', None, _('list the revision number (default)')),
4366 4367 ('c', 'changeset', None, _('list the changeset')),
4367 4368 ('l', 'line-number', None,
4368 4369 _('show line number at the first appearance'))
4369 4370 ] + walkopts,
4370 4371 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
4371 4372 "archive":
4372 4373 (archive,
4373 4374 [('', 'no-decode', None, _('do not pass files through decoders')),
4374 4375 ('p', 'prefix', '',
4375 4376 _('directory prefix for files in archive'), _('PREFIX')),
4376 4377 ('r', 'rev', '',
4377 4378 _('revision to distribute'), _('REV')),
4378 4379 ('t', 'type', '',
4379 4380 _('type of distribution to create'), _('TYPE')),
4380 4381 ] + subrepoopts + walkopts,
4381 4382 _('[OPTION]... DEST')),
4382 4383 "backout":
4383 4384 (backout,
4384 4385 [('', 'merge', None,
4385 4386 _('merge with old dirstate parent after backout')),
4386 4387 ('', 'parent', '',
4387 4388 _('parent to choose when backing out merge'), _('REV')),
4388 4389 ('t', 'tool', '',
4389 4390 _('specify merge tool')),
4390 4391 ('r', 'rev', '',
4391 4392 _('revision to backout'), _('REV')),
4392 4393 ] + walkopts + commitopts + commitopts2,
4393 4394 _('[OPTION]... [-r] REV')),
4394 4395 "bisect":
4395 4396 (bisect,
4396 4397 [('r', 'reset', False, _('reset bisect state')),
4397 4398 ('g', 'good', False, _('mark changeset good')),
4398 4399 ('b', 'bad', False, _('mark changeset bad')),
4399 4400 ('s', 'skip', False, _('skip testing changeset')),
4400 4401 ('e', 'extend', False, _('extend the bisect range')),
4401 4402 ('c', 'command', '',
4402 4403 _('use command to check changeset state'), _('CMD')),
4403 4404 ('U', 'noupdate', False, _('do not update to target'))],
4404 4405 _("[-gbsr] [-U] [-c CMD] [REV]")),
4405 4406 "bookmarks":
4406 4407 (bookmark,
4407 4408 [('f', 'force', False, _('force')),
4408 4409 ('r', 'rev', '', _('revision'), _('REV')),
4409 4410 ('d', 'delete', False, _('delete a given bookmark')),
4410 4411 ('m', 'rename', '', _('rename a given bookmark'), _('NAME'))],
4411 4412 _('hg bookmarks [-f] [-d] [-m NAME] [-r REV] [NAME]')),
4412 4413 "branch":
4413 4414 (branch,
4414 4415 [('f', 'force', None,
4415 4416 _('set branch name even if it shadows an existing branch')),
4416 4417 ('C', 'clean', None, _('reset branch name to parent branch name'))],
4417 4418 _('[-fC] [NAME]')),
4418 4419 "branches":
4419 4420 (branches,
4420 4421 [('a', 'active', False,
4421 4422 _('show only branches that have unmerged heads')),
4422 4423 ('c', 'closed', False,
4423 4424 _('show normal and closed branches'))],
4424 4425 _('[-ac]')),
4425 4426 "bundle":
4426 4427 (bundle,
4427 4428 [('f', 'force', None,
4428 4429 _('run even when the destination is unrelated')),
4429 4430 ('r', 'rev', [],
4430 4431 _('a changeset intended to be added to the destination'),
4431 4432 _('REV')),
4432 4433 ('b', 'branch', [],
4433 4434 _('a specific branch you would like to bundle'),
4434 4435 _('BRANCH')),
4435 4436 ('', 'base', [],
4436 4437 _('a base changeset assumed to be available at the destination'),
4437 4438 _('REV')),
4438 4439 ('a', 'all', None, _('bundle all changesets in the repository')),
4439 4440 ('t', 'type', 'bzip2',
4440 4441 _('bundle compression type to use'), _('TYPE')),
4441 4442 ] + remoteopts,
4442 4443 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
4443 4444 "cat":
4444 4445 (cat,
4445 4446 [('o', 'output', '',
4446 4447 _('print output to file with formatted name'), _('FORMAT')),
4447 4448 ('r', 'rev', '',
4448 4449 _('print the given revision'), _('REV')),
4449 4450 ('', 'decode', None, _('apply any matching decode filter')),
4450 4451 ] + walkopts,
4451 4452 _('[OPTION]... FILE...')),
4452 4453 "^clone":
4453 4454 (clone,
4454 4455 [('U', 'noupdate', None,
4455 4456 _('the clone will include an empty working copy (only a repository)')),
4456 4457 ('u', 'updaterev', '',
4457 4458 _('revision, tag or branch to check out'), _('REV')),
4458 4459 ('r', 'rev', [],
4459 4460 _('include the specified changeset'), _('REV')),
4460 4461 ('b', 'branch', [],
4461 4462 _('clone only the specified branch'), _('BRANCH')),
4462 4463 ('', 'pull', None, _('use pull protocol to copy metadata')),
4463 4464 ('', 'uncompressed', None,
4464 4465 _('use uncompressed transfer (fast over LAN)')),
4465 4466 ] + remoteopts,
4466 4467 _('[OPTION]... SOURCE [DEST]')),
4467 4468 "^commit|ci":
4468 4469 (commit,
4469 4470 [('A', 'addremove', None,
4470 4471 _('mark new/missing files as added/removed before committing')),
4471 4472 ('', 'close-branch', None,
4472 4473 _('mark a branch as closed, hiding it from the branch list')),
4473 4474 ] + walkopts + commitopts + commitopts2,
4474 4475 _('[OPTION]... [FILE]...')),
4475 4476 "copy|cp":
4476 4477 (copy,
4477 4478 [('A', 'after', None, _('record a copy that has already occurred')),
4478 4479 ('f', 'force', None,
4479 4480 _('forcibly copy over an existing managed file')),
4480 4481 ] + walkopts + dryrunopts,
4481 4482 _('[OPTION]... [SOURCE]... DEST')),
4482 4483 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
4483 4484 "debugbuilddag":
4484 4485 (debugbuilddag,
4485 4486 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
4486 4487 ('a', 'appended-file', None, _('add single file all revs append to')),
4487 4488 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
4488 4489 ('n', 'new-file', None, _('add new file at each rev')),
4489 4490 ],
4490 4491 _('[OPTION]... TEXT')),
4491 4492 "debugbundle":
4492 4493 (debugbundle,
4493 4494 [('a', 'all', None, _('show all details')),
4494 4495 ],
4495 4496 _('FILE')),
4496 4497 "debugcheckstate": (debugcheckstate, [], ''),
4497 4498 "debugcommands": (debugcommands, [], _('[COMMAND]')),
4498 4499 "debugcomplete":
4499 4500 (debugcomplete,
4500 4501 [('o', 'options', None, _('show the command options'))],
4501 4502 _('[-o] CMD')),
4502 4503 "debugdag":
4503 4504 (debugdag,
4504 4505 [('t', 'tags', None, _('use tags as labels')),
4505 4506 ('b', 'branches', None, _('annotate with branch names')),
4506 4507 ('', 'dots', None, _('use dots for runs')),
4507 4508 ('s', 'spaces', None, _('separate elements by spaces')),
4508 4509 ],
4509 4510 _('[OPTION]... [FILE [REV]...]')),
4510 4511 "debugdate":
4511 4512 (debugdate,
4512 4513 [('e', 'extended', None, _('try extended date formats'))],
4513 4514 _('[-e] DATE [RANGE]')),
4514 4515 "debugdata": (debugdata, [], _('FILE REV')),
4515 4516 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
4516 4517 "debuggetbundle":
4517 4518 (debuggetbundle,
4518 4519 [('H', 'head', [], _('id of head node'), _('ID')),
4519 4520 ('C', 'common', [], _('id of common node'), _('ID')),
4520 4521 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
4521 4522 ],
4522 4523 _('REPO FILE [-H|-C ID]...')),
4523 4524 "debugignore": (debugignore, [], ''),
4524 4525 "debugindex": (debugindex,
4525 4526 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
4526 4527 _('FILE')),
4527 4528 "debugindexdot": (debugindexdot, [], _('FILE')),
4528 4529 "debuginstall": (debuginstall, [], ''),
4529 4530 "debugknown": (debugknown, [], _('REPO ID...')),
4530 4531 "debugpushkey": (debugpushkey, [], _('REPO NAMESPACE [KEY OLD NEW]')),
4531 4532 "debugrebuildstate":
4532 4533 (debugrebuildstate,
4533 4534 [('r', 'rev', '',
4534 4535 _('revision to rebuild to'), _('REV'))],
4535 4536 _('[-r REV] [REV]')),
4536 4537 "debugrename":
4537 4538 (debugrename,
4538 4539 [('r', 'rev', '',
4539 4540 _('revision to debug'), _('REV'))],
4540 4541 _('[-r REV] FILE')),
4541 4542 "debugrevspec":
4542 4543 (debugrevspec, [], ('REVSPEC')),
4543 4544 "debugsetparents":
4544 4545 (debugsetparents, [], _('REV1 [REV2]')),
4545 4546 "debugstate":
4546 4547 (debugstate,
4547 4548 [('', 'nodates', None, _('do not display the saved mtime')),
4548 4549 ('', 'datesort', None, _('sort by saved mtime'))],
4549 4550 _('[OPTION]...')),
4550 4551 "debugsub":
4551 4552 (debugsub,
4552 4553 [('r', 'rev', '',
4553 4554 _('revision to check'), _('REV'))],
4554 4555 _('[-r REV] [REV]')),
4555 4556 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
4556 4557 "debugwireargs":
4557 4558 (debugwireargs,
4558 4559 [('', 'three', '', 'three'),
4559 4560 ('', 'four', '', 'four'),
4560 4561 ] + remoteopts,
4561 4562 _('REPO [OPTIONS]... [ONE [TWO]]')),
4562 4563 "^diff":
4563 4564 (diff,
4564 4565 [('r', 'rev', [],
4565 4566 _('revision'), _('REV')),
4566 4567 ('c', 'change', '',
4567 4568 _('change made by revision'), _('REV'))
4568 4569 ] + diffopts + diffopts2 + walkopts + subrepoopts,
4569 4570 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')),
4570 4571 "^export":
4571 4572 (export,
4572 4573 [('o', 'output', '',
4573 4574 _('print output to file with formatted name'), _('FORMAT')),
4574 4575 ('', 'switch-parent', None, _('diff against the second parent')),
4575 4576 ('r', 'rev', [],
4576 4577 _('revisions to export'), _('REV')),
4577 4578 ] + diffopts,
4578 4579 _('[OPTION]... [-o OUTFILESPEC] REV...')),
4579 4580 "^forget":
4580 4581 (forget,
4581 4582 [] + walkopts,
4582 4583 _('[OPTION]... FILE...')),
4583 4584 "grep":
4584 4585 (grep,
4585 4586 [('0', 'print0', None, _('end fields with NUL')),
4586 4587 ('', 'all', None, _('print all revisions that match')),
4587 4588 ('a', 'text', None, _('treat all files as text')),
4588 4589 ('f', 'follow', None,
4589 4590 _('follow changeset history,'
4590 4591 ' or file history across copies and renames')),
4591 4592 ('i', 'ignore-case', None, _('ignore case when matching')),
4592 4593 ('l', 'files-with-matches', None,
4593 4594 _('print only filenames and revisions that match')),
4594 4595 ('n', 'line-number', None, _('print matching line numbers')),
4595 4596 ('r', 'rev', [],
4596 4597 _('only search files changed within revision range'), _('REV')),
4597 4598 ('u', 'user', None, _('list the author (long with -v)')),
4598 4599 ('d', 'date', None, _('list the date (short with -q)')),
4599 4600 ] + walkopts,
4600 4601 _('[OPTION]... PATTERN [FILE]...')),
4601 4602 "heads":
4602 4603 (heads,
4603 4604 [('r', 'rev', '',
4604 4605 _('show only heads which are descendants of STARTREV'),
4605 4606 _('STARTREV')),
4606 4607 ('t', 'topo', False, _('show topological heads only')),
4607 4608 ('a', 'active', False,
4608 4609 _('show active branchheads only (DEPRECATED)')),
4609 4610 ('c', 'closed', False,
4610 4611 _('show normal and closed branch heads')),
4611 4612 ] + templateopts,
4612 4613 _('[-ac] [-r STARTREV] [REV]...')),
4613 4614 "help": (help_, [], _('[TOPIC]')),
4614 4615 "identify|id":
4615 4616 (identify,
4616 4617 [('r', 'rev', '',
4617 4618 _('identify the specified revision'), _('REV')),
4618 4619 ('n', 'num', None, _('show local revision number')),
4619 4620 ('i', 'id', None, _('show global revision id')),
4620 4621 ('b', 'branch', None, _('show branch')),
4621 4622 ('t', 'tags', None, _('show tags')),
4622 4623 ('B', 'bookmarks', None, _('show bookmarks'))],
4623 4624 _('[-nibtB] [-r REV] [SOURCE]')),
4624 4625 "import|patch":
4625 4626 (import_,
4626 4627 [('p', 'strip', 1,
4627 4628 _('directory strip option for patch. This has the same '
4628 4629 'meaning as the corresponding patch option'),
4629 4630 _('NUM')),
4630 4631 ('b', 'base', '',
4631 4632 _('base path'), _('PATH')),
4632 4633 ('f', 'force', None,
4633 4634 _('skip check for outstanding uncommitted changes')),
4634 4635 ('', 'no-commit', None,
4635 4636 _("don't commit, just update the working directory")),
4636 4637 ('', 'exact', None,
4637 4638 _('apply patch to the nodes from which it was generated')),
4638 4639 ('', 'import-branch', None,
4639 4640 _('use any branch information in patch (implied by --exact)'))] +
4640 4641 commitopts + commitopts2 + similarityopts,
4641 4642 _('[OPTION]... PATCH...')),
4642 4643 "incoming|in":
4643 4644 (incoming,
4644 4645 [('f', 'force', None,
4645 4646 _('run even if remote repository is unrelated')),
4646 4647 ('n', 'newest-first', None, _('show newest record first')),
4647 4648 ('', 'bundle', '',
4648 4649 _('file to store the bundles into'), _('FILE')),
4649 4650 ('r', 'rev', [],
4650 4651 _('a remote changeset intended to be added'), _('REV')),
4651 4652 ('B', 'bookmarks', False, _("compare bookmarks")),
4652 4653 ('b', 'branch', [],
4653 4654 _('a specific branch you would like to pull'), _('BRANCH')),
4654 4655 ] + logopts + remoteopts + subrepoopts,
4655 4656 _('[-p] [-n] [-M] [-f] [-r REV]...'
4656 4657 ' [--bundle FILENAME] [SOURCE]')),
4657 4658 "^init":
4658 4659 (init,
4659 4660 remoteopts,
4660 4661 _('[-e CMD] [--remotecmd CMD] [DEST]')),
4661 4662 "locate":
4662 4663 (locate,
4663 4664 [('r', 'rev', '',
4664 4665 _('search the repository as it is in REV'), _('REV')),
4665 4666 ('0', 'print0', None,
4666 4667 _('end filenames with NUL, for use with xargs')),
4667 4668 ('f', 'fullpath', None,
4668 4669 _('print complete paths from the filesystem root')),
4669 4670 ] + walkopts,
4670 4671 _('[OPTION]... [PATTERN]...')),
4671 4672 "^log|history":
4672 4673 (log,
4673 4674 [('f', 'follow', None,
4674 4675 _('follow changeset history,'
4675 4676 ' or file history across copies and renames')),
4676 4677 ('', 'follow-first', None,
4677 4678 _('only follow the first parent of merge changesets')),
4678 4679 ('d', 'date', '',
4679 4680 _('show revisions matching date spec'), _('DATE')),
4680 4681 ('C', 'copies', None, _('show copied files')),
4681 4682 ('k', 'keyword', [],
4682 4683 _('do case-insensitive search for a given text'), _('TEXT')),
4683 4684 ('r', 'rev', [],
4684 4685 _('show the specified revision or range'), _('REV')),
4685 4686 ('', 'removed', None, _('include revisions where files were removed')),
4686 4687 ('m', 'only-merges', None, _('show only merges')),
4687 4688 ('u', 'user', [],
4688 4689 _('revisions committed by user'), _('USER')),
4689 4690 ('', 'only-branch', [],
4690 4691 _('show only changesets within the given named branch (DEPRECATED)'),
4691 4692 _('BRANCH')),
4692 4693 ('b', 'branch', [],
4693 4694 _('show changesets within the given named branch'), _('BRANCH')),
4694 4695 ('P', 'prune', [],
4695 4696 _('do not display revision or any of its ancestors'), _('REV')),
4696 4697 ] + logopts + walkopts,
4697 4698 _('[OPTION]... [FILE]')),
4698 4699 "manifest":
4699 4700 (manifest,
4700 4701 [('r', 'rev', '',
4701 4702 _('revision to display'), _('REV'))],
4702 4703 _('[-r REV]')),
4703 4704 "^merge":
4704 4705 (merge,
4705 4706 [('f', 'force', None, _('force a merge with outstanding changes')),
4706 4707 ('t', 'tool', '', _('specify merge tool')),
4707 4708 ('r', 'rev', '',
4708 4709 _('revision to merge'), _('REV')),
4709 4710 ('P', 'preview', None,
4710 4711 _('review revisions to merge (no merge is performed)'))],
4711 4712 _('[-P] [-f] [[-r] REV]')),
4712 4713 "outgoing|out":
4713 4714 (outgoing,
4714 4715 [('f', 'force', None,
4715 4716 _('run even when the destination is unrelated')),
4716 4717 ('r', 'rev', [],
4717 4718 _('a changeset intended to be included in the destination'),
4718 4719 _('REV')),
4719 4720 ('n', 'newest-first', None, _('show newest record first')),
4720 4721 ('B', 'bookmarks', False, _("compare bookmarks")),
4721 4722 ('b', 'branch', [],
4722 4723 _('a specific branch you would like to push'), _('BRANCH')),
4723 4724 ] + logopts + remoteopts + subrepoopts,
4724 4725 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
4725 4726 "parents":
4726 4727 (parents,
4727 4728 [('r', 'rev', '',
4728 4729 _('show parents of the specified revision'), _('REV')),
4729 4730 ] + templateopts,
4730 4731 _('[-r REV] [FILE]')),
4731 4732 "paths": (paths, [], _('[NAME]')),
4732 4733 "^pull":
4733 4734 (pull,
4734 4735 [('u', 'update', None,
4735 4736 _('update to new branch head if changesets were pulled')),
4736 4737 ('f', 'force', None,
4737 4738 _('run even when remote repository is unrelated')),
4738 4739 ('r', 'rev', [],
4739 4740 _('a remote changeset intended to be added'), _('REV')),
4740 4741 ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
4741 4742 ('b', 'branch', [],
4742 4743 _('a specific branch you would like to pull'), _('BRANCH')),
4743 4744 ] + remoteopts,
4744 4745 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
4745 4746 "^push":
4746 4747 (push,
4747 4748 [('f', 'force', None, _('force push')),
4748 4749 ('r', 'rev', [],
4749 4750 _('a changeset intended to be included in the destination'),
4750 4751 _('REV')),
4751 4752 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
4752 4753 ('b', 'branch', [],
4753 4754 _('a specific branch you would like to push'), _('BRANCH')),
4754 4755 ('', 'new-branch', False, _('allow pushing a new branch')),
4755 4756 ] + remoteopts,
4756 4757 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
4757 4758 "recover": (recover, []),
4758 4759 "^remove|rm":
4759 4760 (remove,
4760 4761 [('A', 'after', None, _('record delete for missing files')),
4761 4762 ('f', 'force', None,
4762 4763 _('remove (and delete) file even if added or modified')),
4763 4764 ] + walkopts,
4764 4765 _('[OPTION]... FILE...')),
4765 4766 "rename|move|mv":
4766 4767 (rename,
4767 4768 [('A', 'after', None, _('record a rename that has already occurred')),
4768 4769 ('f', 'force', None,
4769 4770 _('forcibly copy over an existing managed file')),
4770 4771 ] + walkopts + dryrunopts,
4771 4772 _('[OPTION]... SOURCE... DEST')),
4772 4773 "resolve":
4773 4774 (resolve,
4774 4775 [('a', 'all', None, _('select all unresolved files')),
4775 4776 ('l', 'list', None, _('list state of files needing merge')),
4776 4777 ('m', 'mark', None, _('mark files as resolved')),
4777 4778 ('u', 'unmark', None, _('mark files as unresolved')),
4778 4779 ('t', 'tool', '', _('specify merge tool')),
4779 4780 ('n', 'no-status', None, _('hide status prefix'))]
4780 4781 + walkopts,
4781 4782 _('[OPTION]... [FILE]...')),
4782 4783 "revert":
4783 4784 (revert,
4784 4785 [('a', 'all', None, _('revert all changes when no arguments given')),
4785 4786 ('d', 'date', '',
4786 4787 _('tipmost revision matching date'), _('DATE')),
4787 4788 ('r', 'rev', '',
4788 4789 _('revert to the specified revision'), _('REV')),
4789 4790 ('', 'no-backup', None, _('do not save backup copies of files')),
4790 4791 ] + walkopts + dryrunopts,
4791 4792 _('[OPTION]... [-r REV] [NAME]...')),
4792 4793 "rollback": (rollback, dryrunopts),
4793 4794 "root": (root, []),
4794 4795 "^serve":
4795 4796 (serve,
4796 4797 [('A', 'accesslog', '',
4797 4798 _('name of access log file to write to'), _('FILE')),
4798 4799 ('d', 'daemon', None, _('run server in background')),
4799 4800 ('', 'daemon-pipefds', '',
4800 4801 _('used internally by daemon mode'), _('NUM')),
4801 4802 ('E', 'errorlog', '',
4802 4803 _('name of error log file to write to'), _('FILE')),
4803 4804 # use string type, then we can check if something was passed
4804 4805 ('p', 'port', '',
4805 4806 _('port to listen on (default: 8000)'), _('PORT')),
4806 4807 ('a', 'address', '',
4807 4808 _('address to listen on (default: all interfaces)'), _('ADDR')),
4808 4809 ('', 'prefix', '',
4809 4810 _('prefix path to serve from (default: server root)'), _('PREFIX')),
4810 4811 ('n', 'name', '',
4811 4812 _('name to show in web pages (default: working directory)'),
4812 4813 _('NAME')),
4813 4814 ('', 'web-conf', '',
4814 4815 _('name of the hgweb config file (see "hg help hgweb")'),
4815 4816 _('FILE')),
4816 4817 ('', 'webdir-conf', '',
4817 4818 _('name of the hgweb config file (DEPRECATED)'), _('FILE')),
4818 4819 ('', 'pid-file', '',
4819 4820 _('name of file to write process ID to'), _('FILE')),
4820 4821 ('', 'stdio', None, _('for remote clients')),
4821 4822 ('t', 'templates', '',
4822 4823 _('web templates to use'), _('TEMPLATE')),
4823 4824 ('', 'style', '',
4824 4825 _('template style to use'), _('STYLE')),
4825 4826 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4826 4827 ('', 'certificate', '',
4827 4828 _('SSL certificate file'), _('FILE'))],
4828 4829 _('[OPTION]...')),
4829 4830 "showconfig|debugconfig":
4830 4831 (showconfig,
4831 4832 [('u', 'untrusted', None, _('show untrusted configuration options'))],
4832 4833 _('[-u] [NAME]...')),
4833 4834 "^summary|sum":
4834 4835 (summary,
4835 4836 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
4836 4837 "^status|st":
4837 4838 (status,
4838 4839 [('A', 'all', None, _('show status of all files')),
4839 4840 ('m', 'modified', None, _('show only modified files')),
4840 4841 ('a', 'added', None, _('show only added files')),
4841 4842 ('r', 'removed', None, _('show only removed files')),
4842 4843 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4843 4844 ('c', 'clean', None, _('show only files without changes')),
4844 4845 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4845 4846 ('i', 'ignored', None, _('show only ignored files')),
4846 4847 ('n', 'no-status', None, _('hide status prefix')),
4847 4848 ('C', 'copies', None, _('show source of copied files')),
4848 4849 ('0', 'print0', None,
4849 4850 _('end filenames with NUL, for use with xargs')),
4850 4851 ('', 'rev', [],
4851 4852 _('show difference from revision'), _('REV')),
4852 4853 ('', 'change', '',
4853 4854 _('list the changed files of a revision'), _('REV')),
4854 4855 ] + walkopts + subrepoopts,
4855 4856 _('[OPTION]... [FILE]...')),
4856 4857 "tag":
4857 4858 (tag,
4858 4859 [('f', 'force', None, _('force tag')),
4859 4860 ('l', 'local', None, _('make the tag local')),
4860 4861 ('r', 'rev', '',
4861 4862 _('revision to tag'), _('REV')),
4862 4863 ('', 'remove', None, _('remove a tag')),
4863 4864 # -l/--local is already there, commitopts cannot be used
4864 4865 ('e', 'edit', None, _('edit commit message')),
4865 4866 ('m', 'message', '',
4866 4867 _('use <text> as commit message'), _('TEXT')),
4867 4868 ] + commitopts2,
4868 4869 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
4869 4870 "tags": (tags, [], ''),
4870 4871 "tip":
4871 4872 (tip,
4872 4873 [('p', 'patch', None, _('show patch')),
4873 4874 ('g', 'git', None, _('use git extended diff format')),
4874 4875 ] + templateopts,
4875 4876 _('[-p] [-g]')),
4876 4877 "unbundle":
4877 4878 (unbundle,
4878 4879 [('u', 'update', None,
4879 4880 _('update to new branch head if changesets were unbundled'))],
4880 4881 _('[-u] FILE...')),
4881 4882 "^update|up|checkout|co":
4882 4883 (update,
4883 4884 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
4884 4885 ('c', 'check', None,
4885 4886 _('update across branches if no uncommitted changes')),
4886 4887 ('d', 'date', '',
4887 4888 _('tipmost revision matching date'), _('DATE')),
4888 4889 ('r', 'rev', '',
4889 4890 _('revision'), _('REV'))],
4890 4891 _('[-c] [-C] [-d DATE] [[-r] REV]')),
4891 4892 "verify": (verify, []),
4892 4893 "version": (version_, []),
4893 4894 }
4894 4895
4895 4896 norepo = ("clone init version help debugcommands debugcomplete"
4896 4897 " debugdate debuginstall debugfsinfo debugpushkey debugwireargs"
4897 4898 " debugknown debuggetbundle debugbundle")
4898 4899 optionalrepo = ("identify paths serve showconfig debugancestor debugdag"
4899 4900 " debugdata debugindex debugindexdot")
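
The localrepo.py hunk below swaps util.opener for scmutil.opener when building the repository's openers (self.opener, self.wopener and the store opener). A minimal usage sketch of that call pattern follows; the path and file name are illustrative, not taken from this changeset, and the base directory is assumed to already exist:

    # Sketch: scmutil.opener(base) returns a callable that opens paths
    # relative to base, matching the constructor calls in the hunk below.
    from mercurial import scmutil

    opener = scmutil.opener('/tmp/example-repo/.hg')  # illustrative path
    fp = opener('requires', 'w')                      # opens .hg/requires for writing
    fp.write('revlogv1\n')
    fp.close()
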
@@ -1,1935 +1,1935
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup, subrepo, discovery, pushkey
11 11 import changelog, dirstate, filelog, manifest, context, bookmarks
12 12 import lock, transaction, store, encoding
13 import util, extensions, hook, error
13 import scmutil, util, extensions, hook, error
14 14 import match as matchmod
15 15 import merge as mergemod
16 16 import tags as tagsmod
17 17 import url as urlmod
18 18 from lock import release
19 19 import weakref, errno, os, time, inspect
20 20 propertycache = util.propertycache
21 21
22 22 class localrepository(repo.repository):
23 23 capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
24 24 'known', 'getbundle'))
25 25 supportedformats = set(('revlogv1', 'parentdelta'))
26 26 supported = supportedformats | set(('store', 'fncache', 'shared',
27 27 'dotencode'))
28 28
29 29 def __init__(self, baseui, path=None, create=0):
30 30 repo.repository.__init__(self)
31 31 self.root = os.path.realpath(util.expandpath(path))
32 32 self.path = os.path.join(self.root, ".hg")
33 33 self.origroot = path
34 34 self.auditor = util.path_auditor(self.root, self._checknested)
35 self.opener = util.opener(self.path)
36 self.wopener = util.opener(self.root)
35 self.opener = scmutil.opener(self.path)
36 self.wopener = scmutil.opener(self.root)
37 37 self.baseui = baseui
38 38 self.ui = baseui.copy()
39 39
40 40 try:
41 41 self.ui.readconfig(self.join("hgrc"), self.root)
42 42 extensions.loadall(self.ui)
43 43 except IOError:
44 44 pass
45 45
46 46 if not os.path.isdir(self.path):
47 47 if create:
48 48 if not os.path.exists(path):
49 49 util.makedirs(path)
50 50 util.makedir(self.path, notindexed=True)
51 51 requirements = ["revlogv1"]
52 52 if self.ui.configbool('format', 'usestore', True):
53 53 os.mkdir(os.path.join(self.path, "store"))
54 54 requirements.append("store")
55 55 if self.ui.configbool('format', 'usefncache', True):
56 56 requirements.append("fncache")
57 57 if self.ui.configbool('format', 'dotencode', True):
58 58 requirements.append('dotencode')
59 59 # create an invalid changelog
60 60 self.opener("00changelog.i", "a").write(
61 61 '\0\0\0\2' # represents revlogv2
62 62 ' dummy changelog to prevent using the old repo layout'
63 63 )
64 64 if self.ui.configbool('format', 'parentdelta', False):
65 65 requirements.append("parentdelta")
66 66 else:
67 67 raise error.RepoError(_("repository %s not found") % path)
68 68 elif create:
69 69 raise error.RepoError(_("repository %s already exists") % path)
70 70 else:
71 71 # find requirements
72 72 requirements = set()
73 73 try:
74 74 requirements = set(self.opener("requires").read().splitlines())
75 75 except IOError, inst:
76 76 if inst.errno != errno.ENOENT:
77 77 raise
78 78 for r in requirements - self.supported:
79 79 raise error.RequirementError(
80 80 _("requirement '%s' not supported") % r)
81 81
82 82 self.sharedpath = self.path
83 83 try:
84 84 s = os.path.realpath(self.opener("sharedpath").read())
85 85 if not os.path.exists(s):
86 86 raise error.RepoError(
87 87 _('.hg/sharedpath points to nonexistent directory %s') % s)
88 88 self.sharedpath = s
89 89 except IOError, inst:
90 90 if inst.errno != errno.ENOENT:
91 91 raise
92 92
93 self.store = store.store(requirements, self.sharedpath, util.opener)
93 self.store = store.store(requirements, self.sharedpath, scmutil.opener)
94 94 self.spath = self.store.path
95 95 self.sopener = self.store.opener
96 96 self.sjoin = self.store.join
97 97 self.opener.createmode = self.store.createmode
98 98 self._applyrequirements(requirements)
99 99 if create:
100 100 self._writerequirements()
101 101
102 102 # These two define the set of tags for this repository. _tags
103 103 # maps tag name to node; _tagtypes maps tag name to 'global' or
104 104 # 'local'. (Global tags are defined by .hgtags across all
105 105 # heads, and local tags are defined in .hg/localtags.) They
106 106 # constitute the in-memory cache of tags.
107 107 self._tags = None
108 108 self._tagtypes = None
109 109
110 110 self._branchcache = None
111 111 self._branchcachetip = None
112 112 self.nodetagscache = None
113 113 self.filterpats = {}
114 114 self._datafilters = {}
115 115 self._transref = self._lockref = self._wlockref = None
116 116
117 117 def _applyrequirements(self, requirements):
118 118 self.requirements = requirements
119 119 self.sopener.options = {}
120 120 if 'parentdelta' in requirements:
121 121 self.sopener.options['parentdelta'] = 1
122 122
123 123 def _writerequirements(self):
124 124 reqfile = self.opener("requires", "w")
125 125 for r in self.requirements:
126 126 reqfile.write("%s\n" % r)
127 127 reqfile.close()
128 128
129 129 def _checknested(self, path):
130 130 """Determine if path is a legal nested repository."""
131 131 if not path.startswith(self.root):
132 132 return False
133 133 subpath = path[len(self.root) + 1:]
134 134
135 135 # XXX: Checking against the current working copy is wrong in
136 136 # the sense that it can reject things like
137 137 #
138 138 # $ hg cat -r 10 sub/x.txt
139 139 #
140 140 # if sub/ is no longer a subrepository in the working copy
141 141 # parent revision.
142 142 #
143 143 # However, it can of course also allow things that would have
144 144 # been rejected before, such as the above cat command if sub/
145 145 # is a subrepository now, but was a normal directory before.
146 146 # The old path auditor would have rejected by mistake since it
147 147 # panics when it sees sub/.hg/.
148 148 #
149 149 # All in all, checking against the working copy seems sensible
150 150 # since we want to prevent access to nested repositories on
151 151 # the filesystem *now*.
152 152 ctx = self[None]
153 153 parts = util.splitpath(subpath)
154 154 while parts:
155 155 prefix = os.sep.join(parts)
156 156 if prefix in ctx.substate:
157 157 if prefix == subpath:
158 158 return True
159 159 else:
160 160 sub = ctx.sub(prefix)
161 161 return sub.checknested(subpath[len(prefix) + 1:])
162 162 else:
163 163 parts.pop()
164 164 return False
165 165
166 166 @util.propertycache
167 167 def _bookmarks(self):
168 168 return bookmarks.read(self)
169 169
170 170 @util.propertycache
171 171 def _bookmarkcurrent(self):
172 172 return bookmarks.readcurrent(self)
173 173
174 174 @propertycache
175 175 def changelog(self):
176 176 c = changelog.changelog(self.sopener)
177 177 if 'HG_PENDING' in os.environ:
178 178 p = os.environ['HG_PENDING']
179 179 if p.startswith(self.root):
180 180 c.readpending('00changelog.i.a')
181 181 self.sopener.options['defversion'] = c.version
182 182 return c
183 183
184 184 @propertycache
185 185 def manifest(self):
186 186 return manifest.manifest(self.sopener)
187 187
188 188 @propertycache
189 189 def dirstate(self):
190 190 warned = [0]
191 191 def validate(node):
192 192 try:
193 193 r = self.changelog.rev(node)
194 194 return node
195 195 except error.LookupError:
196 196 if not warned[0]:
197 197 warned[0] = True
198 198 self.ui.warn(_("warning: ignoring unknown"
199 199 " working parent %s!\n") % short(node))
200 200 return nullid
201 201
202 202 return dirstate.dirstate(self.opener, self.ui, self.root, validate)
203 203
204 204 def __getitem__(self, changeid):
205 205 if changeid is None:
206 206 return context.workingctx(self)
207 207 return context.changectx(self, changeid)
208 208
209 209 def __contains__(self, changeid):
210 210 try:
211 211 return bool(self.lookup(changeid))
212 212 except error.RepoLookupError:
213 213 return False
214 214
215 215 def __nonzero__(self):
216 216 return True
217 217
218 218 def __len__(self):
219 219 return len(self.changelog)
220 220
221 221 def __iter__(self):
222 222 for i in xrange(len(self)):
223 223 yield i
224 224
225 225 def url(self):
226 226 return 'file:' + self.root
227 227
228 228 def hook(self, name, throw=False, **args):
229 229 return hook.hook(self.ui, self, name, throw, **args)
230 230
231 231 tag_disallowed = ':\r\n'
232 232
233 233 def _tag(self, names, node, message, local, user, date, extra={}):
234 234 if isinstance(names, str):
235 235 allchars = names
236 236 names = (names,)
237 237 else:
238 238 allchars = ''.join(names)
239 239 for c in self.tag_disallowed:
240 240 if c in allchars:
241 241 raise util.Abort(_('%r cannot be used in a tag name') % c)
242 242
243 243 branches = self.branchmap()
244 244 for name in names:
245 245 self.hook('pretag', throw=True, node=hex(node), tag=name,
246 246 local=local)
247 247 if name in branches:
248 248 self.ui.warn(_("warning: tag %s conflicts with existing"
249 249 " branch name\n") % name)
250 250
251 251 def writetags(fp, names, munge, prevtags):
252 252 fp.seek(0, 2)
253 253 if prevtags and prevtags[-1] != '\n':
254 254 fp.write('\n')
255 255 for name in names:
256 256 m = munge and munge(name) or name
257 257 if self._tagtypes and name in self._tagtypes:
258 258 old = self._tags.get(name, nullid)
259 259 fp.write('%s %s\n' % (hex(old), m))
260 260 fp.write('%s %s\n' % (hex(node), m))
261 261 fp.close()
262 262
263 263 prevtags = ''
264 264 if local:
265 265 try:
266 266 fp = self.opener('localtags', 'r+')
267 267 except IOError:
268 268 fp = self.opener('localtags', 'a')
269 269 else:
270 270 prevtags = fp.read()
271 271
272 272 # local tags are stored in the current charset
273 273 writetags(fp, names, None, prevtags)
274 274 for name in names:
275 275 self.hook('tag', node=hex(node), tag=name, local=local)
276 276 return
277 277
278 278 try:
279 279 fp = self.wfile('.hgtags', 'rb+')
280 280 except IOError:
281 281 fp = self.wfile('.hgtags', 'ab')
282 282 else:
283 283 prevtags = fp.read()
284 284
285 285 # committed tags are stored in UTF-8
286 286 writetags(fp, names, encoding.fromlocal, prevtags)
287 287
288 288 fp.close()
289 289
290 290 if '.hgtags' not in self.dirstate:
291 291 self[None].add(['.hgtags'])
292 292
293 293 m = matchmod.exact(self.root, '', ['.hgtags'])
294 294 tagnode = self.commit(message, user, date, extra=extra, match=m)
295 295
296 296 for name in names:
297 297 self.hook('tag', node=hex(node), tag=name, local=local)
298 298
299 299 return tagnode
300 300
301 301 def tag(self, names, node, message, local, user, date):
302 302 '''tag a revision with one or more symbolic names.
303 303
304 304 names is a list of strings or, when adding a single tag, names may be a
305 305 string.
306 306
307 307 if local is True, the tags are stored in a per-repository file.
308 308 otherwise, they are stored in the .hgtags file, and a new
309 309 changeset is committed with the change.
310 310
311 311 keyword arguments:
312 312
313 313 local: whether to store tags in non-version-controlled file
314 314 (default False)
315 315
316 316 message: commit message to use if committing
317 317
318 318 user: name of user to use if committing
319 319
320 320 date: date tuple to use if committing'''
321 321
322 322 if not local:
323 323 for x in self.status()[:5]:
324 324 if '.hgtags' in x:
325 325 raise util.Abort(_('working copy of .hgtags is changed '
326 326 '(please commit .hgtags manually)'))
327 327
328 328 self.tags() # instantiate the cache
329 329 self._tag(names, node, message, local, user, date)
330 330
331 331 def tags(self):
332 332 '''return a mapping of tag to node'''
333 333 if self._tags is None:
334 334 (self._tags, self._tagtypes) = self._findtags()
335 335
336 336 return self._tags
337 337
338 338 def _findtags(self):
339 339 '''Do the hard work of finding tags. Return a pair of dicts
340 340 (tags, tagtypes) where tags maps tag name to node, and tagtypes
341 341 maps tag name to a string like \'global\' or \'local\'.
342 342 Subclasses or extensions are free to add their own tags, but
343 343 should be aware that the returned dicts will be retained for the
344 344 duration of the localrepo object.'''
345 345
346 346 # XXX what tagtype should subclasses/extensions use? Currently
347 347 # mq and bookmarks add tags, but do not set the tagtype at all.
348 348 # Should each extension invent its own tag type? Should there
349 349 # be one tagtype for all such "virtual" tags? Or is the status
350 350 # quo fine?
351 351
352 352 alltags = {} # map tag name to (node, hist)
353 353 tagtypes = {}
354 354
355 355 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
356 356 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
357 357
358 358 # Build the return dicts. Have to re-encode tag names because
359 359 # the tags module always uses UTF-8 (in order not to lose info
360 360 # writing to the cache), but the rest of Mercurial wants them in
361 361 # local encoding.
362 362 tags = {}
363 363 for (name, (node, hist)) in alltags.iteritems():
364 364 if node != nullid:
365 365 try:
366 366 # ignore tags to unknown nodes
367 367 self.changelog.lookup(node)
368 368 tags[encoding.tolocal(name)] = node
369 369 except error.LookupError:
370 370 pass
371 371 tags['tip'] = self.changelog.tip()
372 372 tagtypes = dict([(encoding.tolocal(name), value)
373 373 for (name, value) in tagtypes.iteritems()])
374 374 return (tags, tagtypes)
375 375
376 376 def tagtype(self, tagname):
377 377 '''
378 378 return the type of the given tag. result can be:
379 379
380 380 'local' : a local tag
381 381 'global' : a global tag
382 382 None : tag does not exist
383 383 '''
384 384
385 385 self.tags()
386 386
387 387 return self._tagtypes.get(tagname)
388 388
389 389 def tagslist(self):
390 390 '''return a list of tags ordered by revision'''
391 391 l = []
392 392 for t, n in self.tags().iteritems():
393 393 r = self.changelog.rev(n)
394 394 l.append((r, t, n))
395 395 return [(t, n) for r, t, n in sorted(l)]
396 396
397 397 def nodetags(self, node):
398 398 '''return the tags associated with a node'''
399 399 if not self.nodetagscache:
400 400 self.nodetagscache = {}
401 401 for t, n in self.tags().iteritems():
402 402 self.nodetagscache.setdefault(n, []).append(t)
403 403 for tags in self.nodetagscache.itervalues():
404 404 tags.sort()
405 405 return self.nodetagscache.get(node, [])
406 406
407 407 def nodebookmarks(self, node):
408 408 marks = []
409 409 for bookmark, n in self._bookmarks.iteritems():
410 410 if n == node:
411 411 marks.append(bookmark)
412 412 return sorted(marks)
413 413
414 414 def _branchtags(self, partial, lrev):
415 415 # TODO: rename this function?
416 416 tiprev = len(self) - 1
417 417 if lrev != tiprev:
418 418 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
419 419 self._updatebranchcache(partial, ctxgen)
420 420 self._writebranchcache(partial, self.changelog.tip(), tiprev)
421 421
422 422 return partial
423 423
424 424 def updatebranchcache(self):
425 425 tip = self.changelog.tip()
426 426 if self._branchcache is not None and self._branchcachetip == tip:
427 427 return self._branchcache
428 428
429 429 oldtip = self._branchcachetip
430 430 self._branchcachetip = tip
431 431 if oldtip is None or oldtip not in self.changelog.nodemap:
432 432 partial, last, lrev = self._readbranchcache()
433 433 else:
434 434 lrev = self.changelog.rev(oldtip)
435 435 partial = self._branchcache
436 436
437 437 self._branchtags(partial, lrev)
438 438 # this private cache holds all heads (not just tips)
439 439 self._branchcache = partial
440 440
441 441 def branchmap(self):
442 442 '''returns a dictionary {branch: [branchheads]}'''
443 443 self.updatebranchcache()
444 444 return self._branchcache
445 445
446 446 def branchtags(self):
447 447 '''return a dict where branch names map to the tipmost head of
448 448 the branch, open heads come before closed'''
449 449 bt = {}
450 450 for bn, heads in self.branchmap().iteritems():
451 451 tip = heads[-1]
452 452 for h in reversed(heads):
453 453 if 'close' not in self.changelog.read(h)[5]:
454 454 tip = h
455 455 break
456 456 bt[bn] = tip
457 457 return bt
458 458
459 459 def _readbranchcache(self):
460 460 partial = {}
461 461 try:
462 462 f = self.opener("cache/branchheads")
463 463 lines = f.read().split('\n')
464 464 f.close()
465 465 except (IOError, OSError):
466 466 return {}, nullid, nullrev
467 467
468 468 try:
469 469 last, lrev = lines.pop(0).split(" ", 1)
470 470 last, lrev = bin(last), int(lrev)
471 471 if lrev >= len(self) or self[lrev].node() != last:
472 472 # invalidate the cache
473 473 raise ValueError('invalidating branch cache (tip differs)')
474 474 for l in lines:
475 475 if not l:
476 476 continue
477 477 node, label = l.split(" ", 1)
478 478 label = encoding.tolocal(label.strip())
479 479 partial.setdefault(label, []).append(bin(node))
480 480 except KeyboardInterrupt:
481 481 raise
482 482 except Exception, inst:
483 483 if self.ui.debugflag:
484 484 self.ui.warn(str(inst), '\n')
485 485 partial, last, lrev = {}, nullid, nullrev
486 486 return partial, last, lrev
487 487
488 488 def _writebranchcache(self, branches, tip, tiprev):
489 489 try:
490 490 f = self.opener("cache/branchheads", "w", atomictemp=True)
491 491 f.write("%s %s\n" % (hex(tip), tiprev))
492 492 for label, nodes in branches.iteritems():
493 493 for node in nodes:
494 494 f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
495 495 f.rename()
496 496 except (IOError, OSError):
497 497 pass
498 498
499 499 def _updatebranchcache(self, partial, ctxgen):
500 500 # collect new branch entries
501 501 newbranches = {}
502 502 for c in ctxgen:
503 503 newbranches.setdefault(c.branch(), []).append(c.node())
504 504 # if older branchheads are reachable from new ones, they aren't
505 505 # really branchheads. Note checking parents is insufficient:
506 506 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
507 507 for branch, newnodes in newbranches.iteritems():
508 508 bheads = partial.setdefault(branch, [])
509 509 bheads.extend(newnodes)
510 510 if len(bheads) <= 1:
511 511 continue
512 512 # starting from tip means fewer passes over reachable
513 513 while newnodes:
514 514 latest = newnodes.pop()
515 515 if latest not in bheads:
516 516 continue
517 517 minbhrev = self[min([self[bh].rev() for bh in bheads])].node()
518 518 reachable = self.changelog.reachable(latest, minbhrev)
519 519 reachable.remove(latest)
520 520 bheads = [b for b in bheads if b not in reachable]
521 521 partial[branch] = bheads
522 522
523 523 def lookup(self, key):
524 524 if isinstance(key, int):
525 525 return self.changelog.node(key)
526 526 elif key == '.':
527 527 return self.dirstate.p1()
528 528 elif key == 'null':
529 529 return nullid
530 530 elif key == 'tip':
531 531 return self.changelog.tip()
532 532 n = self.changelog._match(key)
533 533 if n:
534 534 return n
535 535 if key in self._bookmarks:
536 536 return self._bookmarks[key]
537 537 if key in self.tags():
538 538 return self.tags()[key]
539 539 if key in self.branchtags():
540 540 return self.branchtags()[key]
541 541 n = self.changelog._partialmatch(key)
542 542 if n:
543 543 return n
544 544
545 545 # can't find key, check if it might have come from damaged dirstate
546 546 if key in self.dirstate.parents():
547 547 raise error.Abort(_("working directory has unknown parent '%s'!")
548 548 % short(key))
549 549 try:
550 550 if len(key) == 20:
551 551 key = hex(key)
552 552 except:
553 553 pass
554 554 raise error.RepoLookupError(_("unknown revision '%s'") % key)
555 555
556 556 def lookupbranch(self, key, remote=None):
557 557 repo = remote or self
558 558 if key in repo.branchmap():
559 559 return key
560 560
561 561 repo = (remote and remote.local()) and remote or self
562 562 return repo[key].branch()
563 563
564 564 def known(self, nodes):
565 565 nm = self.changelog.nodemap
566 566 return [(n in nm) for n in nodes]
567 567
568 568 def local(self):
569 569 return True
570 570
571 571 def join(self, f):
572 572 return os.path.join(self.path, f)
573 573
574 574 def wjoin(self, f):
575 575 return os.path.join(self.root, f)
576 576
577 577 def file(self, f):
578 578 if f[0] == '/':
579 579 f = f[1:]
580 580 return filelog.filelog(self.sopener, f)
581 581
582 582 def changectx(self, changeid):
583 583 return self[changeid]
584 584
585 585 def parents(self, changeid=None):
586 586 '''get list of changectxs for parents of changeid'''
587 587 return self[changeid].parents()
588 588
589 589 def filectx(self, path, changeid=None, fileid=None):
590 590 """changeid can be a changeset revision, node, or tag.
591 591 fileid can be a file revision or node."""
592 592 return context.filectx(self, path, changeid, fileid)
593 593
594 594 def getcwd(self):
595 595 return self.dirstate.getcwd()
596 596
597 597 def pathto(self, f, cwd=None):
598 598 return self.dirstate.pathto(f, cwd)
599 599
600 600 def wfile(self, f, mode='r'):
601 601 return self.wopener(f, mode)
602 602
603 603 def _link(self, f):
604 604 return os.path.islink(self.wjoin(f))
605 605
606 606 def _loadfilter(self, filter):
607 607 if filter not in self.filterpats:
608 608 l = []
609 609 for pat, cmd in self.ui.configitems(filter):
610 610 if cmd == '!':
611 611 continue
612 612 mf = matchmod.match(self.root, '', [pat])
613 613 fn = None
614 614 params = cmd
615 615 for name, filterfn in self._datafilters.iteritems():
616 616 if cmd.startswith(name):
617 617 fn = filterfn
618 618 params = cmd[len(name):].lstrip()
619 619 break
620 620 if not fn:
621 621 fn = lambda s, c, **kwargs: util.filter(s, c)
622 622 # Wrap old filters not supporting keyword arguments
623 623 if not inspect.getargspec(fn)[2]:
624 624 oldfn = fn
625 625 fn = lambda s, c, **kwargs: oldfn(s, c)
626 626 l.append((mf, fn, params))
627 627 self.filterpats[filter] = l
628 628 return self.filterpats[filter]
629 629
630 630 def _filter(self, filterpats, filename, data):
631 631 for mf, fn, cmd in filterpats:
632 632 if mf(filename):
633 633 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
634 634 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
635 635 break
636 636
637 637 return data
638 638
639 639 @propertycache
640 640 def _encodefilterpats(self):
641 641 return self._loadfilter('encode')
642 642
643 643 @propertycache
644 644 def _decodefilterpats(self):
645 645 return self._loadfilter('decode')
646 646
647 647 def adddatafilter(self, name, filter):
648 648 self._datafilters[name] = filter
649 649
650 650 def wread(self, filename):
651 651 if self._link(filename):
652 652 data = os.readlink(self.wjoin(filename))
653 653 else:
654 654 data = self.wopener(filename, 'r').read()
655 655 return self._filter(self._encodefilterpats, filename, data)
656 656
657 657 def wwrite(self, filename, data, flags):
658 658 data = self._filter(self._decodefilterpats, filename, data)
659 659 if 'l' in flags:
660 660 self.wopener.symlink(data, filename)
661 661 else:
662 662 self.wopener(filename, 'w').write(data)
663 663 if 'x' in flags:
664 664 util.set_flags(self.wjoin(filename), False, True)
665 665
666 666 def wwritedata(self, filename, data):
667 667 return self._filter(self._decodefilterpats, filename, data)
668 668
669 669 def transaction(self, desc):
670 670 tr = self._transref and self._transref() or None
671 671 if tr and tr.running():
672 672 return tr.nest()
673 673
674 674 # abort here if the journal already exists
675 675 if os.path.exists(self.sjoin("journal")):
676 676 raise error.RepoError(
677 677 _("abandoned transaction found - run hg recover"))
678 678
679 679 # save dirstate for rollback
680 680 try:
681 681 ds = self.opener("dirstate").read()
682 682 except IOError:
683 683 ds = ""
684 684 self.opener("journal.dirstate", "w").write(ds)
685 685 self.opener("journal.branch", "w").write(
686 686 encoding.fromlocal(self.dirstate.branch()))
687 687 self.opener("journal.desc", "w").write("%d\n%s\n" % (len(self), desc))
688 688
689 689 renames = [(self.sjoin("journal"), self.sjoin("undo")),
690 690 (self.join("journal.dirstate"), self.join("undo.dirstate")),
691 691 (self.join("journal.branch"), self.join("undo.branch")),
692 692 (self.join("journal.desc"), self.join("undo.desc"))]
693 693 tr = transaction.transaction(self.ui.warn, self.sopener,
694 694 self.sjoin("journal"),
695 695 aftertrans(renames),
696 696 self.store.createmode)
697 697 self._transref = weakref.ref(tr)
698 698 return tr
699 699
700 700 def recover(self):
701 701 lock = self.lock()
702 702 try:
703 703 if os.path.exists(self.sjoin("journal")):
704 704 self.ui.status(_("rolling back interrupted transaction\n"))
705 705 transaction.rollback(self.sopener, self.sjoin("journal"),
706 706 self.ui.warn)
707 707 self.invalidate()
708 708 return True
709 709 else:
710 710 self.ui.warn(_("no interrupted transaction available\n"))
711 711 return False
712 712 finally:
713 713 lock.release()
714 714
715 715 def rollback(self, dryrun=False):
716 716 wlock = lock = None
717 717 try:
718 718 wlock = self.wlock()
719 719 lock = self.lock()
720 720 if os.path.exists(self.sjoin("undo")):
721 721 try:
722 722 args = self.opener("undo.desc", "r").read().splitlines()
723 723 if len(args) >= 3 and self.ui.verbose:
724 724 desc = _("repository tip rolled back to revision %s"
725 725 " (undo %s: %s)\n") % (
726 726 int(args[0]) - 1, args[1], args[2])
727 727 elif len(args) >= 2:
728 728 desc = _("repository tip rolled back to revision %s"
729 729 " (undo %s)\n") % (
730 730 int(args[0]) - 1, args[1])
731 731 except IOError:
732 732 desc = _("rolling back unknown transaction\n")
733 733 self.ui.status(desc)
734 734 if dryrun:
735 735 return
736 736 transaction.rollback(self.sopener, self.sjoin("undo"),
737 737 self.ui.warn)
738 738 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
739 739 if os.path.exists(self.join('undo.bookmarks')):
740 740 util.rename(self.join('undo.bookmarks'),
741 741 self.join('bookmarks'))
742 742 try:
743 743 branch = self.opener("undo.branch").read()
744 744 self.dirstate.setbranch(branch)
745 745 except IOError:
746 746 self.ui.warn(_("named branch could not be reset, "
747 747 "current branch is still: %s\n")
748 748 % self.dirstate.branch())
749 749 self.invalidate()
750 750 self.dirstate.invalidate()
751 751 self.destroyed()
752 752 parents = tuple([p.rev() for p in self.parents()])
753 753 if len(parents) > 1:
754 754 self.ui.status(_("working directory now based on "
755 755 "revisions %d and %d\n") % parents)
756 756 else:
757 757 self.ui.status(_("working directory now based on "
758 758 "revision %d\n") % parents)
759 759 else:
760 760 self.ui.warn(_("no rollback information available\n"))
761 761 return 1
762 762 finally:
763 763 release(lock, wlock)
764 764
765 765 def invalidatecaches(self):
766 766 self._tags = None
767 767 self._tagtypes = None
768 768 self.nodetagscache = None
769 769 self._branchcache = None # in UTF-8
770 770 self._branchcachetip = None
771 771
772 772 def invalidate(self):
773 773 for a in ("changelog", "manifest", "_bookmarks", "_bookmarkcurrent"):
774 774 if a in self.__dict__:
775 775 delattr(self, a)
776 776 self.invalidatecaches()
777 777
778 778 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
779 779 try:
780 780 l = lock.lock(lockname, 0, releasefn, desc=desc)
781 781 except error.LockHeld, inst:
782 782 if not wait:
783 783 raise
784 784 self.ui.warn(_("waiting for lock on %s held by %r\n") %
785 785 (desc, inst.locker))
786 786 # default to 600 seconds timeout
787 787 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
788 788 releasefn, desc=desc)
789 789 if acquirefn:
790 790 acquirefn()
791 791 return l
792 792
793 793 def lock(self, wait=True):
794 794 '''Lock the repository store (.hg/store) and return a weak reference
795 795 to the lock. Use this before modifying the store (e.g. committing or
796 796 stripping). If you are opening a transaction, get a lock as well.'''
797 797 l = self._lockref and self._lockref()
798 798 if l is not None and l.held:
799 799 l.lock()
800 800 return l
801 801
802 802 l = self._lock(self.sjoin("lock"), wait, self.store.write,
803 803 self.invalidate, _('repository %s') % self.origroot)
804 804 self._lockref = weakref.ref(l)
805 805 return l
806 806
807 807 def wlock(self, wait=True):
808 808 '''Lock the non-store parts of the repository (everything under
809 809 .hg except .hg/store) and return a weak reference to the lock.
810 810 Use this before modifying files in .hg.'''
811 811 l = self._wlockref and self._wlockref()
812 812 if l is not None and l.held:
813 813 l.lock()
814 814 return l
815 815
816 816 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
817 817 self.dirstate.invalidate, _('working directory of %s') %
818 818 self.origroot)
819 819 self._wlockref = weakref.ref(l)
820 820 return l
821 821
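
Taken together, the lock() and wlock() docstrings above describe the ordering used elsewhere in this file (rollback() takes wlock before lock; commitctx() pairs lock with a transaction). A sketch of that combined pattern, assuming `repo` is an existing localrepository and the operation name is invented:

    # Sketch only: wlock first, then lock, then a transaction, released in
    # reverse order -- mirroring rollback() and commitctx() above.
    from mercurial.lock import release

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction('example-operation')
        # ... modify .hg and .hg/store here ...
        tr.close()
    finally:
        if tr:
            tr.release()
        release(lock, wlock)
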
822 822 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
823 823 """
824 824 commit an individual file as part of a larger transaction
825 825 """
826 826
827 827 fname = fctx.path()
828 828 text = fctx.data()
829 829 flog = self.file(fname)
830 830 fparent1 = manifest1.get(fname, nullid)
831 831 fparent2 = fparent2o = manifest2.get(fname, nullid)
832 832
833 833 meta = {}
834 834 copy = fctx.renamed()
835 835 if copy and copy[0] != fname:
836 836 # Mark the new revision of this file as a copy of another
837 837 # file. This copy data will effectively act as a parent
838 838 # of this new revision. If this is a merge, the first
839 839 # parent will be the nullid (meaning "look up the copy data")
840 840 # and the second one will be the other parent. For example:
841 841 #
842 842 # 0 --- 1 --- 3 rev1 changes file foo
843 843 # \ / rev2 renames foo to bar and changes it
844 844 # \- 2 -/ rev3 should have bar with all changes and
845 845 # should record that bar descends from
846 846 # bar in rev2 and foo in rev1
847 847 #
848 848 # this allows this merge to succeed:
849 849 #
850 850 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
851 851 # \ / merging rev3 and rev4 should use bar@rev2
852 852 # \- 2 --- 4 as the merge base
853 853 #
854 854
855 855 cfname = copy[0]
856 856 crev = manifest1.get(cfname)
857 857 newfparent = fparent2
858 858
859 859 if manifest2: # branch merge
860 860 if fparent2 == nullid or crev is None: # copied on remote side
861 861 if cfname in manifest2:
862 862 crev = manifest2[cfname]
863 863 newfparent = fparent1
864 864
865 865 # find source in nearest ancestor if we've lost track
866 866 if not crev:
867 867 self.ui.debug(" %s: searching for copy revision for %s\n" %
868 868 (fname, cfname))
869 869 for ancestor in self[None].ancestors():
870 870 if cfname in ancestor:
871 871 crev = ancestor[cfname].filenode()
872 872 break
873 873
874 874 if crev:
875 875 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
876 876 meta["copy"] = cfname
877 877 meta["copyrev"] = hex(crev)
878 878 fparent1, fparent2 = nullid, newfparent
879 879 else:
880 880 self.ui.warn(_("warning: can't find ancestor for '%s' "
881 881 "copied from '%s'!\n") % (fname, cfname))
882 882
883 883 elif fparent2 != nullid:
884 884 # is one parent an ancestor of the other?
885 885 fparentancestor = flog.ancestor(fparent1, fparent2)
886 886 if fparentancestor == fparent1:
887 887 fparent1, fparent2 = fparent2, nullid
888 888 elif fparentancestor == fparent2:
889 889 fparent2 = nullid
890 890
891 891 # is the file changed?
892 892 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
893 893 changelist.append(fname)
894 894 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
895 895
896 896 # are just the flags changed during merge?
897 897 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
898 898 changelist.append(fname)
899 899
900 900 return fparent1
901 901
902 902 def commit(self, text="", user=None, date=None, match=None, force=False,
903 903 editor=False, extra={}):
904 904 """Add a new revision to current repository.
905 905
906 906 Revision information is gathered from the working directory,
907 907 match can be used to filter the committed files. If editor is
908 908 supplied, it is called to get a commit message.
909 909 """
910 910
911 911 def fail(f, msg):
912 912 raise util.Abort('%s: %s' % (f, msg))
913 913
914 914 if not match:
915 915 match = matchmod.always(self.root, '')
916 916
917 917 if not force:
918 918 vdirs = []
919 919 match.dir = vdirs.append
920 920 match.bad = fail
921 921
922 922 wlock = self.wlock()
923 923 try:
924 924 wctx = self[None]
925 925 merge = len(wctx.parents()) > 1
926 926
927 927 if (not force and merge and match and
928 928 (match.files() or match.anypats())):
929 929 raise util.Abort(_('cannot partially commit a merge '
930 930 '(do not specify files or patterns)'))
931 931
932 932 changes = self.status(match=match, clean=force)
933 933 if force:
934 934 changes[0].extend(changes[6]) # mq may commit unchanged files
935 935
936 936 # check subrepos
937 937 subs = []
938 938 removedsubs = set()
939 939 for p in wctx.parents():
940 940 removedsubs.update(s for s in p.substate if match(s))
941 941 for s in wctx.substate:
942 942 removedsubs.discard(s)
943 943 if match(s) and wctx.sub(s).dirty():
944 944 subs.append(s)
945 945 if (subs or removedsubs):
946 946 if (not match('.hgsub') and
947 947 '.hgsub' in (wctx.modified() + wctx.added())):
948 948 raise util.Abort(_("can't commit subrepos without .hgsub"))
949 949 if '.hgsubstate' not in changes[0]:
950 950 changes[0].insert(0, '.hgsubstate')
951 951
952 952 if subs and not self.ui.configbool('ui', 'commitsubrepos', True):
953 953 changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
954 954 if changedsubs:
955 955 raise util.Abort(_("uncommitted changes in subrepo %s")
956 956 % changedsubs[0])
957 957
958 958 # make sure all explicit patterns are matched
959 959 if not force and match.files():
960 960 matched = set(changes[0] + changes[1] + changes[2])
961 961
962 962 for f in match.files():
963 963 if f == '.' or f in matched or f in wctx.substate:
964 964 continue
965 965 if f in changes[3]: # missing
966 966 fail(f, _('file not found!'))
967 967 if f in vdirs: # visited directory
968 968 d = f + '/'
969 969 for mf in matched:
970 970 if mf.startswith(d):
971 971 break
972 972 else:
973 973 fail(f, _("no match under directory!"))
974 974 elif f not in self.dirstate:
975 975 fail(f, _("file not tracked!"))
976 976
977 977 if (not force and not extra.get("close") and not merge
978 978 and not (changes[0] or changes[1] or changes[2])
979 979 and wctx.branch() == wctx.p1().branch()):
980 980 return None
981 981
982 982 ms = mergemod.mergestate(self)
983 983 for f in changes[0]:
984 984 if f in ms and ms[f] == 'u':
985 985 raise util.Abort(_("unresolved merge conflicts "
986 986 "(see hg help resolve)"))
987 987
988 988 cctx = context.workingctx(self, text, user, date, extra, changes)
989 989 if editor:
990 990 cctx._text = editor(self, cctx, subs)
991 991 edited = (text != cctx._text)
992 992
993 993 # commit subs
994 994 if subs or removedsubs:
995 995 state = wctx.substate.copy()
996 996 for s in sorted(subs):
997 997 sub = wctx.sub(s)
998 998 self.ui.status(_('committing subrepository %s\n') %
999 999 subrepo.subrelpath(sub))
1000 1000 sr = sub.commit(cctx._text, user, date)
1001 1001 state[s] = (state[s][0], sr)
1002 1002 subrepo.writestate(self, state)
1003 1003
1004 1004 # Save commit message in case this transaction gets rolled back
1005 1005 # (e.g. by a pretxncommit hook). Leave the content alone on
1006 1006 # the assumption that the user will use the same editor again.
1007 1007 msgfile = self.opener('last-message.txt', 'wb')
1008 1008 msgfile.write(cctx._text)
1009 1009 msgfile.close()
1010 1010
1011 1011 p1, p2 = self.dirstate.parents()
1012 1012 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
1013 1013 try:
1014 1014 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
1015 1015 ret = self.commitctx(cctx, True)
1016 1016 except:
1017 1017 if edited:
1018 1018 msgfn = self.pathto(msgfile.name[len(self.root)+1:])
1019 1019 self.ui.write(
1020 1020 _('note: commit message saved in %s\n') % msgfn)
1021 1021 raise
1022 1022
1023 1023 # update bookmarks, dirstate and mergestate
1024 1024 bookmarks.update(self, p1, ret)
1025 1025 for f in changes[0] + changes[1]:
1026 1026 self.dirstate.normal(f)
1027 1027 for f in changes[2]:
1028 1028 self.dirstate.forget(f)
1029 1029 self.dirstate.setparents(ret)
1030 1030 ms.reset()
1031 1031 finally:
1032 1032 wlock.release()
1033 1033
1034 1034 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
1035 1035 return ret
1036 1036
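
A hedged usage sketch of commit()'s match argument, following the same matchmod.exact() plus commit() pairing that _tag() uses above; `repo` and 'notes.txt' are illustrative and assume the file is tracked and modified:

    # Commit a single file, as the docstring's "match can be used to filter
    # the committed files" describes.
    from mercurial import match as matchmod

    m = matchmod.exact(repo.root, '', ['notes.txt'])  # illustrative file name
    node = repo.commit(text='example: update notes.txt', user='someone', match=m)
    # commit() returns None if nothing changed, otherwise the new changeset node.
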
1037 1037 def commitctx(self, ctx, error=False):
1038 1038 """Add a new revision to current repository.
1039 1039 Revision information is passed via the context argument.
1040 1040 """
1041 1041
1042 1042 tr = lock = None
1043 1043 removed = list(ctx.removed())
1044 1044 p1, p2 = ctx.p1(), ctx.p2()
1045 1045 m1 = p1.manifest().copy()
1046 1046 m2 = p2.manifest()
1047 1047 user = ctx.user()
1048 1048
1049 1049 lock = self.lock()
1050 1050 try:
1051 1051 tr = self.transaction("commit")
1052 1052 trp = weakref.proxy(tr)
1053 1053
1054 1054 # check in files
1055 1055 new = {}
1056 1056 changed = []
1057 1057 linkrev = len(self)
1058 1058 for f in sorted(ctx.modified() + ctx.added()):
1059 1059 self.ui.note(f + "\n")
1060 1060 try:
1061 1061 fctx = ctx[f]
1062 1062 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
1063 1063 changed)
1064 1064 m1.set(f, fctx.flags())
1065 1065 except OSError, inst:
1066 1066 self.ui.warn(_("trouble committing %s!\n") % f)
1067 1067 raise
1068 1068 except IOError, inst:
1069 1069 errcode = getattr(inst, 'errno', errno.ENOENT)
1070 1070 if error or errcode and errcode != errno.ENOENT:
1071 1071 self.ui.warn(_("trouble committing %s!\n") % f)
1072 1072 raise
1073 1073 else:
1074 1074 removed.append(f)
1075 1075
1076 1076 # update manifest
1077 1077 m1.update(new)
1078 1078 removed = [f for f in sorted(removed) if f in m1 or f in m2]
1079 1079 drop = [f for f in removed if f in m1]
1080 1080 for f in drop:
1081 1081 del m1[f]
1082 1082 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
1083 1083 p2.manifestnode(), (new, drop))
1084 1084
1085 1085 # update changelog
1086 1086 self.changelog.delayupdate()
1087 1087 n = self.changelog.add(mn, changed + removed, ctx.description(),
1088 1088 trp, p1.node(), p2.node(),
1089 1089 user, ctx.date(), ctx.extra().copy())
1090 1090 p = lambda: self.changelog.writepending() and self.root or ""
1091 1091 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
1092 1092 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
1093 1093 parent2=xp2, pending=p)
1094 1094 self.changelog.finalize(trp)
1095 1095 tr.close()
1096 1096
1097 1097 if self._branchcache:
1098 1098 self.updatebranchcache()
1099 1099 return n
1100 1100 finally:
1101 1101 if tr:
1102 1102 tr.release()
1103 1103 lock.release()
1104 1104
1105 1105 def destroyed(self):
1106 1106 '''Inform the repository that nodes have been destroyed.
1107 1107 Intended for use by strip and rollback, so there's a common
1108 1108 place for anything that has to be done after destroying history.'''
1109 1109 # XXX it might be nice if we could take the list of destroyed
1110 1110 # nodes, but I don't see an easy way for rollback() to do that
1111 1111
1112 1112 # Ensure the persistent tag cache is updated. Doing it now
1113 1113 # means that the tag cache only has to worry about destroyed
1114 1114 # heads immediately after a strip/rollback. That in turn
1115 1115 # guarantees that "cachetip == currenttip" (comparing both rev
1116 1116 # and node) always means no nodes have been added or destroyed.
1117 1117
1118 1118 # XXX this is suboptimal when qrefresh'ing: we strip the current
1119 1119 # head, refresh the tag cache, then immediately add a new head.
1120 1120 # But I think doing it this way is necessary for the "instant
1121 1121 # tag cache retrieval" case to work.
1122 1122 self.invalidatecaches()
1123 1123
1124 1124 def walk(self, match, node=None):
1125 1125 '''
1126 1126 walk recursively through the directory tree or a given
1127 1127 changeset, finding all files matched by the match
1128 1128 function
1129 1129 '''
1130 1130 return self[node].walk(match)
1131 1131
1132 1132 def status(self, node1='.', node2=None, match=None,
1133 1133 ignored=False, clean=False, unknown=False,
1134 1134 listsubrepos=False):
1135 1135 """return status of files between two nodes or node and working directory
1136 1136
1137 1137 If node1 is None, use the first dirstate parent instead.
1138 1138 If node2 is None, compare node1 with working directory.
1139 1139 """
1140 1140
1141 1141 def mfmatches(ctx):
1142 1142 mf = ctx.manifest().copy()
1143 1143 for fn in mf.keys():
1144 1144 if not match(fn):
1145 1145 del mf[fn]
1146 1146 return mf
1147 1147
1148 1148 if isinstance(node1, context.changectx):
1149 1149 ctx1 = node1
1150 1150 else:
1151 1151 ctx1 = self[node1]
1152 1152 if isinstance(node2, context.changectx):
1153 1153 ctx2 = node2
1154 1154 else:
1155 1155 ctx2 = self[node2]
1156 1156
1157 1157 working = ctx2.rev() is None
1158 1158 parentworking = working and ctx1 == self['.']
1159 1159 match = match or matchmod.always(self.root, self.getcwd())
1160 1160 listignored, listclean, listunknown = ignored, clean, unknown
1161 1161
1162 1162 # load earliest manifest first for caching reasons
1163 1163 if not working and ctx2.rev() < ctx1.rev():
1164 1164 ctx2.manifest()
1165 1165
1166 1166 if not parentworking:
1167 1167 def bad(f, msg):
1168 1168 if f not in ctx1:
1169 1169 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1170 1170 match.bad = bad
1171 1171
1172 1172 if working: # we need to scan the working dir
1173 1173 subrepos = []
1174 1174 if '.hgsub' in self.dirstate:
1175 1175 subrepos = ctx1.substate.keys()
1176 1176 s = self.dirstate.status(match, subrepos, listignored,
1177 1177 listclean, listunknown)
1178 1178 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1179 1179
1180 1180 # check for any possibly clean files
1181 1181 if parentworking and cmp:
1182 1182 fixup = []
1183 1183 # do a full compare of any files that might have changed
1184 1184 for f in sorted(cmp):
1185 1185 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1186 1186 or ctx1[f].cmp(ctx2[f])):
1187 1187 modified.append(f)
1188 1188 else:
1189 1189 fixup.append(f)
1190 1190
1191 1191 # update dirstate for files that are actually clean
1192 1192 if fixup:
1193 1193 if listclean:
1194 1194 clean += fixup
1195 1195
1196 1196 try:
1197 1197 # updating the dirstate is optional
1198 1198 # so we don't wait on the lock
1199 1199 wlock = self.wlock(False)
1200 1200 try:
1201 1201 for f in fixup:
1202 1202 self.dirstate.normal(f)
1203 1203 finally:
1204 1204 wlock.release()
1205 1205 except error.LockError:
1206 1206 pass
1207 1207
1208 1208 if not parentworking:
1209 1209 mf1 = mfmatches(ctx1)
1210 1210 if working:
1211 1211 # we are comparing working dir against non-parent
1212 1212 # generate a pseudo-manifest for the working dir
1213 1213 mf2 = mfmatches(self['.'])
1214 1214 for f in cmp + modified + added:
1215 1215 mf2[f] = None
1216 1216 mf2.set(f, ctx2.flags(f))
1217 1217 for f in removed:
1218 1218 if f in mf2:
1219 1219 del mf2[f]
1220 1220 else:
1221 1221 # we are comparing two revisions
1222 1222 deleted, unknown, ignored = [], [], []
1223 1223 mf2 = mfmatches(ctx2)
1224 1224
1225 1225 modified, added, clean = [], [], []
1226 1226 for fn in mf2:
1227 1227 if fn in mf1:
1228 1228 if (fn not in deleted and
1229 1229 (mf1.flags(fn) != mf2.flags(fn) or
1230 1230 (mf1[fn] != mf2[fn] and
1231 1231 (mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
1232 1232 modified.append(fn)
1233 1233 elif listclean:
1234 1234 clean.append(fn)
1235 1235 del mf1[fn]
1236 1236 elif fn not in deleted:
1237 1237 added.append(fn)
1238 1238 removed = mf1.keys()
1239 1239
1240 1240 r = modified, added, removed, deleted, unknown, ignored, clean
1241 1241
1242 1242 if listsubrepos:
1243 1243 for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
1244 1244 if working:
1245 1245 rev2 = None
1246 1246 else:
1247 1247 rev2 = ctx2.substate[subpath][1]
1248 1248 try:
1249 1249 submatch = matchmod.narrowmatcher(subpath, match)
1250 1250 s = sub.status(rev2, match=submatch, ignored=listignored,
1251 1251 clean=listclean, unknown=listunknown,
1252 1252 listsubrepos=True)
1253 1253 for rfiles, sfiles in zip(r, s):
1254 1254 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
1255 1255 except error.LookupError:
1256 1256 self.ui.status(_("skipping missing subrepository: %s\n")
1257 1257 % subpath)
1258 1258
1259 1259 for l in r:
1260 1260 l.sort()
1261 1261 return r
1262 1262
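
For reference, a sketch of unpacking status()'s seven-tuple in the order the method builds it just above; `repo` is assumed, and the keyword flags simply enable the lists that are otherwise left empty:

    modified, added, removed, deleted, unknown, ignored, clean = repo.status(
        ignored=True, clean=True, unknown=True)
    # Each element is a sorted list of file names, per the final loop above.
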
1263 1263 def heads(self, start=None):
1264 1264 heads = self.changelog.heads(start)
1265 1265 # sort the output in rev descending order
1266 1266 return sorted(heads, key=self.changelog.rev, reverse=True)
1267 1267
1268 1268 def branchheads(self, branch=None, start=None, closed=False):
1269 1269 '''return a (possibly filtered) list of heads for the given branch
1270 1270
1271 1271 Heads are returned in topological order, from newest to oldest.
1272 1272 If branch is None, use the dirstate branch.
1273 1273 If start is not None, return only heads reachable from start.
1274 1274 If closed is True, return heads that are marked as closed as well.
1275 1275 '''
1276 1276 if branch is None:
1277 1277 branch = self[None].branch()
1278 1278 branches = self.branchmap()
1279 1279 if branch not in branches:
1280 1280 return []
1281 1281 # the cache returns heads ordered lowest to highest
1282 1282 bheads = list(reversed(branches[branch]))
1283 1283 if start is not None:
1284 1284 # filter out the heads that cannot be reached from startrev
1285 1285 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1286 1286 bheads = [h for h in bheads if h in fbheads]
1287 1287 if not closed:
1288 1288 bheads = [h for h in bheads if
1289 1289 ('close' not in self.changelog.read(h)[5])]
1290 1290 return bheads
1291 1291
1292 1292 def branches(self, nodes):
1293 1293 if not nodes:
1294 1294 nodes = [self.changelog.tip()]
1295 1295 b = []
1296 1296 for n in nodes:
1297 1297 t = n
1298 1298 while 1:
1299 1299 p = self.changelog.parents(n)
1300 1300 if p[1] != nullid or p[0] == nullid:
1301 1301 b.append((t, n, p[0], p[1]))
1302 1302 break
1303 1303 n = p[0]
1304 1304 return b
1305 1305
1306 1306 def between(self, pairs):
1307 1307 r = []
1308 1308
1309 1309 for top, bottom in pairs:
1310 1310 n, l, i = top, [], 0
1311 1311 f = 1
1312 1312
1313 1313 while n != bottom and n != nullid:
1314 1314 p = self.changelog.parents(n)[0]
1315 1315 if i == f:
1316 1316 l.append(n)
1317 1317 f = f * 2
1318 1318 n = p
1319 1319 i += 1
1320 1320
1321 1321 r.append(l)
1322 1322
1323 1323 return r
1324 1324
1325 1325 def pull(self, remote, heads=None, force=False):
1326 1326 lock = self.lock()
1327 1327 try:
1328 1328 usecommon = remote.capable('getbundle')
1329 1329 tmp = discovery.findcommonincoming(self, remote, heads=heads,
1330 1330 force=force, commononly=usecommon)
1331 1331 common, fetch, rheads = tmp
1332 1332 if not fetch:
1333 1333 self.ui.status(_("no changes found\n"))
1334 1334 result = 0
1335 1335 else:
1336 1336 if heads is None and list(common) == [nullid]:
1337 1337 self.ui.status(_("requesting all changes\n"))
1338 1338 elif heads is None and remote.capable('changegroupsubset'):
1339 1339 # issue1320, avoid a race if remote changed after discovery
1340 1340 heads = rheads
1341 1341
1342 1342 if usecommon:
1343 1343 cg = remote.getbundle('pull', common=common,
1344 1344 heads=heads or rheads)
1345 1345 elif heads is None:
1346 1346 cg = remote.changegroup(fetch, 'pull')
1347 1347 elif not remote.capable('changegroupsubset'):
1348 1348 raise util.Abort(_("partial pull cannot be done because "
1349 1349 "other repository doesn't support "
1350 1350 "changegroupsubset."))
1351 1351 else:
1352 1352 cg = remote.changegroupsubset(fetch, heads, 'pull')
1353 1353 result = self.addchangegroup(cg, 'pull', remote.url(),
1354 1354 lock=lock)
1355 1355 finally:
1356 1356 lock.release()
1357 1357
1358 1358 return result
1359 1359
1360 1360 def checkpush(self, force, revs):
1361 1361 """Extensions can override this function if additional checks have
1362 1362 to be performed before pushing, or call it if they override push
1363 1363 command.
1364 1364 """
1365 1365 pass
1366 1366
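
checkpush() is intended as an extension hook; below is a hypothetical sketch of overriding it from an extension's reposetup(). The class name and the policy it enforces are invented for illustration only:

    from mercurial import util

    def reposetup(ui, repo):
        if not repo.local():
            return

        class vetopushrepo(repo.__class__):
            def checkpush(self, force, revs):
                super(vetopushrepo, self).checkpush(force, revs)
                # Invented policy: require an explicit -r/--rev unless forced.
                if not force and not revs:
                    raise util.Abort('push requires an explicit --rev here')

        repo.__class__ = vetopushrepo
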
1367 1367 def push(self, remote, force=False, revs=None, newbranch=False):
1368 1368 '''Push outgoing changesets (limited by revs) from the current
1369 1369 repository to remote. Return an integer:
1370 1370 - 0 means HTTP error *or* nothing to push
1371 1371 - 1 means we pushed and remote head count is unchanged *or*
1372 1372 we have outgoing changesets but refused to push
1373 1373 - other values as described by addchangegroup()
1374 1374 '''
1375 1375 # there are two ways to push to remote repo:
1376 1376 #
1377 1377 # addchangegroup assumes local user can lock remote
1378 1378 # repo (local filesystem, old ssh servers).
1379 1379 #
1380 1380 # unbundle assumes local user cannot lock remote repo (new ssh
1381 1381 # servers, http servers).
1382 1382
1383 1383 self.checkpush(force, revs)
1384 1384 lock = None
1385 1385 unbundle = remote.capable('unbundle')
1386 1386 if not unbundle:
1387 1387 lock = remote.lock()
1388 1388 try:
1389 1389 cg, remote_heads = discovery.prepush(self, remote, force, revs,
1390 1390 newbranch)
1391 1391 ret = remote_heads
1392 1392 if cg is not None:
1393 1393 if unbundle:
1394 1394 # local repo finds heads on server, finds out what
1395 1395 # revs it must push. once revs transferred, if server
1396 1396 # finds it has different heads (someone else won
1397 1397 # commit/push race), server aborts.
1398 1398 if force:
1399 1399 remote_heads = ['force']
1400 1400 # ssh: return remote's addchangegroup()
1401 1401 # http: return remote's addchangegroup() or 0 for error
1402 1402 ret = remote.unbundle(cg, remote_heads, 'push')
1403 1403 else:
1404 1404 # we return an integer indicating remote head count change
1405 1405 ret = remote.addchangegroup(cg, 'push', self.url(),
1406 1406 lock=lock)
1407 1407 finally:
1408 1408 if lock is not None:
1409 1409 lock.release()
1410 1410
1411 1411 self.ui.debug("checking for updated bookmarks\n")
1412 1412 rb = remote.listkeys('bookmarks')
1413 1413 for k in rb.keys():
1414 1414 if k in self._bookmarks:
1415 1415 nr, nl = rb[k], hex(self._bookmarks[k])
1416 1416 if nr in self:
1417 1417 cr = self[nr]
1418 1418 cl = self[nl]
1419 1419 if cl in cr.descendants():
1420 1420 r = remote.pushkey('bookmarks', k, nr, nl)
1421 1421 if r:
1422 1422 self.ui.status(_("updating bookmark %s\n") % k)
1423 1423 else:
1424 1424 self.ui.warn(_('updating bookmark %s'
1425 1425 ' failed!\n') % k)
1426 1426
1427 1427 return ret
1428 1428
1429 1429 def changegroupinfo(self, nodes, source):
1430 1430 if self.ui.verbose or source == 'bundle':
1431 1431 self.ui.status(_("%d changesets found\n") % len(nodes))
1432 1432 if self.ui.debugflag:
1433 1433 self.ui.debug("list of changesets:\n")
1434 1434 for node in nodes:
1435 1435 self.ui.debug("%s\n" % hex(node))
1436 1436
1437 1437 def changegroupsubset(self, bases, heads, source):
1438 1438 """Compute a changegroup consisting of all the nodes that are
1439 1439 descendants of any of the bases and ancestors of any of the heads.
1440 1440 Return a chunkbuffer object whose read() method will return
1441 1441 successive changegroup chunks.
1442 1442
1443 1443 It is fairly complex as determining which filenodes and which
1444 1444 manifest nodes need to be included for the changeset to be complete
1445 1445 is non-trivial.
1446 1446
1447 1447 Another wrinkle is doing the reverse, figuring out which changeset in
1448 1448 the changegroup a particular filenode or manifestnode belongs to.
1449 1449 """
1450 1450 cl = self.changelog
1451 1451 if not bases:
1452 1452 bases = [nullid]
1453 1453 csets, bases, heads = cl.nodesbetween(bases, heads)
1454 1454 # We assume that all ancestors of bases are known
1455 1455 common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
1456 1456 return self._changegroupsubset(common, csets, heads, source)
1457 1457
1458 1458 def getbundle(self, source, heads=None, common=None):
1459 1459 """Like changegroupsubset, but returns the set difference between the
1460 1460 ancestors of heads and the ancestors of common.
1461 1461
1462 1462 If heads is None, use the local heads. If common is None, use [nullid].
1463 1463
1464 1464 The nodes in common might not all be known locally due to the way the
1465 1465 current discovery protocol works.
1466 1466 """
1467 1467 cl = self.changelog
1468 1468 if common:
1469 1469 nm = cl.nodemap
1470 1470 common = [n for n in common if n in nm]
1471 1471 else:
1472 1472 common = [nullid]
1473 1473 if not heads:
1474 1474 heads = cl.heads()
1475 1475 common, missing = cl.findcommonmissing(common, heads)
1476 1476 return self._changegroupsubset(common, missing, heads, source)
1477 1477
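
A small sketch of getbundle()'s defaults as described in its docstring (no heads means the local heads, no common means [nullid], i.e. everything); `repo` and the source label are illustrative:

    # With no heads/common given, this bundles all local changesets.
    cg = repo.getbundle('pull')
    # Per changegroupsubset()'s docstring, the result's read() returns
    # successive changegroup chunks.
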
1478 1478 def _changegroupsubset(self, commonrevs, csets, heads, source):
1479 1479
1480 1480 cl = self.changelog
1481 1481 mf = self.manifest
1482 1482 mfs = {} # needed manifests
1483 1483 fnodes = {} # needed file nodes
1484 1484 changedfiles = set()
1485 1485 fstate = ['', {}]
1486 1486 count = [0]
1487 1487
1488 1488 # can we go through the fast path ?
1489 1489 heads.sort()
1490 1490 if heads == sorted(self.heads()):
1491 1491 return self._changegroup(csets, source)
1492 1492
1493 1493 # slow path
1494 1494 self.hook('preoutgoing', throw=True, source=source)
1495 1495 self.changegroupinfo(csets, source)
1496 1496
1497 1497 # filter any nodes that claim to be part of the known set
1498 1498 def prune(revlog, missing):
1499 1499 for n in missing:
1500 1500 if revlog.linkrev(revlog.rev(n)) not in commonrevs:
1501 1501 yield n
1502 1502
1503 1503 def lookup(revlog, x):
1504 1504 if revlog == cl:
1505 1505 c = cl.read(x)
1506 1506 changedfiles.update(c[3])
1507 1507 mfs.setdefault(c[0], x)
1508 1508 count[0] += 1
1509 1509 self.ui.progress(_('bundling'), count[0], unit=_('changesets'))
1510 1510 return x
1511 1511 elif revlog == mf:
1512 1512 clnode = mfs[x]
1513 1513 mdata = mf.readfast(x)
1514 1514 for f in changedfiles:
1515 1515 if f in mdata:
1516 1516 fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
1517 1517 count[0] += 1
1518 1518 self.ui.progress(_('bundling'), count[0],
1519 1519 unit=_('manifests'), total=len(mfs))
1520 1520 return mfs[x]
1521 1521 else:
1522 1522 self.ui.progress(
1523 1523 _('bundling'), count[0], item=fstate[0],
1524 1524 unit=_('files'), total=len(changedfiles))
1525 1525 return fstate[1][x]
1526 1526
1527 1527 bundler = changegroup.bundle10(lookup)
1528 1528
1529 1529 def gengroup():
1530 1530 # Create a changenode group generator that will call our functions
1531 1531 # back to lookup the owning changenode and collect information.
1532 1532 for chunk in cl.group(csets, bundler):
1533 1533 yield chunk
1534 1534 self.ui.progress(_('bundling'), None)
1535 1535
1536 1536 # Create a generator for the manifestnodes that calls our lookup
1537 1537 # and data collection functions back.
1538 1538 count[0] = 0
1539 1539 for chunk in mf.group(prune(mf, mfs), bundler):
1540 1540 yield chunk
1541 1541 self.ui.progress(_('bundling'), None)
1542 1542
1543 1543 mfs.clear()
1544 1544
1545 1545 # Go through all our files in order sorted by name.
1546 1546 count[0] = 0
1547 1547 for fname in sorted(changedfiles):
1548 1548 filerevlog = self.file(fname)
1549 1549 if not len(filerevlog):
1550 1550 raise util.Abort(_("empty or missing revlog for %s") % fname)
1551 1551 fstate[0] = fname
1552 1552 fstate[1] = fnodes.pop(fname, {})
1553 1553 first = True
1554 1554
1555 1555 for chunk in filerevlog.group(prune(filerevlog, fstate[1]),
1556 1556 bundler):
1557 1557 if first:
1558 1558 if chunk == bundler.close():
1559 1559 break
1560 1560 count[0] += 1
1561 1561 yield bundler.fileheader(fname)
1562 1562 first = False
1563 1563 yield chunk
1564 1564 # Signal that no more groups are left.
1565 1565 yield bundler.close()
1566 1566 self.ui.progress(_('bundling'), None)
1567 1567
1568 1568 if csets:
1569 1569 self.hook('outgoing', node=hex(csets[0]), source=source)
1570 1570
1571 1571 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1572 1572
1573 1573 def changegroup(self, basenodes, source):
1574 1574 # to avoid a race we use changegroupsubset() (issue1320)
1575 1575 return self.changegroupsubset(basenodes, self.heads(), source)
1576 1576
1577 1577 def _changegroup(self, nodes, source):
1578 1578 """Compute the changegroup of all nodes that we have that a recipient
1579 1579 doesn't. Return a chunkbuffer object whose read() method will return
1580 1580 successive changegroup chunks.
1581 1581
1582 1582 This is much easier than the previous function as we can assume that
1583 1583 the recipient has any changenode we aren't sending them.
1584 1584
1585 1585 nodes is the set of nodes to send"""
1586 1586
1587 1587 cl = self.changelog
1588 1588 mf = self.manifest
1589 1589 mfs = {}
1590 1590 changedfiles = set()
1591 1591 fstate = ['']
1592 1592 count = [0]
1593 1593
1594 1594 self.hook('preoutgoing', throw=True, source=source)
1595 1595 self.changegroupinfo(nodes, source)
1596 1596
1597 1597 revset = set([cl.rev(n) for n in nodes])
1598 1598
1599 1599 def gennodelst(log):
1600 1600 for r in log:
1601 1601 if log.linkrev(r) in revset:
1602 1602 yield log.node(r)
1603 1603
1604 1604 def lookup(revlog, x):
1605 1605 if revlog == cl:
1606 1606 c = cl.read(x)
1607 1607 changedfiles.update(c[3])
1608 1608 mfs.setdefault(c[0], x)
1609 1609 count[0] += 1
1610 1610 self.ui.progress(_('bundling'), count[0], unit=_('changesets'))
1611 1611 return x
1612 1612 elif revlog == mf:
1613 1613 count[0] += 1
1614 1614 self.ui.progress(_('bundling'), count[0],
1615 1615 unit=_('manifests'), total=len(mfs))
1616 1616 return cl.node(revlog.linkrev(revlog.rev(x)))
1617 1617 else:
1618 1618 self.ui.progress(
1619 1619 _('bundling'), count[0], item=fstate[0],
1620 1620 total=len(changedfiles), unit=_('files'))
1621 1621 return cl.node(revlog.linkrev(revlog.rev(x)))
1622 1622
1623 1623 bundler = changegroup.bundle10(lookup)
1624 1624
1625 1625 def gengroup():
1626 1626 '''yield a sequence of changegroup chunks (strings)'''
1627 1627 # construct a list of all changed files
1628 1628
1629 1629 for chunk in cl.group(nodes, bundler):
1630 1630 yield chunk
1631 1631 self.ui.progress(_('bundling'), None)
1632 1632
1633 1633 count[0] = 0
1634 1634 for chunk in mf.group(gennodelst(mf), bundler):
1635 1635 yield chunk
1636 1636 self.ui.progress(_('bundling'), None)
1637 1637
1638 1638 count[0] = 0
1639 1639 for fname in sorted(changedfiles):
1640 1640 filerevlog = self.file(fname)
1641 1641 if not len(filerevlog):
1642 1642 raise util.Abort(_("empty or missing revlog for %s") % fname)
1643 1643 fstate[0] = fname
1644 1644 first = True
1645 1645 for chunk in filerevlog.group(gennodelst(filerevlog), bundler):
1646 1646 if first:
1647 1647 if chunk == bundler.close():
1648 1648 break
1649 1649 count[0] += 1
1650 1650 yield bundler.fileheader(fname)
1651 1651 first = False
1652 1652 yield chunk
1653 1653 yield bundler.close()
1654 1654 self.ui.progress(_('bundling'), None)
1655 1655
1656 1656 if nodes:
1657 1657 self.hook('outgoing', node=hex(nodes[0]), source=source)
1658 1658
1659 1659 return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
1660 1660
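gengroup() above emits the changelog group, then the manifest group, then a fileheader plus a group per changed file, with bundler.close() ending each group. On the wire each piece is a length-prefixed chunk: a big-endian 32-bit length that counts its own four header bytes, with a zero length closing a group. A rough, dependency-free sketch of that framing (simplified, in-memory only):

import struct
from cStringIO import StringIO

def writechunk(data):
    # a chunk is a big-endian 32-bit length (counting these 4 header bytes)
    # followed by the payload
    return struct.pack(">l", len(data) + 4) + data

def closechunk():
    # a bare zero-length header closes the current group
    return struct.pack(">l", 0)

def readgroup(fp):
    """Yield chunk payloads until the empty chunk that closes the group."""
    while True:
        length = struct.unpack(">l", fp.read(4))[0]
        if length <= 4:
            return
        yield fp.read(length - 4)

stream = StringIO(writechunk("rev one") + writechunk("rev two") + closechunk())
assert list(readgroup(stream)) == ["rev one", "rev two"]
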
1661 1661 def addchangegroup(self, source, srctype, url, emptyok=False, lock=None):
1662 1662 """Add the changegroup returned by source.read() to this repo.
1663 1663 srctype is a string like 'push', 'pull', or 'unbundle'. url is
1664 1664 the URL of the repo where this changegroup is coming from.
1665 1665 If lock is not None, the function takes ownership of the lock
1666 1666 and releases it after the changegroup is added.
1667 1667
1668 1668 Return an integer summarizing the change to this repo:
1669 1669 - nothing changed or no source: 0
1670 1670 - more heads than before: 1+added heads (2..n)
1671 1671 - fewer heads than before: -1-removed heads (-2..-n)
1672 1672 - number of heads stays the same: 1
1673 1673 """
1674 1674 def csmap(x):
1675 1675 self.ui.debug("add changeset %s\n" % short(x))
1676 1676 return len(cl)
1677 1677
1678 1678 def revmap(x):
1679 1679 return cl.rev(x)
1680 1680
1681 1681 if not source:
1682 1682 return 0
1683 1683
1684 1684 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1685 1685
1686 1686 changesets = files = revisions = 0
1687 1687 efiles = set()
1688 1688
1689 1689 # write changelog data to temp files so concurrent readers will not see
1690 1690 # inconsistent view
1691 1691 cl = self.changelog
1692 1692 cl.delayupdate()
1693 1693 oldheads = len(cl.heads())
1694 1694
1695 1695 tr = self.transaction("\n".join([srctype, urlmod.hidepassword(url)]))
1696 1696 try:
1697 1697 trp = weakref.proxy(tr)
1698 1698 # pull off the changeset group
1699 1699 self.ui.status(_("adding changesets\n"))
1700 1700 clstart = len(cl)
1701 1701 class prog(object):
1702 1702 step = _('changesets')
1703 1703 count = 1
1704 1704 ui = self.ui
1705 1705 total = None
1706 1706 def __call__(self):
1707 1707 self.ui.progress(self.step, self.count, unit=_('chunks'),
1708 1708 total=self.total)
1709 1709 self.count += 1
1710 1710 pr = prog()
1711 1711 source.callback = pr
1712 1712
1713 1713 if (cl.addgroup(source, csmap, trp) is None
1714 1714 and not emptyok):
1715 1715 raise util.Abort(_("received changelog group is empty"))
1716 1716 clend = len(cl)
1717 1717 changesets = clend - clstart
1718 1718 for c in xrange(clstart, clend):
1719 1719 efiles.update(self[c].files())
1720 1720 efiles = len(efiles)
1721 1721 self.ui.progress(_('changesets'), None)
1722 1722
1723 1723 # pull off the manifest group
1724 1724 self.ui.status(_("adding manifests\n"))
1725 1725 pr.step = _('manifests')
1726 1726 pr.count = 1
1727 1727 pr.total = changesets # manifests <= changesets
1728 1728 # no need to check for empty manifest group here:
1729 1729 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1730 1730 # no new manifest will be created and the manifest group will
1731 1731 # be empty during the pull
1732 1732 self.manifest.addgroup(source, revmap, trp)
1733 1733 self.ui.progress(_('manifests'), None)
1734 1734
1735 1735 needfiles = {}
1736 1736 if self.ui.configbool('server', 'validate', default=False):
1737 1737 # validate incoming csets have their manifests
1738 1738 for cset in xrange(clstart, clend):
1739 1739 mfest = self.changelog.read(self.changelog.node(cset))[0]
1740 1740 mfest = self.manifest.readdelta(mfest)
1741 1741 # store file nodes we must see
1742 1742 for f, n in mfest.iteritems():
1743 1743 needfiles.setdefault(f, set()).add(n)
1744 1744
1745 1745 # process the files
1746 1746 self.ui.status(_("adding file changes\n"))
1747 1747 pr.step = 'files'
1748 1748 pr.count = 1
1749 1749 pr.total = efiles
1750 1750 source.callback = None
1751 1751
1752 1752 while 1:
1753 1753 f = source.chunk()
1754 1754 if not f:
1755 1755 break
1756 1756 self.ui.debug("adding %s revisions\n" % f)
1757 1757 pr()
1758 1758 fl = self.file(f)
1759 1759 o = len(fl)
1760 1760 if fl.addgroup(source, revmap, trp) is None:
1761 1761 raise util.Abort(_("received file revlog group is empty"))
1762 1762 revisions += len(fl) - o
1763 1763 files += 1
1764 1764 if f in needfiles:
1765 1765 needs = needfiles[f]
1766 1766 for new in xrange(o, len(fl)):
1767 1767 n = fl.node(new)
1768 1768 if n in needs:
1769 1769 needs.remove(n)
1770 1770 if not needs:
1771 1771 del needfiles[f]
1772 1772 self.ui.progress(_('files'), None)
1773 1773
1774 1774 for f, needs in needfiles.iteritems():
1775 1775 fl = self.file(f)
1776 1776 for n in needs:
1777 1777 try:
1778 1778 fl.rev(n)
1779 1779 except error.LookupError:
1780 1780 raise util.Abort(
1781 1781 _('missing file data for %s:%s - run hg verify') %
1782 1782 (f, hex(n)))
1783 1783
1784 1784 newheads = len(cl.heads())
1785 1785 heads = ""
1786 1786 if oldheads and newheads != oldheads:
1787 1787 heads = _(" (%+d heads)") % (newheads - oldheads)
1788 1788
1789 1789 self.ui.status(_("added %d changesets"
1790 1790 " with %d changes to %d files%s\n")
1791 1791 % (changesets, revisions, files, heads))
1792 1792
1793 1793 if changesets > 0:
1794 1794 p = lambda: cl.writepending() and self.root or ""
1795 1795 self.hook('pretxnchangegroup', throw=True,
1796 1796 node=hex(cl.node(clstart)), source=srctype,
1797 1797 url=url, pending=p)
1798 1798
1799 1799 # make changelog see real files again
1800 1800 cl.finalize(trp)
1801 1801
1802 1802 tr.close()
1803 1803 finally:
1804 1804 tr.release()
1805 1805 if lock:
1806 1806 lock.release()
1807 1807
1808 1808 if changesets > 0:
1809 1809 # forcefully update the on-disk branch cache
1810 1810 self.ui.debug("updating the branch cache\n")
1811 1811 self.updatebranchcache()
1812 1812 self.hook("changegroup", node=hex(cl.node(clstart)),
1813 1813 source=srctype, url=url)
1814 1814
1815 1815 for i in xrange(clstart, clend):
1816 1816 self.hook("incoming", node=hex(cl.node(i)),
1817 1817 source=srctype, url=url)
1818 1818
1819 1819 # never return 0 here:
1820 1820 if newheads < oldheads:
1821 1821 return newheads - oldheads - 1
1822 1822 else:
1823 1823 return newheads - oldheads + 1
1824 1824
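The value returned at the end of addchangegroup() encodes the head-count change exactly as described in its docstring, and callers such as pull and unbundle use it to report newly created heads. A purely illustrative helper (hypothetical, not part of localrepo) that decodes the convention:

def describe_addchangegroup_result(ret):
    # 0: nothing changed; 1: heads unchanged; 2..n: 1 + added heads;
    # -2..-n: -1 - removed heads
    if ret == 0:
        return "no changes"
    if ret == 1:
        return "changes added, head count unchanged"
    if ret > 1:
        return "%d new head(s)" % (ret - 1)
    return "%d head(s) removed" % (-ret - 1)

assert describe_addchangegroup_result(0) == "no changes"
assert describe_addchangegroup_result(1) == "changes added, head count unchanged"
assert describe_addchangegroup_result(3) == "2 new head(s)"
assert describe_addchangegroup_result(-2) == "1 head(s) removed"
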
1825 1825
1826 1826 def stream_in(self, remote, requirements):
1827 1827 lock = self.lock()
1828 1828 try:
1829 1829 fp = remote.stream_out()
1830 1830 l = fp.readline()
1831 1831 try:
1832 1832 resp = int(l)
1833 1833 except ValueError:
1834 1834 raise error.ResponseError(
1835 1835 _('Unexpected response from remote server:'), l)
1836 1836 if resp == 1:
1837 1837 raise util.Abort(_('operation forbidden by server'))
1838 1838 elif resp == 2:
1839 1839 raise util.Abort(_('locking the remote repository failed'))
1840 1840 elif resp != 0:
1841 1841 raise util.Abort(_('the server sent an unknown error code'))
1842 1842 self.ui.status(_('streaming all changes\n'))
1843 1843 l = fp.readline()
1844 1844 try:
1845 1845 total_files, total_bytes = map(int, l.split(' ', 1))
1846 1846 except (ValueError, TypeError):
1847 1847 raise error.ResponseError(
1848 1848 _('Unexpected response from remote server:'), l)
1849 1849 self.ui.status(_('%d files to transfer, %s of data\n') %
1850 1850 (total_files, util.bytecount(total_bytes)))
1851 1851 start = time.time()
1852 1852 for i in xrange(total_files):
1853 1853 # XXX doesn't support '\n' or '\r' in filenames
1854 1854 l = fp.readline()
1855 1855 try:
1856 1856 name, size = l.split('\0', 1)
1857 1857 size = int(size)
1858 1858 except (ValueError, TypeError):
1859 1859 raise error.ResponseError(
1860 1860 _('Unexpected response from remote server:'), l)
1861 1861 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1862 1862 # for backwards compat, name was partially encoded
1863 1863 ofp = self.sopener(store.decodedir(name), 'w')
1864 1864 for chunk in util.filechunkiter(fp, limit=size):
1865 1865 ofp.write(chunk)
1866 1866 ofp.close()
1867 1867 elapsed = time.time() - start
1868 1868 if elapsed <= 0:
1869 1869 elapsed = 0.001
1870 1870 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1871 1871 (util.bytecount(total_bytes), elapsed,
1872 1872 util.bytecount(total_bytes / elapsed)))
1873 1873
1874 1874 # new requirements = old non-format requirements + new format-related
1875 1875 # requirements from the streamed-in repository
1876 1876 requirements.update(set(self.requirements) - self.supportedformats)
1877 1877 self._applyrequirements(requirements)
1878 1878 self._writerequirements()
1879 1879
1880 1880 self.invalidate()
1881 1881 return len(self.heads()) + 1
1882 1882 finally:
1883 1883 lock.release()
1884 1884
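stream_in() above parses a very small wire format: a numeric status line, a '<total files> <total bytes>' line, and then, for each file, a header consisting of the store path, a NUL byte and the size, followed by exactly that many raw bytes. A simplified sketch of a reader for the per-file framing (in-memory data, no error handling, made-up store paths):

from cStringIO import StringIO

def parse_stream(fp):
    # first line: "<total files> <total bytes>"
    total_files, total_bytes = map(int, fp.readline().split(' ', 1))
    out = {}
    for _ in xrange(total_files):
        # per-file header: "<name>\0<size>\n", then exactly <size> raw bytes
        name, size = fp.readline().split('\0', 1)
        out[name] = fp.read(int(size))
    return out

payload = ('2 13\n'
           'data/a.i\x005\n' 'aaaaa'
           'data/b.i\x008\n' 'bbbbbbbb')
files = parse_stream(StringIO(payload))
assert files['data/a.i'] == 'aaaaa'
assert files['data/b.i'] == 'bbbbbbbb'
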
1885 1885 def clone(self, remote, heads=[], stream=False):
1886 1886 '''clone remote repository.
1887 1887
1888 1888 keyword arguments:
1889 1889 heads: list of revs to clone (forces use of pull)
1890 1890 stream: use streaming clone if possible'''
1891 1891
1892 1892 # now, all clients that can request uncompressed clones can
1893 1893 # read repo formats supported by all servers that can serve
1894 1894 # them.
1895 1895
1896 1896 # if revlog format changes, client will have to check version
1897 1897 # and format flags on "stream" capability, and use
1898 1898 # uncompressed only if compatible.
1899 1899
1900 1900 if stream and not heads:
1901 1901 # 'stream' means remote revlog format is revlogv1 only
1902 1902 if remote.capable('stream'):
1903 1903 return self.stream_in(remote, set(('revlogv1',)))
1904 1904 # otherwise, 'streamreqs' contains the remote revlog format
1905 1905 streamreqs = remote.capable('streamreqs')
1906 1906 if streamreqs:
1907 1907 streamreqs = set(streamreqs.split(','))
1908 1908 # if we support it, stream in and adjust our requirements
1909 1909 if not streamreqs - self.supportedformats:
1910 1910 return self.stream_in(remote, streamreqs)
1911 1911 return self.pull(remote, heads)
1912 1912
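clone() above only streams when no specific heads were requested and the remote's formats are compatible: a plain 'stream' capability implies revlogv1, while 'streamreqs' lists the remote's format requirements explicitly and must be a subset of what we support. A paraphrase of that decision as a pure function (hypothetical capability dictionary and format names, purely for illustration):

def choose_clone_mode(stream_requested, heads, remote_caps, supportedformats):
    # remote_caps maps capability name -> value (True or a string)
    if not stream_requested or heads:
        return 'pull'
    if remote_caps.get('stream'):
        # old servers: plain 'stream' implies revlogv1 only
        return 'stream:revlogv1'
    streamreqs = remote_caps.get('streamreqs')
    if streamreqs:
        reqs = set(streamreqs.split(','))
        if not reqs - supportedformats:
            return 'stream:' + streamreqs
    return 'pull'

assert choose_clone_mode(True, [], {'stream': True},
                         set(['revlogv1'])) == 'stream:revlogv1'
assert choose_clone_mode(True, [], {'streamreqs': 'revlogv1,otherformat'},
                         set(['revlogv1'])) == 'pull'
assert choose_clone_mode(False, [], {'stream': True},
                         set(['revlogv1'])) == 'pull'
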
1913 1913 def pushkey(self, namespace, key, old, new):
1914 1914 return pushkey.push(self, namespace, key, old, new)
1915 1915
1916 1916 def listkeys(self, namespace):
1917 1917 return pushkey.list(self, namespace)
1918 1918
1919 1919 def debugwireargs(self, one, two, three=None, four=None):
1920 1920 '''used to test argument passing over the wire'''
1921 1921 return "%s %s %s %s" % (one, two, three, four)
1922 1922
1923 1923 # used to avoid circular references so destructors work
1924 1924 def aftertrans(files):
1925 1925 renamefiles = [tuple(t) for t in files]
1926 1926 def a():
1927 1927 for src, dest in renamefiles:
1928 1928 util.rename(src, dest)
1929 1929 return a
1930 1930
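aftertrans() copies the (src, dest) pairs into plain tuples and closes over only those rather than the repository object, which is what the "circular references" comment above alludes to: the transaction can hold the callback without indirectly keeping the repository alive. A dependency-free sketch of the same closure pattern, using os.rename in place of util.rename:

import os, shutil, tempfile

def aftertrans_sketch(files):
    # same shape as aftertrans() above, minus the Mercurial dependency
    renamefiles = [tuple(t) for t in files]
    def a():
        for src, dest in renamefiles:
            os.rename(src, dest)
    return a

d = tempfile.mkdtemp()
src, dst = os.path.join(d, 'journal'), os.path.join(d, 'undo')
f = open(src, 'w')
f.write('x')
f.close()

cb = aftertrans_sketch([(src, dst)])  # captures plain tuples, not the repo
cb()
assert os.path.exists(dst) and not os.path.exists(src)
shutil.rmtree(d)
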
1931 1931 def instance(ui, path, create):
1932 1932 return localrepository(ui, urlmod.localpath(path), create)
1933 1933
1934 1934 def islocal(path):
1935 1935 return True
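
instance() and islocal() at the bottom are the two entry points a repository module exposes so that the scheme-based dispatcher in mercurial/hg.py can construct the right repo type for a given path. A toy sketch of such a dispatcher, with a stand-in module object rather than the real localrepo (simplified; the real table also covers http, ssh, static-http and bundle repos):

class _toyrepomodule(object):
    # stands in for this module: exposes the same two entry points
    @staticmethod
    def instance(ui, path, create):
        return 'local repo at %s' % path
    @staticmethod
    def islocal(path):
        return True

schemes = {'file': _toyrepomodule}

def repository(ui, path, create=False):
    scheme = path.split(':', 1)[0] if ':' in path else 'file'
    mod = schemes.get(scheme, _toyrepomodule)
    return mod.instance(ui, path, create)

assert repository(None, '/srv/repo') == 'local repo at /srv/repo'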