##// END OF EJS Templates
cleanup: fix some list comprehension redefinitions of existing vars...
Mads Kiilerich -
r22201:269688a3 default
parent child Browse files
Show More
@@ -1,1315 +1,1315
1 1 # Subversion 1.4/1.5 Python API backend
2 2 #
3 3 # Copyright(C) 2007 Daniel Holth et al
4 4
5 5 import os, re, sys, tempfile, urllib, urllib2
6 6 import xml.dom.minidom
7 7 import cPickle as pickle
8 8
9 9 from mercurial import strutil, scmutil, util, encoding
10 10 from mercurial.i18n import _
11 11
12 12 propertycache = util.propertycache
13 13
14 14 # Subversion stuff. Works best with very recent Python SVN bindings
15 15 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
16 16 # these bindings.
17 17
18 18 from cStringIO import StringIO
19 19
20 20 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
21 21 from common import commandline, converter_source, converter_sink, mapfile
22 22 from common import makedatetimestamp
23 23
24 24 try:
25 25 from svn.core import SubversionException, Pool
26 26 import svn
27 27 import svn.client
28 28 import svn.core
29 29 import svn.ra
30 30 import svn.delta
31 31 import transport
32 32 import warnings
33 33 warnings.filterwarnings('ignore',
34 34 module='svn.core',
35 35 category=DeprecationWarning)
36 36
37 37 except ImportError:
38 38 svn = None
39 39
class SvnPathNotFound(Exception):
    """Raised when no svn object exists at a path up to a given revision
    (see svn_source.latest())."""
    pass
42 42
def revsplit(rev):
    """Parse a revision string and return (uuid, path, revnum).
    >>> revsplit('svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
    ...          '/proj%20B/mytrunk/mytrunk@1')
    ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
    >>> revsplit('svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
    ('', '', 1)
    >>> revsplit('@7')
    ('', '', 7)
    >>> revsplit('7')
    ('', '', 0)
    >>> revsplit('bad')
    ('', '', 0)
    """
    uuid = ''
    mod = ''
    revnum = 0
    # Only a revision id containing '@' carries a revision number; without
    # it everything defaults to empty/zero.
    head, sep, tail = rev.rpartition('@')
    if sep:
        revnum = int(tail)
        pieces = head.split('/', 1)
        if len(pieces) > 1 and pieces[0].startswith('svn:'):
            uuid = pieces[0][4:]
            mod = '/' + pieces[1]
    return uuid, mod, revnum
68 68
def quote(s):
    """URL-quote s the way svn itself quotes URLs.

    As of svn 1.7, many svn calls expect "canonical" paths. In theory we
    should call svn.core.*canonicalize() on all paths before passing them
    to the API. Instead, we assume the base url is canonical and copy the
    behaviour of svn URL encoding so we can extend it safely with new
    components. The "safe" characters below were taken from the
    "svn_uri__char_validity" table in libsvn_subr/path.c.
    """
    safe = "!$&'()*+,-./:=@_~"
    return urllib.quote(s, safe)
78 78
def geturl(path):
    """Return the canonical svn URL for path (URL or local path)."""
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except SubversionException:
        # svn.client.url_from_path() fails with local repositories
        pass
    if os.path.isdir(path):
        abspath = os.path.normpath(os.path.abspath(path))
        if os.name == 'nt':
            abspath = '/' + util.normpath(abspath)
        # Module URL is later compared with the repository URL returned
        # by svn API, which is UTF-8.
        abspath = encoding.tolocal(abspath)
        path = 'file://%s' % quote(abspath)
    return svn.core.svn_path_canonicalize(path)
94 94
def optrev(number):
    """Wrap an integer revision number in an svn_opt_revision_t."""
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value.number = number
    return rev
100 100
class changedpath(object):
    """Plain-attribute copy of an svn changed-path log entry, detached
    from the svn bindings so it can be pickled (see get_log_child)."""
    def __init__(self, p):
        # p is the changed-path object handed to the svn log receiver.
        self.copyfrom_path = p.copyfrom_path
        self.copyfrom_rev = p.copyfrom_rev
        self.action = p.action
106 106
def get_log_child(fp, url, paths, start, end, limit=0,
                  discover_changed_paths=True, strict_node_history=False):
    # Child-process entry point: fetch the svn log for url/paths and
    # stream one pickled (paths, revnum, author, date, message) tuple per
    # revision to fp, then a terminator: None on success or interruption,
    # or the error message string on failure (consumed by logstream).
    protocol = -1  # negative protocol selects the highest pickle protocol
    def receiver(orig_paths, revnum, author, date, message, pool):
        # Note: this 'paths' deliberately shadows the outer parameter; it
        # holds picklable changedpath copies of the svn entries.
        paths = {}
        if orig_paths is not None:
            for k, v in orig_paths.iteritems():
                paths[k] = changedpath(v)
        pickle.dump((paths, revnum, author, date, message),
                    fp, protocol)

    try:
        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)
    except IOError:
        # Caller may interrupt the iteration
        pickle.dump(None, fp, protocol)
    except Exception, inst:
        pickle.dump(str(inst), fp, protocol)
    else:
        pickle.dump(None, fp, protocol)
    fp.close()
    # With large history, cleanup process goes crazy and suddenly
    # consumes *huge* amount of memory. The output file being closed,
    # there is no need for clean termination.
    os._exit(0)
138 138
def debugsvnlog(ui, **opts):
    """Fetch SVN log in a subprocess and channel them back to parent to
    avoid memory collection issues.
    """
    if svn is None:
        raise util.Abort(_('debugsvnlog could not load Subversion python '
                           'bindings'))

    # The pickle stream exchanged with the parent is binary data: put
    # stdin/stdout into binary mode.
    util.setbinary(sys.stdin)
    util.setbinary(sys.stdout)
    # The parent passes the get_log_child() arguments encoded on stdin.
    args = decodeargs(sys.stdin.read())
    get_log_child(sys.stdout, *args)
151 151
class logstream(object):
    """Interruptible revision log iterator.

    Reads the pickled (paths, revnum, author, date, message) entries that
    get_log_child() writes to the child's stdout. A pickled None marks a
    clean end of stream; a pickled error string aborts.
    """
    def __init__(self, stdout):
        self._stdout = stdout

    def __iter__(self):
        pipe = self._stdout
        while True:
            try:
                entry = pickle.load(pipe)
            except EOFError:
                raise util.Abort(_('Mercurial failed to run itself, check'
                                   ' hg executable is in PATH'))
            if entry is None:
                # Terminator written by the child on normal completion.
                break
            try:
                orig_paths, revnum, author, date, message = entry
            except (TypeError, ValueError):
                # Anything not unpackable is an error message string.
                raise util.Abort(_("log stream exception '%s'") % entry)
            yield entry

    def close(self):
        if self._stdout:
            self._stdout.close()
            self._stdout = None
176 176
class directlogstream(list):
    """Direct revision log iterator.
    This can be used for debugging and development but it will probably leak
    memory and is not suitable for real conversions."""
    def __init__(self, url, paths, start, end, limit=0,
                 discover_changed_paths=True, strict_node_history=False):

        def receiver(orig_paths, revnum, author, date, message, pool):
            # This 'paths' deliberately shadows the outer parameter; it
            # holds detached changedpath copies of the svn entries.
            paths = {}
            if orig_paths is not None:
                for k, v in orig_paths.iteritems():
                    paths[k] = changedpath(v)
            # Entries accumulate eagerly in self (a list subclass).
            self.append((paths, revnum, author, date, message))

        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)

    def close(self):
        # All entries already live in the list; nothing to release.
        pass
201 201
# Check to see if the given path is a local Subversion repo. Verify this by
# looking for several svn-specific files and directories in the given
# directory.
def filecheck(ui, path, proto):
    """Return True if path contains all the svn repository landmarks."""
    landmarks = ('locks', 'hooks', 'format', 'db')
    return all(os.path.exists(os.path.join(path, name))
               for name in landmarks)
210 210
# Check to see if a given path is the root of an svn repo over http. We verify
# this by requesting a version-controlled URL we know can't exist and looking
# for the svn-specific "not found" XML.
def httpcheck(ui, path, proto):
    try:
        opener = urllib2.build_opener()
        rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
        data = rsp.read()
    except urllib2.HTTPError, inst:
        if inst.code != 404:
            # Except for 404 we cannot know for sure this is not an svn repo
            ui.warn(_('svn: cannot probe remote repository, assume it could '
                      'be a subversion repository. Use --source-type if you '
                      'know better.\n'))
            return True
        # A 404 body from an svn server still carries the marker below.
        data = inst.fp.read()
    except Exception:
        # Could be urllib2.URLError if the URL is invalid or anything else.
        return False
    # errcode 160013 is the marker emitted in svn's "not found" XML (see
    # comment above).
    return '<m:human-readable errcode="160013">' in data
231 231
# Map URL scheme to the probe function used by issvnurl() to decide
# whether a URL of that scheme points at a Subversion repository.
protomap = {'http': httpcheck,
            'https': httpcheck,
            'file': filecheck,
            }
def issvnurl(ui, url):
    """Return True if url (URL or local path) looks like an svn repo."""
    try:
        proto, path = url.split('://', 1)
        if proto == 'file':
            # Undo /C%3a/-style drive letter escaping on Windows.
            if (os.name == 'nt' and path[:1] == '/' and path[1:2].isalpha()
                and path[2:6].lower() == '%3a/'):
                path = path[:2] + ':/' + path[6:]
            path = urllib.url2pathname(path)
    except ValueError:
        # No scheme separator: treat the argument as a local path.
        proto = 'file'
        path = os.path.abspath(url)
    if proto == 'file':
        path = util.pconvert(path)
    probe = protomap.get(proto, lambda *args: False)
    # Walk up the path one component at a time until the probe matches
    # or we run out of components.
    candidate = path
    while '/' in candidate:
        if probe(ui, candidate, proto):
            return True
        candidate = candidate.rsplit('/', 1)[0]
    return False
255 255
256 256 # SVN conversion code stolen from bzr-svn and tailor
257 257 #
258 258 # Subversion looks like a versioned filesystem, branches structures
259 259 # are defined by conventions and not enforced by the tool. First,
260 260 # we define the potential branches (modules) as "trunk" and "branches"
261 261 # children directories. Revisions are then identified by their
262 262 # module and revision number (and a repository identifier).
263 263 #
264 264 # The revision graph is really a tree (or a forest). By default, a
265 265 # revision parent is the previous revision in the same module. If the
266 266 # module directory is copied/moved from another module then the
267 267 # revision is the module root and its parent the source revision in
268 268 # the parent module. A revision has at most one parent.
269 269 #
270 270 class svn_source(converter_source):
    def __init__(self, ui, url, rev=None):
        """Initialize a Subversion conversion source for url.

        Probes that url is really a Subversion repository, checks the
        binding versions, parses an optional @rev suffix and the
        convert.svn.* configuration, then resolves the head revision of
        the module being converted.

        Raises NoRepo, MissingTool or util.Abort on failure.
        """
        super(svn_source, self).__init__(ui, url, rev=rev)

        if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
                (os.path.exists(url) and
                 os.path.exists(os.path.join(url, '.svn'))) or
                issvnurl(ui, url)):
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % url)
        if svn is None:
            raise MissingTool(_('could not load Subversion python bindings'))

        try:
            version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
            if version < (1, 4):
                raise MissingTool(_('Subversion python bindings %d.%d found, '
                                    '1.4 or later required') % version)
        except AttributeError:
            # Very old bindings do not even expose the version constants.
            raise MissingTool(_('Subversion python bindings are too old, 1.4 '
                                'or later required'))

        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind('@')
            if at >= 0:
                latest = int(url[at + 1:])
                url = url[:at]
        except ValueError:
            pass
        self.url = geturl(url)
        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
        try:
            self.transport = transport.SvnRaTransport(url=self.url)
            self.ra = self.transport.ra
            self.ctx = self.transport.client
            self.baseurl = svn.ra.get_repos_root(self.ra)
            # Module is either empty or a repository path starting with
            # a slash and not ending with a slash.
            self.module = urllib.unquote(self.url[len(self.baseurl):])
            self.prevmodule = None
            self.rootmodule = self.module
            self.commits = {}
            self.paths = {}
            self.uuid = svn.ra.get_uuid(self.ra)
        except SubversionException:
            ui.traceback()
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % self.url)

        # An explicit --rev overrides any @rev suffix parsed above.
        if rev:
            try:
                latest = int(rev)
            except ValueError:
                raise util.Abort(_('svn: revision %s is not an integer') % rev)

        self.trunkname = self.ui.config('convert', 'svn.trunk',
                                        'trunk').strip('/')
        self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
        try:
            self.startrev = int(self.startrev)
            if self.startrev < 0:
                self.startrev = 0
        except ValueError:
            raise util.Abort(_('svn: start revision %s is not an integer')
                             % self.startrev)

        try:
            self.head = self.latest(self.module, latest)
        except SvnPathNotFound:
            self.head = None
        if not self.head:
            raise util.Abort(_('no revision found in module %s')
                             % self.module)
        self.last_changed = self.revnum(self.head)

        # One-entry (rev, changes) cache shared by getchanges() and
        # getchangedfiles().
        self._changescache = None

        # Remember the path when converting from a working copy, so
        # converted() can maintain its .svn/hg-shamap file.
        if os.path.exists(os.path.join(url, '.svn/entries')):
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None
357 357
358 358 def setrevmap(self, revmap):
359 359 lastrevs = {}
360 360 for revid in revmap.iterkeys():
361 361 uuid, module, revnum = revsplit(revid)
362 362 lastrevnum = lastrevs.setdefault(module, revnum)
363 363 if revnum > lastrevnum:
364 364 lastrevs[module] = revnum
365 365 self.lastrevs = lastrevs
366 366
    def exists(self, path, optrev):
        """Return True if path exists in the repository at revision optrev.

        NB: the optrev parameter shadows the module-level optrev() helper;
        callers pass an already-built svn_opt_revision_t.
        """
        try:
            svn.client.ls(self.url.rstrip('/') + '/' + quote(path),
                          optrev, False, self.ctx)
            return True
        except SubversionException:
            return False
374 374
    def getheads(self):
        """Return the list of head revision ids: the converted module's
        head first, then one head per non-empty branch.

        Also resolves the trunk/tags/branches layout (overridable with
        the convert.svn.* options) and may narrow self.module to trunk.
        """

        def isdir(path, revnum):
            # True if path is a directory in the repository at revnum.
            kind = self._checkpath(path, revnum)
            return kind == svn.core.svn_node_dir

        def getcfgpath(name, rev):
            # Resolve the convert.svn.<name> override (or the conventional
            # default directory) into an existing repository path, or None.
            cfgpath = self.ui.config('convert', 'svn.' + name)
            if cfgpath is not None and cfgpath.strip() == '':
                # An explicitly empty option disables the directory.
                return None
            path = (cfgpath or name).strip('/')
            if not self.exists(path, rev):
                if self.module.endswith(path) and name == 'trunk':
                    # we are converting from inside this directory
                    return None
                if cfgpath:
                    raise util.Abort(_('expected %s to be at %r, but not found')
                                     % (name, path))
                return None
            self.ui.note(_('found %s at %r\n') % (name, path))
            return path

        rev = optrev(self.last_changed)
        oldmodule = ''
        trunk = getcfgpath('trunk', rev)
        self.tags = getcfgpath('tags', rev)
        branches = getcfgpath('branches', rev)

        # If the project has a trunk or branches, we will extract heads
        # from them. We keep the project root otherwise.
        if trunk:
            oldmodule = self.module or ''
            self.module += '/' + trunk
            self.head = self.latest(self.module, self.last_changed)
            if not self.head:
                raise util.Abort(_('no revision found in module %s')
                                 % self.module)

        # First head in the list is the module's head
        self.heads = [self.head]
        if self.tags is not None:
            self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))

        # Check if branches bring a few more heads to the list
        if branches:
            rpath = self.url.strip('/')
            branchnames = svn.client.ls(rpath + '/' + quote(branches),
                                        rev, False, self.ctx)
            for branch in sorted(branchnames):
                module = '%s/%s/%s' % (oldmodule, branches, branch)
                if not isdir(module, self.last_changed):
                    continue
                brevid = self.latest(module, self.last_changed)
                if not brevid:
                    self.ui.note(_('ignoring empty branch %s\n') % branch)
                    continue
                self.ui.note(_('found branch %s at %d\n') %
                             (branch, self.revnum(brevid)))
                self.heads.append(brevid)

        if self.startrev and self.heads:
            if len(self.heads) > 1:
                raise util.Abort(_('svn: start revision is not supported '
                                   'with more than one branch'))
            revnum = self.revnum(self.heads[0])
            if revnum < self.startrev:
                raise util.Abort(
                    _('svn: no revision found after start revision %d')
                    % self.startrev)

        return self.heads
446 446
    def getchanges(self, rev):
        """Return (sorted (file, rev) pairs, copies dict) for revision rev.

        The entry for rev in self.paths is consumed (deleted) in the
        process; getchangedfiles() relies on the one-entry
        self._changescache to call this twice for the same rev.
        """
        if self._changescache and self._changescache[0] == rev:
            return self._changescache[1]
        self._changescache = None
        (paths, parents) = self.paths[rev]
        if parents:
            files, self.removed, copies = self.expandpaths(rev, paths, parents)
        else:
            # Perform a full checkout on roots
            uuid, module, revnum = revsplit(rev)
            entries = svn.client.ls(self.baseurl + quote(module),
                                    optrev(revnum), True, self.ctx)
            files = [n for n, e in entries.iteritems()
                     if e.kind == svn.core.svn_node_file]
            copies = {}
            self.removed = set()

        files.sort()
        files = zip(files, [rev] * len(files))

        # caller caches the result, so free it here to release memory
        del self.paths[rev]
        return (files, copies)
470 470
471 471 def getchangedfiles(self, rev, i):
472 472 changes = self.getchanges(rev)
473 473 self._changescache = (rev, changes)
474 474 return [f[0] for f in changes[0]]
475 475
476 476 def getcommit(self, rev):
477 477 if rev not in self.commits:
478 478 uuid, module, revnum = revsplit(rev)
479 479 self.module = module
480 480 self.reparent(module)
481 481 # We assume that:
482 482 # - requests for revisions after "stop" come from the
483 483 # revision graph backward traversal. Cache all of them
484 484 # down to stop, they will be used eventually.
485 485 # - requests for revisions before "stop" come to get
486 486 # isolated branches parents. Just fetch what is needed.
487 487 stop = self.lastrevs.get(module, 0)
488 488 if revnum < stop:
489 489 stop = revnum + 1
490 490 self._fetch_revisions(revnum, stop)
491 491 if rev not in self.commits:
492 492 raise util.Abort(_('svn: revision %s not found') % revnum)
493 commit = self.commits[rev]
493 revcommit = self.commits[rev]
494 494 # caller caches the result, so free it here to release memory
495 495 del self.commits[rev]
496 return commit
496 return revcommit
497 497
498 498 def checkrevformat(self, revstr, mapname='splicemap'):
499 499 """ fails if revision format does not match the correct format"""
500 500 if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
501 501 '[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
502 502 '{12,12}(.*)\@[0-9]+$',revstr):
503 503 raise util.Abort(_('%s entry %s is not a valid revision'
504 504 ' identifier') % (mapname, revstr))
505 505
    def gettags(self):
        """Return a {tagname: revision id} map built by walking the tags
        directory history backward from the repository head."""
        tags = {}
        if self.tags is None:
            return tags

        # svn tags are just a convention, project branches left in a
        # 'tags' directory. There is no other relationship than
        # ancestry, which is expensive to discover and makes them hard
        # to update incrementally. Worse, past revisions may be
        # referenced by tags far away in the future, requiring a deep
        # history traversal on every calculation. Current code
        # performs a single backward traversal, tracking moves within
        # the tags directory (tag renaming) and recording a new tag
        # everytime a project is copied from outside the tags
        # directory. It also lists deleted tags, this behaviour may
        # change in the future.
        pendings = []
        tagspath = self.tags
        start = svn.ra.get_latest_revnum(self.ra)
        stream = self._getlog([self.tags], start, self.startrev)
        try:
            for entry in stream:
                origpaths, revnum, author, date, message = entry
                if not origpaths:
                    origpaths = []
                # (source, source rev, destination) triples of copy records.
                copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
                          in origpaths.iteritems() if e.copyfrom_path]
                # Apply moves/copies from more specific to general
                copies.sort(reverse=True)

                srctagspath = tagspath
                if copies and copies[-1][2] == tagspath:
                    # Track tags directory moves
                    srctagspath = copies.pop()[0]

                for source, sourcerev, dest in copies:
                    if not dest.startswith(tagspath + '/'):
                        continue
                    for tag in pendings:
                        if tag[0].startswith(dest):
                            tagpath = source + tag[0][len(dest):]
                            # Rewrite the pending tag through this move.
                            tag[:2] = [tagpath, sourcerev]
                            break
                    else:
                        pendings.append([source, sourcerev, dest])

                # Filter out tags with children coming from different
                # parts of the repository like:
                # /tags/tag.1 (from /trunk:10)
                # /tags/tag.1/foo (from /branches/foo:12)
                # Here/tags/tag.1 discarded as well as its children.
                # It happens with tools like cvs2svn. Such tags cannot
                # be represented in mercurial.
                addeds = dict((p, e.copyfrom_path) for p, e
                              in origpaths.iteritems()
                              if e.action == 'A' and e.copyfrom_path)
                badroots = set()
                for destroot in addeds:
                    for source, sourcerev, dest in pendings:
                        if (not dest.startswith(destroot + '/')
                            or source.startswith(addeds[destroot] + '/')):
                            continue
                        badroots.add(destroot)
                        break

                for badroot in badroots:
                    pendings = [p for p in pendings if p[2] != badroot
                                and not p[2].startswith(badroot + '/')]

                # Tell tag renamings from tag creations
                renamings = []
                for source, sourcerev, dest in pendings:
                    tagname = dest.split('/')[-1]
                    if source.startswith(srctagspath):
                        renamings.append([source, sourcerev, tagname])
                        continue
                    if tagname in tags:
                        # Keep the latest tag value
                        continue
                    # From revision may be fake, get one with changes
                    try:
                        tagid = self.latest(source, sourcerev)
                        if tagid and tagname not in tags:
                            tags[tagname] = tagid
                    except SvnPathNotFound:
                        # It happens when we are following directories
                        # we assumed were copied with their parents
                        # but were really created in the tag
                        # directory.
                        pass
                pendings = renamings
                tagspath = srctagspath
        finally:
            stream.close()
        return tags
601 601
    def converted(self, rev, destrev):
        """Record that svn revision rev was converted to destrev by
        appending to the .svn/hg-shamap file of the source working copy
        (no-op unless converting from a working copy)."""
        if not self.wc:
            return
        if self.convertfp is None:
            # Opened lazily, in append mode, and kept open across calls.
            self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
                                  'a')
        self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
        self.convertfp.flush()
610 610
611 611 def revid(self, revnum, module=None):
612 612 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
613 613
614 614 def revnum(self, rev):
615 615 return int(rev.split('@')[-1])
616 616
    def latest(self, path, stop=None):
        """Find the latest revid affecting path, up to stop revision
        number. If stop is None, default to repository latest
        revision. It may return a revision in a different module,
        since a branch may be moved without a change being
        reported. Return None if computed module does not belong to
        rootmodule subtree.
        """
        def findchanges(path, start, stop=None):
            # Walk the log of path backward from start down to stop,
            # following copyfrom records across renames, and return
            # (revnum, path) of the latest change — revnum is None when no
            # changed paths were seen.
            stream = self._getlog([path], start, stop or 1)
            try:
                for entry in stream:
                    paths, revnum, author, date, message = entry
                    if stop is None and paths:
                        # We do not know the latest changed revision,
                        # keep the first one with changed paths.
                        break
                    if revnum <= stop:
                        break

                    for p in paths:
                        if (not path.startswith(p) or
                            not paths[p].copyfrom_path):
                            continue
                        newpath = paths[p].copyfrom_path + path[len(p):]
                        self.ui.debug("branch renamed from %s to %s at %d\n" %
                                      (path, newpath, revnum))
                        path = newpath
                        break
                if not paths:
                    revnum = None
                return revnum, path
            finally:
                stream.close()

        if not path.startswith(self.rootmodule):
            # Requests on foreign branches may be forbidden at server level
            self.ui.debug('ignoring foreign branch %r\n' % path)
            return None

        if stop is None:
            stop = svn.ra.get_latest_revnum(self.ra)
        try:
            # Reparent to the repository root for the stat() call, then
            # restore the previous parent.
            prevmodule = self.reparent('')
            dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
            self.reparent(prevmodule)
        except SubversionException:
            dirent = None
        if not dirent:
            raise SvnPathNotFound(_('%s not found up to revision %d')
                                  % (path, stop))

        # stat() gives us the previous revision on this line of
        # development, but it might be in *another module*. Fetch the
        # log and detect renames down to the latest revision.
        revnum, realpath = findchanges(path, stop, dirent.created_rev)
        if revnum is None:
            # Tools like svnsync can create empty revision, when
            # synchronizing only a subtree for instance. These empty
            # revisions created_rev still have their original values
            # despite all changes having disappeared and can be
            # returned by ra.stat(), at least when stating the root
            # module. In that case, do not trust created_rev and scan
            # the whole history.
            revnum, realpath = findchanges(path, stop)
            if revnum is None:
                self.ui.debug('ignoring empty branch %r\n' % realpath)
                return None

        if not realpath.startswith(self.rootmodule):
            self.ui.debug('ignoring foreign branch %r\n' % realpath)
            return None
        return self.revid(revnum, realpath)
690 690
691 691 def reparent(self, module):
692 692 """Reparent the svn transport and return the previous parent."""
693 693 if self.prevmodule == module:
694 694 return module
695 695 svnurl = self.baseurl + quote(module)
696 696 prevmodule = self.prevmodule
697 697 if prevmodule is None:
698 698 prevmodule = ''
699 699 self.ui.debug("reparent to %s\n" % svnurl)
700 700 svn.ra.reparent(self.ra, svnurl)
701 701 self.prevmodule = module
702 702 return prevmodule
703 703
    def expandpaths(self, rev, paths, parents):
        """Expand the svn changed-path entries of rev into
        (changed file list, removed file set, copies dict), recursing
        into changed directories to enumerate the affected files.
        """
        changed, removed = set(), set()
        copies = {}

        new_module, revnum = revsplit(rev)[1:]
        if new_module != self.module:
            self.module = new_module
            self.reparent(self.module)

        for i, (path, ent) in enumerate(paths):
            self.ui.progress(_('scanning paths'), i, item=path,
                             total=len(paths))
            entrypath = self.getrelpath(path)

            kind = self._checkpath(entrypath, revnum)
            if kind == svn.core.svn_node_file:
                changed.add(self.recode(entrypath))
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrom_path:
                    continue
                self.ui.debug("copied to %s from %s@%s\n" %
                              (entrypath, copyfrom_path, ent.copyfrom_rev))
                copies[self.recode(entrypath)] = self.recode(copyfrom_path)
            elif kind == 0: # gone, but had better be a deleted *file*
                self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
                pmodule, prevnum = revsplit(parents[0])[1:]
                parentpath = pmodule + "/" + entrypath
                # Look the path up in the parent revision to know what
                # kind of object was deleted.
                fromkind = self._checkpath(entrypath, prevnum, pmodule)

                if fromkind == svn.core.svn_node_file:
                    removed.add(self.recode(entrypath))
                elif fromkind == svn.core.svn_node_dir:
                    # A deleted directory removes every file under it.
                    oroot = parentpath.strip('/')
                    nroot = path.strip('/')
                    children = self._iterfiles(oroot, prevnum)
                    for childpath in children:
                        childpath = childpath.replace(oroot, nroot)
                        childpath = self.getrelpath("/" + childpath, pmodule)
                        if childpath:
                            removed.add(self.recode(childpath))
                else:
                    self.ui.debug('unknown path in revision %d: %s\n' % \
                                  (revnum, path))
            elif kind == svn.core.svn_node_dir:
                if ent.action == 'M':
                    # If the directory just had a prop change,
                    # then we shouldn't need to look for its children.
                    continue
                if ent.action == 'R' and parents:
                    # If a directory is replacing a file, mark the previous
                    # file as deleted
                    pmodule, prevnum = revsplit(parents[0])[1:]
                    pkind = self._checkpath(entrypath, prevnum, pmodule)
                    if pkind == svn.core.svn_node_file:
                        removed.add(self.recode(entrypath))
                    elif pkind == svn.core.svn_node_dir:
                        # We do not know what files were kept or removed,
                        # mark them all as changed.
                        for childpath in self._iterfiles(pmodule, prevnum):
                            childpath = self.getrelpath("/" + childpath)
                            if childpath:
                                changed.add(self.recode(childpath))

                # Every file below the changed directory is changed.
                for childpath in self._iterfiles(path, revnum):
                    childpath = self.getrelpath("/" + childpath)
                    if childpath:
                        changed.add(self.recode(childpath))

                # Handle directory copies
                if not ent.copyfrom_path or not parents:
                    continue
                # Copy sources not in parent revisions cannot be
                # represented, ignore their origin for now
                pmodule, prevnum = revsplit(parents[0])[1:]
                if ent.copyfrom_rev < prevnum:
                    continue
                copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
                if not copyfrompath:
                    continue
                self.ui.debug("mark %s came from %s:%d\n"
                              % (path, copyfrompath, ent.copyfrom_rev))
                children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
                for childpath in children:
                    childpath = self.getrelpath("/" + childpath, pmodule)
                    if not childpath:
                        continue
                    copytopath = path + childpath[len(copyfrompath):]
                    copytopath = self.getrelpath(copytopath)
                    copies[self.recode(copytopath)] = self.recode(childpath)

        self.ui.progress(_('scanning paths'), None)
        # Removed files are reported as changed too, per converter
        # convention; the removed set disambiguates.
        changed.update(removed)
        return (list(changed), removed, copies)
804 804
def _fetch_revisions(self, from_revnum, to_revnum):
    """Fetch the svn log for self.module between the two revision
    numbers (normalized newest-first below) and register one commit
    object per relevant revision in self.commits / self.paths.
    """
    if from_revnum < to_revnum:
        from_revnum, to_revnum = to_revnum, from_revnum

    # Commit whose parents are still unknown; parselogentry() links it
    # to the next (older) commit it parses.
    self.child_cset = None

    def parselogentry(orig_paths, revnum, author, date, message):
        """Return the parsed commit object or None, and True if
        the revision is a branch root.
        """
        self.ui.debug("parsing revision %d (%d changes)\n" %
                      (revnum, len(orig_paths)))

        branched = False
        rev = self.revid(revnum)
        # branch log might return entries for a parent we already have

        if rev in self.commits or revnum < to_revnum:
            return None, branched

        parents = []
        # check whether this revision is the start of a branch or part
        # of a branch renaming
        orig_paths = sorted(orig_paths.iteritems())
        # paths that are prefixes of self.module; the longest (last)
        # one is the copy source candidate for a branch creation
        root_paths = [(p, e) for p, e in orig_paths
                      if self.module.startswith(p)]
        if root_paths:
            path, ent = root_paths[-1]
            if ent.copyfrom_path:
                branched = True
                newpath = ent.copyfrom_path + self.module[len(path):]
                # ent.copyfrom_rev may not be the actual last revision
                previd = self.latest(newpath, ent.copyfrom_rev)
                if previd is not None:
                    prevmodule, prevnum = revsplit(previd)[1:]
                    if prevnum >= self.startrev:
                        parents = [previd]
                        self.ui.note(
                            _('found parent of branch %s at %d: %s\n') %
                            (self.module, prevnum, prevmodule))
            else:
                self.ui.debug("no copyfrom path, don't know what to do.\n")

        paths = []
        # filter out unrelated paths
        for path, ent in orig_paths:
            if self.getrelpath(path) is None:
                continue
            paths.append((path, ent))

        # Example SVN datetime. Includes microseconds.
        # ISO-8601 conformant
        # '2007-01-04T17:35:00.902377Z'
        date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
        if self.ui.configbool('convert', 'localtimezone'):
            date = makedatetimestamp(date[0])

        # py2 and/or idiom: empty string when message/author is None
        log = message and self.recode(message) or ''
        author = author and self.recode(author) or ''
        try:
            branch = self.module.split("/")[-1]
            if branch == self.trunkname:
                branch = None
        except IndexError:
            branch = None

        # %1%2 are Mercurial's timezone escapes in util.datestr formats
        cset = commit(author=author,
                      date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
                      desc=log,
                      parents=parents,
                      branch=branch,
                      rev=rev)

        self.commits[rev] = cset
        # The parents list is *shared* among self.paths and the
        # commit object. Both will be updated below.
        self.paths[rev] = (paths, cset.parents)
        if self.child_cset and not self.child_cset.parents:
            self.child_cset.parents[:] = [rev]
        self.child_cset = cset
        return cset, branched

    self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
                 (self.module, from_revnum, to_revnum))

    try:
        firstcset = None
        lastonbranch = False
        stream = self._getlog([self.module], from_revnum, to_revnum)
        try:
            for entry in stream:
                paths, revnum, author, date, message = entry
                if revnum < self.startrev:
                    lastonbranch = True
                    break
                if not paths:
                    self.ui.debug('revision %d has no entries\n' % revnum)
                    # If we ever leave the loop on an empty
                    # revision, do not try to get a parent branch
                    lastonbranch = lastonbranch or revnum == 0
                    continue
                cset, lastonbranch = parselogentry(paths, revnum, author,
                                                   date, message)
                if cset:
                    firstcset = cset
                if lastonbranch:
                    break
        finally:
            stream.close()

        if not lastonbranch and firstcset and not firstcset.parents:
            # The first revision of the sequence (the last fetched one)
            # has invalid parents if not a branch root. Find the parent
            # revision now, if any.
            try:
                firstrevnum = self.revnum(firstcset.rev)
                if firstrevnum > 1:
                    latest = self.latest(self.module, firstrevnum - 1)
                    if latest:
                        firstcset.parents.append(latest)
            except SvnPathNotFound:
                pass
    except SubversionException, (inst, num):
        if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
            raise util.Abort(_('svn: branch has no revision %s')
                             % to_revnum)
        raise
932 932
def getfile(self, file, rev):
    """Return (data, mode) for file at rev.

    mode is 'x' (executable), 'l' (symlink, with the svn "link " data
    prefix stripped) or ''.  Raises IOError when the file does not
    exist in that revision -- the convert protocol's deletion signal.
    """
    # TODO: ra.get_file transmits the whole file instead of diffs.
    if file in self.removed:
        raise IOError
    mode = ''
    try:
        # Re-anchor the RA session when rev lives in a different module
        # (side effect: self.module is updated for later calls).
        new_module, revnum = revsplit(rev)[1:]
        if self.module != new_module:
            self.module = new_module
            self.reparent(self.module)
        io = StringIO()
        info = svn.ra.get_file(self.ra, file, revnum, io)
        data = io.getvalue()
        # ra.get_file() seems to keep a reference on the input buffer
        # preventing collection. Release it explicitly.
        io.close()
        if isinstance(info, list):
            info = info[-1]
        mode = ("svn:executable" in info) and 'x' or ''
        mode = ("svn:special" in info) and 'l' or mode
    except SubversionException, e:
        notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
                    svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
        if e.apr_err in notfound: # File not found
            raise IOError
        raise
    if mode == 'l':
        link_prefix = "link "
        if data.startswith(link_prefix):
            data = data[len(link_prefix):]
    return data, mode
964 964
def _iterfiles(self, path, revnum):
    """Yield every file (recursively) under path at the given revision."""
    relpath = path.strip('/')
    # scoped memory pool for the svn binding call
    pool = Pool()
    url = '/'.join([self.baseurl, quote(relpath)]).strip('/')
    listing = svn.client.ls(url, optrev(revnum), True, self.ctx, pool)
    prefix = relpath and relpath + '/' or ''
    return (prefix + name
            for name, entry in listing.iteritems()
            if entry.kind == svn.core.svn_node_file)
975 975
def getrelpath(self, path, module=None):
    """Return path relative to module (self.module by default), or
    None when path lives outside the tracked tree.

    Given the repository url of this wc, say
    "http://server/plone/CMFPlone/branches/Plone-2_0-branch", this
    extracts the "entry" portion (a relative path) from what
    svn log --xml reports, e.g. turning
    "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
    into "tests/PloneTestCase.py".
    """
    if module is None:
        module = self.module
    if path.startswith(module):
        tail = path.rstrip('/')[len(module):]
        if tail.startswith('/'):
            return tail[1:]
        if tail == '':
            return tail
    # The path is outside our tracked tree...
    self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
    return None
995 995
def _checkpath(self, path, revnum, module=None):
    """Return the svn node kind of path at revnum.

    When an explicit module is given, the RA session is temporarily
    reparented to the repository root for the check and restored
    afterwards.
    """
    reparented = module is not None
    if reparented:
        prevmodule = self.reparent('')
        path = module + '/' + path
    try:
        # ra.check_path does not like leading slashes very much, it
        # leads to PROPFIND subversion errors
        return svn.ra.check_path(self.ra, path.strip('/'), revnum)
    finally:
        if reparented:
            self.reparent(prevmodule)
1007 1007
def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
            strict_node_history=False):
    """Return a stream of svn log entries for paths between revisions
    start and end, normally fetched through a child 'hg debugsvnlog'
    process."""
    # Normalize path names: svn >= 1.5 only wants paths relative to
    # the supplied URL.
    relpaths = [(p if p.startswith('/') else self.module + '/' + p).strip('/')
                for p in paths]
    args = [self.baseurl, relpaths, start, end, limit,
            discover_changed_paths, strict_node_history]
    if not self.ui.configbool('convert', 'svn.debugsvnlog', True):
        # undocumented feature: debugsvnlog can be disabled
        return directlogstream(*args)
    # Ship the pickled arguments to a child hg process and stream the
    # log entries back from its stdout.
    arg = encodeargs(args)
    hgexe = util.hgexecutable()
    cmd = '%s debugsvnlog' % util.shellquote(hgexe)
    stdin, stdout = util.popen2(util.quotecommand(cmd))
    stdin.write(arg)
    try:
        stdin.close()
    except IOError:
        raise util.Abort(_('Mercurial failed to run itself, check'
                           ' hg executable is in PATH'))
    return logstream(stdout)
1033 1033
# Shell source of a Subversion pre-revprop-change hook.  svn_sink installs
# it into repositories it creates itself (see svn_sink.__init__) so the
# conversion may set svn:log and the hg:convert-* tracking properties via
# 'propset --revprop'; any other revision property change is rejected.
pre_revprop_change = '''#!/bin/sh

REPOS="$1"
REV="$2"
USER="$3"
PROPNAME="$4"
ACTION="$5"

if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi

echo "Changing prohibited revision property" >&2
exit 1
'''
1049 1049
class svn_sink(converter_sink, commandline):
    """Convert sink writing changesets into a Subversion repository.

    All changes are staged in a local svn working copy (self.wc) and
    committed with the command line 'svn' client (via commandline).
    """
    commit_re = re.compile(r'Committed revision (\d+).', re.M)
    uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)

    def prerun(self):
        # Commands must run from inside the working copy.
        if self.wc:
            os.chdir(self.wc)

    def postrun(self):
        if self.wc:
            os.chdir(self.cwd)

    def join(self, name):
        """Return the path of name inside the working copy's .svn dir."""
        return os.path.join(self.wc, '.svn', name)

    def revmapfile(self):
        return self.join('hg-shamap')

    def authorfile(self):
        return self.join('hg-authormap')

    def __init__(self, ui, path):

        converter_sink.__init__(self, ui, path)
        commandline.__init__(self, ui, 'svn')
        self.delete = []    # files to 'svn delete' at next putcommit
        self.setexec = []   # files needing svn:executable set
        self.delexec = []   # files needing svn:executable removed
        self.copies = []    # pending [source, dest] copy records
        self.wc = None
        self.cwd = os.getcwd()

        # 'created' holds the path of a repository we created ourselves,
        # so we know to install the pre-revprop-change hook below.
        created = False
        if os.path.isfile(os.path.join(path, '.svn', 'entries')):
            # path already is a working copy: reuse it
            self.wc = os.path.realpath(path)
            self.run0('update')
        else:
            if not re.search(r'^(file|http|https|svn|svn\+ssh)\://', path):
                path = os.path.realpath(path)
                if os.path.isdir(os.path.dirname(path)):
                    if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
                        ui.status(_('initializing svn repository %r\n') %
                                  os.path.basename(path))
                        commandline(ui, 'svnadmin').run0('create', path)
                        created = path
                    path = util.normpath(path)
                    if not path.startswith('/'):
                        path = '/' + path
                    path = 'file://' + path

            wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
            ui.status(_('initializing svn working copy %r\n')
                      % os.path.basename(wcpath))
            self.run0('checkout', path, wcpath)

            self.wc = wcpath
        self.opener = scmutil.opener(self.wc)
        self.wopener = scmutil.opener(self.wc)
        self.childmap = mapfile(ui, self.join('hg-childmap'))
        # py2 and/or idiom: exec-bit probe, or None on filesystems
        # without executable permission support
        self.is_exec = util.checkexec(self.wc) and util.isexec or None

        if created:
            hook = os.path.join(created, 'hooks', 'pre-revprop-change')
            fp = open(hook, 'w')
            fp.write(pre_revprop_change)
            fp.close()
            util.setflags(hook, False, True)

        output = self.run0('info')
        self.uuid = self.uuid_re.search(output).group(1).strip()

    def wjoin(self, *names):
        return os.path.join(self.wc, *names)

    @propertycache
    def manifest(self):
        """Set of entries currently tracked in the working copy."""
        # As of svn 1.7, the "add" command fails when receiving
        # already tracked entries, so we have to track and filter them
        # ourselves.
        m = set()
        output = self.run0('ls', recursive=True, xml=True)
        doc = xml.dom.minidom.parseString(output)
        for e in doc.getElementsByTagName('entry'):
            for n in e.childNodes:
                if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
                    continue
                name = ''.join(c.data for c in n.childNodes
                               if c.nodeType == c.TEXT_NODE)
                # Entries are compared with names coming from
                # mercurial, so bytes with undefined encoding. Our
                # best bet is to assume they are in local
                # encoding. They will be passed to command line calls
                # later anyway, so they better be.
                m.add(encoding.tolocal(name.encode('utf-8')))
                break
        return m

    def putfile(self, filename, flags, data):
        """Write data to filename in the working copy, handling symlink
        ('l') and executable ('x') flags."""
        if 'l' in flags:
            self.wopener.symlink(data, filename)
        else:
            try:
                # Replacing a symlink with a regular file requires
                # removing the link first.
                if os.path.islink(self.wjoin(filename)):
                    os.unlink(filename)
            except OSError:
                pass
            self.wopener.write(filename, data)

            if self.is_exec:
                if self.is_exec(self.wjoin(filename)):
                    if 'x' not in flags:
                        self.delexec.append(filename)
                else:
                    if 'x' in flags:
                        self.setexec.append(filename)
                util.setflags(self.wjoin(filename), False, 'x' in flags)

    def _copyfile(self, source, dest):
        # SVN's copy command pukes if the destination file exists, but
        # our copyfile method expects to record a copy that has
        # already occurred. Cross the semantic gap.
        wdest = self.wjoin(dest)
        exists = os.path.lexists(wdest)
        if exists:
            fd, tempname = tempfile.mkstemp(
                prefix='hg-copy-', dir=os.path.dirname(wdest))
            os.close(fd)
            os.unlink(tempname)
            os.rename(wdest, tempname)
        try:
            self.run0('copy', source, dest)
        finally:
            self.manifest.add(dest)
        if exists:
            try:
                os.unlink(wdest)
            except OSError:
                pass
            os.rename(tempname, wdest)

    def dirs_of(self, files):
        """Return the set of directories containing any of files,
        including all their ancestors."""
        dirs = set()
        for f in files:
            if os.path.isdir(self.wjoin(f)):
                dirs.add(f)
            for i in strutil.rfindall(f, '/'):
                dirs.add(f[:i])
        return dirs

    def add_dirs(self, files):
        """svn-add every not-yet-tracked ancestor directory of files."""
        add_dirs = [d for d in sorted(self.dirs_of(files))
                    if d not in self.manifest]
        if add_dirs:
            self.manifest.update(add_dirs)
            self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
        return add_dirs

    def add_files(self, files):
        """svn-add every not-yet-tracked file in files."""
        files = [f for f in files if f not in self.manifest]
        if files:
            self.manifest.update(files)
            self.xargs(files, 'add', quiet=True)
        return files

    def tidy_dirs(self, names):
        """Delete directories left empty (but for .svn metadata) and
        return the list of deleted directories."""
        deleted = []
        for d in sorted(self.dirs_of(names), reverse=True):
            wd = self.wjoin(d)
            # os.listdir() returns a list, so the previous comparison
            # with the bare string '.svn' was always false and empty
            # directories were never pruned.
            if os.listdir(wd) == ['.svn']:
                self.run0('delete', d)
                self.manifest.remove(d)
                deleted.append(d)
        return deleted

    def addchild(self, parent, child):
        self.childmap[parent] = child

    def revid(self, rev):
        return u"svn:%s@%s" % (self.uuid, rev)

    def putcommit(self, files, copies, parents, commit, source, revmap):
        """Stage files from source and commit them; return the new
        svn revision id (or an already converted parent's id)."""
        for parent in parents:
            try:
                return self.revid(self.childmap[parent])
            except KeyError:
                pass

        # Apply changes to working copy
        for f, v in files:
            try:
                data, mode = source.getfile(f, v)
            except IOError:
                # getfile raising IOError means the file was removed
                self.delete.append(f)
            else:
                self.putfile(f, mode, data)
                if f in copies:
                    self.copies.append([copies[f], f])
        files = [f[0] for f in files]

        entries = set(self.delete)
        files = frozenset(files)
        entries.update(self.add_dirs(files.difference(entries)))
        if self.copies:
            for s, d in self.copies:
                self._copyfile(s, d)
            self.copies = []
        if self.delete:
            self.xargs(self.delete, 'delete')
            for f in self.delete:
                self.manifest.remove(f)
            self.delete = []
        entries.update(self.add_files(files.difference(entries)))
        entries.update(self.tidy_dirs(entries))
        if self.delexec:
            self.xargs(self.delexec, 'propdel', 'svn:executable')
            self.delexec = []
        if self.setexec:
            self.xargs(self.setexec, 'propset', 'svn:executable', '*')
            self.setexec = []

        fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
        fp = os.fdopen(fd, 'w')
        fp.write(commit.desc)
        fp.close()
        try:
            output = self.run0('commit',
                               username=util.shortuser(commit.author),
                               file=messagefile,
                               encoding='utf-8')
            try:
                rev = self.commit_re.search(output).group(1)
            except AttributeError:
                # No "Committed revision" line: nothing was committed.
                # That is fine when there was nothing to commit.
                if not files:
                    return parents[0]
                self.ui.warn(_('unexpected svn output:\n'))
                self.ui.warn(output)
                raise util.Abort(_('unable to cope with svn output'))
            if commit.rev:
                self.run('propset', 'hg:convert-rev', commit.rev,
                         revprop=True, revision=rev)
            if commit.branch and commit.branch != 'default':
                self.run('propset', 'hg:convert-branch', commit.branch,
                         revprop=True, revision=rev)
            for parent in parents:
                self.addchild(parent, rev)
            return self.revid(rev)
        finally:
            os.unlink(messagefile)

    def puttags(self, tags):
        self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
        return None, None

    def hascommitfrommap(self, rev):
        # We trust that revisions referenced in a map still is present
        # TODO: implement something better if necessary and feasible
        return True

    def hascommitforsplicemap(self, rev):
        # This is not correct as one can convert to an existing subversion
        # repository and childmap would not list all revisions. Too bad.
        if rev in self.childmap:
            return True
        raise util.Abort(_('splice map revision %s not found in subversion '
                           'child map (revision lookups are not implemented)')
                         % rev)
@@ -1,6060 +1,6060
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, bin, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _
11 11 import os, re, difflib, time, tempfile, errno, shlex
12 12 import sys
13 13 import hg, scmutil, util, revlog, copies, error, bookmarks
14 14 import patch, help, encoding, templatekw, discovery
15 15 import archival, changegroup, cmdutil, hbisect
16 16 import sshserver, hgweb, commandserver
17 17 import extensions
18 18 from hgweb import server as hgweb_server
19 19 import merge as mergemod
20 20 import minirst, revset, fileset
21 21 import dagparser, context, simplemerge, graphmod
22 22 import random
23 23 import setdiscovery, treediscovery, dagutil, pvec, localrepo
24 24 import phases, obsolete, exchange
25 25
# Canonical command table: filled in by the @command decorator uses below.
table = {}

# Registration decorator: each @command(...) below adds an entry to 'table'.
command = cmdutil.command(table)

# Space delimited list of commands that don't require local repositories.
# This should be populated by passing norepo=True into the @command decorator.
norepo = ''
# Space delimited list of commands that optionally require local repositories.
# This should be populated by passing optionalrepo=True into the @command
# decorator.
optionalrepo = ''
# Space delimited list of commands that will examine arguments looking for
# a repository. This should be populated by passing inferrepo=True into the
# @command decorator.
inferrepo = ''
41 41
42 42 # common command options
43 43
44 44 globalopts = [
45 45 ('R', 'repository', '',
46 46 _('repository root directory or name of overlay bundle file'),
47 47 _('REPO')),
48 48 ('', 'cwd', '',
49 49 _('change working directory'), _('DIR')),
50 50 ('y', 'noninteractive', None,
51 51 _('do not prompt, automatically pick the first choice for all prompts')),
52 52 ('q', 'quiet', None, _('suppress output')),
53 53 ('v', 'verbose', None, _('enable additional output')),
54 54 ('', 'config', [],
55 55 _('set/override config option (use \'section.name=value\')'),
56 56 _('CONFIG')),
57 57 ('', 'debug', None, _('enable debugging output')),
58 58 ('', 'debugger', None, _('start debugger')),
59 59 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
60 60 _('ENCODE')),
61 61 ('', 'encodingmode', encoding.encodingmode,
62 62 _('set the charset encoding mode'), _('MODE')),
63 63 ('', 'traceback', None, _('always print a traceback on exception')),
64 64 ('', 'time', None, _('time how long the command takes')),
65 65 ('', 'profile', None, _('print command execution profile')),
66 66 ('', 'version', None, _('output version information and exit')),
67 67 ('h', 'help', None, _('display help and exit')),
68 68 ('', 'hidden', False, _('consider hidden changesets')),
69 69 ]
70 70
71 71 dryrunopts = [('n', 'dry-run', None,
72 72 _('do not perform actions, just print output'))]
73 73
74 74 remoteopts = [
75 75 ('e', 'ssh', '',
76 76 _('specify ssh command to use'), _('CMD')),
77 77 ('', 'remotecmd', '',
78 78 _('specify hg command to run on the remote side'), _('CMD')),
79 79 ('', 'insecure', None,
80 80 _('do not verify server certificate (ignoring web.cacerts config)')),
81 81 ]
82 82
83 83 walkopts = [
84 84 ('I', 'include', [],
85 85 _('include names matching the given patterns'), _('PATTERN')),
86 86 ('X', 'exclude', [],
87 87 _('exclude names matching the given patterns'), _('PATTERN')),
88 88 ]
89 89
90 90 commitopts = [
91 91 ('m', 'message', '',
92 92 _('use text as commit message'), _('TEXT')),
93 93 ('l', 'logfile', '',
94 94 _('read commit message from file'), _('FILE')),
95 95 ]
96 96
97 97 commitopts2 = [
98 98 ('d', 'date', '',
99 99 _('record the specified date as commit date'), _('DATE')),
100 100 ('u', 'user', '',
101 101 _('record the specified user as committer'), _('USER')),
102 102 ]
103 103
104 104 templateopts = [
105 105 ('', 'style', '',
106 106 _('display using template map file (DEPRECATED)'), _('STYLE')),
107 107 ('T', 'template', '',
108 108 _('display with template'), _('TEMPLATE')),
109 109 ]
110 110
111 111 logopts = [
112 112 ('p', 'patch', None, _('show patch')),
113 113 ('g', 'git', None, _('use git extended diff format')),
114 114 ('l', 'limit', '',
115 115 _('limit number of changes displayed'), _('NUM')),
116 116 ('M', 'no-merges', None, _('do not show merges')),
117 117 ('', 'stat', None, _('output diffstat-style summary of changes')),
118 118 ('G', 'graph', None, _("show the revision DAG")),
119 119 ] + templateopts
120 120
121 121 diffopts = [
122 122 ('a', 'text', None, _('treat all files as text')),
123 123 ('g', 'git', None, _('use git extended diff format')),
124 124 ('', 'nodates', None, _('omit dates from diff headers'))
125 125 ]
126 126
127 127 diffwsopts = [
128 128 ('w', 'ignore-all-space', None,
129 129 _('ignore white space when comparing lines')),
130 130 ('b', 'ignore-space-change', None,
131 131 _('ignore changes in the amount of white space')),
132 132 ('B', 'ignore-blank-lines', None,
133 133 _('ignore changes whose lines are all blank')),
134 134 ]
135 135
136 136 diffopts2 = [
137 137 ('p', 'show-function', None, _('show which function each change is in')),
138 138 ('', 'reverse', None, _('produce a diff that undoes the changes')),
139 139 ] + diffwsopts + [
140 140 ('U', 'unified', '',
141 141 _('number of lines of context to show'), _('NUM')),
142 142 ('', 'stat', None, _('output diffstat-style summary of changes')),
143 143 ]
144 144
145 145 mergetoolopts = [
146 146 ('t', 'tool', '', _('specify merge tool')),
147 147 ]
148 148
149 149 similarityopts = [
150 150 ('s', 'similarity', '',
151 151 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
152 152 ]
153 153
154 154 subrepoopts = [
155 155 ('S', 'subrepos', None,
156 156 _('recurse into subrepositories'))
157 157 ]
158 158
159 159 # Commands start here, listed alphabetically
160 160
@command('^add',
    walkopts + subrepoopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the
    repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see :hg:`forget`.

    If no names are given, add all files to the repository.

    .. container:: verbose

       An example showing how new (unknown) files are added
       automatically by :hg:`add`::

         $ ls
         foo.c
         $ hg status
         ? foo.c
         $ hg add
         adding foo.c
         $ hg status
         A foo.c

       Returns 0 if all files are successfully added.
    """

    # Match against the working directory context and delegate the
    # actual scheduling to cmdutil.add.
    matcher = scmutil.match(repo[None], pats, opts)
    bad = cmdutil.add(ui, repo, matcher, opts.get('dry_run'),
                      opts.get('subrepos'), prefix="", explicitonly=False)
    return 1 if bad else 0
197 197
@command('addremove',
    similarityopts + walkopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    New files are ignored if they match any of the patterns in
    ``.hgignore``. As with add, these changes take effect at the next
    commit.

    Use the -s/--similarity option to detect renamed files. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. With a parameter greater than 0,
    this compares every removed file with every added file and records
    those similar enough as renames. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed. If
    not specified, -s/--similarity defaults to 100 and only renames of
    identical files are detected.

    Returns 0 if all files are successfully added.
    """
    # Unset or empty --similarity means 100 (exact renames only).
    raw = opts.get('similarity') or 100
    try:
        sim = float(raw)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if not 0 <= sim <= 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    return scmutil.addremove(repo, pats, opts, similarity=sim / 100.0)
231 231
@command('^annotate|blame',
    [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
    ('', 'follow', None,
     _('follow copies/renames and list the filename (DEPRECATED)')),
    ('', 'no-follow', None, _("don't follow copies and renames")),
    ('a', 'text', None, _('treat all files as text')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('f', 'file', None, _('list the filename')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ('n', 'number', None, _('list the revision number (default)')),
    ('c', 'changeset', None, _('list the changeset')),
    ('l', 'line-number', None, _('show line number at the first appearance'))
    ] + diffwsopts + walkopts,
    _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
    inferrepo=True)
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line

    This command is useful for discovering when a change was made and
    by whom.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    Returns 0 on success.
    """
    if opts.get('follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts['file'] = True

    # py2 and/or idiom: short dates in quiet mode, full dates otherwise
    datefunc = ui.quiet and util.shortdate or util.datestr
    getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one filename or pattern is required'))

    hexfn = ui.debugflag and hex or short

    # (option name, column separator, formatter) for every possible column;
    # each formatter receives an (fctx, lineno) annotation pair
    opmap = [('user', ' ', lambda x: ui.shortuser(x[0].user())),
             ('number', ' ', lambda x: str(x[0].rev())),
             ('changeset', ' ', lambda x: hexfn(x[0].node())),
             ('date', ' ', getdate),
             ('file', ' ', lambda x: x[0].path()),
             ('line_number', ':', lambda x: str(x[1])),
            ]

    # default to showing revision numbers when no column was requested
    if (not opts.get('user') and not opts.get('changeset')
        and not opts.get('date') and not opts.get('file')):
        opts['number'] = True

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    # columns actually requested, in opmap order
    funcmap = [(func, sep) for op, sep, func in opmap if opts.get(op)]
    funcmap[0] = (funcmap[0][0], '') # no separator in front of first column

    def bad(x, y):
        raise util.Abort("%s: %s" % (x, y))

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, pats, opts)
    m.bad = bad
    follow = not opts.get('no_follow')
    diffopts = patch.diffopts(ui, opts, section='annotate')
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        if not opts.get('text') and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=follow, linenumber=linenumber,
                              diffopts=diffopts)
        # pieces[i] is the fully rendered i-th column, right-aligned to
        # its widest cell (display-width aware via encoding.colwidth)
        pieces = []

        for f, sep in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                sized = [(x, encoding.colwidth(x)) for x in l]
                ml = max([w for x, w in sized])
                pieces.append(["%s%s%s" % (sep, ' ' * (ml - w), x)
                               for x, w in sized])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % ("".join(p), l[1]))

        # annotated lines keep their own newlines; only the very last
        # line of a file may lack one
        if lines and not lines[-1][1].endswith('\n'):
            ui.write('\n')
327 327
@command('archive',
    [('', 'no-decode', None, _('do not pass files through decoders')),
    ('p', 'prefix', '', _('directory prefix for files in archive'),
     _('PREFIX')),
    ('r', 'rev', '', _('revision to distribute'), _('REV')),
    ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... DEST'))
def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (or override using -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``:   tar archive, uncompressed
    :``tbz2``:  tar archive, compressed using bzip2
    :``tgz``:   tar archive, compressed using gzip
    :``uzip``:  zip archive, uncompressed
    :``zip``:   zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    '''

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    if not ctx:
        raise util.Abort(_('no working directory: please specify a revision'))
    node = ctx.node()
    # expand %-escapes (e.g. %h) in the destination name
    dest = cmdutil.makefilename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))

    kind = opts.get('type') or archival.guesskind(dest) or 'files'
    prefix = opts.get('prefix')

    if dest == '-':
        # archive to stdout; a plain 'files' tree cannot be streamed
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(repo, dest)
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'

    prefix = cmdutil.makefilename(repo, prefix, node)
    matchfn = scmutil.match(ctx, [], opts)
    archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
                     matchfn, prefix, subrepos=opts.get('subrepos'))
399 399
@command('backout',
    [('', 'merge', None, _('merge with old dirstate parent after backout')),
    ('', 'parent', '',
     _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
    ('r', 'rev', '', _('revision to backout'), _('REV')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ] + mergetoolopts + walkopts + commitopts + commitopts2,
    _('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically. Otherwise, hg needs to merge the
    changes and the merged result is left uncommitted.

    .. note::

       backout cannot be used to fix either an unwanted or
       incorrect merge.

    .. container:: verbose

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing to backout or there are unresolved
    files.
    '''
    # 'hg backout REV' and 'hg backout -r REV' are both accepted, but
    # not together; normalize to a single revision in 'rev'.
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # refuse to run in the middle of another multi-step operation or
    # with local modifications in the working directory
    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    node = scmutil.revsingle(repo, rev).node()

    op1, op2 = repo.dirstate.parents()
    # only a changeset on the current line of history can be backed out
    if node not in repo.changelog.commonancestorsheads(op1, node):
        raise util.Abort(_('cannot backout change that is not an ancestor'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        # backing out a merge requires --parent to pick which side to keep
        if not opts.get('parent'):
            raise util.Abort(_('cannot backout a merge changeset'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    wlock = repo.wlock()
    try:
        branch = repo.dirstate.branch()
        bheads = repo.branchheads(branch)
        rctx = scmutil.revsingle(repo, hex(parent))
        if not opts.get('merge') and op1 != node:
            try:
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                             'backout')
                # reverse-merge REV out of the working directory, then
                # restore the original dirstate parents so the result is
                # committed as a single linear changeset
                stats = mergemod.update(repo, parent, True, True, False,
                                        node, False)
                repo.setparents(op1, op2)
                hg._showstats(repo, stats)
                if stats[3]:
                    # stats[3] counts unresolved files
                    repo.ui.status(_("use 'hg resolve' to retry unresolved "
                                     "file merges\n"))
                else:
                    msg = _("changeset %s backed out, "
                            "don't forget to commit.\n")
                    ui.status(msg % short(node))
                return stats[3] > 0
            finally:
                ui.setconfig('ui', 'forcemerge', '', '')
        else:
            # REV is the working directory parent (or --merge given):
            # update to REV, revert to its chosen parent, and auto-commit
            hg.clean(repo, node, show_stats=False)
            repo.dirstate.setbranch(branch)
            cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())


        def commitfunc(ui, repo, message, match, opts):
            # commit callback: supply a default message (and force the
            # editor) when the user gave none
            editform = 'backout'
            e = cmdutil.getcommiteditor(editform=editform, **opts)
            if not message:
                # we don't translate commit messages
                message = "Backed out changeset %s" % short(node)
                e = cmdutil.getcommiteditor(edit=True, editform=editform)
            return repo.commit(message, opts.get('user'), opts.get('date'),
                               match, editor=e)
        newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
        if not newnode:
            ui.status(_("nothing changed\n"))
            return 1
        cmdutil.commitstatus(repo, newnode, branch, bheads)

        def nice(node):
            # human-friendly "rev:shorthash" form for status messages
            return '%d:%s' % (repo.changelog.rev(node), short(node))
        ui.status(_('changeset %s backs out changeset %s\n') %
                  (nice(repo.changelog.tip()), nice(node)))
        if opts.get('merge') and op1 != node:
            # --merge: go back to the old dirstate parent and merge the
            # backout changeset into it
            hg.clean(repo, op1, show_stats=False)
            ui.status(_('merging with changeset %s\n')
                      % nice(repo.changelog.tip()))
            try:
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                             'backout')
                return hg.merge(repo, hex(repo.changelog.tip()))
            finally:
                ui.setconfig('ui', 'forcemerge', '', '')
    finally:
        wlock.release()
    return 0
539 539
@command('bisect',
    [('r', 'reset', False, _('reset bisect state')),
    ('g', 'good', False, _('mark changeset good')),
    ('b', 'bad', False, _('mark changeset bad')),
    ('s', 'skip', False, _('skip testing changeset')),
    ('e', 'extend', False, _('extend the bisect range')),
    ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
    ('U', 'noupdate', False, _('do not update to target'))],
    _("[-gbsr] [-U] [-c CMD] [REV]"))
def bisect(ui, repo, rev=None, extra=None, command=None,
               reset=None, good=None, bad=None, skip=None, extend=None,
               noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems. To
    use, mark the earliest changeset you know exhibits the problem as
    bad, then mark the latest changeset which is free from the problem
    as good. Bisect will update your working directory to a revision
    for testing (unless the -U/--noupdate option is specified). Once
    you have performed tests, mark the working directory as good or
    bad, and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.

    If you supply a command, it will be used for automatic bisection.
    The environment variable HG_NODE will contain the ID of the
    changeset being tested. The exit status of the command will be
    used to mark revisions as good or bad: status 0 means good, 125
    means to skip the revision, 127 (command not found) will abort the
    bisection, and any other non-zero exit status means the revision
    is bad.

    .. container:: verbose

      Some examples:

      - start a bisection with known bad revision 34, and good revision 12::

          hg bisect --bad 34
          hg bisect --good 12

      - advance the current bisection by marking current revision as good or
        bad::

          hg bisect --good
          hg bisect --bad

      - mark the current revision, or a known revision, to be skipped (e.g. if
        that revision is not usable because of another issue)::

          hg bisect --skip
          hg bisect --skip 23

      - skip all revisions that do not touch directories ``foo`` or ``bar``::

          hg bisect --skip "!( file('path:foo') & file('path:bar') )"

      - forget the current bisection::

          hg bisect --reset

      - use 'make && make tests' to automatically find the first broken
        revision::

          hg bisect --reset
          hg bisect --bad 34
          hg bisect --good 12
          hg bisect --command "make && make tests"

      - see all changesets whose states are already known in the current
        bisection::

          hg log -r "bisect(pruned)"

      - see the changeset currently being bisected (especially useful
        if running with -U/--noupdate)::

          hg log -r "bisect(current)"

      - see all changesets that took part in the current bisection::

          hg log -r "bisect(range)"

      - you can even get a nice graph::

          hg log --graph -r "bisect(range)"

      See :hg:`help revsets` for more about the `bisect()` keyword.

    Returns 0 on success.
    """
    # helper: find the common ancestor to continue from when bisection
    # stopped on a merge whose other parent was never tested
    def extendbisectrange(nodes, good):
        # bisect is incomplete when it ends on a merge node and
        # one of the parent was not checked.
        parents = repo[nodes[0]].parents()
        if len(parents) > 1:
            side = good and state['bad'] or state['good']
            num = len(set(i.node() for i in parents) & set(side))
            if num == 1:
                return parents[0].ancestor(parents[1])
        return None

    # helper: report the final answer (one revision, or the candidate set
    # when skips made the result ambiguous)
    def print_result(nodes, good):
        displayer = cmdutil.show_changeset(ui, repo, {})
        if len(nodes) == 1:
            # narrowed it down to a single revision
            if good:
                ui.write(_("The first good revision is:\n"))
            else:
                ui.write(_("The first bad revision is:\n"))
            displayer.show(repo[nodes[0]])
            extendnode = extendbisectrange(nodes, good)
            if extendnode is not None:
                ui.write(_('Not all ancestors of this changeset have been'
                           ' checked.\nUse bisect --extend to continue the '
                           'bisection from\nthe common ancestor, %s.\n')
                         % extendnode)
        else:
            # multiple possible revisions
            if good:
                ui.write(_("Due to skipped revisions, the first "
                        "good revision could be any of:\n"))
            else:
                ui.write(_("Due to skipped revisions, the first "
                        "bad revision could be any of:\n"))
            for n in nodes:
                displayer.show(repo[n])
        displayer.close()

    # helper: abort unless at least one good and one bad revision are
    # known (interactive callers get a pass while still marking states)
    def check_state(state, interactive=True):
        if not state['good'] or not state['bad']:
            if (good or bad or skip or reset) and interactive:
                return
            if not state['good']:
                raise util.Abort(_('cannot bisect (no known good revisions)'))
            else:
                raise util.Abort(_('cannot bisect (no known bad revisions)'))
        return True

    # backward compatibility
    if rev in "good bad reset init".split():
        # legacy 'hg bisect <cmd> [REV]' form: shift arguments over
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra or good + bad + skip + reset + extend + bool(command) > 1:
        raise util.Abort(_('incompatible arguments'))

    cmdutil.checkunfinished(repo)

    if reset:
        # --reset: simply remove the on-disk bisect state file
        p = repo.join("bisect.state")
        if os.path.exists(p):
            os.unlink(p)
        return

    state = hbisect.load_state(repo)

    if command:
        # automatic mode: run the command repeatedly, mapping its exit
        # status onto good/bad/skip until the culprit is isolated
        changesets = 1
        if noupdate:
            try:
                node = state['current'][0]
            except LookupError:
                raise util.Abort(_('current bisect revision is unknown - '
                                   'start a new bisect to fix'))
        else:
            node, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise util.Abort(_('current bisect revision is a merge'))
        try:
            while changesets:
                # update state
                state['current'] = [node]
                hbisect.save_state(repo, state)
                status = util.system(command,
                                     environ={'HG_NODE': hex(node)},
                                     out=ui.fout)
                if status == 125:
                    transition = "skip"
                elif status == 0:
                    transition = "good"
                # status < 0 means process was killed
                elif status == 127:
                    raise util.Abort(_("failed to execute %s") % command)
                elif status < 0:
                    raise util.Abort(_("%s killed") % command)
                else:
                    transition = "bad"
                ctx = scmutil.revsingle(repo, rev, node)
                rev = None # clear for future iterations
                state[transition].append(ctx.node())
                ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
                check_state(state, interactive=False)
                # bisect
                nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
                # update to next check
                node = nodes[0]
                if not noupdate:
                    cmdutil.bailifchanged(repo)
                    hg.clean(repo, node, show_stats=False)
        finally:
            # always persist the last tested revision, even on abort
            state['current'] = [node]
            hbisect.save_state(repo, state)
        print_result(nodes, bgood)
        return

    # update state

    if rev:
        nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
    else:
        nodes = [repo.lookup('.')]

    if good or bad or skip:
        if good:
            state['good'] += nodes
        elif bad:
            state['bad'] += nodes
        elif skip:
            state['skip'] += nodes
        hbisect.save_state(repo, state)

    if not check_state(state):
        return

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo.changelog, state)
    if extend:
        if not changesets:
            extendnode = extendbisectrange(nodes, good)
            if extendnode is not None:
                ui.write(_("Extending search to changeset %d:%s\n")
                         % (extendnode.rev(), extendnode))
                state['current'] = [extendnode.node()]
                hbisect.save_state(repo, state)
                if noupdate:
                    return
                cmdutil.bailifchanged(repo)
                return hg.clean(repo, extendnode.node())
        raise util.Abort(_("nothing to extend"))

    if changesets == 0:
        print_result(nodes, good)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %d:%s "
                   "(%d changesets remaining, ~%d tests)\n")
                 % (rev, short(node), changesets, tests))
        state['current'] = [node]
        hbisect.save_state(repo, state)
        if not noupdate:
            cmdutil.bailifchanged(repo)
            return hg.clean(repo, node)
806 806
@command('bookmarks|bookmark',
    [('f', 'force', False, _('force')),
    ('r', 'rev', '', _('revision'), _('REV')),
    ('d', 'delete', False, _('delete a given bookmark')),
    ('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
    ('i', 'inactive', False, _('mark a bookmark inactive'))],
    _('hg bookmarks [OPTIONS]... [NAME]...'))
def bookmark(ui, repo, *names, **opts):
    '''create a new bookmark or list existing bookmarks

    Bookmarks are labels on changesets to help track lines of development.
    Bookmarks are unversioned and can be moved, renamed and deleted.
    Deleting or moving a bookmark has no effect on the associated changesets.

    Creating or updating to a bookmark causes it to be marked as 'active'.
    Active bookmarks are indicated with a '*'.
    When a commit is made, an active bookmark will advance to the new commit.
    A plain :hg:`update` will also advance an active bookmark, if possible.
    Updating away from a bookmark will cause it to be deactivated.

    Bookmarks can be pushed and pulled between repositories (see
    :hg:`help push` and :hg:`help pull`). If a shared bookmark has
    diverged, a new 'divergent bookmark' of the form 'name@path' will
    be created. Using :hg:'merge' will resolve the divergence.

    A bookmark named '@' has the special property that :hg:`clone` will
    check it out by default if it exists.

    .. container:: verbose

      Examples:

      - create an active bookmark for a new line of development::

          hg book new-feature

      - create an inactive bookmark as a place marker::

          hg book -i reviewed

      - create an inactive bookmark on another changeset::

          hg book -r .^ tested

      - move the '@' bookmark from another branch::

          hg book -f @
    '''
    force = opts.get('force')
    rev = opts.get('rev')
    delete = opts.get('delete')
    rename = opts.get('rename')
    inactive = opts.get('inactive')

    # helper: validate a bookmark name and return its stripped form
    def checkformat(mark):
        mark = mark.strip()
        if not mark:
            raise util.Abort(_("bookmark names cannot consist entirely of "
                               "whitespace"))
        scmutil.checknewlabel(repo, mark, 'bookmark')
        return mark

    # helper: abort if 'mark' clashes with an existing bookmark or branch
    # name; also cleans up divergent 'name@path' bookmarks that a move to
    # 'target' makes obsolete (closes over 'marks' from the caller)
    def checkconflict(repo, mark, cur, force=False, target=None):
        if mark in marks and not force:
            if target:
                if marks[mark] == target and target == cur:
                    # re-activating a bookmark
                    return
                anc = repo.changelog.ancestors([repo[target].rev()])
                bmctx = repo[marks[mark]]
                # all bookmarks sharing this one's base name (divergent set)
                divs = [repo[b].node() for b in marks
                        if b.split('@', 1)[0] == mark.split('@', 1)[0]]

                # allow resolving a single divergent bookmark even if moving
                # the bookmark across branches when a revision is specified
                # that contains a divergent bookmark
                if bmctx.rev() not in anc and target in divs:
                    bookmarks.deletedivergent(repo, [target], mark)
                    return

                deletefrom = [b for b in divs
                              if repo[b].rev() in anc or b == target]
                bookmarks.deletedivergent(repo, deletefrom, mark)
                if bookmarks.validdest(repo, bmctx, repo[target]):
                    ui.status(_("moving bookmark '%s' forward from %s\n") %
                              (mark, short(bmctx.node())))
                    return
            raise util.Abort(_("bookmark '%s' already exists "
                               "(use -f to force)") % mark)
        if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
            and not force):
            raise util.Abort(
                _("a bookmark cannot have the name of an existing branch"))

    # reject incompatible option combinations up front
    if delete and rename:
        raise util.Abort(_("--delete and --rename are incompatible"))
    if delete and rev:
        raise util.Abort(_("--rev is incompatible with --delete"))
    if rename and rev:
        raise util.Abort(_("--rev is incompatible with --rename"))
    if not names and (delete or rev):
        raise util.Abort(_("bookmark name required"))

    if delete or rename or names or inactive:
        # any mutating action needs the working-directory lock
        wlock = repo.wlock()
        try:
            cur = repo.changectx('.').node()
            marks = repo._bookmarks
            if delete:
                for mark in names:
                    if mark not in marks:
                        raise util.Abort(_("bookmark '%s' does not exist") %
                                         mark)
                    if mark == repo._bookmarkcurrent:
                        bookmarks.unsetcurrent(repo)
                    del marks[mark]
                marks.write()

            elif rename:
                if not names:
                    raise util.Abort(_("new bookmark name required"))
                elif len(names) > 1:
                    raise util.Abort(_("only one new bookmark name allowed"))
                mark = checkformat(names[0])
                if rename not in marks:
                    raise util.Abort(_("bookmark '%s' does not exist") % rename)
                checkconflict(repo, mark, cur, force)
                marks[mark] = marks[rename]
                if repo._bookmarkcurrent == rename and not inactive:
                    bookmarks.setcurrent(repo, mark)
                del marks[rename]
                marks.write()

            elif names:
                # create/move one or more bookmarks; the first one named
                # becomes the candidate for activation
                newact = None
                for mark in names:
                    mark = checkformat(mark)
                    if newact is None:
                        newact = mark
                    if inactive and mark == repo._bookmarkcurrent:
                        bookmarks.unsetcurrent(repo)
                        return
                    tgt = cur
                    if rev:
                        tgt = scmutil.revsingle(repo, rev).node()
                    checkconflict(repo, mark, cur, force, tgt)
                    marks[mark] = tgt
                if not inactive and cur == marks[newact] and not rev:
                    bookmarks.setcurrent(repo, newact)
                elif cur != tgt and newact == repo._bookmarkcurrent:
                    bookmarks.unsetcurrent(repo)
                marks.write()

            elif inactive:
                # bare --inactive: deactivate the current bookmark
                if len(marks) == 0:
                    ui.status(_("no bookmarks set\n"))
                elif not repo._bookmarkcurrent:
                    ui.status(_("no active bookmark\n"))
                else:
                    bookmarks.unsetcurrent(repo)
        finally:
            wlock.release()
    else: # show bookmarks
        hexfn = ui.debugflag and hex or short
        marks = repo._bookmarks
        if len(marks) == 0:
            ui.status(_("no bookmarks set\n"))
        else:
            for bmark, n in sorted(marks.iteritems()):
                current = repo._bookmarkcurrent
                if bmark == current:
                    prefix, label = '*', 'bookmarks.current'
                else:
                    prefix, label = ' ', ''

                if ui.quiet:
                    ui.write("%s\n" % bmark, label=label)
                else:
                    # pad names so the rev:hash columns line up
                    pad = " " * (25 - encoding.colwidth(bmark))
                    ui.write(" %s %s%s %d:%s\n" % (
                        prefix, bmark, pad, repo.changelog.rev(n), hexfn(n)),
                        label=label)
989 989
@command('branch',
    [('f', 'force', None,
      _('set branch name even if it shadows an existing branch')),
    ('C', 'clean', None, _('reset branch name to parent branch name'))],
    _('[-fC] [NAME]'))
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    .. note::

       Branch names are permanent and global. Use :hg:`bookmark` to create a
       light-weight bookmark instead. See :hg:`help glossary` for more
       information about named branches and bookmarks.

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch will not exist
    in the repository until the next commit). Standard practice
    recommends that primary development take place on the 'default'
    branch.

    Unless -f/--force is specified, branch will not let you set a
    branch name that already exists, even if it's inactive.

    Use -C/--clean to reset the working directory branch to that of
    the parent of the working directory, negating a previous branch
    change.

    Use the command :hg:`update` to switch to an existing branch. Use
    :hg:`commit --close-branch` to mark this branch as closed.

    Returns 0 on success.
    """
    if label:
        label = label.strip()

    # with neither --clean nor a new name, just report the current branch
    if not label and not opts.get('clean'):
        ui.write("%s\n" % repo.dirstate.branch())
        return

    wlock = repo.wlock()
    try:
        if opts.get('clean'):
            # revert to the branch of the working directory's first parent
            label = repo[None].p1().branch()
            repo.dirstate.setbranch(label)
            ui.status(_('reset working directory to branch %s\n') % label)
        elif label:
            # without --force, refuse names that shadow an existing branch
            # (unless we are merely re-selecting a parent's branch)
            shadows = not opts.get('force') and label in repo.branchmap()
            if shadows and all(p.branch() != label
                               for p in repo.parents()):
                raise util.Abort(_('a branch of the same name already'
                                   ' exists'),
                                 # i18n: "it" refers to an existing branch
                                 hint=_("use 'hg update' to switch to it"))
            scmutil.checknewlabel(repo, label, 'branch')
            repo.dirstate.setbranch(label)
            ui.status(_('marked working directory as branch %s\n') % label)
            ui.status(_('(branches are permanent and global, '
                        'did you want a bookmark?)\n'))
    finally:
        wlock.release()
1049 1049
@command('branches',
    [('a', 'active', False, _('show only branches that have unmerged heads')),
    ('c', 'closed', False, _('show normal and closed branches'))],
    _('[-ac]'))
def branches(ui, repo, active=False, closed=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If -c/--closed is specified, also list branches which have
    been marked closed (see :hg:`commit --close-branch`).

    If -a/--active is specified, only show active branches. A branch
    is considered active if it contains repository heads.

    Use the command :hg:`update` to switch to an existing branch.

    Returns 0.
    """

    hexfunc = hex if ui.debugflag else short

    # a branch is active when one of its heads is a repository head
    allheads = set(repo.heads())
    branches = [(tag, repo[tip],
                 not isclosed and bool(set(heads) & allheads),
                 not isclosed)
                for tag, heads, tip, isclosed
                in repo.branchmap().iterbranches()]
    # active branches first, then by descending tip revision
    branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
                  reverse=True)

    for tag, ctx, isactive, isopen in branches:
        if active and not isactive:
            continue
        if isactive:
            label = 'branches.active'
            notice = ''
        elif not isopen:
            if not closed:
                continue
            label = 'branches.closed'
            notice = _(' (closed)')
        else:
            label = 'branches.inactive'
            notice = _(' (inactive)')
        if tag == repo.dirstate.branch():
            label = 'branches.current'
        # right-align the revision so the hash column lines up
        rev = str(ctx.rev()).rjust(31 - encoding.colwidth(tag))
        rev = ui.label('%s:%s' % (rev, hexfunc(ctx.node())),
                       'log.changeset changeset.%s' % ctx.phasestr())
        labeledtag = ui.label(tag, label)
        if ui.quiet:
            ui.write("%s\n" % labeledtag)
        else:
            ui.write("%s %s%s\n" % (labeledtag, rev, notice))
1102 1102
@command('bundle',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [], _('a changeset intended to be added to the destination'),
     _('REV')),
    ('b', 'branch', [], _('a specific branch you would like to bundle'),
     _('BRANCH')),
    ('', 'base', [],
     _('a base changeset assumed to be available at the destination'),
     _('REV')),
    ('a', 'all', None, _('bundle all changesets in the repository')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
    ] + remoteopts,
    _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    known to be in another repository.

    If you omit the destination repository, then hg assumes the
    destination will have all the nodes you specify with --base
    parameters. To create a bundle containing all changesets, use
    -a/--all (or --base null).

    You can change compression method with the -t/--type option.
    The available compression methods are: none, bzip2, and
    gzip (by default, bundles are compressed using bzip2).

    The bundle file can then be transferred using conventional means
    and applied to another repository with the unbundle or pull
    command. This is useful when direct push and pull are not
    available or when exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.

    Returns 0 on success, 1 if no changes found.
    """
    # NOTE(review): 'rev' is a declared option, so it is presumably always
    # present in opts and revs ends up [] (not None) when no -r is given;
    # the 'revs and ... or revs' expressions below depend on exactly this
    # falsy-[]/None behavior -- confirm before simplifying.
    revs = None
    if 'rev' in opts:
        revs = scmutil.revrange(repo, opts['rev'])

    # map the user-facing compression name to the internal bundle header
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))

    if opts.get('all'):
        base = ['null']
    else:
        base = scmutil.revrange(repo, opts.get('base'))
    # TODO: get desired bundlecaps from command line.
    bundlecaps = None
    if base:
        # --base given: trust that those revisions exist remotely and
        # build the bundle without contacting any destination
        if dest:
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        common = [repo.lookup(rev) for rev in base]
        heads = revs and map(repo.lookup, revs) or revs
        cg = changegroup.getbundle(repo, 'bundle', heads=heads, common=common,
                                   bundlecaps=bundlecaps)
        outgoing = None
    else:
        # no --base: run outgoing discovery against the destination repo
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
        heads = revs and map(repo.lookup, revs) or revs
        outgoing = discovery.findcommonoutgoing(repo, other,
                                                onlyheads=heads,
                                                force=opts.get('force'),
                                                portable=True)
        cg = changegroup.getlocalbundle(repo, 'bundle', outgoing, bundlecaps)
    if not cg:
        scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
        return 1

    changegroup.writebundle(cg, fname, bundletype)
1182 1182
@command('cat',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('r', 'rev', '', _('print the given revision'), _('REV')),
    ('', 'decode', None, _('apply any matching decode filter')),
    ] + walkopts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision. If
    no revision is given, the parent of the working directory is used.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules as follows:

    :``%%``: literal "%" character
    :``%s``: basename of file being printed
    :``%d``: dirname of file being printed, or '.' if in repository root
    :``%p``: root-relative path name of file being printed
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%R``: changeset revision number
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%r``: zero-padded changeset revision number
    :``%b``: basename of the exporting repository

    Returns 0 on success.
    """
    # resolve the requested revision (working dir parent by default),
    # build a matcher over the named files, and delegate the actual
    # printing to cmdutil.cat
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    return cmdutil.cat(ui, repo, ctx, matcher, '', **opts)
1216 1216
@command('^clone',
    [('U', 'noupdate', None,
     _('the clone will include an empty working copy (only a repository)')),
    ('u', 'updaterev', '', _('revision, tag or branch to check out'), _('REV')),
    ('r', 'rev', [], _('include the specified changeset'), _('REV')),
    ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
    ('', 'pull', None, _('use pull protocol to copy metadata')),
    ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
    ] + remoteopts,
    _('[OPTION]... SOURCE [DEST]'),
    norepo=True)
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    ``.hg/hgrc`` file, as the default to be used for future pulls.

    Only local paths and ``ssh://`` URLs are supported as
    destinations. For ``ssh://`` destinations, no working directory or
    ``.hg/hgrc`` will be created on the remote side.

    To pull only a subset of changesets, specify one or more revisions
    identifiers with -r/--rev or branches with -b/--branch. The
    resulting clone will contain only the specified changesets and
    their ancestors. These options (or 'clone src#rev dest') imply
    --pull, even for local source repositories. Note that specifying a
    tag will include the tagged changeset but not the changeset
    containing the tag.

    If the source repository has a bookmark called '@' set, that
    revision will be checked out in the new repository by default.

    To check out a particular version, use -u/--update, or
    -U/--noupdate to create a clone with no working directory.

    .. container:: verbose

      For efficiency, hardlinks are used for cloning whenever the
      source and destination are on the same filesystem (note this
      applies only to the repository data, not to the working
      directory). Some filesystems, such as AFS, implement hardlinking
      incorrectly, but do not report errors. In these cases, use the
      --pull option to avoid hardlinking.

      In some cases, you can clone repositories and the working
      directory using full hardlinks with ::

        $ cp -al REPO REPOCLONE

      This is the fastest way to clone, but it is not always safe. The
      operation is not atomic (making sure REPO is not modified during
      the operation is up to you) and you have to make sure your
      editor breaks hardlinks (Emacs and most Linux Kernel tools do
      so). Also, this is not compatible with certain extensions that
      place their metadata under the .hg directory, such as mq.

      Mercurial will update the working directory to the first applicable
      revision from this list:

      a) null if -U or the source repository has no changesets
      b) if -u . and the source repository is local, the first parent of
         the source repository's working directory
      c) the changeset specified with -u (if a branch name, this means the
         latest head of that branch)
      d) the changeset specified with -r
      e) the tipmost head specified with -b
      f) the tipmost head specified with the url#branch source syntax
      g) the revision marked with the '@' bookmark, if present
      h) the tipmost head of the default branch
      i) tip

      Examples:

      - clone a remote repository to a new directory named hg/::

          hg clone http://selenic.com/hg

      - create a lightweight local clone::

          hg clone project/ project-feature/

      - clone from an absolute path on an ssh server (note double-slash)::

          hg clone ssh://user@server//home/projects/alpha/

      - do a high-speed clone over a LAN while checking out a
        specified version::

          hg clone --uncompressed http://server/repo -u 1.5

      - create a repository without changesets after a particular revision::

          hg clone -r 04e544 experimental/ good/

      - clone (and track) a particular named branch::

          hg clone http://selenic.com/hg#stable

    See :hg:`help urls` for details on specifying URLs.

    Returns 0 on success.
    """
    noupdate = opts.get('noupdate')
    updaterev = opts.get('updaterev')
    if noupdate and updaterev:
        raise util.Abort(_("cannot specify both --noupdate and --updaterev"))

    result = hg.clone(ui, opts, source, dest,
                      pull=opts.get('pull'),
                      stream=opts.get('uncompressed'),
                      rev=opts.get('rev'),
                      update=updaterev or not noupdate,
                      branch=opts.get('branch'))

    # a missing result is reported as failure (truthy return -> exit 1)
    return result is None
1335 1335
@command('^commit|ci',
    [('A', 'addremove', None,
     _('mark new/missing files as added/removed before committing')),
    ('', 'close-branch', None,
     _('mark a branch as closed, hiding it from the branch list')),
    ('', 'amend', None, _('amend the parent of the working dir')),
    ('s', 'secret', None, _('use the secret phase for committing')),
    ('e', 'edit', None, _('invoke editor on commit messages')),
    ] + walkopts + commitopts + commitopts2 + subrepoopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository. Unlike a
    centralized SCM, this operation is a local operation. See
    :hg:`push` for a way to actively distribute your changes.

    If a list of files is omitted, all changes reported by :hg:`status`
    will be committed.

    If you are committing the result of a merge, do not provide any
    filenames or -I/-X filters.

    If no commit message is specified, Mercurial starts your
    configured editor where you can enter a message. In case your
    commit fails, you will find a backup of your message in
    ``.hg/last-message.txt``.

    The --amend flag can be used to amend the parent of the
    working directory with a new commit that contains the changes
    in the parent in addition to those currently reported by :hg:`status`,
    if there are any. The old commit is stored in a backup bundle in
    ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
    on how to restore it).

    Message, user and date are taken from the amended commit unless
    specified. When a message isn't specified on the command line,
    the editor will open with the message of the amended commit.

    It is not possible to amend public changesets (see :hg:`help phases`)
    or changesets that have children.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if nothing changed.
    """
    if opts.get('subrepos'):
        # --amend and subrepo recursion are mutually exclusive
        if opts.get('amend'):
            raise util.Abort(_('cannot amend with --subrepos'))
        # Let --subrepos on the command line override config setting.
        ui.setconfig('ui', 'commitsubrepos', True, 'commit')

    # refuse to commit while another multi-step operation is unfinished
    cmdutil.checkunfinished(repo, commit=True)

    branch = repo[None].branch()
    bheads = repo.branchheads(branch)

    extra = {}
    if opts.get('close_branch'):
        extra['close'] = 1

        # --close-branch is only valid on a head of the current branch;
        # when amending, one of the grandparents must be on this branch
        if not bheads:
            raise util.Abort(_('can only close branch heads'))
        elif opts.get('amend'):
            if repo.parents()[0].p1().branch() != branch and \
                    repo.parents()[0].p2().branch() != branch:
                raise util.Abort(_('can only close branch heads'))

    if opts.get('amend'):
        if ui.configbool('ui', 'commitsubrepos'):
            raise util.Abort(_('cannot amend with ui.commitsubrepos enabled'))

        old = repo['.']
        # amending must not rewrite immutable or already-built-upon history
        if old.phase() == phases.public:
            raise util.Abort(_('cannot amend public changesets'))
        if len(repo[None].parents()) > 1:
            raise util.Abort(_('cannot amend while merging'))
        if (not obsolete._enabled) and old.children():
            raise util.Abort(_('cannot amend changeset with children'))

        # commitfunc is used only for temporary amend commit by cmdutil.amend
        def commitfunc(ui, repo, message, match, opts):
            return repo.commit(message,
                               opts.get('user') or old.user(),
                               opts.get('date') or old.date(),
                               match,
                               extra=extra)

        current = repo._bookmarkcurrent
        marks = old.bookmarks()
        node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
        if node == old.node():
            ui.status(_("nothing changed\n"))
            return 1
        elif marks:
            # move any bookmarks from the amended changeset to its successor
            ui.debug('moving bookmarks %r from %s to %s\n' %
                     (marks, old.hex(), hex(node)))
            newmarks = repo._bookmarks
            for bm in marks:
                newmarks[bm] = node
                if bm == current:
                    bookmarks.setcurrent(repo, bm)
            newmarks.write()
    else:
        def commitfunc(ui, repo, message, match, opts):
            # temporarily force the 'secret' phase when -s/--secret is given,
            # restoring the previous config afterwards
            backup = ui.backupconfig('phases', 'new-commit')
            baseui = repo.baseui
            basebackup = baseui.backupconfig('phases', 'new-commit')
            try:
                if opts.get('secret'):
                    ui.setconfig('phases', 'new-commit', 'secret', 'commit')
                    # Propagate to subrepos
                    baseui.setconfig('phases', 'new-commit', 'secret', 'commit')

                editform = 'commit.normal'
                editor = cmdutil.getcommiteditor(editform=editform, **opts)
                return repo.commit(message, opts.get('user'), opts.get('date'),
                                   match,
                                   editor=editor,
                                   extra=extra)
            finally:
                ui.restoreconfig(backup)
                repo.baseui.restoreconfig(basebackup)


        node = cmdutil.commit(ui, repo, commitfunc, pats, opts)

        if not node:
            # nothing committed: distinguish "missing files" from "no changes"
            stat = repo.status(match=scmutil.match(repo[None], pats, opts))
            if stat[3]:
                ui.status(_("nothing changed (%d missing files, see "
                            "'hg status')\n") % len(stat[3]))
            else:
                ui.status(_("nothing changed\n"))
            return 1

    cmdutil.commitstatus(repo, node, branch, bheads, opts)
1474 1474
@command('config|showconfig|debugconfig',
    [('u', 'untrusted', None, _('show untrusted configuration options')),
     ('e', 'edit', None, _('edit user config')),
     ('l', 'local', None, _('edit repository config')),
     ('g', 'global', None, _('edit global config'))],
    _('[-u] [NAME]...'),
    optionalrepo=True)
def config(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no arguments, print names and values of all config items.

    With one argument of the form section.name, print just the value
    of that config item.

    With multiple arguments, print names and values of all config
    items with matching section names.

    With --edit, start an editor on the user-level config file. With
    --global, edit the system-wide config file. With --local, edit the
    repository-level config file.

    With --debug, the source (filename and line number) is printed
    for each config item.

    See :hg:`help config` for more information about config files.

    Returns 0 on success.

    """

    # --edit / --local / --global: open an editor on a config file
    # instead of printing settings
    if opts.get('edit') or opts.get('local') or opts.get('global'):
        if opts.get('local') and opts.get('global'):
            raise util.Abort(_("can't use --local and --global together"))

        if opts.get('local'):
            if not repo:
                raise util.Abort(_("can't use --local outside a repository"))
            paths = [repo.join('hgrc')]
        elif opts.get('global'):
            paths = scmutil.systemrcpath()
        else:
            paths = scmutil.userrcpath()

        # edit the first existing candidate file; if none exists yet,
        # seed the first candidate with a commented example config
        for f in paths:
            if os.path.exists(f):
                break
        else:
            f = paths[0]
            fp = open(f, "w")
            fp.write(
                '# example config (see "hg help config" for more info)\n'
                '\n'
                '[ui]\n'
                '# name and email, e.g.\n'
                '# username = Jane Doe <jdoe@example.com>\n'
                'username =\n'
                '\n'
                '[extensions]\n'
                '# uncomment these lines to enable some popular extensions\n'
                '# (see "hg help extensions" for more info)\n'
                '# pager =\n'
                '# progress =\n'
                '# color =\n')
            fp.close()

        editor = ui.geteditor()
        util.system("%s \"%s\"" % (editor, f),
                    onerr=util.Abort, errprefix=_("edit failed"),
                    out=ui.fout)
        return

    # print mode: walk every config item and filter by the requested
    # section names and/or fully-qualified item names
    for f in scmutil.rcpath():
        ui.debug('read config from: %s\n' % f)
    untrusted = bool(opts.get('untrusted'))
    if values:
        sections = [v for v in values if '.' not in v]
        items = [v for v in values if '.' in v]
        if len(items) > 1 or items and sections:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        value = str(value).replace('\n', '\\n')
        sectname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    ui.debug('%s: ' %
                             ui.configsource(section, name, untrusted))
                    ui.write('%s=%s\n' % (sectname, value))
                elif v == sectname:
                    ui.debug('%s: ' %
                             ui.configsource(section, name, untrusted))
                    ui.write(value, '\n')
        else:
            ui.debug('%s: ' %
                     ui.configsource(section, name, untrusted))
            ui.write('%s=%s\n' % (sectname, value))
1572 1572
@command('copy|cp',
    [('A', 'after', None, _('record a copy that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [SOURCE]... DEST'))
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    the source must be a single file.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect with the next commit. To undo a copy
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    # take the working-directory lock for the duration of the copy;
    # cmdutil.copy does all of the actual work
    lock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts)
    finally:
        lock.release()
1599 1599
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # an explicit revlog index file was given
        index, rev1, rev2 = args
        rl = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index)
        lookup = rl.lookup
    elif nargs == 2:
        # no index file: fall back to the current repository's changelog
        if not repo:
            raise util.Abort(_("there is no Mercurial repository here "
                               "(.hg not found)"))
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    else:
        raise util.Abort(_('either two or three arguments required'))
    ancestor = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write("%d:%s\n" % (rl.rev(ancestor), hex(ancestor)))
1618 1618
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
           otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # this command only makes sense starting from an empty repository
    cl = repo.changelog
    if len(cl) > 0:
        raise util.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    lock = tr = None
    try:
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1                 # rev id of the most recently created node
        atbranch = 'default'    # branch name for subsequently created nodes
        nodeids = []            # node hash per DAG id, for backrefs
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % str(data)))
                id, ps = data

                files = []
                fctxs = {}

                p2 = None
                if mergeable_file:
                    # maintain a single file "mf" whose content merges
                    # cleanly across the DAG's merges
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # tag this rev's slice of the file with its rev id
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, mergedtext)

                if overwritten_file:
                    # file "of" is fully rewritten at every revision
                    fn = "of"
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)

                if new_file:
                    # a brand new file per revision; merges keep the
                    # second parent's "nf*" files too
                    fn = "nf%i" % id
                    files.append(fn)
                    fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id)
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                fctxs[fn] = p2[fn]

                def fctxfn(repo, cx, path):
                    return fctxs.get(path)

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # local tag element: remembered and written out at the end
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.opener.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock)
1769 1769
@command('debugbundle',
        [('a', 'all', None, _('show all details'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, **opts):
    """lists the contents of a bundle"""
    f = hg.openpath(ui, bundlepath)
    try:
        gen = exchange.readbundle(ui, f, bundlepath)
        if all:
            # -a/--all: dump every delta chunk of every group in the bundle
            ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n"))

            def showchunks(named):
                # print one line per delta chunk until the group's end marker
                ui.write("\n%s\n" % named)
                chain = None
                while True:
                    chunkdata = gen.deltachunk(chain)
                    if not chunkdata:
                        break
                    node = chunkdata['node']
                    p1 = chunkdata['p1']
                    p2 = chunkdata['p2']
                    cs = chunkdata['cs']
                    deltabase = chunkdata['deltabase']
                    delta = chunkdata['delta']
                    ui.write("%s %s %s %s %s %s\n" %
                             (hex(node), hex(p1), hex(p2),
                              hex(cs), hex(deltabase), len(delta)))
                    chain = node

            # bundle layout: changelog group, manifest group, then one
            # group per filelog introduced by a filename header
            chunkdata = gen.changelogheader()
            showchunks("changelog")
            chunkdata = gen.manifestheader()
            showchunks("manifest")
            while True:
                chunkdata = gen.filelogheader()
                if not chunkdata:
                    break
                fname = chunkdata['filename']
                showchunks(fname)
        else:
            # default: only list the changeset nodes of the changelog group
            chunkdata = gen.changelogheader()
            chain = None
            while True:
                chunkdata = gen.deltachunk(chain)
                if not chunkdata:
                    break
                node = chunkdata['node']
                ui.write("%s\n" % hex(node))
                chain = node
    finally:
        f.close()
1822 1822
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    p1, p2 = repo.dirstate.parents()
    man1 = repo[p1].manifest()
    man2 = repo[p2].manifest()
    errcount = 0
    for fname in repo.dirstate:
        state = repo.dirstate[fname]
        # normal/removed entries must exist in the first parent manifest
        if state in "nr" and fname not in man1:
            ui.warn(_("%s in state %s, but not in manifest1\n")
                    % (fname, state))
            errcount += 1
        # added entries must not already be in the first parent manifest
        if state in "a" and fname in man1:
            ui.warn(_("%s in state %s, but also in manifest1\n")
                    % (fname, state))
            errcount += 1
        # merged entries must come from at least one parent manifest
        if state in "m" and fname not in man1 and fname not in man2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (fname, state))
            errcount += 1
    # conversely, everything in manifest1 must be tracked by the dirstate
    for fname in man1:
        state = repo.dirstate[fname]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s")
                    % (fname, state))
            errcount += 1
    if errcount:
        raise util.Abort(
            _(".hg/dirstate inconsistent with current parent's manifest"))
1850 1850
@command('debugcommands', [], _('[COMMAND]'), norepo=True)
def debugcommands(ui, cmd='', *args):
    """list all available commands and options"""
    for name, entry in sorted(table.iteritems()):
        # primary command name, without aliases or the '^' priority marker
        name = name.split('|')[0].strip('^')
        optnames = ', '.join(o[1] for o in entry[1])
        ui.write('%s: %s\n' % (name, optnames))
1858 1858
@command('debugcomplete',
    [('o', 'options', None, _('show the command options'))],
    _('[-o] CMD'),
    norepo=True)
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        # -o: complete option names (global ones plus, if a command was
        # given, that command's own options)
        opttables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, table, False)
            opttables.append(entry[1])
        flags = []
        for opttable in opttables:
            for opt in opttable:
                if "(DEPRECATED)" in opt[3]:
                    continue
                if opt[0]:
                    flags.append('-%s' % opt[0])
                flags.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(flags))
        return

    cmdlist = cmdutil.findpossible(cmd, table)
    if ui.verbose:
        # verbose mode lists every alias of each matching command
        cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
    ui.write("%s\n" % "\n".join(sorted(cmdlist)))
1886 1886
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get('spaces')
    dots = opts.get('dots')
    if file_:
        # explicit revlog index: emit its DAG, labeling the listed revs
        rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
        revs = set((int(r) for r in revs))
        def events():
            # yield 'n' (node) events, plus 'l' (label) for requested revs
            for r in rlog:
                yield 'n', (r, list(set(p for p in rlog.parentrevs(r)
                                        if p != -1)))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # no file: walk the repository changelog instead
        cl = repo.changelog
        tags = opts.get('tags')
        branches = opts.get('branches')
        if tags:
            # map rev -> list of tag names, for 'l' (label) events below
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event on branch changes
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(set(p for p in cl.parentrevs(r)
                                        if p != -1)))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise util.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
1948 1948
@command('debugdata',
    [('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest'))],
    _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    if opts.get('changelog') or opts.get('manifest'):
        # with -c/-m the positional FILE argument is actually the revision
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rlog = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(rlog.revision(rlog.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
1964 1964
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended also tries the less common date formats
    parsed = (util.parsedate(date, util.extendeddateformats)
              if opts["extended"] else util.parsedate(date))
    ui.write(("internal: %s %s\n") % parsed)
    ui.write(("standard: %s\n") % util.datestr(parsed))
    if range:
        matchfn = util.matchdate(range)
        ui.write(("match: %s\n") % matchfn(parsed[0]))
1980 1980
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ] + remoteopts,
    _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
                                      opts.get('branch'))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(localheads, remoteheads, remote=remote):
        # run one discovery round and report the computed common heads
        if opts.get('old'):
            # legacy (pre-setdiscovery) protocol
            if localheads:
                raise util.Abort('cannot use localheads with old style '
                                 'discovery')
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                # prune the common set down to its heads
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    # NOTE(review): 'serverlog', 'remote_head' and 'local_head' are not
    # declared in this command's option table above, so these opts.get()
    # calls presumably always return None here — verify before relying on
    # the serverlog replay branch.
    serverlogs = opts.get('serverlog')
    if serverlogs:
        # replay discovery rounds recorded in server log files
        for filename in serverlogs:
            logfile = open(filename, 'r')
            try:
                line = logfile.readline()
                while line:
                    parts = line.strip().split(';')
                    op = parts[1]
                    if op == 'cg':
                        pass
                    elif op == 'cgss':
                        doit(parts[2].split(' '), parts[3].split(' '))
                    elif op == 'unb':
                        doit(parts[3].split(' '), parts[2].split(' '))
                    line = logfile.readline()
            finally:
                logfile.close()

    else:
        remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches,
                                                 opts.get('remote_head'))
        localrevs = opts.get('local_head')
        doit(localrevs, remoterevs)
2050 2050
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    # Evaluate against the requested revision (working copy when no --rev).
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)
    if ui.verbose:
        # Verbose mode additionally dumps the parsed fileset tree.
        parsedtree = fileset.parse(expr)[0]
        ui.note(parsedtree, "\n")

    for filename in ctx.getfileset(expr):
        ui.write("%s\n" % filename)
2063 2063
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    # The probe file is needed by util.checkcase below.  Remove it in a
    # finally block so a failing capability check cannot leave it behind
    # (the original unconditionally unlinked it only on the success path).
    util.writefile('.debugfsinfo', '')
    try:
        ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
        ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
        ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
        ui.write(('case-sensitive: %s\n') % (util.checkcase('.debugfsinfo')
                                             and 'yes' or 'no'))
    finally:
        os.unlink('.debugfsinfo')
2074 2074
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise util.Abort("getbundle() not supported by target repository")
    # Translate hex IDs from the command line into binary nodes.
    kwargs = {}
    if common:
        kwargs['common'] = [bin(s) for s in common]
    if head:
        kwargs['heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs['bundlecaps'] = None
    bundle = repo.getbundle('debug', **kwargs)

    # Map the user-facing compression name onto an internal bundle type.
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(opts.get('type', 'bzip2').lower())
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))
    changegroup.writebundle(bundle, bundlepath, bundletype)
2105 2105
@command('debugignore', [], '')
def debugignore(ui, repo, *values, **opts):
    """display the combined ignore pattern"""
    # The combined pattern is exposed by the dirstate's ignore matcher.
    matcher = repo.dirstate._ignore
    pattern = getattr(matcher, 'includepat', None)
    if pattern is None:
        raise util.Abort(_("no ignore patterns found"))
    ui.write("%s\n" % pattern)
2115 2115
@command('debugindex',
    [('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    # Open the requested revlog: changelog, manifest, or a filelog.
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise util.Abort(_("unknown format %d") % format)

    # With generaldelta the base column shows the delta parent; otherwise
    # it shows the start of the delta chain.
    generaldelta = r.version & revlog.REVLOGGENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = ' base'

    if format == 0:
        ui.write(" rev offset length " + basehdr + " linkrev"
                 " nodeid p1 p2\n")
    elif format == 1:
        ui.write(" rev flag offset length"
                 " size " + basehdr + " link p1 p2"
                 " nodeid\n")

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the lookup fails.
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), base, r.linkrev(i),
                short(node), short(pp[0]), short(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                base, r.linkrev(i), pr[0], pr[1], short(node)))
2162 2162
@command('debugindexdot', [], _('FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_):
    """dump an index DAG as a graphviz dot file"""
    # Prefer the filelog from the current repository when it has revisions;
    # otherwise open the named file directly as a revlog.
    rlog = None
    if repo:
        filelog = repo.file(file_)
        if len(filelog):
            rlog = filelog
    if not rlog:
        rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_)
    ui.write(("digraph G {\n"))
    for rev in rlog:
        parents = rlog.parents(rlog.node(rev))
        ui.write("\t%d -> %d\n" % (rlog.rev(parents[0]), rev))
        if parents[1] != nullid:
            # Only emit the second edge for merge revisions.
            ui.write("\t%d -> %d\n" % (rlog.rev(parents[1]), rev))
    ui.write("}\n")
2181 2181
@command('debuginstall', [], '', norepo=True)
def debuginstall(ui):
    '''test Mercurial installation

    Returns 0 on success.
    '''

    # NOTE(review): writetemp is not referenced anywhere in this function
    # body — possibly left over from an earlier editor check; confirm
    # before removing.
    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    # Counter of detected problems; also the command's exit code.
    problems = 0

    # encoding
    ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
    try:
        encoding.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # Python
    ui.status(_("checking Python executable (%s)\n") % sys.executable)
    ui.status(_("checking Python version (%s)\n")
              % ("%s.%s.%s" % sys.version_info[:3]))
    ui.status(_("checking Python lib (%s)...\n")
              % os.path.dirname(os.__file__))

    # compiled modules
    ui.status(_("checking installed modules (%s)...\n")
              % os.path.dirname(__file__))
    try:
        import bdiff, mpatch, base85, osutil
        dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    import templater
    p = templater.templatepath()
    ui.status(_("checking templates (%s)...\n") % ' '.join(p))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            try:
                templater.templater(m)
            except Exception, inst:
                ui.write(" %s\n" % inst)
                p = None
        else:
            ui.write(_(" template 'default' not found\n"))
            p = None
    else:
        ui.write(_(" no template directories found\n"))
    if not p:
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # editor
    ui.status(_("checking commit editor...\n"))
    editor = ui.geteditor()
    cmdpath = util.findexe(shlex.split(editor)[0])
    if not cmdpath:
        # Distinguish the "never configured" default (vi) from a configured
        # but missing editor.
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your configuration"
                       " file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your configuration"
                       " file)\n"))
        problems += 1

    # check username
    ui.status(_("checking username...\n"))
    try:
        ui.username()
    except util.Abort, e:
        ui.write(" %s\n" % e)
        ui.write(_(" (specify a username in your configuration file)\n"))
        problems += 1

    if not problems:
        ui.status(_("no problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems
2279 2279
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise util.Abort("known() not supported by target repository")
    # Render each answer as '1' (known) or '0' (unknown), in input order.
    bits = []
    for known in repo.known([bin(s) for s in ids]):
        bits.append(known and "1" or "0")
    ui.write("%s\n" % "".join(bits))
2292 2292
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''complete "labels" - tags, open branch names, bookmark names'''

    # Gather every completable name: tags, bookmarks and open branch names.
    labels = set(t[0] for t in repo.tagslist())
    labels.update(repo._bookmarks.keys())
    labels.update(tag for (tag, heads, tip, closed)
                  in repo.branchmap().iterbranches() if not closed)
    # An empty argument list completes against the empty prefix.
    prefixes = args or ['']
    completions = set()
    for prefix in prefixes:
        for label in labels:
            if label.startswith(prefix):
                completions.add(label)
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')
2309 2309
@command('debugobsolete',
    [('', 'flags', 0, _('markers flag')),
    ] + commitopts2,
     _('[OBSOLETED [REPLACEMENT] [REPL... ]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    def parsenodeid(s):
        # Convert a full hex node id to binary, aborting on anything else.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise util.Abort('changeset references must be full hexadecimal '
                             'node identifiers')

    if precursor is not None:
        # Creation mode: record a marker precursor -> successors.
        metadata = {}
        # NOTE(review): commitopts2 declares --date, so 'date' is presumably
        # always present in opts (possibly as '') — this condition may always
        # be true; confirm whether opts.get('date') was intended.
        if 'date' in opts:
            metadata['date'] = opts['date']
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        # Marker creation needs the store lock and a transaction; release
        # them in reverse acquisition order.
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                repo.obsstore.create(tr, parsenodeid(precursor), succs,
                                     opts['flags'], metadata)
                tr.close()
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Listing mode: dump every marker in the obsstore.
        for m in obsolete.allmarkers(repo):
            cmdutil.showmarker(ui, m)
2352 2352
@command('debugpathcomplete',
    [('f', 'full', None, _('complete an entire path')),
    ('n', 'normal', None, _('show only normal files')),
    ('a', 'added', None, _('show only added files')),
    ('r', 'removed', None, _('show only removed files'))],
    _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) completing 'path' against dirstate entries
        # whose state byte is in 'acceptable'.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(os.getcwd(), path))
        rootdir = repo.root + os.sep
        # Paths outside the repository cannot be completed.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        # Make the spec repo-relative; the dirstate stores '/' separators,
        # so translate back and forth on platforms where os.sep differs.
        spec = spec[len(rootdir):]
        fixpaths = os.sep != '/'
        if fixpaths:
            spec = spec.replace(os.sep, '/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', os.sep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator and offer
                # the directory prefix instead of the whole path.
                s = f.find(os.sep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the accepted dirstate states from the flags; an empty selection
    # falls back to all of them ('nmar') below.
    acceptable = ''
    if opts['normal']:
        acceptable += 'nm'
    if opts['added']:
        acceptable += 'a'
    if opts['removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
2417 2417
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace.
        for key, value in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (key.encode('string-escape'),
                                   value.encode('string-escape')))
        return
    # Update mode: compare-and-swap old -> new for the given key.
    key, old, new = keyinfo
    result = target.pushkey(namespace, key, old, new)
    ui.status(str(result) + '\n')
    return not result
2438 2438
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """display pvec comparison data for two revisions"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Classify the relation between the two vectors.  Start from '?' so an
    # incomparable pair cannot leave 'rel' unbound and raise a NameError in
    # the write below (the original had no fallback branch).
    rel = "?"
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
2459 2459
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    target = scmutil.revsingle(repo, rev)
    # The dirstate may only be rewritten under the working directory lock.
    wlock = repo.wlock()
    try:
        repo.dirstate.rebuild(target.node(), target.manifest())
    finally:
        wlock.release()
2481 2481
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (source path, source node) or None/False.
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = matcher.rel(abspath)
        if not renamed:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, renamed[0], hex(renamed[1])))
2498 2498
@command('debugrevlog',
    [('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # --dump: one raw line per revision, then exit.
        numrevs = len(r)
        ui.write("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads\n")
        ts = 0
        heads = set()
        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # A full revision is its own delta base for display purposes.
                dbase = rev
            cbase = r.chainbase(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Track current heads incrementally: parents stop being heads.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            # ts / r.end(rev) is Python 2 integer division: the cumulative
            # compression ratio is reported truncated.
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d %11d %5d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, ts / r.end(rev), len(heads)))
        return 0

    # Decode the revlog version/flag word.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.REVLOGNGINLINEDATA:
        flags.append('inline')
    if v & revlog.REVLOGGENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    # Counters for the statistics report below.
    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []

    # Each size accumulator is [min, max, total]; total later becomes avg.
    datasize = [None, 0, 0L]
    fullsize = [None, 0, 0L]
    deltasize = [None, 0, 0L]

    def addsize(size, l):
        # Fold 'size' into the [min, max, total] accumulator 'l'.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Stored as a full snapshot; chain length restarts at 0.
            chainlengths.append(0)
            numfull += 1
            addsize(size, fullsize)
        else:
            chainlengths.append(chainlengths[delta] + 1)
            addsize(size, deltasize)
            # Classify the delta base: previous rev (and which parent it
            # happens to be), p1, p2, or some other revision.
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    # NOTE(review): an empty revlog (numrevs == 0, hence numfull == 0) would
    # raise ZeroDivisionError in the averages below — confirm whether
    # openrevlog can ever return an empty revlog here.
    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    compratio = totalrawsize / totalsize

    # Format-string templates sized to the widest value they must print.
    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total) pair for the templates above.
        return (value, 100 * float(value) / total)

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
        ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                           numdeltas))
2672 2672
@command('debugrevspec',
    [('', 'optimize', None, _('print parsed tree after optimizing'))],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use --verbose to print the parsed tree before and after aliases
    expansion.
    """
    if ui.verbose:
        # Show the raw parse tree, then the alias-expanded tree (only when
        # expansion actually changed it), then optionally the optimized one.
        parsed = revset.parse(expr)[0]
        ui.note(revset.prettyformat(parsed), "\n")
        expanded = revset.findaliases(ui, parsed)
        if expanded != parsed:
            ui.note(revset.prettyformat(expanded), "\n")
        if opts["optimize"]:
            weight, optimized = revset.optimize(expanded, True)
            ui.note("* optimized:\n", revset.prettyformat(optimized), "\n")
    matchfn = revset.match(ui, expr)
    for rev in matchfn(repo, revset.spanset(repo)):
        ui.write("%s\n" % rev)
2694 2694
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.

    Returns 0 on success.
    """

    # Resolve both parents; the second one defaults to the null revision.
    node1 = scmutil.revsingle(repo, rev1).node()
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    wlock = repo.wlock()
    try:
        repo.setparents(node1, node2)
    finally:
        wlock.release()
2713 2713
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, nodates=None, datesort=None):
    """show the contents of the current dirstate"""
    timestr = ""
    showdate = not nodates
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # Each dirstate entry 'ent' is (state, mode, size, mtime).
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if showdate:
            if ent[3] == -1:
                # Pad or slice to locale representation
                locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
                                               time.localtime(0)))
                timestr = 'unset'
                timestr = (timestr[:locale_len] +
                           ' ' * (locale_len - len(timestr)))
            else:
                timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                        time.localtime(ent[3]))
        # 020000 is the octal S_IFLNK bit: the entry records a symlink.
        if ent[1] & 020000:
            mode = 'lnk'
        else:
            # Mask to permission bits and strip the process umask.
            mode = '%3o' % (ent[1] & 0777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
2745 2745
@command('debugsub',
    [('r', 'rev', '',
     _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump each subrepo entry of the requested revision (working copy
    # parent when rev is None) as path/source/revision triples.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        source, revision = state[0], state[1]
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % source)
        ui.write((' revision %s\n') % revision)
2756 2756
@command('debugsuccessorssets',
    [],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # Default renderers use short forms; debug mode switches to full hashes.
    ctx2str = str
    node2str = short
    if ui.debug():
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        for succsset in obsolete.successorssets(repo, ctx.node(), cache):
            if succsset:
                # One indented line per successors set, members space-joined.
                ui.write(' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')
2810 2810
@command('debugwalk', walkopts, _('[OPTION]... [FILE]...'), inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    matcher = scmutil.match(repo[None], pats, opts)
    matched = list(repo.walk(matcher))
    if not matched:
        return
    # Honour ui.slash on platforms where os.sep is not '/'.
    if ui.configbool('ui', 'slash') and os.sep != '/':
        fixpath = lambda fn: util.normpath(fn)
    else:
        fixpath = lambda fn: fn
    # Column widths are sized to the longest absolute and relative paths.
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(path) for path in matched]),
        max([len(matcher.rel(path)) for path in matched]))
    for path in matched:
        line = fmt % (path, fixpath(matcher.rel(path)),
                      matcher.exact(path) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
2827 2827
@command('debugwireargs',
    [('', 'three', '', 'three'),
    ('', 'four', '', 'four'),
    ('', 'five', '', 'five'),
    ] + remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    repo = hg.peer(ui, opts, repopath)
    # Strip the generic remote options; only command-specific ones remain.
    for opt in remoteopts:
        del opts[opt[1]]
    # Forward only the options that were actually given a value.
    args = dict((k, v) for k, v in opts.iteritems() if v)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
2849 2849
@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
     ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
    inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::

       diff may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353         # compare against first parent
          hg diff -r 9353^:9353   # same using revset syntax
          hg diff -r 9353^2:9353  # compare against the second parent

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    # --rev and --change are mutually exclusive ways to pick endpoints
    if revs and change:
        raise util.Abort(
            _('cannot specify --rev and --change at the same time'))
    if change:
        # single changeset against its first parent
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if opts.get('reverse'):
        node1, node2 = node2, node1

    matcher = scmutil.match(repo[node2], pats, opts)
    cmdutil.diffordiffstat(ui, repo, patch.diffopts(ui, opts), node1, node2,
                           matcher, stat=opts.get('stat'),
                           listsubrepos=opts.get('subrepos'))
2935 2935
@command('^export',
    [('o', 'output', '',
      _('print output to file with formatted name'), _('FORMAT')),
     ('', 'switch-parent', None, _('diff against the second parent')),
     ('r', 'rev', [], _('revisions to export'), _('REV')),
     ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       export may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    # positional arguments and -r/--rev are merged into one revision list
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        # default to the working directory's parent
        changesets = ['.']
    revs = scmutil.revrange(repo, changesets)
    if not revs:
        raise util.Abort(_("export requires at least one changeset"))
    if len(revs) <= 1:
        ui.note(_('exporting patch:\n'))
    else:
        ui.note(_('exporting patches:\n'))
    cmdutil.export(repo, revs, template=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffopts(ui, opts))
3016 3016
@command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    if not pats:
        raise util.Abort(_('no files specified'))

    matcher = scmutil.match(repo[None], pats, opts)
    # cmdutil.forget returns (rejected, forgotten); only the rejects matter
    # for the exit code
    rejected = cmdutil.forget(ui, repo, matcher, prefix="",
                              explicitonly=False)[0]
    if rejected:
        return 1
    return 0
3051 3051
@command(
    'graft',
    [('r', 'rev', [], _('revisions to graft'), _('REV')),
     ('c', 'continue', False, _('resume interrupted graft')),
     ('e', 'edit', False, _('invoke editor on commit messages')),
     ('', 'log', None, _('append graft info to log message')),
     ('f', 'force', False, _('force graft')),
     ('D', 'currentdate', False,
      _('record the current date as commit date')),
     ('U', 'currentuser', False,
      _('record the current user as committer'), _('DATE'))]
    + commitopts2 + mergetoolopts + dryrunopts,
    _('[OPTION]... [-r] REV...'))
def graft(ui, repo, *revs, **opts):
    '''copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If --log is specified, log messages will have a comment appended
    of the form::

      (grafted from CHANGESETHASH)

    If --force is specified, revisions will be grafted even if they
    are already ancestors of or have been grafted to the destination.
    This is useful when the revisions have since been backed out.

    If a graft merge results in conflicts, the graft process is
    interrupted so that the current merge can be manually resolved.
    Once all conflicts are addressed, the graft process can be
    continued with the -c/--continue option.

    .. note::

       The -c/--continue option does not reapply earlier options, except
       for --force.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r .

    See :hg:`help revisions` and :hg:`help revsets` for more about
    specifying revisions.

    Returns 0 on successful completion.
    '''

    # positional revisions and -r/--rev are treated uniformly
    revs = list(revs)
    revs.extend(opts['rev'])

    # explicit --user/--date win over --currentuser/--currentdate
    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % util.makedate()

    editor = cmdutil.getcommiteditor(editform='graft', **opts)

    cont = False
    if opts['continue']:
        cont = True
        if revs:
            raise util.Abort(_("can't specify --continue and revisions"))
        # read in unfinished revisions
        try:
            nodes = repo.opener.read('graftstate').splitlines()
            revs = [repo[node].rev() for node in nodes]
        except IOError, inst:
            # a missing graftstate means there is nothing to continue;
            # any other I/O error is propagated
            if inst.errno != errno.ENOENT:
                raise
            raise util.Abort(_("no graft state found, can't continue"))
    else:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        if not revs:
            raise util.Abort(_('no revisions specified'))
        revs = scmutil.revrange(repo, revs)

    # check for merges
    for rev in repo.revs('%ld and merge()', revs):
        ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
        revs.remove(rev)
    if not revs:
        return -1

    # Don't check in the --continue case, in effect retaining --force across
    # --continues. That's because without --force, any revisions we decided to
    # skip would have been filtered out here, so they wouldn't have made their
    # way to the graftstate. With --force, any revisions we would have otherwise
    # skipped would not have been filtered out, and if they hadn't been applied
    # already, they'd have been in the graftstate.
    if not (cont or opts.get('force')):
        # check for ancestors of dest branch
        crev = repo['.'].rev()
        ancestors = repo.changelog.ancestors([crev], inclusive=True)
        # Cannot use x.remove(y) on smart set, this has to be a list.
        # XXX make this lazy in the future
        revs = list(revs)
        # don't mutate while iterating, create a copy
        for rev in list(revs):
            if rev in ancestors:
                ui.warn(_('skipping ancestor revision %s\n') % rev)
                # XXX remove on list is slow
                revs.remove(rev)
        if not revs:
            return -1

        # analyze revs for earlier grafts
        # ids maps both the hex node and any recorded graft source of each
        # requested revision to its local rev number
        ids = {}
        for ctx in repo.set("%ld", revs):
            ids[ctx.hex()] = ctx.rev()
            n = ctx.extra().get('source')
            if n:
                ids[n] = ctx.rev()

        # check ancestors for earlier grafts
        ui.debug('scanning for duplicate grafts\n')

        # walk revisions reachable from '.' but not in the requested set,
        # dropping any requested revision that was already grafted there
        for rev in repo.changelog.findmissingrevs(revs, [crev]):
            ctx = repo[rev]
            n = ctx.extra().get('source')
            if n in ids:
                r = repo[n].rev()
                if r in revs:
                    ui.warn(_('skipping revision %s (already grafted to %s)\n')
                            % (r, rev))
                    revs.remove(r)
                elif ids[n] in revs:
                    ui.warn(_('skipping already grafted revision %s '
                              '(%s also has origin %d)\n') % (ids[n], rev, r))
                    revs.remove(ids[n])
            elif ctx.hex() in ids:
                r = ids[ctx.hex()]
                ui.warn(_('skipping already grafted revision %s '
                          '(was grafted from %d)\n') % (r, rev))
                revs.remove(r)
        if not revs:
            return -1

    wlock = repo.wlock()
    try:
        # 'current' tracks the tip of the graft chain; each successful
        # commit becomes the parent for the next graft
        current = repo['.']
        for pos, ctx in enumerate(repo.set("%ld", revs)):

            ui.status(_('grafting revision %s\n') % ctx.rev())
            if opts.get('dry_run'):
                continue

            # record the ultimate origin of the graft in extra['source']
            source = ctx.extra().get('source')
            if not source:
                source = ctx.hex()
            extra = {'source': source}
            user = ctx.user()
            if opts.get('user'):
                user = opts['user']
            date = ctx.date()
            if opts.get('date'):
                date = opts['date']
            message = ctx.description()
            if opts.get('log'):
                message += '\n(grafted from %s)' % ctx.hex()

            # we don't merge the first commit when continuing
            if not cont:
                # perform the graft merge with p1(rev) as 'ancestor'
                try:
                    # ui.forcemerge is an internal variable, do not document
                    repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                      'graft')
                    stats = mergemod.update(repo, ctx.node(), True, True, False,
                                            ctx.p1().node(),
                                            labels=['local', 'graft'])
                finally:
                    repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
                # report any conflicts
                if stats and stats[3] > 0:
                    # write out state for --continue
                    nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
                    repo.opener.write('graftstate', ''.join(nodelines))
                    raise util.Abort(
                        _("unresolved conflicts, can't continue"),
                        hint=_('use hg resolve and hg graft --continue'))
            else:
                cont = False

            # drop the second merge parent
            repo.setparents(current.node(), nullid)
            repo.dirstate.write()
            # fix up dirstate for copies and renames
            cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())

            # commit
            node = repo.commit(text=message, user=user,
                               date=date, extra=extra, editor=editor)
            if node is None:
                ui.status(_('graft for revision %s is empty\n') % ctx.rev())
            else:
                current = repo[node]
    finally:
        wlock.release()

    # remove state when we complete successfully
    if not opts.get('dry_run'):
        util.unlinkpath(repo.join('graftstate'), ignoremissing=True)

    return 0
3281 3281
@command('grep',
    [('0', 'print0', None, _('end fields with NUL')),
     ('', 'all', None, _('print all revisions that match')),
     ('a', 'text', None, _('treat all files as text')),
     ('f', 'follow', None,
      _('follow changeset history,'
        ' or file history across copies and renames')),
     ('i', 'ignore-case', None, _('ignore case when matching')),
     ('l', 'files-with-matches', None,
      _('print only filenames and revisions that match')),
     ('n', 'line-number', None, _('print matching line numbers')),
     ('r', 'rev', [],
      _('only search files changed within revision range'), _('REV')),
     ('u', 'user', None, _('list the author (long with -v)')),
     ('d', 'date', None, _('list the date (short with -q)')),
     ] + walkopts,
    _('[OPTION]... PATTERN [FILE]...'),
    inferrepo=True)
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the
    working directory. It always prints the revision number in which a
    match appears.

    By default, grep only prints output for the first revision of a
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that
    becomes a non-match, or "+" for a non-match that becomes a match),
    use the --all flag.

    Returns 0 if a match is found, 1 otherwise.
    """
    # multiline matching; -i adds case-insensitivity
    reflags = re.M
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = util.re.compile(pattern, reflags)
    except re.error, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return 1
    # field separator / end-of-line; -0 switches both to NUL
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    # cache filelog objects; the same file is looked up for many revisions
    getfile = util.lrucachefunc(repo.file)

    def matchlines(body):
        # yield (linenum, match-start-col, match-end-col, line text) for
        # every match of regexp in body
        begin = 0
        linenum = 0
        while begin < len(body):
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body) + 1
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        # one matching line; hashable/comparable so difflib can diff the
        # per-revision match lists
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            # equality is on line text only (line numbers may shift)
            return self.line == other.line

        def __iter__(self):
            # yield (text, label) fragments, labeling each match for
            # colorized output
            yield (self.line[:self.colstart], '')
            yield (self.line[self.colstart:self.colend], 'grep.match')
            rest = self.line[self.colend:]
            while rest != '':
                match = regexp.search(rest)
                if not match:
                    yield (rest, '')
                    break
                mstart, mend = match.span()
                yield (rest[:mstart], '')
                yield (rest[mstart:mend], 'grep.match')
                rest = rest[mend:]

    # matches: rev -> filename -> [linestate]; copies: rev -> fn -> source
    matches = {}
    copies = {}
    def grepbody(fn, rev, body):
        # record all matching lines of fn at rev into matches
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # yield ('-'/'+', linestate) pairs describing how the match set
        # changed between parent (a) and child (b)
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    def display(fn, ctx, pstates, states):
        # print matches for fn at ctx; returns True if anything was shown
        rev = ctx.rev()
        datefunc = ui.quiet and util.shortdate or util.datestr
        found = False
        @util.cachefunc
        def binary():
            flog = getfile(fn)
            return util.binary(flog.read(ctx.filenode(fn)))

        # with --all show the +/- delta against the parent, otherwise the
        # current states only
        if opts.get('all'):
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')]

            if opts.get('line_number'):
                cols.append((str(l.linenum), 'grep.linenumber'))
            if opts.get('all'):
                cols.append((change, 'grep.change'))
            if opts.get('user'):
                cols.append((ui.shortuser(ctx.user()), 'grep.user'))
            if opts.get('date'):
                cols.append((datefunc(ctx.date()), 'grep.date'))
            for col, label in cols[:-1]:
                ui.write(col, label=label)
                ui.write(sep, label='grep.sep')
            ui.write(cols[-1][0], label=cols[-1][1])
            if not opts.get('files_with_matches'):
                ui.write(sep, label='grep.sep')
                if not opts.get('text') and binary():
                    ui.write(" Binary file matches")
                else:
                    for s, label in l:
                        ui.write(s, label=label)
            ui.write(eol)
            found = True
            if opts.get('files_with_matches'):
                # -l: one line per file is enough
                break
        return found

    # skip: files already reported (and their copy sources with --follow)
    skip = {}
    revfiles = {}
    matchfn = scmutil.match(repo[None], pats, opts)
    found = False
    follow = opts.get('follow')

    def prep(ctx, fns):
        # walkchangerevs callback: collect match states for ctx and its
        # first parent before the main loop displays them
        rev = ctx.rev()
        pctx = ctx.p1()
        parent = pctx.rev()
        matches.setdefault(rev, {})
        matches.setdefault(parent, {})
        files = revfiles.setdefault(rev, [])
        for fn in fns:
            flog = getfile(fn)
            try:
                fnode = ctx.filenode(fn)
            except error.LookupError:
                continue

            copied = flog.renamed(fnode)
            copy = follow and copied and copied[0]
            if copy:
                copies.setdefault(rev, {})[fn] = copy
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            files.append(fn)

            if fn not in matches[rev]:
                grepbody(fn, rev, flog.read(fnode))

            # also grep the (possibly renamed) parent version so we can
            # diff the match sets
            pfn = copy or fn
            if pfn not in matches[parent]:
                try:
                    fnode = pctx.filenode(pfn)
                    grepbody(pfn, parent, flog.read(fnode))
                except error.LookupError:
                    pass

    for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
        rev = ctx.rev()
        parent = ctx.p1().rev()
        for fn in sorted(revfiles.get(rev, [])):
            states = matches[rev][fn]
            copy = copies.get(rev, {}).get(fn)
            if fn in skip:
                if copy:
                    skip[copy] = True
                continue
            pstates = matches.get(parent, {}).get(copy or fn, [])
            if pstates or states:
                r = display(fn, ctx, pstates, states)
                found = found or r
                if r and not opts.get('all'):
                    # without --all, stop reporting this file (and its
                    # earlier name if it was copied/renamed)
                    skip[fn] = True
                    if copy:
                        skip[copy] = True
        # free per-revision state once displayed
        del matches[rev]
        del revfiles[rev]

    # shell convention: 0 when something matched, 1 otherwise
    return not found
3501 3501
@command('heads',
    [('r', 'rev', '',
      _('show only heads which are descendants of STARTREV'), _('STARTREV')),
     ('t', 'topo', False, _('show topological heads only')),
     ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
     ('c', 'closed', False, _('show normal and closed branch heads')),
    ] + templateopts,
    _('[-ct] [-r STARTREV] [REV]...'))
def heads(ui, repo, *branchrevs, **opts):
    """show branch heads

    With no arguments, show all open branch heads in the repository.
    Branch heads are changesets that have no descendants on the
    same branch. They are where development generally takes place and
    are the usual targets for update and merge operations.

    If one or more REVs are given, only open branch heads on the
    branches associated with the specified changesets are shown. This
    means that you can use :hg:`heads .` to see the heads on the
    currently checked-out branch.

    If -c/--closed is specified, also show branch heads marked closed
    (see :hg:`commit --close-branch`).

    If STARTREV is specified, only those heads that are descendants of
    STARTREV will be displayed.

    If -t/--topo is specified, named branch mechanics will be ignored and only
    topological heads (changesets with no children) will be shown.

    Returns 0 if matching heads are found, 1 if not.
    """

    start = None
    if 'rev' in opts:
        start = scmutil.revsingle(repo, opts['rev'], None).node()

    # collect candidate heads: topological, or per named branch
    if opts.get('topo'):
        found = [repo[n] for n in repo.heads(start)]
    else:
        nodes = []
        for branch in repo.branchmap():
            nodes.extend(repo.branchheads(branch, start, opts.get('closed')))
        found = [repo[n] for n in nodes]

    if branchrevs:
        # restrict to the branches the given revisions live on
        wanted = set(repo[br].branch() for br in branchrevs)
        found = [ctx for ctx in found if ctx.branch() in wanted]

        if opts.get('active'):
            # DEPRECATED: keep only heads that are also DAG heads
            dagheads = repo.heads(start)
            found = [ctx for ctx in found if ctx.node() in dagheads]

        # warn about requested branches that ended up with no heads
        haveheads = set(ctx.branch() for ctx in found)
        missing = wanted - haveheads
        if missing:
            headless = ', '.join(b for b in missing)
            msg = _('no open branch heads found on branches %s')
            if opts.get('rev'):
                msg += _(' (started at %s)') % opts['rev']
            ui.warn((msg + '\n') % headless)

    if not found:
        return 1

    displayer = cmdutil.show_changeset(ui, repo, opts)
    # newest first
    for ctx in sorted(found, key=lambda c: -c.rev()):
        displayer.show(ctx)
    displayer.close()
3572 3572
@command('help',
    [('e', 'extension', None, _('show only help for extensions')),
     ('c', 'command', None, _('show only help for commands')),
     ('k', 'keyword', '', _('show topics matching keyword')),
     ],
    _('[-ec] [TOPIC]'),
    norepo=True)
def help_(ui, name=None, **opts):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that
    topic.

    Returns 0 if successful.
    """

    textwidth = min(ui.termwidth(), 80) - 2

    # minirst sections to keep when rendering the help text
    keep = []
    if ui.verbose:
        keep.append('verbose')
    text = help.help_(ui, name, **opts)

    # first pass tells us whether any verbose-only text was pruned, so the
    # second pass can render the matching "(some details hidden)" note
    formatted, pruned = minirst.format(text, textwidth, keep=keep)
    if 'verbose' in pruned:
        keep.append('omitted')
    else:
        keep.append('notomitted')
    formatted, pruned = minirst.format(text, textwidth, keep=keep)
    ui.write(formatted)
3603 3603
3604 3604
@command('identify|id',
    [('r', 'rev', '',
      _('identify the specified revision'), _('REV')),
     ('n', 'num', None, _('show local revision number')),
     ('i', 'id', None, _('show global revision id')),
     ('b', 'branch', None, _('show branch')),
     ('t', 'tags', None, _('show tags')),
     ('B', 'bookmarks', None, _('show bookmarks')),
    ] + remoteopts,
    _('[-nibtB] [-r REV] [SOURCE]'),
    optionalrepo=True)
def identify(ui, repo, source=None, rev=None,
             num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
    """identify the working copy or specified revision

    Print a summary identifying the repository state at REV using one or
    two parent hash identifiers, followed by a "+" if the working
    directory has uncommitted changes, the branch name (if not default),
    a list of tags, and a list of bookmarks.

    When REV is not given, print a summary of the current state of the
    repository.

    Specifying a path to a repository root or Mercurial bundle will
    cause lookup to operate on that repository/bundle.

    .. container:: verbose

      Examples:

      - generate a build identifier for the working directory::

          hg id --id > build-id.dat

      - find the revision corresponding to a tag::

          hg id -n -r 1.3

      - check the most recent revision of a remote repository::

          hg id -r tip http://selenic.com/hg/

    Returns 0 if successful.
    """

    if not repo and not source:
        raise util.Abort(_("there is no Mercurial repository here "
                           "(.hg not found)"))

    hexfunc = ui.debugflag and hex or short
    # with no explicit field flags, print the default summary
    default = not (num or id or branch or tags or bookmarks)
    output = []
    revs = []

    if source:
        source, branches = hg.parseurl(ui.expandpath(source))
        peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
        repo = peer.local()
        revs, checkout = hg.addbranchrevs(repo, peer, branches, None)

    if not repo:
        # remote-only peer: only the id and bookmarks can be queried
        if num or branch or tags:
            raise util.Abort(
                _("can't query remote revision number, branch, or tags"))
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"

        remoterev = peer.lookup(rev)
        if default or id:
            output = [hexfunc(remoterev)]

        def getbms():
            # bookmarks on the remote that point at remoterev, sorted
            bms = []

            if 'bookmarks' in peer.listkeys('namespaces'):
                hexremoterev = hex(remoterev)
                bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
                       if bmr == hexremoterev]

            return sorted(bms)

        if bookmarks:
            output.extend(getbms())
        elif default and not ui.quiet:
            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(getbms())
            if bm:
                output.append(bm)
    else:
        if not rev:
            # working directory: show parent hash(es), '+' when dirty
            ctx = repo[None]
            parents = ctx.parents()
            changed = ""
            if default or id or num:
                if (util.any(repo.status())
                    or util.any(ctx.sub(s).dirty() for s in ctx.substate)):
                    changed = '+'
            if default or id:
                output = ["%s%s" %
                  ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
            if num:
                output.append("%s%s" %
                  ('+'.join([str(p.rev()) for p in parents]), changed))
        else:
            ctx = scmutil.revsingle(repo, rev)
            if default or id:
                output = [hexfunc(ctx.node())]
            if num:
                output.append(str(ctx.rev()))

        if default and not ui.quiet:
            b = ctx.branch()
            if b != 'default':
                output.append("(%s)" % b)

            # multiple tags for a single parent separated by '/'
            t = '/'.join(ctx.tags())
            if t:
                output.append(t)

            # multiple bookmarks for a single parent separated by '/'
            bm = '/'.join(ctx.bookmarks())
            if bm:
                output.append(bm)
        else:
            # explicit field flags: append each requested item
            if branch:
                output.append(ctx.branch())

            if tags:
                output.extend(ctx.tags())

            if bookmarks:
                output.extend(ctx.bookmarks())

    ui.write("%s\n" % ' '.join(output))
3742 3742
@command('import|patch',
    [('p', 'strip', 1,
      _('directory strip option for patch. This has the same '
        'meaning as the corresponding patch option'), _('NUM')),
    ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ('f', 'force', None,
     _('skip check for outstanding uncommitted changes (DEPRECATED)')),
    ('', 'no-commit', None,
     _("don't commit, just update the working directory")),
    ('', 'bypass', None,
     _("apply patch without touching the working directory")),
    ('', 'partial', None,
     _('commit even if some hunks fail')),
    ('', 'exact', None,
     _('apply patch to the nodes from which it was generated')),
    ('', 'import-branch', None,
     _('use any branch information in patch (implied by --exact)'))] +
    commitopts + commitopts2 + similarityopts,
    _('[OPTION]... PATCH...'))
def import_(ui, repo, patch1=None, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually (unless
    --no-commit is specified).

    Because import first applies changes to the working directory,
    import will abort if there are outstanding changes.

    You can import a patch straight from a mail message. Even patches
    as attachments work (to use the body part, it must have type
    text/plain or text/x-patch). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by :hg:`export`, user and
    description from patch override values from message headers and
    body. Values given on command line with -m/--message and -u/--user
    override these.

    If --exact is specified, import will set the working directory to
    the parent of each patch before applying it, and will abort if the
    resulting changeset has a different ID than the one recorded in
    the patch. This may happen due to character set problems or other
    deficiencies in the text patch format.

    Use --bypass to apply and commit patches directly to the
    repository, not touching the working directory. Without --exact,
    patches will be applied on top of the working directory parent
    revision.

    With -s/--similarity, hg will attempt to discover renames and
    copies in the patch in the same way as :hg:`addremove`.

    Use --partial to ensure a changeset will be created from the patch
    even if some hunks fail to apply. Hunks that fail to apply will be
    written to a <target-file>.rej file. Conflicts can then be resolved
    by hand before :hg:`commit --amend` is run to update the created
    changeset. This flag exists to let people import patches that
    partially apply without losing the associated metadata (author,
    date, description, ...). Note that when none of the hunk applies
    cleanly, :hg:`import --partial` will create an empty changeset,
    importing only the patch metadata.

    To read a patch from standard input, use "-" as the patch name. If
    a URL is specified, the patch will be downloaded from it.
    See :hg:`help dates` for a list of formats valid for -d/--date.

    .. container:: verbose

      Examples:

      - import a traditional patch from a website and detect renames::

          hg import -s 80 http://example.com/bugfix.patch

      - import a changeset from an hgweb server::

          hg import http://www.selenic.com/hg/rev/5ca8c111e9aa

      - import all the patches in an Unix-style mbox::

          hg import incoming-patches.mbox

      - attempt to exactly restore an exported changeset (not always
        possible)::

          hg import --exact proposed-fix.patch

    Returns 0 on success, 1 on partial success (see --partial).
    """

    if not patch1:
        raise util.Abort(_('need at least one patch to import'))

    # fold the required first patch back into a single tuple of all patches
    patches = (patch1,) + patches

    # normalize --date once up front; every changeset created below reuses it
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # --bypass means "do not touch the working directory"; several options
    # only make sense when the working directory is updated
    update = not opts.get('bypass')
    if not update and opts.get('no_commit'):
        raise util.Abort(_('cannot use --no-commit with --bypass'))
    # -s/--similarity must be a percentage in [0, 100], as for addremove
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    if sim and not update:
        raise util.Abort(_('cannot use --similarity with --bypass'))

    # refuse to start while another multi-step operation is unfinished, and
    # (unless --force without --exact) while the working dir is dirty
    if update:
        cmdutil.checkunfinished(repo)
    if (opts.get('exact') or not opts.get('force')) and update:
        cmdutil.bailifchanged(repo)

    base = opts["base"]
    # wlock protects the working directory, lock/tr cover the store; tr is
    # only opened when we will actually commit (i.e. not --no-commit)
    wlock = lock = tr = None
    msgs = []
    ret = 0


    try:
        try:
            wlock = repo.wlock()
            if not opts.get('no_commit'):
                lock = repo.lock()
                tr = repo.transaction('import')
            parents = repo.parents()
            for patchurl in patches:
                if patchurl == '-':
                    ui.status(_('applying patch from stdin\n'))
                    patchfile = ui.fin
                    patchurl = 'stdin'      # for error message
                else:
                    patchurl = os.path.join(base, patchurl)
                    ui.status(_('applying %s\n') % patchurl)
                    patchfile = hg.openpath(ui, patchurl)

                haspatch = False
                # a single file/mbox may contain several patches; apply each
                for hunk in patch.split(patchfile):
                    (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
                                                            parents, opts,
                                                            msgs, hg.clean)
                    if msg:
                        haspatch = True
                        ui.note(msg + '\n')
                    # keep 'parents' pointing at the tip of what we imported
                    # so the next patch stacks on top of it
                    if update or opts.get('exact'):
                        parents = repo.parents()
                    else:
                        parents = [repo[node]]
                    if rej:
                        # --partial path: some hunks were rejected; stop and
                        # report partial success (exit status 1)
                        ui.write_err(_("patch applied partially\n"))
                        ui.write_err(_("(fix the .rej files and run "
                                       "`hg commit --amend`)\n"))
                        ret = 1
                        break

                if not haspatch:
                    raise util.Abort(_('%s: no diffs found') % patchurl)

            if tr:
                tr.close()
            if msgs:
                repo.savecommitmessage('\n* * *\n'.join(msgs))
            return ret
        except: # re-raises
            # wlock.release() indirectly calls dirstate.write(): since
            # we're crashing, we do not want to change the working dir
            # parent after all, so make sure it writes nothing
            repo.dirstate.invalidate()
            raise
    finally:
        if tr:
            tr.release()
        release(lock, wlock)
3922 3922
@command('incoming|in',
    [('f', 'force', None,
      _('run even if remote repository is unrelated')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('', 'bundle', '',
     _('file to store the bundles into'), _('FILE')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmarks', False, _("compare bookmarks")),
    ('b', 'branch', [],
     _('a specific branch you would like to pull'), _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would have been pulled
    if a pull at the time you issued this command.

    For remote repository, using --bundle avoids downloading the
    changesets twice if the incoming is followed by a pull.

    See pull for valid source format details.

    .. container:: verbose

      Examples:

      - show incoming changes with patches and full description::

          hg incoming -vp

      - show incoming changes excluding merges, store a bundle::

          hg in -vpM --bundle incoming.hg
          hg pull incoming.hg

      - briefly list changes inside a bundle::

          hg in changes.hg -T "{desc|firstline}\\n"

    Returns 0 if there are incoming changes, 1 otherwise.
    """
    # --graph: hg._incoming performs the discovery and hands the incoming
    # changesets to the rendering closure below, which draws an ASCII DAG.
    if opts.get('graph'):
        cmdutil.checkunsupportedgraphflags([], opts)
        def display(remote, chlist, displayer):
            dag = cmdutil.graphrevs(remote, chlist, opts)
            wdirparents = [ctx.node() for ctx in repo[None].parents()]
            cmdutil.displaygraph(ui, dag, displayer, wdirparents,
                                 graphmod.asciiedges)

        hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
        return 0

    if opts.get('bundle') and opts.get('subrepos'):
        raise util.Abort(_('cannot combine --bundle and --subrepos'))

    # --bookmarks: compare only the bookmark namespace, no changeset listing
    if opts.get('bookmarks'):
        source, branches = hg.parseurl(ui.expandpath(source),
                                       opts.get('branch'))
        remote = hg.peer(repo, opts, source)
        if 'bookmarks' not in remote.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(source))
        return bookmarks.diff(ui, repo, remote)

    # publish the expanded source to subrepos for the duration of the call
    repo._subtoppath = ui.expandpath(source)
    try:
        return hg.incoming(ui, repo, source, opts)
    finally:
        del repo._subtoppath
3995 3995
3996 3996
@command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
         norepo=True)
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it will be created.

    If no directory is given, the current directory is used.

    It is possible to specify an ``ssh://`` URL as the destination.
    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    # Resolve path aliases first; peer(create=True) does the actual
    # repository creation (locally or via the remote protocol).
    target = ui.expandpath(dest)
    hg.peer(ui, opts, target, create=True)
4013 4013
@command('locate',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
    ] + walkopts,
    _('[OPTION]... [PATTERN]...'))
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print files under Mercurial control in the working directory whose
    names match the given patterns.

    By default, this command searches all directories in the working
    directory. To search just the current directory and its
    subdirectories, use "--include .".

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    If you want to feed the output of this command into the "xargs"
    command, use the -0 option to both this command and "xargs". This
    will avoid the problem of "xargs" treating single filenames that
    contain whitespace as multiple filenames.

    Returns 0 if a match is found, 1 otherwise.
    """
    # With -0/--print0, terminate entries with NUL so xargs can consume them.
    if opts.get('print0'):
        sep = '\0'
    else:
        sep = '\n'
    node = scmutil.revsingle(repo, opts.get('rev'), None).node()

    found = 1  # exit status; flipped to 0 on the first match
    ctx = repo[node]
    matcher = scmutil.match(ctx, pats, opts, default='relglob')
    matcher.bad = lambda x, y: False  # suppress complaints about bad names

    for fname in ctx.matches(matcher):
        if opts.get('fullpath'):
            ui.write(repo.wjoin(fname), sep)
        else:
            rel = None
            if pats:
                rel = matcher.rel(fname)
            # fall back to the repo-relative name when no patterns were given
            # (or the relative form is empty)
            ui.write(rel or fname, sep)
        found = 0

    return found
4056 4056
@command('^log|history',
    [('f', 'follow', None,
     _('follow changeset history, or file history across copies and renames')),
    ('', 'follow-first', None,
     _('only follow the first parent of merge changesets (DEPRECATED)')),
    ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
    ('C', 'copies', None, _('show copied files')),
    ('k', 'keyword', [],
     _('do case-insensitive search for a given text'), _('TEXT')),
    ('r', 'rev', [], _('show the specified revision or range'), _('REV')),
    ('', 'removed', None, _('include revisions where files were removed')),
    ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
    ('u', 'user', [], _('revisions committed by user'), _('USER')),
    ('', 'only-branch', [],
     _('show only changesets within the given named branch (DEPRECATED)'),
     _('BRANCH')),
    ('b', 'branch', [],
     _('show changesets within the given named branch'), _('BRANCH')),
    ('P', 'prune', [],
     _('do not display revision or any of its ancestors'), _('REV')),
    ] + logopts + walkopts,
    _('[OPTION]... [FILE]'),
    inferrepo=True)
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors or descendants of the starting revision.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    With --graph the revisions are shown as an ASCII art DAG with the most
    recent changeset at the top.
    'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
    and '+' represents a fork where the changeset from the lines below is a
    parent of the 'o' merge on the same line.

    .. note::

       log -p/--patch may generate unexpected diff output for merge
       changesets, as it will only compare the merge changeset against
       its first parent. Also, only files different from BOTH parents
       will appear in files:.

    .. note::

       for performance reasons, log FILE may omit duplicate changes
       made on branches and will not show deletions. To see all
       changes including duplicates and deletions, use the --removed
       switch.

    .. container:: verbose

      Some examples:

      - changesets with full descriptions and file lists::

          hg log -v

      - changesets ancestral to the working directory::

          hg log -f

      - last 10 commits on the current branch::

          hg log -l 10 -b .

      - changesets showing all modifications of a file, including removals::

          hg log --removed file.c

      - all changesets that touch a directory, with diffs, excluding merges::

          hg log -Mp lib/

      - all revision numbers that match a keyword::

          hg log -k bug --template "{rev}\\n"

      - list available log templates::

          hg log -T list

      - check if a given changeset is included is a tagged release::

          hg log -r "a21ccf and ancestor(1.9)"

      - find all changesets by some user in a date range::

          hg log -k alice -d "may 2008 to jul 2008"

      - summary of all changesets after the last tag::

          hg log -r "last(tagged())::" --template "{desc|firstline}\\n"

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revisions` and :hg:`help revsets` for more about
    specifying revisions.

    See :hg:`help templates` for more about pre-packaged styles and
    specifying custom templates.

    Returns 0 on success.
    """
    # --graph output is handled entirely by graphlog
    if opts.get('graph'):
        return cmdutil.graphlog(ui, repo, *pats, **opts)

    revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
    limit = cmdutil.loglimit(opts)

    # Rename lookups are only paid for under --copies; when -r is given,
    # bound the search by the highest requested revision.
    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
    shown = 0
    for rev in revs:
        if shown == limit:
            break
        ctx = repo[rev]
        copies = None
        # rev 0 is skipped: there is no earlier revision a copy could
        # originate from
        if getrenamed is not None and rev:
            copies = []
            for srcfn in ctx.files():
                renamed = getrenamed(srcfn, rev)
                if renamed:
                    copies.append((srcfn, renamed[0]))
        if filematcher:
            revmatchfn = filematcher(ctx.rev()) or None
        else:
            revmatchfn = None
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        # only count revisions the displayer actually emitted toward --limit
        if displayer.flush(rev):
            shown += 1

    displayer.close()
4206 4206
@command('manifest',
    [('r', 'rev', '', _('revision to display'), _('REV')),
     ('', 'all', False, _("list files from all revisions"))],
    _('[-r REV]'))
def manifest(ui, repo, node=None, rev=None, **opts):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    If option --all is specified, the list of all files from all revisions
    is printed. This includes deleted and renamed files.

    Returns 0 on success.
    """

    fm = ui.formatter('manifest', opts)

    if opts.get('all'):
        if rev or node:
            raise util.Abort(_("can't specify a revision with --all"))

        # Walk the store directly: every non-empty "data/<path>.i" revlog
        # corresponds to a file tracked at some point in history.
        prefix = "data/"
        suffix = ".i"
        plen = len(prefix)
        slen = len(suffix)
        paths = []
        lock = repo.lock()
        try:
            for fn, b, size in repo.store.datafiles():
                if size != 0 and fn.startswith(prefix) and fn.endswith(suffix):
                    paths.append(fn[plen:-slen])
        finally:
            lock.release()
        for p in paths:
            fm.startitem()
            fm.write("path", '%s\n', p)
        fm.end()
        return

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not node:
        node = rev

    # manifest flag -> display glyph and unix-style mode for -v output
    char = {'l': '@', 'x': '*', '': ''}
    mode = {'l': '644', 'x': '755', '': '644'}
    ctx = scmutil.revsingle(repo, node)
    mf = ctx.manifest()
    for path in ctx:
        fm.startitem()
        flags = ctx[path].flags()
        fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[path]))
        fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[flags],
                     char[flags])
        fm.write('path', '%s\n', path)
    fm.end()
4268 4268
@command('^merge',
    [('f', 'force', None,
      _('force a merge including outstanding changes (DEPRECATED)')),
    ('r', 'rev', '', _('revision to merge'), _('REV')),
    ('P', 'preview', None,
     _('review revisions to merge (no merge is performed)'))
     ] + mergetoolopts,
    _('[-P] [-f] [[-r] REV]'))
def merge(ui, repo, node=None, **opts):
    """merge working directory with another revision

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files. See :hg:`help merge-tools` for options.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge with must be provided.

    :hg:`resolve` must be used to resolve unresolved files.

    To undo an uncommitted merge, use :hg:`update --clean .` which
    will check out a clean copy of the original merge parent, losing
    all changes.

    Returns 0 on success, 1 if there are unresolved files.
    """

    # the revision to merge with may come positionally or via -r, not both
    if opts.get('rev') and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = opts.get('rev')

    if node:
        node = scmutil.revsingle(repo, node).node()

    # no revision given and a bookmark is active: merge with the other head
    # of that bookmark, but only if it has exactly two heads
    if not node and repo._bookmarkcurrent:
        bmheads = repo.bookmarkheads(repo._bookmarkcurrent)
        curhead = repo[repo._bookmarkcurrent].node()
        if len(bmheads) == 2:
            if curhead == bmheads[0]:
                node = bmheads[1]
            else:
                node = bmheads[0]
        elif len(bmheads) > 2:
            raise util.Abort(_("multiple matching bookmarks to merge - "
                "please merge with an explicit rev or bookmark"),
                hint=_("run 'hg heads' to see all heads"))
        elif len(bmheads) <= 1:
            raise util.Abort(_("no matching bookmark to merge - "
                "please merge with an explicit rev or bookmark"),
                hint=_("run 'hg heads' to see all heads"))

    # no revision and no active bookmark: pick the other non-bookmarked head
    # of the current branch, if it is unambiguous
    if not node and not repo._bookmarkcurrent:
        branch = repo[None].branch()
        bheads = repo.branchheads(branch)
        # heads carrying bookmarks are not candidates for implicit merge
        nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]

        if len(nbhs) > 2:
            raise util.Abort(_("branch '%s' has %d heads - "
                               "please merge with an explicit rev")
                             % (branch, len(bheads)),
                             hint=_("run 'hg heads .' to see heads"))

        parent = repo.dirstate.p1()
        if len(nbhs) <= 1:
            # fewer than two candidates: explain exactly why we cannot
            # choose, in decreasing order of specificity
            if len(bheads) > 1:
                raise util.Abort(_("heads are bookmarked - "
                                   "please merge with an explicit rev"),
                                 hint=_("run 'hg heads' to see all heads"))
            if len(repo.heads()) > 1:
                raise util.Abort(_("branch '%s' has one head - "
                                   "please merge with an explicit rev")
                                 % branch,
                                 hint=_("run 'hg heads' to see all heads"))
            msg, hint = _('nothing to merge'), None
            if parent != repo.lookup(branch):
                hint = _("use 'hg update' instead")
            raise util.Abort(msg, hint=hint)

        if parent not in bheads:
            raise util.Abort(_('working directory not at a head revision'),
                             hint=_("use 'hg update' or merge with an "
                                    "explicit revision"))
        # merge with whichever candidate head is not the working dir parent
        if parent == nbhs[0]:
            node = nbhs[-1]
        else:
            node = nbhs[0]

    # -P/--preview: list what would be merged and exit without merging
    if opts.get('preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo.lookup('.')
        p2 = repo.lookup(node)
        nodes = repo.changelog.findmissing(common=[p1], heads=[p2])

        displayer = cmdutil.show_changeset(ui, repo, opts)
        for node in nodes:
            displayer.show(repo[node])
        displayer.close()
        return 0

    try:
        # ui.forcemerge is an internal variable, do not document
        repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
        return hg.merge(repo, node, force=opts.get('force'))
    finally:
        # always clear the internal override, even if the merge aborted
        ui.setconfig('ui', 'forcemerge', '', 'merge')
4385 4385
@command('outgoing|out',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'), _('REV')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('B', 'bookmarks', False, _('compare bookmarks')),
    ('b', 'branch', [], _('a specific branch you would like to push'),
     _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """
    # --graph: run discovery ourselves, then draw the missing changesets
    # as an ASCII DAG; the outgoing hooks fire in both branches.
    if opts.get('graph'):
        cmdutil.checkunsupportedgraphflags([], opts)
        missing, remote = hg._outgoing(ui, repo, dest, opts)
        if not missing:
            cmdutil.outgoinghooks(ui, repo, remote, opts, missing)
            return

        dag = cmdutil.graphrevs(repo, missing, opts)
        displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
        wdirparents = [ctx.node() for ctx in repo[None].parents()]
        cmdutil.displaygraph(ui, dag, displayer, wdirparents,
                             graphmod.asciiedges)
        cmdutil.outgoinghooks(ui, repo, remote, opts, missing)
        return 0

    # --bookmarks: compare only the bookmark namespace (note the argument
    # order to bookmarks.diff: remote first, since we are pushing)
    if opts.get('bookmarks'):
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        remote = hg.peer(repo, opts, dest)
        if 'bookmarks' not in remote.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(dest))
        return bookmarks.diff(ui, remote, repo)

    # publish the expanded destination to subrepos for the duration
    repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
    try:
        return hg.outgoing(ui, repo, dest, opts)
    finally:
        del repo._subtoppath
4437 4437
@command('parents',
    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
    ] + templateopts,
    _('[-r REV] [FILE]'),
    inferrepo=True)
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    Returns 0 on success.
    """

    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    if file_:
        # a single explicit filename is required; no patterns allowed
        matcher = scmutil.match(ctx, (file_,), opts)
        if matcher.anypats() or len(matcher.files()) != 1:
            raise util.Abort(_('can only specify an explicit filename'))
        file_ = matcher.files()[0]
        # collect the file's node in each parent where it exists
        filenodes = []
        for pctx in ctx.parents():
            if not pctx:
                continue
            try:
                filenodes.append(pctx.filenode(file_))
            except error.LookupError:
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        # map each file node to the changeset that introduced it
        nodes = [repo.filectx(file_, fileid=fn).node() for fn in filenodes]
    else:
        nodes = [pctx.node() for pctx in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in nodes:
        if node != nullid:
            displayer.show(repo[node])
    displayer.close()
4484 4484
@command('paths', [], _('[NAME]'), optionalrepo=True)
def paths(ui, repo, search=None):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning. When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both. When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``. Note that
    ``default`` and ``default-push`` apply to all inbound (e.g.
    :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
    :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    pathitems = ui.configitems("paths")
    if search:
        # lookup mode: print the (password-masked) location of NAME, or
        # fail with status 1 if no such alias is configured
        for name, location in pathitems:
            if name == search:
                ui.status("%s\n" % util.hidepassword(location))
                return
        if not ui.quiet:
            ui.warn(_("not found!\n"))
        return 1
    # listing mode: every configured alias; -q prints names only
    for name, location in pathitems:
        if ui.quiet:
            ui.write("%s\n" % name)
        else:
            ui.write("%s = %s\n" % (name, util.hidepassword(location)))
4528 4528
@command('phase',
    [('p', 'public', False, _('set changeset phase to public')),
     ('d', 'draft', False, _('set changeset phase to draft')),
     ('s', 'secret', False, _('set changeset phase to secret')),
     ('f', 'force', False, _('allow to move boundary backward')),
     ('r', 'rev', [], _('target revision'), _('REV')),
    ],
    _('[-p|-d|-s] [-f] [-r] REV...'))
def phase(ui, repo, *revs, **opts):
    """set or show the current phase name

    With no argument, show the phase name of specified revisions.

    With one of -p/--public, -d/--draft or -s/--secret, change the
    phase value of the specified revisions.

    Unless -f/--force is specified, :hg:`phase` won't move changeset from a
    lower phase to an higher phase. Phases are ordered as follows::

        public < draft < secret

    Returns 0 on success, 1 if no phases were changed or some could not
    be changed.
    """
    # search for a unique phase argument
    targetphase = None
    for idx, name in enumerate(phases.phasenames):
        if opts[name]:
            if targetphase is not None:
                raise util.Abort(_('only one phase can be specified'))
            targetphase = idx

    # look for specified revision: positional args plus any -r options
    revs = list(revs)
    revs.extend(opts['rev'])
    if not revs:
        raise util.Abort(_('no revisions specified'))

    revs = scmutil.revrange(repo, revs)

    lock = None
    ret = 0
    if targetphase is None:
        # display: no phase flag given, just print "<rev>: <phase>" lines
        for r in revs:
            ctx = repo[r]
            ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
    else:
        # changing phases requires the repo lock and a transaction
        tr = None
        lock = repo.lock()
        try:
            tr = repo.transaction("phase")
            # set phase
            if not revs:
                raise util.Abort(_('empty revision set'))
            nodes = [repo[r].node() for r in revs]
            # snapshot the per-rev phase data so we can count what
            # actually changed after the boundary moves
            olddata = repo._phasecache.getphaserevs(repo)[:]
            phases.advanceboundary(repo, tr, targetphase, nodes)
            if opts['force']:
                # --force also allows moving the boundary backward
                # (e.g. public -> draft)
                phases.retractboundary(repo, tr, targetphase, nodes)
            tr.close()
        finally:
            if tr is not None:
                tr.release()
            lock.release()
        # moving revision from public to draft may hide them
        # We have to check result on an unfiltered repository
        unfi = repo.unfiltered()
        newdata = repo._phasecache.getphaserevs(unfi)
        changes = sum(o != newdata[i] for i, o in enumerate(olddata))
        cl = unfi.changelog
        # revisions that did not reach the requested phase (would have
        # needed --force)
        rejected = [n for n in nodes
                    if newdata[cl.rev(n)] < targetphase]
        if rejected:
            ui.warn(_('cannot move %i changesets to a higher '
                      'phase, use --force\n') % len(rejected))
            ret = 1
        if changes:
            msg = _('phase changed for %i changesets\n') % changes
            if ret:
                # partial success: make the count visible despite the error
                ui.status(msg)
            else:
                ui.note(msg)
        else:
            ui.warn(_('no phases changed\n'))
            ret = 1
    return ret
4616 4616
def postincoming(ui, repo, modheads, optupdate, checkout):
    """Handle the aftermath of incoming changesets (pull/unbundle).

    modheads is the number of heads added or modified; 0 means nothing
    arrived and there is nothing to do.  When optupdate is true, try to
    update the working directory (to checkout if given) and move the
    active bookmark along, returning hg.update()'s result, or 0 if the
    update was refused.  Otherwise print a hint about heads/merge/update
    and return None.
    """
    if modheads == 0:
        return
    if optupdate:
        # pick the update target, taking the active bookmark into account
        checkout, movemarkfrom = bookmarks.calculateupdate(ui, repo, checkout)
        try:
            ret = hg.update(repo, checkout)
        except util.Abort, inst:
            # update refused (e.g. crossing branches); warn and give up
            ui.warn(_("not updating: %s\n") % str(inst))
            if inst.hint:
                ui.warn(_("(%s)\n") % inst.hint)
            return 0
        if not ret and not checkout:
            # clean update with no explicit target: advance the current
            # bookmark to the new working directory parent if possible
            if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
                ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
        return ret
    if modheads > 1:
        # suggest a follow-up command based on how the new heads relate
        # to the current branch
        currentbranchheads = len(repo.branchheads())
        if currentbranchheads == modheads:
            ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
        elif currentbranchheads > 1:
            ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
                        "merge)\n"))
        else:
            ui.status(_("(run 'hg heads' to see heads)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
4644 4644
@command('^pull',
    [('u', 'update', None,
     _('update to new branch head if changesets were pulled')),
    ('f', 'force', None, _('run even when remote repository is unrelated')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
    ('b', 'branch', [], _('a specific branch you would like to pull'),
     _('BRANCH')),
    ] + remoteopts,
    _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Returns 0 on success, 1 if an update had unresolved files.
    """
    # expand path aliases and split off any #branch fragment
    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    other = hg.peer(repo, opts, source)
    try:
        ui.status(_('pulling from %s\n') % util.hidepassword(source))
        revs, checkout = hg.addbranchrevs(repo, other, branches,
                                          opts.get('rev'))

        remotebookmarks = other.listkeys('bookmarks')

        if opts.get('bookmark'):
            # translate each -B name into the remote node it points at,
            # so the bookmarked changesets get pulled too
            if not revs:
                revs = []
            for b in opts['bookmark']:
                if b not in remotebookmarks:
                    raise util.Abort(_('remote bookmark %s not found!') % b)
                revs.append(remotebookmarks[b])

        if revs:
            try:
                # resolve symbolic revisions to nodes on the remote
                revs = [other.lookup(rev) for rev in revs]
            except error.CapabilityError:
                err = _("other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise util.Abort(err)

        modheads = repo.pull(other, heads=revs, force=opts.get('force'))
        bookmarks.updatefromremote(ui, repo, remotebookmarks, source)
        if checkout:
            checkout = str(repo.changelog.rev(other.lookup(checkout)))
        # expose the pull source while postincoming runs; presumably read
        # by subrepo code during the update - TODO confirm
        repo._subtoppath = source
        try:
            ret = postincoming(ui, repo, modheads, opts.get('update'), checkout)

        finally:
            del repo._subtoppath

        # update specified bookmarks
        if opts.get('bookmark'):
            marks = repo._bookmarks
            for b in opts['bookmark']:
                # explicit pull overrides local bookmark if any
                ui.status(_("importing bookmark %s\n") % b)
                marks[b] = repo[remotebookmarks[b]].node()
            marks.write()
    finally:
        other.close()
    return ret
4722 4722
@command('^push',
    [('f', 'force', None, _('force push')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'),
     _('REV')),
    ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
    ('b', 'branch', [],
     _('a specific branch you would like to push'), _('BRANCH')),
    ('', 'new-branch', False, _('allow pushing a new branch')),
    ] + remoteopts,
    _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changesets from the local repository to the specified
    destination.

    This operation is symmetrical to pull: it is identical to a pull
    in the destination repository from the current one.

    By default, push will not allow creation of new heads at the
    destination, since multiple heads would make it unclear which head
    to use. In this situation, it is recommended to pull and merge
    before pushing.

    Use --new-branch if you want to allow push to create a new named
    branch that is not present at the destination. This allows you to
    only create a new branch without forcing other changes.

    .. note::

      Extra care should be taken with the -f/--force option,
      which will push all new heads on all branches, an action which will
      almost always cause confusion for collaborators.

    If -r/--rev is used, the specified revision and all its ancestors
    will be pushed to the remote repository.

    If -B/--bookmark is used, the specified bookmarked revision, its
    ancestors, and the bookmark will be pushed to the remote
    repository.

    Please see :hg:`help urls` for important details about ``ssh://``
    URLs. If DESTINATION is omitted, a default path will be used.

    Returns 0 if push was successful, 1 if nothing to push.
    """

    if opts.get('bookmark'):
        ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    # expand the destination alias; ``default-push`` wins over ``default``
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest, opts.get('branch'))
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    try:
        other = hg.peer(repo, opts, dest)
    except error.RepoError:
        if dest == "default-push":
            raise util.Abort(_("default repository not configured!"),
                    hint=_('see the "path" section in "hg help config"'))
        else:
            raise

    if revs:
        revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]

    # expose the push destination while subrepos push; presumably read by
    # subrepo code - TODO confirm
    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            result = c.sub(s).push(opts)
            if result == 0:
                # NOTE(review): a subrepo push returning 0 aborts the whole
                # push; 'not result' is then truthy - confirm intended
                return not result
    finally:
        del repo._subtoppath
    result = repo.push(other, opts.get('force'), revs=revs,
                       newbranch=opts.get('new_branch'))

    # invert: this command's exit-code convention is 0 on success
    # (see docstring)
    result = not result

    if opts.get('bookmark'):
        bresult = bookmarks.pushtoremote(ui, repo, other, opts['bookmark'])
        if bresult == 2:
            return 2
        if not result and bresult:
            result = 2

    return result
4822 4822
@command('recover', [])
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    # nothing to recover: signal failure to the caller
    if not repo.recover():
        return 1
    # recovery succeeded; verify the repository and return its result
    return hg.verify(repo)
4838 4838
@command('^remove|rm',
    [('A', 'after', None, _('record delete for missing files')),
    ('f', 'force', None,
     _('remove (and delete) file even if added or modified')),
    ] + walkopts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the current branch.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`. To undo added
    files, see :hg:`forget`.

    .. container:: verbose

      -A/--after can be used to remove only files that have already
      been deleted, -f/--force can be used to force deletion, and -Af
      can be used to remove files from the next revision without
      deleting them from the working directory.

      The following table details the behavior of remove for different
      file states (columns) and option combinations (rows). The file
      states are Added [A], Clean [C], Modified [M] and Missing [!]
      (as reported by :hg:`status`). The actions are Warn, Remove
      (from branch) and Delete (from disk):

      ========= == == == ==
      opt/state A  C  M  !
      ========= == == == ==
      none      W  RD W  R
      -f        R  RD RD R
      -A        W  W  W  R
      -Af       R  R  R  R
      ========= == == == ==

      Note that remove never deletes files in Added [A] state from the
      working directory, not even if option --force is specified.

    Returns 0 on success, 1 if any warnings encountered.
    """

    ret = 0
    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    # warn about failure to delete explicit files/dirs
    wctx = repo[None]
    for f in m.files():
        if f in repo.dirstate or f in wctx.dirs():
            continue
        if os.path.exists(m.rel(f)):
            if os.path.isdir(m.rel(f)):
                ui.warn(_('not removing %s: no tracked files\n') % m.rel(f))
            else:
                ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
        # missing files will generate a warning elsewhere
        ret = 1

    # compute the files to forget; the local was previously named 'list',
    # which shadowed the builtin
    if force:
        remlist = modified + deleted + clean + added
    elif after:
        remlist = deleted
        for f in modified + added + clean:
            ui.warn(_('not removing %s: file still exists\n') % m.rel(f))
            ret = 1
    else:
        remlist = deleted + clean
        for f in modified:
            ui.warn(_('not removing %s: file is modified (use -f'
                      ' to force removal)\n') % m.rel(f))
            ret = 1
        for f in added:
            ui.warn(_('not removing %s: file has been marked for add'
                      ' (use forget to undo)\n') % m.rel(f))
            ret = 1

    for f in sorted(remlist):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    wlock = repo.wlock()
    try:
        if not after:
            for f in remlist:
                if f in added:
                    continue # we never unlink added files on remove
                util.unlinkpath(repo.wjoin(f), ignoremissing=True)
        repo[None].forget(remlist)
    finally:
        wlock.release()

    return ret
4939 4939
@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    # a rename is just a copy with the source marked for removal;
    # cmdutil.copy does all the work under the working-directory lock
    lck = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        lck.release()
4966 4966
@command('resolve',
    [('a', 'all', None, _('select all unresolved files')),
    ('l', 'list', None, _('list state of files needing merge')),
    ('m', 'mark', None, _('mark files as resolved')),
    ('u', 'unmark', None, _('mark files as unresolved')),
    ('n', 'no-status', None, _('hide status prefix'))]
    + mergetoolopts + walkopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def resolve(ui, repo, *pats, **opts):
    """redo merges or set/view the merge status of files

    Merges with unresolved conflicts are often the result of
    non-interactive merging using the ``internal:merge`` configuration
    setting, or a command-line merge tool like ``diff3``. The resolve
    command is used to manage the files involved in a merge, after
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents). See :hg:`help
    merge-tools` for information on configuring merge tools.

    The resolve command can be used in the following ways:

    - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
      files, discarding any previous merge attempts. Re-merging is not
      performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.

    Note that Mercurial will not let you commit files with unresolved
    merge conflicts. You must use :hg:`resolve -m ...` before you can
    commit after a conflicting merge.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

    # unpack the option flags; -l maps to 'show', -n to 'nostatus'
    all, mark, unmark, show, nostatus = \
        [opts.get(o) for o in 'all mark unmark list no_status'.split()]

    # the mode flags are mutually exclusive
    if (show and (mark or unmark)) or (mark and unmark):
        raise util.Abort(_("too many options specified"))
    if pats and all:
        raise util.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise util.Abort(_('no files or directories specified'),
                         hint=('use --all to remerge all files'))

    wlock = repo.wlock()
    try:
        ms = mergemod.mergestate(repo)

        if not ms.active() and not show:
            raise util.Abort(
                _('resolve command not applicable when not merging'))

        m = scmutil.match(repo[None], pats, opts)
        ret = 0
        didwork = False

        # walk the files recorded in the merge state, filtered by any
        # file patterns given on the command line
        for f in ms:
            if not m(f):
                continue

            didwork = True

            if show:
                if nostatus:
                    ui.write("%s\n" % f)
                else:
                    ui.write("%s %s\n" % (ms[f].upper(), f),
                             label='resolve.' +
                             {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
            elif mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                wctx = repo[None]

                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                util.copyfile(a, a + ".resolve")

                try:
                    # resolve file
                    ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                 'resolve')
                    if ms.resolve(f, wctx):
                        # nonzero result: the re-merge left conflicts
                        ret = 1
                finally:
                    # always clear the temporary forcemerge override
                    ui.setconfig('ui', 'forcemerge', '', 'resolve')
                ms.commit()

                # replace filemerge's .orig file with our resolve file
                util.rename(a + ".resolve", a + ".orig")

        ms.commit()

        if not didwork and pats:
            ui.warn(_("arguments do not match paths that need resolving\n"))

    finally:
        wlock.release()

    # Nudge users into finishing an unfinished operation. We don't print
    # this with the list/show operation because we want list/show to remain
    # machine readable.
    if not list(ms.unresolved()) and not show:
        ui.status(_('(no more unresolved files)\n'))

    return ret
5089 5089
@command('revert',
    [('a', 'all', None, _('revert all changes when no arguments given')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
    ('C', 'no-backup', None, _('do not save backup copies of files')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [-r REV] [NAME]...'))
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::

       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel an uncommitted merge (and lose your changes),
       use :hg:`update --clean .`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success.
    """

    if opts.get("date"):
        if opts.get("rev"):
            raise util.Abort(_("you can't specify a revision and a date"))
        # --date is translated into the tipmost revision matching it
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        # revert after merge is a trap for new users (issue2915)
        raise util.Abort(_('uncommitted merge with no revision specified'),
                         hint=_('use "hg update" or see "hg help revert"'))

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    if not pats and not opts.get('all'):
        # refuse to run without a file list or --all; pick the hint that
        # best matches the working directory state
        msg = _("no files or directories specified")
        if p2 != nullid:
            hint = _("uncommitted merge, use --all to discard all changes,"
                     " or 'hg update -C .' to abort the merge")
            raise util.Abort(msg, hint=hint)
        dirty = util.any(repo.status())
        node = ctx.node()
        if node != parent:
            # reverting to a non-parent revision
            if dirty:
                hint = _("uncommitted changes, use --all to discard all"
                         " changes, or 'hg update %s' to update") % ctx.rev()
            else:
                hint = _("use --all to revert all files,"
                         " or 'hg update %s' to update") % ctx.rev()
        elif dirty:
            hint = _("uncommitted changes, use --all to discard all changes")
        else:
            hint = _("use --all to revert all files")
        raise util.Abort(msg, hint=hint)

    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
5163 5163
@command('rollback', dryrunopts +
         [('f', 'force', False, _('ignore safety measures'))])
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

      To avoid permanent data loss, rollback will refuse to rollback a
      commit transaction if it isn't checked out. Use --force to
      override this protection.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    # all the heavy lifting lives in localrepo.rollback
    dryrun = opts.get('dry_run')
    force = opts.get('force')
    return repo.rollback(dryrun=dryrun, force=force)
5208 5208
@command('root', [])
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    Returns 0 on success.
    """
    # the repository object already knows its root directory
    ui.write("%s\n" % repo.root)
5218 5218
@command('^serve',
    [('A', 'accesslog', '', _('name of access log file to write to'),
     _('FILE')),
    ('d', 'daemon', None, _('run server in background')),
    ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('NUM')),
    ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
    # use string type, then we can check if something was passed
    ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
    ('a', 'address', '', _('address to listen on (default: all interfaces)'),
     _('ADDR')),
    ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
     _('PREFIX')),
    ('n', 'name', '',
     _('name to show in web pages (default: working directory)'), _('NAME')),
    ('', 'web-conf', '',
     _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')),
    ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
     _('FILE')),
    ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
    ('', 'stdio', None, _('for remote clients')),
    ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
    ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
    ('', 'style', '', _('template style to use'), _('STYLE')),
    ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
    ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
    _('[OPTION]...'),
    optionalrepo=True)
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it by default. Set the ``web.allow_push``
    option to ``*`` to allow everybody to push to the server. You
    should use a real web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    # --stdio and --cmdserver select exclusive server modes
    if opts["stdio"] and opts["cmdserver"]:
        raise util.Abort(_("cannot use --stdio with --cmdserver"))

    if opts["stdio"]:
        # serve via sshserver over stdin/stdout; requires a local repo
        if repo is None:
            raise error.RepoError(_("there is no Mercurial repository here"
                                    " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    if opts["cmdserver"]:
        # command-server mode for local API clients
        s = commandserver.server(ui, repo, opts["cmdserver"])
        return s.serve()

    # this way we can check if something was given in the command-line
    if opts.get('port'):
        opts['port'] = util.getport(opts.get('port'))

    baseui = repo and repo.baseui or ui
    # mirror relevant command-line options into the [web] config section
    # so the hgweb application picks them up
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog certificate encoding")
    for o in optlist.split():
        val = opts.get(o, '')
        if val in (None, ''): # should check against default options instead
            continue
        baseui.setconfig("web", o, val, 'serve')
        if repo and repo.ui != baseui:
            repo.ui.setconfig("web", o, val, 'serve')

    # 'o' ends up as either an hgweb config file path or the repo itself
    o = opts.get('web_conf') or opts.get('webdir_conf')
    if not o:
        if not repo:
            raise error.RepoError(_("there is no Mercurial repository"
                                    " here (.hg not found)"))
        o = repo

    app = hgweb.hgweb(o, baseui=baseui)
    service = httpservice(ui, app, opts)
    cmdutil.service(opts, initfn=service.init, runfn=service.run)
5310 5310
class httpservice(object):
    """Adapter that runs a hgweb WSGI application under cmdutil.service().

    Provides the ``init``/``run`` callback pair expected by
    cmdutil.service: ``init`` creates the HTTP server and announces the
    listen address; ``run`` blocks serving requests forever.
    """
    def __init__(self, ui, app, opts):
        self.ui = ui
        self.app = app    # WSGI application (hgweb/hgwebdir)
        self.opts = opts  # parsed `hg serve` command-line options

    def init(self):
        # install SIGTERM/SIGHUP handlers so the daemon shuts down cleanly
        util.setsignalhandler()
        self.httpd = hgweb_server.create_server(self.ui, self.app)

        # with an explicit port and non-verbose ui there is nothing to
        # announce
        if self.opts['port'] and not self.ui.verbose:
            return

        if self.httpd.prefix:
            prefix = self.httpd.prefix.strip('/') + '/'
        else:
            prefix = ''

        # omit the port from the URL when it is the HTTP default
        port = ':%d' % self.httpd.port
        if port == ':80':
            port = ''

        bindaddr = self.httpd.addr
        if bindaddr == '0.0.0.0':
            bindaddr = '*'
        elif ':' in bindaddr: # IPv6
            bindaddr = '[%s]' % bindaddr

        fqaddr = self.httpd.fqaddr
        if ':' in fqaddr:
            fqaddr = '[%s]' % fqaddr
        # when --port 0 was given the chosen port must be printed even in
        # quiet mode, hence ui.write instead of ui.status
        if self.opts['port']:
            write = self.ui.status
        else:
            write = self.ui.write
        write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
              (fqaddr, port, prefix, bindaddr, self.httpd.port))
        self.ui.flush() # avoid buffering of status message

    def run(self):
        # blocks until the process is terminated
        self.httpd.serve_forever()
5352 5352
5353 5353
@command('^status|st',
         [('A', 'all', None, _('show status of all files')),
          ('m', 'modified', None, _('show only modified files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files')),
          ('d', 'deleted', None, _('show only deleted (but tracked) files')),
          ('c', 'clean', None, _('show only files without changes')),
          ('u', 'unknown', None, _('show only unknown (not tracked) files')),
          ('i', 'ignored', None, _('show only ignored files')),
          ('n', 'no-status', None, _('hide status prefix')),
          ('C', 'copies', None, _('show source of copied files')),
          ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
          ('', 'rev', [], _('show difference from revision'), _('REV')),
          ('', 'change', '', _('list the changed files of a revision'), _('REV')),
         ] + walkopts + subrepoopts,
         _('[OPTION]... [FILE]...'),
         inferrepo=True)
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation, are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
    Unless options described with "show only ..." are given, the
    options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    .. note::

       status may appear to disagree with diff if permissions have
       changed or a merge has occurred. The standard diff format does
       not report permission changes and diff only reports changes
       relative to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the differences between them are
    shown. The --change option can also be used as a shortcut to list
    the changed files of a revision from its first parent.

    The codes used to show the status of files are::

      M = modified
      A = added
      R = removed
      C = clean
      ! = missing (deleted by non-hg command, but still tracked)
      ? = not tracked
      I = ignored
        = origin of the previous file (with --copies)

    .. container:: verbose

      Examples:

      - show changes in the working directory relative to a
        changeset::

          hg status --rev 9353

      - show all changes including copies in an existing changeset::

          hg status --copies --change 9353

      - get a NUL separated list of added files, suitable for xargs::

          hg status -an0

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    # resolve the (node1, node2) comparison pair from --rev/--change
    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    cwd = (pats and repo.getcwd()) or ''
    end = opts.get('print0') and '\0' or '\n'
    copy = {}
    # order matters: it matches the tuple returned by repo.status()
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        # default: -mard, plus unknown unless quiet
        show = ui.quiet and states[:4] or states[:5]

    stat = repo.status(node1, node2, scmutil.match(repo[node2], pats, opts),
                       'ignored' in show, 'clean' in show, 'unknown' in show,
                       opts.get('subrepos'))
    # pair each state name with its one-letter status code and file list
    changestates = zip(states, 'MAR!?IC', stat)

    if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
        copy = copies.pathcopies(repo[node1], repo[node2])

    fm = ui.formatter('status', opts)
    fmt = '%s' + end
    showchar = not opts.get('no_status')

    for state, char, files in changestates:
        if state in show:
            label = 'status.' + state
            for f in files:
                fm.startitem()
                fm.condwrite(showchar, 'status', '%s ', char, label=label)
                fm.write('path', fmt, repo.pathto(f, cwd), label=label)
                if f in copy:
                    # print the copy source on its own (indented) line
                    fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
                             label='status.copied')
    fm.end()
5472 5472
@command('^summary|sum',
         [('', 'remote', None, _('check for push and pull'))], '[--remote]')
def summary(ui, repo, **opts):
    """summarize working directory state

    This generates a brief summary of the working directory state,
    including parents, branch, commit status, and available updates.

    With the --remote option, this will check the default paths for
    incoming and outgoing changes. This can be time-consuming.

    Returns 0 on success.
    """

    ctx = repo[None]
    parents = ctx.parents()
    pnode = parents[0].node()
    marks = []

    for p in parents:
        # label with log.changeset (instead of log.parent) since this
        # shows a working directory parent *changeset*:
        # i18n: column positioning for "hg summary"
        ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
                 label='log.changeset changeset.%s' % p.phasestr())
        ui.write(' '.join(p.tags()), label='log.tag')
        if p.bookmarks():
            marks.extend(p.bookmarks())
        if p.rev() == -1:
            if not len(repo):
                ui.write(_(' (empty repository)'))
            else:
                ui.write(_(' (no revision checked out)'))
        ui.write('\n')
        if p.description():
            ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
                      label='log.summary')

    branch = ctx.branch()
    bheads = repo.branchheads(branch)
    # i18n: column positioning for "hg summary"
    m = _('branch: %s\n') % branch
    if branch != 'default':
        ui.write(m, label='log.branch')
    else:
        ui.status(m, label='log.branch')

    if marks:
        current = repo._bookmarkcurrent
        # i18n: column positioning for "hg summary"
        ui.write(_('bookmarks:'), label='log.bookmark')
        if current is not None:
            if current in marks:
                ui.write(' *' + current, label='bookmarks.current')
                marks.remove(current)
            else:
                ui.write(' [%s]' % current, label='bookmarks.current')
        # use a distinct loop variable so the branch line text bound to
        # 'm' above is not silently clobbered
        for mark in marks:
            ui.write(' ' + mark, label='log.bookmark')
        ui.write('\n', label='log.bookmark')

    st = list(repo.status(unknown=True))[:6]

    c = repo.dirstate.copies()
    copied, renamed = [], []
    for d, s in c.iteritems():
        if s in st[2]:
            # the source was removed: this copy is really a rename
            st[2].remove(s)
            renamed.append(d)
        else:
            copied.append(d)
        if d in st[1]:
            # don't double-report renamed/copied files as "added"
            st[1].remove(d)
    st.insert(3, renamed)
    st.insert(4, copied)

    ms = mergemod.mergestate(repo)
    st.append([f for f in ms if ms[f] == 'u'])

    subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
    st.append(subs)

    # labels are positional: they must stay in sync with the st list built
    # above
    labels = [ui.label(_('%d modified'), 'status.modified'),
              ui.label(_('%d added'), 'status.added'),
              ui.label(_('%d removed'), 'status.removed'),
              ui.label(_('%d renamed'), 'status.copied'),
              ui.label(_('%d copied'), 'status.copied'),
              ui.label(_('%d deleted'), 'status.deleted'),
              ui.label(_('%d unknown'), 'status.unknown'),
              ui.label(_('%d ignored'), 'status.ignored'),
              ui.label(_('%d unresolved'), 'resolve.unresolved'),
              ui.label(_('%d subrepos'), 'status.modified')]
    t = []
    for s, l in zip(st, labels):
        if s:
            t.append(l % len(s))

    t = ', '.join(t)
    cleanworkdir = False

    if repo.vfs.exists('updatestate'):
        t += _(' (interrupted update)')
    elif len(parents) > 1:
        t += _(' (merge)')
    elif branch != parents[0].branch():
        t += _(' (new branch)')
    elif (parents[0].closesbranch() and
          pnode in repo.branchheads(branch, closed=True)):
        t += _(' (head closed)')
    elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
        t += _(' (clean)')
        cleanworkdir = True
    elif pnode not in bheads:
        t += _(' (new branch head)')

    if cleanworkdir:
        # i18n: column positioning for "hg summary"
        ui.status(_('commit: %s\n') % t.strip())
    else:
        # i18n: column positioning for "hg summary"
        ui.write(_('commit: %s\n') % t.strip())

    # all ancestors of branch heads - all ancestors of parent = new csets
    # (use pctx, not ctx, to avoid redefining the workingctx bound above)
    new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
                                         bheads))

    if new == 0:
        # i18n: column positioning for "hg summary"
        ui.status(_('update: (current)\n'))
    elif pnode not in bheads:
        # i18n: column positioning for "hg summary"
        ui.write(_('update: %d new changesets (update)\n') % new)
    else:
        # i18n: column positioning for "hg summary"
        ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
                 (new, len(bheads)))

    cmdutil.summaryhooks(ui, repo)

    # ask registered extensions whether they need remote information
    if opts.get('remote'):
        needsincoming, needsoutgoing = True, True
    else:
        needsincoming, needsoutgoing = False, False
        for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
            if i:
                needsincoming = True
            if o:
                needsoutgoing = True
        if not needsincoming and not needsoutgoing:
            return

    def getincoming():
        # returns (source, sbranch, other-peer, commoninc, incoming-heads);
        # peer fields are None when the default path is not a repository
        source, branches = hg.parseurl(ui.expandpath('default'))
        sbranch = branches[0]
        try:
            other = hg.peer(repo, {}, source)
        except error.RepoError:
            if opts.get('remote'):
                raise
            return source, sbranch, None, None, None
        revs, checkout = hg.addbranchrevs(repo, other, branches, None)
        if revs:
            revs = [other.lookup(rev) for rev in revs]
        ui.debug('comparing with %s\n' % util.hidepassword(source))
        repo.ui.pushbuffer()
        commoninc = discovery.findcommonincoming(repo, other, heads=revs)
        repo.ui.popbuffer()
        return source, sbranch, other, commoninc, commoninc[1]

    if needsincoming:
        source, sbranch, sother, commoninc, incoming = getincoming()
    else:
        source = sbranch = sother = commoninc = incoming = None

    def getoutgoing():
        # returns (dest, dbranch, dest-peer, outgoing); reuses the incoming
        # peer and common set when source and destination coincide
        dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
        dbranch = branches[0]
        revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
        if source != dest:
            try:
                dother = hg.peer(repo, {}, dest)
            except error.RepoError:
                if opts.get('remote'):
                    raise
                return dest, dbranch, None, None
            ui.debug('comparing with %s\n' % util.hidepassword(dest))
        elif sother is None:
            # there is no explicit destination peer, but source one is invalid
            return dest, dbranch, None, None
        else:
            dother = sother
        if (source != dest or (sbranch is not None and sbranch != dbranch)):
            common = None
        else:
            common = commoninc
        if revs:
            revs = [repo.lookup(rev) for rev in revs]
        repo.ui.pushbuffer()
        outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
                                                commoninc=common)
        repo.ui.popbuffer()
        return dest, dbranch, dother, outgoing

    if needsoutgoing:
        dest, dbranch, dother, outgoing = getoutgoing()
    else:
        dest = dbranch = dother = outgoing = None

    if opts.get('remote'):
        t = []
        if incoming:
            t.append(_('1 or more incoming'))
        o = outgoing.missing
        if o:
            t.append(_('%d outgoing') % len(o))
        other = dother or sother
        if 'bookmarks' in other.listkeys('namespaces'):
            lmarks = repo.listkeys('bookmarks')
            rmarks = other.listkeys('bookmarks')
            diff = set(rmarks) - set(lmarks)
            if len(diff) > 0:
                t.append(_('%d incoming bookmarks') % len(diff))
            diff = set(lmarks) - set(rmarks)
            if len(diff) > 0:
                t.append(_('%d outgoing bookmarks') % len(diff))

        if t:
            # i18n: column positioning for "hg summary"
            ui.write(_('remote: %s\n') % (', '.join(t)))
        else:
            # i18n: column positioning for "hg summary"
            ui.status(_('remote: (synced)\n'))

    cmdutil.summaryremotehooks(ui, repo, opts,
                               ((source, sbranch, sother, commoninc),
                                (dest, dbranch, dother, outgoing)))
5709 5709
@command('tag',
         [('f', 'force', None, _('force tag')),
          ('l', 'local', None, _('make the tag local')),
          ('r', 'rev', '', _('revision to tag'), _('REV')),
          ('', 'remove', None, _('remove a tag')),
          # -l/--local is already there, commitopts cannot be used
          ('e', 'edit', None, _('invoke editor on commit messages')),
          ('m', 'message', '', _('use text as commit message'), _('TEXT')),
         ] + commitopts2,
         _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc. Changing
    an existing tag is normally disallowed; use -f/--force to override.

    If no revision is given, the parent of the working directory is
    used.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed similarly
    to other project files and can be hand-edited if necessary. This
    also means that tagging creates a new commit. The file
    ".hg/localtags" is used for local tags (not shared among
    repositories).

    Tag commits are usually made at the head of a branch. If the parent
    of the working directory is not a branch head, :hg:`tag` aborts; use
    -f/--force to force the tag commit to be based on a non-head
    changeset.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Since tag names have priority over branch names during revision
    lookup, using an existing branch name as a tag name is discouraged.

    Returns 0 on success.
    """
    wlock = lock = None
    try:
        # take both locks: tagging writes .hgtags (wlock) and commits (lock)
        wlock = repo.wlock()
        lock = repo.lock()
        rev_ = "."
        names = [t.strip() for t in (name1,) + names]
        if len(names) != len(set(names)):
            raise util.Abort(_('tag names must be unique'))
        for n in names:
            scmutil.checknewlabel(repo, n, 'tag')
            if not n:
                raise util.Abort(_('tag names cannot consist entirely of '
                                   'whitespace'))
        if opts.get('rev') and opts.get('remove'):
            raise util.Abort(_("--rev and --remove are incompatible"))
        if opts.get('rev'):
            rev_ = opts['rev']
        message = opts.get('message')
        if opts.get('remove'):
            # removal: every named tag must exist and be of the expected kind
            expectedtype = opts.get('local') and 'local' or 'global'
            for n in names:
                if not repo.tagtype(n):
                    raise util.Abort(_("tag '%s' does not exist") % n)
                if repo.tagtype(n) != expectedtype:
                    if expectedtype == 'global':
                        raise util.Abort(_("tag '%s' is not a global tag") % n)
                    else:
                        raise util.Abort(_("tag '%s' is not a local tag") % n)
            # a tag pointing at nullid marks it as removed
            rev_ = nullid
            if not message:
                # we don't translate commit messages
                message = 'Removed tag %s' % ', '.join(names)
        elif not opts.get('force'):
            for n in names:
                if n in repo.tags():
                    raise util.Abort(_("tag '%s' already exists "
                                       "(use -f to force)") % n)
        if not opts.get('local'):
            p1, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise util.Abort(_('uncommitted merge'))
            bheads = repo.branchheads()
            if not opts.get('force') and bheads and p1 not in bheads:
                raise util.Abort(_('not at a branch head (use -f to force)'))
        r = scmutil.revsingle(repo, rev_).node()

        if not message:
            # we don't translate commit messages
            message = ('Added tag %s for changeset %s' %
                       (', '.join(names), short(r)))

        date = opts.get('date')
        if date:
            date = util.parsedate(date)

        if opts.get('remove'):
            editform = 'tag.remove'
        else:
            editform = 'tag.add'
        editor = cmdutil.getcommiteditor(editform=editform, **opts)

        # don't allow tagging the null rev
        if (not opts.get('remove') and
            scmutil.revsingle(repo, rev_).rev() == nullrev):
            raise util.Abort(_("cannot tag null revision"))

        repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
                 editor=editor)
    finally:
        release(lock, wlock)
5822 5822
@command('tags', [], '')
def tags(ui, repo, **opts):
    """list repository tags

    This lists both regular and local tags. When the -v/--verbose
    switch is used, a third column "local" is printed for local tags.

    Returns 0 on success.
    """

    fm = ui.formatter('tags', opts)
    # full hashes when debugging, short ones otherwise
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    tagtype = ""

    # tagslist() is oldest-first; show newest tags at the top
    for tagname, node in reversed(repo.tagslist()):
        hn = hexfunc(node)
        if repo.tagtype(tagname) == 'local':
            label = 'tags.local'
            tagtype = 'local'
        else:
            label = 'tags.normal'
            tagtype = ''

        fm.startitem()
        fm.write('tag', '%s', tagname, label=label)
        # pad the tag name to a 30-column field before rev:hash
        pad = " " * (30 - encoding.colwidth(tagname))
        fm.condwrite(not ui.quiet, 'rev id', pad + ' %5d:%s',
                     repo.changelog.rev(node), hn, label=label)
        fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
                     tagtype, label=label)
        fm.plain('\n')
    fm.end()
5854 5854
@command('tip',
         [('p', 'patch', None, _('show patch')),
          ('g', 'git', None, _('use git extended diff format')),
         ] + templateopts,
         _('[-p] [-g]'))
def tip(ui, repo, **opts):
    """show the tip revision (DEPRECATED)

    The tip revision (usually just called the tip) is the changeset
    most recently added to the repository (and therefore the most
    recently changed head).

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.

    This command is deprecated, please use :hg:`heads` instead.

    Returns 0 on success.
    """
    # display the single changeset tagged 'tip' using the standard
    # changeset displayer (honors -p/-g/template options)
    displayer = cmdutil.show_changeset(ui, repo, opts)
    displayer.show(repo['tip'])
    displayer.close()
5879 5879
@command('unbundle',
         [('u', 'update', None,
           _('update to new branch head if changesets were unbundled'))],
         _('[-u] FILE...'))
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.

    Returns 0 on success, 1 if an update has unresolved files.
    """
    fnames = (fname1,) + fnames

    # a single lock covers all bundles so they apply atomically in sequence
    lock = repo.lock()
    try:
        for fname in fnames:
            f = hg.openpath(ui, fname)
            gen = exchange.readbundle(ui, f, fname)
            modheads = changegroup.addchangegroup(repo, gen, 'unbundle',
                                                  'bundle:' + fname)
    finally:
        lock.release()

    # modheads from the last bundle decides whether an update/merge hint
    # (or -u update) is appropriate
    return postincoming(ui, repo, modheads, opts.get('update'), None)
5905 5905
@command('^update|up|checkout|co',
         [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
          ('c', 'check', None,
           _('update across branches if no uncommitted changes')),
          ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
          ('r', 'rev', '', _('revision'), _('REV'))
         ] + mergetoolopts,
         _('[-c] [-C] [-d DATE] [[-r] REV]'))
def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
           tool=None):
    """update working directory (or switch revisions)

    Update the repository's working directory to the specified
    changeset. If no changeset is specified, update to the tip of the
    current named branch and move the current bookmark (see :hg:`help
    bookmarks`).

    Update sets the working directory's parent revision to the specified
    changeset (see :hg:`help parents`).

    If the changeset is not a descendant or ancestor of the working
    directory's parent, the update is aborted. With the -c/--check
    option, the working directory is checked for uncommitted changes; if
    none are found, the working directory is updated to the specified
    changeset.

    .. container:: verbose

      The following rules apply when the working directory contains
      uncommitted changes:

      1. If neither -c/--check nor -C/--clean is specified, and if
         the requested changeset is an ancestor or descendant of
         the working directory's parent, the uncommitted changes
         are merged into the requested changeset and the merged
         result is left uncommitted. If the requested changeset is
         not an ancestor or descendant (that is, it is on another
         branch), the update is aborted and the uncommitted changes
         are preserved.

      2. With the -c/--check option, the update is aborted and the
         uncommitted changes are preserved.

      3. With the -C/--clean option, uncommitted changes are discarded and
         the working directory is updated to the requested changeset.

    To cancel an uncommitted merge (and lose your changes), use
    :hg:`update --clean .`.

    Use null as the changeset to remove the working directory (like
    :hg:`clone -U`).

    If you want to revert just one file to an older revision, use
    :hg:`revert [-r REV] NAME`.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success, 1 if there are unresolved files.
    """
    # `node` is the positional argument, `rev` the -r option; only one may
    # be given
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if rev is None or rev == '':
        rev = node

    cmdutil.clearunfinished(repo)

    # with no argument, we also move the current bookmark, if any
    rev, movemarkfrom = bookmarks.calculateupdate(ui, repo, rev)

    # if we defined a bookmark, we have to remember the original bookmark name
    brev = rev
    rev = scmutil.revsingle(repo, rev, rev).rev()

    if check and clean:
        raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))

    if date:
        if rev is not None:
            raise util.Abort(_("you can't specify a revision and a date"))
        rev = cmdutil.finddate(ui, repo, date)

    if check:
        # abort early when the working directory is dirty
        c = repo[None]
        if c.dirty(merge=False, branch=False, missing=True):
            raise util.Abort(_("uncommitted changes"))
        if rev is None:
            rev = repo[repo[None].branch()].rev()
        mergemod._checkunknown(repo, repo[None], repo[rev])

    repo.ui.setconfig('ui', 'forcemerge', tool, 'update')

    if clean:
        ret = hg.clean(repo, rev)
    else:
        ret = hg.update(repo, rev)

    # keep bookmark state consistent with the new working directory parent
    if not ret and movemarkfrom:
        if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
            ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
    elif brev in repo._bookmarks:
        bookmarks.setcurrent(repo, brev)
        ui.status(_("(activating bookmark %s)\n") % brev)
    elif brev:
        if repo._bookmarkcurrent:
            ui.status(_("(leaving bookmark %s)\n") %
                      repo._bookmarkcurrent)
        bookmarks.unsetcurrent(repo)

    return ret
6016 6016
@command('verify', [])
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.

    Please see http://mercurial.selenic.com/wiki/RepositoryCorruption
    for more information about recovery from corruption of the
    repository.

    Returns 0 on success, 1 if errors are encountered.
    """
    # all the work is delegated to hg.verify; its return value is the
    # command's exit status
    return hg.verify(repo)
6035 6035
@command('version', [], norepo=True)
def version_(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % util.version())
    ui.status(_(
        "(see http://mercurial.selenic.com for more information)\n"
        "\nCopyright (C) 2005-2014 Matt Mackall and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))

    ui.note(_("\nEnabled extensions:\n\n"))
    if ui.verbose:
        # format names and versions into columns
        info = [(extname, extensions.moduleversion(extmod))
                for extname, extmod in extensions.extensions()]
        if info:
            maxnamelen = max(len(extname) for extname, ver in info)
            for extname, ver in info:
                ui.write(" %-*s %s\n" % (maxnamelen, extname, ver))
@@ -1,1692 +1,1692
1 1 # context.py - changeset and file context objects for mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import nullid, nullrev, short, hex, bin
9 9 from i18n import _
10 10 import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
11 11 import match as matchmod
12 12 import os, errno, stat
13 13 import obsolete as obsmod
14 14 import repoview
15 15 import fileset
16 16 import revlog
17 17
18 18 propertycache = util.propertycache
19 19
20 20 class basectx(object):
21 21 """A basectx object represents the common logic for its children:
22 22 changectx: read-only context that is already present in the repo,
23 23 workingctx: a context that represents the working directory and can
24 24 be committed,
25 25 memctx: a context that represents changes in-memory and can also
26 26 be committed."""
27 27 def __new__(cls, repo, changeid='', *args, **kwargs):
28 28 if isinstance(changeid, basectx):
29 29 return changeid
30 30
31 31 o = super(basectx, cls).__new__(cls)
32 32
33 33 o._repo = repo
34 34 o._rev = nullrev
35 35 o._node = nullid
36 36
37 37 return o
38 38
39 39 def __str__(self):
40 40 return short(self.node())
41 41
42 42 def __int__(self):
43 43 return self.rev()
44 44
45 45 def __repr__(self):
46 46 return "<%s %s>" % (type(self).__name__, str(self))
47 47
48 48 def __eq__(self, other):
49 49 try:
50 50 return type(self) == type(other) and self._rev == other._rev
51 51 except AttributeError:
52 52 return False
53 53
54 54 def __ne__(self, other):
55 55 return not (self == other)
56 56
57 57 def __contains__(self, key):
58 58 return key in self._manifest
59 59
60 60 def __getitem__(self, key):
61 61 return self.filectx(key)
62 62
63 63 def __iter__(self):
64 64 for f in sorted(self._manifest):
65 65 yield f
66 66
67 67 def _manifestmatches(self, match, s):
68 68 """generate a new manifest filtered by the match argument
69 69
70 70 This method is for internal use only and mainly exists to provide an
71 71 object oriented way for other contexts to customize the manifest
72 72 generation.
73 73 """
74 74 if match.always():
75 75 return self.manifest().copy()
76 76
77 77 files = match.files()
78 78 if (match.matchfn == match.exact or
79 79 (not match.anypats() and util.all(fn in self for fn in files))):
80 80 return self.manifest().intersectfiles(files)
81 81
82 82 mf = self.manifest().copy()
83 83 for fn in mf.keys():
84 84 if not match(fn):
85 85 del mf[fn]
86 86 return mf
87 87
88 88 def _matchstatus(self, other, s, match, listignored, listclean,
89 89 listunknown):
90 90 """return match.always if match is none
91 91
92 92 This internal method provides a way for child objects to override the
93 93 match operator.
94 94 """
95 95 return match or matchmod.always(self._repo.root, self._repo.getcwd())
96 96
97 97 def _prestatus(self, other, s, match, listignored, listclean, listunknown):
98 98 """provide a hook to allow child objects to preprocess status results
99 99
100 100 For example, this allows other contexts, such as workingctx, to query
101 101 the dirstate before comparing the manifests.
102 102 """
103 103 # load earliest manifest first for caching reasons
104 104 if self.rev() < other.rev():
105 105 self.manifest()
106 106 return s
107 107
108 108 def _poststatus(self, other, s, match, listignored, listclean, listunknown):
109 109 """provide a hook to allow child objects to postprocess status results
110 110
111 111 For example, this allows other contexts, such as workingctx, to filter
112 112 suspect symlinks in the case of FAT32 and NTFS filesytems.
113 113 """
114 114 return s
115 115
    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context"""
        # mf1: the other (base) context's matched manifest
        # mf2: this context's matched manifest
        mf1 = other._manifestmatches(match, s)
        mf2 = self._manifestmatches(match, s)

        modified, added, clean = [], [], []
        deleted, unknown, ignored = s[3], s[4], s[5]
        # files carrying flags (exec/link) in either manifest need an
        # explicit flag comparison in addition to the node comparison
        withflags = mf1.withflags() | mf2.withflags()
        for fn, mf2node in mf2.iteritems():
            if fn in mf1:
                # modified when flags differ or nodes differ; a falsy
                # mf2node forces a content comparison via cmp()
                if (fn not in deleted and
                    ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
                     (mf1[fn] != mf2node and
                      (mf2node or self[fn].cmp(other[fn]))))):
                    modified.append(fn)
                elif listclean:
                    clean.append(fn)
                # drop handled entries so whatever remains in mf1 is "removed"
                del mf1[fn]
            elif fn not in deleted:
                added.append(fn)
        removed = mf1.keys()
        if removed:
            # need to filter files if they are already reported as removed
            unknown = [fn for fn in unknown if fn not in mf1]
            ignored = [fn for fn in ignored if fn not in mf1]

        return [modified, added, removed, deleted, unknown, ignored, clean]
144 144
    @propertycache
    def substate(self):
        # lazily parsed subrepo state for this context
        return subrepo.state(self, self._repo.ui)

    def subrev(self, subpath):
        # second element of the substate entry (revision component)
        return self.substate[subpath][1]

    def rev(self):
        return self._rev
    def node(self):
        return self._node
    def hex(self):
        return hex(self.node())
    def manifest(self):
        return self._manifest
    def phasestr(self):
        # human-readable phase name looked up from the phase index
        return phases.phasenames[self.phase()]
    def mutable(self):
        # anything above the public phase may still be rewritten
        return self.phase() > phases.public

    def getfileset(self, expr):
        return fileset.getfileset(self, expr)

    def obsolete(self):
        """True if the changeset is obsolete"""
        return self.rev() in obsmod.getrevs(self._repo, 'obsolete')

    def extinct(self):
        """True if the changeset is extinct"""
        return self.rev() in obsmod.getrevs(self._repo, 'extinct')

    def unstable(self):
        """True if the changeset is not obsolete but it's ancestor are"""
        return self.rev() in obsmod.getrevs(self._repo, 'unstable')

    def bumped(self):
        """True if the changeset try to be a successor of a public changeset

        Only non-public and non-obsolete changesets may be bumped.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'bumped')

    def divergent(self):
        """Is a successors of a changeset with multiple possible successors set

        Only non-public and non-obsolete changesets may be divergent.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'divergent')

    def troubled(self):
        """True if the changeset is either unstable, bumped or divergent"""
        return self.unstable() or self.bumped() or self.divergent()

    def troubles(self):
        """return the list of troubles affecting this changesets.

        Troubles are returned as strings. possible values are:
        - unstable,
        - bumped,
        - divergent.
        """
        troubles = []
        if self.unstable():
            troubles.append('unstable')
        if self.bumped():
            troubles.append('bumped')
        if self.divergent():
            troubles.append('divergent')
        return troubles

    def parents(self):
        """return contexts for each parent changeset"""
        return self._parents

    def p1(self):
        return self._parents[0]

    def p2(self):
        # a missing second parent is represented by the null changectx
        if len(self._parents) == 2:
            return self._parents[1]
        return changectx(self._repo, -1)
226 226
    def _fileinfo(self, path):
        """Return (filenode, flags) for path, raising ManifestLookupError
        when the file is not present in this changeset."""
        # prefer the fully parsed manifest when it is already cached
        if '_manifest' in self.__dict__:
            try:
                return self._manifest[path], self._manifest.flags(path)
            except KeyError:
                raise error.ManifestLookupError(self._node, path,
                                                _('not found in manifest'))
        # otherwise try the cheaper manifest delta, but only when the file
        # was touched by this changeset
        if '_manifestdelta' in self.__dict__ or path in self.files():
            if path in self._manifestdelta:
                return (self._manifestdelta[path],
                        self._manifestdelta.flags(path))
        # fall back to a targeted search in the manifest revlog
        node, flag = self._repo.manifest.find(self._changeset[0], path)
        if not node:
            raise error.ManifestLookupError(self._node, path,
                                            _('not found in manifest'))

        return node, flag

    def filenode(self, path):
        return self._fileinfo(path)[0]

    def flags(self, path):
        try:
            return self._fileinfo(path)[1]
        except error.LookupError:
            # missing files simply have no flags
            return ''

    def sub(self, path):
        return subrepo.subrepo(self, path)

    def match(self, pats=[], include=None, exclude=None, default='glob'):
        # NOTE(review): mutable default [] is shared across calls; safe here
        # only as long as it is never mutated
        r = self._repo
        return matchmod.match(r.root, r.getcwd(), pats,
                              include, exclude, default,
                              auditor=r.auditor, ctx=self)

    def diff(self, ctx2=None, match=None, **opts):
        """Returns a diff generator for the given contexts and matcher"""
        if ctx2 is None:
            ctx2 = self.p1()
        if ctx2 is not None:
            ctx2 = self._repo[ctx2]
        diffopts = patch.diffopts(self._repo.ui, opts)
        return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)

    @propertycache
    def _dirs(self):
        # set-like object of all directories containing tracked files
        return scmutil.dirs(self._manifest)

    def dirs(self):
        return self._dirs

    def dirty(self, missing=False, merge=True, branch=True):
        # read-only contexts are never dirty
        return False
281 281
    def status(self, other=None, match=None, listignored=False,
               listclean=False, listunknown=False, listsubrepos=False):
        """return status of files between two nodes or node and working
        directory.

        If other is None, compare this node with working directory.

        returns (modified, added, removed, deleted, unknown, ignored, clean)
        """

        ctx1 = self
        ctx2 = self._repo[other]

        # This next code block is, admittedly, fragile logic that tests for
        # reversing the contexts and wouldn't need to exist if it weren't for
        # the fast (and common) code path of comparing the working directory
        # with its first parent.
        #
        # What we're aiming for here is the ability to call:
        #
        # workingctx.status(parentctx)
        #
        # If we always built the manifest for each context and compared those,
        # then we'd be done. But the special case of the above call means we
        # just copy the manifest of the parent.
        reversed = False
        if (not isinstance(ctx1, changectx)
            and isinstance(ctx2, changectx)):
            reversed = True
            ctx1, ctx2 = ctx2, ctx1

        # r is the seven-element status skeleton threaded through the
        # _matchstatus/_prestatus/_buildstatus/_poststatus hooks below
        r = [[], [], [], [], [], [], []]
        match = ctx2._matchstatus(ctx1, r, match, listignored, listclean,
                                  listunknown)
        r = ctx2._prestatus(ctx1, r, match, listignored, listclean, listunknown)
        r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
                              listunknown)
        r = ctx2._poststatus(ctx1, r, match, listignored, listclean,
                             listunknown)

        if reversed:
            # reverse added and removed
            r[1], r[2] = r[2], r[1]

        if listsubrepos:
            # fold each subrepo's status into the parent's result lists
            for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
                rev2 = ctx2.subrev(subpath)
                try:
                    submatch = matchmod.narrowmatcher(subpath, match)
                    s = sub.status(rev2, match=submatch, ignored=listignored,
                                   clean=listclean, unknown=listunknown,
                                   listsubrepos=True)
                    for rfiles, sfiles in zip(r, s):
                        rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
                except error.LookupError:
                    self._repo.ui.status(_("skipping missing "
                                           "subrepository: %s\n") % subpath)

        for l in r:
            l.sort()

        # we return a tuple to signify that this list isn't changing
        return tuple(r)
345 345
346 346
def makememctx(repo, parents, text, user, date, branch, files, store,
               editor=None):
    """Build an in-memory commit context whose file contents come from
    *store* (an object providing getfile(path))."""
    extra = {}
    if branch:
        extra['branch'] = encoding.fromlocal(branch)

    def getfilectx(repo, memctx, path):
        # materialize one file from the backing store on demand
        data, (islink, isexec), copied = store.getfile(path)
        return memfilectx(repo, path, data, islink=islink, isexec=isexec,
                          copied=copied, memctx=memctx)

    return memctx(repo, parents, text, files, getfilectx, user,
                  date, extra, editor)
359 359
360 360 class changectx(basectx):
361 361 """A changecontext object makes access to data related to a particular
362 362 changeset convenient. It represents a read-only context already present in
363 363 the repo."""
    def __init__(self, repo, changeid=''):
        """changeid is a revision number, node, or tag"""

        # since basectx.__new__ already took care of copying the object, we
        # don't need to do anything in __init__, so we just exit here
        if isinstance(changeid, basectx):
            return

        if changeid == '':
            changeid = '.'
        self._repo = repo

        # The lookups below are ordered cheapest-first; each one sets
        # self._node/self._rev and returns on success.

        # integer revision number
        if isinstance(changeid, int):
            try:
                self._node = repo.changelog.node(changeid)
            except IndexError:
                raise error.RepoLookupError(
                    _("unknown revision '%s'") % changeid)
            self._rev = changeid
            return
        if isinstance(changeid, long):
            changeid = str(changeid)
        # working directory's first parent
        if changeid == '.':
            self._node = repo.dirstate.p1()
            self._rev = repo.changelog.rev(self._node)
            return
        if changeid == 'null':
            self._node = nullid
            self._rev = nullrev
            return
        if changeid == 'tip':
            self._node = repo.changelog.tip()
            self._rev = repo.changelog.rev(self._node)
            return
        # binary node
        if len(changeid) == 20:
            try:
                self._node = changeid
                self._rev = repo.changelog.rev(changeid)
                return
            except LookupError:
                pass

        # decimal revision string, possibly negative (counted from tip)
        try:
            r = int(changeid)
            if str(r) != changeid:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                r += l
            if r < 0 or r >= l:
                raise ValueError
            self._rev = r
            self._node = repo.changelog.node(r)
            return
        except (ValueError, OverflowError, IndexError):
            pass

        # full 40-char hex node
        if len(changeid) == 40:
            try:
                self._node = bin(changeid)
                self._rev = repo.changelog.rev(self._node)
                return
            except (TypeError, LookupError):
                pass

        # bookmark, tag, or branch name
        if changeid in repo._bookmarks:
            self._node = repo._bookmarks[changeid]
            self._rev = repo.changelog.rev(self._node)
            return
        if changeid in repo._tagscache.tags:
            self._node = repo._tagscache.tags[changeid]
            self._rev = repo.changelog.rev(self._node)
            return
        try:
            self._node = repo.branchtip(changeid)
            self._rev = repo.changelog.rev(self._node)
            return
        except error.RepoLookupError:
            pass

        # unambiguous hex node prefix
        self._node = repo.changelog._partialmatch(changeid)
        if self._node is not None:
            self._rev = repo.changelog.rev(self._node)
            return

        # lookup failed
        # check if it might have come from damaged dirstate
        #
        # XXX we could avoid the unfiltered if we had a recognizable exception
        # for filtered changeset access
        if changeid in repo.unfiltered().dirstate.parents():
            raise error.Abort(_("working directory has unknown parent '%s'!")
                              % short(changeid))
        try:
            if len(changeid) == 20:
                changeid = hex(changeid)
        except TypeError:
            pass
        raise error.RepoLookupError(
            _("unknown revision '%s'") % changeid)
464 464
    def __hash__(self):
        try:
            return hash(self._rev)
        except AttributeError:
            # not fully initialized contexts fall back to identity hashing
            return id(self)

    def __nonzero__(self):
        return self._rev != nullrev

    @propertycache
    def _changeset(self):
        # raw changelog entry tuple for this revision
        return self._repo.changelog.read(self.rev())

    @propertycache
    def _manifest(self):
        return self._repo.manifest.read(self._changeset[0])

    @propertycache
    def _manifestdelta(self):
        return self._repo.manifest.readdelta(self._changeset[0])

    @propertycache
    def _parents(self):
        # drop a null second parent so callers only see real parents
        p = self._repo.changelog.parentrevs(self._rev)
        if p[1] == nullrev:
            p = p[:-1]
        return [changectx(self._repo, x) for x in p]

    def changeset(self):
        return self._changeset
    def manifestnode(self):
        return self._changeset[0]

    # accessors into the raw changelog entry fields
    def user(self):
        return self._changeset[1]
    def date(self):
        return self._changeset[2]
    def files(self):
        return self._changeset[3]
    def description(self):
        return self._changeset[4]
    def branch(self):
        return encoding.tolocal(self._changeset[5].get("branch"))
    def closesbranch(self):
        return 'close' in self._changeset[5]
    def extra(self):
        return self._changeset[5]
    def tags(self):
        return self._repo.nodetags(self._node)
    def bookmarks(self):
        return self._repo.nodebookmarks(self._node)
    def phase(self):
        return self._repo._phasecache.phase(self._repo, self._rev)
    def hidden(self):
        return self._rev in repoview.filterrevs(self._repo, 'visible')
520 520
    def children(self):
        """return contexts for each child changeset"""
        c = self._repo.changelog.children(self._node)
        return [changectx(self._repo, x) for x in c]

    def ancestors(self):
        # lazily yield ancestor contexts
        for a in self._repo.changelog.ancestors([self._rev]):
            yield changectx(self._repo, a)

    def descendants(self):
        # lazily yield descendant contexts
        for d in self._repo.changelog.descendants([self._rev]):
            yield changectx(self._repo, d)

    def filectx(self, path, fileid=None, filelog=None):
        """get a file context from this changeset"""
        if fileid is None:
            fileid = self.filenode(path)
        return filectx(self._repo, path, fileid=fileid,
                       changectx=self, filelog=filelog)

    def ancestor(self, c2, warn=False):
        """
        return the "best" ancestor context of self and c2
        """
        # deal with workingctxs
        n2 = c2._node
        if n2 is None:
            n2 = c2._parents[0]._node
        cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
        if not cahs:
            anc = nullid
        elif len(cahs) == 1:
            anc = cahs[0]
        else:
            # several candidate ancestors: honor merge.preferancestor
            # when it names one of them, else defer to the changelog
            for r in self._repo.ui.configlist('merge', 'preferancestor'):
                ctx = changectx(self._repo, r)
                anc = ctx.node()
                if anc in cahs:
                    break
            else:
                anc = self._repo.changelog.ancestor(self._node, n2)
            if warn:
                self._repo.ui.status(
                    (_("note: using %s as ancestor of %s and %s\n") %
                     (short(anc), short(self._node), short(n2))) +
                    ''.join(_(" alternatively, use --config "
                              "merge.preferancestor=%s\n") %
                            short(n) for n in sorted(cahs) if n != anc))
        return changectx(self._repo, anc)
570 570
    def descendant(self, other):
        """True if other is descendant of this changeset"""
        return self._repo.changelog.descendant(self._rev, other._rev)

    def walk(self, match):
        """Yield matching file names tracked in this changeset."""
        fset = set(match.files())
        # for dirstate.walk, files=['.'] means "walk the whole tree".
        # follow that here, too
        fset.discard('.')

        # avoid the entire walk if we're only looking for specific files
        if fset and not match.anypats():
            if util.all([fn in self for fn in fset]):
                for fn in sorted(fset):
                    if match(fn):
                        yield fn
                raise StopIteration

        for fn in self:
            if fn in fset:
                # specified pattern is the exact name
                fset.remove(fn)
            if match(fn):
                yield fn
        # anything still in fset matched no tracked file
        for fn in sorted(fset):
            if fn in self._dirs:
                # specified pattern is a directory
                continue
            match.bad(fn, _('no such file in rev %s') % self)

    def matches(self, match):
        return self.walk(match)
603 603
604 604 class basefilectx(object):
605 605 """A filecontext object represents the common logic for its children:
606 606 filectx: read-only access to a filerevision that is already present
607 607 in the repo,
608 608 workingfilectx: a filecontext that represents files from the working
609 609 directory,
610 610 memfilectx: a filecontext that represents files in-memory."""
    def __new__(cls, repo, path, *args, **kwargs):
        # no instance reuse here (unlike basectx)
        return super(basefilectx, cls).__new__(cls)

    @propertycache
    def _filelog(self):
        return self._repo.file(self._path)

    @propertycache
    def _changeid(self):
        # explicit id wins, then the owning changectx, else the linkrev
        if '_changeid' in self.__dict__:
            return self._changeid
        elif '_changectx' in self.__dict__:
            return self._changectx.rev()
        else:
            return self._filelog.linkrev(self._filerev)

    @propertycache
    def _filenode(self):
        # resolve from the explicit fileid when given, else via manifest
        if '_fileid' in self.__dict__:
            return self._filelog.lookup(self._fileid)
        else:
            return self._changectx.filenode(self._path)

    @propertycache
    def _filerev(self):
        return self._filelog.rev(self._filenode)

    @propertycache
    def _repopath(self):
        return self._path
641 641
642 642 def __nonzero__(self):
643 643 try:
644 644 self._filenode
645 645 return True
646 646 except error.LookupError:
647 647 # file is missing
648 648 return False
649 649
650 650 def __str__(self):
651 651 return "%s@%s" % (self.path(), self._changectx)
652 652
653 653 def __repr__(self):
654 654 return "<%s %s>" % (type(self).__name__, str(self))
655 655
656 656 def __hash__(self):
657 657 try:
658 658 return hash((self._path, self._filenode))
659 659 except AttributeError:
660 660 return id(self)
661 661
662 662 def __eq__(self, other):
663 663 try:
664 664 return (type(self) == type(other) and self._path == other._path
665 665 and self._filenode == other._filenode)
666 666 except AttributeError:
667 667 return False
668 668
669 669 def __ne__(self, other):
670 670 return not (self == other)
671 671
    # thin accessors over the filelog entry and the owning changectx
    def filerev(self):
        return self._filerev
    def filenode(self):
        return self._filenode
    def flags(self):
        return self._changectx.flags(self._path)
    def filelog(self):
        return self._filelog
    def rev(self):
        return self._changeid
    def linkrev(self):
        # changelog revision this file revision was introduced in
        return self._filelog.linkrev(self._filerev)
    def node(self):
        return self._changectx.node()
    def hex(self):
        return self._changectx.hex()
    def user(self):
        return self._changectx.user()
    def date(self):
        return self._changectx.date()
    def files(self):
        return self._changectx.files()
    def description(self):
        return self._changectx.description()
    def branch(self):
        return self._changectx.branch()
    def extra(self):
        return self._changectx.extra()
    def phase(self):
        return self._changectx.phase()
    def phasestr(self):
        return self._changectx.phasestr()
    def manifest(self):
        return self._changectx.manifest()
    def changectx(self):
        return self._changectx

    def path(self):
        return self._path

    def isbinary(self):
        try:
            return util.binary(self.data())
        except IOError:
            # unreadable data is treated as non-binary
            return False
    def isexec(self):
        return 'x' in self.flags()
    def islink(self):
        return 'l' in self.flags()
721 721
    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        # Only read file data when a cheap size comparison cannot rule the
        # contents different; fctx._filerev is None for working-directory
        # files, where sizes are not directly comparable.
        if (fctx._filerev is None
            and (self._repo._encodefilterpats
                 # if file data starts with '\1\n', empty metadata block is
                 # prepended, which adds 4 bytes to filelog.size().
                 or self.size() - 4 == fctx.size())
            or self.size() == fctx.size()):
            return self._filelog.cmp(self._filenode, fctx.data())

        # sizes differ, so the contents must differ
        return True
736 736
737 737 def parents(self):
738 p = self._path
738 _path = self._path
739 739 fl = self._filelog
740 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
740 pl = [(_path, n, fl) for n in self._filelog.parents(self._filenode)]
741 741
742 742 r = self._filelog.renamed(self._filenode)
743 743 if r:
744 744 pl[0] = (r[0], r[1], None)
745 745
746 746 return [filectx(self._repo, p, fileid=n, filelog=l)
747 747 for p, n, l in pl if n != nullid]
748 748
749 749 def p1(self):
750 750 return self.parents()[0]
751 751
752 752 def p2(self):
753 753 p = self.parents()
754 754 if len(p) == 2:
755 755 return p[1]
756 756 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
757 757
    def annotate(self, follow=False, linenumber=None, diffopts=None):
        '''returns a list of tuples of (ctx, line) for each line
        in the file, where ctx is the filectx of the node where
        that line was last changed.
        This returns tuples of ((ctx, linenumber), line) for each line,
        if "linenumber" parameter is NOT "None".
        In such tuples, linenumber means one at the first appearance
        in the managed file.
        To reduce annotation cost,
        this returns fixed value(False is used) as linenumber,
        if "linenumber" parameter is "False".'''

        # decorate() pairs each line of a text with its annotation payload;
        # the shape of that payload depends on the linenumber mode
        if linenumber is None:
            def decorate(text, rev):
                return ([rev] * len(text.splitlines()), text)
        elif linenumber:
            def decorate(text, rev):
                size = len(text.splitlines())
                return ([(rev, i) for i in xrange(1, size + 1)], text)
        else:
            def decorate(text, rev):
                return ([(rev, False)] * len(text.splitlines()), text)

        def pair(parent, child):
            # copy annotations from parent into child across equal blocks
            blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
                                     refine=True)
            for (a1, a2, b1, b2), t in blocks:
                # Changed blocks ('!') or blocks made only of blank lines ('~')
                # belong to the child.
                if t == '=':
                    child[0][b1:b2] = parent[0][a1:a2]
            return child

        getlog = util.lrucachefunc(lambda x: self._repo.file(x))

        def parents(f):
            pl = f.parents()

            # Don't return renamed parents if we aren't following.
            if not follow:
                pl = [p for p in pl if p.path() == f.path()]

            # renamed filectx won't have a filelog yet, so set it
            # from the cache to save time
            for p in pl:
                if not '_filelog' in p.__dict__:
                    p._filelog = getlog(p.path())

            return pl

        # use linkrev to find the first changeset where self appeared
        if self.rev() != self.linkrev():
            base = self.filectx(self.filenode())
        else:
            base = self

        # This algorithm would prefer to be recursive, but Python is a
        # bit recursion-hostile. Instead we do an iterative
        # depth-first search.

        # hist maps a filectx to its (already merged) annotation;
        # needed counts remaining consumers so entries can be freed early
        visit = [base]
        hist = {}
        pcache = {}
        needed = {base: 1}
        while visit:
            f = visit[-1]
            pcached = f in pcache
            if not pcached:
                pcache[f] = parents(f)

            # a node is ready once all of its parents have been annotated
            ready = True
            pl = pcache[f]
            for p in pl:
                if p not in hist:
                    ready = False
                    visit.append(p)
                if not pcached:
                    needed[p] = needed.get(p, 0) + 1
            if ready:
                visit.pop()
                reusable = f in hist
                if reusable:
                    curr = hist[f]
                else:
                    curr = decorate(f.data(), f)
                for p in pl:
                    if not reusable:
                        curr = pair(hist[p], curr)
                    if needed[p] == 1:
                        # last consumer: release the parent's annotation
                        del hist[p]
                        del needed[p]
                    else:
                        needed[p] -= 1

                hist[f] = curr
                pcache[f] = []

        return zip(hist[base][0], hist[base][1].splitlines(True))
856 856
857 857 def ancestors(self, followfirst=False):
858 858 visit = {}
859 859 c = self
860 860 cut = followfirst and 1 or None
861 861 while True:
862 862 for parent in c.parents()[:cut]:
863 863 visit[(parent.rev(), parent.node())] = parent
864 864 if not visit:
865 865 break
866 866 c = visit.pop(max(visit))
867 867 yield c
868 868
869 869 class filectx(basefilectx):
870 870 """A filecontext object makes access to data related to a particular
871 871 filerevision convenient."""
    def __init__(self, repo, path, changeid=None, fileid=None,
                 filelog=None, changectx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        self._repo = repo
        self._path = path

        # at least one way of resolving the file revision must be given
        assert (changeid is not None
                or fileid is not None
                or changectx is not None), \
                ("bad args: changeid=%r, fileid=%r, changectx=%r"
                 % (changeid, fileid, changectx))

        # seed only the lazily-computed attributes we actually know;
        # propertycache fills in the rest on demand
        if filelog is not None:
            self._filelog = filelog

        if changeid is not None:
            self._changeid = changeid
        if changectx is not None:
            self._changectx = changectx
        if fileid is not None:
            self._fileid = fileid
894 894
    @propertycache
    def _changectx(self):
        # changectx owning this file revision; falls back to the
        # unfiltered repo when the linkrev points at a filtered changeset
        try:
            return changectx(self._repo, self._changeid)
        except error.RepoLookupError:
            # Linkrev may point to any revision in the repository. When the
            # repository is filtered this may lead to `filectx` trying to build
            # `changectx` for filtered revision. In such case we fallback to
            # creating `changectx` on the unfiltered version of the reposition.
            # This fallback should not be an issue because `changectx` from
            # `filectx` are not used in complex operations that care about
            # filtering.
            #
            # This fallback is a cheap and dirty fix that prevent several
            # crashes. It does not ensure the behavior is correct. However the
            # behavior was not correct before filtering either and "incorrect
            # behavior" is seen as better as "crash"
            #
            # Linkrevs have several serious troubles with filtering that are
            # complicated to solve. Proper handling of the issue here should be
            # considered when solving linkrev issue are on the table.
            return changectx(self._repo.unfiltered(), self._changeid)
917 917
    def filectx(self, fileid):
        '''opens an arbitrary revision of the file without
        opening a new filelog'''
        # reuse self._filelog to avoid re-opening the same filelog
        return filectx(self._repo, self._path, fileid=fileid,
                       filelog=self._filelog)

    def data(self):
        # raw file content of this file revision
        return self._filelog.read(self._filenode)
    def size(self):
        # stored size of this file revision
        return self._filelog.size(self._filerev)
928 928
    def renamed(self):
        """check if file was actually renamed in this changeset revision

        If rename logged in file revision, we report copy for changeset only
        if file revisions linkrev points back to the changeset in question
        or both changeset parents contain different file revisions.
        """

        renamed = self._filelog.renamed(self._filenode)
        if not renamed:
            return renamed

        if self.rev() == self.linkrev():
            return renamed

        # the rename was recorded by an earlier changeset; only report it
        # here when neither parent already carries this exact file revision
        name = self.path()
        fnode = self._filenode
        for p in self._changectx.parents():
            try:
                if fnode == p.filenode(name):
                    return None
            except error.LookupError:
                pass
        return renamed

    def children(self):
        # hard for renames
        c = self._filelog.children(self._filenode)
        return [filectx(self._repo, self._path, fileid=x,
                        filelog=self._filelog) for x in c]
959 959
class committablectx(basectx):
    """A committablectx object provides common functionality for a context that
    wants the ability to commit, e.g. workingctx or memctx."""
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        self._repo = repo
        self._rev = None   # not committed yet: no revision number
        self._node = None  # ... and no node id either
        self._text = text
        if date:
            self._date = util.parsedate(date)
        if user:
            self._user = user
        if changes:
            # pre-seed the _status propertycache; otherwise status is
            # computed lazily from the repository
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            try:
                branch = encoding.fromlocal(self._repo.dirstate.branch())
            except UnicodeDecodeError:
                raise util.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
        if self._extra['branch'] == '':
            self._extra['branch'] = 'default'

    def __str__(self):
        # e.g. "1a2b3c4d5e6f+": first parent's short id plus a dirty marker
        return str(self._parents[0]) + "+"

    def __nonzero__(self):
        return True

    def _buildflagfunc(self):
        # Create a fallback function for getting file flags when the
        # filesystem doesn't support them

        copiesget = self._repo.dirstate.copies().get

        if len(self._parents) < 2:
            # when we have one parent, it's easy: copy from parent
            man = self._parents[0].manifest()
            def func(f):
                f = copiesget(f, f)
                return man.flags(f)
        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = self._parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f) # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return '' # punt for conflicts

        return func

    @propertycache
    def _flagfunc(self):
        # dirstate.flagfunc uses the filesystem when it can and falls
        # back to the manifest-based function built above
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    @propertycache
    def _manifest(self):
        """generate a manifest corresponding to the values in self._status"""

        man = self._parents[0].manifest().copy()
        if len(self._parents) > 1:
            man2 = self.p2().manifest()
            def getman(f):
                # prefer p1's entry; fall back to p2 for files only there
                if f in man:
                    return man
                return man2
        else:
            getman = lambda f: man

        copied = self._repo.dirstate.copies()
        ff = self._flagfunc
        modified, added, removed, deleted = self._status[:4]
        for i, l in (("a", added), ("m", modified)):
            for f in l:
                orig = copied.get(f, f)
                # tag the node with 'a'/'m' to mark it as uncommitted
                man[f] = getman(orig).get(orig, nullid) + i
                try:
                    man.set(f, ff(f))
                except OSError:
                    pass

        for f in deleted + removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        return self._repo.status()

    @propertycache
    def _user(self):
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        return util.makedate()

    def subrev(self, subpath):
        # an uncommitted context has no recorded subrepo revision yet
        return None

    def user(self):
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
    def files(self):
        # all touched files: modified + added + removed
        return sorted(self._status[0] + self._status[1] + self._status[2])

    # accessors for the individual status buckets
    def modified(self):
        return self._status[0]
    def added(self):
        return self._status[1]
    def removed(self):
        return self._status[2]
    def deleted(self):
        return self._status[3]
    def unknown(self):
        return self._status[4]
    def ignored(self):
        return self._status[5]
    def clean(self):
        return self._status[6]
    def branch(self):
        return encoding.tolocal(self._extra['branch'])
    def closesbranch(self):
        return 'close' in self._extra
    def extra(self):
        return self._extra

    def tags(self):
        # an uncommitted context inherits the tags of all its parents
        t = []
        for p in self.parents():
            t.extend(p.tags())
        return t

    def bookmarks(self):
        # likewise for bookmarks
        b = []
        for p in self.parents():
            b.extend(p.bookmarks())
        return b

    def phase(self):
        phase = phases.draft # default phase to draft
        for p in self.parents():
            phase = max(phase, p.phase())
        return phase

    def hidden(self):
        return False

    def children(self):
        return []

    def flags(self, path):
        if '_manifest' in self.__dict__:
            # manifest already computed: answer from it directly
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        try:
            return self._flagfunc(path)
        except OSError:
            return ''

    def ancestor(self, c2):
        """return the ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
                                               True, False))

    def matches(self, match):
        return sorted(self._repo.dirstate.matches(match))

    def ancestors(self):
        # lazily yield changectxs for every ancestor of our parents
        for a in self._repo.changelog.ancestors(
            [p.rev() for p in self._parents]):
            yield changectx(self._repo, a)

    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates backing stores this working context
        wraps to reflect the fact that the changes reflected by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.

        """

        for f in self.modified() + self.added():
            self._repo.dirstate.normal(f)
        for f in self.removed():
            self._repo.dirstate.drop(f)
        self._repo.dirstate.setparents(node)

    def dirs(self):
        return self._repo.dirstate.dirs()
1177 1177
class workingctx(committablectx):
    """A workingctx object makes access to data related to
    the current working directory convenient.
    date - any valid date string or (unixtime, offset), or None.
    user - username string, or None.
    extra - a dictionary of extra values, or None.
    changes - a list of file lists as returned by localrepo.status()
    or None to use the repository status.
    """
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        super(workingctx, self).__init__(repo, text, user, date, extra, changes)

    def __iter__(self):
        # iterate over tracked files: everything in the dirstate except
        # entries marked 'r' (removed)
        d = self._repo.dirstate
        for f in d:
            if d[f] != 'r':
                yield f

    def __contains__(self, key):
        # a file is "in" the working context unless it is unknown ('?')
        # or removed ('r')
        return self._repo.dirstate[key] not in "?r"

    @propertycache
    def _parents(self):
        p = self._repo.dirstate.parents()
        if p[1] == nullid:
            # drop the null second parent so len(_parents) is meaningful
            p = p[:-1]
        return [changectx(self._repo, x) for x in p]

    def filectx(self, path, filelog=None):
        """get a file context from the working directory"""
        return workingfilectx(self._repo, path, workingctx=self,
                              filelog=filelog)

    def dirty(self, missing=False, merge=True, branch=True):
        "check whether a working directory is modified"
        # check subrepos first
        for s in sorted(self.substate):
            if self.sub(s).dirty():
                return True
        # check current working dir
        return ((merge and self.p2()) or
                (branch and self.branch() != self.p1().branch()) or
                self.modified() or self.added() or self.removed() or
                (missing and self.deleted()))

    def add(self, list, prefix=""):
        """Schedule files for addition; return the list of rejected paths."""
        join = lambda f: os.path.join(prefix, f)
        wlock = self._repo.wlock()
        ui, ds = self._repo.ui, self._repo.dirstate
        try:
            rejected = []
            lstat = self._repo.wvfs.lstat
            for f in list:
                # refuse names that are not portable across platforms
                scmutil.checkportable(ui, join(f))
                try:
                    st = lstat(f)
                except OSError:
                    ui.warn(_("%s does not exist!\n") % join(f))
                    rejected.append(f)
                    continue
                if st.st_size > 10000000:
                    # warn, but still add: big files are expensive to track
                    ui.warn(_("%s: up to %d MB of RAM may be required "
                              "to manage this file\n"
                              "(use 'hg revert %s' to cancel the "
                              "pending addition)\n")
                            % (f, 3 * st.st_size // 1000000, join(f)))
                if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
                    ui.warn(_("%s not added: only files and symlinks "
                              "supported currently\n") % join(f))
                    rejected.append(f)
                elif ds[f] in 'amn':
                    # already added, modified or normal: nothing to do
                    ui.warn(_("%s already tracked!\n") % join(f))
                elif ds[f] == 'r':
                    # previously removed: resurrect instead of re-adding
                    ds.normallookup(f)
                else:
                    ds.add(f)
            return rejected
        finally:
            wlock.release()

    def forget(self, files, prefix=""):
        """Stop tracking files; return the list of untracked (rejected) paths."""
        join = lambda f: os.path.join(prefix, f)
        wlock = self._repo.wlock()
        try:
            rejected = []
            for f in files:
                if f not in self._repo.dirstate:
                    self._repo.ui.warn(_("%s not tracked!\n") % join(f))
                    rejected.append(f)
                elif self._repo.dirstate[f] != 'a':
                    self._repo.dirstate.remove(f)
                else:
                    # a pending add that was never committed: just drop it
                    self._repo.dirstate.drop(f)
            return rejected
        finally:
            wlock.release()

    def undelete(self, list):
        """Restore files marked removed, using a parent revision's content."""
        pctxs = self.parents()
        wlock = self._repo.wlock()
        try:
            for f in list:
                if self._repo.dirstate[f] != 'r':
                    self._repo.ui.warn(_("%s not removed!\n") % f)
                else:
                    # take the file from whichever parent has it
                    fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
                    t = fctx.data()
                    self._repo.wwrite(f, t, fctx.flags())
                    self._repo.dirstate.normal(f)
        finally:
            wlock.release()

    def copy(self, source, dest):
        """Record in the dirstate that dest was copied from source."""
        try:
            st = self._repo.wvfs.lstat(dest)
        except OSError, err:
            if err.errno != errno.ENOENT:
                raise
            self._repo.ui.warn(_("%s does not exist!\n") % dest)
            return
        if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
            self._repo.ui.warn(_("copy failed: %s is not a file or a "
                                 "symbolic link\n") % dest)
        else:
            wlock = self._repo.wlock()
            try:
                if self._repo.dirstate[dest] in '?r':
                    # destination not tracked yet: add it first
                    self._repo.dirstate.add(dest)
                self._repo.dirstate.copy(source, dest)
            finally:
                wlock.release()

    def _filtersuspectsymlink(self, files):
        # nothing to do when the filesystem supports symlinks natively
        if not files or self._repo.dirstate._checklink:
            return files

        # Symlink placeholders may get non-symlink-like contents
        # via user error or dereferencing by NFS or Samba servers,
        # so we filter out any placeholders that don't look like a
        # symlink
        sane = []
        for f in files:
            if self.flags(f) == 'l':
                d = self[f].data()
                if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
                    self._repo.ui.debug('ignoring suspect symlink placeholder'
                                        ' "%s"\n' % f)
                    continue
            sane.append(f)
        return sane

    def _checklookup(self, files):
        # check for any possibly clean files
        if not files:
            return [], []

        modified = []
        fixup = []
        pctx = self._parents[0]
        # do a full compare of any files that might have changed
        for f in sorted(files):
            if (f not in pctx or self.flags(f) != pctx.flags(f)
                or pctx[f].cmp(self[f])):
                modified.append(f)
            else:
                fixup.append(f)

        # update dirstate for files that are actually clean
        if fixup:
            try:
                # updating the dirstate is optional
                # so we don't wait on the lock
                # wlock can invalidate the dirstate, so cache normal _after_
                # taking the lock
                wlock = self._repo.wlock(False)
                normal = self._repo.dirstate.normal
                try:
                    for f in fixup:
                        normal(f)
                finally:
                    wlock.release()
            except error.LockError:
                pass
        return modified, fixup

    def _manifestmatches(self, match, s):
        """Slow path for workingctx

        The fast path is when we compare the working directory to its parent
        which means this function is comparing with a non-parent; therefore we
        need to build a manifest and return what matches.
        """
        mf = self._repo['.']._manifestmatches(match, s)
        modified, added, removed = s[0:3]
        for f in modified + added:
            # node unknown for uncommitted files; flags come from the wdir
            mf[f] = None
            mf.set(f, self.flags(f))
        for f in removed:
            if f in mf:
                del mf[f]
        return mf

    def _prestatus(self, other, s, match, listignored, listclean, listunknown):
        """override the parent hook with a dirstate query

        We use this prestatus hook to populate the status with information from
        the dirstate.
        """
        # doesn't need to call super; if that changes, be aware that super
        # calls self.manifest which would slow down the common case of calling
        # status against a workingctx's parent
        return self._dirstatestatus(match, listignored, listclean, listunknown)

    def _poststatus(self, other, s, match, listignored, listclean, listunknown):
        """override the parent hook with a filter for suspect symlinks

        We use this poststatus hook to filter out symlinks that might have
        accidentally ended up with the entire contents of the file they are
        supposed to be linking to.
        """
        s[0] = self._filtersuspectsymlink(s[0])
        self._status = s[:]
        return s

    def _dirstatestatus(self, match=None, ignored=False, clean=False,
                        unknown=False):
        '''Gets the status from the dirstate -- internal use only.'''
        listignored, listclean, listunknown = ignored, clean, unknown
        match = match or matchmod.always(self._repo.root, self._repo.getcwd())
        subrepos = []
        if '.hgsub' in self:
            subrepos = sorted(self.substate)
        s = self._repo.dirstate.status(match, subrepos, listignored,
                                       listclean, listunknown)
        cmp, modified, added, removed, deleted, unknown, ignored, clean = s

        # check for any possibly clean files
        if cmp:
            modified2, fixup = self._checklookup(cmp)
            modified += modified2

            # update dirstate for files that are actually clean
            if fixup and listclean:
                clean += fixup

        return [modified, added, removed, deleted, unknown, ignored, clean]

    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context

        This includes logic for maintaining the fast path of status when
        comparing the working directory against its parent, which is to skip
        building a new manifest if self (working directory) is not comparing
        against its parent (repo['.']).
        """
        if other != self._repo['.']:
            s = super(workingctx, self)._buildstatus(other, s, match,
                                                     listignored, listclean,
                                                     listunknown)
        return s

    def _matchstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """override the match method with a filter for directory patterns

        We use inheritance to customize the match.bad method only in cases of
        workingctx since it belongs only to the working directory when
        comparing against the parent changeset.

        If we aren't comparing against the working directory's parent, then we
        just use the default match object sent to us.
        """
        superself = super(workingctx, self)
        match = superself._matchstatus(other, s, match, listignored, listclean,
                                       listunknown)
        if other != self._repo['.']:
            def bad(f, msg):
                # 'f' may be a directory pattern from 'match.files()',
                # so 'f not in ctx1' is not enough
                if f not in other and f not in other.dirs():
                    self._repo.ui.warn('%s: %s\n' %
                                       (self._repo.dirstate.pathto(f), msg))
            match.bad = bad
        return match

    def status(self, other='.', match=None, listignored=False,
               listclean=False, listunknown=False, listsubrepos=False):
        # yet to be determined: what to do if 'other' is a 'workingctx' or a
        # 'memctx'?
        s = super(workingctx, self).status(other, match, listignored, listclean,
                                           listunknown, listsubrepos)
        # calling 'super' subtly reversed the contexts, so we flip the results
        # (s[1] is 'added' and s[2] is 'removed')
        s = list(s)
        s[1], s[2] = s[2], s[1]
        return tuple(s)
1476 1476
class committablefilectx(basefilectx):
    """A committablefilectx provides common functionality for a file context
    that wants the ability to commit, e.g. workingfilectx or memfilectx."""
    def __init__(self, repo, path, filelog=None, ctx=None):
        self._repo = repo
        self._path = path
        # An uncommitted file has no changelog/filelog coordinates yet.
        self._changeid = None
        self._filerev = self._filenode = None
        # Only pre-populate the lazy attributes the caller supplied;
        # the rest are computed on demand by propertycaches.
        if filelog is not None:
            self._filelog = filelog
        if ctx:
            self._changectx = ctx

    def __nonzero__(self):
        # a committable file context always "exists"
        return True

    def parents(self):
        '''return parent filectxs, following copies if necessary'''
        def nodefor(pctx, fname):
            # nullid marks "not present in this parent's manifest"
            return pctx._manifest.get(fname, nullid)

        fname = self._path
        flog = self._filelog
        parentctxs = self._changectx._parents
        copysource = self.renamed()

        # First parent slot: either the recorded copy source (with no
        # explicit filelog) or this path's revision in p1.
        if copysource:
            entries = [copysource + (None,)]
        else:
            entries = [(fname, nodefor(parentctxs[0], fname), flog)]
        # Every remaining parent contributes this path's own revision.
        for pc in parentctxs[1:]:
            entries.append((fname, nodefor(pc, fname), flog))

        return [filectx(self._repo, p, fileid=n, filelog=l)
                for p, n, l in entries if n != nullid]

    def children(self):
        # an uncommitted file revision cannot have children yet
        return []
1517 1517
1518 1518 class workingfilectx(committablefilectx):
1519 1519 """A workingfilectx object makes access to data related to a particular
1520 1520 file in the working directory convenient."""
1521 1521 def __init__(self, repo, path, filelog=None, workingctx=None):
1522 1522 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1523 1523
1524 1524 @propertycache
1525 1525 def _changectx(self):
1526 1526 return workingctx(self._repo)
1527 1527
1528 1528 def data(self):
1529 1529 return self._repo.wread(self._path)
1530 1530 def renamed(self):
1531 1531 rp = self._repo.dirstate.copied(self._path)
1532 1532 if not rp:
1533 1533 return None
1534 1534 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1535 1535
1536 1536 def size(self):
1537 1537 return self._repo.wvfs.lstat(self._path).st_size
1538 1538 def date(self):
1539 1539 t, tz = self._changectx.date()
1540 1540 try:
1541 1541 return (int(self._repo.wvfs.lstat(self._path).st_mtime), tz)
1542 1542 except OSError, err:
1543 1543 if err.errno != errno.ENOENT:
1544 1544 raise
1545 1545 return (t, tz)
1546 1546
1547 1547 def cmp(self, fctx):
1548 1548 """compare with other file context
1549 1549
1550 1550 returns True if different than fctx.
1551 1551 """
1552 1552 # fctx should be a filectx (not a workingfilectx)
1553 1553 # invert comparison to reuse the same code path
1554 1554 return fctx.cmp(self)
1555 1555
1556 1556 def remove(self, ignoremissing=False):
1557 1557 """wraps unlink for a repo's working directory"""
1558 1558 util.unlinkpath(self._repo.wjoin(self._path), ignoremissing)
1559 1559
1560 1560 def write(self, data, flags):
1561 1561 """wraps repo.wwrite"""
1562 1562 self._repo.wwrite(self._path, data, flags)
1563 1563
class memctx(committablectx):
    """Use memctx to perform in-memory commits via localrepo.commitctx().

    Revision information is supplied at initialization time while
    related files data and is made available through a callback
    mechanism. 'repo' is the current localrepo, 'parents' is a
    sequence of two parent revisions identifiers (pass None for every
    missing parent), 'text' is the commit message and 'files' lists
    names of files touched by the revision (normalized and relative to
    repository root).

    filectxfn(repo, memctx, path) is a callable receiving the
    repository, the current memctx object and the normalized path of
    requested file, relative to repository root. It is fired by the
    commit function for every file in 'files', but calls order is
    undefined. If the file is available in the revision being
    committed (updated or added), filectxfn returns a memfilectx
    object. If the file was removed, filectxfn raises an
    IOError. Moved files are represented by marking the source file
    removed and the new file added with copy information (see
    memfilectx).

    user receives the committer name and defaults to current
    repository username, date is the commit date in any format
    supported by util.parsedate() and defaults to current date, extra
    is a dictionary of metadata or is left empty.
    """
    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None, editor=False):
        super(memctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        # normalize missing parents to the null revision
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [changectx(self._repo, p) for p in (p1, p2)]
        files = sorted(set(files))
        # every listed file is treated as "modified"; the other buckets
        # (added/removed/deleted/unknown) stay empty
        self._status = [files, [], [], [], []]
        self._filectxfn = filectxfn
        self.substate = {}

        # if store is not callable, wrap it in a function
        if not callable(filectxfn):
            def getfilectx(repo, memctx, path):
                fctx = filectxfn[path]
                # this is weird but apparently we only keep track of one parent
                # (why not only store that instead of a tuple?)
                copied = fctx.renamed()
                if copied:
                    copied = copied[0]
                return memfilectx(repo, path, fctx.data(),
                                  islink=fctx.islink(), isexec=fctx.isexec(),
                                  copied=copied, memctx=memctx)
            self._filectxfn = getfilectx

        self._extra = extra and extra.copy() or {}
        if self._extra.get('branch', '') == '':
            self._extra['branch'] = 'default'

        if editor:
            # let the caller-supplied editor rewrite the commit message
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def filectx(self, path, filelog=None):
        """get a file context from the working directory"""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""

        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f, fnode in man.iteritems():
            p1node = nullid
            p2node = nullid
            p = pctx[f].parents() # if file isn't in pctx, check p2?
            if len(p) > 0:
                p1node = p[0].node()
            if len(p) > 1:
                p2node = p[1].node()
            # recompute the node from the (possibly new) content
            man[f] = revlog.hash(self[f].data(), p1node, p2node)

        return man
1653 1653
1654 1654
class memfilectx(committablefilectx):
    """memfilectx represents an in-memory file to commit.

    See memctx and commitablefilectx for more details.
    """
    def __init__(self, repo, path, data, islink=False,
                 isexec=False, copied=None, memctx=None):
        """
        path is the normalized file path relative to repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copied is the source file path if current file was copied in the
        revision being committed, or None."""
        super(memfilectx, self).__init__(repo, path, None, memctx)
        self._data = data
        # encode the mode the way manifests do: 'l' for symlink,
        # 'x' for executable, '' for a plain file
        flags = ''
        if islink:
            flags += 'l'
        if isexec:
            flags += 'x'
        self._flags = flags
        self._copied = (copied, nullid) if copied else None

    def data(self):
        """Return the in-memory content."""
        return self._data

    def size(self):
        """Length of the in-memory content."""
        return len(self.data())

    def flags(self):
        """Manifest-style flag string ('l', 'x', 'lx' or '')."""
        return self._flags

    def renamed(self):
        """Return (source path, nullid) when this file records a copy."""
        return self._copied

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # need to figure out what to do here
        del self._changectx[self._path]

    def write(self, data, flags):
        """wraps repo.wwrite"""
        self._data = data
@@ -1,1781 +1,1780
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 from node import hex, nullid, short
8 8 from i18n import _
9 9 import urllib
10 10 import peer, changegroup, subrepo, pushkey, obsolete, repoview
11 11 import changelog, dirstate, filelog, manifest, context, bookmarks, phases
12 12 import lock as lockmod
13 13 import transaction, store, encoding, exchange, bundle2
14 14 import scmutil, util, extensions, hook, error, revset
15 15 import match as matchmod
16 16 import merge as mergemod
17 17 import tags as tagsmod
18 18 from lock import release
19 19 import weakref, errno, os, time, inspect
20 20 import branchmap, pathutil
21 21 propertycache = util.propertycache
22 22 filecache = scmutil.filecache
23 23
class repofilecache(filecache):
    """All filecache usage on repo are done for logic that should be unfiltered
    """

    def __get__(self, repo, type=None):
        # always operate on the unfiltered repository
        unfi = repo.unfiltered()
        return super(repofilecache, self).__get__(unfi, type)
    def __set__(self, repo, value):
        unfi = repo.unfiltered()
        return super(repofilecache, self).__set__(unfi, value)
    def __delete__(self, repo):
        unfi = repo.unfiltered()
        return super(repofilecache, self).__delete__(unfi)
34 34
class storecache(repofilecache):
    """filecache for files in the store"""
    def join(self, obj, fname):
        # store files live under .hg/store, so join through sjoin
        return obj.sjoin(fname)

class unfilteredpropertycache(propertycache):
    """propertycache that apply to unfiltered repo only"""

    def __get__(self, repo, type=None):
        unfi = repo.unfiltered()
        if unfi is repo:
            # unfiltered repo: compute and cache normally
            return super(unfilteredpropertycache, self).__get__(unfi)
        # filtered view: read the attribute cached on the unfiltered repo
        return getattr(unfi, self.name)

class filteredpropertycache(propertycache):
    """propertycache that must take filtering in account"""

    def cachevalue(self, obj, value):
        # cache on the (possibly filtered) object itself
        object.__setattr__(obj, self.name, value)
54 54
55 55
def hasunfilteredcache(repo, name):
    """check if a repo has an unfilteredpropertycache value for <name>"""
    unfi = repo.unfiltered()
    return name in vars(unfi)
59 59
def unfilteredmethod(orig):
    """decorate method that always need to be run on unfiltered version"""
    def inner(repo, *args, **kwargs):
        # swap the (possibly filtered) repo for its unfiltered version
        return orig(repo.unfiltered(), *args, **kwargs)
    return inner
65 65
# capabilities exposed by modern local peers
moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
                  'unbundle'))
# legacy peers additionally support changegroupsubset
legacycaps = moderncaps.union(set(['changegroupsubset']))
69 69
class localpeer(peer.peerrepository):
    '''peer for a local repo; reflects only the most recent API'''

    def __init__(self, repo, caps=moderncaps):
        peer.peerrepository.__init__(self)
        # expose the 'served' filtered view, like a remote peer would
        self._repo = repo.filtered('served')
        self.ui = repo.ui
        self._caps = repo._restrictcapabilities(caps)
        self.requirements = repo.requirements
        self.supportedformats = repo.supportedformats

    def close(self):
        self._repo.close()

    def _capabilities(self):
        return self._caps

    def local(self):
        # local peers expose the underlying repository object
        return self._repo

    def canpush(self):
        return True

    def url(self):
        return self._repo.url()

    def lookup(self, key):
        return self._repo.lookup(key)

    def branchmap(self):
        return self._repo.branchmap()

    def heads(self):
        return self._repo.heads()

    def known(self, nodes):
        return self._repo.known(nodes)

    def getbundle(self, source, heads=None, common=None, bundlecaps=None,
                  format='HG10', **kwargs):
        cg = exchange.getbundle(self._repo, source, heads=heads,
                                common=common, bundlecaps=bundlecaps, **kwargs)
        if bundlecaps is not None and 'HG2X' in bundlecaps:
            # When requesting a bundle2, getbundle returns a stream to make the
            # wire level function happier. We need to build a proper object
            # from it in local peer.
            cg = bundle2.unbundle20(self.ui, cg)
        return cg

    # TODO We might want to move the next two calls into legacypeer and add
    # unbundle instead.

    def unbundle(self, cg, heads, url):
        """apply a bundle on a repo

        This function handles the repo locking itself."""
        try:
            cg = exchange.readbundle(self.ui, cg, None)
            ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
            if util.safehasattr(ret, 'getchunks'):
                # This is a bundle20 object, turn it into an unbundler.
                # This little dance should be dropped eventually when the API
                # is finally improved.
                stream = util.chunkbuffer(ret.getchunks())
                ret = bundle2.unbundle20(self.ui, stream)
            return ret
        except error.PushRaced, exc:
            # report push races as a protocol-level response error
            raise error.ResponseError(_('push failed:'), str(exc))

    def lock(self):
        return self._repo.lock()

    def addchangegroup(self, cg, source, url):
        return changegroup.addchangegroup(self._repo, cg, source, url)

    def pushkey(self, namespace, key, old, new):
        return self._repo.pushkey(namespace, key, old, new)

    def listkeys(self, namespace):
        return self._repo.listkeys(namespace)

    def debugwireargs(self, one, two, three=None, four=None, five=None):
        '''used to test argument passing over the wire'''
        return "%s %s %s %s %s" % (one, two, three, four, five)
154 154
class locallegacypeer(localpeer):
    '''peer extension which implements legacy methods too; used for tests with
    restricted capabilities'''

    def __init__(self, repo):
        localpeer.__init__(self, repo, caps=legacycaps)

    # the following methods only exist on legacy peers; they all simply
    # delegate to the underlying repository

    def branches(self, nodes):
        return self._repo.branches(nodes)

    def between(self, pairs):
        return self._repo.between(pairs)

    def changegroup(self, basenodes, source):
        return changegroup.changegroup(self._repo, basenodes, source)

    def changegroupsubset(self, bases, heads, source):
        return changegroup.changegroupsubset(self._repo, bases, heads, source)
173 173
174 174 class localrepository(object):
175 175
    # repository formats this class can open
    supportedformats = set(('revlogv1', 'generaldelta'))
    _basesupported = supportedformats | set(('store', 'fncache', 'shared',
                                             'dotencode'))
    # requirements that influence how the store opener behaves
    openerreqs = set(('revlogv1', 'generaldelta'))
    # base requirements written for newly created repositories
    requirements = ['revlogv1']
    # name of the repoview filter applied to this repo (None = unfiltered)
    filtername = None

    # bundle2 capabilities advertised by this repository
    bundle2caps = {'HG2X': (),
                   'b2x:listkeys': (),
                   'b2x:pushkey': (),
                   'b2x:changegroup': (),
                  }

    # a list of (ui, featureset) functions.
    # only functions defined in module of enabled extensions are invoked
    featuresetupfuncs = set()

    def _baserequirements(self, create):
        # return a copy so callers can append without mutating the class
        # attribute
        return self.requirements[:]
195 195
    def __init__(self, baseui, path=None, create=False):
        """Open, or with create=True initialize, the repository at path."""
        self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True)
        self.wopener = self.wvfs
        self.root = self.wvfs.base
        self.path = self.wvfs.join(".hg")
        self.origroot = path
        self.auditor = pathutil.pathauditor(self.root, self._checknested)
        self.vfs = scmutil.vfs(self.path)
        self.opener = self.vfs
        self.baseui = baseui
        self.ui = baseui.copy()
        self.ui.copy = baseui.copy # prevent copying repo configuration
        # A list of callback to shape the phase if no data were found.
        # Callback are in the form: func(repo, roots) --> processed root.
        # This list it to be filled by extension during repo setup
        self._phasedefaults = []
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
            extensions.loadall(self.ui)
        except IOError:
            # a repository without .hg/hgrc is fine
            pass

        if self.featuresetupfuncs:
            self.supported = set(self._basesupported) # use private copy
            extmods = set(m.__name__ for n, m
                          in extensions.extensions(self.ui))
            # only run setup functions belonging to enabled extensions
            for setupfunc in self.featuresetupfuncs:
                if setupfunc.__module__ in extmods:
                    setupfunc(self.ui, self.supported)
        else:
            self.supported = self._basesupported

        if not self.vfs.isdir():
            if create:
                if not self.wvfs.exists():
                    self.wvfs.makedirs()
                self.vfs.makedir(notindexed=True)
                requirements = self._baserequirements(create)
                if self.ui.configbool('format', 'usestore', True):
                    self.vfs.mkdir("store")
                    requirements.append("store")
                    if self.ui.configbool('format', 'usefncache', True):
                        requirements.append("fncache")
                        if self.ui.configbool('format', 'dotencode', True):
                            requirements.append('dotencode')
                # create an invalid changelog
                self.vfs.append(
                    "00changelog.i",
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                )
                if self.ui.configbool('format', 'generaldelta', False):
                    requirements.append("generaldelta")
                requirements = set(requirements)
            else:
                raise error.RepoError(_("repository %s not found") % path)
        elif create:
            raise error.RepoError(_("repository %s already exists") % path)
        else:
            try:
                requirements = scmutil.readrequires(self.vfs, self.supported)
            except IOError, inst:
                if inst.errno != errno.ENOENT:
                    raise
                # no requires file: treat as an empty requirement set
                requirements = set()

        self.sharedpath = self.path
        try:
            # follow .hg/sharedpath indirection for shared repositories
            vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'),
                              realpath=True)
            s = vfs.base
            if not vfs.exists():
                raise error.RepoError(
                    _('.hg/sharedpath points to nonexistent directory %s') % s)
            self.sharedpath = s
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise

        self.store = store.store(requirements, self.sharedpath, scmutil.vfs)
        self.spath = self.store.path
        self.svfs = self.store.vfs
        self.sopener = self.svfs
        self.sjoin = self.store.join
        self.vfs.createmode = self.store.createmode
        self._applyrequirements(requirements)
        if create:
            self._writerequirements()


        self._branchcaches = {}
        self.filterpats = {}
        self._datafilters = {}
        self._transref = self._lockref = self._wlockref = None

        # A cache for various files under .hg/ that tracks file changes,
        # (used by the filecache decorator)
        #
        # Maps a property name to its util.filecacheentry
        self._filecache = {}

        # hold sets of revision to be filtered
        # should be cleared when something might have changed the filter value:
        # - new changesets,
        # - phase change,
        # - new obsolescence marker,
        # - working directory parent change,
        # - bookmark changes
        self.filteredrevcache = {}
305 305
    def close(self):
        # nothing to release for a plain local repository
        pass

    def _restrictcapabilities(self, caps):
        """filter the peer capability set for this repository"""
        # bundle2 is not ready for prime time, drop it unless explicitly
        # required by the tests (or some brave tester)
        if self.ui.configbool('experimental', 'bundle2-exp', False):
            caps = set(caps)
            capsblob = bundle2.encodecaps(self.bundle2caps)
            caps.add('bundle2-exp=' + urllib.quote(capsblob))
        return caps

    def _applyrequirements(self, requirements):
        """record requirements and derive the store opener options"""
        self.requirements = requirements
        self.sopener.options = dict((r, 1) for r in requirements
                                    if r in self.openerreqs)
        chunkcachesize = self.ui.configint('format', 'chunkcachesize')
        if chunkcachesize is not None:
            self.sopener.options['chunkcachesize'] = chunkcachesize

    def _writerequirements(self):
        # persist self.requirements to .hg/requires, one per line, sorted
        reqfile = self.opener("requires", "w")
        for r in sorted(self.requirements):
            reqfile.write("%s\n" % r)
        reqfile.close()

    def _checknested(self, path):
        """Determine if path is a legal nested repository."""
        if not path.startswith(self.root):
            return False
        subpath = path[len(self.root) + 1:]
        normsubpath = util.pconvert(subpath)

        # XXX: Checking against the current working copy is wrong in
        # the sense that it can reject things like
        #
        #   $ hg cat -r 10 sub/x.txt
        #
        # if sub/ is no longer a subrepository in the working copy
        # parent revision.
        #
        # However, it can of course also allow things that would have
        # been rejected before, such as the above cat command if sub/
        # is a subrepository now, but was a normal directory before.
        # The old path auditor would have rejected by mistake since it
        # panics when it sees sub/.hg/.
        #
        # All in all, checking against the working copy seems sensible
        # since we want to prevent access to nested repositories on
        # the filesystem *now*.
        ctx = self[None]
        parts = util.splitpath(subpath)
        while parts:
            prefix = '/'.join(parts)
            if prefix in ctx.substate:
                if prefix == normsubpath:
                    # path is a subrepository itself: legal
                    return True
                else:
                    # path is inside a subrepository: ask the subrepo
                    sub = ctx.sub(prefix)
                    return sub.checknested(subpath[len(prefix) + 1:])
            else:
                parts.pop()
        return False

    def peer(self):
        return localpeer(self) # not cached to avoid reference cycle

    def unfiltered(self):
        """Return unfiltered version of the repository

        Intended to be overwritten by filtered repo."""
        return self

    def filtered(self, name):
        """Return a filtered version of a repository"""
        # build a new class with the mixin and the current class
        # (possibly subclass of the repo)
        class proxycls(repoview.repoview, self.unfiltered().__class__):
            pass
        return proxycls(self, name)
386 386
    @repofilecache('bookmarks')
    def _bookmarks(self):
        # bookmark store backed by .hg/bookmarks
        return bookmarks.bmstore(self)

    @repofilecache('bookmarks.current')
    def _bookmarkcurrent(self):
        # currently active bookmark, backed by .hg/bookmarks.current
        return bookmarks.readcurrent(self)

    def bookmarkheads(self, bookmark):
        """return nodes of all bookmarks sharing bookmark's base name

        The base name is the part before any '@' suffix.
        """
        name = bookmark.split('@', 1)[0]
        heads = []
        for mark, n in self._bookmarks.iteritems():
            if mark.split('@', 1)[0] == name:
                heads.append(n)
        return heads

    @storecache('phaseroots')
    def _phasecache(self):
        return phases.phasecache(self, self._phasedefaults)

    @storecache('obsstore')
    def obsstore(self):
        store = obsolete.obsstore(self.sopener)
        if store and not obsolete._enabled:
            # message is rare enough to not be translated
            msg = 'obsolete feature not enabled but %i markers found!\n'
            self.ui.warn(msg % len(list(store)))
        return store

    @storecache('00changelog.i')
    def changelog(self):
        c = changelog.changelog(self.sopener)
        if 'HG_PENDING' in os.environ:
            p = os.environ['HG_PENDING']
            if p.startswith(self.root):
                # a transaction is pending on this repo: include its data
                c.readpending('00changelog.i.a')
        return c

    @storecache('00manifest.i')
    def manifest(self):
        return manifest.manifest(self.sopener)

    @repofilecache('dirstate')
    def dirstate(self):
        # warn at most once about unknown working directory parents
        warned = [0]
        def validate(node):
            # map parents unknown to the changelog to nullid
            try:
                self.changelog.rev(node)
                return node
            except error.LookupError:
                if not warned[0]:
                    warned[0] = True
                    self.ui.warn(_("warning: ignoring unknown"
                                   " working parent %s!\n") % short(node))
                return nullid

        return dirstate.dirstate(self.opener, self.ui, self.root, validate)
444 444
    def __getitem__(self, changeid):
        # None selects the working directory context
        if changeid is None:
            return context.workingctx(self)
        return context.changectx(self, changeid)

    def __contains__(self, changeid):
        try:
            return bool(self.lookup(changeid))
        except error.RepoLookupError:
            return False

    def __nonzero__(self):
        # a repository object is always truthy, even when empty
        return True

    def __len__(self):
        return len(self.changelog)

    def __iter__(self):
        return iter(self.changelog)

    def revs(self, expr, *args):
        '''Return a list of revisions matching the given revset'''
        expr = revset.formatspec(expr, *args)
        m = revset.match(None, expr)
        return m(self, revset.spanset(self))

    def set(self, expr, *args):
        '''
        Yield a context for each matching revision, after doing arg
        replacement via revset.formatspec
        '''
        for r in self.revs(expr, *args):
            yield self[r]

    def url(self):
        return 'file:' + self.root

    def hook(self, name, throw=False, **args):
        """Call a hook, passing this repo instance.

        This a convenience method to aid invoking hooks. Extensions likely
        won't call this unless they have registered a custom hook or are
        replacing code that is expected to call a hook.
        """
        return hook.hook(self.ui, self, name, throw, **args)
490 490
    @unfilteredmethod
    def _tag(self, names, node, message, local, user, date, extra={},
             editor=False):
        """implementation behind tag(); see tag() for argument semantics

        NOTE(review): 'extra' is a mutable default dict; it is only passed
        through to self.commit() here, but callers should not mutate it.
        """
        if isinstance(names, str):
            names = (names,)

        branches = self.branchmap()
        for name in names:
            self.hook('pretag', throw=True, node=hex(node), tag=name,
                      local=local)
            if name in branches:
                self.ui.warn(_("warning: tag %s conflicts with existing"
                               " branch name\n") % name)

        def writetags(fp, names, munge, prevtags):
            # append tag records to fp, closing it when done
            fp.seek(0, 2)
            if prevtags and prevtags[-1] != '\n':
                fp.write('\n')
            for name in names:
                m = munge and munge(name) or name
                if (self._tagscache.tagtypes and
                    name in self._tagscache.tagtypes):
                    # record the old node first so the tag history is kept
                    old = self.tags().get(name, nullid)
                    fp.write('%s %s\n' % (hex(old), m))
                fp.write('%s %s\n' % (hex(node), m))
            fp.close()

        prevtags = ''
        if local:
            try:
                fp = self.opener('localtags', 'r+')
            except IOError:
                fp = self.opener('localtags', 'a')
            else:
                prevtags = fp.read()

            # local tags are stored in the current charset
            writetags(fp, names, None, prevtags)
            for name in names:
                self.hook('tag', node=hex(node), tag=name, local=local)
            return

        try:
            fp = self.wfile('.hgtags', 'rb+')
        except IOError, e:
            if e.errno != errno.ENOENT:
                raise
            fp = self.wfile('.hgtags', 'ab')
        else:
            prevtags = fp.read()

        # committed tags are stored in UTF-8
        writetags(fp, names, encoding.fromlocal, prevtags)

        fp.close()

        self.invalidatecaches()

        if '.hgtags' not in self.dirstate:
            self[None].add(['.hgtags'])

        m = matchmod.exact(self.root, '', ['.hgtags'])
        tagnode = self.commit(message, user, date, extra=extra, match=m,
                              editor=editor)

        for name in names:
            self.hook('tag', node=hex(node), tag=name, local=local)

        return tagnode

    def tag(self, names, node, message, local, user, date, editor=False):
        '''tag a revision with one or more symbolic names.

        names is a list of strings or, when adding a single tag, names may be a
        string.

        if local is True, the tags are stored in a per-repository file.
        otherwise, they are stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tags in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        if not local:
            # refuse to tag while .hgtags has uncommitted modifications
            for x in self.status()[:5]:
                if '.hgtags' in x:
                    raise util.Abort(_('working copy of .hgtags is changed '
                                       '(please commit .hgtags manually)'))

        self.tags() # instantiate the cache
        self._tag(names, node, message, local, user, date, editor=editor)

    @filteredpropertycache
    def _tagscache(self):
        '''Returns a tagscache object that contains various tags related
        caches.'''

        # This simplifies its cache management by having one decorated
        # function (this one) and the rest simply fetch things from it.
        class tagscache(object):
            def __init__(self):
                # These two define the set of tags for this repository. tags
                # maps tag name to node; tagtypes maps tag name to 'global' or
                # 'local'. (Global tags are defined by .hgtags across all
                # heads, and local tags are defined in .hg/localtags.)
                # They constitute the in-memory cache of tags.
                self.tags = self.tagtypes = None

                self.nodetagscache = self.tagslist = None

        cache = tagscache()
        cache.tags, cache.tagtypes = self._findtags()

        return cache
613 613
614 614 def tags(self):
615 615 '''return a mapping of tag to node'''
616 616 t = {}
617 617 if self.changelog.filteredrevs:
618 618 tags, tt = self._findtags()
619 619 else:
620 620 tags = self._tagscache.tags
621 621 for k, v in tags.iteritems():
622 622 try:
623 623 # ignore tags to unknown nodes
624 624 self.changelog.rev(v)
625 625 t[k] = v
626 626 except (error.LookupError, ValueError):
627 627 pass
628 628 return t
629 629
630 630 def _findtags(self):
631 631 '''Do the hard work of finding tags. Return a pair of dicts
632 632 (tags, tagtypes) where tags maps tag name to node, and tagtypes
633 633 maps tag name to a string like \'global\' or \'local\'.
634 634 Subclasses or extensions are free to add their own tags, but
635 635 should be aware that the returned dicts will be retained for the
636 636 duration of the localrepo object.'''
637 637
638 638 # XXX what tagtype should subclasses/extensions use? Currently
639 639 # mq and bookmarks add tags, but do not set the tagtype at all.
640 640 # Should each extension invent its own tag type? Should there
641 641 # be one tagtype for all such "virtual" tags? Or is the status
642 642 # quo fine?
643 643
644 644 alltags = {} # map tag name to (node, hist)
645 645 tagtypes = {}
646 646
647 647 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
648 648 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
649 649
650 650 # Build the return dicts. Have to re-encode tag names because
651 651 # the tags module always uses UTF-8 (in order not to lose info
652 652 # writing to the cache), but the rest of Mercurial wants them in
653 653 # local encoding.
654 654 tags = {}
655 655 for (name, (node, hist)) in alltags.iteritems():
656 656 if node != nullid:
657 657 tags[encoding.tolocal(name)] = node
658 658 tags['tip'] = self.changelog.tip()
659 659 tagtypes = dict([(encoding.tolocal(name), value)
660 660 for (name, value) in tagtypes.iteritems()])
661 661 return (tags, tagtypes)
662 662
    def tagtype(self, tagname):
        '''
        return the type of the given tag. result can be:

        'local'  : a local tag
        'global' : a global tag
        None     : tag does not exist
        '''

        # tagtypes is populated by _findtags() via the _tagscache property
        return self._tagscache.tagtypes.get(tagname)
673 673
674 674 def tagslist(self):
675 675 '''return a list of tags ordered by revision'''
676 676 if not self._tagscache.tagslist:
677 677 l = []
678 678 for t, n in self.tags().iteritems():
679 r = self.changelog.rev(n)
680 l.append((r, t, n))
679 l.append((self.changelog.rev(n), t, n))
681 680 self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
682 681
683 682 return self._tagscache.tagslist
684 683
    def nodetags(self, node):
        '''return the tags associated with a node'''
        if not self._tagscache.nodetagscache:
            # build the reverse mapping node -> sorted list of tag names
            nodetagscache = {}
            for t, n in self._tagscache.tags.iteritems():
                nodetagscache.setdefault(n, []).append(t)
            for tags in nodetagscache.itervalues():
                tags.sort()
            self._tagscache.nodetagscache = nodetagscache
        return self._tagscache.nodetagscache.get(node, [])

    def nodebookmarks(self, node):
        '''return the sorted list of bookmarks pointing at node'''
        marks = []
        for bookmark, n in self._bookmarks.iteritems():
            if n == node:
                marks.append(bookmark)
        return sorted(marks)

    def branchmap(self):
        '''returns a dictionary {branch: [branchheads]} with branchheads
        ordered by increasing revision number'''
        branchmap.updatecache(self)
        return self._branchcaches[self.filtername]

    def branchtip(self, branch):
        '''return the tip node for a given branch'''
        try:
            return self.branchmap().branchtip(branch)
        except KeyError:
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)

    def lookup(self, key):
        return self[key].node()

    def lookupbranch(self, key, remote=None):
        # if key already names a branch (here or on the remote), use it
        repo = remote or self
        if key in repo.branchmap():
            return key

        # otherwise resolve key as a changeset and return its branch;
        # prefer the remote only when it is a local repository
        repo = (remote and remote.local()) and remote or self
        return repo[key].branch()

    def known(self, nodes):
        # return a list of booleans: is each node known and non-secret?
        nm = self.changelog.nodemap
        pc = self._phasecache
        result = []
        for n in nodes:
            r = nm.get(n)
            resp = not (r is None or pc.phase(self, r) >= phases.secret)
            result.append(resp)
        return result

    def local(self):
        return self

    def cancopy(self):
        # so statichttprepo's override of local() works
        if not self.local():
            return False
        if not self.ui.configbool('phases', 'publish', True):
            return True
        # if publishing we can't copy if there is filtered content
        return not self.filtered('visible').changelog.filteredrevs

    def join(self, f):
        # path of f under .hg/
        return os.path.join(self.path, f)

    def wjoin(self, f):
        # path of f under the working directory
        return os.path.join(self.root, f)

    def file(self, f):
        # return the filelog for f; a leading '/' is stripped
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.sopener, f)

    def changectx(self, changeid):
        return self[changeid]

    def parents(self, changeid=None):
        '''get list of changectxs for parents of changeid'''
        return self[changeid].parents()

    def setparents(self, p1, p2=nullid):
        copies = self.dirstate.setparents(p1, p2)
        pctx = self[p1]
        if copies:
            # Adjust copy records, the dirstate cannot do it, it
            # requires access to parents manifests. Preserve them
            # only for entries added to first parent.
            for f in copies:
                if f not in pctx and copies[f] in pctx:
                    self.dirstate.copy(copies[f], f)
        if p2 == nullid:
            # drop copy records whose source and dest are both outside p1
            for f, s in sorted(self.dirstate.copies().items()):
                if f not in pctx and s not in pctx:
                    self.dirstate.copy(None, f)

    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)

    def getcwd(self):
        return self.dirstate.getcwd()

    def pathto(self, f, cwd=None):
        return self.dirstate.pathto(f, cwd)

    def wfile(self, f, mode='r'):
        # open f in the working directory
        return self.wopener(f, mode)

    def _link(self, f):
        # is f a symlink in the working directory?
        return self.wvfs.islink(f)
798 797
    def _loadfilter(self, filter):
        """load and cache the (matcher, filterfn, params) list for *filter*

        filter is a config section name such as 'encode' or 'decode'.
        """
        if filter not in self.filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                if cmd == '!':
                    # '!' disables the pattern
                    continue
                mf = matchmod.match(self.root, '', [pat])
                fn = None
                params = cmd
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        # registered data filter: pass the remainder of the
                        # command string as its parameters
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    # no registered filter matched: fall back to util.filter
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l
        return self.filterpats[filter]
822 821
823 822 def _filter(self, filterpats, filename, data):
824 823 for mf, fn, cmd in filterpats:
825 824 if mf(filename):
826 825 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
827 826 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
828 827 break
829 828
830 829 return data
831 830
    @unfilteredpropertycache
    def _encodefilterpats(self):
        # filters applied when reading from the working directory
        return self._loadfilter('encode')

    @unfilteredpropertycache
    def _decodefilterpats(self):
        # filters applied when writing to the working directory
        return self._loadfilter('decode')

    def adddatafilter(self, name, filter):
        # register a named data filter usable from [encode]/[decode] config
        self._datafilters[name] = filter

    def wread(self, filename):
        """read filename from the working directory, applying encode filters"""
        if self._link(filename):
            data = self.wvfs.readlink(filename)
        else:
            data = self.wopener.read(filename)
        return self._filter(self._encodefilterpats, filename, data)

    def wwrite(self, filename, data, flags):
        """write data to the working directory, applying decode filters

        flags may contain 'l' (write as a symlink) and 'x' (set executable).
        """
        data = self._filter(self._decodefilterpats, filename, data)
        if 'l' in flags:
            self.wopener.symlink(data, filename)
        else:
            self.wopener.write(filename, data)
            if 'x' in flags:
                self.wvfs.setflags(filename, False, True)

    def wwritedata(self, filename, data):
        # apply decode filters without touching the filesystem
        return self._filter(self._decodefilterpats, filename, data)
861 860
862 861 def transaction(self, desc, report=None):
863 862 tr = self._transref and self._transref() or None
864 863 if tr and tr.running():
865 864 return tr.nest()
866 865
867 866 # abort here if the journal already exists
868 867 if self.svfs.exists("journal"):
869 868 raise error.RepoError(
870 869 _("abandoned transaction found"),
871 870 hint=_("run 'hg recover' to clean up transaction"))
872 871
873 872 def onclose():
874 873 self.store.write(self._transref())
875 874
876 875 self._writejournal(desc)
877 876 renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()]
878 877 rp = report and report or self.ui.warn
879 878 tr = transaction.transaction(rp, self.sopener,
880 879 "journal",
881 880 aftertrans(renames),
882 881 self.store.createmode,
883 882 onclose)
884 883 self._transref = weakref.ref(tr)
885 884 return tr
886 885
887 886 def _journalfiles(self):
888 887 return ((self.svfs, 'journal'),
889 888 (self.vfs, 'journal.dirstate'),
890 889 (self.vfs, 'journal.branch'),
891 890 (self.vfs, 'journal.desc'),
892 891 (self.vfs, 'journal.bookmarks'),
893 892 (self.svfs, 'journal.phaseroots'))
894 893
895 894 def undofiles(self):
896 895 return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
897 896
    def _writejournal(self, desc):
        """Snapshot non-store state into journal.* files for rollback.

        desc is stored in journal.desc together with the current
        changelog length, so a later rollback can report what it undoes.
        """
        self.opener.write("journal.dirstate",
                          self.opener.tryread("dirstate"))
        self.opener.write("journal.branch",
                          encoding.fromlocal(self.dirstate.branch()))
        self.opener.write("journal.desc",
                          "%d\n%s\n" % (len(self), desc))
        self.opener.write("journal.bookmarks",
                          self.opener.tryread("bookmarks"))
        self.sopener.write("journal.phaseroots",
                          self.sopener.tryread("phaseroots"))
909 908
    def recover(self):
        """Roll back an interrupted transaction, if one is present.

        Returns True when a journal was found and rolled back, False
        otherwise.
        """
        lock = self.lock()
        try:
            if self.svfs.exists("journal"):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, "journal",
                                     self.ui.warn)
                # files were written behind the caches' back; drop them
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            lock.release()
924 923
925 924 def rollback(self, dryrun=False, force=False):
926 925 wlock = lock = None
927 926 try:
928 927 wlock = self.wlock()
929 928 lock = self.lock()
930 929 if self.svfs.exists("undo"):
931 930 return self._rollback(dryrun, force)
932 931 else:
933 932 self.ui.warn(_("no rollback information available\n"))
934 933 return 1
935 934 finally:
936 935 release(lock, wlock)
937 936
    @unfilteredmethod # Until we get smarter cache management
    def _rollback(self, dryrun, force):
        """Implementation of rollback(): undo the last transaction.

        Reads undo.desc to report what is being undone, refuses (unless
        *force*) to roll back a commit while the working directory is
        not checked out at tip, then restores bookmarks, phase roots
        and -- if a previous dirstate parent disappeared -- the dirstate
        and branch.  Returns 0.
        """
        ui = self.ui
        try:
            args = self.opener.read('undo.desc').splitlines()
            (oldlen, desc, detail) = (int(args[0]), args[1], None)
            if len(args) >= 3:
                detail = args[2]
            oldtip = oldlen - 1

            if detail and ui.verbose:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s: %s)\n')
                       % (oldtip, desc, detail))
            else:
                msg = (_('repository tip rolled back to revision %s'
                         ' (undo %s)\n')
                       % (oldtip, desc))
        except IOError:
            # no undo.desc: transaction of unknown origin
            msg = _('rolling back unknown transaction\n')
            desc = None

        if not force and self['.'] != self['tip'] and desc == 'commit':
            raise util.Abort(
                _('rollback of last commit while not checked out '
                  'may lose data'), hint=_('use -f to force'))

        ui.status(msg)
        if dryrun:
            return 0

        parents = self.dirstate.parents()
        self.destroying()
        transaction.rollback(self.sopener, 'undo', ui.warn)
        if self.vfs.exists('undo.bookmarks'):
            self.vfs.rename('undo.bookmarks', 'bookmarks')
        if self.svfs.exists('undo.phaseroots'):
            self.svfs.rename('undo.phaseroots', 'phaseroots')
        self.invalidate()

        # did the rollback remove either of the previous dirstate parents?
        parentgone = (parents[0] not in self.changelog.nodemap or
                      parents[1] not in self.changelog.nodemap)
        if parentgone:
            self.vfs.rename('undo.dirstate', 'dirstate')
            try:
                branch = self.opener.read('undo.branch')
                self.dirstate.setbranch(encoding.tolocal(branch))
            except IOError:
                ui.warn(_('named branch could not be reset: '
                          'current branch is still \'%s\'\n')
                        % self.dirstate.branch())

            self.dirstate.invalidate()
            parents = tuple([p.rev() for p in self.parents()])
            if len(parents) > 1:
                ui.status(_('working directory now based on '
                            'revisions %d and %d\n') % parents)
            else:
                ui.status(_('working directory now based on '
                            'revision %d\n') % parents)
        # TODO: if we know which new heads may result from this rollback, pass
        # them to destroy(), which will prevent the branchhead cache from being
        # invalidated.
        self.destroyed()
        return 0
1003 1002
1004 1003 def invalidatecaches(self):
1005 1004
1006 1005 if '_tagscache' in vars(self):
1007 1006 # can't use delattr on proxy
1008 1007 del self.__dict__['_tagscache']
1009 1008
1010 1009 self.unfiltered()._branchcaches.clear()
1011 1010 self.invalidatevolatilesets()
1012 1011
    def invalidatevolatilesets(self):
        """Drop caches derived from changelog contents: the filtered
        revision cache and the obsolescence caches."""
        self.filteredrevcache.clear()
        obsolete.clearobscaches(self)
1016 1015
    def invalidatedirstate(self):
        '''Invalidates the dirstate, causing the next call to dirstate
        to check if it was modified since the last time it was read,
        rereading it if it has.

        This is different to dirstate.invalidate() that it doesn't always
        rereads the dirstate. Use dirstate.invalidate() if you want to
        explicitly read the dirstate again (i.e. restoring it to a previous
        known good state).'''
        if hasunfilteredcache(self, 'dirstate'):
            # drop every cached property of the dirstate, then the
            # dirstate property itself, so the next access re-reads it
            for k in self.dirstate._filecache:
                try:
                    delattr(self.dirstate, k)
                except AttributeError:
                    pass
            delattr(self.unfiltered(), 'dirstate')
1033 1032
1034 1033 def invalidate(self):
1035 1034 unfiltered = self.unfiltered() # all file caches are stored unfiltered
1036 1035 for k in self._filecache:
1037 1036 # dirstate is invalidated separately in invalidatedirstate()
1038 1037 if k == 'dirstate':
1039 1038 continue
1040 1039
1041 1040 try:
1042 1041 delattr(unfiltered, k)
1043 1042 except AttributeError:
1044 1043 pass
1045 1044 self.invalidatecaches()
1046 1045 self.store.invalidatecaches()
1047 1046
1048 1047 def invalidateall(self):
1049 1048 '''Fully invalidates both store and non-store parts, causing the
1050 1049 subsequent operation to reread any outside changes.'''
1051 1050 # extension should hook this to invalidate its caches
1052 1051 self.invalidate()
1053 1052 self.invalidatedirstate()
1054 1053
    def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc):
        """Acquire the lock file *lockname* on *vfs*.

        When *wait* is false a held lock raises error.LockHeld right
        away; otherwise we warn and retry with the configured timeout
        (ui.timeout, default 600 seconds).  releasefn runs on release;
        acquirefn, if given, runs once the lock is obtained.  Returns
        the lock object.
        """
        try:
            # first try a non-blocking acquire (timeout 0)
            l = lockmod.lock(vfs, lockname, 0, releasefn, desc=desc)
        except error.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lockmod.lock(vfs, lockname,
                             int(self.ui.config("ui", "timeout", "600")),
                             releasefn, desc=desc)
            self.ui.warn(_("got lock after %s seconds\n") % l.delay)
        if acquirefn:
            acquirefn()
        return l
1071 1070
1072 1071 def _afterlock(self, callback):
1073 1072 """add a callback to the current repository lock.
1074 1073
1075 1074 The callback will be executed on lock release."""
1076 1075 l = self._lockref and self._lockref()
1077 1076 if l:
1078 1077 l.postrelease.append(callback)
1079 1078 else:
1080 1079 callback()
1081 1080
    def lock(self, wait=True):
        '''Lock the repository store (.hg/store) and return a weak reference
        to the lock. Use this before modifying the store (e.g. committing or
        stripping). If you are opening a transaction, get a lock as well.)'''
        l = self._lockref and self._lockref()
        if l is not None and l.held:
            # re-enter the lock we already hold
            l.lock()
            return l

        def unlock():
            # on release, refresh the cached state of every file-backed
            # property (the dirstate has its own lock/refresh cycle)
            for k, ce in self._filecache.items():
                if k == 'dirstate' or k not in self.__dict__:
                    continue
                ce.refresh()

        l = self._lock(self.svfs, "lock", wait, unlock,
                       self.invalidate, _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l
1101 1100
    def wlock(self, wait=True):
        '''Lock the non-store parts of the repository (everything under
        .hg except .hg/store) and return a weak reference to the lock.
        Use this before modifying files in .hg.'''
        l = self._wlockref and self._wlockref()
        if l is not None and l.held:
            # re-enter the lock we already hold
            l.lock()
            return l

        def unlock():
            # persist the dirstate and refresh its cache entry on release
            self.dirstate.write()
            self._filecache['dirstate'].refresh()

        l = self._lock(self.vfs, "wlock", wait, unlock,
                       self.invalidatedirstate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l
1120 1119
    def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        fctx: the filectx to commit
        manifest1, manifest2: manifests of the commit's parents
        linkrev: changelog revision the new filelog entry will link to
        tr: the active transaction
        changelist: list that receives the filename when the file (or
        its flags) changed

        Returns the filelog node to record in the new manifest: a fresh
        node when content/copy-metadata changed, otherwise the reused
        first-parent node.
        """

        fname = fctx.path()
        text = fctx.data()
        flog = self.file(fname)
        fparent1 = manifest1.get(fname, nullid)
        fparent2 = fparent2o = manifest2.get(fname, nullid)

        meta = {}
        copy = fctx.renamed()
        if copy and copy[0] != fname:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                 should record that bar descends from
            #                 bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4   as the merge base
            #

            cfname = copy[0]
            crev = manifest1.get(cfname)
            newfparent = fparent2

            if manifest2: # branch merge
                if fparent2 == nullid or crev is None: # copied on remote side
                    if cfname in manifest2:
                        crev = manifest2[cfname]
                        newfparent = fparent1

            # find source in nearest ancestor if we've lost track
            if not crev:
                self.ui.debug(" %s: searching for copy revision for %s\n" %
                              (fname, cfname))
                for ancestor in self[None].ancestors():
                    if cfname in ancestor:
                        crev = ancestor[cfname].filenode()
                        break

            if crev:
                self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
                meta["copy"] = cfname
                meta["copyrev"] = hex(crev)
                fparent1, fparent2 = nullid, newfparent
            else:
                self.ui.warn(_("warning: can't find ancestor for '%s' "
                               "copied from '%s'!\n") % (fname, cfname))

        elif fparent1 == nullid:
            fparent1, fparent2 = fparent2, nullid
        elif fparent2 != nullid:
            # is one parent an ancestor of the other?
            fparentancestors = flog.commonancestorsheads(fparent1, fparent2)
            if fparent1 in fparentancestors:
                fparent1, fparent2 = fparent2, nullid
            elif fparent2 in fparentancestors:
                fparent2 = nullid

        # is the file changed?
        if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
            changelist.append(fname)
            return flog.add(text, meta, tr, linkrev, fparent1, fparent2)

        # are just the flags changed during merge?
        if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
            changelist.append(fname)

        return fparent1
1202 1201
    @unfilteredmethod
    def commit(self, text="", user=None, date=None, match=None, force=False,
               editor=False, extra={}):
        """Add a new revision to current repository.

        Revision information is gathered from the working directory,
        match can be used to filter the committed files. If editor is
        supplied, it is called to get a commit message.

        Returns the new changelog node, or None when there is nothing
        to commit.

        NOTE(review): *extra* uses a mutable default argument and is
        passed through to context.workingctx -- callers must not mutate
        it; consider switching the default to None.
        """

        def fail(f, msg):
            raise util.Abort('%s: %s' % (f, msg))

        if not match:
            match = matchmod.always(self.root, '')

        if not force:
            # record visited directories so explicit patterns naming a
            # directory can be validated below
            vdirs = []
            match.explicitdir = vdirs.append
            match.bad = fail

        wlock = self.wlock()
        try:
            wctx = self[None]
            merge = len(wctx.parents()) > 1

            if (not force and merge and match and
                (match.files() or match.anypats())):
                raise util.Abort(_('cannot partially commit a merge '
                                   '(do not specify files or patterns)'))

            changes = self.status(match=match, clean=force)
            if force:
                changes[0].extend(changes[6]) # mq may commit unchanged files

            # check subrepos
            subs = []
            commitsubs = set()
            newstate = wctx.substate.copy()
            # only manage subrepos and .hgsubstate if .hgsub is present
            if '.hgsub' in wctx:
                # we'll decide whether to track this ourselves, thanks
                for c in changes[:3]:
                    if '.hgsubstate' in c:
                        c.remove('.hgsubstate')

                # compare current state to last committed state
                # build new substate based on last committed state
                oldstate = wctx.p1().substate
                for s in sorted(newstate.keys()):
                    if not match(s):
                        # ignore working copy, use old state if present
                        if s in oldstate:
                            newstate[s] = oldstate[s]
                            continue
                        if not force:
                            raise util.Abort(
                                _("commit with new subrepo %s excluded") % s)
                    if wctx.sub(s).dirty(True):
                        if not self.ui.configbool('ui', 'commitsubrepos'):
                            raise util.Abort(
                                _("uncommitted changes in subrepo %s") % s,
                                hint=_("use --subrepos for recursive commit"))
                        subs.append(s)
                        commitsubs.add(s)
                    else:
                        bs = wctx.sub(s).basestate()
                        newstate[s] = (newstate[s][0], bs, newstate[s][2])
                        if oldstate.get(s, (None, None, None))[1] != bs:
                            subs.append(s)

                # check for removed subrepos
                for p in wctx.parents():
                    r = [s for s in p.substate if s not in newstate]
                    subs += [s for s in r if match(s)]
                if subs:
                    if (not match('.hgsub') and
                        '.hgsub' in (wctx.modified() + wctx.added())):
                        raise util.Abort(
                            _("can't commit subrepos without .hgsub"))
                    changes[0].insert(0, '.hgsubstate')

            elif '.hgsub' in changes[2]:
                # clean up .hgsubstate when .hgsub is removed
                if ('.hgsubstate' in wctx and
                    '.hgsubstate' not in changes[0] + changes[1] + changes[2]):
                    changes[2].insert(0, '.hgsubstate')

            # make sure all explicit patterns are matched
            if not force and match.files():
                matched = set(changes[0] + changes[1] + changes[2])

                for f in match.files():
                    f = self.dirstate.normalize(f)
                    if f == '.' or f in matched or f in wctx.substate:
                        continue
                    if f in changes[3]: # missing
                        fail(f, _('file not found!'))
                    if f in vdirs: # visited directory
                        d = f + '/'
                        for mf in matched:
                            if mf.startswith(d):
                                break
                        else:
                            fail(f, _("no match under directory!"))
                    elif f not in self.dirstate:
                        fail(f, _("file not tracked!"))

            cctx = context.workingctx(self, text, user, date, extra, changes)

            # nothing changed and nothing forces an (empty) commit: bail
            if (not force and not extra.get("close") and not merge
                and not cctx.files()
                and wctx.branch() == wctx.p1().branch()):
                return None

            if merge and cctx.deleted():
                raise util.Abort(_("cannot commit merge with missing files"))

            ms = mergemod.mergestate(self)
            for f in changes[0]:
                if f in ms and ms[f] == 'u':
                    raise util.Abort(_("unresolved merge conflicts "
                                      "(see hg help resolve)"))

            if editor:
                cctx._text = editor(self, cctx, subs)
            edited = (text != cctx._text)

            # Save commit message in case this transaction gets rolled back
            # (e.g. by a pretxncommit hook). Leave the content alone on
            # the assumption that the user will use the same editor again.
            msgfn = self.savecommitmessage(cctx._text)

            # commit subs and write new state
            if subs:
                for s in sorted(commitsubs):
                    sub = wctx.sub(s)
                    self.ui.status(_('committing subrepository %s\n') %
                                   subrepo.subrelpath(sub))
                    sr = sub.commit(cctx._text, user, date)
                    newstate[s] = (newstate[s][0], sr)
                subrepo.writestate(self, newstate)

            p1, p2 = self.dirstate.parents()
            hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
            try:
                self.hook("precommit", throw=True, parent1=hookp1,
                          parent2=hookp2)
                ret = self.commitctx(cctx, True)
            except: # re-raises
                if edited:
                    self.ui.write(
                        _('note: commit message saved in %s\n') % msgfn)
                raise

            # update bookmarks, dirstate and mergestate
            bookmarks.update(self, [p1, p2], ret)
            cctx.markcommitted(ret)
            ms.reset()
        finally:
            wlock.release()

        def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
            # hook runs after the lock is released (or immediately when
            # no lock is held) -- see _afterlock
            self.hook("commit", node=node, parent1=parent1, parent2=parent2)
        self._afterlock(commithook)
        return ret
1369 1368
    @unfilteredmethod
    def commitctx(self, ctx, error=False):
        """Add a new revision to current repository.
        Revision information is passed via the context argument.

        When *error* is true, an IOError while committing an individual
        file is fatal; otherwise an ENOENT is demoted to a removal.
        Returns the new changelog node.
        """

        tr = lock = None
        removed = list(ctx.removed())
        p1, p2 = ctx.p1(), ctx.p2()
        user = ctx.user()

        lock = self.lock()
        try:
            tr = self.transaction("commit")
            trp = weakref.proxy(tr)

            if ctx.files():
                m1 = p1.manifest().copy()
                m2 = p2.manifest()

                # check in files
                new = {}
                changed = []
                linkrev = len(self)
                for f in sorted(ctx.modified() + ctx.added()):
                    self.ui.note(f + "\n")
                    try:
                        fctx = ctx[f]
                        new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
                                                  changed)
                        m1.set(f, fctx.flags())
                    except OSError, inst:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    except IOError, inst:
                        errcode = getattr(inst, 'errno', errno.ENOENT)
                        if error or errcode and errcode != errno.ENOENT:
                            self.ui.warn(_("trouble committing %s!\n") % f)
                            raise
                        else:
                            # file vanished: treat it as removed
                            removed.append(f)

                # update manifest
                m1.update(new)
                removed = [f for f in sorted(removed) if f in m1 or f in m2]
                drop = [f for f in removed if f in m1]
                for f in drop:
                    del m1[f]
                mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
                                       p2.manifestnode(), (new, drop))
                files = changed + removed
            else:
                # no file changes: reuse the first parent's manifest
                mn = p1.manifestnode()
                files = []

            # update changelog
            self.changelog.delayupdate()
            n = self.changelog.add(mn, files, ctx.description(),
                                   trp, p1.node(), p2.node(),
                                   user, ctx.date(), ctx.extra().copy())
            p = lambda: self.changelog.writepending() and self.root or ""
            xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2, pending=p)
            self.changelog.finalize(trp)
            # set the new commit is proper phase
            targetphase = subrepo.newcommitphase(self.ui, ctx)
            if targetphase:
                # retract boundary do not alter parent changeset.
                # if a parent have higher the resulting phase will
                # be compliant anyway
                #
                # if minimal phase was 0 we don't need to retract anything
                phases.retractboundary(self, tr, targetphase, [n])
            tr.close()
            branchmap.updatecache(self.filtered('served'))
            return n
        finally:
            if tr:
                tr.release()
            lock.release()
1451 1450
    @unfilteredmethod
    def destroying(self):
        '''Inform the repository that nodes are about to be destroyed.
        Intended for use by strip and rollback, so there's a common
        place for anything that has to be done before destroying history.

        This is mostly useful for saving state that is in memory and waiting
        to be flushed when the current lock is released. Because a call to
        destroyed is imminent, the repo will be invalidated causing those
        changes to stay in memory (waiting for the next unlock), or vanish
        completely.

        Currently this flushes a dirty phasecache so pending phase moves
        survive the coming invalidation.
        '''
        # When using the same lock to commit and strip, the phasecache is left
        # dirty after committing. Then when we strip, the repo is invalidated,
        # causing those changes to disappear.
        if '_phasecache' in vars(self):
            self._phasecache.write()
1469 1468
    @unfilteredmethod
    def destroyed(self):
        '''Inform the repository that nodes have been destroyed.
        Intended for use by strip and rollback, so there's a common
        place for anything that has to be done after destroying history.

        Filters destroyed nodes out of the phase cache, refreshes the
        'served' branch cache, and invalidates the in-memory caches.
        '''
        # When one tries to:
        # 1) destroy nodes thus calling this method (e.g. strip)
        # 2) use phasecache somewhere (e.g. commit)
        #
        # then 2) will fail because the phasecache contains nodes that were
        # removed. We can either remove phasecache from the filecache,
        # causing it to reload next time it is accessed, or simply filter
        # the removed nodes now and write the updated cache.
        self._phasecache.filterunknown(self)
        self._phasecache.write()

        # update the 'served' branch cache to help read only server process
        # Thanks to branchcache collaboration this is done from the nearest
        # filtered subset and it is expected to be fast.
        branchmap.updatecache(self.filtered('served'))

        # Ensure the persistent tag cache is updated. Doing it now
        # means that the tag cache only has to worry about destroyed
        # heads immediately after a strip/rollback. That in turn
        # guarantees that "cachetip == currenttip" (comparing both rev
        # and node) always means no nodes have been added or destroyed.

        # XXX this is suboptimal when qrefresh'ing: we strip the current
        # head, refresh the tag cache, then immediately add a new head.
        # But I think doing it this way is necessary for the "instant
        # tag cache retrieval" case to work.
        self.invalidate()
1503 1502
1504 1503 def walk(self, match, node=None):
1505 1504 '''
1506 1505 walk recursively through the directory tree or a given
1507 1506 changeset, finding all files matched by the match
1508 1507 function
1509 1508 '''
1510 1509 return self[node].walk(match)
1511 1510
1512 1511 def status(self, node1='.', node2=None, match=None,
1513 1512 ignored=False, clean=False, unknown=False,
1514 1513 listsubrepos=False):
1515 1514 '''a convenience method that calls node1.status(node2)'''
1516 1515 return self[node1].status(node2, match, ignored, clean, unknown,
1517 1516 listsubrepos)
1518 1517
1519 1518 def heads(self, start=None):
1520 1519 heads = self.changelog.heads(start)
1521 1520 # sort the output in rev descending order
1522 1521 return sorted(heads, key=self.changelog.rev, reverse=True)
1523 1522
1524 1523 def branchheads(self, branch=None, start=None, closed=False):
1525 1524 '''return a (possibly filtered) list of heads for the given branch
1526 1525
1527 1526 Heads are returned in topological order, from newest to oldest.
1528 1527 If branch is None, use the dirstate branch.
1529 1528 If start is not None, return only heads reachable from start.
1530 1529 If closed is True, return heads that are marked as closed as well.
1531 1530 '''
1532 1531 if branch is None:
1533 1532 branch = self[None].branch()
1534 1533 branches = self.branchmap()
1535 1534 if branch not in branches:
1536 1535 return []
1537 1536 # the cache returns heads ordered lowest to highest
1538 1537 bheads = list(reversed(branches.branchheads(branch, closed=closed)))
1539 1538 if start is not None:
1540 1539 # filter out the heads that cannot be reached from startrev
1541 1540 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1542 1541 bheads = [h for h in bheads if h in fbheads]
1543 1542 return bheads
1544 1543
1545 1544 def branches(self, nodes):
1546 1545 if not nodes:
1547 1546 nodes = [self.changelog.tip()]
1548 1547 b = []
1549 1548 for n in nodes:
1550 1549 t = n
1551 1550 while True:
1552 1551 p = self.changelog.parents(n)
1553 1552 if p[1] != nullid or p[0] == nullid:
1554 1553 b.append((t, n, p[0], p[1]))
1555 1554 break
1556 1555 n = p[0]
1557 1556 return b
1558 1557
1559 1558 def between(self, pairs):
1560 1559 r = []
1561 1560
1562 1561 for top, bottom in pairs:
1563 1562 n, l, i = top, [], 0
1564 1563 f = 1
1565 1564
1566 1565 while n != bottom and n != nullid:
1567 1566 p = self.changelog.parents(n)[0]
1568 1567 if i == f:
1569 1568 l.append(n)
1570 1569 f = f * 2
1571 1570 n = p
1572 1571 i += 1
1573 1572
1574 1573 r.append(l)
1575 1574
1576 1575 return r
1577 1576
1578 1577 def pull(self, remote, heads=None, force=False):
1579 1578 return exchange.pull (self, remote, heads, force)
1580 1579
    def checkpush(self, pushop):
        """Extensions can override this function if additional checks have
        to be performed before pushing, or call it if they override push
        command.

        pushop: presumably an exchange push-operation object describing
        the push (see exchange.push).  The default implementation does
        nothing.
        """
        pass
1587 1586
    @unfilteredpropertycache
    def prepushoutgoinghooks(self):
        """Return util.hooks consists of "(repo, remote, outgoing)"
        functions, which are called before pushing changesets.
        """
        # NOTE(review): cached via unfilteredpropertycache, so the same
        # hooks container appears to be reused across calls -- confirm
        return util.hooks()
1594 1593
    def push(self, remote, force=False, revs=None, newbranch=False):
        # delegate to the exchange module, which implements the push logic
        return exchange.push(self, remote, force, revs, newbranch)
1597 1596
    def stream_in(self, remote, requirements):
        """Clone by streaming raw store files from *remote*.

        Wire format: a status line (0 = ok), then a "<nfiles> <nbytes>"
        header, then per file a "<name> NUL <size>" line followed by
        <size> bytes of content.  *requirements* is updated with this
        repo's non-format requirements and applied.  Returns
        len(self.heads()) + 1 (pull-style result).
        """
        lock = self.lock()
        try:
            # Save remote branchmap. We will use it later
            # to speed up branchcache creation
            rbranchmap = None
            if remote.capable("branchmap"):
                rbranchmap = remote.branchmap()

            fp = remote.stream_out()
            l = fp.readline()
            try:
                resp = int(l)
            except ValueError:
                raise error.ResponseError(
                    _('unexpected response from remote server:'), l)
            if resp == 1:
                raise util.Abort(_('operation forbidden by server'))
            elif resp == 2:
                raise util.Abort(_('locking the remote repository failed'))
            elif resp != 0:
                raise util.Abort(_('the server sent an unknown error code'))
            self.ui.status(_('streaming all changes\n'))
            l = fp.readline()
            try:
                total_files, total_bytes = map(int, l.split(' ', 1))
            except (ValueError, TypeError):
                raise error.ResponseError(
                    _('unexpected response from remote server:'), l)
            self.ui.status(_('%d files to transfer, %s of data\n') %
                           (total_files, util.bytecount(total_bytes)))
            handled_bytes = 0
            self.ui.progress(_('clone'), 0, total=total_bytes)
            start = time.time()

            tr = self.transaction(_('clone'))
            try:
                for i in xrange(total_files):
                    # XXX doesn't support '\n' or '\r' in filenames
                    l = fp.readline()
                    try:
                        name, size = l.split('\0', 1)
                        size = int(size)
                    except (ValueError, TypeError):
                        raise error.ResponseError(
                            _('unexpected response from remote server:'), l)
                    if self.ui.debugflag:
                        self.ui.debug('adding %s (%s)\n' %
                                      (name, util.bytecount(size)))
                    # for backwards compat, name was partially encoded
                    ofp = self.sopener(store.decodedir(name), 'w')
                    for chunk in util.filechunkiter(fp, limit=size):
                        handled_bytes += len(chunk)
                        self.ui.progress(_('clone'), handled_bytes,
                                         total=total_bytes)
                        ofp.write(chunk)
                    ofp.close()
                tr.close()
            finally:
                tr.release()

            # Writing straight to files circumvented the inmemory caches
            self.invalidate()

            elapsed = time.time() - start
            if elapsed <= 0:
                elapsed = 0.001
            self.ui.progress(_('clone'), None)
            self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                           (util.bytecount(total_bytes), elapsed,
                            util.bytecount(total_bytes / elapsed)))

            # new requirements = old non-format requirements +
            #                    new format-related
            # requirements from the streamed-in repository
            requirements.update(set(self.requirements) - self.supportedformats)
            self._applyrequirements(requirements)
            self._writerequirements()

            if rbranchmap:
                # seed the local branch cache from the remote branchmap
                rbheads = []
                for bheads in rbranchmap.itervalues():
                    rbheads.extend(bheads)

                if rbheads:
                    rtiprev = max((int(self.changelog.rev(node))
                                   for node in rbheads))
                    cache = branchmap.branchcache(rbranchmap,
                                                  self[rtiprev].node(),
                                                  rtiprev)
                    # Try to stick it as low as possible
                    # filter above served are unlikely to be fetch from a clone
                    for candidate in ('base', 'immutable', 'served'):
                        rview = self.filtered(candidate)
                        if cache.validfor(rview):
                            self._branchcaches[candidate] = cache
                            cache.write(rview)
                            break
            self.invalidate()
            return len(self.heads()) + 1
        finally:
            lock.release()
1700 1699
1701 1700 def clone(self, remote, heads=[], stream=False):
1702 1701 '''clone remote repository.
1703 1702
1704 1703 keyword arguments:
1705 1704 heads: list of revs to clone (forces use of pull)
1706 1705 stream: use streaming clone if possible'''
1707 1706
1708 1707 # now, all clients that can request uncompressed clones can
1709 1708 # read repo formats supported by all servers that can serve
1710 1709 # them.
1711 1710
1712 1711 # if revlog format changes, client will have to check version
1713 1712 # and format flags on "stream" capability, and use
1714 1713 # uncompressed only if compatible.
1715 1714
1716 1715 if not stream:
1717 1716 # if the server explicitly prefers to stream (for fast LANs)
1718 1717 stream = remote.capable('stream-preferred')
1719 1718
1720 1719 if stream and not heads:
1721 1720 # 'stream' means remote revlog format is revlogv1 only
1722 1721 if remote.capable('stream'):
1723 1722 return self.stream_in(remote, set(('revlogv1',)))
1724 1723 # otherwise, 'streamreqs' contains the remote revlog format
1725 1724 streamreqs = remote.capable('streamreqs')
1726 1725 if streamreqs:
1727 1726 streamreqs = set(streamreqs.split(','))
1728 1727 # if we support it, stream in and adjust our requirements
1729 1728 if not streamreqs - self.supportedformats:
1730 1729 return self.stream_in(remote, streamreqs)
1731 1730 return self.pull(remote, heads)
1732 1731
    def pushkey(self, namespace, key, old, new):
        """Update an entry in a pushkey namespace (bookmarks, phases, ...).

        Fires the prepushkey hook first (throw=True, so a hook may veto
        by raising), performs the push, then fires the pushkey hook with
        the result.  Returns the pushkey.push result.
        """
        self.hook('prepushkey', throw=True, namespace=namespace, key=key,
                  old=old, new=new)
        self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
        ret = pushkey.push(self, namespace, key, old, new)
        self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
                  ret=ret)
        return ret
1741 1740
    def listkeys(self, namespace):
        """List the keys of a pushkey *namespace*.

        Fires the prelistkeys hook (throw=True) before listing and the
        listkeys hook after.  Returns the values from pushkey.list.
        """
        self.hook('prelistkeys', throw=True, namespace=namespace)
        self.ui.debug('listing keys for "%s"\n' % namespace)
        values = pushkey.list(self, namespace)
        self.hook('listkeys', namespace=namespace, values=values)
        return values
1748 1747
1749 1748 def debugwireargs(self, one, two, three=None, four=None, five=None):
1750 1749 '''used to test argument passing over the wire'''
1751 1750 return "%s %s %s %s %s" % (one, two, three, four, five)
1752 1751
1753 1752 def savecommitmessage(self, text):
1754 1753 fp = self.opener('last-message.txt', 'wb')
1755 1754 try:
1756 1755 fp.write(text)
1757 1756 finally:
1758 1757 fp.close()
1759 1758 return self.pathto(fp.name[len(self.root) + 1:])
1760 1759
# used to avoid circular references so destructors work
def aftertrans(files):
    """Return a callback that renames each (vfs, src, dest) entry.

    The triples are copied up front so the callback keeps no reference
    to the caller's list.  A missing source is silently skipped: the
    journal file may not exist yet.
    """
    pending = [tuple(entry) for entry in files]
    def runrenames():
        for vfs, src, dest in pending:
            try:
                vfs.rename(src, dest)
            except OSError:
                # journal file does not yet exist
                pass
    return runrenames
1771 1770
def undoname(fn):
    """Map a journal file path to the matching undo file path."""
    directory, name = os.path.split(fn)
    assert name.startswith('journal')
    return os.path.join(directory, name.replace('journal', 'undo', 1))
1776 1775
def instance(ui, path, create):
    """Repository factory: open (or create) a local repository at *path*,
    after normalizing it with util.urllocalpath."""
    return localrepository(ui, util.urllocalpath(path), create)
1779 1778
def islocal(path):
    """Repositories handled by this module are always local."""
    return True
@@ -1,868 +1,868
1 1 # wireproto.py - generic wire protocol support functions
2 2 #
3 3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import urllib, tempfile, os, sys
9 9 from i18n import _
10 10 from node import bin, hex
11 11 import changegroup as changegroupmod, bundle2, pushkey as pushkeymod
12 12 import peer, error, encoding, util, store, exchange
13 13
14 14
class abstractserverproto(object):
    """Reference description of the server protocol handler API.

    Exists for documentation purposes; concrete handlers need not
    actually subclass it.
    """

    def getargs(self, args):
        """fetch the values of the arguments named in <args>

        Returns a list holding one value per requested argument, in the
        same order as <args>."""
        raise NotImplementedError()

    def getfile(self, fp):
        """copy the entire content of an uploaded file into <fp>

        The wire encoding is::

            (<chunk-size>\n<chunk>)+0\n

        where each chunk size is transmitted as a decimal ascii string.
        """
        raise NotImplementedError()

    def redirect(self):
        """optionally begin capturing stdout and stderr

        See also the `restore` method."""
        raise NotImplementedError()

    # If `redirect` really installs capturing, a `restore` method MUST be
    # defined to undo it and hand back the captured output. If nothing is
    # captured, `restore` MUST NOT exist (callers probe for it with
    # safehasattr).
    #
    # left commented here on purpose
    #
    #def restore(self):
    #    """reinstall previous stdout and stderr and return intercepted stdout
    #    """
    #    raise NotImplementedError()

    def groupchunks(self, cg):
        """iterate over a changegroup in 4096-byte chunks

        Implementations may compress the data on the wire."""
        raise NotImplementedError()
60 60
61 61 # abstract batching support
62 62
class future(object):
    '''placeholder for a value to be set later

    Handed out by batcher for each queued call; the value is filled in
    once the batched request has actually been performed.
    '''
    def set(self, value):
        # a future may only be resolved once; the presence of the
        # 'value' attribute is the "already set" marker
        if util.safehasattr(self, 'value'):
            raise error.RepoError("future is already set")
        self.value = value
69 69
class batcher(object):
    '''base class for batches of commands submittable in a single request

    Any method invoked on an instance is merely queued and immediately
    returns a future for its result. Only when submit() is called are the
    queued calls actually performed and their futures resolved.
    '''
    def __init__(self):
        self.calls = []
    def __getattr__(self, name):
        def call(*args, **opts):
            result = future()
            self.calls.append((name, args, opts, result,))
            return result
        return call
    def submit(self):
        pass
87 87
class localbatch(batcher):
    '''performs the queued calls directly'''
    def __init__(self, local):
        batcher.__init__(self)
        self.local = local
    def submit(self):
        # no wire protocol involved: invoke each queued call immediately
        # and resolve its future with the plain return value
        for name, args, opts, resref in self.calls:
            method = getattr(self.local, name)
            resref.set(method(*args, **opts))
96 96
class remotebatch(batcher):
    '''batches the queued calls; uses as few roundtrips as possible'''
    def __init__(self, remote):
        '''remote must support _submitbatch(encbatch) and
        _submitone(op, encargs)'''
        batcher.__init__(self)
        self.remote = remote
    def submit(self):
        req, rsp = [], []
        for name, args, opts, resref in self.calls:
            mtd = getattr(self.remote, name)
            batchablefn = getattr(mtd, 'batchable', None)
            if batchablefn is not None:
                # batchable method: run the encoding phase of its
                # coroutine now; the decoding phase resumes in _submitreq
                batchable = batchablefn(mtd.im_self, *args, **opts)
                encargsorres, encresref = batchable.next()
                if encresref:
                    req.append((name, encargsorres,))
                    rsp.append((batchable, encresref, resref,))
                else:
                    # the coroutine produced a purely local result
                    resref.set(encargsorres)
            else:
                # non-batchable call: flush everything queued so far to
                # preserve execution order, then invoke directly
                if req:
                    self._submitreq(req, rsp)
                    req, rsp = [], []
                resref.set(mtd(*args, **opts))
        if req:
            self._submitreq(req, rsp)
    def _submitreq(self, req, rsp):
        # one wire roundtrip for the whole batch; resume each coroutine
        # with its encoded reply so it can decode into the caller's future
        encresults = self.remote._submitbatch(req)
        for encres, r in zip(encresults, rsp):
            batchable, encresref, resref = r
            encresref.set(encres)
            resref.set(batchable.next())
130 130
def batchable(f):
    '''annotation for batchable methods

    Such methods must implement a coroutine as follows:

    @batchable
    def sample(self, one, two=None):
        # Handle locally computable results first:
        if not one:
            yield "a local result", None
        # Build list of encoded arguments suitable for your wire protocol:
        encargs = [('one', encode(one),), ('two', encode(two),)]
        # Create future for injection of encoded result:
        encresref = future()
        # Return encoded arguments and future:
        yield encargs, encresref
        # Assuming the future to be filled with the result from the batched
        # request now. Decode it:
        yield decode(encresref.value)

    The decorator returns a function which wraps this coroutine as a plain
    method, but adds the original method as an attribute called "batchable",
    which is used by remotebatch to split the call into separate encoding and
    decoding phases.
    '''
    def plain(*args, **opts):
        # non-batched invocation: drive both coroutine phases at once
        batchable = f(*args, **opts)
        encargsorres, encresref = batchable.next()
        if not encresref:
            return encargsorres # a local result in this case
        self = args[0]
        # perform the wire call immediately and feed the encoded reply
        # back into the coroutine for decoding
        encresref.set(self._submitone(f.func_name, encargsorres))
        return batchable.next()
    setattr(plain, 'batchable', f)
    return plain
166 166
167 167 # list of nodes encoding / decoding
168 168
def decodelist(l, sep=' '):
    """Split a sep-separated string of hex nodes into binary nodes."""
    if not l:
        return []
    return [bin(piece) for piece in l.split(sep)]
173 173
def encodelist(l, sep=' '):
    """Join the hex form of every node in `l` with `sep`."""
    return sep.join(hex(node) for node in l)
176 176
177 177 # batched call argument encoding
178 178
def escapearg(plain):
    """Escape the characters that delimit batched command arguments.

    ':' doubles as the escape character itself, so it must be rewritten
    before any of the other markers are introduced.
    """
    for raw, marker in ((':', '::'), (',', ':,'), (';', ':;'), ('=', ':=')):
        plain = plain.replace(raw, marker)
    return plain
185 185
def unescapearg(escaped):
    """Undo escapearg(); '::' is rewritten back to ':' last."""
    for marker, raw in ((':=', '='), (':;', ';'), (':,', ','), ('::', ':')):
        escaped = escaped.replace(marker, raw)
    return escaped
192 192
193 193 # mapping of options accepted by getbundle and their types
194 194 #
195 195 # Meant to be extended by extensions. It is extensions responsibility to ensure
196 196 # such options are properly processed in exchange.getbundle.
197 197 #
198 198 # supported types are:
199 199 #
200 200 # :nodes: list of binary nodes
201 201 # :csv: list of comma-separated values
202 202 # :plain: string with no transformation needed.
# option name -> wire type; consulted by both the client-side
# wirepeer.getbundle (encoding) and the server-side getbundle (decoding)
gboptsmap = {'heads': 'nodes',
             'common': 'nodes',
             'bundlecaps': 'csv',
             'listkeys': 'csv',
             'cg': 'boolean'}
208 208
209 209 # client side
210 210
class wirepeer(peer.peerrepository):
    """client-side peer backed by the wire protocol

    Maps the peer API onto the low-level _call* primitives (which raise
    NotImplementedError here and are supplied by transport-specific
    subclasses), encoding arguments and decoding replies. Methods marked
    @batchable are coroutines usable through remotebatch.
    """

    def batch(self):
        return remotebatch(self)
    def _submitbatch(self, req):
        # encode each (op, argsdict) as "op name=value,name=value" and
        # join the sub-commands with ';' for the 'batch' wire command
        cmds = []
        for op, argsdict in req:
            args = ','.join('%s=%s' % p for p in argsdict.iteritems())
            cmds.append('%s %s' % (op, args))
        rsp = self._call("batch", cmds=';'.join(cmds))
        return rsp.split(';')
    def _submitone(self, op, args):
        return self._call(op, **args)

    @batchable
    def lookup(self, key):
        self.requirecap('lookup', _('look up remote revision'))
        f = future()
        yield {'key': encoding.fromlocal(key)}, f
        d = f.value
        # reply is "<success flag> <data>\n"; data is a node on success
        # and an error message otherwise
        success, data = d[:-1].split(" ", 1)
        if int(success):
            yield bin(data)
        self._abort(error.RepoError(data))

    @batchable
    def heads(self):
        f = future()
        yield {}, f
        d = f.value
        try:
            yield decodelist(d[:-1])
        except ValueError:
            self._abort(error.ResponseError(_("unexpected response:"), d))

    @batchable
    def known(self, nodes):
        f = future()
        yield {'nodes': encodelist(nodes)}, f
        d = f.value
        try:
            # reply is one '0'/'1' character per queried node
            yield [bool(int(b)) for b in d]
        except ValueError:
            self._abort(error.ResponseError(_("unexpected response:"), d))

    @batchable
    def branchmap(self):
        f = future()
        yield {}, f
        d = f.value
        try:
            # one "<quoted branch name> <encoded heads>" entry per line
            branchmap = {}
            for branchpart in d.splitlines():
                branchname, branchheads = branchpart.split(' ', 1)
                branchname = encoding.tolocal(urllib.unquote(branchname))
                branchheads = decodelist(branchheads)
                branchmap[branchname] = branchheads
            yield branchmap
        except TypeError:
            self._abort(error.ResponseError(_("unexpected response:"), d))

    def branches(self, nodes):
        n = encodelist(nodes)
        d = self._call("branches", nodes=n)
        try:
            br = [tuple(decodelist(b)) for b in d.splitlines()]
            return br
        except ValueError:
            self._abort(error.ResponseError(_("unexpected response:"), d))

    def between(self, pairs):
        batch = 8 # avoid giant requests
        r = []
        for i in xrange(0, len(pairs), batch):
            n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
            d = self._call("between", pairs=n)
            try:
                r.extend(l and decodelist(l) or [] for l in d.splitlines())
            except ValueError:
                self._abort(error.ResponseError(_("unexpected response:"), d))
        return r

    @batchable
    def pushkey(self, namespace, key, old, new):
        if not self.capable('pushkey'):
            yield False, None
        f = future()
        self.ui.debug('preparing pushkey for "%s:%s"\n' % (namespace, key))
        yield {'namespace': encoding.fromlocal(namespace),
               'key': encoding.fromlocal(key),
               'old': encoding.fromlocal(old),
               'new': encoding.fromlocal(new)}, f
        d = f.value
        # first line is the result flag, the rest is remote output
        d, output = d.split('\n', 1)
        try:
            d = bool(int(d))
        except ValueError:
            raise error.ResponseError(
                _('push failed (unexpected response):'), d)
        for l in output.splitlines(True):
            self.ui.status(_('remote: '), l)
        yield d

    @batchable
    def listkeys(self, namespace):
        if not self.capable('pushkey'):
            yield {}, None
        f = future()
        self.ui.debug('preparing listkeys for "%s"\n' % namespace)
        yield {'namespace': encoding.fromlocal(namespace)}, f
        d = f.value
        yield pushkeymod.decodekeys(d)

    def stream_out(self):
        return self._callstream('stream_out')

    def changegroup(self, nodes, kind):
        n = encodelist(nodes)
        f = self._callcompressable("changegroup", roots=n)
        return changegroupmod.unbundle10(f, 'UN')

    def changegroupsubset(self, bases, heads, kind):
        self.requirecap('changegroupsubset', _('look up remote changes'))
        bases = encodelist(bases)
        heads = encodelist(heads)
        f = self._callcompressable("changegroupsubset",
                                   bases=bases, heads=heads)
        return changegroupmod.unbundle10(f, 'UN')

    def getbundle(self, source, **kwargs):
        self.requirecap('getbundle', _('look up remote changes'))
        opts = {}
        # encode every non-None option according to its gboptsmap type
        for key, value in kwargs.iteritems():
            if value is None:
                continue
            keytype = gboptsmap.get(key)
            if keytype is None:
                assert False, 'unexpected'
            elif keytype == 'nodes':
                value = encodelist(value)
            elif keytype == 'csv':
                value = ','.join(value)
            elif keytype == 'boolean':
                value = bool(value)
            elif keytype != 'plain':
                raise KeyError('unknown getbundle option type %s'
                               % keytype)
            opts[key] = value
        f = self._callcompressable("getbundle", **opts)
        bundlecaps = kwargs.get('bundlecaps')
        # the reply format depends on the bundle2 ('HG2X') capability
        if bundlecaps is not None and 'HG2X' in bundlecaps:
            return bundle2.unbundle20(self.ui, f)
        else:
            return changegroupmod.unbundle10(f, 'UN')

    def unbundle(self, cg, heads, source):
        '''Send cg (a readable file-like object representing the
        changegroup to push, typically a chunkbuffer object) to the
        remote server as a bundle.

        When pushing a bundle10 stream, return an integer indicating the
        result of the push (see localrepository.addchangegroup()).

        When pushing a bundle20 stream, return a bundle20 stream.'''

        # if the server supports it, send only a hash of the expected
        # heads instead of the full (possibly huge) list
        if heads != ['force'] and self.capable('unbundlehash'):
            heads = encodelist(['hashed',
                                util.sha1(''.join(sorted(heads))).digest()])
        else:
            heads = encodelist(heads)

        if util.safehasattr(cg, 'deltaheader'):
            # this a bundle10, do the old style call sequence
            ret, output = self._callpush("unbundle", cg, heads=heads)
            if ret == "":
                raise error.ResponseError(
                    _('push failed:'), output)
            try:
                ret = int(ret)
            except ValueError:
                raise error.ResponseError(
                    _('push failed (unexpected response):'), ret)

            for l in output.splitlines(True):
                self.ui.status(_('remote: '), l)
        else:
            # bundle2 push. Send a stream, fetch a stream.
            stream = self._calltwowaystream('unbundle', cg, heads=heads)
            ret = bundle2.unbundle20(self.ui, stream)
        return ret

    def debugwireargs(self, one, two, three=None, four=None, five=None):
        # don't pass optional arguments left at their default value
        opts = {}
        if three is not None:
            opts['three'] = three
        if four is not None:
            opts['four'] = four
        return self._call('debugwireargs', one=one, two=two, **opts)

    def _call(self, cmd, **args):
        """execute <cmd> on the server

        The command is expected to return a simple string.

        returns the server reply as a string."""
        raise NotImplementedError()

    def _callstream(self, cmd, **args):
        """execute <cmd> on the server

        The command is expected to return a stream.

        returns the server reply as a file like object."""
        raise NotImplementedError()

    def _callcompressable(self, cmd, **args):
        """execute <cmd> on the server

        The command is expected to return a stream.

        The stream may have been compressed in some implementations. This
        function takes care of the decompression. This is the only difference
        with _callstream.

        returns the server reply as a file like object.
        """
        raise NotImplementedError()

    def _callpush(self, cmd, fp, **args):
        """execute a <cmd> on server

        The command is expected to be related to a push. Push has a special
        return method.

        returns the server reply as a (ret, output) tuple. ret is either
        empty (error) or a stringified int.
        """
        raise NotImplementedError()

    def _calltwowaystream(self, cmd, fp, **args):
        """execute <cmd> on server

        The command will send a stream to the server and get a stream in reply.
        """
        raise NotImplementedError()

    def _abort(self, exception):
        """clearly abort the wire protocol connection and raise the exception
        """
        raise NotImplementedError()
462 462
463 463 # server side
464 464
465 465 # wire protocol command can either return a string or one of these classes.
class streamres(object):
    """wireproto reply: binary stream

    The call succeeded and produced a stream; consume it by iterating
    over the `self.gen` attribute.
    """
    def __init__(self, gen):
        self.gen = gen
474 474
class pushres(object):
    """wireproto reply: success with simple integer return

    `self.res` holds the integer produced by the successful call.
    """
    def __init__(self, res):
        self.res = res
482 482
class pusherr(object):
    """wireproto reply: failure

    `self.res` carries the error message of the failed call.
    """
    def __init__(self, res):
        self.res = res
490 490
class ooberror(object):
    """wireproto reply: failure of a batch of operation

    Out-of-band error raised while running a batch call; the text is
    kept in `self.message`.
    """
    def __init__(self, message):
        self.message = message
499 499
def dispatch(repo, proto, command):
    """Look up `command` and run its handler against the served repo view."""
    repo = repo.filtered("served")
    handler, spec = commands[command]
    return handler(repo, proto, *proto.getargs(spec))
505 505
def options(cmd, keys, others):
    """Move the entries named in `keys` out of `others` (which is mutated).

    Anything left in `others` afterwards is unexpected and triggers a
    warning on stderr.
    """
    opts = {}
    for key in keys:
        if key in others:
            opts[key] = others.pop(key)
    if others:
        sys.stderr.write("warning: %s ignored unexpected arguments %s\n"
                         % (cmd, ",".join(others)))
    return opts
516 516
# list of commands
commands = {}

def wireprotocommand(name, args=''):
    """Register the decorated function as handler for wire command `name`.

    `args` is the argument specification string stored next to the
    handler in the `commands` table.
    """
    def deco(func):
        commands[name] = (func, args)
        return func
    return deco
526 526
@wireprotocommand('batch', 'cmds *')
def batch(repo, proto, cmds, others):
    # `cmds` is the ';'-separated command list built by
    # wirepeer._submitbatch; each entry is "op name=value,name=value"
    repo = repo.filtered("served")
    res = []
    for pair in cmds.split(';'):
        op, args = pair.split(' ', 1)
        vals = {}
        for a in args.split(','):
            if a:
                n, v = a.split('=')
                vals[n] = unescapearg(v)
        func, spec = commands[op]
        if spec:
            keys = spec.split()
            data = {}
            for k in keys:
                if k == '*':
                    # wildcard: gather every argument not matched by name
                    star = {}
                    for key in vals.keys():
                        if key not in keys:
                            star[key] = vals[key]
                    data['*'] = star
                else:
                    data[k] = vals[k]
            result = func(repo, proto, *[data[k] for k in keys])
        else:
            result = func(repo, proto)
        if isinstance(result, ooberror):
            # abort the whole batch on the first out-of-band error
            return result
        res.append(escapearg(result))
    return ';'.join(res)
558 558
@wireprotocommand('between', 'pairs')
def between(repo, proto, pairs):
    """Decode '-'-separated node pairs, query repo.between and re-encode
    each result list on its own line."""
    decoded = [decodelist(p, '-') for p in pairs.split(" ")]
    lines = [encodelist(b) + "\n" for b in repo.between(decoded)]
    return "".join(lines)
566 566
@wireprotocommand('branchmap')
def branchmap(repo, proto):
    """Serialize the branch map, one 'quoted-name encoded-heads' per line."""
    lines = []
    for branch, nodes in repo.branchmap().iteritems():
        quoted = urllib.quote(encoding.fromlocal(branch))
        lines.append('%s %s' % (quoted, encodelist(nodes)))
    return '\n'.join(lines)
576 576
@wireprotocommand('branches', 'nodes')
def branches(repo, proto, nodes):
    """Run repo.branches over the decoded nodes, one encoded line each."""
    out = []
    for b in repo.branches(decodelist(nodes)):
        out.append(encodelist(b) + "\n")
    return "".join(out)
584 584
585 585
# baseline capability tokens; _capabilities() starts from a copy of this
wireprotocaps = ['lookup', 'changegroupsubset', 'branchmap', 'pushkey',
                 'known', 'getbundle', 'unbundlehash', 'batch']
588 588
def _capabilities(repo, proto):
    """compute the list of capabilities advertised for repo

    Kept separate from the `capabilities` command so extensions can wrap
    it easily:

    - it returns a mutable list, easy to alter
    - changes made here propagate to both the `capabilities` and `hello`
      commands with no further work
    """
    caps = list(wireprotocaps) # copy: callers may mutate the result
    ui = repo.ui
    if _allowstream(ui):
        if ui.configbool('server', 'preferuncompressed', False):
            caps.append('stream-preferred')
        requiredformats = repo.requirements & repo.supportedformats
        if not requiredformats - set(('revlogv1',)):
            # plain revlogv1 repo: the simple 'stream' cap is enough
            caps.append('stream')
        else:
            # otherwise spell out the revlog formats clients must support
            caps.append('streamreqs=%s' % ','.join(requiredformats))
    if ui.configbool('experimental', 'bundle2-exp', False):
        capsblob = bundle2.encodecaps(repo.bundle2caps)
        caps.append('bundle2-exp=' + urllib.quote(capsblob))
    caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
    caps.append('httpheader=1024')
    return caps
617 617
# Extensions that want to alter the advertised capability set should wrap
# `_capabilities`, not this command.
@wireprotocommand('capabilities')
def capabilities(repo, proto):
    caps = _capabilities(repo, proto)
    return ' '.join(caps)
623 623
@wireprotocommand('changegroup', 'roots')
def changegroup(repo, proto, roots):
    """Stream a changegroup built from the decoded root nodes."""
    cg = changegroupmod.changegroup(repo, decodelist(roots), 'serve')
    return streamres(proto.groupchunks(cg))
629 629
@wireprotocommand('changegroupsubset', 'bases heads')
def changegroupsubset(repo, proto, bases, heads):
    """Stream a changegroup bounded by decoded base and head nodes."""
    cg = changegroupmod.changegroupsubset(repo, decodelist(bases),
                                          decodelist(heads), 'serve')
    return streamres(proto.groupchunks(cg))
636 636
@wireprotocommand('debugwireargs', 'one two *')
def debugwireargs(repo, proto, one, two, others):
    """Echo arguments back through the repo, for wire testing."""
    # only accept optional args from the known set
    extra = options('debugwireargs', ['three', 'four'], others)
    return repo.debugwireargs(one, two, **extra)
642 642
# List of options accepted by getbundle.
#
# Meant to be extended by extensions. It is the extension's responsibility to
# ensure such options are properly processed in exchange.getbundle.
#
# NOTE(review): the getbundle command itself consults gboptsmap.keys();
# this list appears to be kept for extension compatibility — confirm.
gboptslist = ['heads', 'common', 'bundlecaps']
648 648
@wireprotocommand('getbundle', '*')
def getbundle(repo, proto, others):
    """Decode the wire-encoded options per gboptsmap and stream the bundle."""
    opts = options('getbundle', gboptsmap.keys(), others)
    for key, value in opts.iteritems():
        kind = gboptsmap[key]
        if kind == 'nodes':
            opts[key] = decodelist(value)
        elif kind == 'csv':
            opts[key] = set(value.split(','))
        elif kind == 'boolean':
            opts[key] = '%i' % bool(value)
        elif kind != 'plain':
            raise KeyError('unknown getbundle option type %s'
                           % kind)
    cg = exchange.getbundle(repo, 'serve', **opts)
    return streamres(proto.groupchunks(cg))
665 665
@wireprotocommand('heads')
def heads(repo, proto):
    """Return the encoded list of repository heads, newline-terminated."""
    return encodelist(repo.heads()) + "\n"
670 670
@wireprotocommand('hello')
def hello(repo, proto):
    '''describe the server in an RFC822-like set of "key: value" lines

    Currently the only field defined is "capabilities", a line of the form:

    capabilities: space separated list of tokens
    '''
    return "capabilities: %s\n" % capabilities(repo, proto)
681 681
@wireprotocommand('listkeys', 'namespace')
def listkeys(repo, proto, namespace):
    """Encode the pushkey listing of the requested namespace."""
    entries = repo.listkeys(encoding.tolocal(namespace)).items()
    return pushkeymod.encodekeys(entries)
686 686
@wireprotocommand('lookup', 'key')
def lookup(repo, proto, key):
    # reply format: "<success flag> <hex node or error message>\n"
    try:
        k = encoding.tolocal(key)
        c = repo[k]
        r = c.hex()
        success = 1
    except Exception, inst:
        # any failure is reported to the client instead of being raised
        r = str(inst)
        success = 0
    return "%s %s\n" % (success, r)
698 698
@wireprotocommand('known', 'nodes *')
def known(repo, proto, nodes, others):
    """Return one '1'/'0' digit per queried node, in query order."""
    flags = repo.known(decodelist(nodes))
    return ''.join(flag and "1" or "0" for flag in flags)
702 702
@wireprotocommand('pushkey', 'namespace key old new')
def pushkey(repo, proto, namespace, key, old, new):
    # compatibility with pre-1.8 clients which were accidentally
    # sending raw binary nodes rather than utf-8-encoded hex
    if len(new) == 20 and new.encode('string-escape') != new:
        # looks like it could be a binary node
        try:
            new.decode('utf-8')
            new = encoding.tolocal(new) # but cleanly decodes as UTF-8
        except UnicodeDecodeError:
            pass # binary, leave unmodified
    else:
        new = encoding.tolocal(new) # normal path

    if util.safehasattr(proto, 'restore'):
        # this protocol captures stdout/stderr: run the pushkey under
        # capture and ship the output back alongside the result flag

        proto.redirect()

        try:
            r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
                             encoding.tolocal(old), new) or False
        except util.Abort:
            r = False

        output = proto.restore()

        return '%s\n%s' % (int(r), output)

    r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
                     encoding.tolocal(old), new)
    return '%s\n' % int(r)
734 734
def _allowstream(ui):
    # streaming clone is enabled by default but can be switched off in
    # the server configuration
    return ui.configbool('server', 'uncompressed', True, untrusted=True)
737 737
def _walkstreamfiles(repo):
    # this is its own function so extensions can override it
    return repo.store.walk()
741 741
@wireprotocommand('stream_out')
def stream(repo, proto):
    '''If the server supports streaming clone, it advertises the "stream"
    capability with a value representing the version and flags of the repo
    it is serving. Client checks to see if it understands the format.

    The format is simple: the server writes out a line with the amount
    of files, then the total amount of bytes to be transferred (separated
    by a space). Then, for each file, the server first writes the filename
    and file size (separated by the null character), then the file contents.
    '''

    if not _allowstream(repo.ui):
        return '1\n'

    entries = []
    total_bytes = 0
    try:
        # get consistent snapshot of repo, lock during scan
        lock = repo.lock()
        try:
            repo.ui.debug('scanning\n')
            for name, ename, size in _walkstreamfiles(repo):
                if size:
                    entries.append((name, size))
                    total_bytes += size
        finally:
            lock.release()
    except error.LockError:
        return '2\n' # error: 2

    def streamer(repo, entries, total):
        '''stream out all metadata files in repository.'''
        yield '0\n' # success
        repo.ui.debug('%d files, %d bytes to transfer\n' %
                      (len(entries), total_bytes))
        yield '%d %d\n' % (len(entries), total_bytes)

        sopener = repo.sopener
        oldaudit = sopener.mustaudit
        debugflag = repo.ui.debugflag
        # NOTE(review): auditing is disabled for the duration of the
        # transfer and restored afterwards (see the except clause below)
        sopener.mustaudit = False

        try:
            for name, size in entries:
                if debugflag:
                    repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
                # partially encode name over the wire for backwards compat
                yield '%s\0%d\n' % (store.encodedir(name), size)
                # small files are read in one go; larger ones are chunked
                if size <= 65536:
                    fp = sopener(name)
                    try:
                        data = fp.read(size)
                    finally:
                        fp.close()
                    yield data
                else:
                    for chunk in util.filechunkiter(sopener(name), limit=size):
                        yield chunk
        # replace with "finally:" when support for python 2.4 has been dropped
        except Exception:
            sopener.mustaudit = oldaudit
            raise
        sopener.mustaudit = oldaudit

    return streamres(streamer(repo, entries, total_bytes))
808 808
@wireprotocommand('unbundle', 'heads')
def unbundle(repo, proto, heads):
    # decode the heads the client believes the server has; checked by
    # exchange.check_heads to detect races with concurrent pushes
    their_heads = decodelist(heads)

    try:
        proto.redirect()

        exchange.check_heads(repo, their_heads, 'preparing changes')

        # write bundle data to temporary file because it can be big
        fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
        fp = os.fdopen(fd, 'wb+')
        r = 0
        try:
            proto.getfile(fp)
            fp.seek(0)
            gen = exchange.readbundle(repo.ui, fp, None)
            r = exchange.unbundle(repo, gen, their_heads, 'serve',
                                  proto._client())
            if util.safehasattr(r, 'addpart'):
                # The return looks streamable, we are in the bundle2 case
                # and should return a stream.
                return streamres(r.getchunks())
            return pushres(r)

        finally:
            fp.close()
            os.unlink(tempname)
    except error.BundleValueError, exc:
        # client sent a bundle2 part the server cannot process; report
        # the offending part back inside an error part
        bundler = bundle2.bundle20(repo.ui)
        errpart = bundler.newpart('B2X:ERROR:UNSUPPORTEDCONTENT')
        if exc.parttype is not None:
            errpart.addparam('parttype', exc.parttype)
        if exc.params:
            errpart.addparam('params', '\0'.join(exc.params))
        return streamres(bundler.getchunks())
    except util.Abort, inst:
        # The old code we moved used sys.stderr directly.
        # We did not change it to minimise code change.
        # This need to be moved to something proper.
        # Feel free to do it.
        if getattr(inst, 'duringunbundle2', False):
            bundler = bundle2.bundle20(repo.ui)
            manargs = [('message', str(inst))]
            advargs = []
            if inst.hint is not None:
                advargs.append(('hint', inst.hint))
            bundler.addpart(bundle2.bundlepart('B2X:ERROR:ABORT',
                                               manargs, advargs))
            return streamres(bundler.getchunks())
        else:
            sys.stderr.write("abort: %s\n" % inst)
            return pushres(0)
    except error.PushRaced, exc:
        if getattr(exc, 'duringunbundle2', False):
            bundler = bundle2.bundle20(repo.ui)
            bundler.newpart('B2X:ERROR:PUSHRACED', [('message', str(exc))])
            return streamres(bundler.getchunks())
        else:
            return pusherr(str(exc))
General Comments 0
You need to be logged in to leave comments. Login now