##// END OF EJS Templates
Store empty (default) branch in branch cache, too....
Thomas Arendsen Hein -
r4167:4574a8cb default
parent child Browse files
Show More
@@ -1,1991 +1,1990 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 import repo
12 12 demandload(globals(), "appendfile changegroup")
13 13 demandload(globals(), "changelog dirstate filelog manifest context")
14 14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 15 demandload(globals(), "os revlog time util")
16 16
17 17 class localrepository(repo.repository):
18 18 capabilities = ('lookup', 'changegroupsubset')
19 19 supported = ('revlogv1', 'store')
20 20
    def __del__(self):
        # drop the transaction handle on teardown so a pending
        # transaction object does not outlive the repository
        self.transhandle = None
    def __init__(self, parentui, path=None, create=0):
        """Open (or, with create=1, initialize) the repository at path.

        If path is None, walk upward from the current directory until a
        '.hg' directory is found.  Raises repo.RepoError when no
        repository exists, when create is requested on an existing one,
        or when the repository requires an unsupported feature.
        """
        repo.repository.__init__(self)
        if not path:
            # no explicit path: search upward for the enclosing repo root
            p = os.getcwd()
            while not os.path.isdir(os.path.join(p, ".hg")):
                oldp = p
                p = os.path.dirname(p)
                if p == oldp:
                    raise repo.RepoError(_("There is no Mercurial repository"
                                           " here (.hg not found)"))
            path = p

        self.path = os.path.join(path, ".hg")
        self.root = os.path.realpath(path)
        self.origroot = path
        # opener/wopener open files relative to .hg / the working dir
        self.opener = util.opener(self.path)
        self.wopener = util.opener(self.root)

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    os.mkdir(path)
                os.mkdir(self.path)
                os.mkdir(os.path.join(self.path, "store"))
                requirements = ("revlogv1", "store")
                reqfile = self.opener("requires", "w")
                for r in requirements:
                    reqfile.write("%s\n" % r)
                reqfile.close()
                # create an invalid changelog
                self.opener("00changelog.i", "a").write(
                    '\0\0\0\2' # represents revlogv2
                    ' dummy changelog to prevent using the old repo layout'
                )
            else:
                raise repo.RepoError(_("repository %s not found") % path)
        elif create:
            raise repo.RepoError(_("repository %s already exists") % path)
        else:
            # find requirements
            try:
                requirements = self.opener("requires").read().splitlines()
            except IOError, inst:
                if inst.errno != errno.ENOENT:
                    raise
                # pre-requirements repository: treat as "no requirements"
                requirements = []
        # check them
        for r in requirements:
            if r not in self.supported:
                raise repo.RepoError(_("requirement '%s' not supported") % r)

        # setup store: with the "store" requirement revlogs live under
        # .hg/store with encoded filenames, otherwise directly in .hg
        if "store" in requirements:
            self.encodefn = util.encodefilename
            self.decodefn = util.decodefilename
            self.spath = os.path.join(self.path, "store")
        else:
            self.encodefn = lambda x: x
            self.decodefn = lambda x: x
            self.spath = self.path
        self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)

        self.ui = ui.ui(parentui=parentui)
        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
        except IOError:
            # a repository without an hgrc is fine
            pass

        v = self.ui.configrevlog()
        self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
        self.revlogv1 = self.revlogversion != revlog.REVLOGV0
        fl = v.get('flags', None)
        flags = 0
        if fl != None:
            for x in fl.split():
                flags |= revlog.flagstr(x)
        elif self.revlogv1:
            flags = revlog.REVLOG_DEFAULT_FLAGS

        v = self.revlogversion | flags
        self.manifest = manifest.manifest(self.sopener, v)
        self.changelog = changelog.changelog(self.sopener, v)

        fallback = self.ui.config('ui', 'fallbackencoding')
        if fallback:
            util._fallbackencoding = fallback

        # the changelog might not have the inline index flag
        # on. If the format of the changelog is the same as found in
        # .hgrc, apply any flags found in the .hgrc as well.
        # Otherwise, just version from the changelog
        v = self.changelog.version
        if v == self.revlogversion:
            v |= flags
        self.revlogversion = v

        # lazily-populated caches; None means "not loaded yet"
        self.tagscache = None
        self.branchcache = None
        self.nodetagscache = None
        self.encodepats = None
        self.decodepats = None
        self.transhandle = None

        self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
128 128 def url(self):
129 129 return 'file:' + self.root
130 130
    def hook(self, name, throw=False, **args):
        """Run every configured hook matching `name`.

        Keyword args are passed to python hooks and exported to shell
        hooks as HG_* environment variables.  Returns a true value if
        any hook failed; with throw=True a failure raises util.Abort
        instead of returning.
        """
        def callhook(hname, funcname):
            '''call python hook. hook is callable object, looked up as
            name in python module. if callable returns "true", hook
            fails, else passes. if hook raises exception, treated as
            hook failure. exception propagates if throw is "true".

            reason for "true" meaning "hook failed" is so that
            unmodified commands (e.g. mercurial.commands.update) can
            be run as hooks without wrappers to convert return values.'''

            self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
            d = funcname.rfind('.')
            if d == -1:
                raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
                                 % (hname, funcname))
            modname = funcname[:d]
            try:
                obj = __import__(modname)
            except ImportError:
                try:
                    # extensions are loaded with hgext_ prefix
                    obj = __import__("hgext_%s" % modname)
                except ImportError:
                    raise util.Abort(_('%s hook is invalid '
                                       '(import of "%s" failed)') %
                                     (hname, modname))
            try:
                # walk the dotted path to the callable inside the module
                for p in funcname.split('.')[1:]:
                    obj = getattr(obj, p)
            except AttributeError, err:
                raise util.Abort(_('%s hook is invalid '
                                   '("%s" is not defined)') %
                                 (hname, funcname))
            if not callable(obj):
                raise util.Abort(_('%s hook is invalid '
                                   '("%s" is not callable)') %
                                 (hname, funcname))
            try:
                r = obj(ui=self.ui, repo=self, hooktype=name, **args)
            except (KeyboardInterrupt, util.SignalInterrupt):
                # never swallow user interrupts
                raise
            except Exception, exc:
                if isinstance(exc, util.Abort):
                    self.ui.warn(_('error: %s hook failed: %s\n') %
                                 (hname, exc.args[0]))
                else:
                    self.ui.warn(_('error: %s hook raised an exception: '
                                   '%s\n') % (hname, exc))
                if throw:
                    raise
                self.ui.print_exc()
                return True
            if r:
                if throw:
                    raise util.Abort(_('%s hook failed') % hname)
                self.ui.warn(_('warning: %s hook failed\n') % hname)
            return r

        def runhook(name, cmd):
            # shell hook: args become HG_<KEY> environment variables
            self.ui.note(_("running hook %s: %s\n") % (name, cmd))
            env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
            r = util.system(cmd, environ=env, cwd=self.root)
            if r:
                desc, r = util.explain_exit(r)
                if throw:
                    raise util.Abort(_('%s hook %s') % (name, desc))
                self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
            return r

        r = False
        # hooks match on "name" or "name.suffix"; run in sorted order
        hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
                 if hname.split(".", 1)[0] == name and cmd]
        hooks.sort()
        for hname, cmd in hooks:
            if cmd.startswith('python:'):
                r = callhook(hname, cmd[7:].strip()) or r
            else:
                r = runhook(hname, cmd) or r
        return r
211 211
    # characters forbidden in tag names: the separator and line
    # terminators would corrupt the "<node> <name>" tags file format
    tag_disallowed = ':\r\n'
213 213
    def tag(self, name, node, message, local, user, date):
        '''tag a revision with a symbolic name.

        if local is True, the tag is stored in a per-repository file.
        otherwise, it is stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tag in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        # refuse characters that would corrupt the tags file format
        for c in self.tag_disallowed:
            if c in name:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)

        if local:
            # local tags are stored in the current charset
            self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
            self.hook('tag', node=hex(node), tag=name, local=local)
            return

        # refuse to stack the tag on top of uncommitted .hgtags edits
        for x in self.status()[:5]:
            if '.hgtags' in x:
                raise util.Abort(_('working copy of .hgtags is changed '
                                   '(please commit .hgtags manually)'))

        # committed tags are stored in UTF-8
        line = '%s %s\n' % (hex(node), util.fromlocal(name))
        self.wfile('.hgtags', 'ab').write(line)
        if self.dirstate.state('.hgtags') == '?':
            self.add(['.hgtags'])

        self.commit(['.hgtags'], message, user, date)
        self.hook('tag', node=hex(node), tag=name, local=local)
257 257
258 258 def tags(self):
259 259 '''return a mapping of tag to node'''
260 260 if not self.tagscache:
261 261 self.tagscache = {}
262 262
263 263 def parsetag(line, context):
264 264 if not line:
265 265 return
266 266 s = l.split(" ", 1)
267 267 if len(s) != 2:
268 268 self.ui.warn(_("%s: cannot parse entry\n") % context)
269 269 return
270 270 node, key = s
271 271 key = util.tolocal(key.strip()) # stored in UTF-8
272 272 try:
273 273 bin_n = bin(node)
274 274 except TypeError:
275 275 self.ui.warn(_("%s: node '%s' is not well formed\n") %
276 276 (context, node))
277 277 return
278 278 if bin_n not in self.changelog.nodemap:
279 279 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
280 280 (context, key))
281 281 return
282 282 self.tagscache[key] = bin_n
283 283
284 284 # read the tags file from each head, ending with the tip,
285 285 # and add each tag found to the map, with "newer" ones
286 286 # taking precedence
287 287 f = None
288 288 for rev, node, fnode in self._hgtagsnodes():
289 289 f = (f and f.filectx(fnode) or
290 290 self.filectx('.hgtags', fileid=fnode))
291 291 count = 0
292 292 for l in f.data().splitlines():
293 293 count += 1
294 294 parsetag(l, _("%s, line %d") % (str(f), count))
295 295
296 296 try:
297 297 f = self.opener("localtags")
298 298 count = 0
299 299 for l in f:
300 300 # localtags are stored in the local character set
301 301 # while the internal tag table is stored in UTF-8
302 302 l = util.fromlocal(l)
303 303 count += 1
304 304 parsetag(l, _("localtags, line %d") % count)
305 305 except IOError:
306 306 pass
307 307
308 308 self.tagscache['tip'] = self.changelog.tip()
309 309
310 310 return self.tagscache
311 311
    def _hgtagsnodes(self):
        """Return [(rev, node, .hgtags filenode)] for heads carrying an
        .hgtags file, ordered so the tip comes last (highest tag
        precedence).  When several heads share the same .hgtags
        filenode, only the newest entry is kept."""
        heads = self.heads()
        heads.reverse()
        last = {}
        ret = []
        for node in heads:
            c = self.changectx(node)
            rev = c.rev()
            try:
                fnode = c.filenode('.hgtags')
            except repo.LookupError:
                # this head has no .hgtags file
                continue
            ret.append((rev, node, fnode))
            if fnode in last:
                # same .hgtags revision seen before: drop the older entry
                ret[last[fnode]] = None
            last[fnode] = len(ret) - 1
        return [item for item in ret if item]
329 329
330 330 def tagslist(self):
331 331 '''return a list of tags ordered by revision'''
332 332 l = []
333 333 for t, n in self.tags().items():
334 334 try:
335 335 r = self.changelog.rev(n)
336 336 except:
337 337 r = -2 # sort to the beginning of the list if unknown
338 338 l.append((r, t, n))
339 339 l.sort()
340 340 return [(t, n) for r, t, n in l]
341 341
342 342 def nodetags(self, node):
343 343 '''return the tags associated with a node'''
344 344 if not self.nodetagscache:
345 345 self.nodetagscache = {}
346 346 for t, n in self.tags().items():
347 347 self.nodetagscache.setdefault(n, []).append(t)
348 348 return self.nodetagscache.get(node, [])
349 349
    def _branchtags(self):
        """Return {branch name (UTF-8): tipmost node}, refreshing and
        rewriting the on-disk branch cache if it is behind tip."""
        partial, last, lrev = self._readbranchcache()

        tiprev = self.changelog.count() - 1
        if lrev != tiprev:
            # cache is stale: scan the missing revisions and persist
            self._updatebranchcache(partial, lrev+1, tiprev+1)
            self._writebranchcache(partial, self.changelog.tip(), tiprev)

        return partial
359 359
360 360 def branchtags(self):
361 361 if self.branchcache is not None:
362 362 return self.branchcache
363 363
364 364 self.branchcache = {} # avoid recursion in changectx
365 365 partial = self._branchtags()
366 366
367 367 # the branch cache is stored on disk as UTF-8, but in the local
368 368 # charset internally
369 369 for k, v in partial.items():
370 370 self.branchcache[util.tolocal(k)] = v
371 371 return self.branchcache
372 372
373 373 def _readbranchcache(self):
374 374 partial = {}
375 375 try:
376 376 f = self.opener("branches.cache")
377 377 lines = f.read().split('\n')
378 378 f.close()
379 last, lrev = lines.pop(0).rstrip().split(" ", 1)
379 last, lrev = lines.pop(0).split(" ", 1)
380 380 last, lrev = bin(last), int(lrev)
381 381 if not (lrev < self.changelog.count() and
382 382 self.changelog.node(lrev) == last): # sanity check
383 383 # invalidate the cache
384 384 raise ValueError('Invalid branch cache: unknown tip')
385 385 for l in lines:
386 386 if not l: continue
387 node, label = l.rstrip().split(" ", 1)
388 partial[label] = bin(node)
387 node, label = l.split(" ", 1)
388 partial[label.strip()] = bin(node)
389 389 except (KeyboardInterrupt, util.SignalInterrupt):
390 390 raise
391 391 except Exception, inst:
392 392 if self.ui.debugflag:
393 393 self.ui.warn(str(inst), '\n')
394 394 partial, last, lrev = {}, nullid, nullrev
395 395 return partial, last, lrev
396 396
397 397 def _writebranchcache(self, branches, tip, tiprev):
398 398 try:
399 399 f = self.opener("branches.cache", "w")
400 400 f.write("%s %s\n" % (hex(tip), tiprev))
401 401 for label, node in branches.iteritems():
402 402 f.write("%s %s\n" % (hex(node), label))
403 403 except IOError:
404 404 pass
405 405
406 406 def _updatebranchcache(self, partial, start, end):
407 407 for r in xrange(start, end):
408 408 c = self.changectx(r)
409 409 b = c.branch()
410 if b:
411 partial[b] = c.node()
410 partial[b] = c.node()
412 411
    def lookup(self, key):
        """Resolve a revision symbol to a changelog node.

        Resolution order matters: '.' (working dir parent), 'null',
        exact rev/node match, tag name, branch name, unambiguous node
        prefix.  Raises repo.RepoError for unknown symbols."""
        if key == '.':
            key = self.dirstate.parents()[0]
            if key == nullid:
                raise repo.RepoError(_("no revision checked out"))
        elif key == 'null':
            return nullid
        n = self.changelog._match(key)
        if n:
            return n
        if key in self.tags():
            return self.tags()[key]
        if key in self.branchtags():
            return self.branchtags()[key]
        n = self.changelog._partialmatch(key)
        if n:
            return n
        raise repo.RepoError(_("unknown revision '%s'") % key)
431 430
    def dev(self):
        # device number of the .hg directory (lets callers check
        # whether two repositories live on the same filesystem)
        return os.lstat(self.path).st_dev
434 433
    def local(self):
        # this repository class accesses files directly; presumably
        # remote repo classes return False here — confirm in repo.py
        return True
437 436
    def join(self, f):
        """Return the path of f relative to the .hg directory."""
        return os.path.join(self.path, f)
440 439
    def sjoin(self, f):
        """Return the path of f inside the store, applying the store's
        filename encoding first."""
        f = self.encodefn(f)
        return os.path.join(self.spath, f)
444 443
    def wjoin(self, f):
        """Return the path of f relative to the working directory."""
        return os.path.join(self.root, f)
447 446
    def file(self, f):
        """Return the filelog for tracked file f; a leading '/' is
        stripped so '/foo' and 'foo' address the same filelog."""
        if f[0] == '/':
            f = f[1:]
        return filelog.filelog(self.sopener, f, self.revlogversion)
452 451
    def changectx(self, changeid=None):
        """Return a changectx for the given changeset id."""
        return context.changectx(self, changeid)
455 454
    def workingctx(self):
        """Return a context object for the working directory."""
        return context.workingctx(self)
458 457
    def parents(self, changeid=None):
        '''
        get list of changectxs for parents of changeid or working directory
        '''
        if changeid is None:
            pl = self.dirstate.parents()
        else:
            n = self.changelog.lookup(changeid)
            pl = self.changelog.parents(n)
        if pl[1] == nullid:
            # not a merge: a single-element list
            return [self.changectx(pl[0])]
        return [self.changectx(pl[0]), self.changectx(pl[1])]
471 470
    def filectx(self, path, changeid=None, fileid=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)
476 475
    def getcwd(self):
        """Return the current directory relative to the repo root."""
        return self.dirstate.getcwd()
479 478
    def wfile(self, f, mode='r'):
        """Open file f from the working directory (no filtering)."""
        return self.wopener(f, mode)
482 481
483 482 def wread(self, filename):
484 483 if self.encodepats == None:
485 484 l = []
486 485 for pat, cmd in self.ui.configitems("encode"):
487 486 mf = util.matcher(self.root, "", [pat], [], [])[1]
488 487 l.append((mf, cmd))
489 488 self.encodepats = l
490 489
491 490 data = self.wopener(filename, 'r').read()
492 491
493 492 for mf, cmd in self.encodepats:
494 493 if mf(filename):
495 494 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
496 495 data = util.filter(data, cmd)
497 496 break
498 497
499 498 return data
500 499
501 500 def wwrite(self, filename, data, fd=None):
502 501 if self.decodepats == None:
503 502 l = []
504 503 for pat, cmd in self.ui.configitems("decode"):
505 504 mf = util.matcher(self.root, "", [pat], [], [])[1]
506 505 l.append((mf, cmd))
507 506 self.decodepats = l
508 507
509 508 for mf, cmd in self.decodepats:
510 509 if mf(filename):
511 510 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
512 511 data = util.filter(data, cmd)
513 512 break
514 513
515 514 if fd:
516 515 return fd.write(data)
517 516 return self.wopener(filename, 'w').write(data)
518 517
    def transaction(self):
        """Return a running transaction, nesting into one already in
        progress when possible.  Saves the dirstate for rollback and
        arranges the journal->undo renames that happen on close."""
        tr = self.transhandle
        if tr != None and tr.running():
            # nested call: join the in-progress transaction
            return tr.nest()

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)

        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames))
        self.transhandle = tr
        return tr
538 537
    def recover(self):
        """Roll back an interrupted transaction's journal, if present.
        Returns True when a journal was rolled back."""
        l = self.lock()
        if os.path.exists(self.sjoin("journal")):
            self.ui.status(_("rolling back interrupted transaction\n"))
            transaction.rollback(self.sopener, self.sjoin("journal"))
            self.reload()
            return True
        else:
            self.ui.warn(_("no interrupted transaction available\n"))
            return False
549 548
    def rollback(self, wlock=None):
        """Undo the last completed transaction using the undo journal,
        restoring the dirstate saved alongside it."""
        if not wlock:
            wlock = self.wlock()
        l = self.lock()
        if os.path.exists(self.sjoin("undo")):
            self.ui.status(_("rolling back last transaction\n"))
            transaction.rollback(self.sopener, self.sjoin("undo"))
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            self.reload()
            self.wreload()
        else:
            self.ui.warn(_("no rollback information available\n"))
562 561
    def wreload(self):
        # re-read the dirstate from disk (working directory state)
        self.dirstate.read()
565 564
566 565 def reload(self):
567 566 self.changelog.load()
568 567 self.manifest.load()
569 568 self.tagscache = None
570 569 self.nodetagscache = None
571 570
    def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
                desc=None):
        """Acquire the lock file `lockname`.  If it is already held and
        wait is true, retry with a timeout from the ui config instead
        of raising lock.LockHeld.  acquirefn, when given, runs right
        after acquisition (typically a cache reload)."""
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
587 586
    def lock(self, wait=1):
        # store lock: protects changelog/manifest/filelogs; caches are
        # reloaded on acquisition so we see other writers' changes
        return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
                            desc=_('repository %s') % self.origroot)
591 590
    def wlock(self, wait=1):
        # working directory lock: the dirstate is written back when the
        # lock is released and re-read when it is acquired
        return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
                            self.wreload,
                            desc=_('working directory of %s') % self.origroot)
596 595
    def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
        """
        commit an individual file as part of a larger transaction

        Returns the filenode for fn: a newly added revision, or the
        existing parent filenode when the content is unchanged (in
        which case changelist is left untouched; otherwise fn is
        appended to it).
        """

        t = self.wread(fn)
        fl = self.file(fn)
        fp1 = manifest1.get(fn, nullid)
        fp2 = manifest2.get(fn, nullid)

        meta = {}
        cp = self.dirstate.copied(fn)
        if cp:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4        as the merge base
            #
            meta["copy"] = cp
            if not manifest2: # not a branch merge
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
                fp2 = nullid
            elif fp2 != nullid: # copied on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            elif fp1 != nullid: # copied on local side, reversed
                meta["copyrev"] = hex(manifest2.get(cp))
                fp2 = fp1
            else: # directory rename
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            self.ui.debug(_(" %s: copy %s:%s\n") %
                          (fn, cp, meta["copyrev"]))
            fp1 = nullid
        elif fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = fl.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and not fl.cmp(fp1, t):
            return fp1

        changelist.append(fn)
        return fl.add(t, meta, transaction, linkrev, fp1, fp2)
656 655
    def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
        """Commit with explicitly given parents, bypassing the
        dirstate-driven status logic of commit()."""
        if p1 is None:
            p1, p2 = self.dirstate.parents()
        return self.commit(files=files, text=text, user=user, date=date,
                           p1=p1, p2=p2, wlock=wlock)
662 661
    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, lock=None, wlock=None,
               force_editor=False, p1=None, p2=None, extra={}):
        """Create a new changeset; return its node, or None when there
        is nothing to commit (or the user supplied an empty message).

        With p1 set this is a rawcommit-style commit with explicit
        parents; otherwise parents, file lists and the branch name come
        from the dirstate, which is also updated afterwards.  NOTE:
        `extra` is a mutable default argument, but it is copied before
        use, so the shared default is never mutated.
        """

        commit = []
        remove = []
        changed = []
        use_dirstate = (p1 is None) # not rawcommit
        extra = extra.copy()

        if use_dirstate:
            if files:
                # explicit file list: classify by dirstate state
                for f in files:
                    s = self.dirstate.state(f)
                    if s in 'nmai':
                        commit.append(f)
                    elif s == 'r':
                        remove.append(f)
                    else:
                        self.ui.warn(_("%s not tracked!\n") % f)
            else:
                changes = self.status(match=match)[:5]
                modified, added, removed, deleted, unknown = changes
                commit = modified + added
                remove = removed
        else:
            commit = files

        if use_dirstate:
            p1, p2 = self.dirstate.parents()
            update_dirstate = True
        else:
            p1, p2 = p1, p2 or nullid
            update_dirstate = (self.dirstate.parents()[0] == p1)

        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0]).copy()
        m2 = self.manifest.read(c2[0])

        if use_dirstate:
            branchname = self.workingctx().branch()
            try:
                # reject branch names that are not valid UTF-8
                branchname = branchname.decode('UTF-8').encode('UTF-8')
            except UnicodeDecodeError:
                raise util.Abort(_('branch name not in UTF-8!'))
        else:
            branchname = ""

        if use_dirstate:
            oldname = c1[5].get("branch", "") # stored in UTF-8
            # a branch name change alone is enough to allow a commit
            if not commit and not remove and not force and p2 == nullid and \
                   branchname == oldname:
                self.ui.status(_("nothing changed\n"))
                return None

        xp1 = hex(p1)
        if p2 == nullid: xp2 = ''
        else: xp2 = hex(p2)

        self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

        if not wlock:
            wlock = self.wlock()
        if not lock:
            lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
                m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
            except IOError:
                if use_dirstate:
                    self.ui.warn(_("trouble committing %s!\n") % f)
                    raise
                else:
                    # rawcommit tolerates unreadable files: drop them
                    remove.append(f)

        # update manifest
        m1.update(new)
        remove.sort()

        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))

        # add changeset
        new = new.keys()
        new.sort()

        user = user or self.ui.username()
        if not text or force_editor:
            # build the commit-message template and run the editor
            edittext = []
            if text:
                edittext.append(text)
            edittext.append("")
            edittext.append("HG: user: %s" % user)
            if p2 != nullid:
                edittext.append("HG: branch merge")
            edittext.extend(["HG: changed %s" % f for f in changed])
            edittext.extend(["HG: removed %s" % f for f in remove])
            if not changed and not remove:
                edittext.append("HG: no files changed")
            edittext.append("")
            # run editor in the repository root
            olddir = os.getcwd()
            os.chdir(self.root)
            text = self.ui.edit("\n".join(edittext), user)
            os.chdir(olddir)

        # normalize the message; an empty message aborts the commit
        lines = [line.rstrip() for line in text.rstrip().splitlines()]
        while lines and not lines[0]:
            del lines[0]
        if not lines:
            return None
        text = '\n'.join(lines)
        if branchname:
            extra["branch"] = branchname
        n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
                               user, date, extra)
        self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                  parent2=xp2)
        tr.close()

        if use_dirstate or update_dirstate:
            self.dirstate.setparents(n)
            if use_dirstate:
                self.dirstate.update(new, "n")
                self.dirstate.forget(remove)

        self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
        return n
802 801
    def walk(self, node=None, files=[], match=util.always, badmatch=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function

        results are yielded in a tuple (src, filename), where src
        is one of:
        'f' the file was found in the directory tree
        'm' the file was only in the dirstate and not in the tree
        'b' file was not found and matched badmatch

        NOTE: `files` is a mutable default argument; it is only read
        here, never mutated.
        '''

        if node:
            # walk a committed manifest instead of the working dir
            fdict = dict.fromkeys(files)
            for fn in self.manifest.read(self.changelog.read(node)[0]):
                for ffn in fdict:
                    # match if the file is the exact name or a directory
                    if ffn == fn or fn.startswith("%s/" % ffn):
                        del fdict[ffn]
                        break
                if match(fn):
                    yield 'm', fn
            # anything left in fdict was requested but not found
            for fn in fdict:
                if badmatch and badmatch(fn):
                    if match(fn):
                        yield 'b', fn
                else:
                    self.ui.warn(_('%s: No such file in rev %s\n') % (
                        util.pathto(self.getcwd(), fn), short(node)))
        else:
            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
                yield src, fn
836 835
    def status(self, node1=None, node2=None, files=[], match=util.always,
               wlock=None, list_ignored=False, list_clean=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns the tuple (modified, added, removed, deleted, unknown,
        ignored, clean); ignored and clean are only populated when the
        corresponding list_* flag is set.  NOTE: `files` is a mutable
        default argument; it is only read here, never mutated.
        """

        def fcmp(fn, mf):
            # true if the working copy of fn differs from mf's revision
            t1 = self.wread(fn)
            return self.file(fn).cmp(mf.get(fn, nullid), t1)

        def mfmatches(node):
            # manifest of `node`, restricted to files accepted by match
            change = self.changelog.read(node)
            mf = self.manifest.read(change[0]).copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        modified, added, removed, deleted, unknown = [], [], [], [], []
        ignored, clean = [], []

        compareworking = False
        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
            compareworking = True

        if not compareworking:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        # are we comparing the working directory?
        if not node2:
            if not wlock:
                try:
                    wlock = self.wlock(wait=0)
                except lock.LockException:
                    # best-effort: without the lock we simply skip the
                    # dirstate mark-clean updates below
                    wlock = None
            (lookup, modified, added, removed, deleted, unknown,
             ignored, clean) = self.dirstate.status(files, match,
                                                    list_ignored, list_clean)

            # are we comparing working dir against its parent?
            if compareworking:
                if lookup:
                    # do a full compare of any files that might have changed
                    mf2 = mfmatches(self.dirstate.parents()[0])
                    for f in lookup:
                        if fcmp(f, mf2):
                            modified.append(f)
                        else:
                            clean.append(f)
                            if wlock is not None:
                                # record that the file is unchanged so
                                # future status calls can skip it
                                self.dirstate.update([f], "n")
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                # XXX: create it in dirstate.py ?
                mf2 = mfmatches(self.dirstate.parents()[0])
                for f in lookup + modified + added:
                    mf2[f] = ""
                    mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
                for f in removed:
                    if f in mf2:
                        del mf2[f]
        else:
            # we are comparing two revisions
            mf2 = mfmatches(node2)

        if not compareworking:
            # flush lists from dirstate before comparing manifests
            modified, added, clean = [], [], []

            # make sure to sort the files so we talk to the disk in a
            # reasonable order
            mf2keys = mf2.keys()
            mf2keys.sort()
            for fn in mf2keys:
                if mf1.has_key(fn):
                    if mf1.flags(fn) != mf2.flags(fn) or \
                       (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
                        modified.append(fn)
                    elif list_clean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)

            # files remaining in mf1 exist in node1 but not in node2
            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored, clean:
            l.sort()
        return (modified, added, removed, deleted, unknown, ignored, clean)
933 932
    def add(self, list, wlock=None):
        """Schedule the given files for addition at the next commit
        (dirstate state 'a'); warns and skips missing files,
        non-regular files and already-tracked files."""
        if not wlock:
            wlock = self.wlock()
        for f in list:
            p = self.wjoin(f)
            if not os.path.exists(p):
                self.ui.warn(_("%s does not exist!\n") % f)
            elif not os.path.isfile(p):
                self.ui.warn(_("%s not added: only files supported currently\n")
                             % f)
            elif self.dirstate.state(f) in 'an':
                self.ui.warn(_("%s already tracked!\n") % f)
            else:
                self.dirstate.update([f], "a")
948 947
949 948 def forget(self, list, wlock=None):
950 949 if not wlock:
951 950 wlock = self.wlock()
952 951 for f in list:
953 952 if self.dirstate.state(f) not in 'ai':
954 953 self.ui.warn(_("%s not added!\n") % f)
955 954 else:
956 955 self.dirstate.forget([f])
957 956
958 957 def remove(self, list, unlink=False, wlock=None):
959 958 if unlink:
960 959 for f in list:
961 960 try:
962 961 util.unlink(self.wjoin(f))
963 962 except OSError, inst:
964 963 if inst.errno != errno.ENOENT:
965 964 raise
966 965 if not wlock:
967 966 wlock = self.wlock()
968 967 for f in list:
969 968 p = self.wjoin(f)
970 969 if os.path.exists(p):
971 970 self.ui.warn(_("%s still exists!\n") % f)
972 971 elif self.dirstate.state(f) == 'a':
973 972 self.dirstate.forget([f])
974 973 elif f not in self.dirstate:
975 974 self.ui.warn(_("%s not tracked!\n") % f)
976 975 else:
977 976 self.dirstate.update([f], "r")
978 977
979 978 def undelete(self, list, wlock=None):
980 979 p = self.dirstate.parents()[0]
981 980 mn = self.changelog.read(p)[0]
982 981 m = self.manifest.read(mn)
983 982 if not wlock:
984 983 wlock = self.wlock()
985 984 for f in list:
986 985 if self.dirstate.state(f) not in "r":
987 986 self.ui.warn("%s not removed!\n" % f)
988 987 else:
989 988 t = self.file(f).read(m[f])
990 989 self.wwrite(f, t)
991 990 util.set_exec(self.wjoin(f), m.execf(f))
992 991 self.dirstate.update([f], "n")
993 992
994 993 def copy(self, source, dest, wlock=None):
995 994 p = self.wjoin(dest)
996 995 if not os.path.exists(p):
997 996 self.ui.warn(_("%s does not exist!\n") % dest)
998 997 elif not os.path.isfile(p):
999 998 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
1000 999 else:
1001 1000 if not wlock:
1002 1001 wlock = self.wlock()
1003 1002 if self.dirstate.state(dest) == '?':
1004 1003 self.dirstate.update([dest], "a")
1005 1004 self.dirstate.copy(source, dest)
1006 1005
1007 1006 def heads(self, start=None):
1008 1007 heads = self.changelog.heads(start)
1009 1008 # sort the output in rev descending order
1010 1009 heads = [(-self.changelog.rev(h), h) for h in heads]
1011 1010 heads.sort()
1012 1011 return [n for (r, n) in heads]
1013 1012
    # branchlookup returns a dict giving a list of branches for
    # each head.  A branch is defined as the tag of a node or
    # the branch of the node's parents.  If a node has multiple
    # branch tags, tags are eliminated if they are visible from other
    # branch tags.
    #
    # So, for this graph:  a->b->c->d->e
    #                       \         /
    #                        aa -----/
    # a has tag 2.6.12
    # d has tag 2.6.13
    # e would have branch tags for 2.6.12 and 2.6.13.  Because the node
    # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
    # from the list.
    #
    # It is possible that more than one head will have the same branch tag.
    # callers need to check the result for multiple heads under the same
    # branch tag if that is a problem for them (ie checkout of a specific
    # branch).
    #
    # passing in a specific branch will limit the depth of the search
    # through the parents.  It won't limit the branches returned in the
    # result though.
    def branchlookup(self, heads=None, branch=None):
        """Return {head: [branch tag names]} for the given heads (legacy
        tag-based branches; see the comment block above for the rules)."""
        if not heads:
            heads = self.heads()
        headt = [ h for h in heads ]
        chlog = self.changelog
        # branches maps a node to the set of tagged nodes visible from it
        branches = {}
        # pending second parents of merges, with the tags found so far
        merges = []
        seenmerge = {}

        # traverse the tree once for each head, recording in the branches
        # dict which tags are visible from this head.  The branches
        # dict also records which tags are visible from each tag
        # while we traverse.
        while headt or merges:
            if merges:
                n, found = merges.pop()
                visit = [n]
            else:
                h = headt.pop()
                visit = [h]
                found = [h]
                seen = {}
            while visit:
                n = visit.pop()
                if n in seen:
                    continue
                pp = chlog.parents(n)
                tags = self.nodetags(n)
                if tags:
                    for x in tags:
                        # 'tip' is not a branch tag, skip it
                        if x == 'tip':
                            continue
                        for f in found:
                            branches.setdefault(f, {})[n] = 1
                        branches.setdefault(n, {})[n] = 1
                        break
                    if n not in found:
                        found.append(n)
                    # reaching the requested branch bounds the search depth:
                    # do not walk past this node's parents
                    if branch in tags:
                        continue
                seen[n] = 1
                # queue the second parent of a merge for a later pass,
                # carrying a copy of the tags found so far
                if pp[1] != nullid and n not in seenmerge:
                    merges.append((pp[1], [x for x in found]))
                    seenmerge[n] = 1
                if pp[0] != nullid:
                    visit.append(pp[0])
        # traverse the branches dict, eliminating branch tags from each
        # head that are visible from another branch tag for that head.
        out = {}
        viscache = {}
        for h in heads:
            def visible(node):
                # transitive closure of the branches relation, memoized
                if node in viscache:
                    return viscache[node]
                ret = {}
                visit = [node]
                while visit:
                    x = visit.pop()
                    if x in viscache:
                        ret.update(viscache[x])
                    elif x not in ret:
                        ret[x] = 1
                        if x in branches:
                            visit[len(visit):] = branches[x].keys()
                viscache[node] = ret
                return ret
            if h not in branches:
                continue
            # O(n^2), but somewhat limited.  This only searches the
            # tags visible from a specific head, not all the tags in the
            # whole repo.
            for b in branches[h]:
                vis = False
                for bb in branches[h].keys():
                    if b != bb:
                        if b in visible(bb):
                            vis = True
                            break
                if not vis:
                    l = out.setdefault(h, [])
                    l[len(l):] = self.nodetags(b)
        return out
1119 1118
1120 1119 def branches(self, nodes):
1121 1120 if not nodes:
1122 1121 nodes = [self.changelog.tip()]
1123 1122 b = []
1124 1123 for n in nodes:
1125 1124 t = n
1126 1125 while 1:
1127 1126 p = self.changelog.parents(n)
1128 1127 if p[1] != nullid or p[0] == nullid:
1129 1128 b.append((t, n, p[0], p[1]))
1130 1129 break
1131 1130 n = p[0]
1132 1131 return b
1133 1132
1134 1133 def between(self, pairs):
1135 1134 r = []
1136 1135
1137 1136 for top, bottom in pairs:
1138 1137 n, l, i = top, [], 0
1139 1138 f = 1
1140 1139
1141 1140 while n != bottom:
1142 1141 p = self.changelog.parents(n)[0]
1143 1142 if i == f:
1144 1143 l.append(n)
1145 1144 f = f * 2
1146 1145 n = p
1147 1146 i += 1
1148 1147
1149 1148 r.append(l)
1150 1149
1151 1150 return r
1152 1151
1153 1152 def findincoming(self, remote, base=None, heads=None, force=False):
1154 1153 """Return list of roots of the subsets of missing nodes from remote
1155 1154
1156 1155 If base dict is specified, assume that these nodes and their parents
1157 1156 exist on the remote side and that no child of a node of base exists
1158 1157 in both remote and self.
1159 1158 Furthermore base will be updated to include the nodes that exists
1160 1159 in self and remote but no children exists in self and remote.
1161 1160 If a list of heads is specified, return only nodes which are heads
1162 1161 or ancestors of these heads.
1163 1162
1164 1163 All the ancestors of base are in self and in remote.
1165 1164 All the descendants of the list returned are missing in self.
1166 1165 (and so we know that the rest of the nodes are missing in remote, see
1167 1166 outgoing)
1168 1167 """
1169 1168 m = self.changelog.nodemap
1170 1169 search = []
1171 1170 fetch = {}
1172 1171 seen = {}
1173 1172 seenbranch = {}
1174 1173 if base == None:
1175 1174 base = {}
1176 1175
1177 1176 if not heads:
1178 1177 heads = remote.heads()
1179 1178
1180 1179 if self.changelog.tip() == nullid:
1181 1180 base[nullid] = 1
1182 1181 if heads != [nullid]:
1183 1182 return [nullid]
1184 1183 return []
1185 1184
1186 1185 # assume we're closer to the tip than the root
1187 1186 # and start by examining the heads
1188 1187 self.ui.status(_("searching for changes\n"))
1189 1188
1190 1189 unknown = []
1191 1190 for h in heads:
1192 1191 if h not in m:
1193 1192 unknown.append(h)
1194 1193 else:
1195 1194 base[h] = 1
1196 1195
1197 1196 if not unknown:
1198 1197 return []
1199 1198
1200 1199 req = dict.fromkeys(unknown)
1201 1200 reqcnt = 0
1202 1201
1203 1202 # search through remote branches
1204 1203 # a 'branch' here is a linear segment of history, with four parts:
1205 1204 # head, root, first parent, second parent
1206 1205 # (a branch always has two parents (or none) by definition)
1207 1206 unknown = remote.branches(unknown)
1208 1207 while unknown:
1209 1208 r = []
1210 1209 while unknown:
1211 1210 n = unknown.pop(0)
1212 1211 if n[0] in seen:
1213 1212 continue
1214 1213
1215 1214 self.ui.debug(_("examining %s:%s\n")
1216 1215 % (short(n[0]), short(n[1])))
1217 1216 if n[0] == nullid: # found the end of the branch
1218 1217 pass
1219 1218 elif n in seenbranch:
1220 1219 self.ui.debug(_("branch already found\n"))
1221 1220 continue
1222 1221 elif n[1] and n[1] in m: # do we know the base?
1223 1222 self.ui.debug(_("found incomplete branch %s:%s\n")
1224 1223 % (short(n[0]), short(n[1])))
1225 1224 search.append(n) # schedule branch range for scanning
1226 1225 seenbranch[n] = 1
1227 1226 else:
1228 1227 if n[1] not in seen and n[1] not in fetch:
1229 1228 if n[2] in m and n[3] in m:
1230 1229 self.ui.debug(_("found new changeset %s\n") %
1231 1230 short(n[1]))
1232 1231 fetch[n[1]] = 1 # earliest unknown
1233 1232 for p in n[2:4]:
1234 1233 if p in m:
1235 1234 base[p] = 1 # latest known
1236 1235
1237 1236 for p in n[2:4]:
1238 1237 if p not in req and p not in m:
1239 1238 r.append(p)
1240 1239 req[p] = 1
1241 1240 seen[n[0]] = 1
1242 1241
1243 1242 if r:
1244 1243 reqcnt += 1
1245 1244 self.ui.debug(_("request %d: %s\n") %
1246 1245 (reqcnt, " ".join(map(short, r))))
1247 1246 for p in xrange(0, len(r), 10):
1248 1247 for b in remote.branches(r[p:p+10]):
1249 1248 self.ui.debug(_("received %s:%s\n") %
1250 1249 (short(b[0]), short(b[1])))
1251 1250 unknown.append(b)
1252 1251
1253 1252 # do binary search on the branches we found
1254 1253 while search:
1255 1254 n = search.pop(0)
1256 1255 reqcnt += 1
1257 1256 l = remote.between([(n[0], n[1])])[0]
1258 1257 l.append(n[1])
1259 1258 p = n[0]
1260 1259 f = 1
1261 1260 for i in l:
1262 1261 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1263 1262 if i in m:
1264 1263 if f <= 2:
1265 1264 self.ui.debug(_("found new branch changeset %s\n") %
1266 1265 short(p))
1267 1266 fetch[p] = 1
1268 1267 base[i] = 1
1269 1268 else:
1270 1269 self.ui.debug(_("narrowed branch search to %s:%s\n")
1271 1270 % (short(p), short(i)))
1272 1271 search.append((p, i))
1273 1272 break
1274 1273 p, f = i, f * 2
1275 1274
1276 1275 # sanity check our fetch list
1277 1276 for f in fetch.keys():
1278 1277 if f in m:
1279 1278 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1280 1279
1281 1280 if base.keys() == [nullid]:
1282 1281 if force:
1283 1282 self.ui.warn(_("warning: repository is unrelated\n"))
1284 1283 else:
1285 1284 raise util.Abort(_("repository is unrelated"))
1286 1285
1287 1286 self.ui.debug(_("found new changesets starting at ") +
1288 1287 " ".join([short(f) for f in fetch]) + "\n")
1289 1288
1290 1289 self.ui.debug(_("%d total queries\n") % reqcnt)
1291 1290
1292 1291 return fetch.keys()
1293 1292
1294 1293 def findoutgoing(self, remote, base=None, heads=None, force=False):
1295 1294 """Return list of nodes that are roots of subsets not in remote
1296 1295
1297 1296 If base dict is specified, assume that these nodes and their parents
1298 1297 exist on the remote side.
1299 1298 If a list of heads is specified, return only nodes which are heads
1300 1299 or ancestors of these heads, and return a second element which
1301 1300 contains all remote heads which get new children.
1302 1301 """
1303 1302 if base == None:
1304 1303 base = {}
1305 1304 self.findincoming(remote, base, heads, force=force)
1306 1305
1307 1306 self.ui.debug(_("common changesets up to ")
1308 1307 + " ".join(map(short, base.keys())) + "\n")
1309 1308
1310 1309 remain = dict.fromkeys(self.changelog.nodemap)
1311 1310
1312 1311 # prune everything remote has from the tree
1313 1312 del remain[nullid]
1314 1313 remove = base.keys()
1315 1314 while remove:
1316 1315 n = remove.pop(0)
1317 1316 if n in remain:
1318 1317 del remain[n]
1319 1318 for p in self.changelog.parents(n):
1320 1319 remove.append(p)
1321 1320
1322 1321 # find every node whose parents have been pruned
1323 1322 subset = []
1324 1323 # find every remote head that will get new children
1325 1324 updated_heads = {}
1326 1325 for n in remain:
1327 1326 p1, p2 = self.changelog.parents(n)
1328 1327 if p1 not in remain and p2 not in remain:
1329 1328 subset.append(n)
1330 1329 if heads:
1331 1330 if p1 in heads:
1332 1331 updated_heads[p1] = True
1333 1332 if p2 in heads:
1334 1333 updated_heads[p2] = True
1335 1334
1336 1335 # this is the set of all roots we have to push
1337 1336 if heads:
1338 1337 return subset, updated_heads.keys()
1339 1338 else:
1340 1339 return subset
1341 1340
1342 1341 def pull(self, remote, heads=None, force=False, lock=None):
1343 1342 mylock = False
1344 1343 if not lock:
1345 1344 lock = self.lock()
1346 1345 mylock = True
1347 1346
1348 1347 try:
1349 1348 fetch = self.findincoming(remote, force=force)
1350 1349 if fetch == [nullid]:
1351 1350 self.ui.status(_("requesting all changes\n"))
1352 1351
1353 1352 if not fetch:
1354 1353 self.ui.status(_("no changes found\n"))
1355 1354 return 0
1356 1355
1357 1356 if heads is None:
1358 1357 cg = remote.changegroup(fetch, 'pull')
1359 1358 else:
1360 1359 if 'changegroupsubset' not in remote.capabilities:
1361 1360 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1362 1361 cg = remote.changegroupsubset(fetch, heads, 'pull')
1363 1362 return self.addchangegroup(cg, 'pull', remote.url())
1364 1363 finally:
1365 1364 if mylock:
1366 1365 lock.release()
1367 1366
1368 1367 def push(self, remote, force=False, revs=None):
1369 1368 # there are two ways to push to remote repo:
1370 1369 #
1371 1370 # addchangegroup assumes local user can lock remote
1372 1371 # repo (local filesystem, old ssh servers).
1373 1372 #
1374 1373 # unbundle assumes local user cannot lock remote repo (new ssh
1375 1374 # servers, http servers).
1376 1375
1377 1376 if remote.capable('unbundle'):
1378 1377 return self.push_unbundle(remote, force, revs)
1379 1378 return self.push_addchangegroup(remote, force, revs)
1380 1379
    def prepush(self, remote, force, revs):
        """Analyse a push to remote and build the changegroup for it.

        Returns a two-element tuple:
        - (changegroup, remote_heads) when there is something to push
        - (None, status) when nothing is pushed (status is an exit code)
        """
        base = {}
        remote_heads = remote.heads()
        # discovery fills 'base' with nodes common to both sides and tells
        # us whether the remote has changes we lack ('inc')
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head
            warn = 0

            if remote_heads == [nullid]:
                # pushing into an empty repo can never add heads
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                # simulate the remote head set after the push: every remote
                # head survives unless some outgoing head descends from it
                # (NOTE(review): relies on changelog.heads(start, heads)
                # semantics — confirm it returns descendant heads of start)
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            newheads.append(r)
                    else:
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 1
            elif inc:
                self.ui.warn(_("note: unsynced remote changes!\n"))


        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
1436 1435
1437 1436 def push_addchangegroup(self, remote, force, revs):
1438 1437 lock = remote.lock()
1439 1438
1440 1439 ret = self.prepush(remote, force, revs)
1441 1440 if ret[0] is not None:
1442 1441 cg, remote_heads = ret
1443 1442 return remote.addchangegroup(cg, 'push', self.url())
1444 1443 return ret[1]
1445 1444
1446 1445 def push_unbundle(self, remote, force, revs):
1447 1446 # local repo finds heads on server, finds out what revs it
1448 1447 # must push. once revs transferred, if server finds it has
1449 1448 # different heads (someone else won commit/push race), server
1450 1449 # aborts.
1451 1450
1452 1451 ret = self.prepush(remote, force, revs)
1453 1452 if ret[0] is not None:
1454 1453 cg, remote_heads = ret
1455 1454 if force: remote_heads = ['force']
1456 1455 return remote.unbundle(cg, remote_heads, 'push')
1457 1456 return ret[1]
1458 1457
1459 1458 def changegroupinfo(self, nodes):
1460 1459 self.ui.note(_("%d changesets found\n") % len(nodes))
1461 1460 if self.ui.debugflag:
1462 1461 self.ui.debug(_("List of changesets:\n"))
1463 1462 for node in nodes:
1464 1463 self.ui.debug("%s\n" % hex(node))
1465 1464
    def changegroupsubset(self, bases, heads, source):
        """This function generates a changegroup consisting of all the nodes
        that are descendents of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to.

        Returns the changegroup wrapped in a util.chunkbuffer."""

        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        self.changegroupinfo(msng_cl_lst)
        # Some bases may turn out to be superfluous, and some heads may be
        # too.  nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known.  The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function.  Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history.  Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents.  This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup.  The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # for the inner function.
        def filenode_collector(changedfiles):
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to.  It
            # does this by assuming that a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, lets remove
        # all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all these utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if msng_filenode_set.has_key(fname):
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.genchunk(fname)
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if msng_filenode_set.has_key(fname):
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

        if msng_cl_lst:
            self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())
1737 1736
    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        Returns the changegroup wrapped in a util.chunkbuffer."""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        # every descendant of basenodes goes out; revset is the ersatz set
        # of their revision numbers
        nodes = cl.nodesbetween(basenodes, None)[0]
        revset = dict.fromkeys([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes)

        # a changeset "belongs" to itself, so the lookup is identity
        def identity(x):
            return x

        # yield the nodes of a revlog whose linkrev falls in the outgoing
        # revset, in storage (revision) order
        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        # callback invoked for each outgoing changeset: record every file
        # it touched
        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        # map a node of the given revlog back to the changeset node it was
        # introduced with
        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        # emit changelog chunks, then manifest chunks, then one group per
        # changed file, terminated by closechunk()
        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.genchunk(fname)
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())
1804 1803
    def addchangegroup(self, source, srctype, url):
        """add changegroup to repo.

        return values:
        - nothing changed or no source: 0
        - more heads than before: 1+added heads (2..n)
        - less heads than before: -1-removed heads (-2..-n)
        - number of heads stays the same: 1
        """
        # lookup callback for changelog.addgroup: emits a debug line per
        # changeset and links each entry to its own (next) changelog rev
        def csmap(x):
            self.ui.debug(_("add changeset %s\n") % short(x))
            return cl.count()

        # lookup callback for manifest/filelog groups: link nodes map
        # directly to changelog revisions
        def revmap(x):
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        tr = self.transaction()

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = None
        try:
            cl = appendfile.appendchangelog(self.sopener,
                                            self.changelog.version)

            oldheads = len(cl.heads())

            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            cor = cl.count() - 1  # last rev before the group is applied
            chunkiter = changegroup.chunkiter(source)
            if cl.addgroup(chunkiter, csmap, tr, 1) is None:
                raise util.Abort(_("received changelog group is empty"))
            cnr = cl.count() - 1  # last rev after the group is applied
            changesets = cnr - cor

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            chunkiter = changegroup.chunkiter(source)
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            self.manifest.addgroup(chunkiter, revmap, tr)

            # process the files
            self.ui.status(_("adding file changes\n"))
            while 1:
                f = changegroup.getchunk(source)
                if not f:
                    # empty chunk terminates the stream of file groups
                    break
                self.ui.debug(_("adding %s revisions\n") % f)
                fl = self.file(f)
                o = fl.count()
                chunkiter = changegroup.chunkiter(source)
                if fl.addgroup(chunkiter, revmap, tr) is None:
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += fl.count() - o
                files += 1

            # flush the appendfile data before other readers may look
            cl.writedata()
        finally:
            if cl:
                cl.cleanup()

        # make changelog see real files again
        self.changelog = changelog.changelog(self.sopener,
                                             self.changelog.version)
        self.changelog.checkinlinesize(tr)

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads != oldheads:
            heads = _(" (%+d heads)") % (newheads - oldheads)

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        if changesets > 0:
            # pretxnchangegroup may still veto the whole transaction
            self.hook('pretxnchangegroup', throw=True,
                      node=hex(self.changelog.node(cor+1)), source=srctype,
                      url=url)

        tr.close()

        if changesets > 0:
            # post-transaction notification hooks: one changegroup hook
            # for the batch, one incoming hook per new changeset
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in xrange(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        # never return 0 here:
        if newheads < oldheads:
            return newheads - oldheads - 1
        else:
            return newheads - oldheads + 1
1911 1910
1912 1911
1913 1912 def stream_in(self, remote):
1914 1913 fp = remote.stream_out()
1915 1914 l = fp.readline()
1916 1915 try:
1917 1916 resp = int(l)
1918 1917 except ValueError:
1919 1918 raise util.UnexpectedOutput(
1920 1919 _('Unexpected response from remote server:'), l)
1921 1920 if resp == 1:
1922 1921 raise util.Abort(_('operation forbidden by server'))
1923 1922 elif resp == 2:
1924 1923 raise util.Abort(_('locking the remote repository failed'))
1925 1924 elif resp != 0:
1926 1925 raise util.Abort(_('the server sent an unknown error code'))
1927 1926 self.ui.status(_('streaming all changes\n'))
1928 1927 l = fp.readline()
1929 1928 try:
1930 1929 total_files, total_bytes = map(int, l.split(' ', 1))
1931 1930 except ValueError, TypeError:
1932 1931 raise util.UnexpectedOutput(
1933 1932 _('Unexpected response from remote server:'), l)
1934 1933 self.ui.status(_('%d files to transfer, %s of data\n') %
1935 1934 (total_files, util.bytecount(total_bytes)))
1936 1935 start = time.time()
1937 1936 for i in xrange(total_files):
1938 1937 # XXX doesn't support '\n' or '\r' in filenames
1939 1938 l = fp.readline()
1940 1939 try:
1941 1940 name, size = l.split('\0', 1)
1942 1941 size = int(size)
1943 1942 except ValueError, TypeError:
1944 1943 raise util.UnexpectedOutput(
1945 1944 _('Unexpected response from remote server:'), l)
1946 1945 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1947 1946 ofp = self.sopener(name, 'w')
1948 1947 for chunk in util.filechunkiter(fp, limit=size):
1949 1948 ofp.write(chunk)
1950 1949 ofp.close()
1951 1950 elapsed = time.time() - start
1952 1951 if elapsed <= 0:
1953 1952 elapsed = 0.001
1954 1953 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1955 1954 (util.bytecount(total_bytes), elapsed,
1956 1955 util.bytecount(total_bytes / elapsed)))
1957 1956 self.reload()
1958 1957 return len(self.heads()) + 1
1959 1958
1960 1959 def clone(self, remote, heads=[], stream=False):
1961 1960 '''clone remote repository.
1962 1961
1963 1962 keyword arguments:
1964 1963 heads: list of revs to clone (forces use of pull)
1965 1964 stream: use streaming clone if possible'''
1966 1965
1967 1966 # now, all clients that can request uncompressed clones can
1968 1967 # read repo formats supported by all servers that can serve
1969 1968 # them.
1970 1969
1971 1970 # if revlog format changes, client will have to check version
1972 1971 # and format flags on "stream" capability, and use
1973 1972 # uncompressed only if compatible.
1974 1973
1975 1974 if stream and not heads and remote.capable('stream'):
1976 1975 return self.stream_in(remote)
1977 1976 return self.pull(remote, heads)
1978 1977
1979 1978 # used to avoid circular references so destructors work
def aftertrans(files):
    """Return a callback that performs the queued (src, dest) renames.

    The pairs are copied into plain tuples up front so the returned
    closure keeps no reference to the caller's structures (avoiding the
    circular references that would stop destructors from running).
    """
    pending = [tuple(pair) for pair in files]
    def run_renames():
        for source_path, target_path in pending:
            util.rename(source_path, target_path)
    return run_renames
1986 1985
def instance(ui, path, create):
    # repo-type entry point: strip an optional "file:" scheme and
    # open (or, with create set, initialize) a localrepository there
    return localrepository(ui, util.drop_scheme('file', path), create)
1989 1988
def islocal(path):
    # repositories handled by this module are always local filesystem paths
    return True
@@ -1,20 +1,21 b''
1 1 #!/bin/sh
2 2
3 3 hg init
4 hg branch dummy # needed so -r "" doesn't point to the unnamed/default branch
4 5 touch a
5 6 hg add a
6 7 hg ci -m "a" -d "1000000 0"
7 8
8 9 echo 123 > b
9 10 hg add b
10 11 hg diff --nodates
11 12
12 13 hg diff --nodates -r tip
13 14
14 15 echo foo > a
15 16 hg diff --nodates
16 17
17 18 hg diff -r ""
18 19 hg diff -r tip -r ""
19 20
20 21 true
@@ -1,22 +1,22 b''
1 diff -r acd8075edac9 b
1 diff -r 4da5fa99f904 b
2 2 --- /dev/null
3 3 +++ b/b
4 4 @@ -0,0 +1,1 @@
5 5 +123
6 diff -r acd8075edac9 b
6 diff -r 4da5fa99f904 b
7 7 --- /dev/null
8 8 +++ b/b
9 9 @@ -0,0 +1,1 @@
10 10 +123
11 diff -r acd8075edac9 a
11 diff -r 4da5fa99f904 a
12 12 --- a/a
13 13 +++ b/a
14 14 @@ -0,0 +1,1 @@
15 15 +foo
16 diff -r acd8075edac9 b
16 diff -r 4da5fa99f904 b
17 17 --- /dev/null
18 18 +++ b/b
19 19 @@ -0,0 +1,1 @@
20 20 +123
21 21 abort: Ambiguous identifier!
22 22 abort: Ambiguous identifier!
@@ -1,169 +1,172 b''
1 1 adding changesets
2 2 adding manifests
3 3 adding file changes
4 4 added 2 changesets with 2 changes to 1 files
5 5 (run 'hg update' to get a working copy)
6 6 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
7 7 % should fail with encoding error
8 8 M a
9 9 ? latin-1
10 10 ? latin-1-tag
11 11 ? utf-8
12 12 abort: decoding near ' encoded: οΏ½': 'ascii' codec can't decode byte 0xe9 in position 20: ordinal not in range(128)!
13 13 transaction abort!
14 14 rollback completed
15 15 % these should work
16 16 % ascii
17 17 changeset: 5:db5520b4645f
18 18 branch: ?
19 19 tag: tip
20 20 user: test
21 21 date: Mon Jan 12 13:46:40 1970 +0000
22 22 summary: latin1 branch
23 23
24 24 changeset: 4:9cff3c980b58
25 25 user: test
26 26 date: Mon Jan 12 13:46:40 1970 +0000
27 27 summary: Added tag ? for changeset 770b9b11621d
28 28
29 29 changeset: 3:770b9b11621d
30 30 tag: ?
31 31 user: test
32 32 date: Mon Jan 12 13:46:40 1970 +0000
33 33 summary: utf-8 e' encoded: ?
34 34
35 35 changeset: 2:0572af48b948
36 36 user: test
37 37 date: Mon Jan 12 13:46:40 1970 +0000
38 38 summary: latin-1 e' encoded: ?
39 39
40 40 changeset: 1:0e5b7e3f9c4a
41 41 user: test
42 42 date: Mon Jan 12 13:46:40 1970 +0000
43 43 summary: koi8-r: ????? = u'\u0440\u0442\u0443\u0442\u044c'
44 44
45 45 changeset: 0:1e78a93102a3
46 46 user: test
47 47 date: Mon Jan 12 13:46:40 1970 +0000
48 48 summary: latin-1 e': ? = u'\xe9'
49 49
50 50 % latin-1
51 51 changeset: 5:db5520b4645f
52 52 branch: οΏ½
53 53 tag: tip
54 54 user: test
55 55 date: Mon Jan 12 13:46:40 1970 +0000
56 56 summary: latin1 branch
57 57
58 58 changeset: 4:9cff3c980b58
59 59 user: test
60 60 date: Mon Jan 12 13:46:40 1970 +0000
61 61 summary: Added tag οΏ½ for changeset 770b9b11621d
62 62
63 63 changeset: 3:770b9b11621d
64 64 tag: οΏ½
65 65 user: test
66 66 date: Mon Jan 12 13:46:40 1970 +0000
67 67 summary: utf-8 e' encoded: οΏ½
68 68
69 69 changeset: 2:0572af48b948
70 70 user: test
71 71 date: Mon Jan 12 13:46:40 1970 +0000
72 72 summary: latin-1 e' encoded: οΏ½
73 73
74 74 changeset: 1:0e5b7e3f9c4a
75 75 user: test
76 76 date: Mon Jan 12 13:46:40 1970 +0000
77 77 summary: koi8-r: οΏ½οΏ½οΏ½οΏ½οΏ½ = u'\u0440\u0442\u0443\u0442\u044c'
78 78
79 79 changeset: 0:1e78a93102a3
80 80 user: test
81 81 date: Mon Jan 12 13:46:40 1970 +0000
82 82 summary: latin-1 e': οΏ½ = u'\xe9'
83 83
84 84 % utf-8
85 85 changeset: 5:db5520b4645f
86 86 branch: Γ©
87 87 tag: tip
88 88 user: test
89 89 date: Mon Jan 12 13:46:40 1970 +0000
90 90 summary: latin1 branch
91 91
92 92 changeset: 4:9cff3c980b58
93 93 user: test
94 94 date: Mon Jan 12 13:46:40 1970 +0000
95 95 summary: Added tag Γ© for changeset 770b9b11621d
96 96
97 97 changeset: 3:770b9b11621d
98 98 tag: Γ©
99 99 user: test
100 100 date: Mon Jan 12 13:46:40 1970 +0000
101 101 summary: utf-8 e' encoded: Γ©
102 102
103 103 changeset: 2:0572af48b948
104 104 user: test
105 105 date: Mon Jan 12 13:46:40 1970 +0000
106 106 summary: latin-1 e' encoded: Γ©
107 107
108 108 changeset: 1:0e5b7e3f9c4a
109 109 user: test
110 110 date: Mon Jan 12 13:46:40 1970 +0000
111 111 summary: koi8-r: Γ’Γ”Γ•Γ”Γ˜ = u'\u0440\u0442\u0443\u0442\u044c'
112 112
113 113 changeset: 0:1e78a93102a3
114 114 user: test
115 115 date: Mon Jan 12 13:46:40 1970 +0000
116 116 summary: latin-1 e': Γ© = u'\xe9'
117 117
118 118 % ascii
119 119 tip 5:db5520b4645f
120 120 ? 3:770b9b11621d
121 121 % latin-1
122 122 tip 5:db5520b4645f
123 123 οΏ½ 3:770b9b11621d
124 124 % utf-8
125 125 tip 5:db5520b4645f
126 126 Γ© 3:770b9b11621d
127 127 % ascii
128 128 ? 5:db5520b4645f
129 4:9cff3c980b58
129 130 % latin-1
130 131 οΏ½ 5:db5520b4645f
132 4:9cff3c980b58
131 133 % utf-8
132 134 Γ© 5:db5520b4645f
135 4:9cff3c980b58
133 136 % utf-8
134 137 changeset: 5:db5520b4645f
135 138 branch: Γ©
136 139 tag: tip
137 140 user: test
138 141 date: Mon Jan 12 13:46:40 1970 +0000
139 142 summary: latin1 branch
140 143
141 144 changeset: 4:9cff3c980b58
142 145 user: test
143 146 date: Mon Jan 12 13:46:40 1970 +0000
144 147 summary: Added tag Γ© for changeset 770b9b11621d
145 148
146 149 changeset: 3:770b9b11621d
147 150 tag: Γ©
148 151 user: test
149 152 date: Mon Jan 12 13:46:40 1970 +0000
150 153 summary: utf-8 e' encoded: Γ©
151 154
152 155 changeset: 2:0572af48b948
153 156 user: test
154 157 date: Mon Jan 12 13:46:40 1970 +0000
155 158 summary: latin-1 e' encoded: Γ©
156 159
157 160 changeset: 1:0e5b7e3f9c4a
158 161 user: test
159 162 date: Mon Jan 12 13:46:40 1970 +0000
160 163 summary: koi8-r: Ρ€Ρ‚ΡƒΡ‚ΡŒ = u'\u0440\u0442\u0443\u0442\u044c'
161 164
162 165 changeset: 0:1e78a93102a3
163 166 user: test
164 167 date: Mon Jan 12 13:46:40 1970 +0000
165 168 summary: latin-1 e': И = u'\xe9'
166 169
167 170 abort: unknown encoding: dolphin, please check your locale settings
168 171 abort: decoding near 'οΏ½': 'ascii' codec can't decode byte 0xe9 in position 0: ordinal not in range(128)!
169 172 abort: branch name not in UTF-8!
@@ -1,88 +1,89 b''
1 1 #!/bin/sh
2 2
3 3 hg init a
4 4
5 5 cd a
6 6 echo a > a
7 7 hg ci -Ama -d '1 0'
8 8
9 9 hg cp a b
10 10 hg ci -mb -d '2 0'
11 11
12 12 mkdir dir
13 13 hg mv b dir
14 14 hg ci -mc -d '3 0'
15 15
16 16 hg mv a b
17 17 echo a > d
18 18 hg add d
19 19 hg ci -md -d '4 0'
20 20
21 21 hg mv dir/b e
22 22 hg ci -me -d '5 0'
23 23
24 24 hg log a
25 25 echo % -f, directory
26 26 hg log -f dir
27 27 echo % -f, but no args
28 28 hg log -f
29 29 echo % one rename
30 30 hg log -vf a
31 31 echo % many renames
32 32 hg log -vf e
33 33
34 34 echo % log copies
35 35 hg log -vC --template '{rev} {file_copies%filecopy}\n'
36 36
37 37 echo % log copies, non-linear manifest
38 38 hg up -C 3
39 39 hg mv dir/b e
40 40 echo foo > foo
41 41 hg ci -Ame2 -d '6 0'
42 42 hg log -vC --template '{rev} {file_copies%filecopy}\n' -r 5
43 43
44 44 echo '% log -p d'
45 45 hg log -pv d
46 46
47 47 # log --follow tests
48 48 hg init ../follow
49 49 cd ../follow
50 hg branch dummy # needed so -r "" doesn't point to the unnamed/default branch
50 51 echo base > base
51 52 hg ci -Ambase -d '1 0'
52 53
53 54 echo r1 >> base
54 55 hg ci -Amr1 -d '1 0'
55 56 echo r2 >> base
56 57 hg ci -Amr2 -d '1 0'
57 58
58 59 hg up -C 1
59 60 echo b1 > b1
60 61 hg ci -Amb1 -d '1 0'
61 62
62 63 echo % log -f
63 64 hg log -f
64 65
65 66 hg up -C 0
66 67 echo b2 > b2
67 68 hg ci -Amb2 -d '1 0'
68 69
69 70 echo % log -f -r 1:tip
70 71 hg log -f -r 1:tip
71 72
72 73 hg up -C 3
73 74 hg merge tip
74 75 hg ci -mm12 -d '1 0'
75 76
76 77 echo postm >> b1
77 78 hg ci -Amb1.1 -d'1 0'
78 79
79 80 echo % log --follow-first
80 81 hg log --follow-first
81 82
82 83 echo % log -P 2
83 84 hg log -P 2
84 85
85 86 echo '% log -r ""'
86 87 hg log -r ''
87 88
88 89 exit 0
@@ -1,204 +1,219 b''
1 1 adding a
2 2 changeset: 0:8580ff50825a
3 3 user: test
4 4 date: Thu Jan 01 00:00:01 1970 +0000
5 5 summary: a
6 6
7 7 % -f, directory
8 8 abort: can only follow copies/renames for explicit file names
9 9 % -f, but no args
10 10 changeset: 4:b30c444c7c84
11 11 tag: tip
12 12 user: test
13 13 date: Thu Jan 01 00:00:05 1970 +0000
14 14 summary: e
15 15
16 16 changeset: 3:16b60bf3f99a
17 17 user: test
18 18 date: Thu Jan 01 00:00:04 1970 +0000
19 19 summary: d
20 20
21 21 changeset: 2:21fba396af4c
22 22 user: test
23 23 date: Thu Jan 01 00:00:03 1970 +0000
24 24 summary: c
25 25
26 26 changeset: 1:c0296dabce9b
27 27 user: test
28 28 date: Thu Jan 01 00:00:02 1970 +0000
29 29 summary: b
30 30
31 31 changeset: 0:8580ff50825a
32 32 user: test
33 33 date: Thu Jan 01 00:00:01 1970 +0000
34 34 summary: a
35 35
36 36 % one rename
37 37 changeset: 0:8580ff50825a
38 38 user: test
39 39 date: Thu Jan 01 00:00:01 1970 +0000
40 40 files: a
41 41 description:
42 42 a
43 43
44 44
45 45 % many renames
46 46 changeset: 4:b30c444c7c84
47 47 tag: tip
48 48 user: test
49 49 date: Thu Jan 01 00:00:05 1970 +0000
50 50 files: dir/b e
51 51 description:
52 52 e
53 53
54 54
55 55 changeset: 2:21fba396af4c
56 56 user: test
57 57 date: Thu Jan 01 00:00:03 1970 +0000
58 58 files: b dir/b
59 59 description:
60 60 c
61 61
62 62
63 63 changeset: 1:c0296dabce9b
64 64 user: test
65 65 date: Thu Jan 01 00:00:02 1970 +0000
66 66 files: b
67 67 description:
68 68 b
69 69
70 70
71 71 changeset: 0:8580ff50825a
72 72 user: test
73 73 date: Thu Jan 01 00:00:01 1970 +0000
74 74 files: a
75 75 description:
76 76 a
77 77
78 78
79 79 % log copies
80 80 4 e (dir/b)
81 81 3 b (a)
82 82 2 dir/b (b)
83 83 1 b (a)
84 84 0
85 85 % log copies, non-linear manifest
86 86 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
87 87 adding foo
88 88 5 e (dir/b)
89 89 % log -p d
90 90 changeset: 3:16b60bf3f99a
91 91 user: test
92 92 date: Thu Jan 01 00:00:04 1970 +0000
93 93 files: a b d
94 94 description:
95 95 d
96 96
97 97
98 98 diff -r 21fba396af4c -r 16b60bf3f99a d
99 99 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
100 100 +++ b/d Thu Jan 01 00:00:04 1970 +0000
101 101 @@ -0,0 +1,1 @@
102 102 +a
103 103
104 104 adding base
105 105 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
106 106 adding b1
107 107 % log -f
108 changeset: 3:e62f78d544b4
108 changeset: 3:07a62f044f0d
109 branch: dummy
109 110 tag: tip
110 parent: 1:3d5bf5654eda
111 parent: 1:fb3d4e35b279
111 112 user: test
112 113 date: Thu Jan 01 00:00:01 1970 +0000
113 114 summary: b1
114 115
115 changeset: 1:3d5bf5654eda
116 changeset: 1:fb3d4e35b279
117 branch: dummy
116 118 user: test
117 119 date: Thu Jan 01 00:00:01 1970 +0000
118 120 summary: r1
119 121
120 changeset: 0:67e992f2c4f3
122 changeset: 0:ea445bfed6b9
123 branch: dummy
121 124 user: test
122 125 date: Thu Jan 01 00:00:01 1970 +0000
123 126 summary: base
124 127
125 128 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
126 129 adding b2
127 130 % log -f -r 1:tip
128 changeset: 1:3d5bf5654eda
131 changeset: 1:fb3d4e35b279
132 branch: dummy
129 133 user: test
130 134 date: Thu Jan 01 00:00:01 1970 +0000
131 135 summary: r1
132 136
133 changeset: 2:60c670bf5b30
137 changeset: 2:e8882cbc828c
138 branch: dummy
134 139 user: test
135 140 date: Thu Jan 01 00:00:01 1970 +0000
136 141 summary: r2
137 142
138 changeset: 3:e62f78d544b4
139 parent: 1:3d5bf5654eda
143 changeset: 3:07a62f044f0d
144 branch: dummy
145 parent: 1:fb3d4e35b279
140 146 user: test
141 147 date: Thu Jan 01 00:00:01 1970 +0000
142 148 summary: b1
143 149
144 150 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
145 151 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
146 152 (branch merge, don't forget to commit)
147 153 % log --follow-first
148 changeset: 6:2404bbcab562
154 changeset: 6:0f621dafa603
155 branch: dummy
149 156 tag: tip
150 157 user: test
151 158 date: Thu Jan 01 00:00:01 1970 +0000
152 159 summary: b1.1
153 160
154 changeset: 5:302e9dd6890d
155 parent: 3:e62f78d544b4
156 parent: 4:ddb82e70d1a1
161 changeset: 5:0cf53fb6dfd5
162 branch: dummy
163 parent: 3:07a62f044f0d
164 parent: 4:b76598590bc3
157 165 user: test
158 166 date: Thu Jan 01 00:00:01 1970 +0000
159 167 summary: m12
160 168
161 changeset: 3:e62f78d544b4
162 parent: 1:3d5bf5654eda
169 changeset: 3:07a62f044f0d
170 branch: dummy
171 parent: 1:fb3d4e35b279
163 172 user: test
164 173 date: Thu Jan 01 00:00:01 1970 +0000
165 174 summary: b1
166 175
167 changeset: 1:3d5bf5654eda
176 changeset: 1:fb3d4e35b279
177 branch: dummy
168 178 user: test
169 179 date: Thu Jan 01 00:00:01 1970 +0000
170 180 summary: r1
171 181
172 changeset: 0:67e992f2c4f3
182 changeset: 0:ea445bfed6b9
183 branch: dummy
173 184 user: test
174 185 date: Thu Jan 01 00:00:01 1970 +0000
175 186 summary: base
176 187
177 188 % log -P 2
178 changeset: 6:2404bbcab562
189 changeset: 6:0f621dafa603
190 branch: dummy
179 191 tag: tip
180 192 user: test
181 193 date: Thu Jan 01 00:00:01 1970 +0000
182 194 summary: b1.1
183 195
184 changeset: 5:302e9dd6890d
185 parent: 3:e62f78d544b4
186 parent: 4:ddb82e70d1a1
196 changeset: 5:0cf53fb6dfd5
197 branch: dummy
198 parent: 3:07a62f044f0d
199 parent: 4:b76598590bc3
187 200 user: test
188 201 date: Thu Jan 01 00:00:01 1970 +0000
189 202 summary: m12
190 203
191 changeset: 4:ddb82e70d1a1
192 parent: 0:67e992f2c4f3
204 changeset: 4:b76598590bc3
205 branch: dummy
206 parent: 0:ea445bfed6b9
193 207 user: test
194 208 date: Thu Jan 01 00:00:01 1970 +0000
195 209 summary: b2
196 210
197 changeset: 3:e62f78d544b4
198 parent: 1:3d5bf5654eda
211 changeset: 3:07a62f044f0d
212 branch: dummy
213 parent: 1:fb3d4e35b279
199 214 user: test
200 215 date: Thu Jan 01 00:00:01 1970 +0000
201 216 summary: b1
202 217
203 218 % log -r ""
204 219 abort: Ambiguous identifier!
@@ -1,77 +1,80 b''
1 1 foo
2 2 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
3 3 foo
4 4 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
5 5 (branch merge, don't forget to commit)
6 6 foo
7 7 changeset: 5:5f8fb06e083e
8 8 branch: foo
9 9 tag: tip
10 10 parent: 4:4909a3732169
11 11 parent: 3:bf1bc2f45e83
12 12 user: test
13 13 date: Mon Jan 12 13:46:40 1970 +0000
14 14 summary: merge
15 15
16 16 changeset: 4:4909a3732169
17 17 branch: foo
18 18 parent: 1:b699b1cec9c2
19 19 user: test
20 20 date: Mon Jan 12 13:46:40 1970 +0000
21 21 summary: modify a branch
22 22
23 23 changeset: 3:bf1bc2f45e83
24 24 user: test
25 25 date: Mon Jan 12 13:46:40 1970 +0000
26 26 summary: clear branch name
27 27
28 28 changeset: 2:67ec16bde7f1
29 29 branch: bar
30 30 user: test
31 31 date: Mon Jan 12 13:46:40 1970 +0000
32 32 summary: change branch name
33 33
34 34 changeset: 1:b699b1cec9c2
35 35 branch: foo
36 36 user: test
37 37 date: Mon Jan 12 13:46:40 1970 +0000
38 38 summary: add branch name
39 39
40 40 changeset: 0:be8523e69bf8
41 41 user: test
42 42 date: Mon Jan 12 13:46:40 1970 +0000
43 43 summary: initial
44 44
45 45 foo 5:5f8fb06e083e
46 3:bf1bc2f45e83
46 47 bar 2:67ec16bde7f1
47 48 foo
49
48 50 bar
49 51 % test for invalid branch cache
50 52 rolling back last transaction
51 53 changeset: 4:4909a3732169
52 54 branch: foo
53 55 tag: tip
54 56 parent: 1:b699b1cec9c2
55 57 user: test
56 58 date: Mon Jan 12 13:46:40 1970 +0000
57 59 summary: modify a branch
58 60
59 61 Invalid branch cache: unknown tip
60 62 changeset: 4:4909a3732169c0c20011c4f4b8fdff4e3d89b23f
61 63 branch: foo
62 64 tag: tip
63 65 parent: 1:b699b1cec9c2966b3700de4fef0dc123cd754c31
64 66 parent: -1:0000000000000000000000000000000000000000
65 67 manifest: 4:d01b250baaa05909152f7ae07d7a649deea0df9a
66 68 user: test
67 69 date: Mon Jan 12 13:46:40 1970 +0000
68 70 files: a
69 71 extra: branch=foo
70 72 description:
71 73 modify a branch
72 74
73 75
74 76 4:4909a3732169
75 77 4909a3732169c0c20011c4f4b8fdff4e3d89b23f 4
78 bf1bc2f45e834c75404d0ddab57d53beab56e2f8
76 79 4909a3732169c0c20011c4f4b8fdff4e3d89b23f foo
77 80 67ec16bde7f1575d523313b9bca000f6a6f12dca bar
General Comments 0
You need to be logged in to leave comments. Login now