Show number (-v) and list (--debug) of changesets with bundle/pull/push etc.
Thomas Arendsen Hein
r3513:9383af6f default
@@ -1,1832 +1,1841 @@
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 import repo
12 12 demandload(globals(), "appendfile changegroup")
13 13 demandload(globals(), "changelog dirstate filelog manifest context")
14 14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 15 demandload(globals(), "os revlog time util")
16 16
17 17 class localrepository(repo.repository):
18 18 capabilities = ('lookup', 'changegroupsubset')
19 19
20 20 def __del__(self):
21 21 self.transhandle = None
22 22 def __init__(self, parentui, path=None, create=0):
23 23 repo.repository.__init__(self)
24 24 if not path:
25 25 p = os.getcwd()
26 26 while not os.path.isdir(os.path.join(p, ".hg")):
27 27 oldp = p
28 28 p = os.path.dirname(p)
29 29 if p == oldp:
30 30 raise repo.RepoError(_("There is no Mercurial repository"
31 31 " here (.hg not found)"))
32 32 path = p
33 33 self.path = os.path.join(path, ".hg")
34 34
35 35 if not os.path.isdir(self.path):
36 36 if create:
37 37 if not os.path.exists(path):
38 38 os.mkdir(path)
39 39 os.mkdir(self.path)
40 40 os.mkdir(self.join("data"))
41 41 else:
42 42 raise repo.RepoError(_("repository %s not found") % path)
43 43 elif create:
44 44 raise repo.RepoError(_("repository %s already exists") % path)
45 45
46 46 self.root = os.path.abspath(path)
47 47 self.origroot = path
48 48 self.ui = ui.ui(parentui=parentui)
49 49 self.opener = util.opener(self.path)
50 50 self.sopener = util.opener(self.path)
51 51 self.wopener = util.opener(self.root)
52 52
53 53 try:
54 54 self.ui.readconfig(self.join("hgrc"), self.root)
55 55 except IOError:
56 56 pass
57 57
58 58 v = self.ui.configrevlog()
59 59 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
60 60 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
61 61 fl = v.get('flags', None)
62 62 flags = 0
63 63 if fl != None:
64 64 for x in fl.split():
65 65 flags |= revlog.flagstr(x)
66 66 elif self.revlogv1:
67 67 flags = revlog.REVLOG_DEFAULT_FLAGS
68 68
69 69 v = self.revlogversion | flags
70 70 self.manifest = manifest.manifest(self.sopener, v)
71 71 self.changelog = changelog.changelog(self.sopener, v)
72 72
73 73 # the changelog might not have the inline index flag
74 74 # on. If the format of the changelog is the same as found in
75 75 # .hgrc, apply any flags found in the .hgrc as well.
76 76 # Otherwise, just use the version from the changelog
77 77 v = self.changelog.version
78 78 if v == self.revlogversion:
79 79 v |= flags
80 80 self.revlogversion = v
81 81
82 82 self.tagscache = None
83 83 self.branchcache = None
84 84 self.nodetagscache = None
85 85 self.encodepats = None
86 86 self.decodepats = None
87 87 self.transhandle = None
88 88
89 89 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
90 90
91 91 def url(self):
92 92 return 'file:' + self.root
93 93
94 94 def hook(self, name, throw=False, **args):
95 95 def callhook(hname, funcname):
96 96 '''call python hook. hook is callable object, looked up as
97 97 name in python module. if callable returns "true", hook
98 98 fails, else passes. if hook raises exception, treated as
99 99 hook failure. exception propagates if throw is "true".
100 100
101 101 reason for "true" meaning "hook failed" is so that
102 102 unmodified commands (e.g. mercurial.commands.update) can
103 103 be run as hooks without wrappers to convert return values.'''
104 104
105 105 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
106 106 d = funcname.rfind('.')
107 107 if d == -1:
108 108 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
109 109 % (hname, funcname))
110 110 modname = funcname[:d]
111 111 try:
112 112 obj = __import__(modname)
113 113 except ImportError:
114 114 try:
115 115 # extensions are loaded with hgext_ prefix
116 116 obj = __import__("hgext_%s" % modname)
117 117 except ImportError:
118 118 raise util.Abort(_('%s hook is invalid '
119 119 '(import of "%s" failed)') %
120 120 (hname, modname))
121 121 try:
122 122 for p in funcname.split('.')[1:]:
123 123 obj = getattr(obj, p)
124 124 except AttributeError, err:
125 125 raise util.Abort(_('%s hook is invalid '
126 126 '("%s" is not defined)') %
127 127 (hname, funcname))
128 128 if not callable(obj):
129 129 raise util.Abort(_('%s hook is invalid '
130 130 '("%s" is not callable)') %
131 131 (hname, funcname))
132 132 try:
133 133 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
134 134 except (KeyboardInterrupt, util.SignalInterrupt):
135 135 raise
136 136 except Exception, exc:
137 137 if isinstance(exc, util.Abort):
138 138 self.ui.warn(_('error: %s hook failed: %s\n') %
139 139 (hname, exc.args[0]))
140 140 else:
141 141 self.ui.warn(_('error: %s hook raised an exception: '
142 142 '%s\n') % (hname, exc))
143 143 if throw:
144 144 raise
145 145 self.ui.print_exc()
146 146 return True
147 147 if r:
148 148 if throw:
149 149 raise util.Abort(_('%s hook failed') % hname)
150 150 self.ui.warn(_('warning: %s hook failed\n') % hname)
151 151 return r
152 152
153 153 def runhook(name, cmd):
154 154 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
155 155 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
156 156 r = util.system(cmd, environ=env, cwd=self.root)
157 157 if r:
158 158 desc, r = util.explain_exit(r)
159 159 if throw:
160 160 raise util.Abort(_('%s hook %s') % (name, desc))
161 161 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
162 162 return r
163 163
164 164 r = False
165 165 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
166 166 if hname.split(".", 1)[0] == name and cmd]
167 167 hooks.sort()
168 168 for hname, cmd in hooks:
169 169 if cmd.startswith('python:'):
170 170 r = callhook(hname, cmd[7:].strip()) or r
171 171 else:
172 172 r = runhook(hname, cmd) or r
173 173 return r
174 174
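The hook() method above resolves entries from the [hooks] section: values starting with "python:" are imported as module.function and called with ui, repo, hooktype and the keyword arguments, while anything else runs as a shell command with the arguments exported as HG_* environment variables; a truthy return value means failure. A minimal illustrative sketch of a Python hook, not part of this changeset (the module name and hgrc entry are hypothetical):

    # myhooks.py -- hypothetical module somewhere on PYTHONPATH,
    # enabled with an hgrc entry such as:
    #   [hooks]
    #   pretxncommit.check = python:myhooks.check_description
    def check_description(ui, repo, hooktype, node=None, **kwargs):
        # hook() treats a truthy return value as failure, so returning
        # True here makes the pretxncommit caller abort the transaction
        ctx = repo.changectx(node)
        if not ctx.description().strip():
            ui.warn('rejecting %s: empty commit message\n' % node)
            return True
        return False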
175 175 tag_disallowed = ':\r\n'
176 176
177 177 def tag(self, name, node, message, local, user, date):
178 178 '''tag a revision with a symbolic name.
179 179
180 180 if local is True, the tag is stored in a per-repository file.
181 181 otherwise, it is stored in the .hgtags file, and a new
182 182 changeset is committed with the change.
183 183
184 184 keyword arguments:
185 185
186 186 local: whether to store tag in non-version-controlled file
187 187 (default False)
188 188
189 189 message: commit message to use if committing
190 190
191 191 user: name of user to use if committing
192 192
193 193 date: date tuple to use if committing'''
194 194
195 195 for c in self.tag_disallowed:
196 196 if c in name:
197 197 raise util.Abort(_('%r cannot be used in a tag name') % c)
198 198
199 199 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
200 200
201 201 if local:
202 202 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
203 203 self.hook('tag', node=hex(node), tag=name, local=local)
204 204 return
205 205
206 206 for x in self.status()[:5]:
207 207 if '.hgtags' in x:
208 208 raise util.Abort(_('working copy of .hgtags is changed '
209 209 '(please commit .hgtags manually)'))
210 210
211 211 self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name))
212 212 if self.dirstate.state('.hgtags') == '?':
213 213 self.add(['.hgtags'])
214 214
215 215 self.commit(['.hgtags'], message, user, date)
216 216 self.hook('tag', node=hex(node), tag=name, local=local)
217 217
218 218 def tags(self):
219 219 '''return a mapping of tag to node'''
220 220 if not self.tagscache:
221 221 self.tagscache = {}
222 222
223 223 def parsetag(line, context):
224 224 if not line:
225 225 return
226 226 s = line.split(" ", 1)
227 227 if len(s) != 2:
228 228 self.ui.warn(_("%s: cannot parse entry\n") % context)
229 229 return
230 230 node, key = s
231 231 key = key.strip()
232 232 try:
233 233 bin_n = bin(node)
234 234 except TypeError:
235 235 self.ui.warn(_("%s: node '%s' is not well formed\n") %
236 236 (context, node))
237 237 return
238 238 if bin_n not in self.changelog.nodemap:
239 239 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
240 240 (context, key))
241 241 return
242 242 self.tagscache[key] = bin_n
243 243
244 244 # read the tags file from each head, ending with the tip,
245 245 # and add each tag found to the map, with "newer" ones
246 246 # taking precedence
247 247 heads = self.heads()
248 248 heads.reverse()
249 249 seen = {}
250 250 for node in heads:
251 251 f = self.filectx('.hgtags', node)
252 252 if not f or f.filerev() in seen: continue
253 253 seen[f.filerev()] = 1
254 254 count = 0
255 255 for l in f.data().splitlines():
256 256 count += 1
257 257 parsetag(l, _("%s, line %d") % (str(f), count))
258 258
259 259 try:
260 260 f = self.opener("localtags")
261 261 count = 0
262 262 for l in f:
263 263 count += 1
264 264 parsetag(l, _("localtags, line %d") % count)
265 265 except IOError:
266 266 pass
267 267
268 268 self.tagscache['tip'] = self.changelog.tip()
269 269
270 270 return self.tagscache
271 271
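parsetag() above accepts one "hexnode name" pair per line, both from .hgtags on each head and from .hg/localtags; lines whose node is malformed or unknown are warned about and skipped, and later entries override earlier ones. A standalone sketch of that line format (illustrative only):

    from binascii import unhexlify

    def parse_tag_line(line):
        # mirror of parsetag(): split on the first space only, so the tag
        # name itself may contain spaces; 40 hex digits -> 20-byte node
        node_hex, name = line.split(' ', 1)
        return unhexlify(node_hex), name.strip()

    node, name = parse_tag_line(
        '0123456789abcdef0123456789abcdef01234567 release 1.0\n')
    assert len(node) == 20 and name == 'release 1.0'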
272 272 def tagslist(self):
273 273 '''return a list of tags ordered by revision'''
274 274 l = []
275 275 for t, n in self.tags().items():
276 276 try:
277 277 r = self.changelog.rev(n)
278 278 except:
279 279 r = -2 # sort to the beginning of the list if unknown
280 280 l.append((r, t, n))
281 281 l.sort()
282 282 return [(t, n) for r, t, n in l]
283 283
284 284 def nodetags(self, node):
285 285 '''return the tags associated with a node'''
286 286 if not self.nodetagscache:
287 287 self.nodetagscache = {}
288 288 for t, n in self.tags().items():
289 289 self.nodetagscache.setdefault(n, []).append(t)
290 290 return self.nodetagscache.get(node, [])
291 291
292 292 def branchtags(self):
293 293 if self.branchcache != None:
294 294 return self.branchcache
295 295
296 296 self.branchcache = {} # avoid recursion in changectx
297 297
298 298 partial, last, lrev = self._readbranchcache()
299 299
300 300 tiprev = self.changelog.count() - 1
301 301 if lrev != tiprev:
302 302 self._updatebranchcache(partial, lrev+1, tiprev+1)
303 303 self._writebranchcache(partial, self.changelog.tip(), tiprev)
304 304
305 305 self.branchcache = partial
306 306 return self.branchcache
307 307
308 308 def _readbranchcache(self):
309 309 partial = {}
310 310 try:
311 311 f = self.opener("branches.cache")
312 312 last, lrev = f.readline().rstrip().split(" ", 1)
313 313 last, lrev = bin(last), int(lrev)
314 314 if (lrev < self.changelog.count() and
315 315 self.changelog.node(lrev) == last): # sanity check
316 316 for l in f:
317 317 node, label = l.rstrip().split(" ", 1)
318 318 partial[label] = bin(node)
319 319 else: # invalidate the cache
320 320 last, lrev = nullid, -1
321 321 f.close()
322 322 except IOError:
323 323 last, lrev = nullid, -1
324 324 return partial, last, lrev
325 325
326 326 def _writebranchcache(self, branches, tip, tiprev):
327 327 try:
328 328 f = self.opener("branches.cache", "w")
329 329 f.write("%s %s\n" % (hex(tip), tiprev))
330 330 for label, node in branches.iteritems():
331 331 f.write("%s %s\n" % (hex(node), label))
332 332 except IOError:
333 333 pass
334 334
335 335 def _updatebranchcache(self, partial, start, end):
336 336 for r in xrange(start, end):
337 337 c = self.changectx(r)
338 338 b = c.branch()
339 339 if b:
340 340 partial[b] = c.node()
341 341
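The branch cache written above is a plain text file, .hg/branches.cache: the first line holds the cached tip as "tip-hex tip-rev" (the sanity check in _readbranchcache compares it against the changelog), and each following line maps "node-hex label" for one named branch. A reader for just that layout, as an illustrative sketch:

    from binascii import unhexlify

    def read_branches_cache(path):
        # returns (partial, last, lrev) in the same shape as
        # _readbranchcache(), minus the changelog sanity check
        partial = {}
        f = open(path)
        last_hex, lrev = f.readline().rstrip().split(' ', 1)
        for line in f:
            node_hex, label = line.rstrip().split(' ', 1)
            partial[label] = unhexlify(node_hex)
        f.close()
        return partial, unhexlify(last_hex), int(lrev)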
342 342 def lookup(self, key):
343 343 if key == '.':
344 344 key = self.dirstate.parents()[0]
345 345 if key == nullid:
346 346 raise repo.RepoError(_("no revision checked out"))
347 347 n = self.changelog._match(key)
348 348 if n:
349 349 return n
350 350 if key in self.tags():
351 351 return self.tags()[key]
352 352 if key in self.branchtags():
353 353 return self.branchtags()[key]
354 354 n = self.changelog._partialmatch(key)
355 355 if n:
356 356 return n
357 357 raise repo.RepoError(_("unknown revision '%s'") % key)
358 358
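lookup() resolves a symbol in a fixed order: '.' means the first dirstate parent, then an exact changelog match, then tags, then named branches, and finally an unambiguous hex prefix; anything else raises RepoError. A sketch of typical programmatic use, with a hypothetical repository path:

    from mercurial import hg, ui

    repo = hg.repository(ui.ui(), '/path/to/repo')   # hypothetical path
    node = repo.lookup('tip')            # 20-byte binary changeset node
    rev = repo.changelog.rev(node)       # its local revision number
    names = repo.nodetags(node)          # symbolic names pointing at it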
359 359 def dev(self):
360 360 return os.lstat(self.path).st_dev
361 361
362 362 def local(self):
363 363 return True
364 364
365 365 def join(self, f):
366 366 return os.path.join(self.path, f)
367 367
368 368 def sjoin(self, f):
369 369 return os.path.join(self.path, f)
370 370
371 371 def wjoin(self, f):
372 372 return os.path.join(self.root, f)
373 373
374 374 def file(self, f):
375 375 if f[0] == '/':
376 376 f = f[1:]
377 377 return filelog.filelog(self.sopener, f, self.revlogversion)
378 378
379 379 def changectx(self, changeid=None):
380 380 return context.changectx(self, changeid)
381 381
382 382 def workingctx(self):
383 383 return context.workingctx(self)
384 384
385 385 def parents(self, changeid=None):
386 386 '''
387 387 get list of changectxs for parents of changeid or working directory
388 388 '''
389 389 if changeid is None:
390 390 pl = self.dirstate.parents()
391 391 else:
392 392 n = self.changelog.lookup(changeid)
393 393 pl = self.changelog.parents(n)
394 394 if pl[1] == nullid:
395 395 return [self.changectx(pl[0])]
396 396 return [self.changectx(pl[0]), self.changectx(pl[1])]
397 397
398 398 def filectx(self, path, changeid=None, fileid=None):
399 399 """changeid can be a changeset revision, node, or tag.
400 400 fileid can be a file revision or node."""
401 401 return context.filectx(self, path, changeid, fileid)
402 402
403 403 def getcwd(self):
404 404 return self.dirstate.getcwd()
405 405
406 406 def wfile(self, f, mode='r'):
407 407 return self.wopener(f, mode)
408 408
409 409 def wread(self, filename):
410 410 if self.encodepats == None:
411 411 l = []
412 412 for pat, cmd in self.ui.configitems("encode"):
413 413 mf = util.matcher(self.root, "", [pat], [], [])[1]
414 414 l.append((mf, cmd))
415 415 self.encodepats = l
416 416
417 417 data = self.wopener(filename, 'r').read()
418 418
419 419 for mf, cmd in self.encodepats:
420 420 if mf(filename):
421 421 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
422 422 data = util.filter(data, cmd)
423 423 break
424 424
425 425 return data
426 426
427 427 def wwrite(self, filename, data, fd=None):
428 428 if self.decodepats == None:
429 429 l = []
430 430 for pat, cmd in self.ui.configitems("decode"):
431 431 mf = util.matcher(self.root, "", [pat], [], [])[1]
432 432 l.append((mf, cmd))
433 433 self.decodepats = l
434 434
435 435 for mf, cmd in self.decodepats:
436 436 if mf(filename):
437 437 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
438 438 data = util.filter(data, cmd)
439 439 break
440 440
441 441 if fd:
442 442 return fd.write(data)
443 443 return self.wopener(filename, 'w').write(data)
444 444
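wread() pushes working-directory contents through the first matching [encode] filter and wwrite() applies the matching [decode] filter on the way back out; each hgrc entry maps a pattern to a shell command that util.filter() pipes the data through. A simplified standalone sketch of that selection step, using fnmatch in place of util.matcher and a placeholder filter command:

    import fnmatch
    import subprocess

    def apply_filter(filename, data, patterns):
        # patterns: (glob, shell command) pairs, e.g. taken from an hgrc
        # [encode] section; the first matching pattern wins, as in wread()
        for pat, cmd in patterns:
            if fnmatch.fnmatch(filename, pat):
                p = subprocess.Popen(cmd, shell=True,
                                     stdin=subprocess.PIPE,
                                     stdout=subprocess.PIPE)
                return p.communicate(data)[0]
        return data

    clean = apply_filter('notes.txt', b"a\r\nb\r\n",
                         [("*.txt", "tr -d '\\r'")])   # placeholder filter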
445 445 def transaction(self):
446 446 tr = self.transhandle
447 447 if tr != None and tr.running():
448 448 return tr.nest()
449 449
450 450 # save dirstate for rollback
451 451 try:
452 452 ds = self.opener("dirstate").read()
453 453 except IOError:
454 454 ds = ""
455 455 self.opener("journal.dirstate", "w").write(ds)
456 456
457 457 tr = transaction.transaction(self.ui.warn, self.sopener,
458 458 self.sjoin("journal"),
459 459 aftertrans(self.path))
460 460 self.transhandle = tr
461 461 return tr
462 462
463 463 def recover(self):
464 464 l = self.lock()
465 465 if os.path.exists(self.sjoin("journal")):
466 466 self.ui.status(_("rolling back interrupted transaction\n"))
467 467 transaction.rollback(self.sopener, self.sjoin("journal"))
468 468 self.reload()
469 469 return True
470 470 else:
471 471 self.ui.warn(_("no interrupted transaction available\n"))
472 472 return False
473 473
474 474 def rollback(self, wlock=None):
475 475 if not wlock:
476 476 wlock = self.wlock()
477 477 l = self.lock()
478 478 if os.path.exists(self.sjoin("undo")):
479 479 self.ui.status(_("rolling back last transaction\n"))
480 480 transaction.rollback(self.sopener, self.sjoin("undo"))
481 481 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
482 482 self.reload()
483 483 self.wreload()
484 484 else:
485 485 self.ui.warn(_("no rollback information available\n"))
486 486
487 487 def wreload(self):
488 488 self.dirstate.read()
489 489
490 490 def reload(self):
491 491 self.changelog.load()
492 492 self.manifest.load()
493 493 self.tagscache = None
494 494 self.nodetagscache = None
495 495
496 496 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
497 497 desc=None):
498 498 try:
499 499 l = lock.lock(lockname, 0, releasefn, desc=desc)
500 500 except lock.LockHeld, inst:
501 501 if not wait:
502 502 raise
503 503 self.ui.warn(_("waiting for lock on %s held by %s\n") %
504 504 (desc, inst.args[0]))
505 505 # default to 600 seconds timeout
506 506 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
507 507 releasefn, desc=desc)
508 508 if acquirefn:
509 509 acquirefn()
510 510 return l
511 511
512 512 def lock(self, wait=1):
513 513 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
514 514 desc=_('repository %s') % self.origroot)
515 515
516 516 def wlock(self, wait=1):
517 517 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
518 518 self.wreload,
519 519 desc=_('working directory of %s') % self.origroot)
520 520
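lock() protects the repository store while wlock() protects the working directory and dirstate; both wait up to the ui.timeout setting (600 seconds by default) and run their reload callbacks on acquisition. Callers that need both take the working-directory lock first, as commit() below does. A small sketch of that pattern, assuming repo is a localrepository instance:

    def with_both_locks(repo, operation):
        # order matters: working-directory lock first, then the
        # repository lock, matching commit() below
        wlock = repo.wlock()
        lock = repo.lock()
        try:
            return operation()
        finally:
            del lock, wlock   # lock objects release themselves when dereferenced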
521 521 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
522 522 """
523 523 commit an individual file as part of a larger transaction
524 524 """
525 525
526 526 t = self.wread(fn)
527 527 fl = self.file(fn)
528 528 fp1 = manifest1.get(fn, nullid)
529 529 fp2 = manifest2.get(fn, nullid)
530 530
531 531 meta = {}
532 532 cp = self.dirstate.copied(fn)
533 533 if cp:
534 534 meta["copy"] = cp
535 535 if not manifest2: # not a branch merge
536 536 meta["copyrev"] = hex(manifest1.get(cp, nullid))
537 537 fp2 = nullid
538 538 elif fp2 != nullid: # copied on remote side
539 539 meta["copyrev"] = hex(manifest1.get(cp, nullid))
540 540 else: # copied on local side, reversed
541 541 meta["copyrev"] = hex(manifest2.get(cp))
542 542 fp2 = nullid
543 543 self.ui.debug(_(" %s: copy %s:%s\n") %
544 544 (fn, cp, meta["copyrev"]))
545 545 fp1 = nullid
546 546 elif fp2 != nullid:
547 547 # is one parent an ancestor of the other?
548 548 fpa = fl.ancestor(fp1, fp2)
549 549 if fpa == fp1:
550 550 fp1, fp2 = fp2, nullid
551 551 elif fpa == fp2:
552 552 fp2 = nullid
553 553
554 554 # is the file unmodified from the parent? report existing entry
555 555 if fp2 == nullid and not fl.cmp(fp1, t):
556 556 return fp1
557 557
558 558 changelist.append(fn)
559 559 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
560 560
561 561 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
562 562 orig_parent = self.dirstate.parents()[0] or nullid
563 563 p1 = p1 or self.dirstate.parents()[0] or nullid
564 564 p2 = p2 or self.dirstate.parents()[1] or nullid
565 565 c1 = self.changelog.read(p1)
566 566 c2 = self.changelog.read(p2)
567 567 m1 = self.manifest.read(c1[0]).copy()
568 568 m2 = self.manifest.read(c2[0])
569 569 changed = []
570 570 removed = []
571 571
572 572 if orig_parent == p1:
573 573 update_dirstate = 1
574 574 else:
575 575 update_dirstate = 0
576 576
577 577 if not wlock:
578 578 wlock = self.wlock()
579 579 l = self.lock()
580 580 tr = self.transaction()
581 581 linkrev = self.changelog.count()
582 582 for f in files:
583 583 try:
584 584 m1[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
585 585 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
586 586 except IOError:
587 587 try:
588 588 del m1[f]
589 589 if update_dirstate:
590 590 self.dirstate.forget([f])
591 591 removed.append(f)
592 592 except:
593 593 # deleted from p2?
594 594 pass
595 595
596 596 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
597 597 user = user or self.ui.username()
598 598 n = self.changelog.add(mnode, changed + removed, text,
599 599 tr, p1, p2, user, date)
600 600 tr.close()
601 601 if update_dirstate:
602 602 self.dirstate.setparents(n, nullid)
603 603
604 604 def commit(self, files=None, text="", user=None, date=None,
605 605 match=util.always, force=False, lock=None, wlock=None,
606 606 force_editor=False):
607 607 commit = []
608 608 remove = []
609 609 changed = []
610 610
611 611 if files:
612 612 for f in files:
613 613 s = self.dirstate.state(f)
614 614 if s in 'nmai':
615 615 commit.append(f)
616 616 elif s == 'r':
617 617 remove.append(f)
618 618 else:
619 619 self.ui.warn(_("%s not tracked!\n") % f)
620 620 else:
621 621 modified, added, removed, deleted, unknown = self.status(match=match)[:5]
622 622 commit = modified + added
623 623 remove = removed
624 624
625 625 p1, p2 = self.dirstate.parents()
626 626 c1 = self.changelog.read(p1)
627 627 c2 = self.changelog.read(p2)
628 628 m1 = self.manifest.read(c1[0]).copy()
629 629 m2 = self.manifest.read(c2[0])
630 630
631 631 branchname = self.workingctx().branch()
632 632 oldname = c1[5].get("branch", "")
633 633
634 634 if not commit and not remove and not force and p2 == nullid and \
635 635 branchname == oldname:
636 636 self.ui.status(_("nothing changed\n"))
637 637 return None
638 638
639 639 xp1 = hex(p1)
640 640 if p2 == nullid: xp2 = ''
641 641 else: xp2 = hex(p2)
642 642
643 643 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
644 644
645 645 if not wlock:
646 646 wlock = self.wlock()
647 647 if not lock:
648 648 lock = self.lock()
649 649 tr = self.transaction()
650 650
651 651 # check in files
652 652 new = {}
653 653 linkrev = self.changelog.count()
654 654 commit.sort()
655 655 for f in commit:
656 656 self.ui.note(f + "\n")
657 657 try:
658 658 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
659 659 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
660 660 except IOError:
661 661 self.ui.warn(_("trouble committing %s!\n") % f)
662 662 raise
663 663
664 664 # update manifest
665 665 m1.update(new)
666 666 for f in remove:
667 667 if f in m1:
668 668 del m1[f]
669 669 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
670 670
671 671 # add changeset
672 672 new = new.keys()
673 673 new.sort()
674 674
675 675 user = user or self.ui.username()
676 676 if not text or force_editor:
677 677 edittext = []
678 678 if text:
679 679 edittext.append(text)
680 680 edittext.append("")
681 681 if p2 != nullid:
682 682 edittext.append("HG: branch merge")
683 683 edittext.extend(["HG: changed %s" % f for f in changed])
684 684 edittext.extend(["HG: removed %s" % f for f in remove])
685 685 if not changed and not remove:
686 686 edittext.append("HG: no files changed")
687 687 edittext.append("")
688 688 # run editor in the repository root
689 689 olddir = os.getcwd()
690 690 os.chdir(self.root)
691 691 text = self.ui.edit("\n".join(edittext), user)
692 692 os.chdir(olddir)
693 693
694 694 lines = [line.rstrip() for line in text.rstrip().splitlines()]
695 695 while lines and not lines[0]:
696 696 del lines[0]
697 697 if not lines:
698 698 return None
699 699 text = '\n'.join(lines)
700 700 extra = {}
701 701 if branchname:
702 702 extra["branch"] = branchname
703 703 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
704 704 user, date, extra)
705 705 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
706 706 parent2=xp2)
707 707 tr.close()
708 708
709 709 self.dirstate.setparents(n)
710 710 self.dirstate.update(new, "n")
711 711 self.dirstate.forget(remove)
712 712
713 713 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
714 714 return n
715 715
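commit() doubles as the programmatic entry point: with files=None it commits whatever status() reports as modified, added, or removed, fires the precommit/pretxncommit/commit hooks, and returns the new changeset node, or None when nothing changed or the edited message came back empty. A minimal sketch, assuming repo is a localrepository instance:

    def commit_all(repo, message, user=None):
        # files=None commits everything status() reports as modified,
        # added or removed; the return value is the new changeset node,
        # or None if nothing changed
        node = repo.commit(text=message, user=user)
        if node is None:
            repo.ui.status('nothing changed\n')
        return node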
716 716 def walk(self, node=None, files=[], match=util.always, badmatch=None):
717 717 if node:
718 718 fdict = dict.fromkeys(files)
719 719 for fn in self.manifest.read(self.changelog.read(node)[0]):
720 720 for ffn in fdict:
721 721 # match if the file is the exact name or a directory
722 722 if ffn == fn or fn.startswith("%s/" % ffn):
723 723 del fdict[ffn]
724 724 break
725 725 if match(fn):
726 726 yield 'm', fn
727 727 for fn in fdict:
728 728 if badmatch and badmatch(fn):
729 729 if match(fn):
730 730 yield 'b', fn
731 731 else:
732 732 self.ui.warn(_('%s: No such file in rev %s\n') % (
733 733 util.pathto(self.getcwd(), fn), short(node)))
734 734 else:
735 735 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
736 736 yield src, fn
737 737
738 738 def status(self, node1=None, node2=None, files=[], match=util.always,
739 739 wlock=None, list_ignored=False, list_clean=False):
740 740 """return status of files between two nodes or node and working directory
741 741
742 742 If node1 is None, use the first dirstate parent instead.
743 743 If node2 is None, compare node1 with working directory.
744 744 """
745 745
746 746 def fcmp(fn, mf):
747 747 t1 = self.wread(fn)
748 748 return self.file(fn).cmp(mf.get(fn, nullid), t1)
749 749
750 750 def mfmatches(node):
751 751 change = self.changelog.read(node)
752 752 mf = self.manifest.read(change[0]).copy()
753 753 for fn in mf.keys():
754 754 if not match(fn):
755 755 del mf[fn]
756 756 return mf
757 757
758 758 modified, added, removed, deleted, unknown = [], [], [], [], []
759 759 ignored, clean = [], []
760 760
761 761 compareworking = False
762 762 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
763 763 compareworking = True
764 764
765 765 if not compareworking:
766 766 # read the manifest from node1 before the manifest from node2,
767 767 # so that we'll hit the manifest cache if we're going through
768 768 # all the revisions in parent->child order.
769 769 mf1 = mfmatches(node1)
770 770
771 771 # are we comparing the working directory?
772 772 if not node2:
773 773 if not wlock:
774 774 try:
775 775 wlock = self.wlock(wait=0)
776 776 except lock.LockException:
777 777 wlock = None
778 778 (lookup, modified, added, removed, deleted, unknown,
779 779 ignored, clean) = self.dirstate.status(files, match,
780 780 list_ignored, list_clean)
781 781
782 782 # are we comparing working dir against its parent?
783 783 if compareworking:
784 784 if lookup:
785 785 # do a full compare of any files that might have changed
786 786 mf2 = mfmatches(self.dirstate.parents()[0])
787 787 for f in lookup:
788 788 if fcmp(f, mf2):
789 789 modified.append(f)
790 790 else:
791 791 clean.append(f)
792 792 if wlock is not None:
793 793 self.dirstate.update([f], "n")
794 794 else:
795 795 # we are comparing working dir against non-parent
796 796 # generate a pseudo-manifest for the working dir
797 797 # XXX: create it in dirstate.py ?
798 798 mf2 = mfmatches(self.dirstate.parents()[0])
799 799 for f in lookup + modified + added:
800 800 mf2[f] = ""
801 801 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
802 802 for f in removed:
803 803 if f in mf2:
804 804 del mf2[f]
805 805 else:
806 806 # we are comparing two revisions
807 807 mf2 = mfmatches(node2)
808 808
809 809 if not compareworking:
810 810 # flush lists from dirstate before comparing manifests
811 811 modified, added, clean = [], [], []
812 812
813 813 # make sure to sort the files so we talk to the disk in a
814 814 # reasonable order
815 815 mf2keys = mf2.keys()
816 816 mf2keys.sort()
817 817 for fn in mf2keys:
818 818 if mf1.has_key(fn):
819 819 if mf1.flags(fn) != mf2.flags(fn) or \
820 820 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
821 821 modified.append(fn)
822 822 elif list_clean:
823 823 clean.append(fn)
824 824 del mf1[fn]
825 825 else:
826 826 added.append(fn)
827 827
828 828 removed = mf1.keys()
829 829
830 830 # sort and return results:
831 831 for l in modified, added, removed, deleted, unknown, ignored, clean:
832 832 l.sort()
833 833 return (modified, added, removed, deleted, unknown, ignored, clean)
834 834
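status() always returns the same seven lists in a fixed order; ignored and clean are left empty unless list_ignored/list_clean are requested. A usage sketch, assuming repo is a localrepository instance:

    def print_changes(repo):
        # the seven lists always come back in this order; ignored and
        # clean stay empty unless list_ignored/list_clean are passed
        modified, added, removed, deleted, unknown, ignored, clean = \
            repo.status(list_clean=True)
        for prefix, names in [('M', modified), ('A', added), ('R', removed),
                              ('!', deleted), ('?', unknown), ('C', clean)]:
            for f in names:
                repo.ui.write('%s %s\n' % (prefix, f))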
835 835 def add(self, list, wlock=None):
836 836 if not wlock:
837 837 wlock = self.wlock()
838 838 for f in list:
839 839 p = self.wjoin(f)
840 840 if not os.path.exists(p):
841 841 self.ui.warn(_("%s does not exist!\n") % f)
842 842 elif not os.path.isfile(p):
843 843 self.ui.warn(_("%s not added: only files supported currently\n")
844 844 % f)
845 845 elif self.dirstate.state(f) in 'an':
846 846 self.ui.warn(_("%s already tracked!\n") % f)
847 847 else:
848 848 self.dirstate.update([f], "a")
849 849
850 850 def forget(self, list, wlock=None):
851 851 if not wlock:
852 852 wlock = self.wlock()
853 853 for f in list:
854 854 if self.dirstate.state(f) not in 'ai':
855 855 self.ui.warn(_("%s not added!\n") % f)
856 856 else:
857 857 self.dirstate.forget([f])
858 858
859 859 def remove(self, list, unlink=False, wlock=None):
860 860 if unlink:
861 861 for f in list:
862 862 try:
863 863 util.unlink(self.wjoin(f))
864 864 except OSError, inst:
865 865 if inst.errno != errno.ENOENT:
866 866 raise
867 867 if not wlock:
868 868 wlock = self.wlock()
869 869 for f in list:
870 870 p = self.wjoin(f)
871 871 if os.path.exists(p):
872 872 self.ui.warn(_("%s still exists!\n") % f)
873 873 elif self.dirstate.state(f) == 'a':
874 874 self.dirstate.forget([f])
875 875 elif f not in self.dirstate:
876 876 self.ui.warn(_("%s not tracked!\n") % f)
877 877 else:
878 878 self.dirstate.update([f], "r")
879 879
880 880 def undelete(self, list, wlock=None):
881 881 p = self.dirstate.parents()[0]
882 882 mn = self.changelog.read(p)[0]
883 883 m = self.manifest.read(mn)
884 884 if not wlock:
885 885 wlock = self.wlock()
886 886 for f in list:
887 887 if self.dirstate.state(f) not in "r":
888 888 self.ui.warn("%s not removed!\n" % f)
889 889 else:
890 890 t = self.file(f).read(m[f])
891 891 self.wwrite(f, t)
892 892 util.set_exec(self.wjoin(f), m.execf(f))
893 893 self.dirstate.update([f], "n")
894 894
895 895 def copy(self, source, dest, wlock=None):
896 896 p = self.wjoin(dest)
897 897 if not os.path.exists(p):
898 898 self.ui.warn(_("%s does not exist!\n") % dest)
899 899 elif not os.path.isfile(p):
900 900 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
901 901 else:
902 902 if not wlock:
903 903 wlock = self.wlock()
904 904 if self.dirstate.state(dest) == '?':
905 905 self.dirstate.update([dest], "a")
906 906 self.dirstate.copy(source, dest)
907 907
908 908 def heads(self, start=None):
909 909 heads = self.changelog.heads(start)
910 910 # sort the output in rev descending order
911 911 heads = [(-self.changelog.rev(h), h) for h in heads]
912 912 heads.sort()
913 913 return [n for (r, n) in heads]
914 914
915 915 # branchlookup returns a dict giving a list of branches for
916 916 # each head. A branch is defined as the tag of a node or
917 917 # the branch of the node's parents. If a node has multiple
918 918 # branch tags, tags are eliminated if they are visible from other
919 919 # branch tags.
920 920 #
921 921 # So, for this graph: a->b->c->d->e
922 922 # \ /
923 923 # aa -----/
924 924 # a has tag 2.6.12
925 925 # d has tag 2.6.13
926 926 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
927 927 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
928 928 # from the list.
929 929 #
930 930 # It is possible that more than one head will have the same branch tag.
931 931 # callers need to check the result for multiple heads under the same
932 932 # branch tag if that is a problem for them (ie checkout of a specific
933 933 # branch).
934 934 #
935 935 # passing in a specific branch will limit the depth of the search
936 936 # through the parents. It won't limit the branches returned in the
937 937 # result though.
938 938 def branchlookup(self, heads=None, branch=None):
939 939 if not heads:
940 940 heads = self.heads()
941 941 headt = [ h for h in heads ]
942 942 chlog = self.changelog
943 943 branches = {}
944 944 merges = []
945 945 seenmerge = {}
946 946
947 947 # traverse the tree once for each head, recording in the branches
948 948 # dict which tags are visible from this head. The branches
949 949 # dict also records which tags are visible from each tag
950 950 # while we traverse.
951 951 while headt or merges:
952 952 if merges:
953 953 n, found = merges.pop()
954 954 visit = [n]
955 955 else:
956 956 h = headt.pop()
957 957 visit = [h]
958 958 found = [h]
959 959 seen = {}
960 960 while visit:
961 961 n = visit.pop()
962 962 if n in seen:
963 963 continue
964 964 pp = chlog.parents(n)
965 965 tags = self.nodetags(n)
966 966 if tags:
967 967 for x in tags:
968 968 if x == 'tip':
969 969 continue
970 970 for f in found:
971 971 branches.setdefault(f, {})[n] = 1
972 972 branches.setdefault(n, {})[n] = 1
973 973 break
974 974 if n not in found:
975 975 found.append(n)
976 976 if branch in tags:
977 977 continue
978 978 seen[n] = 1
979 979 if pp[1] != nullid and n not in seenmerge:
980 980 merges.append((pp[1], [x for x in found]))
981 981 seenmerge[n] = 1
982 982 if pp[0] != nullid:
983 983 visit.append(pp[0])
984 984 # traverse the branches dict, eliminating branch tags from each
985 985 # head that are visible from another branch tag for that head.
986 986 out = {}
987 987 viscache = {}
988 988 for h in heads:
989 989 def visible(node):
990 990 if node in viscache:
991 991 return viscache[node]
992 992 ret = {}
993 993 visit = [node]
994 994 while visit:
995 995 x = visit.pop()
996 996 if x in viscache:
997 997 ret.update(viscache[x])
998 998 elif x not in ret:
999 999 ret[x] = 1
1000 1000 if x in branches:
1001 1001 visit[len(visit):] = branches[x].keys()
1002 1002 viscache[node] = ret
1003 1003 return ret
1004 1004 if h not in branches:
1005 1005 continue
1006 1006 # O(n^2), but somewhat limited. This only searches the
1007 1007 # tags visible from a specific head, not all the tags in the
1008 1008 # whole repo.
1009 1009 for b in branches[h]:
1010 1010 vis = False
1011 1011 for bb in branches[h].keys():
1012 1012 if b != bb:
1013 1013 if b in visible(bb):
1014 1014 vis = True
1015 1015 break
1016 1016 if not vis:
1017 1017 l = out.setdefault(h, [])
1018 1018 l[len(l):] = self.nodetags(b)
1019 1019 return out
1020 1020
1021 1021 def branches(self, nodes):
1022 1022 if not nodes:
1023 1023 nodes = [self.changelog.tip()]
1024 1024 b = []
1025 1025 for n in nodes:
1026 1026 t = n
1027 1027 while 1:
1028 1028 p = self.changelog.parents(n)
1029 1029 if p[1] != nullid or p[0] == nullid:
1030 1030 b.append((t, n, p[0], p[1]))
1031 1031 break
1032 1032 n = p[0]
1033 1033 return b
1034 1034
1035 1035 def between(self, pairs):
1036 1036 r = []
1037 1037
1038 1038 for top, bottom in pairs:
1039 1039 n, l, i = top, [], 0
1040 1040 f = 1
1041 1041
1042 1042 while n != bottom:
1043 1043 p = self.changelog.parents(n)[0]
1044 1044 if i == f:
1045 1045 l.append(n)
1046 1046 f = f * 2
1047 1047 n = p
1048 1048 i += 1
1049 1049
1050 1050 r.append(l)
1051 1051
1052 1052 return r
1053 1053
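between() walks first parents from each top node towards bottom and records the nodes seen at exponentially growing distances (1, 2, 4, 8, ...); findincoming() below feeds those samples into its binary search over an unknown remote branch segment. The same sampling rule in isolation, as a small sketch:

    def sampled_steps(length):
        # step counts at which between() records a node while walking
        # `length` first-parent steps: 1, 2, 4, 8, ...
        out, f, i = [], 1, 0
        while i < length:
            if i == f:
                out.append(i)
                f *= 2
            i += 1
        return out

    assert sampled_steps(10) == [1, 2, 4, 8]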
1054 1054 def findincoming(self, remote, base=None, heads=None, force=False):
1055 1055 """Return list of roots of the subsets of missing nodes from remote
1056 1056
1057 1057 If base dict is specified, assume that these nodes and their parents
1058 1058 exist on the remote side and that no child of a node of base exists
1059 1059 in both remote and self.
1060 1060 Furthermore base will be updated to include the nodes that exist
1061 1061 in self and remote but have no children that exist in both self and remote.
1062 1062 If a list of heads is specified, return only nodes which are heads
1063 1063 or ancestors of these heads.
1064 1064
1065 1065 All the ancestors of base are in self and in remote.
1066 1066 All the descendants of the list returned are missing in self.
1067 1067 (and so we know that the rest of the nodes are missing in remote, see
1068 1068 outgoing)
1069 1069 """
1070 1070 m = self.changelog.nodemap
1071 1071 search = []
1072 1072 fetch = {}
1073 1073 seen = {}
1074 1074 seenbranch = {}
1075 1075 if base == None:
1076 1076 base = {}
1077 1077
1078 1078 if not heads:
1079 1079 heads = remote.heads()
1080 1080
1081 1081 if self.changelog.tip() == nullid:
1082 1082 base[nullid] = 1
1083 1083 if heads != [nullid]:
1084 1084 return [nullid]
1085 1085 return []
1086 1086
1087 1087 # assume we're closer to the tip than the root
1088 1088 # and start by examining the heads
1089 1089 self.ui.status(_("searching for changes\n"))
1090 1090
1091 1091 unknown = []
1092 1092 for h in heads:
1093 1093 if h not in m:
1094 1094 unknown.append(h)
1095 1095 else:
1096 1096 base[h] = 1
1097 1097
1098 1098 if not unknown:
1099 1099 return []
1100 1100
1101 1101 req = dict.fromkeys(unknown)
1102 1102 reqcnt = 0
1103 1103
1104 1104 # search through remote branches
1105 1105 # a 'branch' here is a linear segment of history, with four parts:
1106 1106 # head, root, first parent, second parent
1107 1107 # (a branch always has two parents (or none) by definition)
1108 1108 unknown = remote.branches(unknown)
1109 1109 while unknown:
1110 1110 r = []
1111 1111 while unknown:
1112 1112 n = unknown.pop(0)
1113 1113 if n[0] in seen:
1114 1114 continue
1115 1115
1116 1116 self.ui.debug(_("examining %s:%s\n")
1117 1117 % (short(n[0]), short(n[1])))
1118 1118 if n[0] == nullid: # found the end of the branch
1119 1119 pass
1120 1120 elif n in seenbranch:
1121 1121 self.ui.debug(_("branch already found\n"))
1122 1122 continue
1123 1123 elif n[1] and n[1] in m: # do we know the base?
1124 1124 self.ui.debug(_("found incomplete branch %s:%s\n")
1125 1125 % (short(n[0]), short(n[1])))
1126 1126 search.append(n) # schedule branch range for scanning
1127 1127 seenbranch[n] = 1
1128 1128 else:
1129 1129 if n[1] not in seen and n[1] not in fetch:
1130 1130 if n[2] in m and n[3] in m:
1131 1131 self.ui.debug(_("found new changeset %s\n") %
1132 1132 short(n[1]))
1133 1133 fetch[n[1]] = 1 # earliest unknown
1134 1134 for p in n[2:4]:
1135 1135 if p in m:
1136 1136 base[p] = 1 # latest known
1137 1137
1138 1138 for p in n[2:4]:
1139 1139 if p not in req and p not in m:
1140 1140 r.append(p)
1141 1141 req[p] = 1
1142 1142 seen[n[0]] = 1
1143 1143
1144 1144 if r:
1145 1145 reqcnt += 1
1146 1146 self.ui.debug(_("request %d: %s\n") %
1147 1147 (reqcnt, " ".join(map(short, r))))
1148 1148 for p in xrange(0, len(r), 10):
1149 1149 for b in remote.branches(r[p:p+10]):
1150 1150 self.ui.debug(_("received %s:%s\n") %
1151 1151 (short(b[0]), short(b[1])))
1152 1152 unknown.append(b)
1153 1153
1154 1154 # do binary search on the branches we found
1155 1155 while search:
1156 1156 n = search.pop(0)
1157 1157 reqcnt += 1
1158 1158 l = remote.between([(n[0], n[1])])[0]
1159 1159 l.append(n[1])
1160 1160 p = n[0]
1161 1161 f = 1
1162 1162 for i in l:
1163 1163 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1164 1164 if i in m:
1165 1165 if f <= 2:
1166 1166 self.ui.debug(_("found new branch changeset %s\n") %
1167 1167 short(p))
1168 1168 fetch[p] = 1
1169 1169 base[i] = 1
1170 1170 else:
1171 1171 self.ui.debug(_("narrowed branch search to %s:%s\n")
1172 1172 % (short(p), short(i)))
1173 1173 search.append((p, i))
1174 1174 break
1175 1175 p, f = i, f * 2
1176 1176
1177 1177 # sanity check our fetch list
1178 1178 for f in fetch.keys():
1179 1179 if f in m:
1180 1180 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1181 1181
1182 1182 if base.keys() == [nullid]:
1183 1183 if force:
1184 1184 self.ui.warn(_("warning: repository is unrelated\n"))
1185 1185 else:
1186 1186 raise util.Abort(_("repository is unrelated"))
1187 1187
1188 1188 self.ui.debug(_("found new changesets starting at ") +
1189 1189 " ".join([short(f) for f in fetch]) + "\n")
1190 1190
1191 1191 self.ui.debug(_("%d total queries\n") % reqcnt)
1192 1192
1193 1193 return fetch.keys()
1194 1194
1195 1195 def findoutgoing(self, remote, base=None, heads=None, force=False):
1196 1196 """Return list of nodes that are roots of subsets not in remote
1197 1197
1198 1198 If base dict is specified, assume that these nodes and their parents
1199 1199 exist on the remote side.
1200 1200 If a list of heads is specified, return only nodes which are heads
1201 1201 or ancestors of these heads, and return a second element which
1202 1202 contains all remote heads which get new children.
1203 1203 """
1204 1204 if base == None:
1205 1205 base = {}
1206 1206 self.findincoming(remote, base, heads, force=force)
1207 1207
1208 1208 self.ui.debug(_("common changesets up to ")
1209 1209 + " ".join(map(short, base.keys())) + "\n")
1210 1210
1211 1211 remain = dict.fromkeys(self.changelog.nodemap)
1212 1212
1213 1213 # prune everything remote has from the tree
1214 1214 del remain[nullid]
1215 1215 remove = base.keys()
1216 1216 while remove:
1217 1217 n = remove.pop(0)
1218 1218 if n in remain:
1219 1219 del remain[n]
1220 1220 for p in self.changelog.parents(n):
1221 1221 remove.append(p)
1222 1222
1223 1223 # find every node whose parents have been pruned
1224 1224 subset = []
1225 1225 # find every remote head that will get new children
1226 1226 updated_heads = {}
1227 1227 for n in remain:
1228 1228 p1, p2 = self.changelog.parents(n)
1229 1229 if p1 not in remain and p2 not in remain:
1230 1230 subset.append(n)
1231 1231 if heads:
1232 1232 if p1 in heads:
1233 1233 updated_heads[p1] = True
1234 1234 if p2 in heads:
1235 1235 updated_heads[p2] = True
1236 1236
1237 1237 # this is the set of all roots we have to push
1238 1238 if heads:
1239 1239 return subset, updated_heads.keys()
1240 1240 else:
1241 1241 return subset
1242 1242
1243 1243 def pull(self, remote, heads=None, force=False, lock=None):
1244 1244 mylock = False
1245 1245 if not lock:
1246 1246 lock = self.lock()
1247 1247 mylock = True
1248 1248
1249 1249 try:
1250 1250 fetch = self.findincoming(remote, force=force)
1251 1251 if fetch == [nullid]:
1252 1252 self.ui.status(_("requesting all changes\n"))
1253 1253
1254 1254 if not fetch:
1255 1255 self.ui.status(_("no changes found\n"))
1256 1256 return 0
1257 1257
1258 1258 if heads is None:
1259 1259 cg = remote.changegroup(fetch, 'pull')
1260 1260 else:
1261 1261 if 'changegroupsubset' not in remote.capabilities:
1262 1262 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1263 1263 cg = remote.changegroupsubset(fetch, heads, 'pull')
1264 1264 return self.addchangegroup(cg, 'pull', remote.url())
1265 1265 finally:
1266 1266 if mylock:
1267 1267 lock.release()
1268 1268
1269 1269 def push(self, remote, force=False, revs=None):
1270 1270 # there are two ways to push to remote repo:
1271 1271 #
1272 1272 # addchangegroup assumes local user can lock remote
1273 1273 # repo (local filesystem, old ssh servers).
1274 1274 #
1275 1275 # unbundle assumes local user cannot lock remote repo (new ssh
1276 1276 # servers, http servers).
1277 1277
1278 1278 if remote.capable('unbundle'):
1279 1279 return self.push_unbundle(remote, force, revs)
1280 1280 return self.push_addchangegroup(remote, force, revs)
1281 1281
1282 1282 def prepush(self, remote, force, revs):
1283 1283 base = {}
1284 1284 remote_heads = remote.heads()
1285 1285 inc = self.findincoming(remote, base, remote_heads, force=force)
1286 1286 if not force and inc:
1287 1287 self.ui.warn(_("abort: unsynced remote changes!\n"))
1288 1288 self.ui.status(_("(did you forget to sync?"
1289 1289 " use push -f to force)\n"))
1290 1290 return None, 1
1291 1291
1292 1292 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1293 1293 if revs is not None:
1294 1294 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1295 1295 else:
1296 1296 bases, heads = update, self.changelog.heads()
1297 1297
1298 1298 if not bases:
1299 1299 self.ui.status(_("no changes found\n"))
1300 1300 return None, 1
1301 1301 elif not force:
1302 1302 # FIXME we don't properly detect creation of new heads
1303 1303 # in the push -r case, assume the user knows what he's doing
1304 1304 if not revs and len(remote_heads) < len(heads) \
1305 1305 and remote_heads != [nullid]:
1306 1306 self.ui.warn(_("abort: push creates new remote branches!\n"))
1307 1307 self.ui.status(_("(did you forget to merge?"
1308 1308 " use push -f to force)\n"))
1309 1309 return None, 1
1310 1310
1311 1311 if revs is None:
1312 1312 cg = self.changegroup(update, 'push')
1313 1313 else:
1314 1314 cg = self.changegroupsubset(update, revs, 'push')
1315 1315 return cg, remote_heads
1316 1316
1317 1317 def push_addchangegroup(self, remote, force, revs):
1318 1318 lock = remote.lock()
1319 1319
1320 1320 ret = self.prepush(remote, force, revs)
1321 1321 if ret[0] is not None:
1322 1322 cg, remote_heads = ret
1323 1323 return remote.addchangegroup(cg, 'push', self.url())
1324 1324 return ret[1]
1325 1325
1326 1326 def push_unbundle(self, remote, force, revs):
1327 1327 # local repo finds heads on server, finds out what revs it
1328 1328 # must push. once revs transferred, if server finds it has
1329 1329 # different heads (someone else won commit/push race), server
1330 1330 # aborts.
1331 1331
1332 1332 ret = self.prepush(remote, force, revs)
1333 1333 if ret[0] is not None:
1334 1334 cg, remote_heads = ret
1335 1335 if force: remote_heads = ['force']
1336 1336 return remote.unbundle(cg, remote_heads, 'push')
1337 1337 return ret[1]
1338 1338
1339 def changegroupinfo(self, nodes):
1340 self.ui.note(_("%d changesets found\n") % len(nodes))
1341 if self.ui.debugflag:
1342 self.ui.debug(_("List of changesets:\n"))
1343 for node in nodes:
1344 self.ui.debug("%s\n" % hex(node))
1345
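changegroupinfo() is the helper this changeset adds: ui.note() output only appears with -v and ui.debug() guarded by ui.debugflag only with --debug, so commands that build a changegroup (bundle, pull, push and the like, per the commit message) now report "N changesets found" verbosely and list every node when debugging. A standalone sketch of the same gating, with plain writes standing in for the ui methods and placeholder nodes:

    import sys

    def changegroup_info(hexnodes, verbose=False, debug=False):
        # mirrors changegroupinfo(): the count behaves like ui.note()
        # (shown with -v; --debug implies verbose output), the node
        # list behaves like ui.debug() guarded by ui.debugflag
        if verbose or debug:
            sys.stdout.write('%d changesets found\n' % len(hexnodes))
        if debug:
            sys.stdout.write('List of changesets:\n')
            for n in hexnodes:
                sys.stdout.write('%s\n' % n)

    changegroup_info(['0123456789abcdef0123456789abcdef01234567',
                      'fedcba9876543210fedcba9876543210fedcba98'],
                     verbose=True, debug=True)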
1339 1346 def changegroupsubset(self, bases, heads, source):
1340 1347 """This function generates a changegroup consisting of all the nodes
1341 1348 that are descendents of any of the bases, and ancestors of any of
1342 1349 the heads.
1343 1350
1344 1351 It is fairly complex as determining which filenodes and which
1345 1352 manifest nodes need to be included for the changeset to be complete
1346 1353 is non-trivial.
1347 1354
1348 1355 Another wrinkle is doing the reverse, figuring out which changeset in
1349 1356 the changegroup a particular filenode or manifestnode belongs to."""
1350 1357
1351 1358 self.hook('preoutgoing', throw=True, source=source)
1352 1359
1353 1360 # Set up some initial variables
1354 1361 # Make it easy to refer to self.changelog
1355 1362 cl = self.changelog
1356 1363 # msng is short for missing - compute the list of changesets in this
1357 1364 # changegroup.
1358 1365 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1366 self.changegroupinfo(msng_cl_lst)
1359 1367 # Some bases may turn out to be superfluous, and some heads may be
1360 1368 # too. nodesbetween will return the minimal set of bases and heads
1361 1369 # necessary to re-create the changegroup.
1362 1370
1363 1371 # Known heads are the list of heads that it is assumed the recipient
1364 1372 # of this changegroup will know about.
1365 1373 knownheads = {}
1366 1374 # We assume that all parents of bases are known heads.
1367 1375 for n in bases:
1368 1376 for p in cl.parents(n):
1369 1377 if p != nullid:
1370 1378 knownheads[p] = 1
1371 1379 knownheads = knownheads.keys()
1372 1380 if knownheads:
1373 1381 # Now that we know what heads are known, we can compute which
1374 1382 # changesets are known. The recipient must know about all
1375 1383 # changesets required to reach the known heads from the null
1376 1384 # changeset.
1377 1385 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1378 1386 junk = None
1379 1387 # Transform the list into an ersatz set.
1380 1388 has_cl_set = dict.fromkeys(has_cl_set)
1381 1389 else:
1382 1390 # If there were no known heads, the recipient cannot be assumed to
1383 1391 # know about any changesets.
1384 1392 has_cl_set = {}
1385 1393
1386 1394 # Make it easy to refer to self.manifest
1387 1395 mnfst = self.manifest
1388 1396 # We don't know which manifests are missing yet
1389 1397 msng_mnfst_set = {}
1390 1398 # Nor do we know which filenodes are missing.
1391 1399 msng_filenode_set = {}
1392 1400
1393 1401 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1394 1402 junk = None
1395 1403
1396 1404 # A changeset always belongs to itself, so the changenode lookup
1397 1405 # function for a changenode is identity.
1398 1406 def identity(x):
1399 1407 return x
1400 1408
1401 1409 # A function generating function. Sets up an environment for the
1402 1410 # inner function.
1403 1411 def cmp_by_rev_func(revlog):
1404 1412 # Compare two nodes by their revision number in the environment's
1405 1413 # revision history. Since the revision number both represents the
1406 1414 # most efficient order to read the nodes in, and represents a
1407 1415 # topological sorting of the nodes, this function is often useful.
1408 1416 def cmp_by_rev(a, b):
1409 1417 return cmp(revlog.rev(a), revlog.rev(b))
1410 1418 return cmp_by_rev
1411 1419
1412 1420 # If we determine that a particular file or manifest node must be a
1413 1421 # node that the recipient of the changegroup will already have, we can
1414 1422 # also assume the recipient will have all the parents. This function
1415 1423 # prunes them from the set of missing nodes.
1416 1424 def prune_parents(revlog, hasset, msngset):
1417 1425 haslst = hasset.keys()
1418 1426 haslst.sort(cmp_by_rev_func(revlog))
1419 1427 for node in haslst:
1420 1428 parentlst = [p for p in revlog.parents(node) if p != nullid]
1421 1429 while parentlst:
1422 1430 n = parentlst.pop()
1423 1431 if n not in hasset:
1424 1432 hasset[n] = 1
1425 1433 p = [p for p in revlog.parents(n) if p != nullid]
1426 1434 parentlst.extend(p)
1427 1435 for n in hasset:
1428 1436 msngset.pop(n, None)
1429 1437
1430 1438 # This is a function generating function used to set up an environment
1431 1439 # for the inner function to execute in.
1432 1440 def manifest_and_file_collector(changedfileset):
1433 1441 # This is an information gathering function that gathers
1434 1442 # information from each changeset node that goes out as part of
1435 1443 # the changegroup. The information gathered is a list of which
1436 1444 # manifest nodes are potentially required (the recipient may
1437 1445 # already have them) and the total list of all files which were
1438 1446 # changed in any changeset in the changegroup.
1439 1447 #
1440 1448 # We also remember the first changenode we saw any manifest
1441 1449 # referenced by so we can later determine which changenode 'owns'
1442 1450 # the manifest.
1443 1451 def collect_manifests_and_files(clnode):
1444 1452 c = cl.read(clnode)
1445 1453 for f in c[3]:
1446 1454 # This is to make sure we only have one instance of each
1447 1455 # filename string for each filename.
1448 1456 changedfileset.setdefault(f, f)
1449 1457 msng_mnfst_set.setdefault(c[0], clnode)
1450 1458 return collect_manifests_and_files
1451 1459
1452 1460 # Figure out which manifest nodes (of the ones we think might be part
1453 1461 # of the changegroup) the recipient must know about and remove them
1454 1462 # from the changegroup.
1455 1463 def prune_manifests():
1456 1464 has_mnfst_set = {}
1457 1465 for n in msng_mnfst_set:
1458 1466 # If a 'missing' manifest thinks it belongs to a changenode
1459 1467 # the recipient is assumed to have, obviously the recipient
1460 1468 # must have that manifest.
1461 1469 linknode = cl.node(mnfst.linkrev(n))
1462 1470 if linknode in has_cl_set:
1463 1471 has_mnfst_set[n] = 1
1464 1472 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1465 1473
1466 1474 # Use the information collected in collect_manifests_and_files to say
1467 1475 # which changenode any manifestnode belongs to.
1468 1476 def lookup_manifest_link(mnfstnode):
1469 1477 return msng_mnfst_set[mnfstnode]
1470 1478
1471 1479 # A function generating function that sets up the initial environment
1472 1480 # for the inner function.
1473 1481 def filenode_collector(changedfiles):
1474 1482 next_rev = [0]
1475 1483 # This gathers information from each manifestnode included in the
1476 1484 # changegroup about which filenodes the manifest node references
1477 1485 # so we can include those in the changegroup too.
1478 1486 #
1479 1487 # It also remembers which changenode each filenode belongs to. It
1480 1488 # does this by assuming a filenode belongs to the changenode
1481 1489 # the first manifest that references it belongs to.
1482 1490 def collect_msng_filenodes(mnfstnode):
1483 1491 r = mnfst.rev(mnfstnode)
1484 1492 if r == next_rev[0]:
1485 1493 # If the last rev we looked at was the one just previous,
1486 1494 # we only need to see a diff.
1487 1495 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1488 1496 # For each line in the delta
1489 1497 for dline in delta.splitlines():
1490 1498 # get the filename and filenode for that line
1491 1499 f, fnode = dline.split('\0')
1492 1500 fnode = bin(fnode[:40])
1493 1501 f = changedfiles.get(f, None)
1494 1502 # And if the file is in the list of files we care
1495 1503 # about.
1496 1504 if f is not None:
1497 1505 # Get the changenode this manifest belongs to
1498 1506 clnode = msng_mnfst_set[mnfstnode]
1499 1507 # Create the set of filenodes for the file if
1500 1508 # there isn't one already.
1501 1509 ndset = msng_filenode_set.setdefault(f, {})
1502 1510 # And set the filenode's changelog node to the
1503 1511 # manifest's if it hasn't been set already.
1504 1512 ndset.setdefault(fnode, clnode)
1505 1513 else:
1506 1514 # Otherwise we need a full manifest.
1507 1515 m = mnfst.read(mnfstnode)
1508 1516 # For every file we care about.
1509 1517 for f in changedfiles:
1510 1518 fnode = m.get(f, None)
1511 1519 # If it's in the manifest
1512 1520 if fnode is not None:
1513 1521 # See comments above.
1514 1522 clnode = msng_mnfst_set[mnfstnode]
1515 1523 ndset = msng_filenode_set.setdefault(f, {})
1516 1524 ndset.setdefault(fnode, clnode)
1517 1525 # Remember the revision we hope to see next.
1518 1526 next_rev[0] = r + 1
1519 1527 return collect_msng_filenodes
1520 1528
1521 1529 # We have a list of filenodes we think we need for a file, let's remove
1522 1530 # all those we know the recipient must have.
1523 1531 def prune_filenodes(f, filerevlog):
1524 1532 msngset = msng_filenode_set[f]
1525 1533 hasset = {}
1526 1534 # If a 'missing' filenode thinks it belongs to a changenode we
1527 1535 # assume the recipient must have, then the recipient must have
1528 1536 # that filenode.
1529 1537 for n in msngset:
1530 1538 clnode = cl.node(filerevlog.linkrev(n))
1531 1539 if clnode in has_cl_set:
1532 1540 hasset[n] = 1
1533 1541 prune_parents(filerevlog, hasset, msngset)
1534 1542
1535 1543 # A function generator function that sets up a context for the
1536 1544 # inner function.
1537 1545 def lookup_filenode_link_func(fname):
1538 1546 msngset = msng_filenode_set[fname]
1539 1547 # Lookup the changenode the filenode belongs to.
1540 1548 def lookup_filenode_link(fnode):
1541 1549 return msngset[fnode]
1542 1550 return lookup_filenode_link
1543 1551
1544 1552 # Now that we have all these utility functions to help out and
1545 1553 # logically divide up the task, generate the group.
1546 1554 def gengroup():
1547 1555 # The set of changed files starts empty.
1548 1556 changedfiles = {}
1549 1557 # Create a changenode group generator that will call our functions
1550 1558 # back to lookup the owning changenode and collect information.
1551 1559 group = cl.group(msng_cl_lst, identity,
1552 1560 manifest_and_file_collector(changedfiles))
1553 1561 for chnk in group:
1554 1562 yield chnk
1555 1563
1556 1564 # The list of manifests has been collected by the generator
1557 1565 # calling our functions back.
1558 1566 prune_manifests()
1559 1567 msng_mnfst_lst = msng_mnfst_set.keys()
1560 1568 # Sort the manifestnodes by revision number.
1561 1569 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1562 1570 # Create a generator for the manifestnodes that calls our lookup
1563 1571 # and data collection functions back.
1564 1572 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1565 1573 filenode_collector(changedfiles))
1566 1574 for chnk in group:
1567 1575 yield chnk
1568 1576
1569 1577 # These are no longer needed, dereference and toss the memory for
1570 1578 # them.
1571 1579 msng_mnfst_lst = None
1572 1580 msng_mnfst_set.clear()
1573 1581
1574 1582 changedfiles = changedfiles.keys()
1575 1583 changedfiles.sort()
1576 1584 # Go through all our files in order sorted by name.
1577 1585 for fname in changedfiles:
1578 1586 filerevlog = self.file(fname)
1579 1587 # Toss out the filenodes that the recipient isn't really
1580 1588 # missing.
1581 1589 if msng_filenode_set.has_key(fname):
1582 1590 prune_filenodes(fname, filerevlog)
1583 1591 msng_filenode_lst = msng_filenode_set[fname].keys()
1584 1592 else:
1585 1593 msng_filenode_lst = []
1586 1594 # If any filenodes are left, generate the group for them,
1587 1595 # otherwise don't bother.
1588 1596 if len(msng_filenode_lst) > 0:
1589 1597 yield changegroup.genchunk(fname)
1590 1598 # Sort the filenodes by their revision #
1591 1599 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1592 1600 # Create a group generator and only pass in a changenode
1593 1601 # lookup function as we need to collect no information
1594 1602 # from filenodes.
1595 1603 group = filerevlog.group(msng_filenode_lst,
1596 1604 lookup_filenode_link_func(fname))
1597 1605 for chnk in group:
1598 1606 yield chnk
1599 1607 if msng_filenode_set.has_key(fname):
1600 1608 # Don't need this anymore, toss it to free memory.
1601 1609 del msng_filenode_set[fname]
1602 1610 # Signal that no more groups are left.
1603 1611 yield changegroup.closechunk()
1604 1612
1605 1613 if msng_cl_lst:
1606 1614 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1607 1615
1608 1616 return util.chunkbuffer(gengroup())
1609 1617
1610 1618 def changegroup(self, basenodes, source):
1611 1619 """Generate a changegroup of all nodes that we have that a recipient
1612 1620 doesn't.
1613 1621
1614 1622 This is much easier than the previous function as we can assume that
1615 1623 the recipient has any changenode we aren't sending them."""
1616 1624
1617 1625 self.hook('preoutgoing', throw=True, source=source)
1618 1626
1619 1627 cl = self.changelog
1620 1628 nodes = cl.nodesbetween(basenodes, None)[0]
1621 1629 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1630 self.changegroupinfo(nodes)
1622 1631
1623 1632 def identity(x):
1624 1633 return x
1625 1634
1626 1635 def gennodelst(revlog):
1627 1636 for r in xrange(0, revlog.count()):
1628 1637 n = revlog.node(r)
1629 1638 if revlog.linkrev(n) in revset:
1630 1639 yield n
1631 1640
1632 1641 def changed_file_collector(changedfileset):
1633 1642 def collect_changed_files(clnode):
1634 1643 c = cl.read(clnode)
1635 1644 for fname in c[3]:
1636 1645 changedfileset[fname] = 1
1637 1646 return collect_changed_files
1638 1647
1639 1648 def lookuprevlink_func(revlog):
1640 1649 def lookuprevlink(n):
1641 1650 return cl.node(revlog.linkrev(n))
1642 1651 return lookuprevlink
1643 1652
1644 1653 def gengroup():
1645 1654 # construct a list of all changed files
1646 1655 changedfiles = {}
1647 1656
1648 1657 for chnk in cl.group(nodes, identity,
1649 1658 changed_file_collector(changedfiles)):
1650 1659 yield chnk
1651 1660 changedfiles = changedfiles.keys()
1652 1661 changedfiles.sort()
1653 1662
1654 1663 mnfst = self.manifest
1655 1664 nodeiter = gennodelst(mnfst)
1656 1665 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1657 1666 yield chnk
1658 1667
1659 1668 for fname in changedfiles:
1660 1669 filerevlog = self.file(fname)
1661 1670 nodeiter = gennodelst(filerevlog)
1662 1671 nodeiter = list(nodeiter)
1663 1672 if nodeiter:
1664 1673 yield changegroup.genchunk(fname)
1665 1674 lookup = lookuprevlink_func(filerevlog)
1666 1675 for chnk in filerevlog.group(nodeiter, lookup):
1667 1676 yield chnk
1668 1677
1669 1678 yield changegroup.closechunk()
1670 1679
1671 1680 if nodes:
1672 1681 self.hook('outgoing', node=hex(nodes[0]), source=source)
1673 1682
1674 1683 return util.chunkbuffer(gengroup())
1675 1684
1676 1685 def addchangegroup(self, source, srctype, url):
1677 1686 """add changegroup to repo.
1678 1687 returns number of heads modified or added + 1."""
1679 1688
1680 1689 def csmap(x):
1681 1690 self.ui.debug(_("add changeset %s\n") % short(x))
1682 1691 return cl.count()
1683 1692
1684 1693 def revmap(x):
1685 1694 return cl.rev(x)
1686 1695
1687 1696 if not source:
1688 1697 return 0
1689 1698
1690 1699 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1691 1700
1692 1701 changesets = files = revisions = 0
1693 1702
1694 1703 tr = self.transaction()
1695 1704
1696 1705 # write changelog data to temp files so concurrent readers will not see
1697 1706 # inconsistent view
1698 1707 cl = None
1699 1708 try:
1700 1709 cl = appendfile.appendchangelog(self.sopener,
1701 1710 self.changelog.version)
1702 1711
1703 1712 oldheads = len(cl.heads())
1704 1713
1705 1714 # pull off the changeset group
1706 1715 self.ui.status(_("adding changesets\n"))
1707 1716 cor = cl.count() - 1
1708 1717 chunkiter = changegroup.chunkiter(source)
1709 1718 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1710 1719 raise util.Abort(_("received changelog group is empty"))
1711 1720 cnr = cl.count() - 1
1712 1721 changesets = cnr - cor
1713 1722
1714 1723 # pull off the manifest group
1715 1724 self.ui.status(_("adding manifests\n"))
1716 1725 chunkiter = changegroup.chunkiter(source)
1717 1726 # no need to check for empty manifest group here:
1718 1727 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1719 1728 # no new manifest will be created and the manifest group will
1720 1729 # be empty during the pull
1721 1730 self.manifest.addgroup(chunkiter, revmap, tr)
1722 1731
1723 1732 # process the files
1724 1733 self.ui.status(_("adding file changes\n"))
1725 1734 while 1:
1726 1735 f = changegroup.getchunk(source)
1727 1736 if not f:
1728 1737 break
1729 1738 self.ui.debug(_("adding %s revisions\n") % f)
1730 1739 fl = self.file(f)
1731 1740 o = fl.count()
1732 1741 chunkiter = changegroup.chunkiter(source)
1733 1742 if fl.addgroup(chunkiter, revmap, tr) is None:
1734 1743 raise util.Abort(_("received file revlog group is empty"))
1735 1744 revisions += fl.count() - o
1736 1745 files += 1
1737 1746
1738 1747 cl.writedata()
1739 1748 finally:
1740 1749 if cl:
1741 1750 cl.cleanup()
1742 1751
1743 1752 # make changelog see real files again
1744 1753 self.changelog = changelog.changelog(self.sopener,
1745 1754 self.changelog.version)
1746 1755 self.changelog.checkinlinesize(tr)
1747 1756
1748 1757 newheads = len(self.changelog.heads())
1749 1758 heads = ""
1750 1759 if oldheads and newheads != oldheads:
1751 1760 heads = _(" (%+d heads)") % (newheads - oldheads)
1752 1761
1753 1762 self.ui.status(_("added %d changesets"
1754 1763 " with %d changes to %d files%s\n")
1755 1764 % (changesets, revisions, files, heads))
1756 1765
1757 1766 if changesets > 0:
1758 1767 self.hook('pretxnchangegroup', throw=True,
1759 1768 node=hex(self.changelog.node(cor+1)), source=srctype,
1760 1769 url=url)
1761 1770
1762 1771 tr.close()
1763 1772
1764 1773 if changesets > 0:
1765 1774 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1766 1775 source=srctype, url=url)
1767 1776
1768 1777 for i in xrange(cor + 1, cnr + 1):
1769 1778 self.hook("incoming", node=hex(self.changelog.node(i)),
1770 1779 source=srctype, url=url)
1771 1780
1772 1781 return newheads - oldheads + 1
1773 1782
1774 1783
1775 1784 def stream_in(self, remote):
1776 1785 fp = remote.stream_out()
1777 1786 resp = int(fp.readline())
1778 1787 if resp != 0:
1779 1788 raise util.Abort(_('operation forbidden by server'))
1780 1789 self.ui.status(_('streaming all changes\n'))
1781 1790 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
1782 1791 self.ui.status(_('%d files to transfer, %s of data\n') %
1783 1792 (total_files, util.bytecount(total_bytes)))
1784 1793 start = time.time()
1785 1794 for i in xrange(total_files):
1786 1795 name, size = fp.readline().split('\0', 1)
1787 1796 size = int(size)
1788 1797 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1789 1798 ofp = self.sopener(name, 'w')
1790 1799 for chunk in util.filechunkiter(fp, limit=size):
1791 1800 ofp.write(chunk)
1792 1801 ofp.close()
1793 1802 elapsed = time.time() - start
1794 1803 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1795 1804 (util.bytecount(total_bytes), elapsed,
1796 1805 util.bytecount(total_bytes / elapsed)))
1797 1806 self.reload()
1798 1807 return len(self.heads()) + 1
1799 1808
1800 1809 def clone(self, remote, heads=[], stream=False):
1801 1810 '''clone remote repository.
1802 1811
1803 1812 keyword arguments:
1804 1813 heads: list of revs to clone (forces use of pull)
1805 1814 stream: use streaming clone if possible'''
1806 1815
1807 1816 # now, all clients that can request uncompressed clones can
1808 1817 # read repo formats supported by all servers that can serve
1809 1818 # them.
1810 1819
1811 1820 # if revlog format changes, client will have to check version
1812 1821 # and format flags on "stream" capability, and use
1813 1822 # uncompressed only if compatible.
1814 1823
1815 1824 if stream and not heads and remote.capable('stream'):
1816 1825 return self.stream_in(remote)
1817 1826 return self.pull(remote, heads)
1818 1827
1819 1828 # used to avoid circular references so destructors work
1820 1829 def aftertrans(base):
1821 1830 p = base
1822 1831 def a():
1823 1832 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1824 1833 util.rename(os.path.join(p, "journal.dirstate"),
1825 1834 os.path.join(p, "undo.dirstate"))
1826 1835 return a
1827 1836
1828 1837 def instance(ui, path, create):
1829 1838 return localrepository(ui, util.drop_scheme('file', path), create)
1830 1839
1831 1840 def islocal(path):
1832 1841 return True
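Note on the new self.changegroupinfo(nodes) call added in changegroup() above: the helper's definition is not part of this excerpt. Going only by the commit message (show the number of changesets with -v and the list with --debug) and the expected test output below, a minimal sketch of such a helper could look like the following; the method name is taken from the call site, everything else here is an assumption, not the changeset's actual code:

    def changegroupinfo(self, nodes):
        # Hypothetical sketch, reconstructed from the commit message and test output.
        # With -v/--verbose: report how many changesets will be bundled.
        self.ui.note(_("%d changesets found\n") % len(nodes))
        # With --debug: additionally list the full changeset hashes.
        if self.ui.debugflag:
            self.ui.debug(_("List of changesets:\n"))
            for node in nodes:
                self.ui.debug("%s\n" % hex(node))

Both lines appear together in the expected output below, presumably because --debug also enables verbose messages.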
@@ -1,517 +1,587 b''
1 1 adding foo/Bar/file.txt
2 2 adding foo/file.txt
3 3 adding quux/file.py
4 4 3:911600dab2ae
5 5 requesting all changes
6 6 adding changesets
7 7 adding manifests
8 8 adding file changes
9 9 added 1 changesets with 3 changes to 3 files
10 10 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
11 11
12 12 Extension disabled for lack of a hook
13 13 Pushing as user fred
14 14 hgrc = """
15 15 """
16 16 pushing to ../b
17 17 searching for changes
18 18 common changesets up to 6675d58eff77
19 3 changesets found
20 List of changesets:
21 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
22 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
23 911600dab2ae7a9baff75958b84fe606851ce955
19 24 adding changesets
20 25 add changeset ef1ea85a6374
21 26 add changeset f9cafe1212c8
22 27 add changeset 911600dab2ae
23 28 adding manifests
24 29 adding file changes
25 30 adding foo/Bar/file.txt revisions
26 31 adding foo/file.txt revisions
27 32 adding quux/file.py revisions
28 33 added 3 changesets with 3 changes to 3 files
29 34 rolling back last transaction
30 35 0:6675d58eff77
31 36
32 37 Extension disabled for lack of acl.sources
33 38 Pushing as user fred
34 39 hgrc = """
35 40 [hooks]
36 41 pretxnchangegroup.acl = python:hgext.acl.hook
37 42 """
38 43 pushing to ../b
39 44 searching for changes
40 45 common changesets up to 6675d58eff77
46 3 changesets found
47 List of changesets:
48 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
49 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
50 911600dab2ae7a9baff75958b84fe606851ce955
41 51 adding changesets
42 52 add changeset ef1ea85a6374
43 53 add changeset f9cafe1212c8
44 54 add changeset 911600dab2ae
45 55 adding manifests
46 56 adding file changes
47 57 adding foo/Bar/file.txt revisions
48 58 adding foo/file.txt revisions
49 59 adding quux/file.py revisions
50 60 added 3 changesets with 3 changes to 3 files
51 61 calling hook pretxnchangegroup.acl: hgext.acl.hook
52 62 acl: acl.allow not enabled
53 63 acl: acl.deny not enabled
54 64 acl: changes have source "push" - skipping
55 65 rolling back last transaction
56 66 0:6675d58eff77
57 67
58 68 No [acl.allow]/[acl.deny]
59 69 Pushing as user fred
60 70 hgrc = """
61 71 [hooks]
62 72 pretxnchangegroup.acl = python:hgext.acl.hook
63 73 [acl]
64 74 sources = push
65 75 """
66 76 pushing to ../b
67 77 searching for changes
68 78 common changesets up to 6675d58eff77
79 3 changesets found
80 List of changesets:
81 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
82 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
83 911600dab2ae7a9baff75958b84fe606851ce955
69 84 adding changesets
70 85 add changeset ef1ea85a6374
71 86 add changeset f9cafe1212c8
72 87 add changeset 911600dab2ae
73 88 adding manifests
74 89 adding file changes
75 90 adding foo/Bar/file.txt revisions
76 91 adding foo/file.txt revisions
77 92 adding quux/file.py revisions
78 93 added 3 changesets with 3 changes to 3 files
79 94 calling hook pretxnchangegroup.acl: hgext.acl.hook
80 95 acl: acl.allow not enabled
81 96 acl: acl.deny not enabled
82 97 acl: allowing changeset ef1ea85a6374
83 98 acl: allowing changeset f9cafe1212c8
84 99 acl: allowing changeset 911600dab2ae
85 100 rolling back last transaction
86 101 0:6675d58eff77
87 102
88 103 Empty [acl.allow]
89 104 Pushing as user fred
90 105 hgrc = """
91 106 [hooks]
92 107 pretxnchangegroup.acl = python:hgext.acl.hook
93 108 [acl]
94 109 sources = push
95 110 [acl.allow]
96 111 """
97 112 pushing to ../b
98 113 searching for changes
99 114 common changesets up to 6675d58eff77
115 3 changesets found
116 List of changesets:
117 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
118 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
119 911600dab2ae7a9baff75958b84fe606851ce955
100 120 adding changesets
101 121 add changeset ef1ea85a6374
102 122 add changeset f9cafe1212c8
103 123 add changeset 911600dab2ae
104 124 adding manifests
105 125 adding file changes
106 126 adding foo/Bar/file.txt revisions
107 127 adding foo/file.txt revisions
108 128 adding quux/file.py revisions
109 129 added 3 changesets with 3 changes to 3 files
110 130 calling hook pretxnchangegroup.acl: hgext.acl.hook
111 131 acl: acl.allow enabled, 0 entries for user fred
112 132 acl: acl.deny not enabled
113 133 acl: user fred not allowed on foo/file.txt
114 134 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
115 135 abort: acl: access denied for changeset ef1ea85a6374
116 136 transaction abort!
117 137 rollback completed
118 138 no rollback information available
119 139 0:6675d58eff77
120 140
121 141 fred is allowed inside foo/
122 142 Pushing as user fred
123 143 hgrc = """
124 144 [hooks]
125 145 pretxnchangegroup.acl = python:hgext.acl.hook
126 146 [acl]
127 147 sources = push
128 148 [acl.allow]
129 149 foo/** = fred
130 150 """
131 151 pushing to ../b
132 152 searching for changes
133 153 common changesets up to 6675d58eff77
154 3 changesets found
155 List of changesets:
156 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
157 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
158 911600dab2ae7a9baff75958b84fe606851ce955
134 159 adding changesets
135 160 add changeset ef1ea85a6374
136 161 add changeset f9cafe1212c8
137 162 add changeset 911600dab2ae
138 163 adding manifests
139 164 adding file changes
140 165 adding foo/Bar/file.txt revisions
141 166 adding foo/file.txt revisions
142 167 adding quux/file.py revisions
143 168 added 3 changesets with 3 changes to 3 files
144 169 calling hook pretxnchangegroup.acl: hgext.acl.hook
145 170 acl: acl.allow enabled, 1 entries for user fred
146 171 acl: acl.deny not enabled
147 172 acl: allowing changeset ef1ea85a6374
148 173 acl: allowing changeset f9cafe1212c8
149 174 acl: user fred not allowed on quux/file.py
150 175 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
151 176 abort: acl: access denied for changeset 911600dab2ae
152 177 transaction abort!
153 178 rollback completed
154 179 no rollback information available
155 180 0:6675d58eff77
156 181
157 182 Empty [acl.deny]
158 183 Pushing as user barney
159 184 hgrc = """
160 185 [hooks]
161 186 pretxnchangegroup.acl = python:hgext.acl.hook
162 187 [acl]
163 188 sources = push
164 189 [acl.allow]
165 190 foo/** = fred
166 191 [acl.deny]
167 192 """
168 193 pushing to ../b
169 194 searching for changes
170 195 common changesets up to 6675d58eff77
196 3 changesets found
197 List of changesets:
198 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
199 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
200 911600dab2ae7a9baff75958b84fe606851ce955
171 201 adding changesets
172 202 add changeset ef1ea85a6374
173 203 add changeset f9cafe1212c8
174 204 add changeset 911600dab2ae
175 205 adding manifests
176 206 adding file changes
177 207 adding foo/Bar/file.txt revisions
178 208 adding foo/file.txt revisions
179 209 adding quux/file.py revisions
180 210 added 3 changesets with 3 changes to 3 files
181 211 calling hook pretxnchangegroup.acl: hgext.acl.hook
182 212 acl: acl.allow enabled, 0 entries for user barney
183 213 acl: acl.deny enabled, 0 entries for user barney
184 214 acl: user barney not allowed on foo/file.txt
185 215 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
186 216 abort: acl: access denied for changeset ef1ea85a6374
187 217 transaction abort!
188 218 rollback completed
189 219 no rollback information available
190 220 0:6675d58eff77
191 221
192 222 fred is allowed inside foo/, but not foo/bar/ (case matters)
193 223 Pushing as user fred
194 224 hgrc = """
195 225 [hooks]
196 226 pretxnchangegroup.acl = python:hgext.acl.hook
197 227 [acl]
198 228 sources = push
199 229 [acl.allow]
200 230 foo/** = fred
201 231 [acl.deny]
202 232 foo/bar/** = fred
203 233 """
204 234 pushing to ../b
205 235 searching for changes
206 236 common changesets up to 6675d58eff77
237 3 changesets found
238 List of changesets:
239 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
240 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
241 911600dab2ae7a9baff75958b84fe606851ce955
207 242 adding changesets
208 243 add changeset ef1ea85a6374
209 244 add changeset f9cafe1212c8
210 245 add changeset 911600dab2ae
211 246 adding manifests
212 247 adding file changes
213 248 adding foo/Bar/file.txt revisions
214 249 adding foo/file.txt revisions
215 250 adding quux/file.py revisions
216 251 added 3 changesets with 3 changes to 3 files
217 252 calling hook pretxnchangegroup.acl: hgext.acl.hook
218 253 acl: acl.allow enabled, 1 entries for user fred
219 254 acl: acl.deny enabled, 1 entries for user fred
220 255 acl: allowing changeset ef1ea85a6374
221 256 acl: allowing changeset f9cafe1212c8
222 257 acl: user fred not allowed on quux/file.py
223 258 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
224 259 abort: acl: access denied for changeset 911600dab2ae
225 260 transaction abort!
226 261 rollback completed
227 262 no rollback information available
228 263 0:6675d58eff77
229 264
230 265 fred is allowed inside foo/, but not foo/Bar/
231 266 Pushing as user fred
232 267 hgrc = """
233 268 [hooks]
234 269 pretxnchangegroup.acl = python:hgext.acl.hook
235 270 [acl]
236 271 sources = push
237 272 [acl.allow]
238 273 foo/** = fred
239 274 [acl.deny]
240 275 foo/bar/** = fred
241 276 foo/Bar/** = fred
242 277 """
243 278 pushing to ../b
244 279 searching for changes
245 280 common changesets up to 6675d58eff77
281 3 changesets found
282 List of changesets:
283 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
284 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
285 911600dab2ae7a9baff75958b84fe606851ce955
246 286 adding changesets
247 287 add changeset ef1ea85a6374
248 288 add changeset f9cafe1212c8
249 289 add changeset 911600dab2ae
250 290 adding manifests
251 291 adding file changes
252 292 adding foo/Bar/file.txt revisions
253 293 adding foo/file.txt revisions
254 294 adding quux/file.py revisions
255 295 added 3 changesets with 3 changes to 3 files
256 296 calling hook pretxnchangegroup.acl: hgext.acl.hook
257 297 acl: acl.allow enabled, 1 entries for user fred
258 298 acl: acl.deny enabled, 2 entries for user fred
259 299 acl: allowing changeset ef1ea85a6374
260 300 acl: user fred denied on foo/Bar/file.txt
261 301 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset f9cafe1212c8
262 302 abort: acl: access denied for changeset f9cafe1212c8
263 303 transaction abort!
264 304 rollback completed
265 305 no rollback information available
266 306 0:6675d58eff77
267 307
268 308 barney is not mentioned => not allowed anywhere
269 309 Pushing as user barney
270 310 hgrc = """
271 311 [hooks]
272 312 pretxnchangegroup.acl = python:hgext.acl.hook
273 313 [acl]
274 314 sources = push
275 315 [acl.allow]
276 316 foo/** = fred
277 317 [acl.deny]
278 318 foo/bar/** = fred
279 319 foo/Bar/** = fred
280 320 """
281 321 pushing to ../b
282 322 searching for changes
283 323 common changesets up to 6675d58eff77
324 3 changesets found
325 List of changesets:
326 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
327 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
328 911600dab2ae7a9baff75958b84fe606851ce955
284 329 adding changesets
285 330 add changeset ef1ea85a6374
286 331 add changeset f9cafe1212c8
287 332 add changeset 911600dab2ae
288 333 adding manifests
289 334 adding file changes
290 335 adding foo/Bar/file.txt revisions
291 336 adding foo/file.txt revisions
292 337 adding quux/file.py revisions
293 338 added 3 changesets with 3 changes to 3 files
294 339 calling hook pretxnchangegroup.acl: hgext.acl.hook
295 340 acl: acl.allow enabled, 0 entries for user barney
296 341 acl: acl.deny enabled, 0 entries for user barney
297 342 acl: user barney not allowed on foo/file.txt
298 343 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
299 344 abort: acl: access denied for changeset ef1ea85a6374
300 345 transaction abort!
301 346 rollback completed
302 347 no rollback information available
303 348 0:6675d58eff77
304 349
305 350 barney is allowed everywhere
306 351 Pushing as user barney
307 352 hgrc = """
308 353 [hooks]
309 354 pretxnchangegroup.acl = python:hgext.acl.hook
310 355 [acl]
311 356 sources = push
312 357 [acl.allow]
313 358 foo/** = fred
314 359 [acl.deny]
315 360 foo/bar/** = fred
316 361 foo/Bar/** = fred
317 362 [acl.allow]
318 363 ** = barney
319 364 """
320 365 pushing to ../b
321 366 searching for changes
322 367 common changesets up to 6675d58eff77
368 3 changesets found
369 List of changesets:
370 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
371 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
372 911600dab2ae7a9baff75958b84fe606851ce955
323 373 adding changesets
324 374 add changeset ef1ea85a6374
325 375 add changeset f9cafe1212c8
326 376 add changeset 911600dab2ae
327 377 adding manifests
328 378 adding file changes
329 379 adding foo/Bar/file.txt revisions
330 380 adding foo/file.txt revisions
331 381 adding quux/file.py revisions
332 382 added 3 changesets with 3 changes to 3 files
333 383 calling hook pretxnchangegroup.acl: hgext.acl.hook
334 384 acl: acl.allow enabled, 1 entries for user barney
335 385 acl: acl.deny enabled, 0 entries for user barney
336 386 acl: allowing changeset ef1ea85a6374
337 387 acl: allowing changeset f9cafe1212c8
338 388 acl: allowing changeset 911600dab2ae
339 389 rolling back last transaction
340 390 0:6675d58eff77
341 391
342 392 wilma can change files with a .txt extension
343 393 Pushing as user wilma
344 394 hgrc = """
345 395 [hooks]
346 396 pretxnchangegroup.acl = python:hgext.acl.hook
347 397 [acl]
348 398 sources = push
349 399 [acl.allow]
350 400 foo/** = fred
351 401 [acl.deny]
352 402 foo/bar/** = fred
353 403 foo/Bar/** = fred
354 404 [acl.allow]
355 405 ** = barney
356 406 **/*.txt = wilma
357 407 """
358 408 pushing to ../b
359 409 searching for changes
360 410 common changesets up to 6675d58eff77
411 3 changesets found
412 List of changesets:
413 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
414 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
415 911600dab2ae7a9baff75958b84fe606851ce955
361 416 adding changesets
362 417 add changeset ef1ea85a6374
363 418 add changeset f9cafe1212c8
364 419 add changeset 911600dab2ae
365 420 adding manifests
366 421 adding file changes
367 422 adding foo/Bar/file.txt revisions
368 423 adding foo/file.txt revisions
369 424 adding quux/file.py revisions
370 425 added 3 changesets with 3 changes to 3 files
371 426 calling hook pretxnchangegroup.acl: hgext.acl.hook
372 427 acl: acl.allow enabled, 1 entries for user wilma
373 428 acl: acl.deny enabled, 0 entries for user wilma
374 429 acl: allowing changeset ef1ea85a6374
375 430 acl: allowing changeset f9cafe1212c8
376 431 acl: user wilma not allowed on quux/file.py
377 432 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
378 433 abort: acl: access denied for changeset 911600dab2ae
379 434 transaction abort!
380 435 rollback completed
381 436 no rollback information available
382 437 0:6675d58eff77
383 438
384 439 file specified by acl.config does not exist
385 440 Pushing as user barney
386 441 hgrc = """
387 442 [hooks]
388 443 pretxnchangegroup.acl = python:hgext.acl.hook
389 444 [acl]
390 445 sources = push
391 446 [acl.allow]
392 447 foo/** = fred
393 448 [acl.deny]
394 449 foo/bar/** = fred
395 450 foo/Bar/** = fred
396 451 [acl.allow]
397 452 ** = barney
398 453 **/*.txt = wilma
399 454 [acl]
400 455 config = ../acl.config
401 456 """
402 457 pushing to ../b
403 458 searching for changes
404 459 common changesets up to 6675d58eff77
460 3 changesets found
461 List of changesets:
462 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
463 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
464 911600dab2ae7a9baff75958b84fe606851ce955
405 465 adding changesets
406 466 add changeset ef1ea85a6374
407 467 add changeset f9cafe1212c8
408 468 add changeset 911600dab2ae
409 469 adding manifests
410 470 adding file changes
411 471 adding foo/Bar/file.txt revisions
412 472 adding foo/file.txt revisions
413 473 adding quux/file.py revisions
414 474 added 3 changesets with 3 changes to 3 files
415 475 calling hook pretxnchangegroup.acl: hgext.acl.hook
416 476 acl: acl.allow enabled, 1 entries for user barney
417 477 acl: acl.deny enabled, 0 entries for user barney
418 478 acl: allowing changeset ef1ea85a6374
419 479 acl: allowing changeset f9cafe1212c8
420 480 acl: allowing changeset 911600dab2ae
421 481 rolling back last transaction
422 482 0:6675d58eff77
423 483
424 484 betty is allowed inside foo/ by a acl.config file
425 485 Pushing as user betty
426 486 hgrc = """
427 487 [hooks]
428 488 pretxnchangegroup.acl = python:hgext.acl.hook
429 489 [acl]
430 490 sources = push
431 491 [acl.allow]
432 492 foo/** = fred
433 493 [acl.deny]
434 494 foo/bar/** = fred
435 495 foo/Bar/** = fred
436 496 [acl.allow]
437 497 ** = barney
438 498 **/*.txt = wilma
439 499 [acl]
440 500 config = ../acl.config
441 501 """
442 502 acl.config = """
443 503 [acl.allow]
444 504 foo/** = betty
445 505 """
446 506 pushing to ../b
447 507 searching for changes
448 508 common changesets up to 6675d58eff77
509 3 changesets found
510 List of changesets:
511 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
512 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
513 911600dab2ae7a9baff75958b84fe606851ce955
449 514 adding changesets
450 515 add changeset ef1ea85a6374
451 516 add changeset f9cafe1212c8
452 517 add changeset 911600dab2ae
453 518 adding manifests
454 519 adding file changes
455 520 adding foo/Bar/file.txt revisions
456 521 adding foo/file.txt revisions
457 522 adding quux/file.py revisions
458 523 added 3 changesets with 3 changes to 3 files
459 524 calling hook pretxnchangegroup.acl: hgext.acl.hook
460 525 acl: acl.allow enabled, 1 entries for user betty
461 526 acl: acl.deny enabled, 0 entries for user betty
462 527 acl: allowing changeset ef1ea85a6374
463 528 acl: allowing changeset f9cafe1212c8
464 529 acl: user betty not allowed on quux/file.py
465 530 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
466 531 abort: acl: access denied for changeset 911600dab2ae
467 532 transaction abort!
468 533 rollback completed
469 534 no rollback information available
470 535 0:6675d58eff77
471 536
472 537 acl.config can set only [acl.allow]/[acl.deny]
473 538 Pushing as user barney
474 539 hgrc = """
475 540 [hooks]
476 541 pretxnchangegroup.acl = python:hgext.acl.hook
477 542 [acl]
478 543 sources = push
479 544 [acl.allow]
480 545 foo/** = fred
481 546 [acl.deny]
482 547 foo/bar/** = fred
483 548 foo/Bar/** = fred
484 549 [acl.allow]
485 550 ** = barney
486 551 **/*.txt = wilma
487 552 [acl]
488 553 config = ../acl.config
489 554 """
490 555 acl.config = """
491 556 [acl.allow]
492 557 foo/** = betty
493 558 [hooks]
494 559 changegroup.acl = false
495 560 """
496 561 pushing to ../b
497 562 searching for changes
498 563 common changesets up to 6675d58eff77
564 3 changesets found
565 List of changesets:
566 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
567 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
568 911600dab2ae7a9baff75958b84fe606851ce955
499 569 adding changesets
500 570 add changeset ef1ea85a6374
501 571 add changeset f9cafe1212c8
502 572 add changeset 911600dab2ae
503 573 adding manifests
504 574 adding file changes
505 575 adding foo/Bar/file.txt revisions
506 576 adding foo/file.txt revisions
507 577 adding quux/file.py revisions
508 578 added 3 changesets with 3 changes to 3 files
509 579 calling hook pretxnchangegroup.acl: hgext.acl.hook
510 580 acl: acl.allow enabled, 1 entries for user barney
511 581 acl: acl.deny enabled, 0 entries for user barney
512 582 acl: allowing changeset ef1ea85a6374
513 583 acl: allowing changeset f9cafe1212c8
514 584 acl: allowing changeset 911600dab2ae
515 585 rolling back last transaction
516 586 0:6675d58eff77
517 587