##// END OF EJS Templates
Merge with stable
Martin Geisler -
r10699:7b0a0d49 merge default
parent child Browse files
Show More
@@ -1,159 +1,159 b''
1 1 # Mercurial extension to provide 'hg relink' command
2 2 #
3 3 # Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 """recreates hardlinks between repository clones"""
9 9
10 10 from mercurial import cmdutil, hg, util
11 11 from mercurial.i18n import _
12 12 import os, stat
13 13
def relink(ui, repo, origin=None, **opts):
    """recreate hardlinks between two repositories

    When repositories are cloned locally, their data files will be
    hardlinked so that they only use the space of a single repository.

    Unfortunately, subsequent pulls into either repository will break
    hardlinks for any files touched by the new changesets, even if
    both repositories end up pulling the same changes.

    Similarly, passing --rev to "hg clone" will fail to use any
    hardlinks, falling back to a complete copy of the source
    repository.

    This command lets you recreate those hardlinks and reclaim that
    wasted space.

    This repository will be relinked to share space with ORIGIN, which
    must be on the same local disk. If ORIGIN is omitted, looks for
    "default-relink", then "default", in [paths].

    Do not attempt any read operations on this repository while the
    command is running. (Both repositories will be locked against
    writes.)
    """
    # Hardlinking needs platform support in util; bail out early if the
    # helpers used by prune() below are missing on this system.
    if not hasattr(util, 'samefile') or not hasattr(util, 'samedevice'):
        raise util.Abort(_('hardlinks are not supported on this system'))

    # Resolve ORIGIN: explicit argument wins, otherwise fall back to the
    # "default-relink" then "default" entries of [paths].
    src = hg.repository(
        cmdutil.remoteui(repo, opts),
        ui.expandpath(origin or 'default-relink', origin or 'default'))
    if not src.local():
        raise util.Abort('must specify local origin repository')
    ui.status(_('relinking %s to %s\n') % (src.store.path, repo.store.path))

    # Hold both store locks for the whole operation: local repo first,
    # then the origin, released in reverse order.
    locallock = repo.lock()
    try:
        remotelock = src.lock()
        try:
            # Pipeline: gather candidates, filter to linkable files,
            # then perform the actual relinking.
            candidates = sorted(collect(src.store.path, ui))
            targets = prune(candidates, src.store.path, repo.store.path, ui)
            do_relink(src.store.path, repo.store.path, targets, ui)
        finally:
            remotelock.release()
    finally:
        locallock.release()
58 58
def collect(src, ui):
    """Walk the store directory *src* and gather relink candidates.

    Returns a list of (relative-path, os.stat result) pairs, one for
    every regular revlog file (suffix '.d' or '.i') found under src.
    """
    seplen = len(os.path.sep)
    candidates = []
    for dirpath, dirnames, filenames in os.walk(src):
        # Path of this directory relative to the store root (skip the
        # leading separator as well).
        relpath = dirpath[len(src) + seplen:]
        for filename in filenames:
            # Only revlog data/index files are worth relinking.
            if filename[-2:] not in ('.d', '.i'):
                continue
            st = os.stat(os.path.join(dirpath, filename))
            # Ignore anything that is not a regular file (symlinks etc.).
            if not stat.S_ISREG(st.st_mode):
                continue
            candidates.append((os.path.join(relpath, filename), st))

    ui.status(_('collected %d candidate storage files\n') % len(candidates))
    return candidates
74 74
def prune(candidates, src, dst, ui):
    """Reduce *candidates* to files that can actually be relinked.

    A candidate survives only if the destination has a file of the same
    name and size on the same device that is not already the same inode.
    Returns a list of (relative-path, size) pairs.
    """
    def linkfilter(src, dst, st):
        # Returns the source stat result if (src, dst) look relinkable,
        # or False otherwise.
        try:
            ts = os.stat(dst)
        except OSError:
            # Destination doesn't have this file?
            return False
        if util.samefile(src, dst):
            # Already hardlinked; nothing to gain.
            return False
        if not util.samedevice(src, dst):
            # No point in continuing
            raise util.Abort(
                _('source and destination are on different devices'))
        if st.st_size != ts.st_size:
            # Different sizes can never be identical content.
            return False
        return st

    targets = []
    for fn, st in candidates:
        srcpath = os.path.join(src, fn)
        tgt = os.path.join(dst, fn)
        ts = linkfilter(srcpath, tgt, st)
        if not ts:
            ui.debug(_('not linkable: %s\n') % fn)
            continue
        targets.append((fn, ts.st_size))

    ui.status(_('pruned down to %d probably relinkable files\n') % len(targets))
    return targets
104 104
105 105 def do_relink(src, dst, files, ui):
106 106 def relinkfile(src, dst):
107 107 bak = dst + '.bak'
108 108 os.rename(dst, bak)
109 109 try:
110 110 util.os_link(src, dst)
111 111 except OSError:
112 112 os.rename(bak, dst)
113 113 raise
114 114 os.remove(bak)
115 115
116 116 CHUNKLEN = 65536
117 117 relinked = 0
118 118 savedbytes = 0
119 119
120 120 pos = 0
121 121 total = len(files)
122 122 for f, sz in files:
123 123 pos += 1
124 124 source = os.path.join(src, f)
125 125 tgt = os.path.join(dst, f)
126 126 # Binary mode, so that read() works correctly, especially on Windows
127 127 sfp = file(source, 'rb')
128 128 dfp = file(tgt, 'rb')
129 129 sin = sfp.read(CHUNKLEN)
130 130 while sin:
131 131 din = dfp.read(CHUNKLEN)
132 132 if sin != din:
133 133 break
134 134 sin = sfp.read(CHUNKLEN)
135 135 sfp.close()
136 136 dfp.close()
137 137 if sin:
138 138 ui.debug(_('not linkable: %s\n') % f)
139 139 continue
140 140 try:
141 141 relinkfile(source, tgt)
142 ui.progress(_('relink'), pos, f, _(' files'), total)
142 ui.progress(_('relinking'), pos, f, _(' files'), total)
143 143 relinked += 1
144 144 savedbytes += sz
145 145 except OSError, inst:
146 146 ui.warn('%s: %s\n' % (tgt, str(inst)))
147 147
148 ui.progress(_('relink'), None, f, _(' files'), total)
148 ui.progress(_('relinking'), None, f, _(' files'), total)
149 149
150 150 ui.status(_('relinked %d files (%d bytes reclaimed)\n') %
151 151 (relinked, savedbytes))
152 152
# Command table hooking the extension into hg: exposes 'hg relink [ORIGIN]'
# with no extension-specific options.
cmdtable = {
    'relink': (
        relink,
        [],
        _('[ORIGIN]')
    )
}
@@ -1,2223 +1,2223 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup, subrepo
11 11 import changelog, dirstate, filelog, manifest, context
12 12 import lock, transaction, store, encoding
13 13 import util, extensions, hook, error
14 14 import match as matchmod
15 15 import merge as mergemod
16 16 import tags as tagsmod
17 17 from lock import release
18 18 import weakref, stat, errno, os, time, inspect
19 19 propertycache = util.propertycache
20 20
21 21 class localrepository(repo.repository):
22 22 capabilities = set(('lookup', 'changegroupsubset', 'branchmap'))
23 23 supported = set('revlogv1 store fncache shared'.split())
24 24
25 25 def __init__(self, baseui, path=None, create=0):
26 26 repo.repository.__init__(self)
27 27 self.root = os.path.realpath(path)
28 28 self.path = os.path.join(self.root, ".hg")
29 29 self.origroot = path
30 30 self.opener = util.opener(self.path)
31 31 self.wopener = util.opener(self.root)
32 32 self.baseui = baseui
33 33 self.ui = baseui.copy()
34 34
35 35 try:
36 36 self.ui.readconfig(self.join("hgrc"), self.root)
37 37 extensions.loadall(self.ui)
38 38 except IOError:
39 39 pass
40 40
41 41 if not os.path.isdir(self.path):
42 42 if create:
43 43 if not os.path.exists(path):
44 44 os.mkdir(path)
45 45 os.mkdir(self.path)
46 46 requirements = ["revlogv1"]
47 47 if self.ui.configbool('format', 'usestore', True):
48 48 os.mkdir(os.path.join(self.path, "store"))
49 49 requirements.append("store")
50 50 if self.ui.configbool('format', 'usefncache', True):
51 51 requirements.append("fncache")
52 52 # create an invalid changelog
53 53 self.opener("00changelog.i", "a").write(
54 54 '\0\0\0\2' # represents revlogv2
55 55 ' dummy changelog to prevent using the old repo layout'
56 56 )
57 57 reqfile = self.opener("requires", "w")
58 58 for r in requirements:
59 59 reqfile.write("%s\n" % r)
60 60 reqfile.close()
61 61 else:
62 62 raise error.RepoError(_("repository %s not found") % path)
63 63 elif create:
64 64 raise error.RepoError(_("repository %s already exists") % path)
65 65 else:
66 66 # find requirements
67 67 requirements = set()
68 68 try:
69 69 requirements = set(self.opener("requires").read().splitlines())
70 70 except IOError, inst:
71 71 if inst.errno != errno.ENOENT:
72 72 raise
73 73 for r in requirements - self.supported:
74 74 raise error.RepoError(_("requirement '%s' not supported") % r)
75 75
76 76 self.sharedpath = self.path
77 77 try:
78 78 s = os.path.realpath(self.opener("sharedpath").read())
79 79 if not os.path.exists(s):
80 80 raise error.RepoError(
81 81 _('.hg/sharedpath points to nonexistent directory %s') % s)
82 82 self.sharedpath = s
83 83 except IOError, inst:
84 84 if inst.errno != errno.ENOENT:
85 85 raise
86 86
87 87 self.store = store.store(requirements, self.sharedpath, util.opener)
88 88 self.spath = self.store.path
89 89 self.sopener = self.store.opener
90 90 self.sjoin = self.store.join
91 91 self.opener.createmode = self.store.createmode
92 92 self.sopener.options = {}
93 93
94 94 # These two define the set of tags for this repository. _tags
95 95 # maps tag name to node; _tagtypes maps tag name to 'global' or
96 96 # 'local'. (Global tags are defined by .hgtags across all
97 97 # heads, and local tags are defined in .hg/localtags.) They
98 98 # constitute the in-memory cache of tags.
99 99 self._tags = None
100 100 self._tagtypes = None
101 101
102 102 self._branchcache = None # in UTF-8
103 103 self._branchcachetip = None
104 104 self.nodetagscache = None
105 105 self.filterpats = {}
106 106 self._datafilters = {}
107 107 self._transref = self._lockref = self._wlockref = None
108 108
109 109 @propertycache
110 110 def changelog(self):
111 111 c = changelog.changelog(self.sopener)
112 112 if 'HG_PENDING' in os.environ:
113 113 p = os.environ['HG_PENDING']
114 114 if p.startswith(self.root):
115 115 c.readpending('00changelog.i.a')
116 116 self.sopener.options['defversion'] = c.version
117 117 return c
118 118
119 119 @propertycache
120 120 def manifest(self):
121 121 return manifest.manifest(self.sopener)
122 122
123 123 @propertycache
124 124 def dirstate(self):
125 125 return dirstate.dirstate(self.opener, self.ui, self.root)
126 126
127 127 def __getitem__(self, changeid):
128 128 if changeid is None:
129 129 return context.workingctx(self)
130 130 return context.changectx(self, changeid)
131 131
132 132 def __contains__(self, changeid):
133 133 try:
134 134 return bool(self.lookup(changeid))
135 135 except error.RepoLookupError:
136 136 return False
137 137
138 138 def __nonzero__(self):
139 139 return True
140 140
141 141 def __len__(self):
142 142 return len(self.changelog)
143 143
144 144 def __iter__(self):
145 145 for i in xrange(len(self)):
146 146 yield i
147 147
148 148 def url(self):
149 149 return 'file:' + self.root
150 150
151 151 def hook(self, name, throw=False, **args):
152 152 return hook.hook(self.ui, self, name, throw, **args)
153 153
154 154 tag_disallowed = ':\r\n'
155 155
156 156 def _tag(self, names, node, message, local, user, date, extra={}):
157 157 if isinstance(names, str):
158 158 allchars = names
159 159 names = (names,)
160 160 else:
161 161 allchars = ''.join(names)
162 162 for c in self.tag_disallowed:
163 163 if c in allchars:
164 164 raise util.Abort(_('%r cannot be used in a tag name') % c)
165 165
166 166 for name in names:
167 167 self.hook('pretag', throw=True, node=hex(node), tag=name,
168 168 local=local)
169 169
170 170 def writetags(fp, names, munge, prevtags):
171 171 fp.seek(0, 2)
172 172 if prevtags and prevtags[-1] != '\n':
173 173 fp.write('\n')
174 174 for name in names:
175 175 m = munge and munge(name) or name
176 176 if self._tagtypes and name in self._tagtypes:
177 177 old = self._tags.get(name, nullid)
178 178 fp.write('%s %s\n' % (hex(old), m))
179 179 fp.write('%s %s\n' % (hex(node), m))
180 180 fp.close()
181 181
182 182 prevtags = ''
183 183 if local:
184 184 try:
185 185 fp = self.opener('localtags', 'r+')
186 186 except IOError:
187 187 fp = self.opener('localtags', 'a')
188 188 else:
189 189 prevtags = fp.read()
190 190
191 191 # local tags are stored in the current charset
192 192 writetags(fp, names, None, prevtags)
193 193 for name in names:
194 194 self.hook('tag', node=hex(node), tag=name, local=local)
195 195 return
196 196
197 197 try:
198 198 fp = self.wfile('.hgtags', 'rb+')
199 199 except IOError:
200 200 fp = self.wfile('.hgtags', 'ab')
201 201 else:
202 202 prevtags = fp.read()
203 203
204 204 # committed tags are stored in UTF-8
205 205 writetags(fp, names, encoding.fromlocal, prevtags)
206 206
207 207 if '.hgtags' not in self.dirstate:
208 208 self.add(['.hgtags'])
209 209
210 210 m = matchmod.exact(self.root, '', ['.hgtags'])
211 211 tagnode = self.commit(message, user, date, extra=extra, match=m)
212 212
213 213 for name in names:
214 214 self.hook('tag', node=hex(node), tag=name, local=local)
215 215
216 216 return tagnode
217 217
218 218 def tag(self, names, node, message, local, user, date):
219 219 '''tag a revision with one or more symbolic names.
220 220
221 221 names is a list of strings or, when adding a single tag, names may be a
222 222 string.
223 223
224 224 if local is True, the tags are stored in a per-repository file.
225 225 otherwise, they are stored in the .hgtags file, and a new
226 226 changeset is committed with the change.
227 227
228 228 keyword arguments:
229 229
230 230 local: whether to store tags in non-version-controlled file
231 231 (default False)
232 232
233 233 message: commit message to use if committing
234 234
235 235 user: name of user to use if committing
236 236
237 237 date: date tuple to use if committing'''
238 238
239 239 for x in self.status()[:5]:
240 240 if '.hgtags' in x:
241 241 raise util.Abort(_('working copy of .hgtags is changed '
242 242 '(please commit .hgtags manually)'))
243 243
244 244 self.tags() # instantiate the cache
245 245 self._tag(names, node, message, local, user, date)
246 246
247 247 def tags(self):
248 248 '''return a mapping of tag to node'''
249 249 if self._tags is None:
250 250 (self._tags, self._tagtypes) = self._findtags()
251 251
252 252 return self._tags
253 253
254 254 def _findtags(self):
255 255 '''Do the hard work of finding tags. Return a pair of dicts
256 256 (tags, tagtypes) where tags maps tag name to node, and tagtypes
257 257 maps tag name to a string like \'global\' or \'local\'.
258 258 Subclasses or extensions are free to add their own tags, but
259 259 should be aware that the returned dicts will be retained for the
260 260 duration of the localrepo object.'''
261 261
262 262 # XXX what tagtype should subclasses/extensions use? Currently
263 263 # mq and bookmarks add tags, but do not set the tagtype at all.
264 264 # Should each extension invent its own tag type? Should there
265 265 # be one tagtype for all such "virtual" tags? Or is the status
266 266 # quo fine?
267 267
268 268 alltags = {} # map tag name to (node, hist)
269 269 tagtypes = {}
270 270
271 271 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
272 272 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
273 273
274 274 # Build the return dicts. Have to re-encode tag names because
275 275 # the tags module always uses UTF-8 (in order not to lose info
276 276 # writing to the cache), but the rest of Mercurial wants them in
277 277 # local encoding.
278 278 tags = {}
279 279 for (name, (node, hist)) in alltags.iteritems():
280 280 if node != nullid:
281 281 tags[encoding.tolocal(name)] = node
282 282 tags['tip'] = self.changelog.tip()
283 283 tagtypes = dict([(encoding.tolocal(name), value)
284 284 for (name, value) in tagtypes.iteritems()])
285 285 return (tags, tagtypes)
286 286
287 287 def tagtype(self, tagname):
288 288 '''
289 289 return the type of the given tag. result can be:
290 290
291 291 'local' : a local tag
292 292 'global' : a global tag
293 293 None : tag does not exist
294 294 '''
295 295
296 296 self.tags()
297 297
298 298 return self._tagtypes.get(tagname)
299 299
300 300 def tagslist(self):
301 301 '''return a list of tags ordered by revision'''
302 302 l = []
303 303 for t, n in self.tags().iteritems():
304 304 try:
305 305 r = self.changelog.rev(n)
306 306 except:
307 307 r = -2 # sort to the beginning of the list if unknown
308 308 l.append((r, t, n))
309 309 return [(t, n) for r, t, n in sorted(l)]
310 310
311 311 def nodetags(self, node):
312 312 '''return the tags associated with a node'''
313 313 if not self.nodetagscache:
314 314 self.nodetagscache = {}
315 315 for t, n in self.tags().iteritems():
316 316 self.nodetagscache.setdefault(n, []).append(t)
317 317 return self.nodetagscache.get(node, [])
318 318
319 319 def _branchtags(self, partial, lrev):
320 320 # TODO: rename this function?
321 321 tiprev = len(self) - 1
322 322 if lrev != tiprev:
323 323 self._updatebranchcache(partial, lrev + 1, tiprev + 1)
324 324 self._writebranchcache(partial, self.changelog.tip(), tiprev)
325 325
326 326 return partial
327 327
328 328 def branchmap(self):
329 329 '''returns a dictionary {branch: [branchheads]}'''
330 330 tip = self.changelog.tip()
331 331 if self._branchcache is not None and self._branchcachetip == tip:
332 332 return self._branchcache
333 333
334 334 oldtip = self._branchcachetip
335 335 self._branchcachetip = tip
336 336 if oldtip is None or oldtip not in self.changelog.nodemap:
337 337 partial, last, lrev = self._readbranchcache()
338 338 else:
339 339 lrev = self.changelog.rev(oldtip)
340 340 partial = self._branchcache
341 341
342 342 self._branchtags(partial, lrev)
343 343 # this private cache holds all heads (not just tips)
344 344 self._branchcache = partial
345 345
346 346 return self._branchcache
347 347
348 348 def branchtags(self):
349 349 '''return a dict where branch names map to the tipmost head of
350 350 the branch, open heads come before closed'''
351 351 bt = {}
352 352 for bn, heads in self.branchmap().iteritems():
353 353 tip = heads[-1]
354 354 for h in reversed(heads):
355 355 if 'close' not in self.changelog.read(h)[5]:
356 356 tip = h
357 357 break
358 358 bt[bn] = tip
359 359 return bt
360 360
361 361
362 362 def _readbranchcache(self):
363 363 partial = {}
364 364 try:
365 365 f = self.opener("branchheads.cache")
366 366 lines = f.read().split('\n')
367 367 f.close()
368 368 except (IOError, OSError):
369 369 return {}, nullid, nullrev
370 370
371 371 try:
372 372 last, lrev = lines.pop(0).split(" ", 1)
373 373 last, lrev = bin(last), int(lrev)
374 374 if lrev >= len(self) or self[lrev].node() != last:
375 375 # invalidate the cache
376 376 raise ValueError('invalidating branch cache (tip differs)')
377 377 for l in lines:
378 378 if not l:
379 379 continue
380 380 node, label = l.split(" ", 1)
381 381 partial.setdefault(label.strip(), []).append(bin(node))
382 382 except KeyboardInterrupt:
383 383 raise
384 384 except Exception, inst:
385 385 if self.ui.debugflag:
386 386 self.ui.warn(str(inst), '\n')
387 387 partial, last, lrev = {}, nullid, nullrev
388 388 return partial, last, lrev
389 389
390 390 def _writebranchcache(self, branches, tip, tiprev):
391 391 try:
392 392 f = self.opener("branchheads.cache", "w", atomictemp=True)
393 393 f.write("%s %s\n" % (hex(tip), tiprev))
394 394 for label, nodes in branches.iteritems():
395 395 for node in nodes:
396 396 f.write("%s %s\n" % (hex(node), label))
397 397 f.rename()
398 398 except (IOError, OSError):
399 399 pass
400 400
401 401 def _updatebranchcache(self, partial, start, end):
402 402 # collect new branch entries
403 403 newbranches = {}
404 404 for r in xrange(start, end):
405 405 c = self[r]
406 406 newbranches.setdefault(c.branch(), []).append(c.node())
407 407 # if older branchheads are reachable from new ones, they aren't
408 408 # really branchheads. Note checking parents is insufficient:
409 409 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
410 410 for branch, newnodes in newbranches.iteritems():
411 411 bheads = partial.setdefault(branch, [])
412 412 bheads.extend(newnodes)
413 413 if len(bheads) < 2:
414 414 continue
415 415 newbheads = []
416 416 # starting from tip means fewer passes over reachable
417 417 while newnodes:
418 418 latest = newnodes.pop()
419 419 if latest not in bheads:
420 420 continue
421 421 minbhrev = self[min([self[bh].rev() for bh in bheads])].node()
422 422 reachable = self.changelog.reachable(latest, minbhrev)
423 423 bheads = [b for b in bheads if b not in reachable]
424 424 newbheads.insert(0, latest)
425 425 bheads.extend(newbheads)
426 426 partial[branch] = bheads
427 427
428 428 def lookup(self, key):
429 429 if isinstance(key, int):
430 430 return self.changelog.node(key)
431 431 elif key == '.':
432 432 return self.dirstate.parents()[0]
433 433 elif key == 'null':
434 434 return nullid
435 435 elif key == 'tip':
436 436 return self.changelog.tip()
437 437 n = self.changelog._match(key)
438 438 if n:
439 439 return n
440 440 if key in self.tags():
441 441 return self.tags()[key]
442 442 if key in self.branchtags():
443 443 return self.branchtags()[key]
444 444 n = self.changelog._partialmatch(key)
445 445 if n:
446 446 return n
447 447
448 448 # can't find key, check if it might have come from damaged dirstate
449 449 if key in self.dirstate.parents():
450 450 raise error.Abort(_("working directory has unknown parent '%s'!")
451 451 % short(key))
452 452 try:
453 453 if len(key) == 20:
454 454 key = hex(key)
455 455 except:
456 456 pass
457 457 raise error.RepoLookupError(_("unknown revision '%s'") % key)
458 458
459 459 def local(self):
460 460 return True
461 461
462 462 def join(self, f):
463 463 return os.path.join(self.path, f)
464 464
465 465 def wjoin(self, f):
466 466 return os.path.join(self.root, f)
467 467
468 468 def rjoin(self, f):
469 469 return os.path.join(self.root, util.pconvert(f))
470 470
471 471 def file(self, f):
472 472 if f[0] == '/':
473 473 f = f[1:]
474 474 return filelog.filelog(self.sopener, f)
475 475
476 476 def changectx(self, changeid):
477 477 return self[changeid]
478 478
479 479 def parents(self, changeid=None):
480 480 '''get list of changectxs for parents of changeid'''
481 481 return self[changeid].parents()
482 482
483 483 def filectx(self, path, changeid=None, fileid=None):
484 484 """changeid can be a changeset revision, node, or tag.
485 485 fileid can be a file revision or node."""
486 486 return context.filectx(self, path, changeid, fileid)
487 487
488 488 def getcwd(self):
489 489 return self.dirstate.getcwd()
490 490
491 491 def pathto(self, f, cwd=None):
492 492 return self.dirstate.pathto(f, cwd)
493 493
494 494 def wfile(self, f, mode='r'):
495 495 return self.wopener(f, mode)
496 496
497 497 def _link(self, f):
498 498 return os.path.islink(self.wjoin(f))
499 499
500 500 def _filter(self, filter, filename, data):
501 501 if filter not in self.filterpats:
502 502 l = []
503 503 for pat, cmd in self.ui.configitems(filter):
504 504 if cmd == '!':
505 505 continue
506 506 mf = matchmod.match(self.root, '', [pat])
507 507 fn = None
508 508 params = cmd
509 509 for name, filterfn in self._datafilters.iteritems():
510 510 if cmd.startswith(name):
511 511 fn = filterfn
512 512 params = cmd[len(name):].lstrip()
513 513 break
514 514 if not fn:
515 515 fn = lambda s, c, **kwargs: util.filter(s, c)
516 516 # Wrap old filters not supporting keyword arguments
517 517 if not inspect.getargspec(fn)[2]:
518 518 oldfn = fn
519 519 fn = lambda s, c, **kwargs: oldfn(s, c)
520 520 l.append((mf, fn, params))
521 521 self.filterpats[filter] = l
522 522
523 523 for mf, fn, cmd in self.filterpats[filter]:
524 524 if mf(filename):
525 525 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
526 526 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
527 527 break
528 528
529 529 return data
530 530
531 531 def adddatafilter(self, name, filter):
532 532 self._datafilters[name] = filter
533 533
534 534 def wread(self, filename):
535 535 if self._link(filename):
536 536 data = os.readlink(self.wjoin(filename))
537 537 else:
538 538 data = self.wopener(filename, 'r').read()
539 539 return self._filter("encode", filename, data)
540 540
541 541 def wwrite(self, filename, data, flags):
542 542 data = self._filter("decode", filename, data)
543 543 try:
544 544 os.unlink(self.wjoin(filename))
545 545 except OSError:
546 546 pass
547 547 if 'l' in flags:
548 548 self.wopener.symlink(data, filename)
549 549 else:
550 550 self.wopener(filename, 'w').write(data)
551 551 if 'x' in flags:
552 552 util.set_flags(self.wjoin(filename), False, True)
553 553
554 554 def wwritedata(self, filename, data):
555 555 return self._filter("decode", filename, data)
556 556
557 557 def transaction(self):
558 558 tr = self._transref and self._transref() or None
559 559 if tr and tr.running():
560 560 return tr.nest()
561 561
562 562 # abort here if the journal already exists
563 563 if os.path.exists(self.sjoin("journal")):
564 564 raise error.RepoError(
565 565 _("abandoned transaction found - run hg recover"))
566 566
567 567 # save dirstate for rollback
568 568 try:
569 569 ds = self.opener("dirstate").read()
570 570 except IOError:
571 571 ds = ""
572 572 self.opener("journal.dirstate", "w").write(ds)
573 573 self.opener("journal.branch", "w").write(self.dirstate.branch())
574 574
575 575 renames = [(self.sjoin("journal"), self.sjoin("undo")),
576 576 (self.join("journal.dirstate"), self.join("undo.dirstate")),
577 577 (self.join("journal.branch"), self.join("undo.branch"))]
578 578 tr = transaction.transaction(self.ui.warn, self.sopener,
579 579 self.sjoin("journal"),
580 580 aftertrans(renames),
581 581 self.store.createmode)
582 582 self._transref = weakref.ref(tr)
583 583 return tr
584 584
585 585 def recover(self):
586 586 lock = self.lock()
587 587 try:
588 588 if os.path.exists(self.sjoin("journal")):
589 589 self.ui.status(_("rolling back interrupted transaction\n"))
590 590 transaction.rollback(self.sopener, self.sjoin("journal"),
591 591 self.ui.warn)
592 592 self.invalidate()
593 593 return True
594 594 else:
595 595 self.ui.warn(_("no interrupted transaction available\n"))
596 596 return False
597 597 finally:
598 598 lock.release()
599 599
600 600 def rollback(self):
601 601 wlock = lock = None
602 602 try:
603 603 wlock = self.wlock()
604 604 lock = self.lock()
605 605 if os.path.exists(self.sjoin("undo")):
606 606 self.ui.status(_("rolling back last transaction\n"))
607 607 transaction.rollback(self.sopener, self.sjoin("undo"),
608 608 self.ui.warn)
609 609 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
610 610 try:
611 611 branch = self.opener("undo.branch").read()
612 612 self.dirstate.setbranch(branch)
613 613 except IOError:
614 614 self.ui.warn(_("Named branch could not be reset, "
615 615 "current branch still is: %s\n")
616 616 % encoding.tolocal(self.dirstate.branch()))
617 617 self.invalidate()
618 618 self.dirstate.invalidate()
619 619 self.destroyed()
620 620 else:
621 621 self.ui.warn(_("no rollback information available\n"))
622 622 finally:
623 623 release(lock, wlock)
624 624
625 625 def invalidatecaches(self):
626 626 self._tags = None
627 627 self._tagtypes = None
628 628 self.nodetagscache = None
629 629 self._branchcache = None # in UTF-8
630 630 self._branchcachetip = None
631 631
632 632 def invalidate(self):
633 633 for a in "changelog manifest".split():
634 634 if a in self.__dict__:
635 635 delattr(self, a)
636 636 self.invalidatecaches()
637 637
638 638 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
639 639 try:
640 640 l = lock.lock(lockname, 0, releasefn, desc=desc)
641 641 except error.LockHeld, inst:
642 642 if not wait:
643 643 raise
644 644 self.ui.warn(_("waiting for lock on %s held by %r\n") %
645 645 (desc, inst.locker))
646 646 # default to 600 seconds timeout
647 647 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
648 648 releasefn, desc=desc)
649 649 if acquirefn:
650 650 acquirefn()
651 651 return l
652 652
653 653 def lock(self, wait=True):
654 654 '''Lock the repository store (.hg/store) and return a weak reference
655 655 to the lock. Use this before modifying the store (e.g. committing or
656 656 stripping). If you are opening a transaction, get a lock as well.)'''
657 657 l = self._lockref and self._lockref()
658 658 if l is not None and l.held:
659 659 l.lock()
660 660 return l
661 661
662 662 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
663 663 _('repository %s') % self.origroot)
664 664 self._lockref = weakref.ref(l)
665 665 return l
666 666
667 667 def wlock(self, wait=True):
668 668 '''Lock the non-store parts of the repository (everything under
669 669 .hg except .hg/store) and return a weak reference to the lock.
670 670 Use this before modifying files in .hg.'''
671 671 l = self._wlockref and self._wlockref()
672 672 if l is not None and l.held:
673 673 l.lock()
674 674 return l
675 675
676 676 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
677 677 self.dirstate.invalidate, _('working directory of %s') %
678 678 self.origroot)
679 679 self._wlockref = weakref.ref(l)
680 680 return l
681 681
682 682 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
683 683 """
684 684 commit an individual file as part of a larger transaction
685 685 """
686 686
687 687 fname = fctx.path()
688 688 text = fctx.data()
689 689 flog = self.file(fname)
690 690 fparent1 = manifest1.get(fname, nullid)
691 691 fparent2 = fparent2o = manifest2.get(fname, nullid)
692 692
693 693 meta = {}
694 694 copy = fctx.renamed()
695 695 if copy and copy[0] != fname:
696 696 # Mark the new revision of this file as a copy of another
697 697 # file. This copy data will effectively act as a parent
698 698 # of this new revision. If this is a merge, the first
699 699 # parent will be the nullid (meaning "look up the copy data")
700 700 # and the second one will be the other parent. For example:
701 701 #
702 702 # 0 --- 1 --- 3 rev1 changes file foo
703 703 # \ / rev2 renames foo to bar and changes it
704 704 # \- 2 -/ rev3 should have bar with all changes and
705 705 # should record that bar descends from
706 706 # bar in rev2 and foo in rev1
707 707 #
708 708 # this allows this merge to succeed:
709 709 #
710 710 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
711 711 # \ / merging rev3 and rev4 should use bar@rev2
712 712 # \- 2 --- 4 as the merge base
713 713 #
714 714
715 715 cfname = copy[0]
716 716 crev = manifest1.get(cfname)
717 717 newfparent = fparent2
718 718
719 719 if manifest2: # branch merge
720 720 if fparent2 == nullid or crev is None: # copied on remote side
721 721 if cfname in manifest2:
722 722 crev = manifest2[cfname]
723 723 newfparent = fparent1
724 724
725 725 # find source in nearest ancestor if we've lost track
726 726 if not crev:
727 727 self.ui.debug(" %s: searching for copy revision for %s\n" %
728 728 (fname, cfname))
729 729 for ancestor in self['.'].ancestors():
730 730 if cfname in ancestor:
731 731 crev = ancestor[cfname].filenode()
732 732 break
733 733
734 734 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
735 735 meta["copy"] = cfname
736 736 meta["copyrev"] = hex(crev)
737 737 fparent1, fparent2 = nullid, newfparent
738 738 elif fparent2 != nullid:
739 739 # is one parent an ancestor of the other?
740 740 fparentancestor = flog.ancestor(fparent1, fparent2)
741 741 if fparentancestor == fparent1:
742 742 fparent1, fparent2 = fparent2, nullid
743 743 elif fparentancestor == fparent2:
744 744 fparent2 = nullid
745 745
746 746 # is the file changed?
747 747 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
748 748 changelist.append(fname)
749 749 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
750 750
751 751 # are just the flags changed during merge?
752 752 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
753 753 changelist.append(fname)
754 754
755 755 return fparent1
756 756
757 757 def commit(self, text="", user=None, date=None, match=None, force=False,
758 758 editor=False, extra={}):
759 759 """Add a new revision to current repository.
760 760
761 761 Revision information is gathered from the working directory,
762 762 match can be used to filter the committed files. If editor is
763 763 supplied, it is called to get a commit message.
764 764 """
765 765
766 766 def fail(f, msg):
767 767 raise util.Abort('%s: %s' % (f, msg))
768 768
769 769 if not match:
770 770 match = matchmod.always(self.root, '')
771 771
772 772 if not force:
773 773 vdirs = []
774 774 match.dir = vdirs.append
775 775 match.bad = fail
776 776
777 777 wlock = self.wlock()
778 778 try:
779 779 p1, p2 = self.dirstate.parents()
780 780 wctx = self[None]
781 781
782 782 if (not force and p2 != nullid and match and
783 783 (match.files() or match.anypats())):
784 784 raise util.Abort(_('cannot partially commit a merge '
785 785 '(do not specify files or patterns)'))
786 786
787 787 changes = self.status(match=match, clean=force)
788 788 if force:
789 789 changes[0].extend(changes[6]) # mq may commit unchanged files
790 790
791 791 # check subrepos
792 792 subs = []
793 793 removedsubs = set()
794 794 for p in wctx.parents():
795 795 removedsubs.update(s for s in p.substate if match(s))
796 796 for s in wctx.substate:
797 797 removedsubs.discard(s)
798 798 if match(s) and wctx.sub(s).dirty():
799 799 subs.append(s)
800 800 if (subs or removedsubs) and '.hgsubstate' not in changes[0]:
801 801 changes[0].insert(0, '.hgsubstate')
802 802
803 803 # make sure all explicit patterns are matched
804 804 if not force and match.files():
805 805 matched = set(changes[0] + changes[1] + changes[2])
806 806
807 807 for f in match.files():
808 808 if f == '.' or f in matched or f in wctx.substate:
809 809 continue
810 810 if f in changes[3]: # missing
811 811 fail(f, _('file not found!'))
812 812 if f in vdirs: # visited directory
813 813 d = f + '/'
814 814 for mf in matched:
815 815 if mf.startswith(d):
816 816 break
817 817 else:
818 818 fail(f, _("no match under directory!"))
819 819 elif f not in self.dirstate:
820 820 fail(f, _("file not tracked!"))
821 821
822 822 if (not force and not extra.get("close") and p2 == nullid
823 823 and not (changes[0] or changes[1] or changes[2])
824 824 and self[None].branch() == self['.'].branch()):
825 825 return None
826 826
827 827 ms = mergemod.mergestate(self)
828 828 for f in changes[0]:
829 829 if f in ms and ms[f] == 'u':
830 830 raise util.Abort(_("unresolved merge conflicts "
831 831 "(see hg resolve)"))
832 832
833 833 cctx = context.workingctx(self, (p1, p2), text, user, date,
834 834 extra, changes)
835 835 if editor:
836 836 cctx._text = editor(self, cctx, subs)
837 837 edited = (text != cctx._text)
838 838
839 839 # commit subs
840 840 if subs or removedsubs:
841 841 state = wctx.substate.copy()
842 842 for s in subs:
843 843 self.ui.status(_('committing subrepository %s\n') % s)
844 844 sr = wctx.sub(s).commit(cctx._text, user, date)
845 845 state[s] = (state[s][0], sr)
846 846 subrepo.writestate(self, state)
847 847
848 848 # Save commit message in case this transaction gets rolled back
849 849 # (e.g. by a pretxncommit hook). Leave the content alone on
850 850 # the assumption that the user will use the same editor again.
851 851 msgfile = self.opener('last-message.txt', 'wb')
852 852 msgfile.write(cctx._text)
853 853 msgfile.close()
854 854
855 855 try:
856 856 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
857 857 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
858 858 ret = self.commitctx(cctx, True)
859 859 except:
860 860 if edited:
861 861 msgfn = self.pathto(msgfile.name[len(self.root)+1:])
862 862 self.ui.write(
863 863 _('note: commit message saved in %s\n') % msgfn)
864 864 raise
865 865
866 866 # update dirstate and mergestate
867 867 for f in changes[0] + changes[1]:
868 868 self.dirstate.normal(f)
869 869 for f in changes[2]:
870 870 self.dirstate.forget(f)
871 871 self.dirstate.setparents(ret)
872 872 ms.reset()
873 873 finally:
874 874 wlock.release()
875 875
876 876 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
877 877 return ret
878 878
    def commitctx(self, ctx, error=False):
        """Add a new revision to current repository.
        Revision information is passed via the context argument.

        If error is true, any IOError while committing a file is fatal;
        otherwise a file failing with ENOENT is treated as removed.
        Returns the node of the new changeset.
        """

        tr = lock = None
        removed = ctx.removed()
        p1, p2 = ctx.p1(), ctx.p2()
        # p1's manifest copy is mutated below into the new manifest
        m1 = p1.manifest().copy()
        m2 = p2.manifest()
        user = ctx.user()

        lock = self.lock()
        try:
            tr = self.transaction()
            # hand revlogs a weak proxy so 'del tr' in the finally block
            # actually drops the transaction
            trp = weakref.proxy(tr)

            # check in files
            new = {}          # filename -> new filelog node
            changed = []      # files whose content/flags actually changed
            linkrev = len(self)
            for f in sorted(ctx.modified() + ctx.added()):
                self.ui.note(f + "\n")
                try:
                    fctx = ctx[f]
                    new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
                                              changed)
                    m1.set(f, fctx.flags())
                except OSError, inst:
                    self.ui.warn(_("trouble committing %s!\n") % f)
                    raise
                except IOError, inst:
                    # a missing file is tolerated (recorded as removed)
                    # unless the caller asked for strict error handling
                    errcode = getattr(inst, 'errno', errno.ENOENT)
                    if error or errcode and errcode != errno.ENOENT:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    else:
                        removed.append(f)

            # update manifest
            m1.update(new)
            # only report removals of files present in one of the parents
            removed = [f for f in sorted(removed) if f in m1 or f in m2]
            drop = [f for f in removed if f in m1]
            for f in drop:
                del m1[f]
            mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
                                   p2.manifestnode(), (new, drop))

            # update changelog
            self.changelog.delayupdate()
            n = self.changelog.add(mn, changed + removed, ctx.description(),
                                   trp, p1.node(), p2.node(),
                                   user, ctx.date(), ctx.extra().copy())
            # lets pretxncommit hooks inspect the pending changelog entry
            p = lambda: self.changelog.writepending() and self.root or ""
            xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2, pending=p)
            self.changelog.finalize(trp)
            tr.close()

            # a new head may have appeared; refresh cached branch heads
            if self._branchcache:
                self.branchtags()
            return n
        finally:
            del tr
            lock.release()
945 945
946 946 def destroyed(self):
947 947 '''Inform the repository that nodes have been destroyed.
948 948 Intended for use by strip and rollback, so there's a common
949 949 place for anything that has to be done after destroying history.'''
950 950 # XXX it might be nice if we could take the list of destroyed
951 951 # nodes, but I don't see an easy way for rollback() to do that
952 952
953 953 # Ensure the persistent tag cache is updated. Doing it now
954 954 # means that the tag cache only has to worry about destroyed
955 955 # heads immediately after a strip/rollback. That in turn
956 956 # guarantees that "cachetip == currenttip" (comparing both rev
957 957 # and node) always means no nodes have been added or destroyed.
958 958
959 959 # XXX this is suboptimal when qrefresh'ing: we strip the current
960 960 # head, refresh the tag cache, then immediately add a new head.
961 961 # But I think doing it this way is necessary for the "instant
962 962 # tag cache retrieval" case to work.
963 963 self.invalidatecaches()
964 964
965 965 def walk(self, match, node=None):
966 966 '''
967 967 walk recursively through the directory tree or a given
968 968 changeset, finding all files matched by the match
969 969 function
970 970 '''
971 971 return self[node].walk(match)
972 972
    def status(self, node1='.', node2=None, match=None,
               ignored=False, clean=False, unknown=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns a 7-tuple of sorted file lists:
        (modified, added, removed, deleted, unknown, ignored, clean);
        the last three are populated only when the corresponding
        ignored/clean/unknown flag is set.
        """

        def mfmatches(ctx):
            # manifest of ctx restricted to files accepted by match
            mf = ctx.manifest().copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        if isinstance(node1, context.changectx):
            ctx1 = node1
        else:
            ctx1 = self[node1]
        if isinstance(node2, context.changectx):
            ctx2 = node2
        else:
            ctx2 = self[node2]

        working = ctx2.rev() is None
        parentworking = working and ctx1 == self['.']
        match = match or matchmod.always(self.root, self.getcwd())
        listignored, listclean, listunknown = ignored, clean, unknown

        # load earliest manifest first for caching reasons
        if not working and ctx2.rev() < ctx1.rev():
            ctx2.manifest()

        if not parentworking:
            def bad(f, msg):
                # only complain about files missing from the base revision
                if f not in ctx1:
                    self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
            match.bad = bad

        if working: # we need to scan the working dir
            subrepos = ctx1.substate.keys()
            s = self.dirstate.status(match, subrepos, listignored,
                                     listclean, listunknown)
            cmp, modified, added, removed, deleted, unknown, ignored, clean = s

            # check for any possibly clean files
            if parentworking and cmp:
                fixup = []
                # do a full compare of any files that might have changed
                for f in sorted(cmp):
                    if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
                        or ctx1[f].cmp(ctx2[f].data())):
                        modified.append(f)
                    else:
                        fixup.append(f)

                if listclean:
                    clean += fixup

                # update dirstate for files that are actually clean
                if fixup:
                    try:
                        # updating the dirstate is optional
                        # so we don't wait on the lock
                        wlock = self.wlock(False)
                        try:
                            for f in fixup:
                                self.dirstate.normal(f)
                        finally:
                            wlock.release()
                    except error.LockError:
                        pass

        if not parentworking:
            mf1 = mfmatches(ctx1)
            if working:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                mf2 = mfmatches(self['.'])
                for f in cmp + modified + added:
                    mf2[f] = None
                    mf2.set(f, ctx2.flags(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]
            else:
                # we are comparing two revisions
                deleted, unknown, ignored = [], [], []
                mf2 = mfmatches(ctx2)

            # compare the two manifests; mf2[fn] of None (working dir)
            # forces a content compare via ctx1[fn].cmp
            modified, added, clean = [], [], []
            for fn in mf2:
                if fn in mf1:
                    if (mf1.flags(fn) != mf2.flags(fn) or
                        (mf1[fn] != mf2[fn] and
                         (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
                        modified.append(fn)
                    elif listclean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)
            # whatever is left in mf1 was removed
            removed = mf1.keys()

        r = modified, added, removed, deleted, unknown, ignored, clean
        [l.sort() for l in r]
        return r
1080 1080
1081 1081 def add(self, list):
1082 1082 wlock = self.wlock()
1083 1083 try:
1084 1084 rejected = []
1085 1085 for f in list:
1086 1086 p = self.wjoin(f)
1087 1087 try:
1088 1088 st = os.lstat(p)
1089 1089 except:
1090 1090 self.ui.warn(_("%s does not exist!\n") % f)
1091 1091 rejected.append(f)
1092 1092 continue
1093 1093 if st.st_size > 10000000:
1094 1094 self.ui.warn(_("%s: files over 10MB may cause memory and"
1095 1095 " performance problems\n"
1096 1096 "(use 'hg revert %s' to unadd the file)\n")
1097 1097 % (f, f))
1098 1098 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1099 1099 self.ui.warn(_("%s not added: only files and symlinks "
1100 1100 "supported currently\n") % f)
1101 1101 rejected.append(p)
1102 1102 elif self.dirstate[f] in 'amn':
1103 1103 self.ui.warn(_("%s already tracked!\n") % f)
1104 1104 elif self.dirstate[f] == 'r':
1105 1105 self.dirstate.normallookup(f)
1106 1106 else:
1107 1107 self.dirstate.add(f)
1108 1108 return rejected
1109 1109 finally:
1110 1110 wlock.release()
1111 1111
1112 1112 def forget(self, list):
1113 1113 wlock = self.wlock()
1114 1114 try:
1115 1115 for f in list:
1116 1116 if self.dirstate[f] != 'a':
1117 1117 self.ui.warn(_("%s not added!\n") % f)
1118 1118 else:
1119 1119 self.dirstate.forget(f)
1120 1120 finally:
1121 1121 wlock.release()
1122 1122
1123 1123 def remove(self, list, unlink=False):
1124 1124 if unlink:
1125 1125 for f in list:
1126 1126 try:
1127 1127 util.unlink(self.wjoin(f))
1128 1128 except OSError, inst:
1129 1129 if inst.errno != errno.ENOENT:
1130 1130 raise
1131 1131 wlock = self.wlock()
1132 1132 try:
1133 1133 for f in list:
1134 1134 if unlink and os.path.exists(self.wjoin(f)):
1135 1135 self.ui.warn(_("%s still exists!\n") % f)
1136 1136 elif self.dirstate[f] == 'a':
1137 1137 self.dirstate.forget(f)
1138 1138 elif f not in self.dirstate:
1139 1139 self.ui.warn(_("%s not tracked!\n") % f)
1140 1140 else:
1141 1141 self.dirstate.remove(f)
1142 1142 finally:
1143 1143 wlock.release()
1144 1144
1145 1145 def undelete(self, list):
1146 1146 manifests = [self.manifest.read(self.changelog.read(p)[0])
1147 1147 for p in self.dirstate.parents() if p != nullid]
1148 1148 wlock = self.wlock()
1149 1149 try:
1150 1150 for f in list:
1151 1151 if self.dirstate[f] != 'r':
1152 1152 self.ui.warn(_("%s not removed!\n") % f)
1153 1153 else:
1154 1154 m = f in manifests[0] and manifests[0] or manifests[1]
1155 1155 t = self.file(f).read(m[f])
1156 1156 self.wwrite(f, t, m.flags(f))
1157 1157 self.dirstate.normal(f)
1158 1158 finally:
1159 1159 wlock.release()
1160 1160
1161 1161 def copy(self, source, dest):
1162 1162 p = self.wjoin(dest)
1163 1163 if not (os.path.exists(p) or os.path.islink(p)):
1164 1164 self.ui.warn(_("%s does not exist!\n") % dest)
1165 1165 elif not (os.path.isfile(p) or os.path.islink(p)):
1166 1166 self.ui.warn(_("copy failed: %s is not a file or a "
1167 1167 "symbolic link\n") % dest)
1168 1168 else:
1169 1169 wlock = self.wlock()
1170 1170 try:
1171 1171 if self.dirstate[dest] in '?r':
1172 1172 self.dirstate.add(dest)
1173 1173 self.dirstate.copy(source, dest)
1174 1174 finally:
1175 1175 wlock.release()
1176 1176
1177 1177 def heads(self, start=None):
1178 1178 heads = self.changelog.heads(start)
1179 1179 # sort the output in rev descending order
1180 1180 heads = [(-self.changelog.rev(h), h) for h in heads]
1181 1181 return [n for (r, n) in sorted(heads)]
1182 1182
1183 1183 def branchheads(self, branch=None, start=None, closed=False):
1184 1184 '''return a (possibly filtered) list of heads for the given branch
1185 1185
1186 1186 Heads are returned in topological order, from newest to oldest.
1187 1187 If branch is None, use the dirstate branch.
1188 1188 If start is not None, return only heads reachable from start.
1189 1189 If closed is True, return heads that are marked as closed as well.
1190 1190 '''
1191 1191 if branch is None:
1192 1192 branch = self[None].branch()
1193 1193 branches = self.branchmap()
1194 1194 if branch not in branches:
1195 1195 return []
1196 1196 # the cache returns heads ordered lowest to highest
1197 1197 bheads = list(reversed(branches[branch]))
1198 1198 if start is not None:
1199 1199 # filter out the heads that cannot be reached from startrev
1200 1200 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1201 1201 bheads = [h for h in bheads if h in fbheads]
1202 1202 if not closed:
1203 1203 bheads = [h for h in bheads if
1204 1204 ('close' not in self.changelog.read(h)[5])]
1205 1205 return bheads
1206 1206
1207 1207 def branches(self, nodes):
1208 1208 if not nodes:
1209 1209 nodes = [self.changelog.tip()]
1210 1210 b = []
1211 1211 for n in nodes:
1212 1212 t = n
1213 1213 while 1:
1214 1214 p = self.changelog.parents(n)
1215 1215 if p[1] != nullid or p[0] == nullid:
1216 1216 b.append((t, n, p[0], p[1]))
1217 1217 break
1218 1218 n = p[0]
1219 1219 return b
1220 1220
1221 1221 def between(self, pairs):
1222 1222 r = []
1223 1223
1224 1224 for top, bottom in pairs:
1225 1225 n, l, i = top, [], 0
1226 1226 f = 1
1227 1227
1228 1228 while n != bottom and n != nullid:
1229 1229 p = self.changelog.parents(n)[0]
1230 1230 if i == f:
1231 1231 l.append(n)
1232 1232 f = f * 2
1233 1233 n = p
1234 1234 i += 1
1235 1235
1236 1236 r.append(l)
1237 1237
1238 1238 return r
1239 1239
1240 1240 def findincoming(self, remote, base=None, heads=None, force=False):
1241 1241 """Return list of roots of the subsets of missing nodes from remote
1242 1242
1243 1243 If base dict is specified, assume that these nodes and their parents
1244 1244 exist on the remote side and that no child of a node of base exists
1245 1245 in both remote and self.
1246 1246 Furthermore base will be updated to include the nodes that exists
1247 1247 in self and remote but no children exists in self and remote.
1248 1248 If a list of heads is specified, return only nodes which are heads
1249 1249 or ancestors of these heads.
1250 1250
1251 1251 All the ancestors of base are in self and in remote.
1252 1252 All the descendants of the list returned are missing in self.
1253 1253 (and so we know that the rest of the nodes are missing in remote, see
1254 1254 outgoing)
1255 1255 """
1256 1256 return self.findcommonincoming(remote, base, heads, force)[1]
1257 1257
    def findcommonincoming(self, remote, base=None, heads=None, force=False):
        """Return a tuple (common, missing roots, heads) used to identify
        missing nodes from remote.

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore base will be updated to include the nodes that exists
        in self and remote but no children exists in self and remote.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        """
        m = self.changelog.nodemap
        search = []          # (head, root) ranges to binary-search later
        fetch = set()        # earliest-unknown nodes (missing roots)
        seen = set()         # branch heads already examined
        seenbranch = set()   # whole branch tuples already examined
        if base is None:
            base = {}

        if not heads:
            heads = remote.heads()

        if self.changelog.tip() == nullid:
            # local repo is empty: everything the remote has is missing
            base[nullid] = 1
            if heads != [nullid]:
                return [nullid], [nullid], list(heads)
            return [nullid], [], []

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        heads = unknown
        if not unknown:
            # every remote head is already known locally
            return base.keys(), [], []

        req = set(unknown)   # nodes already requested from the remote
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug("examining %s:%s\n"
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid: # found the end of the branch
                    pass
                elif n in seenbranch:
                    self.ui.debug("branch already found\n")
                    continue
                elif n[1] and n[1] in m: # do we know the base?
                    self.ui.debug("found incomplete branch %s:%s\n"
                                  % (short(n[0]), short(n[1])))
                    search.append(n[0:2]) # schedule branch range for scanning
                    seenbranch.add(n)
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug("found new changeset %s\n" %
                                          short(n[1]))
                            fetch.add(n[1]) # earliest unknown
                        for p in n[2:4]:
                            if p in m:
                                base[p] = 1 # latest known

                    # queue still-unknown parents for the next round trip
                    for p in n[2:4]:
                        if p not in req and p not in m:
                            r.append(p)
                            req.add(p)
                seen.add(n[0])

            if r:
                reqcnt += 1
                self.ui.progress(_('searching'), reqcnt, unit=_('queries'))
                self.ui.debug("request %d: %s\n" %
                            (reqcnt, " ".join(map(short, r))))
                # batch queries: ten branch lookups per remote request
                for p in xrange(0, len(r), 10):
                    for b in remote.branches(r[p:p + 10]):
                        self.ui.debug("received %s:%s\n" %
                                      (short(b[0]), short(b[1])))
                        unknown.append(b)

        # do binary search on the branches we found
        while search:
            newsearch = []
            reqcnt += 1
            self.ui.progress(_('searching'), reqcnt, unit=_('queries'))
            for n, l in zip(search, remote.between(search)):
                l.append(n[1])
                p = n[0]
                f = 1
                for i in l:
                    self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                    if i in m:
                        if f <= 2:
                            self.ui.debug("found new branch changeset %s\n" %
                                              short(p))
                            fetch.add(p)
                            base[i] = 1
                        else:
                            self.ui.debug("narrowed branch search to %s:%s\n"
                                          % (short(p), short(i)))
                            newsearch.append((p, i))
                        break
                    p, f = i, f * 2
                search = newsearch

        # sanity check our fetch list
        for f in fetch:
            if f in m:
                # NOTE(review): short(f[:4]) truncates the node before
                # shortening it; short(f) looks like the intent — confirm
                raise error.RepoError(_("already have changeset ")
                                      + short(f[:4]))

        if base.keys() == [nullid]:
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.debug("found new changesets starting at " +
                     " ".join([short(f) for f in fetch]) + "\n")

        self.ui.progress(_('searching'), None, unit=_('queries'))
        self.ui.debug("%d total queries\n" % reqcnt)

        return base.keys(), list(fetch), heads
1402 1402
    def findoutgoing(self, remote, base=None, heads=None, force=False):
        """Return list of nodes that are roots of subsets not in remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads, and return a second element which
        contains all remote heads which get new children.
        """
        if base is None:
            base = {}
        # populate base with the common nodes (base may also arrive
        # pre-filled by the caller; findincoming extends it either way)
        self.findincoming(remote, base, heads, force=force)

        self.ui.debug("common changesets up to "
                      + " ".join(map(short, base.keys())) + "\n")

        remain = set(self.changelog.nodemap)

        # prune everything remote has from the tree
        remain.remove(nullid)
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                remain.remove(n)
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        # find every remote head that will get new children
        updated_heads = set()
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)
            if heads:
                if p1 in heads:
                    updated_heads.add(p1)
                if p2 in heads:
                    updated_heads.add(p2)

        # this is the set of all roots we have to push
        if heads:
            return subset, list(updated_heads)
        else:
            return subset
1450 1450
1451 1451 def pull(self, remote, heads=None, force=False):
1452 1452 lock = self.lock()
1453 1453 try:
1454 1454 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1455 1455 force=force)
1456 1456 if fetch == [nullid]:
1457 1457 self.ui.status(_("requesting all changes\n"))
1458 1458
1459 1459 if not fetch:
1460 1460 self.ui.status(_("no changes found\n"))
1461 1461 return 0
1462 1462
1463 1463 if heads is None and remote.capable('changegroupsubset'):
1464 1464 heads = rheads
1465 1465
1466 1466 if heads is None:
1467 1467 cg = remote.changegroup(fetch, 'pull')
1468 1468 else:
1469 1469 if not remote.capable('changegroupsubset'):
1470 1470 raise util.Abort(_("Partial pull cannot be done because "
1471 1471 "other repository doesn't support "
1472 1472 "changegroupsubset."))
1473 1473 cg = remote.changegroupsubset(fetch, heads, 'pull')
1474 1474 return self.addchangegroup(cg, 'pull', remote.url())
1475 1475 finally:
1476 1476 lock.release()
1477 1477
1478 1478 def push(self, remote, force=False, revs=None):
1479 1479 # there are two ways to push to remote repo:
1480 1480 #
1481 1481 # addchangegroup assumes local user can lock remote
1482 1482 # repo (local filesystem, old ssh servers).
1483 1483 #
1484 1484 # unbundle assumes local user cannot lock remote repo (new ssh
1485 1485 # servers, http servers).
1486 1486
1487 1487 if remote.capable('unbundle'):
1488 1488 return self.push_unbundle(remote, force, revs)
1489 1489 return self.push_addchangegroup(remote, force, revs)
1490 1490
    def prepush(self, remote, force, revs):
        '''Analyze the local and remote repositories and determine which
        changesets need to be pushed to the remote. Return a tuple
        (changegroup, remoteheads). changegroup is a readable file-like
        object whose read() returns successive changegroup chunks ready to
        be sent over the wire. remoteheads is the list of remote heads.

        When there is nothing to push, returns (None, 1); when the push
        is refused (would create new remote heads or branches without
        --force), returns (None, 0).
        '''
        common = {}
        remote_heads = remote.heads()
        # inc is non-empty if the remote has changes we do not have
        inc = self.findincoming(remote, common, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, common, remote_heads)
        msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)

        def checkbranch(lheads, rheads, updatelb, branchname=None):
            '''
            check whether there are more local heads than remote heads on
            a specific branch.

            lheads: local branch heads
            rheads: remote branch heads
            updatelb: outgoing local branch bases
            '''

            warn = 0

            if not revs and len(lheads) > len(rheads):
                warn = 1
            else:
                # add local heads involved in the push
                updatelheads = [self.changelog.heads(x, lheads)
                                for x in updatelb]
                newheads = set(sum(updatelheads, [])) & set(lheads)

                if not newheads:
                    return True

                # add heads we don't have or that are not involved in the push
                for r in rheads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            newheads.add(r)
                    else:
                        newheads.add(r)
                if len(newheads) > len(rheads):
                    warn = 1

            if warn:
                if branchname is not None:
                    msg = _("abort: push creates new remote heads"
                            " on branch '%s'!\n") % branchname
                else:
                    msg = _("abort: push creates new remote heads!\n")
                self.ui.warn(msg)
                if len(lheads) > len(rheads):
                    self.ui.status(_("(did you forget to merge?"
                                     " use push -f to force)\n"))
                else:
                    self.ui.status(_("(you should pull and merge or"
                                     " use push -f to force)\n"))
                return False
            return True

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # Check for each named branch if we're creating new remote heads.
            # To be a remote head after push, node must be either:
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head
            #
            # New named branches cannot be created without --force.

            if remote_heads != [nullid]:
                if remote.capable('branchmap'):
                    remotebrheads = remote.branchmap()

                    if not revs:
                        localbrheads = self.branchmap()
                    else:
                        # group the heads being pushed by named branch
                        localbrheads = {}
                        for n in heads:
                            branch = self[n].branch()
                            localbrheads.setdefault(branch, []).append(n)

                    newbranches = list(set(localbrheads) - set(remotebrheads))
                    if newbranches: # new branch requires --force
                        branchnames = ', '.join("%s" % b for b in newbranches)
                        self.ui.warn(_("abort: push creates "
                                       "new remote branches: %s!\n")
                                     % branchnames)
                        # propose 'push -b .' in the msg too?
                        self.ui.status(_("(use 'hg push -f' to force)\n"))
                        return None, 0
                    for branch, lheads in localbrheads.iteritems():
                        if branch in remotebrheads:
                            rheads = remotebrheads[branch]
                            if not checkbranch(lheads, rheads, update, branch):
                                return None, 0
                else:
                    # old remote without branchmap: check all heads at once
                    if not checkbranch(heads, remote_heads, update):
                        return None, 0

        if inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))


        if revs is None:
            # use the fast path, no race possible on push
            nodes = self.changelog.findmissing(common.keys())
            cg = self._changegroup(nodes, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
1610 1610
1611 1611 def push_addchangegroup(self, remote, force, revs):
1612 1612 lock = remote.lock()
1613 1613 try:
1614 1614 ret = self.prepush(remote, force, revs)
1615 1615 if ret[0] is not None:
1616 1616 cg, remote_heads = ret
1617 1617 return remote.addchangegroup(cg, 'push', self.url())
1618 1618 return ret[1]
1619 1619 finally:
1620 1620 lock.release()
1621 1621
1622 1622 def push_unbundle(self, remote, force, revs):
1623 1623 # local repo finds heads on server, finds out what revs it
1624 1624 # must push. once revs transferred, if server finds it has
1625 1625 # different heads (someone else won commit/push race), server
1626 1626 # aborts.
1627 1627
1628 1628 ret = self.prepush(remote, force, revs)
1629 1629 if ret[0] is not None:
1630 1630 cg, remote_heads = ret
1631 1631 if force:
1632 1632 remote_heads = ['force']
1633 1633 return remote.unbundle(cg, remote_heads, 'push')
1634 1634 return ret[1]
1635 1635
1636 1636 def changegroupinfo(self, nodes, source):
1637 1637 if self.ui.verbose or source == 'bundle':
1638 1638 self.ui.status(_("%d changesets found\n") % len(nodes))
1639 1639 if self.ui.debugflag:
1640 1640 self.ui.debug("list of changesets:\n")
1641 1641 for node in nodes:
1642 1642 self.ui.debug("%s\n" % hex(node))
1643 1643
1644 1644 def changegroupsubset(self, bases, heads, source, extranodes=None):
1645 1645 """Compute a changegroup consisting of all the nodes that are
1646 1646 descendents of any of the bases and ancestors of any of the heads.
1647 1647 Return a chunkbuffer object whose read() method will return
1648 1648 successive changegroup chunks.
1649 1649
1650 1650 It is fairly complex as determining which filenodes and which
1651 1651 manifest nodes need to be included for the changeset to be complete
1652 1652 is non-trivial.
1653 1653
1654 1654 Another wrinkle is doing the reverse, figuring out which changeset in
1655 1655 the changegroup a particular filenode or manifestnode belongs to.
1656 1656
1657 1657 The caller can specify some nodes that must be included in the
1658 1658 changegroup using the extranodes argument. It should be a dict
1659 1659 where the keys are the filenames (or 1 for the manifest), and the
1660 1660 values are lists of (node, linknode) tuples, where node is a wanted
1661 1661 node and linknode is the changelog node that should be transmitted as
1662 1662 the linkrev.
1663 1663 """
1664 1664
1665 1665 # Set up some initial variables
1666 1666 # Make it easy to refer to self.changelog
1667 1667 cl = self.changelog
1668 1668 # msng is short for missing - compute the list of changesets in this
1669 1669 # changegroup.
1670 1670 if not bases:
1671 1671 bases = [nullid]
1672 1672 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1673 1673
1674 1674 if extranodes is None:
1675 1675 # can we go through the fast path ?
1676 1676 heads.sort()
1677 1677 allheads = self.heads()
1678 1678 allheads.sort()
1679 1679 if heads == allheads:
1680 1680 return self._changegroup(msng_cl_lst, source)
1681 1681
1682 1682 # slow path
1683 1683 self.hook('preoutgoing', throw=True, source=source)
1684 1684
1685 1685 self.changegroupinfo(msng_cl_lst, source)
1686 1686 # Some bases may turn out to be superfluous, and some heads may be
1687 1687 # too. nodesbetween will return the minimal set of bases and heads
1688 1688 # necessary to re-create the changegroup.
1689 1689
1690 1690 # Known heads are the list of heads that it is assumed the recipient
1691 1691 # of this changegroup will know about.
1692 1692 knownheads = set()
1693 1693 # We assume that all parents of bases are known heads.
1694 1694 for n in bases:
1695 1695 knownheads.update(cl.parents(n))
1696 1696 knownheads.discard(nullid)
1697 1697 knownheads = list(knownheads)
1698 1698 if knownheads:
1699 1699 # Now that we know what heads are known, we can compute which
1700 1700 # changesets are known. The recipient must know about all
1701 1701 # changesets required to reach the known heads from the null
1702 1702 # changeset.
1703 1703 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1704 1704 junk = None
1705 1705 # Transform the list into a set.
1706 1706 has_cl_set = set(has_cl_set)
1707 1707 else:
1708 1708 # If there were no known heads, the recipient cannot be assumed to
1709 1709 # know about any changesets.
1710 1710 has_cl_set = set()
1711 1711
1712 1712 # Make it easy to refer to self.manifest
1713 1713 mnfst = self.manifest
1714 1714 # We don't know which manifests are missing yet
1715 1715 msng_mnfst_set = {}
1716 1716 # Nor do we know which filenodes are missing.
1717 1717 msng_filenode_set = {}
1718 1718
1719 1719 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1720 1720 junk = None
1721 1721
1722 1722 # A changeset always belongs to itself, so the changenode lookup
1723 1723 # function for a changenode is identity.
1724 1724 def identity(x):
1725 1725 return x
1726 1726
1727 1727 # If we determine that a particular file or manifest node must be a
1728 1728 # node that the recipient of the changegroup will already have, we can
1729 1729 # also assume the recipient will have all the parents. This function
1730 1730 # prunes them from the set of missing nodes.
1731 1731 def prune_parents(revlog, hasset, msngset):
1732 1732 for r in revlog.ancestors(*[revlog.rev(n) for n in hasset]):
1733 1733 msngset.pop(revlog.node(r), None)
1734 1734
1735 1735 # Use the information collected in collect_manifests_and_files to say
1736 1736 # which changenode any manifestnode belongs to.
1737 1737 def lookup_manifest_link(mnfstnode):
1738 1738 return msng_mnfst_set[mnfstnode]
1739 1739
1740 1740 # A function generating function that sets up the initial environment
1741 1741 # the inner function.
1742 1742 def filenode_collector(changedfiles):
1743 1743 # This gathers information from each manifestnode included in the
1744 1744 # changegroup about which filenodes the manifest node references
1745 1745 # so we can include those in the changegroup too.
1746 1746 #
1747 1747 # It also remembers which changenode each filenode belongs to. It
1748 1748 # does this by assuming the a filenode belongs to the changenode
1749 1749 # the first manifest that references it belongs to.
1750 1750 def collect_msng_filenodes(mnfstnode):
1751 1751 r = mnfst.rev(mnfstnode)
1752 1752 if r - 1 in mnfst.parentrevs(r):
1753 1753 # If the previous rev is one of the parents,
1754 1754 # we only need to see a diff.
1755 1755 deltamf = mnfst.readdelta(mnfstnode)
1756 1756 # For each line in the delta
1757 1757 for f, fnode in deltamf.iteritems():
1758 1758 f = changedfiles.get(f, None)
1759 1759 # And if the file is in the list of files we care
1760 1760 # about.
1761 1761 if f is not None:
1762 1762 # Get the changenode this manifest belongs to
1763 1763 clnode = msng_mnfst_set[mnfstnode]
1764 1764 # Create the set of filenodes for the file if
1765 1765 # there isn't one already.
1766 1766 ndset = msng_filenode_set.setdefault(f, {})
1767 1767 # And set the filenode's changelog node to the
1768 1768 # manifest's if it hasn't been set already.
1769 1769 ndset.setdefault(fnode, clnode)
1770 1770 else:
1771 1771 # Otherwise we need a full manifest.
1772 1772 m = mnfst.read(mnfstnode)
1773 1773 # For every file in we care about.
1774 1774 for f in changedfiles:
1775 1775 fnode = m.get(f, None)
1776 1776 # If it's in the manifest
1777 1777 if fnode is not None:
1778 1778 # See comments above.
1779 1779 clnode = msng_mnfst_set[mnfstnode]
1780 1780 ndset = msng_filenode_set.setdefault(f, {})
1781 1781 ndset.setdefault(fnode, clnode)
1782 1782 return collect_msng_filenodes
1783 1783
1784 1784 # We have a list of filenodes we think we need for a file, lets remove
1785 1785 # all those we know the recipient must have.
1786 1786 def prune_filenodes(f, filerevlog):
1787 1787 msngset = msng_filenode_set[f]
1788 1788 hasset = set()
1789 1789 # If a 'missing' filenode thinks it belongs to a changenode we
1790 1790 # assume the recipient must have, then the recipient must have
1791 1791 # that filenode.
1792 1792 for n in msngset:
1793 1793 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1794 1794 if clnode in has_cl_set:
1795 1795 hasset.add(n)
1796 1796 prune_parents(filerevlog, hasset, msngset)
1797 1797
1798 1798 # A function generator function that sets up the a context for the
1799 1799 # inner function.
1800 1800 def lookup_filenode_link_func(fname):
1801 1801 msngset = msng_filenode_set[fname]
1802 1802 # Lookup the changenode the filenode belongs to.
1803 1803 def lookup_filenode_link(fnode):
1804 1804 return msngset[fnode]
1805 1805 return lookup_filenode_link
1806 1806
1807 1807 # Add the nodes that were explicitly requested.
1808 1808 def add_extra_nodes(name, nodes):
1809 1809 if not extranodes or name not in extranodes:
1810 1810 return
1811 1811
1812 1812 for node, linknode in extranodes[name]:
1813 1813 if node not in nodes:
1814 1814 nodes[node] = linknode
1815 1815
1816 1816 # Now that we have all theses utility functions to help out and
1817 1817 # logically divide up the task, generate the group.
1818 1818 def gengroup():
1819 1819 # The set of changed files starts empty.
1820 1820 changedfiles = {}
1821 1821 collect = changegroup.collector(cl, msng_mnfst_set, changedfiles)
1822 1822
1823 1823 # Create a changenode group generator that will call our functions
1824 1824 # back to lookup the owning changenode and collect information.
1825 1825 group = cl.group(msng_cl_lst, identity, collect)
1826 1826 cnt = 0
1827 1827 for chnk in group:
1828 1828 yield chnk
1829 self.ui.progress(_('bundle changes'), cnt, unit=_('chunks'))
1829 self.ui.progress(_('bundling changes'), cnt, unit=_('chunks'))
1830 1830 cnt += 1
1831 self.ui.progress(_('bundle changes'), None, unit=_('chunks'))
1831 self.ui.progress(_('bundling changes'), None, unit=_('chunks'))
1832 1832
1833 1833
1834 1834 # Figure out which manifest nodes (of the ones we think might be
1835 1835 # part of the changegroup) the recipient must know about and
1836 1836 # remove them from the changegroup.
1837 1837 has_mnfst_set = set()
1838 1838 for n in msng_mnfst_set:
1839 1839 # If a 'missing' manifest thinks it belongs to a changenode
1840 1840 # the recipient is assumed to have, obviously the recipient
1841 1841 # must have that manifest.
1842 1842 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1843 1843 if linknode in has_cl_set:
1844 1844 has_mnfst_set.add(n)
1845 1845 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1846 1846 add_extra_nodes(1, msng_mnfst_set)
1847 1847 msng_mnfst_lst = msng_mnfst_set.keys()
1848 1848 # Sort the manifestnodes by revision number.
1849 1849 msng_mnfst_lst.sort(key=mnfst.rev)
1850 1850 # Create a generator for the manifestnodes that calls our lookup
1851 1851 # and data collection functions back.
1852 1852 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1853 1853 filenode_collector(changedfiles))
1854 1854 cnt = 0
1855 1855 for chnk in group:
1856 1856 yield chnk
1857 self.ui.progress(_('bundle manifests'), cnt, unit=_('chunks'))
1857 self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks'))
1858 1858 cnt += 1
1859 self.ui.progress(_('bundle manifests'), None, unit=_('chunks'))
1859 self.ui.progress(_('bundling manifests'), None, unit=_('chunks'))
1860 1860
1861 1861 # These are no longer needed, dereference and toss the memory for
1862 1862 # them.
1863 1863 msng_mnfst_lst = None
1864 1864 msng_mnfst_set.clear()
1865 1865
1866 1866 if extranodes:
1867 1867 for fname in extranodes:
1868 1868 if isinstance(fname, int):
1869 1869 continue
1870 1870 msng_filenode_set.setdefault(fname, {})
1871 1871 changedfiles[fname] = 1
1872 1872 # Go through all our files in order sorted by name.
1873 1873 cnt = 0
1874 1874 for fname in sorted(changedfiles):
1875 1875 filerevlog = self.file(fname)
1876 1876 if not len(filerevlog):
1877 1877 raise util.Abort(_("empty or missing revlog for %s") % fname)
1878 1878 # Toss out the filenodes that the recipient isn't really
1879 1879 # missing.
1880 1880 if fname in msng_filenode_set:
1881 1881 prune_filenodes(fname, filerevlog)
1882 1882 add_extra_nodes(fname, msng_filenode_set[fname])
1883 1883 msng_filenode_lst = msng_filenode_set[fname].keys()
1884 1884 else:
1885 1885 msng_filenode_lst = []
1886 1886 # If any filenodes are left, generate the group for them,
1887 1887 # otherwise don't bother.
1888 1888 if len(msng_filenode_lst) > 0:
1889 1889 yield changegroup.chunkheader(len(fname))
1890 1890 yield fname
1891 1891 # Sort the filenodes by their revision #
1892 1892 msng_filenode_lst.sort(key=filerevlog.rev)
1893 1893 # Create a group generator and only pass in a changenode
1894 1894 # lookup function as we need to collect no information
1895 1895 # from filenodes.
1896 1896 group = filerevlog.group(msng_filenode_lst,
1897 1897 lookup_filenode_link_func(fname))
1898 1898 for chnk in group:
1899 1899 self.ui.progress(
1900 _('bundle files'), cnt, item=fname, unit=_('chunks'))
1900 _('bundling files'), cnt, item=fname, unit=_('chunks'))
1901 1901 cnt += 1
1902 1902 yield chnk
1903 1903 if fname in msng_filenode_set:
1904 1904 # Don't need this anymore, toss it to free memory.
1905 1905 del msng_filenode_set[fname]
1906 1906 # Signal that no more groups are left.
1907 1907 yield changegroup.closechunk()
1908 self.ui.progress(_('bundle files'), None, unit=_('chunks'))
1908 self.ui.progress(_('bundling files'), None, unit=_('chunks'))
1909 1909
1910 1910 if msng_cl_lst:
1911 1911 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1912 1912
1913 1913 return util.chunkbuffer(gengroup())
1914 1914
1915 1915 def changegroup(self, basenodes, source):
1916 1916 # to avoid a race we use changegroupsubset() (issue1320)
1917 1917 return self.changegroupsubset(basenodes, self.heads(), source)
1918 1918
1919 1919 def _changegroup(self, nodes, source):
1920 1920 """Compute the changegroup of all nodes that we have that a recipient
1921 1921 doesn't. Return a chunkbuffer object whose read() method will return
1922 1922 successive changegroup chunks.
1923 1923
1924 1924 This is much easier than the previous function as we can assume that
1925 1925 the recipient has any changenode we aren't sending them.
1926 1926
1927 1927 nodes is the set of nodes to send"""
1928 1928
1929 1929 self.hook('preoutgoing', throw=True, source=source)
1930 1930
1931 1931 cl = self.changelog
1932 1932 revset = set([cl.rev(n) for n in nodes])
1933 1933 self.changegroupinfo(nodes, source)
1934 1934
1935 1935 def identity(x):
1936 1936 return x
1937 1937
1938 1938 def gennodelst(log):
1939 1939 for r in log:
1940 1940 if log.linkrev(r) in revset:
1941 1941 yield log.node(r)
1942 1942
1943 1943 def lookuprevlink_func(revlog):
1944 1944 def lookuprevlink(n):
1945 1945 return cl.node(revlog.linkrev(revlog.rev(n)))
1946 1946 return lookuprevlink
1947 1947
1948 1948 def gengroup():
1949 1949 '''yield a sequence of changegroup chunks (strings)'''
1950 1950 # construct a list of all changed files
1951 1951 changedfiles = {}
1952 1952 mmfs = {}
1953 1953 collect = changegroup.collector(cl, mmfs, changedfiles)
1954 1954
1955 1955 cnt = 0
1956 1956 for chnk in cl.group(nodes, identity, collect):
1957 self.ui.progress(_('bundle changes'), cnt, unit=_('chunks'))
1957 self.ui.progress(_('bundling changes'), cnt, unit=_('chunks'))
1958 1958 cnt += 1
1959 1959 yield chnk
1960 self.ui.progress(_('bundle changes'), None, unit=_('chunks'))
1960 self.ui.progress(_('bundling changes'), None, unit=_('chunks'))
1961 1961
1962 1962 mnfst = self.manifest
1963 1963 nodeiter = gennodelst(mnfst)
1964 1964 cnt = 0
1965 1965 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1966 self.ui.progress(_('bundle manifests'), cnt, unit=_('chunks'))
1966 self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks'))
1967 1967 cnt += 1
1968 1968 yield chnk
1969 self.ui.progress(_('bundle manifests'), None, unit=_('chunks'))
1969 self.ui.progress(_('bundling manifests'), None, unit=_('chunks'))
1970 1970
1971 1971 cnt = 0
1972 1972 for fname in sorted(changedfiles):
1973 1973 filerevlog = self.file(fname)
1974 1974 if not len(filerevlog):
1975 1975 raise util.Abort(_("empty or missing revlog for %s") % fname)
1976 1976 nodeiter = gennodelst(filerevlog)
1977 1977 nodeiter = list(nodeiter)
1978 1978 if nodeiter:
1979 1979 yield changegroup.chunkheader(len(fname))
1980 1980 yield fname
1981 1981 lookup = lookuprevlink_func(filerevlog)
1982 1982 for chnk in filerevlog.group(nodeiter, lookup):
1983 1983 self.ui.progress(
1984 _('bundle files'), cnt, item=fname, unit=_('chunks'))
1984 _('bundling files'), cnt, item=fname, unit=_('chunks'))
1985 1985 cnt += 1
1986 1986 yield chnk
1987 self.ui.progress(_('bundle files'), None, unit=_('chunks'))
1987 self.ui.progress(_('bundling files'), None, unit=_('chunks'))
1988 1988
1989 1989 yield changegroup.closechunk()
1990 1990
1991 1991 if nodes:
1992 1992 self.hook('outgoing', node=hex(nodes[0]), source=source)
1993 1993
1994 1994 return util.chunkbuffer(gengroup())
1995 1995
1996 1996 def addchangegroup(self, source, srctype, url, emptyok=False):
1997 1997 """add changegroup to repo.
1998 1998
1999 1999 return values:
2000 2000 - nothing changed or no source: 0
2001 2001 - more heads than before: 1+added heads (2..n)
2002 2002 - less heads than before: -1-removed heads (-2..-n)
2003 2003 - number of heads stays the same: 1
2004 2004 """
2005 2005 def csmap(x):
2006 2006 self.ui.debug("add changeset %s\n" % short(x))
2007 2007 return len(cl)
2008 2008
2009 2009 def revmap(x):
2010 2010 return cl.rev(x)
2011 2011
2012 2012 if not source:
2013 2013 return 0
2014 2014
2015 2015 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2016 2016
2017 2017 changesets = files = revisions = 0
2018 2018
2019 2019 # write changelog data to temp files so concurrent readers will not see
2020 2020 # inconsistent view
2021 2021 cl = self.changelog
2022 2022 cl.delayupdate()
2023 2023 oldheads = len(cl.heads())
2024 2024
2025 2025 tr = self.transaction()
2026 2026 try:
2027 2027 trp = weakref.proxy(tr)
2028 2028 # pull off the changeset group
2029 2029 self.ui.status(_("adding changesets\n"))
2030 2030 clstart = len(cl)
2031 2031 class prog(object):
2032 2032 step = _('changesets')
2033 2033 count = 1
2034 2034 ui = self.ui
2035 2035 def __call__(self):
2036 2036 self.ui.progress(self.step, self.count, unit=_('chunks'))
2037 2037 self.count += 1
2038 2038 pr = prog()
2039 2039 chunkiter = changegroup.chunkiter(source, progress=pr)
2040 2040 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
2041 2041 raise util.Abort(_("received changelog group is empty"))
2042 2042 clend = len(cl)
2043 2043 changesets = clend - clstart
2044 2044 self.ui.progress(_('changesets'), None)
2045 2045
2046 2046 # pull off the manifest group
2047 2047 self.ui.status(_("adding manifests\n"))
2048 2048 pr.step = _('manifests')
2049 2049 pr.count = 1
2050 2050 chunkiter = changegroup.chunkiter(source, progress=pr)
2051 2051 # no need to check for empty manifest group here:
2052 2052 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2053 2053 # no new manifest will be created and the manifest group will
2054 2054 # be empty during the pull
2055 2055 self.manifest.addgroup(chunkiter, revmap, trp)
2056 2056 self.ui.progress(_('manifests'), None)
2057 2057
2058 2058 needfiles = {}
2059 2059 if self.ui.configbool('server', 'validate', default=False):
2060 2060 # validate incoming csets have their manifests
2061 2061 for cset in xrange(clstart, clend):
2062 2062 mfest = self.changelog.read(self.changelog.node(cset))[0]
2063 2063 mfest = self.manifest.readdelta(mfest)
2064 2064 # store file nodes we must see
2065 2065 for f, n in mfest.iteritems():
2066 2066 needfiles.setdefault(f, set()).add(n)
2067 2067
2068 2068 # process the files
2069 2069 self.ui.status(_("adding file changes\n"))
2070 2070 pr.step = 'files'
2071 2071 pr.count = 1
2072 2072 while 1:
2073 2073 f = changegroup.getchunk(source)
2074 2074 if not f:
2075 2075 break
2076 2076 self.ui.debug("adding %s revisions\n" % f)
2077 2077 fl = self.file(f)
2078 2078 o = len(fl)
2079 2079 chunkiter = changegroup.chunkiter(source, progress=pr)
2080 2080 if fl.addgroup(chunkiter, revmap, trp) is None:
2081 2081 raise util.Abort(_("received file revlog group is empty"))
2082 2082 revisions += len(fl) - o
2083 2083 files += 1
2084 2084 if f in needfiles:
2085 2085 needs = needfiles[f]
2086 2086 for new in xrange(o, len(fl)):
2087 2087 n = fl.node(new)
2088 2088 if n in needs:
2089 2089 needs.remove(n)
2090 2090 if not needs:
2091 2091 del needfiles[f]
2092 2092 self.ui.progress(_('files'), None)
2093 2093
2094 2094 for f, needs in needfiles.iteritems():
2095 2095 fl = self.file(f)
2096 2096 for n in needs:
2097 2097 try:
2098 2098 fl.rev(n)
2099 2099 except error.LookupError:
2100 2100 raise util.Abort(
2101 2101 _('missing file data for %s:%s - run hg verify') %
2102 2102 (f, hex(n)))
2103 2103
2104 2104 newheads = len(cl.heads())
2105 2105 heads = ""
2106 2106 if oldheads and newheads != oldheads:
2107 2107 heads = _(" (%+d heads)") % (newheads - oldheads)
2108 2108
2109 2109 self.ui.status(_("added %d changesets"
2110 2110 " with %d changes to %d files%s\n")
2111 2111 % (changesets, revisions, files, heads))
2112 2112
2113 2113 if changesets > 0:
2114 2114 p = lambda: cl.writepending() and self.root or ""
2115 2115 self.hook('pretxnchangegroup', throw=True,
2116 2116 node=hex(cl.node(clstart)), source=srctype,
2117 2117 url=url, pending=p)
2118 2118
2119 2119 # make changelog see real files again
2120 2120 cl.finalize(trp)
2121 2121
2122 2122 tr.close()
2123 2123 finally:
2124 2124 del tr
2125 2125
2126 2126 if changesets > 0:
2127 2127 # forcefully update the on-disk branch cache
2128 2128 self.ui.debug("updating the branch cache\n")
2129 2129 self.branchtags()
2130 2130 self.hook("changegroup", node=hex(cl.node(clstart)),
2131 2131 source=srctype, url=url)
2132 2132
2133 2133 for i in xrange(clstart, clend):
2134 2134 self.hook("incoming", node=hex(cl.node(i)),
2135 2135 source=srctype, url=url)
2136 2136
2137 2137 # never return 0 here:
2138 2138 if newheads < oldheads:
2139 2139 return newheads - oldheads - 1
2140 2140 else:
2141 2141 return newheads - oldheads + 1
2142 2142
2143 2143
2144 2144 def stream_in(self, remote):
2145 2145 fp = remote.stream_out()
2146 2146 l = fp.readline()
2147 2147 try:
2148 2148 resp = int(l)
2149 2149 except ValueError:
2150 2150 raise error.ResponseError(
2151 2151 _('Unexpected response from remote server:'), l)
2152 2152 if resp == 1:
2153 2153 raise util.Abort(_('operation forbidden by server'))
2154 2154 elif resp == 2:
2155 2155 raise util.Abort(_('locking the remote repository failed'))
2156 2156 elif resp != 0:
2157 2157 raise util.Abort(_('the server sent an unknown error code'))
2158 2158 self.ui.status(_('streaming all changes\n'))
2159 2159 l = fp.readline()
2160 2160 try:
2161 2161 total_files, total_bytes = map(int, l.split(' ', 1))
2162 2162 except (ValueError, TypeError):
2163 2163 raise error.ResponseError(
2164 2164 _('Unexpected response from remote server:'), l)
2165 2165 self.ui.status(_('%d files to transfer, %s of data\n') %
2166 2166 (total_files, util.bytecount(total_bytes)))
2167 2167 start = time.time()
2168 2168 for i in xrange(total_files):
2169 2169 # XXX doesn't support '\n' or '\r' in filenames
2170 2170 l = fp.readline()
2171 2171 try:
2172 2172 name, size = l.split('\0', 1)
2173 2173 size = int(size)
2174 2174 except (ValueError, TypeError):
2175 2175 raise error.ResponseError(
2176 2176 _('Unexpected response from remote server:'), l)
2177 2177 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2178 2178 # for backwards compat, name was partially encoded
2179 2179 ofp = self.sopener(store.decodedir(name), 'w')
2180 2180 for chunk in util.filechunkiter(fp, limit=size):
2181 2181 ofp.write(chunk)
2182 2182 ofp.close()
2183 2183 elapsed = time.time() - start
2184 2184 if elapsed <= 0:
2185 2185 elapsed = 0.001
2186 2186 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2187 2187 (util.bytecount(total_bytes), elapsed,
2188 2188 util.bytecount(total_bytes / elapsed)))
2189 2189 self.invalidate()
2190 2190 return len(self.heads()) + 1
2191 2191
2192 2192 def clone(self, remote, heads=[], stream=False):
2193 2193 '''clone remote repository.
2194 2194
2195 2195 keyword arguments:
2196 2196 heads: list of revs to clone (forces use of pull)
2197 2197 stream: use streaming clone if possible'''
2198 2198
2199 2199 # now, all clients that can request uncompressed clones can
2200 2200 # read repo formats supported by all servers that can serve
2201 2201 # them.
2202 2202
2203 2203 # if revlog format changes, client will have to check version
2204 2204 # and format flags on "stream" capability, and use
2205 2205 # uncompressed only if compatible.
2206 2206
2207 2207 if stream and not heads and remote.capable('stream'):
2208 2208 return self.stream_in(remote)
2209 2209 return self.pull(remote, heads)
2210 2210
2211 2211 # used to avoid circular references so destructors work
2212 2212 def aftertrans(files):
2213 2213 renamefiles = [tuple(t) for t in files]
2214 2214 def a():
2215 2215 for src, dest in renamefiles:
2216 2216 util.rename(src, dest)
2217 2217 return a
2218 2218
2219 2219 def instance(ui, path, create):
2220 2220 return localrepository(ui, util.drop_scheme('file', path), create)
2221 2221
2222 2222 def islocal(path):
2223 2223 return True
@@ -1,370 +1,373 b''
1 1 # subrepo.py - sub-repository handling for Mercurial
2 2 #
3 3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import errno, os, re, xml.dom.minidom, shutil
9 9 from i18n import _
10 10 import config, util, node, error
11 11 hg = None
12 12
13 13 nullstate = ('', '', 'empty')
14 14
15 15 def state(ctx):
16 16 p = config.config()
17 17 def read(f, sections=None, remap=None):
18 18 if f in ctx:
19 19 p.parse(f, ctx[f].data(), sections, remap, read)
20 20 else:
21 21 raise util.Abort(_("subrepo spec file %s not found") % f)
22 22
23 23 if '.hgsub' in ctx:
24 24 read('.hgsub')
25 25
26 26 rev = {}
27 27 if '.hgsubstate' in ctx:
28 28 try:
29 29 for l in ctx['.hgsubstate'].data().splitlines():
30 30 revision, path = l.split(" ", 1)
31 31 rev[path] = revision
32 32 except IOError, err:
33 33 if err.errno != errno.ENOENT:
34 34 raise
35 35
36 36 state = {}
37 37 for path, src in p[''].items():
38 38 kind = 'hg'
39 39 if src.startswith('['):
40 40 if ']' not in src:
41 41 raise util.Abort(_('missing ] in subrepo source'))
42 42 kind, src = src.split(']', 1)
43 43 kind = kind[1:]
44 44 state[path] = (src.strip(), rev.get(path, ''), kind)
45 45
46 46 return state
47 47
48 48 def writestate(repo, state):
49 49 repo.wwrite('.hgsubstate',
50 50 ''.join(['%s %s\n' % (state[s][1], s)
51 51 for s in sorted(state)]), '')
52 52
53 53 def submerge(repo, wctx, mctx, actx):
54 54 # working context, merging context, ancestor context
55 55 if mctx == actx: # backwards?
56 56 actx = wctx.p1()
57 57 s1 = wctx.substate
58 58 s2 = mctx.substate
59 59 sa = actx.substate
60 60 sm = {}
61 61
62 62 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
63 63
64 64 def debug(s, msg, r=""):
65 65 if r:
66 66 r = "%s:%s:%s" % r
67 67 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
68 68
69 69 for s, l in s1.items():
70 70 if wctx != actx and wctx.sub(s).dirty():
71 71 l = (l[0], l[1] + "+")
72 72 a = sa.get(s, nullstate)
73 73 if s in s2:
74 74 r = s2[s]
75 75 if l == r or r == a: # no change or local is newer
76 76 sm[s] = l
77 77 continue
78 78 elif l == a: # other side changed
79 79 debug(s, "other changed, get", r)
80 80 wctx.sub(s).get(r)
81 81 sm[s] = r
82 82 elif l[0] != r[0]: # sources differ
83 83 if repo.ui.promptchoice(
84 84 _(' subrepository sources for %s differ\n'
85 85 'use (l)ocal source (%s) or (r)emote source (%s)?')
86 86 % (s, l[0], r[0]),
87 87 (_('&Local'), _('&Remote')), 0):
88 88 debug(s, "prompt changed, get", r)
89 89 wctx.sub(s).get(r)
90 90 sm[s] = r
91 91 elif l[1] == a[1]: # local side is unchanged
92 92 debug(s, "other side changed, get", r)
93 93 wctx.sub(s).get(r)
94 94 sm[s] = r
95 95 else:
96 96 debug(s, "both sides changed, merge with", r)
97 97 wctx.sub(s).merge(r)
98 98 sm[s] = l
99 99 elif l == a: # remote removed, local unchanged
100 100 debug(s, "remote removed, remove")
101 101 wctx.sub(s).remove()
102 102 else:
103 103 if repo.ui.promptchoice(
104 104 _(' local changed subrepository %s which remote removed\n'
105 105 'use (c)hanged version or (d)elete?') % s,
106 106 (_('&Changed'), _('&Delete')), 0):
107 107 debug(s, "prompt remove")
108 108 wctx.sub(s).remove()
109 109
110 110 for s, r in s2.items():
111 111 if s in s1:
112 112 continue
113 113 elif s not in sa:
114 114 debug(s, "remote added, get", r)
115 115 mctx.sub(s).get(r)
116 116 sm[s] = r
117 117 elif r != sa[s]:
118 118 if repo.ui.promptchoice(
119 119 _(' remote changed subrepository %s which local removed\n'
120 120 'use (c)hanged version or (d)elete?') % s,
121 121 (_('&Changed'), _('&Delete')), 0) == 0:
122 122 debug(s, "prompt recreate", r)
123 123 wctx.sub(s).get(r)
124 124 sm[s] = r
125 125
126 126 # record merged .hgsubstate
127 127 writestate(repo, sm)
128 128
129 129 def _abssource(repo, push=False):
130 130 if hasattr(repo, '_subparent'):
131 131 source = repo._subsource
132 132 if source.startswith('/') or '://' in source:
133 133 return source
134 134 parent = _abssource(repo._subparent, push)
135 135 if '://' in parent:
136 136 if parent[-1] == '/':
137 137 parent = parent[:-1]
138 138 return parent + '/' + source
139 139 return os.path.join(parent, repo._subsource)
140 140 if push and repo.ui.config('paths', 'default-push'):
141 141 return repo.ui.config('paths', 'default-push', repo.root)
142 142 return repo.ui.config('paths', 'default', repo.root)
143 143
144 144 def subrepo(ctx, path):
145 145 # subrepo inherently violates our import layering rules
146 146 # because it wants to make repo objects from deep inside the stack
147 147 # so we manually delay the circular imports to not break
148 148 # scripts that don't use our demand-loading
149 149 global hg
150 150 import hg as h
151 151 hg = h
152 152
153 153 util.path_auditor(ctx._repo.root)(path)
154 154 state = ctx.substate.get(path, nullstate)
155 155 if state[2] not in types:
156 156 raise util.Abort(_('unknown subrepo type %s') % state[2])
157 157 return types[state[2]](ctx, path, state[:2])
158 158
159 159 # subrepo classes need to implement the following methods:
160 160 # __init__(self, ctx, path, state)
161 161 # dirty(self): returns true if the dirstate of the subrepo
162 162 # does not match current stored state
163 163 # commit(self, text, user, date): commit the current changes
164 164 # to the subrepo with the given log message. Use given
165 165 # user and date if possible. Return the new state of the subrepo.
166 166 # remove(self): remove the subrepo (should verify the dirstate
167 167 # is not dirty first)
168 168 # get(self, state): run whatever commands are needed to put the
169 169 # subrepo into this state
170 170 # merge(self, state): merge currently-saved state with the new state.
171 171 # push(self, force): perform whatever action is analagous to 'hg push'
172 172 # This may be a no-op on some systems.
173 173
174 174 class hgsubrepo(object):
175 175 def __init__(self, ctx, path, state):
176 176 self._path = path
177 177 self._state = state
178 178 r = ctx._repo
179 179 root = r.wjoin(path)
180 180 create = False
181 181 if not os.path.exists(os.path.join(root, '.hg')):
182 182 create = True
183 183 util.makedirs(root)
184 184 self._repo = hg.repository(r.ui, root, create=create)
185 185 self._repo._subparent = r
186 186 self._repo._subsource = state[0]
187 187
188 188 if create:
189 189 fp = self._repo.opener("hgrc", "w", text=True)
190 190 fp.write('[paths]\n')
191 191
192 192 def addpathconfig(key, value):
193 193 fp.write('%s = %s\n' % (key, value))
194 194 self._repo.ui.setconfig('paths', key, value)
195 195
196 defpath = os.path.join(_abssource(ctx._repo), path)
196 defpath = _abssource(self._repo)
197 defpushpath = _abssource(self._repo, True)
197 198 addpathconfig('default', defpath)
199 if defpath != defpushpath:
200 addpathconfig('default-push', defpushpath)
198 201 fp.close()
199 202
200 203 def dirty(self):
201 204 r = self._state[1]
202 205 if r == '':
203 206 return True
204 207 w = self._repo[None]
205 208 if w.p1() != self._repo[r]: # version checked out change
206 209 return True
207 210 return w.dirty() # working directory changed
208 211
209 212 def commit(self, text, user, date):
210 213 self._repo.ui.debug("committing subrepo %s\n" % self._path)
211 214 n = self._repo.commit(text, user, date)
212 215 if not n:
213 216 return self._repo['.'].hex() # different version checked out
214 217 return node.hex(n)
215 218
216 219 def remove(self):
217 220 # we can't fully delete the repository as it may contain
218 221 # local-only history
219 222 self._repo.ui.note(_('removing subrepo %s\n') % self._path)
220 223 hg.clean(self._repo, node.nullid, False)
221 224
222 225 def _get(self, state):
223 226 source, revision, kind = state
224 227 try:
225 228 self._repo.lookup(revision)
226 229 except error.RepoError:
227 230 self._repo._subsource = source
228 231 srcurl = _abssource(self._repo)
229 232 self._repo.ui.status(_('pulling subrepo %s from %s\n')
230 233 % (self._path, srcurl))
231 234 other = hg.repository(self._repo.ui, srcurl)
232 235 self._repo.pull(other)
233 236
234 237 def get(self, state):
235 238 self._get(state)
236 239 source, revision, kind = state
237 240 self._repo.ui.debug("getting subrepo %s\n" % self._path)
238 241 hg.clean(self._repo, revision, False)
239 242
240 243 def merge(self, state):
241 244 self._get(state)
242 245 cur = self._repo['.']
243 246 dst = self._repo[state[1]]
244 247 anc = dst.ancestor(cur)
245 248 if anc == cur:
246 249 self._repo.ui.debug("updating subrepo %s\n" % self._path)
247 250 hg.update(self._repo, state[1])
248 251 elif anc == dst:
249 252 self._repo.ui.debug("skipping subrepo %s\n" % self._path)
250 253 else:
251 254 self._repo.ui.debug("merging subrepo %s\n" % self._path)
252 255 hg.merge(self._repo, state[1], remind=False)
253 256
254 257 def push(self, force):
255 258 # push subrepos depth-first for coherent ordering
256 259 c = self._repo['']
257 260 subs = c.substate # only repos that are committed
258 261 for s in sorted(subs):
259 262 c.sub(s).push(force)
260 263
261 264 self._repo.ui.status(_('pushing subrepo %s\n') % self._path)
262 265 dsturl = _abssource(self._repo, True)
263 266 other = hg.repository(self._repo.ui, dsturl)
264 267 self._repo.push(other, force)
265 268
266 269 class svnsubrepo(object):
267 270 def __init__(self, ctx, path, state):
268 271 self._path = path
269 272 self._state = state
270 273 self._ctx = ctx
271 274 self._ui = ctx._repo.ui
272 275
273 276 def _svncommand(self, commands):
274 277 cmd = ['svn'] + commands + [self._path]
275 278 cmd = [util.shellquote(arg) for arg in cmd]
276 279 cmd = util.quotecommand(' '.join(cmd))
277 280 env = dict(os.environ)
278 281 # Avoid localized output, preserve current locale for everything else.
279 282 env['LC_MESSAGES'] = 'C'
280 283 write, read, err = util.popen3(cmd, env=env, newlines=True)
281 284 retdata = read.read()
282 285 err = err.read().strip()
283 286 if err:
284 287 raise util.Abort(err)
285 288 return retdata
286 289
287 290 def _wcrev(self):
288 291 output = self._svncommand(['info', '--xml'])
289 292 doc = xml.dom.minidom.parseString(output)
290 293 entries = doc.getElementsByTagName('entry')
291 294 if not entries:
292 295 return 0
293 296 return int(entries[0].getAttribute('revision') or 0)
294 297
295 298 def _wcchanged(self):
296 299 """Return (changes, extchanges) where changes is True
297 300 if the working directory was changed, and extchanges is
298 301 True if any of these changes concern an external entry.
299 302 """
300 303 output = self._svncommand(['status', '--xml'])
301 304 externals, changes = [], []
302 305 doc = xml.dom.minidom.parseString(output)
303 306 for e in doc.getElementsByTagName('entry'):
304 307 s = e.getElementsByTagName('wc-status')
305 308 if not s:
306 309 continue
307 310 item = s[0].getAttribute('item')
308 311 props = s[0].getAttribute('props')
309 312 path = e.getAttribute('path')
310 313 if item == 'external':
311 314 externals.append(path)
312 315 if (item not in ('', 'normal', 'unversioned', 'external')
313 316 or props not in ('', 'none')):
314 317 changes.append(path)
315 318 for path in changes:
316 319 for ext in externals:
317 320 if path == ext or path.startswith(ext + os.sep):
318 321 return True, True
319 322 return bool(changes), False
320 323
321 324 def dirty(self):
322 325 if self._wcrev() == self._state[1] and not self._wcchanged()[0]:
323 326 return False
324 327 return True
325 328
326 329 def commit(self, text, user, date):
327 330 # user and date are out of our hands since svn is centralized
328 331 changed, extchanged = self._wcchanged()
329 332 if not changed:
330 333 return self._wcrev()
331 334 if extchanged:
332 335 # Do not try to commit externals
333 336 raise util.Abort(_('cannot commit svn externals'))
334 337 commitinfo = self._svncommand(['commit', '-m', text])
335 338 self._ui.status(commitinfo)
336 339 newrev = re.search('Committed revision ([\d]+).', commitinfo)
337 340 if not newrev:
338 341 raise util.Abort(commitinfo.splitlines()[-1])
339 342 newrev = newrev.groups()[0]
340 343 self._ui.status(self._svncommand(['update', '-r', newrev]))
341 344 return newrev
342 345
343 346 def remove(self):
344 347 if self.dirty():
345 348 self._ui.warn(_('not removing repo %s because '
346 349 'it has changes.\n' % self._path))
347 350 return
348 351 self._ui.note(_('removing subrepo %s\n') % self._path)
349 352 shutil.rmtree(self._ctx.repo.join(self._path))
350 353
351 354 def get(self, state):
352 355 status = self._svncommand(['checkout', state[0], '--revision', state[1]])
353 356 if not re.search('Checked out revision [\d]+.', status):
354 357 raise util.Abort(status.splitlines()[-1])
355 358 self._ui.status(status)
356 359
357 360 def merge(self, state):
358 361 old = int(self._state[1])
359 362 new = int(state[1])
360 363 if new > old:
361 364 self.get(state)
362 365
363 366 def push(self, force):
364 367 # nothing for svn
365 368 pass
366 369
367 370 types = {
368 371 'hg': hgsubrepo,
369 372 'svn': svnsubrepo,
370 373 }
@@ -1,298 +1,298 b''
1 1 # verify.py - repository integrity checking for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import nullid, short
9 9 from i18n import _
10 10 import os
11 11 import revlog, util, error
12 12
13 13 def verify(repo):
14 14 lock = repo.lock()
15 15 try:
16 16 return _verify(repo)
17 17 finally:
18 18 lock.release()
19 19
20 20 def _verify(repo):
21 21 mflinkrevs = {}
22 22 filelinkrevs = {}
23 23 filenodes = {}
24 24 revisions = 0
25 25 badrevs = set()
26 26 errors = [0]
27 27 warnings = [0]
28 28 ui = repo.ui
29 29 cl = repo.changelog
30 30 mf = repo.manifest
31 31 lrugetctx = util.lrucachefunc(repo.changectx)
32 32
33 33 if not repo.cancopy():
34 34 raise util.Abort(_("cannot verify bundle or remote repos"))
35 35
36 36 def err(linkrev, msg, filename=None):
37 37 if linkrev != None:
38 38 badrevs.add(linkrev)
39 39 else:
40 40 linkrev = '?'
41 41 msg = "%s: %s" % (linkrev, msg)
42 42 if filename:
43 43 msg = "%s@%s" % (filename, msg)
44 44 ui.warn(" " + msg + "\n")
45 45 errors[0] += 1
46 46
47 47 def exc(linkrev, msg, inst, filename=None):
48 48 if isinstance(inst, KeyboardInterrupt):
49 49 ui.warn(_("interrupted"))
50 50 raise
51 51 err(linkrev, "%s: %s" % (msg, inst), filename)
52 52
53 53 def warn(msg):
54 54 ui.warn(msg + "\n")
55 55 warnings[0] += 1
56 56
57 57 def checklog(obj, name, linkrev):
58 58 if not len(obj) and (havecl or havemf):
59 59 err(linkrev, _("empty or missing %s") % name)
60 60 return
61 61
62 62 d = obj.checksize()
63 63 if d[0]:
64 64 err(None, _("data length off by %d bytes") % d[0], name)
65 65 if d[1]:
66 66 err(None, _("index contains %d extra bytes") % d[1], name)
67 67
68 68 if obj.version != revlog.REVLOGV0:
69 69 if not revlogv1:
70 70 warn(_("warning: `%s' uses revlog format 1") % name)
71 71 elif revlogv1:
72 72 warn(_("warning: `%s' uses revlog format 0") % name)
73 73
74 74 def checkentry(obj, i, node, seen, linkrevs, f):
75 75 lr = obj.linkrev(obj.rev(node))
76 76 if lr < 0 or (havecl and lr not in linkrevs):
77 77 if lr < 0 or lr >= len(cl):
78 78 msg = _("rev %d points to nonexistent changeset %d")
79 79 else:
80 80 msg = _("rev %d points to unexpected changeset %d")
81 81 err(None, msg % (i, lr), f)
82 82 if linkrevs:
83 83 if f and len(linkrevs) > 1:
84 84 try:
85 85 # attempt to filter down to real linkrevs
86 86 linkrevs = [l for l in linkrevs
87 87 if lrugetctx(l)[f].filenode() == node]
88 88 except:
89 89 pass
90 90 warn(_(" (expected %s)") % " ".join(map(str, linkrevs)))
91 91 lr = None # can't be trusted
92 92
93 93 try:
94 94 p1, p2 = obj.parents(node)
95 95 if p1 not in seen and p1 != nullid:
96 96 err(lr, _("unknown parent 1 %s of %s") %
97 97 (short(p1), short(n)), f)
98 98 if p2 not in seen and p2 != nullid:
99 99 err(lr, _("unknown parent 2 %s of %s") %
100 100 (short(p2), short(p1)), f)
101 101 except Exception, inst:
102 102 exc(lr, _("checking parents of %s") % short(node), inst, f)
103 103
104 104 if node in seen:
105 105 err(lr, _("duplicate revision %d (%d)") % (i, seen[n]), f)
106 106 seen[n] = i
107 107 return lr
108 108
109 109 if os.path.exists(repo.sjoin("journal")):
110 110 ui.warn(_("abandoned transaction found - run hg recover\n"))
111 111
112 112 revlogv1 = cl.version != revlog.REVLOGV0
113 113 if ui.verbose or not revlogv1:
114 114 ui.status(_("repository uses revlog format %d\n") %
115 115 (revlogv1 and 1 or 0))
116 116
117 117 havecl = len(cl) > 0
118 118 havemf = len(mf) > 0
119 119
120 120 ui.status(_("checking changesets\n"))
121 121 seen = {}
122 122 checklog(cl, "changelog", 0)
123 123 total = len(repo)
124 124 for i in repo:
125 ui.progress(_('changelog'), i, total=total)
125 ui.progress(_('checking'), i, total=total)
126 126 n = cl.node(i)
127 127 checkentry(cl, i, n, seen, [i], "changelog")
128 128
129 129 try:
130 130 changes = cl.read(n)
131 131 mflinkrevs.setdefault(changes[0], []).append(i)
132 132 for f in changes[3]:
133 133 filelinkrevs.setdefault(f, []).append(i)
134 134 except Exception, inst:
135 135 exc(i, _("unpacking changeset %s") % short(n), inst)
136 ui.progress(_('changelog'), None)
136 ui.progress(_('checking'), None)
137 137
138 138 ui.status(_("checking manifests\n"))
139 139 seen = {}
140 140 checklog(mf, "manifest", 0)
141 141 total = len(mf)
142 142 for i in mf:
143 ui.progress(_('manifests'), i, total=total)
143 ui.progress(_('checking'), i, total=total)
144 144 n = mf.node(i)
145 145 lr = checkentry(mf, i, n, seen, mflinkrevs.get(n, []), "manifest")
146 146 if n in mflinkrevs:
147 147 del mflinkrevs[n]
148 148 else:
149 149 err(lr, _("%s not in changesets") % short(n), "manifest")
150 150
151 151 try:
152 152 for f, fn in mf.readdelta(n).iteritems():
153 153 if not f:
154 154 err(lr, _("file without name in manifest"))
155 155 elif f != "/dev/null":
156 156 filenodes.setdefault(f, {}).setdefault(fn, lr)
157 157 except Exception, inst:
158 158 exc(lr, _("reading manifest delta %s") % short(n), inst)
159 ui.progress(_('manifests'), None)
159 ui.progress(_('checking'), None)
160 160
161 161 ui.status(_("crosschecking files in changesets and manifests\n"))
162 162
163 163 total = len(mflinkrevs) + len(filelinkrevs) + len(filenodes)
164 164 count = 0
165 165 if havemf:
166 166 for c, m in sorted([(c, m) for m in mflinkrevs
167 167 for c in mflinkrevs[m]]):
168 168 count += 1
169 ui.progress(_('crosscheck'), count, total=total)
169 ui.progress(_('crosschecking'), count, total=total)
170 170 err(c, _("changeset refers to unknown manifest %s") % short(m))
171 171 mflinkrevs = None # del is bad here due to scope issues
172 172
173 173 for f in sorted(filelinkrevs):
174 174 count += 1
175 ui.progress(_('crosscheck'), count, total=total)
175 ui.progress(_('crosschecking'), count, total=total)
176 176 if f not in filenodes:
177 177 lr = filelinkrevs[f][0]
178 178 err(lr, _("in changeset but not in manifest"), f)
179 179
180 180 if havecl:
181 181 for f in sorted(filenodes):
182 182 count += 1
183 ui.progress(_('crosscheck'), count, total=total)
183 ui.progress(_('crosschecking'), count, total=total)
184 184 if f not in filelinkrevs:
185 185 try:
186 186 fl = repo.file(f)
187 187 lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]])
188 188 except:
189 189 lr = None
190 190 err(lr, _("in manifest but not in changeset"), f)
191 191
192 ui.progress(_('crosscheck'), None)
192 ui.progress(_('crosschecking'), None)
193 193
194 194 ui.status(_("checking files\n"))
195 195
196 196 storefiles = set()
197 197 for f, f2, size in repo.store.datafiles():
198 198 if not f:
199 199 err(None, _("cannot decode filename '%s'") % f2)
200 200 elif size > 0:
201 201 storefiles.add(f)
202 202
203 203 files = sorted(set(filenodes) | set(filelinkrevs))
204 204 total = len(files)
205 205 for i, f in enumerate(files):
206 ui.progress(_('files'), i, item=f, total=total)
206 ui.progress(_('checking'), i, item=f, total=total)
207 207 try:
208 208 linkrevs = filelinkrevs[f]
209 209 except KeyError:
210 210 # in manifest but not in changelog
211 211 linkrevs = []
212 212
213 213 if linkrevs:
214 214 lr = linkrevs[0]
215 215 else:
216 216 lr = None
217 217
218 218 try:
219 219 fl = repo.file(f)
220 220 except error.RevlogError, e:
221 221 err(lr, _("broken revlog! (%s)") % e, f)
222 222 continue
223 223
224 224 for ff in fl.files():
225 225 try:
226 226 storefiles.remove(ff)
227 227 except KeyError:
228 228 err(lr, _("missing revlog!"), ff)
229 229
230 230 checklog(fl, f, lr)
231 231 seen = {}
232 232 for i in fl:
233 233 revisions += 1
234 234 n = fl.node(i)
235 235 lr = checkentry(fl, i, n, seen, linkrevs, f)
236 236 if f in filenodes:
237 237 if havemf and n not in filenodes[f]:
238 238 err(lr, _("%s not in manifests") % (short(n)), f)
239 239 else:
240 240 del filenodes[f][n]
241 241
242 242 # verify contents
243 243 try:
244 244 t = fl.read(n)
245 245 rp = fl.renamed(n)
246 246 if len(t) != fl.size(i):
247 247 if len(fl.revision(n)) != fl.size(i):
248 248 err(lr, _("unpacked size is %s, %s expected") %
249 249 (len(t), fl.size(i)), f)
250 250 except Exception, inst:
251 251 exc(lr, _("unpacking %s") % short(n), inst, f)
252 252
253 253 # check renames
254 254 try:
255 255 if rp:
256 256 if lr is not None and ui.verbose:
257 257 ctx = lrugetctx(lr)
258 258 found = False
259 259 for pctx in ctx.parents():
260 260 if rp[0] in pctx:
261 261 found = True
262 262 break
263 263 if not found:
264 264 warn(_("warning: copy source of '%s' not"
265 265 " in parents of %s") % (f, ctx))
266 266 fl2 = repo.file(rp[0])
267 267 if not len(fl2):
268 268 err(lr, _("empty or missing copy source revlog %s:%s")
269 269 % (rp[0], short(rp[1])), f)
270 270 elif rp[1] == nullid:
271 271 ui.note(_("warning: %s@%s: copy source"
272 272 " revision is nullid %s:%s\n")
273 273 % (f, lr, rp[0], short(rp[1])))
274 274 else:
275 275 fl2.rev(rp[1])
276 276 except Exception, inst:
277 277 exc(lr, _("checking rename of %s") % short(n), inst, f)
278 278
279 279 # cross-check
280 280 if f in filenodes:
281 281 fns = [(lr, n) for n, lr in filenodes[f].iteritems()]
282 282 for lr, node in sorted(fns):
283 283 err(lr, _("%s in manifests not found") % short(node), f)
284 ui.progress(_('files'), None)
284 ui.progress(_('checking'), None)
285 285
286 286 for f in storefiles:
287 287 warn(_("warning: orphan revlog '%s'") % f)
288 288
289 289 ui.status(_("%d files, %d changesets, %d total revisions\n") %
290 290 (len(files), len(cl), revisions))
291 291 if warnings[0]:
292 292 ui.warn(_("%d warnings encountered!\n") % warnings[0])
293 293 if errors[0]:
294 294 ui.warn(_("%d integrity errors encountered!\n") % errors[0])
295 295 if badrevs:
296 296 ui.warn(_("(first damaged changeset appears to be %d)\n")
297 297 % min(badrevs))
298 298 return 1
This diff has been collapsed as it changes many lines, (896 lines changed) Show them Hide them
@@ -1,1165 +1,1165 b''
1 1 3:911600dab2ae
2 2 requesting all changes
3 3 adding changesets
4 4 adding manifests
5 5 adding file changes
6 6 added 1 changesets with 3 changes to 3 files
7 7 updating to branch default
8 8 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
9 9
10 10 Extension disabled for lack of a hook
11 11 Pushing as user fred
12 12 hgrc = """
13 13 """
14 14 pushing to ../b
15 15 searching for changes
16 16 common changesets up to 6675d58eff77
17 17 3 changesets found
18 18 list of changesets:
19 19 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
20 20 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
21 21 911600dab2ae7a9baff75958b84fe606851ce955
22 22 adding changesets
23 bundle changes: 0 chunks
24 bundle changes: 1 chunks
25 bundle changes: 2 chunks
26 bundle changes: 3 chunks
27 bundle changes: 4 chunks
28 bundle changes: 5 chunks
29 bundle changes: 6 chunks
30 bundle changes: 7 chunks
31 bundle changes: 8 chunks
32 bundle changes: 9 chunks
33 bundle manifests: 0 chunks
34 bundle manifests: 1 chunks
35 bundle manifests: 2 chunks
36 bundle manifests: 3 chunks
37 bundle manifests: 4 chunks
38 bundle manifests: 5 chunks
39 bundle manifests: 6 chunks
40 bundle manifests: 7 chunks
41 bundle manifests: 8 chunks
42 bundle manifests: 9 chunks
43 bundle files: foo/Bar/file.txt 0 chunks
44 bundle files: foo/Bar/file.txt 1 chunks
45 bundle files: foo/Bar/file.txt 2 chunks
46 bundle files: foo/Bar/file.txt 3 chunks
47 bundle files: foo/file.txt 4 chunks
48 bundle files: foo/file.txt 5 chunks
49 bundle files: foo/file.txt 6 chunks
50 bundle files: foo/file.txt 7 chunks
51 bundle files: quux/file.py 8 chunks
52 bundle files: quux/file.py 9 chunks
53 bundle files: quux/file.py 10 chunks
54 bundle files: quux/file.py 11 chunks
23 bundling changes: 0 chunks
24 bundling changes: 1 chunks
25 bundling changes: 2 chunks
26 bundling changes: 3 chunks
27 bundling changes: 4 chunks
28 bundling changes: 5 chunks
29 bundling changes: 6 chunks
30 bundling changes: 7 chunks
31 bundling changes: 8 chunks
32 bundling changes: 9 chunks
33 bundling manifests: 0 chunks
34 bundling manifests: 1 chunks
35 bundling manifests: 2 chunks
36 bundling manifests: 3 chunks
37 bundling manifests: 4 chunks
38 bundling manifests: 5 chunks
39 bundling manifests: 6 chunks
40 bundling manifests: 7 chunks
41 bundling manifests: 8 chunks
42 bundling manifests: 9 chunks
43 bundling files: foo/Bar/file.txt 0 chunks
44 bundling files: foo/Bar/file.txt 1 chunks
45 bundling files: foo/Bar/file.txt 2 chunks
46 bundling files: foo/Bar/file.txt 3 chunks
47 bundling files: foo/file.txt 4 chunks
48 bundling files: foo/file.txt 5 chunks
49 bundling files: foo/file.txt 6 chunks
50 bundling files: foo/file.txt 7 chunks
51 bundling files: quux/file.py 8 chunks
52 bundling files: quux/file.py 9 chunks
53 bundling files: quux/file.py 10 chunks
54 bundling files: quux/file.py 11 chunks
55 55 changesets: 1 chunks
56 56 add changeset ef1ea85a6374
57 57 changesets: 2 chunks
58 58 add changeset f9cafe1212c8
59 59 changesets: 3 chunks
60 60 add changeset 911600dab2ae
61 61 adding manifests
62 62 manifests: 1 chunks
63 63 manifests: 2 chunks
64 64 manifests: 3 chunks
65 65 adding file changes
66 66 adding foo/Bar/file.txt revisions
67 67 files: 1 chunks
68 68 adding foo/file.txt revisions
69 69 files: 2 chunks
70 70 adding quux/file.py revisions
71 71 files: 3 chunks
72 72 added 3 changesets with 3 changes to 3 files
73 73 updating the branch cache
74 74 rolling back last transaction
75 75 0:6675d58eff77
76 76
77 77 Extension disabled for lack of acl.sources
78 78 Pushing as user fred
79 79 hgrc = """
80 80 [hooks]
81 81 pretxnchangegroup.acl = python:hgext.acl.hook
82 82 """
83 83 pushing to ../b
84 84 searching for changes
85 85 common changesets up to 6675d58eff77
86 86 invalidating branch cache (tip differs)
87 87 3 changesets found
88 88 list of changesets:
89 89 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
90 90 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
91 91 911600dab2ae7a9baff75958b84fe606851ce955
92 92 adding changesets
93 bundle changes: 0 chunks
94 bundle changes: 1 chunks
95 bundle changes: 2 chunks
96 bundle changes: 3 chunks
97 bundle changes: 4 chunks
98 bundle changes: 5 chunks
99 bundle changes: 6 chunks
100 bundle changes: 7 chunks
101 bundle changes: 8 chunks
102 bundle changes: 9 chunks
103 bundle manifests: 0 chunks
104 bundle manifests: 1 chunks
105 bundle manifests: 2 chunks
106 bundle manifests: 3 chunks
107 bundle manifests: 4 chunks
108 bundle manifests: 5 chunks
109 bundle manifests: 6 chunks
110 bundle manifests: 7 chunks
111 bundle manifests: 8 chunks
112 bundle manifests: 9 chunks
113 bundle files: foo/Bar/file.txt 0 chunks
114 bundle files: foo/Bar/file.txt 1 chunks
115 bundle files: foo/Bar/file.txt 2 chunks
116 bundle files: foo/Bar/file.txt 3 chunks
117 bundle files: foo/file.txt 4 chunks
118 bundle files: foo/file.txt 5 chunks
119 bundle files: foo/file.txt 6 chunks
120 bundle files: foo/file.txt 7 chunks
121 bundle files: quux/file.py 8 chunks
122 bundle files: quux/file.py 9 chunks
123 bundle files: quux/file.py 10 chunks
124 bundle files: quux/file.py 11 chunks
93 bundling changes: 0 chunks
94 bundling changes: 1 chunks
95 bundling changes: 2 chunks
96 bundling changes: 3 chunks
97 bundling changes: 4 chunks
98 bundling changes: 5 chunks
99 bundling changes: 6 chunks
100 bundling changes: 7 chunks
101 bundling changes: 8 chunks
102 bundling changes: 9 chunks
103 bundling manifests: 0 chunks
104 bundling manifests: 1 chunks
105 bundling manifests: 2 chunks
106 bundling manifests: 3 chunks
107 bundling manifests: 4 chunks
108 bundling manifests: 5 chunks
109 bundling manifests: 6 chunks
110 bundling manifests: 7 chunks
111 bundling manifests: 8 chunks
112 bundling manifests: 9 chunks
113 bundling files: foo/Bar/file.txt 0 chunks
114 bundling files: foo/Bar/file.txt 1 chunks
115 bundling files: foo/Bar/file.txt 2 chunks
116 bundling files: foo/Bar/file.txt 3 chunks
117 bundling files: foo/file.txt 4 chunks
118 bundling files: foo/file.txt 5 chunks
119 bundling files: foo/file.txt 6 chunks
120 bundling files: foo/file.txt 7 chunks
121 bundling files: quux/file.py 8 chunks
122 bundling files: quux/file.py 9 chunks
123 bundling files: quux/file.py 10 chunks
124 bundling files: quux/file.py 11 chunks
125 125 changesets: 1 chunks
126 126 add changeset ef1ea85a6374
127 127 changesets: 2 chunks
128 128 add changeset f9cafe1212c8
129 129 changesets: 3 chunks
130 130 add changeset 911600dab2ae
131 131 adding manifests
132 132 manifests: 1 chunks
133 133 manifests: 2 chunks
134 134 manifests: 3 chunks
135 135 adding file changes
136 136 adding foo/Bar/file.txt revisions
137 137 files: 1 chunks
138 138 adding foo/file.txt revisions
139 139 files: 2 chunks
140 140 adding quux/file.py revisions
141 141 files: 3 chunks
142 142 added 3 changesets with 3 changes to 3 files
143 143 calling hook pretxnchangegroup.acl: hgext.acl.hook
144 144 acl: changes have source "push" - skipping
145 145 updating the branch cache
146 146 rolling back last transaction
147 147 0:6675d58eff77
148 148
149 149 No [acl.allow]/[acl.deny]
150 150 Pushing as user fred
151 151 hgrc = """
152 152 [hooks]
153 153 pretxnchangegroup.acl = python:hgext.acl.hook
154 154 [acl]
155 155 sources = push
156 156 """
157 157 pushing to ../b
158 158 searching for changes
159 159 common changesets up to 6675d58eff77
160 160 invalidating branch cache (tip differs)
161 161 3 changesets found
162 162 list of changesets:
163 163 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
164 164 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
165 165 911600dab2ae7a9baff75958b84fe606851ce955
166 166 adding changesets
167 bundle changes: 0 chunks
168 bundle changes: 1 chunks
169 bundle changes: 2 chunks
170 bundle changes: 3 chunks
171 bundle changes: 4 chunks
172 bundle changes: 5 chunks
173 bundle changes: 6 chunks
174 bundle changes: 7 chunks
175 bundle changes: 8 chunks
176 bundle changes: 9 chunks
177 bundle manifests: 0 chunks
178 bundle manifests: 1 chunks
179 bundle manifests: 2 chunks
180 bundle manifests: 3 chunks
181 bundle manifests: 4 chunks
182 bundle manifests: 5 chunks
183 bundle manifests: 6 chunks
184 bundle manifests: 7 chunks
185 bundle manifests: 8 chunks
186 bundle manifests: 9 chunks
187 bundle files: foo/Bar/file.txt 0 chunks
188 bundle files: foo/Bar/file.txt 1 chunks
189 bundle files: foo/Bar/file.txt 2 chunks
190 bundle files: foo/Bar/file.txt 3 chunks
191 bundle files: foo/file.txt 4 chunks
192 bundle files: foo/file.txt 5 chunks
193 bundle files: foo/file.txt 6 chunks
194 bundle files: foo/file.txt 7 chunks
195 bundle files: quux/file.py 8 chunks
196 bundle files: quux/file.py 9 chunks
197 bundle files: quux/file.py 10 chunks
198 bundle files: quux/file.py 11 chunks
167 bundling changes: 0 chunks
168 bundling changes: 1 chunks
169 bundling changes: 2 chunks
170 bundling changes: 3 chunks
171 bundling changes: 4 chunks
172 bundling changes: 5 chunks
173 bundling changes: 6 chunks
174 bundling changes: 7 chunks
175 bundling changes: 8 chunks
176 bundling changes: 9 chunks
177 bundling manifests: 0 chunks
178 bundling manifests: 1 chunks
179 bundling manifests: 2 chunks
180 bundling manifests: 3 chunks
181 bundling manifests: 4 chunks
182 bundling manifests: 5 chunks
183 bundling manifests: 6 chunks
184 bundling manifests: 7 chunks
185 bundling manifests: 8 chunks
186 bundling manifests: 9 chunks
187 bundling files: foo/Bar/file.txt 0 chunks
188 bundling files: foo/Bar/file.txt 1 chunks
189 bundling files: foo/Bar/file.txt 2 chunks
190 bundling files: foo/Bar/file.txt 3 chunks
191 bundling files: foo/file.txt 4 chunks
192 bundling files: foo/file.txt 5 chunks
193 bundling files: foo/file.txt 6 chunks
194 bundling files: foo/file.txt 7 chunks
195 bundling files: quux/file.py 8 chunks
196 bundling files: quux/file.py 9 chunks
197 bundling files: quux/file.py 10 chunks
198 bundling files: quux/file.py 11 chunks
199 199 changesets: 1 chunks
200 200 add changeset ef1ea85a6374
201 201 changesets: 2 chunks
202 202 add changeset f9cafe1212c8
203 203 changesets: 3 chunks
204 204 add changeset 911600dab2ae
205 205 adding manifests
206 206 manifests: 1 chunks
207 207 manifests: 2 chunks
208 208 manifests: 3 chunks
209 209 adding file changes
210 210 adding foo/Bar/file.txt revisions
211 211 files: 1 chunks
212 212 adding foo/file.txt revisions
213 213 files: 2 chunks
214 214 adding quux/file.py revisions
215 215 files: 3 chunks
216 216 added 3 changesets with 3 changes to 3 files
217 217 calling hook pretxnchangegroup.acl: hgext.acl.hook
218 218 acl: acl.allow not enabled
219 219 acl: acl.deny not enabled
220 220 acl: allowing changeset ef1ea85a6374
221 221 acl: allowing changeset f9cafe1212c8
222 222 acl: allowing changeset 911600dab2ae
223 223 updating the branch cache
224 224 rolling back last transaction
225 225 0:6675d58eff77
226 226
227 227 Empty [acl.allow]
228 228 Pushing as user fred
229 229 hgrc = """
230 230 [hooks]
231 231 pretxnchangegroup.acl = python:hgext.acl.hook
232 232 [acl]
233 233 sources = push
234 234 [acl.allow]
235 235 """
236 236 pushing to ../b
237 237 searching for changes
238 238 common changesets up to 6675d58eff77
239 239 invalidating branch cache (tip differs)
240 240 3 changesets found
241 241 list of changesets:
242 242 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
243 243 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
244 244 911600dab2ae7a9baff75958b84fe606851ce955
245 245 adding changesets
246 bundle changes: 0 chunks
247 bundle changes: 1 chunks
248 bundle changes: 2 chunks
249 bundle changes: 3 chunks
250 bundle changes: 4 chunks
251 bundle changes: 5 chunks
252 bundle changes: 6 chunks
253 bundle changes: 7 chunks
254 bundle changes: 8 chunks
255 bundle changes: 9 chunks
256 bundle manifests: 0 chunks
257 bundle manifests: 1 chunks
258 bundle manifests: 2 chunks
259 bundle manifests: 3 chunks
260 bundle manifests: 4 chunks
261 bundle manifests: 5 chunks
262 bundle manifests: 6 chunks
263 bundle manifests: 7 chunks
264 bundle manifests: 8 chunks
265 bundle manifests: 9 chunks
266 bundle files: foo/Bar/file.txt 0 chunks
267 bundle files: foo/Bar/file.txt 1 chunks
268 bundle files: foo/Bar/file.txt 2 chunks
269 bundle files: foo/Bar/file.txt 3 chunks
270 bundle files: foo/file.txt 4 chunks
271 bundle files: foo/file.txt 5 chunks
272 bundle files: foo/file.txt 6 chunks
273 bundle files: foo/file.txt 7 chunks
274 bundle files: quux/file.py 8 chunks
275 bundle files: quux/file.py 9 chunks
276 bundle files: quux/file.py 10 chunks
277 bundle files: quux/file.py 11 chunks
246 bundling changes: 0 chunks
247 bundling changes: 1 chunks
248 bundling changes: 2 chunks
249 bundling changes: 3 chunks
250 bundling changes: 4 chunks
251 bundling changes: 5 chunks
252 bundling changes: 6 chunks
253 bundling changes: 7 chunks
254 bundling changes: 8 chunks
255 bundling changes: 9 chunks
256 bundling manifests: 0 chunks
257 bundling manifests: 1 chunks
258 bundling manifests: 2 chunks
259 bundling manifests: 3 chunks
260 bundling manifests: 4 chunks
261 bundling manifests: 5 chunks
262 bundling manifests: 6 chunks
263 bundling manifests: 7 chunks
264 bundling manifests: 8 chunks
265 bundling manifests: 9 chunks
266 bundling files: foo/Bar/file.txt 0 chunks
267 bundling files: foo/Bar/file.txt 1 chunks
268 bundling files: foo/Bar/file.txt 2 chunks
269 bundling files: foo/Bar/file.txt 3 chunks
270 bundling files: foo/file.txt 4 chunks
271 bundling files: foo/file.txt 5 chunks
272 bundling files: foo/file.txt 6 chunks
273 bundling files: foo/file.txt 7 chunks
274 bundling files: quux/file.py 8 chunks
275 bundling files: quux/file.py 9 chunks
276 bundling files: quux/file.py 10 chunks
277 bundling files: quux/file.py 11 chunks
278 278 changesets: 1 chunks
279 279 add changeset ef1ea85a6374
280 280 changesets: 2 chunks
281 281 add changeset f9cafe1212c8
282 282 changesets: 3 chunks
283 283 add changeset 911600dab2ae
284 284 adding manifests
285 285 manifests: 1 chunks
286 286 manifests: 2 chunks
287 287 manifests: 3 chunks
288 288 adding file changes
289 289 adding foo/Bar/file.txt revisions
290 290 files: 1 chunks
291 291 adding foo/file.txt revisions
292 292 files: 2 chunks
293 293 adding quux/file.py revisions
294 294 files: 3 chunks
295 295 added 3 changesets with 3 changes to 3 files
296 296 calling hook pretxnchangegroup.acl: hgext.acl.hook
297 297 acl: acl.allow enabled, 0 entries for user fred
298 298 acl: acl.deny not enabled
299 299 acl: user fred not allowed on foo/file.txt
300 300 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
301 301 transaction abort!
302 302 rollback completed
303 303 abort: acl: access denied for changeset ef1ea85a6374
304 304 no rollback information available
305 305 0:6675d58eff77
306 306
307 307 fred is allowed inside foo/
308 308 Pushing as user fred
309 309 hgrc = """
310 310 [hooks]
311 311 pretxnchangegroup.acl = python:hgext.acl.hook
312 312 [acl]
313 313 sources = push
314 314 [acl.allow]
315 315 foo/** = fred
316 316 """
317 317 pushing to ../b
318 318 searching for changes
319 319 common changesets up to 6675d58eff77
320 320 3 changesets found
321 321 list of changesets:
322 322 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
323 323 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
324 324 911600dab2ae7a9baff75958b84fe606851ce955
325 325 adding changesets
326 bundle changes: 0 chunks
327 bundle changes: 1 chunks
328 bundle changes: 2 chunks
329 bundle changes: 3 chunks
330 bundle changes: 4 chunks
331 bundle changes: 5 chunks
332 bundle changes: 6 chunks
333 bundle changes: 7 chunks
334 bundle changes: 8 chunks
335 bundle changes: 9 chunks
336 bundle manifests: 0 chunks
337 bundle manifests: 1 chunks
338 bundle manifests: 2 chunks
339 bundle manifests: 3 chunks
340 bundle manifests: 4 chunks
341 bundle manifests: 5 chunks
342 bundle manifests: 6 chunks
343 bundle manifests: 7 chunks
344 bundle manifests: 8 chunks
345 bundle manifests: 9 chunks
346 bundle files: foo/Bar/file.txt 0 chunks
347 bundle files: foo/Bar/file.txt 1 chunks
348 bundle files: foo/Bar/file.txt 2 chunks
349 bundle files: foo/Bar/file.txt 3 chunks
350 bundle files: foo/file.txt 4 chunks
351 bundle files: foo/file.txt 5 chunks
352 bundle files: foo/file.txt 6 chunks
353 bundle files: foo/file.txt 7 chunks
354 bundle files: quux/file.py 8 chunks
355 bundle files: quux/file.py 9 chunks
356 bundle files: quux/file.py 10 chunks
357 bundle files: quux/file.py 11 chunks
326 bundling changes: 0 chunks
327 bundling changes: 1 chunks
328 bundling changes: 2 chunks
329 bundling changes: 3 chunks
330 bundling changes: 4 chunks
331 bundling changes: 5 chunks
332 bundling changes: 6 chunks
333 bundling changes: 7 chunks
334 bundling changes: 8 chunks
335 bundling changes: 9 chunks
336 bundling manifests: 0 chunks
337 bundling manifests: 1 chunks
338 bundling manifests: 2 chunks
339 bundling manifests: 3 chunks
340 bundling manifests: 4 chunks
341 bundling manifests: 5 chunks
342 bundling manifests: 6 chunks
343 bundling manifests: 7 chunks
344 bundling manifests: 8 chunks
345 bundling manifests: 9 chunks
346 bundling files: foo/Bar/file.txt 0 chunks
347 bundling files: foo/Bar/file.txt 1 chunks
348 bundling files: foo/Bar/file.txt 2 chunks
349 bundling files: foo/Bar/file.txt 3 chunks
350 bundling files: foo/file.txt 4 chunks
351 bundling files: foo/file.txt 5 chunks
352 bundling files: foo/file.txt 6 chunks
353 bundling files: foo/file.txt 7 chunks
354 bundling files: quux/file.py 8 chunks
355 bundling files: quux/file.py 9 chunks
356 bundling files: quux/file.py 10 chunks
357 bundling files: quux/file.py 11 chunks
358 358 changesets: 1 chunks
359 359 add changeset ef1ea85a6374
360 360 changesets: 2 chunks
361 361 add changeset f9cafe1212c8
362 362 changesets: 3 chunks
363 363 add changeset 911600dab2ae
364 364 adding manifests
365 365 manifests: 1 chunks
366 366 manifests: 2 chunks
367 367 manifests: 3 chunks
368 368 adding file changes
369 369 adding foo/Bar/file.txt revisions
370 370 files: 1 chunks
371 371 adding foo/file.txt revisions
372 372 files: 2 chunks
373 373 adding quux/file.py revisions
374 374 files: 3 chunks
375 375 added 3 changesets with 3 changes to 3 files
376 376 calling hook pretxnchangegroup.acl: hgext.acl.hook
377 377 acl: acl.allow enabled, 1 entries for user fred
378 378 acl: acl.deny not enabled
379 379 acl: allowing changeset ef1ea85a6374
380 380 acl: allowing changeset f9cafe1212c8
381 381 acl: user fred not allowed on quux/file.py
382 382 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
383 383 transaction abort!
384 384 rollback completed
385 385 abort: acl: access denied for changeset 911600dab2ae
386 386 no rollback information available
387 387 0:6675d58eff77
388 388
389 389 Empty [acl.deny]
390 390 Pushing as user barney
391 391 hgrc = """
392 392 [hooks]
393 393 pretxnchangegroup.acl = python:hgext.acl.hook
394 394 [acl]
395 395 sources = push
396 396 [acl.allow]
397 397 foo/** = fred
398 398 [acl.deny]
399 399 """
400 400 pushing to ../b
401 401 searching for changes
402 402 common changesets up to 6675d58eff77
403 403 3 changesets found
404 404 list of changesets:
405 405 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
406 406 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
407 407 911600dab2ae7a9baff75958b84fe606851ce955
408 408 adding changesets
409 bundle changes: 0 chunks
410 bundle changes: 1 chunks
411 bundle changes: 2 chunks
412 bundle changes: 3 chunks
413 bundle changes: 4 chunks
414 bundle changes: 5 chunks
415 bundle changes: 6 chunks
416 bundle changes: 7 chunks
417 bundle changes: 8 chunks
418 bundle changes: 9 chunks
419 bundle manifests: 0 chunks
420 bundle manifests: 1 chunks
421 bundle manifests: 2 chunks
422 bundle manifests: 3 chunks
423 bundle manifests: 4 chunks
424 bundle manifests: 5 chunks
425 bundle manifests: 6 chunks
426 bundle manifests: 7 chunks
427 bundle manifests: 8 chunks
428 bundle manifests: 9 chunks
429 bundle files: foo/Bar/file.txt 0 chunks
430 bundle files: foo/Bar/file.txt 1 chunks
431 bundle files: foo/Bar/file.txt 2 chunks
432 bundle files: foo/Bar/file.txt 3 chunks
433 bundle files: foo/file.txt 4 chunks
434 bundle files: foo/file.txt 5 chunks
435 bundle files: foo/file.txt 6 chunks
436 bundle files: foo/file.txt 7 chunks
437 bundle files: quux/file.py 8 chunks
438 bundle files: quux/file.py 9 chunks
439 bundle files: quux/file.py 10 chunks
440 bundle files: quux/file.py 11 chunks
409 bundling changes: 0 chunks
410 bundling changes: 1 chunks
411 bundling changes: 2 chunks
412 bundling changes: 3 chunks
413 bundling changes: 4 chunks
414 bundling changes: 5 chunks
415 bundling changes: 6 chunks
416 bundling changes: 7 chunks
417 bundling changes: 8 chunks
418 bundling changes: 9 chunks
419 bundling manifests: 0 chunks
420 bundling manifests: 1 chunks
421 bundling manifests: 2 chunks
422 bundling manifests: 3 chunks
423 bundling manifests: 4 chunks
424 bundling manifests: 5 chunks
425 bundling manifests: 6 chunks
426 bundling manifests: 7 chunks
427 bundling manifests: 8 chunks
428 bundling manifests: 9 chunks
429 bundling files: foo/Bar/file.txt 0 chunks
430 bundling files: foo/Bar/file.txt 1 chunks
431 bundling files: foo/Bar/file.txt 2 chunks
432 bundling files: foo/Bar/file.txt 3 chunks
433 bundling files: foo/file.txt 4 chunks
434 bundling files: foo/file.txt 5 chunks
435 bundling files: foo/file.txt 6 chunks
436 bundling files: foo/file.txt 7 chunks
437 bundling files: quux/file.py 8 chunks
438 bundling files: quux/file.py 9 chunks
439 bundling files: quux/file.py 10 chunks
440 bundling files: quux/file.py 11 chunks
441 441 changesets: 1 chunks
442 442 add changeset ef1ea85a6374
443 443 changesets: 2 chunks
444 444 add changeset f9cafe1212c8
445 445 changesets: 3 chunks
446 446 add changeset 911600dab2ae
447 447 adding manifests
448 448 manifests: 1 chunks
449 449 manifests: 2 chunks
450 450 manifests: 3 chunks
451 451 adding file changes
452 452 adding foo/Bar/file.txt revisions
453 453 files: 1 chunks
454 454 adding foo/file.txt revisions
455 455 files: 2 chunks
456 456 adding quux/file.py revisions
457 457 files: 3 chunks
458 458 added 3 changesets with 3 changes to 3 files
459 459 calling hook pretxnchangegroup.acl: hgext.acl.hook
460 460 acl: acl.allow enabled, 0 entries for user barney
461 461 acl: acl.deny enabled, 0 entries for user barney
462 462 acl: user barney not allowed on foo/file.txt
463 463 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
464 464 transaction abort!
465 465 rollback completed
466 466 abort: acl: access denied for changeset ef1ea85a6374
467 467 no rollback information available
468 468 0:6675d58eff77
469 469
470 470 fred is allowed inside foo/, but not foo/bar/ (case matters)
471 471 Pushing as user fred
472 472 hgrc = """
473 473 [hooks]
474 474 pretxnchangegroup.acl = python:hgext.acl.hook
475 475 [acl]
476 476 sources = push
477 477 [acl.allow]
478 478 foo/** = fred
479 479 [acl.deny]
480 480 foo/bar/** = fred
481 481 """
482 482 pushing to ../b
483 483 searching for changes
484 484 common changesets up to 6675d58eff77
485 485 3 changesets found
486 486 list of changesets:
487 487 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
488 488 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
489 489 911600dab2ae7a9baff75958b84fe606851ce955
490 490 adding changesets
491 bundle changes: 0 chunks
492 bundle changes: 1 chunks
493 bundle changes: 2 chunks
494 bundle changes: 3 chunks
495 bundle changes: 4 chunks
496 bundle changes: 5 chunks
497 bundle changes: 6 chunks
498 bundle changes: 7 chunks
499 bundle changes: 8 chunks
500 bundle changes: 9 chunks
501 bundle manifests: 0 chunks
502 bundle manifests: 1 chunks
503 bundle manifests: 2 chunks
504 bundle manifests: 3 chunks
505 bundle manifests: 4 chunks
506 bundle manifests: 5 chunks
507 bundle manifests: 6 chunks
508 bundle manifests: 7 chunks
509 bundle manifests: 8 chunks
510 bundle manifests: 9 chunks
511 bundle files: foo/Bar/file.txt 0 chunks
512 bundle files: foo/Bar/file.txt 1 chunks
513 bundle files: foo/Bar/file.txt 2 chunks
514 bundle files: foo/Bar/file.txt 3 chunks
515 bundle files: foo/file.txt 4 chunks
516 bundle files: foo/file.txt 5 chunks
517 bundle files: foo/file.txt 6 chunks
518 bundle files: foo/file.txt 7 chunks
519 bundle files: quux/file.py 8 chunks
520 bundle files: quux/file.py 9 chunks
521 bundle files: quux/file.py 10 chunks
522 bundle files: quux/file.py 11 chunks
491 bundling changes: 0 chunks
492 bundling changes: 1 chunks
493 bundling changes: 2 chunks
494 bundling changes: 3 chunks
495 bundling changes: 4 chunks
496 bundling changes: 5 chunks
497 bundling changes: 6 chunks
498 bundling changes: 7 chunks
499 bundling changes: 8 chunks
500 bundling changes: 9 chunks
501 bundling manifests: 0 chunks
502 bundling manifests: 1 chunks
503 bundling manifests: 2 chunks
504 bundling manifests: 3 chunks
505 bundling manifests: 4 chunks
506 bundling manifests: 5 chunks
507 bundling manifests: 6 chunks
508 bundling manifests: 7 chunks
509 bundling manifests: 8 chunks
510 bundling manifests: 9 chunks
511 bundling files: foo/Bar/file.txt 0 chunks
512 bundling files: foo/Bar/file.txt 1 chunks
513 bundling files: foo/Bar/file.txt 2 chunks
514 bundling files: foo/Bar/file.txt 3 chunks
515 bundling files: foo/file.txt 4 chunks
516 bundling files: foo/file.txt 5 chunks
517 bundling files: foo/file.txt 6 chunks
518 bundling files: foo/file.txt 7 chunks
519 bundling files: quux/file.py 8 chunks
520 bundling files: quux/file.py 9 chunks
521 bundling files: quux/file.py 10 chunks
522 bundling files: quux/file.py 11 chunks
523 523 changesets: 1 chunks
524 524 add changeset ef1ea85a6374
525 525 changesets: 2 chunks
526 526 add changeset f9cafe1212c8
527 527 changesets: 3 chunks
528 528 add changeset 911600dab2ae
529 529 adding manifests
530 530 manifests: 1 chunks
531 531 manifests: 2 chunks
532 532 manifests: 3 chunks
533 533 adding file changes
534 534 adding foo/Bar/file.txt revisions
535 535 files: 1 chunks
536 536 adding foo/file.txt revisions
537 537 files: 2 chunks
538 538 adding quux/file.py revisions
539 539 files: 3 chunks
540 540 added 3 changesets with 3 changes to 3 files
541 541 calling hook pretxnchangegroup.acl: hgext.acl.hook
542 542 acl: acl.allow enabled, 1 entries for user fred
543 543 acl: acl.deny enabled, 1 entries for user fred
544 544 acl: allowing changeset ef1ea85a6374
545 545 acl: allowing changeset f9cafe1212c8
546 546 acl: user fred not allowed on quux/file.py
547 547 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
548 548 transaction abort!
549 549 rollback completed
550 550 abort: acl: access denied for changeset 911600dab2ae
551 551 no rollback information available
552 552 0:6675d58eff77
553 553
554 554 fred is allowed inside foo/, but not foo/Bar/
555 555 Pushing as user fred
556 556 hgrc = """
557 557 [hooks]
558 558 pretxnchangegroup.acl = python:hgext.acl.hook
559 559 [acl]
560 560 sources = push
561 561 [acl.allow]
562 562 foo/** = fred
563 563 [acl.deny]
564 564 foo/bar/** = fred
565 565 foo/Bar/** = fred
566 566 """
567 567 pushing to ../b
568 568 searching for changes
569 569 common changesets up to 6675d58eff77
570 570 3 changesets found
571 571 list of changesets:
572 572 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
573 573 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
574 574 911600dab2ae7a9baff75958b84fe606851ce955
575 575 adding changesets
576 bundle changes: 0 chunks
577 bundle changes: 1 chunks
578 bundle changes: 2 chunks
579 bundle changes: 3 chunks
580 bundle changes: 4 chunks
581 bundle changes: 5 chunks
582 bundle changes: 6 chunks
583 bundle changes: 7 chunks
584 bundle changes: 8 chunks
585 bundle changes: 9 chunks
586 bundle manifests: 0 chunks
587 bundle manifests: 1 chunks
588 bundle manifests: 2 chunks
589 bundle manifests: 3 chunks
590 bundle manifests: 4 chunks
591 bundle manifests: 5 chunks
592 bundle manifests: 6 chunks
593 bundle manifests: 7 chunks
594 bundle manifests: 8 chunks
595 bundle manifests: 9 chunks
596 bundle files: foo/Bar/file.txt 0 chunks
597 bundle files: foo/Bar/file.txt 1 chunks
598 bundle files: foo/Bar/file.txt 2 chunks
599 bundle files: foo/Bar/file.txt 3 chunks
600 bundle files: foo/file.txt 4 chunks
601 bundle files: foo/file.txt 5 chunks
602 bundle files: foo/file.txt 6 chunks
603 bundle files: foo/file.txt 7 chunks
604 bundle files: quux/file.py 8 chunks
605 bundle files: quux/file.py 9 chunks
606 bundle files: quux/file.py 10 chunks
607 bundle files: quux/file.py 11 chunks
576 bundling changes: 0 chunks
577 bundling changes: 1 chunks
578 bundling changes: 2 chunks
579 bundling changes: 3 chunks
580 bundling changes: 4 chunks
581 bundling changes: 5 chunks
582 bundling changes: 6 chunks
583 bundling changes: 7 chunks
584 bundling changes: 8 chunks
585 bundling changes: 9 chunks
586 bundling manifests: 0 chunks
587 bundling manifests: 1 chunks
588 bundling manifests: 2 chunks
589 bundling manifests: 3 chunks
590 bundling manifests: 4 chunks
591 bundling manifests: 5 chunks
592 bundling manifests: 6 chunks
593 bundling manifests: 7 chunks
594 bundling manifests: 8 chunks
595 bundling manifests: 9 chunks
596 bundling files: foo/Bar/file.txt 0 chunks
597 bundling files: foo/Bar/file.txt 1 chunks
598 bundling files: foo/Bar/file.txt 2 chunks
599 bundling files: foo/Bar/file.txt 3 chunks
600 bundling files: foo/file.txt 4 chunks
601 bundling files: foo/file.txt 5 chunks
602 bundling files: foo/file.txt 6 chunks
603 bundling files: foo/file.txt 7 chunks
604 bundling files: quux/file.py 8 chunks
605 bundling files: quux/file.py 9 chunks
606 bundling files: quux/file.py 10 chunks
607 bundling files: quux/file.py 11 chunks
608 608 changesets: 1 chunks
609 609 add changeset ef1ea85a6374
610 610 changesets: 2 chunks
611 611 add changeset f9cafe1212c8
612 612 changesets: 3 chunks
613 613 add changeset 911600dab2ae
614 614 adding manifests
615 615 manifests: 1 chunks
616 616 manifests: 2 chunks
617 617 manifests: 3 chunks
618 618 adding file changes
619 619 adding foo/Bar/file.txt revisions
620 620 files: 1 chunks
621 621 adding foo/file.txt revisions
622 622 files: 2 chunks
623 623 adding quux/file.py revisions
624 624 files: 3 chunks
625 625 added 3 changesets with 3 changes to 3 files
626 626 calling hook pretxnchangegroup.acl: hgext.acl.hook
627 627 acl: acl.allow enabled, 1 entries for user fred
628 628 acl: acl.deny enabled, 2 entries for user fred
629 629 acl: allowing changeset ef1ea85a6374
630 630 acl: user fred denied on foo/Bar/file.txt
631 631 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset f9cafe1212c8
632 632 transaction abort!
633 633 rollback completed
634 634 abort: acl: access denied for changeset f9cafe1212c8
635 635 no rollback information available
636 636 0:6675d58eff77
637 637
638 638 barney is not mentioned => not allowed anywhere
639 639 Pushing as user barney
640 640 hgrc = """
641 641 [hooks]
642 642 pretxnchangegroup.acl = python:hgext.acl.hook
643 643 [acl]
644 644 sources = push
645 645 [acl.allow]
646 646 foo/** = fred
647 647 [acl.deny]
648 648 foo/bar/** = fred
649 649 foo/Bar/** = fred
650 650 """
651 651 pushing to ../b
652 652 searching for changes
653 653 common changesets up to 6675d58eff77
654 654 3 changesets found
655 655 list of changesets:
656 656 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
657 657 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
658 658 911600dab2ae7a9baff75958b84fe606851ce955
659 659 adding changesets
660 bundle changes: 0 chunks
661 bundle changes: 1 chunks
662 bundle changes: 2 chunks
663 bundle changes: 3 chunks
664 bundle changes: 4 chunks
665 bundle changes: 5 chunks
666 bundle changes: 6 chunks
667 bundle changes: 7 chunks
668 bundle changes: 8 chunks
669 bundle changes: 9 chunks
670 bundle manifests: 0 chunks
671 bundle manifests: 1 chunks
672 bundle manifests: 2 chunks
673 bundle manifests: 3 chunks
674 bundle manifests: 4 chunks
675 bundle manifests: 5 chunks
676 bundle manifests: 6 chunks
677 bundle manifests: 7 chunks
678 bundle manifests: 8 chunks
679 bundle manifests: 9 chunks
680 bundle files: foo/Bar/file.txt 0 chunks
681 bundle files: foo/Bar/file.txt 1 chunks
682 bundle files: foo/Bar/file.txt 2 chunks
683 bundle files: foo/Bar/file.txt 3 chunks
684 bundle files: foo/file.txt 4 chunks
685 bundle files: foo/file.txt 5 chunks
686 bundle files: foo/file.txt 6 chunks
687 bundle files: foo/file.txt 7 chunks
688 bundle files: quux/file.py 8 chunks
689 bundle files: quux/file.py 9 chunks
690 bundle files: quux/file.py 10 chunks
691 bundle files: quux/file.py 11 chunks
660 bundling changes: 0 chunks
661 bundling changes: 1 chunks
662 bundling changes: 2 chunks
663 bundling changes: 3 chunks
664 bundling changes: 4 chunks
665 bundling changes: 5 chunks
666 bundling changes: 6 chunks
667 bundling changes: 7 chunks
668 bundling changes: 8 chunks
669 bundling changes: 9 chunks
670 bundling manifests: 0 chunks
671 bundling manifests: 1 chunks
672 bundling manifests: 2 chunks
673 bundling manifests: 3 chunks
674 bundling manifests: 4 chunks
675 bundling manifests: 5 chunks
676 bundling manifests: 6 chunks
677 bundling manifests: 7 chunks
678 bundling manifests: 8 chunks
679 bundling manifests: 9 chunks
680 bundling files: foo/Bar/file.txt 0 chunks
681 bundling files: foo/Bar/file.txt 1 chunks
682 bundling files: foo/Bar/file.txt 2 chunks
683 bundling files: foo/Bar/file.txt 3 chunks
684 bundling files: foo/file.txt 4 chunks
685 bundling files: foo/file.txt 5 chunks
686 bundling files: foo/file.txt 6 chunks
687 bundling files: foo/file.txt 7 chunks
688 bundling files: quux/file.py 8 chunks
689 bundling files: quux/file.py 9 chunks
690 bundling files: quux/file.py 10 chunks
691 bundling files: quux/file.py 11 chunks
692 692 changesets: 1 chunks
693 693 add changeset ef1ea85a6374
694 694 changesets: 2 chunks
695 695 add changeset f9cafe1212c8
696 696 changesets: 3 chunks
697 697 add changeset 911600dab2ae
698 698 adding manifests
699 699 manifests: 1 chunks
700 700 manifests: 2 chunks
701 701 manifests: 3 chunks
702 702 adding file changes
703 703 adding foo/Bar/file.txt revisions
704 704 files: 1 chunks
705 705 adding foo/file.txt revisions
706 706 files: 2 chunks
707 707 adding quux/file.py revisions
708 708 files: 3 chunks
709 709 added 3 changesets with 3 changes to 3 files
710 710 calling hook pretxnchangegroup.acl: hgext.acl.hook
711 711 acl: acl.allow enabled, 0 entries for user barney
712 712 acl: acl.deny enabled, 0 entries for user barney
713 713 acl: user barney not allowed on foo/file.txt
714 714 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
715 715 transaction abort!
716 716 rollback completed
717 717 abort: acl: access denied for changeset ef1ea85a6374
718 718 no rollback information available
719 719 0:6675d58eff77
720 720
721 721 barney is allowed everywhere
722 722 Pushing as user barney
723 723 hgrc = """
724 724 [hooks]
725 725 pretxnchangegroup.acl = python:hgext.acl.hook
726 726 [acl]
727 727 sources = push
728 728 [acl.allow]
729 729 foo/** = fred
730 730 [acl.deny]
731 731 foo/bar/** = fred
732 732 foo/Bar/** = fred
733 733 [acl.allow]
734 734 ** = barney
735 735 """
736 736 pushing to ../b
737 737 searching for changes
738 738 common changesets up to 6675d58eff77
739 739 3 changesets found
740 740 list of changesets:
741 741 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
742 742 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
743 743 911600dab2ae7a9baff75958b84fe606851ce955
744 744 adding changesets
745 bundle changes: 0 chunks
746 bundle changes: 1 chunks
747 bundle changes: 2 chunks
748 bundle changes: 3 chunks
749 bundle changes: 4 chunks
750 bundle changes: 5 chunks
751 bundle changes: 6 chunks
752 bundle changes: 7 chunks
753 bundle changes: 8 chunks
754 bundle changes: 9 chunks
755 bundle manifests: 0 chunks
756 bundle manifests: 1 chunks
757 bundle manifests: 2 chunks
758 bundle manifests: 3 chunks
759 bundle manifests: 4 chunks
760 bundle manifests: 5 chunks
761 bundle manifests: 6 chunks
762 bundle manifests: 7 chunks
763 bundle manifests: 8 chunks
764 bundle manifests: 9 chunks
765 bundle files: foo/Bar/file.txt 0 chunks
766 bundle files: foo/Bar/file.txt 1 chunks
767 bundle files: foo/Bar/file.txt 2 chunks
768 bundle files: foo/Bar/file.txt 3 chunks
769 bundle files: foo/file.txt 4 chunks
770 bundle files: foo/file.txt 5 chunks
771 bundle files: foo/file.txt 6 chunks
772 bundle files: foo/file.txt 7 chunks
773 bundle files: quux/file.py 8 chunks
774 bundle files: quux/file.py 9 chunks
775 bundle files: quux/file.py 10 chunks
776 bundle files: quux/file.py 11 chunks
745 bundling changes: 0 chunks
746 bundling changes: 1 chunks
747 bundling changes: 2 chunks
748 bundling changes: 3 chunks
749 bundling changes: 4 chunks
750 bundling changes: 5 chunks
751 bundling changes: 6 chunks
752 bundling changes: 7 chunks
753 bundling changes: 8 chunks
754 bundling changes: 9 chunks
755 bundling manifests: 0 chunks
756 bundling manifests: 1 chunks
757 bundling manifests: 2 chunks
758 bundling manifests: 3 chunks
759 bundling manifests: 4 chunks
760 bundling manifests: 5 chunks
761 bundling manifests: 6 chunks
762 bundling manifests: 7 chunks
763 bundling manifests: 8 chunks
764 bundling manifests: 9 chunks
765 bundling files: foo/Bar/file.txt 0 chunks
766 bundling files: foo/Bar/file.txt 1 chunks
767 bundling files: foo/Bar/file.txt 2 chunks
768 bundling files: foo/Bar/file.txt 3 chunks
769 bundling files: foo/file.txt 4 chunks
770 bundling files: foo/file.txt 5 chunks
771 bundling files: foo/file.txt 6 chunks
772 bundling files: foo/file.txt 7 chunks
773 bundling files: quux/file.py 8 chunks
774 bundling files: quux/file.py 9 chunks
775 bundling files: quux/file.py 10 chunks
776 bundling files: quux/file.py 11 chunks
777 777 changesets: 1 chunks
778 778 add changeset ef1ea85a6374
779 779 changesets: 2 chunks
780 780 add changeset f9cafe1212c8
781 781 changesets: 3 chunks
782 782 add changeset 911600dab2ae
783 783 adding manifests
784 784 manifests: 1 chunks
785 785 manifests: 2 chunks
786 786 manifests: 3 chunks
787 787 adding file changes
788 788 adding foo/Bar/file.txt revisions
789 789 files: 1 chunks
790 790 adding foo/file.txt revisions
791 791 files: 2 chunks
792 792 adding quux/file.py revisions
793 793 files: 3 chunks
794 794 added 3 changesets with 3 changes to 3 files
795 795 calling hook pretxnchangegroup.acl: hgext.acl.hook
796 796 acl: acl.allow enabled, 1 entries for user barney
797 797 acl: acl.deny enabled, 0 entries for user barney
798 798 acl: allowing changeset ef1ea85a6374
799 799 acl: allowing changeset f9cafe1212c8
800 800 acl: allowing changeset 911600dab2ae
801 801 updating the branch cache
802 802 rolling back last transaction
803 803 0:6675d58eff77
804 804
805 805 wilma can change files with a .txt extension
806 806 Pushing as user wilma
807 807 hgrc = """
808 808 [hooks]
809 809 pretxnchangegroup.acl = python:hgext.acl.hook
810 810 [acl]
811 811 sources = push
812 812 [acl.allow]
813 813 foo/** = fred
814 814 [acl.deny]
815 815 foo/bar/** = fred
816 816 foo/Bar/** = fred
817 817 [acl.allow]
818 818 ** = barney
819 819 **/*.txt = wilma
820 820 """
821 821 pushing to ../b
822 822 searching for changes
823 823 common changesets up to 6675d58eff77
824 824 invalidating branch cache (tip differs)
825 825 3 changesets found
826 826 list of changesets:
827 827 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
828 828 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
829 829 911600dab2ae7a9baff75958b84fe606851ce955
830 830 adding changesets
831 bundle changes: 0 chunks
832 bundle changes: 1 chunks
833 bundle changes: 2 chunks
834 bundle changes: 3 chunks
835 bundle changes: 4 chunks
836 bundle changes: 5 chunks
837 bundle changes: 6 chunks
838 bundle changes: 7 chunks
839 bundle changes: 8 chunks
840 bundle changes: 9 chunks
841 bundle manifests: 0 chunks
842 bundle manifests: 1 chunks
843 bundle manifests: 2 chunks
844 bundle manifests: 3 chunks
845 bundle manifests: 4 chunks
846 bundle manifests: 5 chunks
847 bundle manifests: 6 chunks
848 bundle manifests: 7 chunks
849 bundle manifests: 8 chunks
850 bundle manifests: 9 chunks
851 bundle files: foo/Bar/file.txt 0 chunks
852 bundle files: foo/Bar/file.txt 1 chunks
853 bundle files: foo/Bar/file.txt 2 chunks
854 bundle files: foo/Bar/file.txt 3 chunks
855 bundle files: foo/file.txt 4 chunks
856 bundle files: foo/file.txt 5 chunks
857 bundle files: foo/file.txt 6 chunks
858 bundle files: foo/file.txt 7 chunks
859 bundle files: quux/file.py 8 chunks
860 bundle files: quux/file.py 9 chunks
861 bundle files: quux/file.py 10 chunks
862 bundle files: quux/file.py 11 chunks
831 bundling changes: 0 chunks
832 bundling changes: 1 chunks
833 bundling changes: 2 chunks
834 bundling changes: 3 chunks
835 bundling changes: 4 chunks
836 bundling changes: 5 chunks
837 bundling changes: 6 chunks
838 bundling changes: 7 chunks
839 bundling changes: 8 chunks
840 bundling changes: 9 chunks
841 bundling manifests: 0 chunks
842 bundling manifests: 1 chunks
843 bundling manifests: 2 chunks
844 bundling manifests: 3 chunks
845 bundling manifests: 4 chunks
846 bundling manifests: 5 chunks
847 bundling manifests: 6 chunks
848 bundling manifests: 7 chunks
849 bundling manifests: 8 chunks
850 bundling manifests: 9 chunks
851 bundling files: foo/Bar/file.txt 0 chunks
852 bundling files: foo/Bar/file.txt 1 chunks
853 bundling files: foo/Bar/file.txt 2 chunks
854 bundling files: foo/Bar/file.txt 3 chunks
855 bundling files: foo/file.txt 4 chunks
856 bundling files: foo/file.txt 5 chunks
857 bundling files: foo/file.txt 6 chunks
858 bundling files: foo/file.txt 7 chunks
859 bundling files: quux/file.py 8 chunks
860 bundling files: quux/file.py 9 chunks
861 bundling files: quux/file.py 10 chunks
862 bundling files: quux/file.py 11 chunks
863 863 changesets: 1 chunks
864 864 add changeset ef1ea85a6374
865 865 changesets: 2 chunks
866 866 add changeset f9cafe1212c8
867 867 changesets: 3 chunks
868 868 add changeset 911600dab2ae
869 869 adding manifests
870 870 manifests: 1 chunks
871 871 manifests: 2 chunks
872 872 manifests: 3 chunks
873 873 adding file changes
874 874 adding foo/Bar/file.txt revisions
875 875 files: 1 chunks
876 876 adding foo/file.txt revisions
877 877 files: 2 chunks
878 878 adding quux/file.py revisions
879 879 files: 3 chunks
880 880 added 3 changesets with 3 changes to 3 files
881 881 calling hook pretxnchangegroup.acl: hgext.acl.hook
882 882 acl: acl.allow enabled, 1 entries for user wilma
883 883 acl: acl.deny enabled, 0 entries for user wilma
884 884 acl: allowing changeset ef1ea85a6374
885 885 acl: allowing changeset f9cafe1212c8
886 886 acl: user wilma not allowed on quux/file.py
887 887 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
888 888 transaction abort!
889 889 rollback completed
890 890 abort: acl: access denied for changeset 911600dab2ae
891 891 no rollback information available
892 892 0:6675d58eff77
893 893
894 894 file specified by acl.config does not exist
895 895 Pushing as user barney
896 896 hgrc = """
897 897 [hooks]
898 898 pretxnchangegroup.acl = python:hgext.acl.hook
899 899 [acl]
900 900 sources = push
901 901 [acl.allow]
902 902 foo/** = fred
903 903 [acl.deny]
904 904 foo/bar/** = fred
905 905 foo/Bar/** = fred
906 906 [acl.allow]
907 907 ** = barney
908 908 **/*.txt = wilma
909 909 [acl]
910 910 config = ../acl.config
911 911 """
912 912 pushing to ../b
913 913 searching for changes
914 914 common changesets up to 6675d58eff77
915 915 3 changesets found
916 916 list of changesets:
917 917 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
918 918 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
919 919 911600dab2ae7a9baff75958b84fe606851ce955
920 920 adding changesets
921 bundle changes: 0 chunks
922 bundle changes: 1 chunks
923 bundle changes: 2 chunks
924 bundle changes: 3 chunks
925 bundle changes: 4 chunks
926 bundle changes: 5 chunks
927 bundle changes: 6 chunks
928 bundle changes: 7 chunks
929 bundle changes: 8 chunks
930 bundle changes: 9 chunks
931 bundle manifests: 0 chunks
932 bundle manifests: 1 chunks
933 bundle manifests: 2 chunks
934 bundle manifests: 3 chunks
935 bundle manifests: 4 chunks
936 bundle manifests: 5 chunks
937 bundle manifests: 6 chunks
938 bundle manifests: 7 chunks
939 bundle manifests: 8 chunks
940 bundle manifests: 9 chunks
941 bundle files: foo/Bar/file.txt 0 chunks
942 bundle files: foo/Bar/file.txt 1 chunks
943 bundle files: foo/Bar/file.txt 2 chunks
944 bundle files: foo/Bar/file.txt 3 chunks
945 bundle files: foo/file.txt 4 chunks
946 bundle files: foo/file.txt 5 chunks
947 bundle files: foo/file.txt 6 chunks
948 bundle files: foo/file.txt 7 chunks
949 bundle files: quux/file.py 8 chunks
950 bundle files: quux/file.py 9 chunks
951 bundle files: quux/file.py 10 chunks
952 bundle files: quux/file.py 11 chunks
921 bundling changes: 0 chunks
922 bundling changes: 1 chunks
923 bundling changes: 2 chunks
924 bundling changes: 3 chunks
925 bundling changes: 4 chunks
926 bundling changes: 5 chunks
927 bundling changes: 6 chunks
928 bundling changes: 7 chunks
929 bundling changes: 8 chunks
930 bundling changes: 9 chunks
931 bundling manifests: 0 chunks
932 bundling manifests: 1 chunks
933 bundling manifests: 2 chunks
934 bundling manifests: 3 chunks
935 bundling manifests: 4 chunks
936 bundling manifests: 5 chunks
937 bundling manifests: 6 chunks
938 bundling manifests: 7 chunks
939 bundling manifests: 8 chunks
940 bundling manifests: 9 chunks
941 bundling files: foo/Bar/file.txt 0 chunks
942 bundling files: foo/Bar/file.txt 1 chunks
943 bundling files: foo/Bar/file.txt 2 chunks
944 bundling files: foo/Bar/file.txt 3 chunks
945 bundling files: foo/file.txt 4 chunks
946 bundling files: foo/file.txt 5 chunks
947 bundling files: foo/file.txt 6 chunks
948 bundling files: foo/file.txt 7 chunks
949 bundling files: quux/file.py 8 chunks
950 bundling files: quux/file.py 9 chunks
951 bundling files: quux/file.py 10 chunks
952 bundling files: quux/file.py 11 chunks
953 953 changesets: 1 chunks
954 954 add changeset ef1ea85a6374
955 955 changesets: 2 chunks
956 956 add changeset f9cafe1212c8
957 957 changesets: 3 chunks
958 958 add changeset 911600dab2ae
959 959 adding manifests
960 960 manifests: 1 chunks
961 961 manifests: 2 chunks
962 962 manifests: 3 chunks
963 963 adding file changes
964 964 adding foo/Bar/file.txt revisions
965 965 files: 1 chunks
966 966 adding foo/file.txt revisions
967 967 files: 2 chunks
968 968 adding quux/file.py revisions
969 969 files: 3 chunks
970 970 added 3 changesets with 3 changes to 3 files
971 971 calling hook pretxnchangegroup.acl: hgext.acl.hook
972 972 error: pretxnchangegroup.acl hook raised an exception: [Errno 2] No such file or directory: '../acl.config'
973 973 transaction abort!
974 974 rollback completed
975 975 abort: No such file or directory: ../acl.config
976 976 no rollback information available
977 977 0:6675d58eff77
978 978
979 979 betty is allowed inside foo/ by a acl.config file
980 980 Pushing as user betty
981 981 hgrc = """
982 982 [hooks]
983 983 pretxnchangegroup.acl = python:hgext.acl.hook
984 984 [acl]
985 985 sources = push
986 986 [acl.allow]
987 987 foo/** = fred
988 988 [acl.deny]
989 989 foo/bar/** = fred
990 990 foo/Bar/** = fred
991 991 [acl.allow]
992 992 ** = barney
993 993 **/*.txt = wilma
994 994 [acl]
995 995 config = ../acl.config
996 996 """
997 997 acl.config = """
998 998 [acl.allow]
999 999 foo/** = betty
1000 1000 """
1001 1001 pushing to ../b
1002 1002 searching for changes
1003 1003 common changesets up to 6675d58eff77
1004 1004 3 changesets found
1005 1005 list of changesets:
1006 1006 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1007 1007 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1008 1008 911600dab2ae7a9baff75958b84fe606851ce955
1009 1009 adding changesets
1010 bundle changes: 0 chunks
1011 bundle changes: 1 chunks
1012 bundle changes: 2 chunks
1013 bundle changes: 3 chunks
1014 bundle changes: 4 chunks
1015 bundle changes: 5 chunks
1016 bundle changes: 6 chunks
1017 bundle changes: 7 chunks
1018 bundle changes: 8 chunks
1019 bundle changes: 9 chunks
1020 bundle manifests: 0 chunks
1021 bundle manifests: 1 chunks
1022 bundle manifests: 2 chunks
1023 bundle manifests: 3 chunks
1024 bundle manifests: 4 chunks
1025 bundle manifests: 5 chunks
1026 bundle manifests: 6 chunks
1027 bundle manifests: 7 chunks
1028 bundle manifests: 8 chunks
1029 bundle manifests: 9 chunks
1030 bundle files: foo/Bar/file.txt 0 chunks
1031 bundle files: foo/Bar/file.txt 1 chunks
1032 bundle files: foo/Bar/file.txt 2 chunks
1033 bundle files: foo/Bar/file.txt 3 chunks
1034 bundle files: foo/file.txt 4 chunks
1035 bundle files: foo/file.txt 5 chunks
1036 bundle files: foo/file.txt 6 chunks
1037 bundle files: foo/file.txt 7 chunks
1038 bundle files: quux/file.py 8 chunks
1039 bundle files: quux/file.py 9 chunks
1040 bundle files: quux/file.py 10 chunks
1041 bundle files: quux/file.py 11 chunks
1010 bundling changes: 0 chunks
1011 bundling changes: 1 chunks
1012 bundling changes: 2 chunks
1013 bundling changes: 3 chunks
1014 bundling changes: 4 chunks
1015 bundling changes: 5 chunks
1016 bundling changes: 6 chunks
1017 bundling changes: 7 chunks
1018 bundling changes: 8 chunks
1019 bundling changes: 9 chunks
1020 bundling manifests: 0 chunks
1021 bundling manifests: 1 chunks
1022 bundling manifests: 2 chunks
1023 bundling manifests: 3 chunks
1024 bundling manifests: 4 chunks
1025 bundling manifests: 5 chunks
1026 bundling manifests: 6 chunks
1027 bundling manifests: 7 chunks
1028 bundling manifests: 8 chunks
1029 bundling manifests: 9 chunks
1030 bundling files: foo/Bar/file.txt 0 chunks
1031 bundling files: foo/Bar/file.txt 1 chunks
1032 bundling files: foo/Bar/file.txt 2 chunks
1033 bundling files: foo/Bar/file.txt 3 chunks
1034 bundling files: foo/file.txt 4 chunks
1035 bundling files: foo/file.txt 5 chunks
1036 bundling files: foo/file.txt 6 chunks
1037 bundling files: foo/file.txt 7 chunks
1038 bundling files: quux/file.py 8 chunks
1039 bundling files: quux/file.py 9 chunks
1040 bundling files: quux/file.py 10 chunks
1041 bundling files: quux/file.py 11 chunks
1042 1042 changesets: 1 chunks
1043 1043 add changeset ef1ea85a6374
1044 1044 changesets: 2 chunks
1045 1045 add changeset f9cafe1212c8
1046 1046 changesets: 3 chunks
1047 1047 add changeset 911600dab2ae
1048 1048 adding manifests
1049 1049 manifests: 1 chunks
1050 1050 manifests: 2 chunks
1051 1051 manifests: 3 chunks
1052 1052 adding file changes
1053 1053 adding foo/Bar/file.txt revisions
1054 1054 files: 1 chunks
1055 1055 adding foo/file.txt revisions
1056 1056 files: 2 chunks
1057 1057 adding quux/file.py revisions
1058 1058 files: 3 chunks
1059 1059 added 3 changesets with 3 changes to 3 files
1060 1060 calling hook pretxnchangegroup.acl: hgext.acl.hook
1061 1061 acl: acl.allow enabled, 1 entries for user betty
1062 1062 acl: acl.deny enabled, 0 entries for user betty
1063 1063 acl: allowing changeset ef1ea85a6374
1064 1064 acl: allowing changeset f9cafe1212c8
1065 1065 acl: user betty not allowed on quux/file.py
1066 1066 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
1067 1067 transaction abort!
1068 1068 rollback completed
1069 1069 abort: acl: access denied for changeset 911600dab2ae
1070 1070 no rollback information available
1071 1071 0:6675d58eff77
1072 1072
1073 1073 acl.config can set only [acl.allow]/[acl.deny]
1074 1074 Pushing as user barney
1075 1075 hgrc = """
1076 1076 [hooks]
1077 1077 pretxnchangegroup.acl = python:hgext.acl.hook
1078 1078 [acl]
1079 1079 sources = push
1080 1080 [acl.allow]
1081 1081 foo/** = fred
1082 1082 [acl.deny]
1083 1083 foo/bar/** = fred
1084 1084 foo/Bar/** = fred
1085 1085 [acl.allow]
1086 1086 ** = barney
1087 1087 **/*.txt = wilma
1088 1088 [acl]
1089 1089 config = ../acl.config
1090 1090 """
1091 1091 acl.config = """
1092 1092 [acl.allow]
1093 1093 foo/** = betty
1094 1094 [hooks]
1095 1095 changegroup.acl = false
1096 1096 """
1097 1097 pushing to ../b
1098 1098 searching for changes
1099 1099 common changesets up to 6675d58eff77
1100 1100 3 changesets found
1101 1101 list of changesets:
1102 1102 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1103 1103 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1104 1104 911600dab2ae7a9baff75958b84fe606851ce955
1105 1105 adding changesets
1106 bundle changes: 0 chunks
1107 bundle changes: 1 chunks
1108 bundle changes: 2 chunks
1109 bundle changes: 3 chunks
1110 bundle changes: 4 chunks
1111 bundle changes: 5 chunks
1112 bundle changes: 6 chunks
1113 bundle changes: 7 chunks
1114 bundle changes: 8 chunks
1115 bundle changes: 9 chunks
1116 bundle manifests: 0 chunks
1117 bundle manifests: 1 chunks
1118 bundle manifests: 2 chunks
1119 bundle manifests: 3 chunks
1120 bundle manifests: 4 chunks
1121 bundle manifests: 5 chunks
1122 bundle manifests: 6 chunks
1123 bundle manifests: 7 chunks
1124 bundle manifests: 8 chunks
1125 bundle manifests: 9 chunks
1126 bundle files: foo/Bar/file.txt 0 chunks
1127 bundle files: foo/Bar/file.txt 1 chunks
1128 bundle files: foo/Bar/file.txt 2 chunks
1129 bundle files: foo/Bar/file.txt 3 chunks
1130 bundle files: foo/file.txt 4 chunks
1131 bundle files: foo/file.txt 5 chunks
1132 bundle files: foo/file.txt 6 chunks
1133 bundle files: foo/file.txt 7 chunks
1134 bundle files: quux/file.py 8 chunks
1135 bundle files: quux/file.py 9 chunks
1136 bundle files: quux/file.py 10 chunks
1137 bundle files: quux/file.py 11 chunks
1106 bundling changes: 0 chunks
1107 bundling changes: 1 chunks
1108 bundling changes: 2 chunks
1109 bundling changes: 3 chunks
1110 bundling changes: 4 chunks
1111 bundling changes: 5 chunks
1112 bundling changes: 6 chunks
1113 bundling changes: 7 chunks
1114 bundling changes: 8 chunks
1115 bundling changes: 9 chunks
1116 bundling manifests: 0 chunks
1117 bundling manifests: 1 chunks
1118 bundling manifests: 2 chunks
1119 bundling manifests: 3 chunks
1120 bundling manifests: 4 chunks
1121 bundling manifests: 5 chunks
1122 bundling manifests: 6 chunks
1123 bundling manifests: 7 chunks
1124 bundling manifests: 8 chunks
1125 bundling manifests: 9 chunks
1126 bundling files: foo/Bar/file.txt 0 chunks
1127 bundling files: foo/Bar/file.txt 1 chunks
1128 bundling files: foo/Bar/file.txt 2 chunks
1129 bundling files: foo/Bar/file.txt 3 chunks
1130 bundling files: foo/file.txt 4 chunks
1131 bundling files: foo/file.txt 5 chunks
1132 bundling files: foo/file.txt 6 chunks
1133 bundling files: foo/file.txt 7 chunks
1134 bundling files: quux/file.py 8 chunks
1135 bundling files: quux/file.py 9 chunks
1136 bundling files: quux/file.py 10 chunks
1137 bundling files: quux/file.py 11 chunks
1138 1138 changesets: 1 chunks
1139 1139 add changeset ef1ea85a6374
1140 1140 changesets: 2 chunks
1141 1141 add changeset f9cafe1212c8
1142 1142 changesets: 3 chunks
1143 1143 add changeset 911600dab2ae
1144 1144 adding manifests
1145 1145 manifests: 1 chunks
1146 1146 manifests: 2 chunks
1147 1147 manifests: 3 chunks
1148 1148 adding file changes
1149 1149 adding foo/Bar/file.txt revisions
1150 1150 files: 1 chunks
1151 1151 adding foo/file.txt revisions
1152 1152 files: 2 chunks
1153 1153 adding quux/file.py revisions
1154 1154 files: 3 chunks
1155 1155 added 3 changesets with 3 changes to 3 files
1156 1156 calling hook pretxnchangegroup.acl: hgext.acl.hook
1157 1157 acl: acl.allow enabled, 1 entries for user barney
1158 1158 acl: acl.deny enabled, 0 entries for user barney
1159 1159 acl: allowing changeset ef1ea85a6374
1160 1160 acl: allowing changeset f9cafe1212c8
1161 1161 acl: allowing changeset 911600dab2ae
1162 1162 updating the branch cache
1163 1163 rolling back last transaction
1164 1164 0:6675d58eff77
1165 1165
@@ -1,365 +1,365 b''
1 1 ====== Setting up test
2 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
3 3 created new head
4 4 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
5 5 checking changesets
6 6 checking manifests
7 7 crosschecking files in changesets and manifests
8 8 checking files
9 9 4 files, 9 changesets, 7 total revisions
10 10 ====== Bundle --all
11 11 9 changesets found
12 12 ====== Bundle test to full.hg
13 13 searching for changes
14 14 9 changesets found
15 15 ====== Unbundle full.hg in test
16 16 adding changesets
17 17 adding manifests
18 18 adding file changes
19 19 added 0 changesets with 0 changes to 4 files
20 20 (run 'hg update' to get a working copy)
21 21 ====== Verify empty
22 22 checking changesets
23 23 checking manifests
24 24 crosschecking files in changesets and manifests
25 25 checking files
26 26 0 files, 0 changesets, 0 total revisions
27 27 ====== Pull full.hg into test (using --cwd)
28 28 pulling from ../full.hg
29 29 searching for changes
30 30 no changes found
31 31 ====== Pull full.hg into empty (using --cwd)
32 32 pulling from ../full.hg
33 33 requesting all changes
34 34 adding changesets
35 35 adding manifests
36 36 adding file changes
37 37 added 9 changesets with 7 changes to 4 files (+1 heads)
38 38 (run 'hg heads' to see heads, 'hg merge' to merge)
39 39 ====== Rollback empty
40 40 rolling back last transaction
41 41 ====== Pull full.hg into empty again (using --cwd)
42 42 pulling from ../full.hg
43 43 requesting all changes
44 44 adding changesets
45 45 adding manifests
46 46 adding file changes
47 47 added 9 changesets with 7 changes to 4 files (+1 heads)
48 48 (run 'hg heads' to see heads, 'hg merge' to merge)
49 49 ====== Pull full.hg into test (using -R)
50 50 pulling from full.hg
51 51 searching for changes
52 52 no changes found
53 53 ====== Pull full.hg into empty (using -R)
54 54 pulling from full.hg
55 55 searching for changes
56 56 no changes found
57 57 ====== Rollback empty
58 58 rolling back last transaction
59 59 ====== Pull full.hg into empty again (using -R)
60 60 pulling from full.hg
61 61 requesting all changes
62 62 adding changesets
63 63 adding manifests
64 64 adding file changes
65 65 added 9 changesets with 7 changes to 4 files (+1 heads)
66 66 (run 'hg heads' to see heads, 'hg merge' to merge)
67 67 ====== Log -R full.hg in fresh empty
68 68 changeset: 8:088ff9d6e1e1
69 69 tag: tip
70 70 parent: 3:ac69c658229d
71 71 user: test
72 72 date: Mon Jan 12 13:46:40 1970 +0000
73 73 summary: 0.3m
74 74
75 75 changeset: 7:27f57c869697
76 76 user: test
77 77 date: Mon Jan 12 13:46:40 1970 +0000
78 78 summary: 1.3m
79 79
80 80 changeset: 6:1e3f6b843bd6
81 81 user: test
82 82 date: Mon Jan 12 13:46:40 1970 +0000
83 83 summary: 1.3
84 84
85 85 changeset: 5:024e4e7df376
86 86 user: test
87 87 date: Mon Jan 12 13:46:40 1970 +0000
88 88 summary: 1.2
89 89
90 90 changeset: 4:5f4f3ceb285e
91 91 parent: 0:5649c9d34dd8
92 92 user: test
93 93 date: Mon Jan 12 13:46:40 1970 +0000
94 94 summary: 1.1
95 95
96 96 changeset: 3:ac69c658229d
97 97 user: test
98 98 date: Mon Jan 12 13:46:40 1970 +0000
99 99 summary: 0.3
100 100
101 101 changeset: 2:d62976ca1e50
102 102 user: test
103 103 date: Mon Jan 12 13:46:40 1970 +0000
104 104 summary: 0.2
105 105
106 106 changeset: 1:10b2180f755b
107 107 user: test
108 108 date: Mon Jan 12 13:46:40 1970 +0000
109 109 summary: 0.1
110 110
111 111 changeset: 0:5649c9d34dd8
112 112 user: test
113 113 date: Mon Jan 12 13:46:40 1970 +0000
114 114 summary: 0.0
115 115
116 116 ====== Pull ../full.hg into empty (with hook)
117 117 changegroup hook: HG_NODE=5649c9d34dd87d0ecb5fd39672128376e83b22e1 HG_SOURCE=pull HG_URL=bundle:../full.hg
118 118 pulling from bundle://../full.hg
119 119 requesting all changes
120 120 adding changesets
121 121 adding manifests
122 122 adding file changes
123 123 added 9 changesets with 7 changes to 4 files (+1 heads)
124 124 (run 'hg heads' to see heads, 'hg merge' to merge)
125 125 ====== Rollback empty
126 126 rolling back last transaction
127 127 ====== Log -R bundle:empty+full.hg
128 128 8 7 6 5 4 3 2 1 0
129 129 ====== Pull full.hg into empty again (using -R; with hook)
130 130 changegroup hook: HG_NODE=5649c9d34dd87d0ecb5fd39672128376e83b22e1 HG_SOURCE=pull HG_URL=bundle:empty+full.hg
131 131 pulling from full.hg
132 132 requesting all changes
133 133 adding changesets
134 134 adding manifests
135 135 adding file changes
136 136 added 9 changesets with 7 changes to 4 files (+1 heads)
137 137 (run 'hg heads' to see heads, 'hg merge' to merge)
138 138 ====== Create partial clones
139 139 requesting all changes
140 140 adding changesets
141 141 adding manifests
142 142 adding file changes
143 143 added 4 changesets with 4 changes to 1 files
144 144 updating to branch default
145 145 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
146 146 updating to branch default
147 147 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
148 148 ====== Log -R full.hg in partial
149 149 changeset: 8:088ff9d6e1e1
150 150 tag: tip
151 151 parent: 3:ac69c658229d
152 152 user: test
153 153 date: Mon Jan 12 13:46:40 1970 +0000
154 154 summary: 0.3m
155 155
156 156 changeset: 7:27f57c869697
157 157 user: test
158 158 date: Mon Jan 12 13:46:40 1970 +0000
159 159 summary: 1.3m
160 160
161 161 changeset: 6:1e3f6b843bd6
162 162 user: test
163 163 date: Mon Jan 12 13:46:40 1970 +0000
164 164 summary: 1.3
165 165
166 166 changeset: 5:024e4e7df376
167 167 user: test
168 168 date: Mon Jan 12 13:46:40 1970 +0000
169 169 summary: 1.2
170 170
171 171 changeset: 4:5f4f3ceb285e
172 172 parent: 0:5649c9d34dd8
173 173 user: test
174 174 date: Mon Jan 12 13:46:40 1970 +0000
175 175 summary: 1.1
176 176
177 177 changeset: 3:ac69c658229d
178 178 user: test
179 179 date: Mon Jan 12 13:46:40 1970 +0000
180 180 summary: 0.3
181 181
182 182 changeset: 2:d62976ca1e50
183 183 user: test
184 184 date: Mon Jan 12 13:46:40 1970 +0000
185 185 summary: 0.2
186 186
187 187 changeset: 1:10b2180f755b
188 188 user: test
189 189 date: Mon Jan 12 13:46:40 1970 +0000
190 190 summary: 0.1
191 191
192 192 changeset: 0:5649c9d34dd8
193 193 user: test
194 194 date: Mon Jan 12 13:46:40 1970 +0000
195 195 summary: 0.0
196 196
197 197 ====== Incoming full.hg in partial
198 198 comparing with bundle://../full.hg
199 199 searching for changes
200 200 changeset: 4:5f4f3ceb285e
201 201 parent: 0:5649c9d34dd8
202 202 user: test
203 203 date: Mon Jan 12 13:46:40 1970 +0000
204 204 summary: 1.1
205 205
206 206 changeset: 5:024e4e7df376
207 207 user: test
208 208 date: Mon Jan 12 13:46:40 1970 +0000
209 209 summary: 1.2
210 210
211 211 changeset: 6:1e3f6b843bd6
212 212 user: test
213 213 date: Mon Jan 12 13:46:40 1970 +0000
214 214 summary: 1.3
215 215
216 216 changeset: 7:27f57c869697
217 217 user: test
218 218 date: Mon Jan 12 13:46:40 1970 +0000
219 219 summary: 1.3m
220 220
221 221 changeset: 8:088ff9d6e1e1
222 222 tag: tip
223 223 parent: 3:ac69c658229d
224 224 user: test
225 225 date: Mon Jan 12 13:46:40 1970 +0000
226 226 summary: 0.3m
227 227
228 228 ====== Outgoing -R full.hg vs partial2 in partial
229 229 comparing with ../partial2
230 230 searching for changes
231 231 changeset: 4:5f4f3ceb285e
232 232 parent: 0:5649c9d34dd8
233 233 user: test
234 234 date: Mon Jan 12 13:46:40 1970 +0000
235 235 summary: 1.1
236 236
237 237 changeset: 5:024e4e7df376
238 238 user: test
239 239 date: Mon Jan 12 13:46:40 1970 +0000
240 240 summary: 1.2
241 241
242 242 changeset: 6:1e3f6b843bd6
243 243 user: test
244 244 date: Mon Jan 12 13:46:40 1970 +0000
245 245 summary: 1.3
246 246
247 247 changeset: 7:27f57c869697
248 248 user: test
249 249 date: Mon Jan 12 13:46:40 1970 +0000
250 250 summary: 1.3m
251 251
252 252 changeset: 8:088ff9d6e1e1
253 253 tag: tip
254 254 parent: 3:ac69c658229d
255 255 user: test
256 256 date: Mon Jan 12 13:46:40 1970 +0000
257 257 summary: 0.3m
258 258
259 259 ====== Outgoing -R does-not-exist.hg vs partial2 in partial
260 260 abort: No such file or directory: ../does-not-exist.hg
261 261 ====== Direct clone from bundle (all-history)
262 262 requesting all changes
263 263 adding changesets
264 264 adding manifests
265 265 adding file changes
266 266 added 9 changesets with 7 changes to 4 files (+1 heads)
267 267 updating to branch default
268 268 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
269 269 changeset: 8:088ff9d6e1e1
270 270 tag: tip
271 271 parent: 3:ac69c658229d
272 272 user: test
273 273 date: Mon Jan 12 13:46:40 1970 +0000
274 274 summary: 0.3m
275 275
276 276 changeset: 7:27f57c869697
277 277 user: test
278 278 date: Mon Jan 12 13:46:40 1970 +0000
279 279 summary: 1.3m
280 280
281 281 ====== Unbundle incremental bundles into fresh empty in one go
282 282 1 changesets found
283 283 1 changesets found
284 284 adding changesets
285 285 adding manifests
286 286 adding file changes
287 287 added 1 changesets with 1 changes to 1 files
288 288 adding changesets
289 289 adding manifests
290 290 adding file changes
291 291 added 1 changesets with 1 changes to 1 files
292 292 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
293 293 ====== test for 540d1059c802
294 294 updating to branch default
295 295 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
296 296 searching for changes
297 297 1 changesets found
298 298 comparing with ../bundle.hg
299 299 searching for changes
300 300 changeset: 2:ed1b79f46b9a
301 301 tag: tip
302 302 parent: 0:bbd179dfa0a7
303 303 user: test
304 304 date: Thu Jan 01 00:00:00 1970 +0000
305 305 summary: change foo
306 306
307 307 ===== test that verify bundle does not traceback
308 308 abort: 00changelog.i@bbd179dfa0a7: unknown parent!
309 309 abort: cannot verify bundle or remote repos
310 310 checking changesets
311 311 checking manifests
312 312 crosschecking files in changesets and manifests
313 313 checking files
314 314 2 files, 2 changesets, 2 total revisions
315 315 ====== diff against bundle
316 316 diff -r 088ff9d6e1e1 anotherfile
317 317 --- a/anotherfile Mon Jan 12 13:46:40 1970 +0000
318 318 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
319 319 @@ -1,4 +0,0 @@
320 320 -0
321 321 -1
322 322 -2
323 323 -3
324 324 ====== bundle single branch
325 325 adding a
326 326 adding b
327 327 adding b1
328 328 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
329 329 adding c
330 330 created new head
331 331 adding c1
332 332 == bundling via incoming
333 333 comparing with .
334 334 searching for changes
335 335 d2ae7f538514cd87c17547b0de4cea71fe1af9fb
336 336 5ece8e77363e2b5269e27c66828b72da29e4341a
337 337 == bundling
338 338 searching for changes
339 339 common changesets up to c0025332f9ed
340 340 2 changesets found
341 341 list of changesets:
342 342 d2ae7f538514cd87c17547b0de4cea71fe1af9fb
343 343 5ece8e77363e2b5269e27c66828b72da29e4341a
344 bundle changes: 0 chunks
345 bundle changes: 1 chunks
346 bundle changes: 2 chunks
347 bundle changes: 3 chunks
348 bundle changes: 4 chunks
349 bundle changes: 5 chunks
350 bundle changes: 6 chunks
351 bundle manifests: 0 chunks
352 bundle manifests: 1 chunks
353 bundle manifests: 2 chunks
354 bundle manifests: 3 chunks
355 bundle manifests: 4 chunks
356 bundle manifests: 5 chunks
357 bundle manifests: 6 chunks
358 bundle files: b 0 chunks
359 bundle files: b 1 chunks
360 bundle files: b 2 chunks
361 bundle files: b 3 chunks
362 bundle files: b1 4 chunks
363 bundle files: b1 5 chunks
364 bundle files: b1 6 chunks
365 bundle files: b1 7 chunks
344 bundling changes: 0 chunks
345 bundling changes: 1 chunks
346 bundling changes: 2 chunks
347 bundling changes: 3 chunks
348 bundling changes: 4 chunks
349 bundling changes: 5 chunks
350 bundling changes: 6 chunks
351 bundling manifests: 0 chunks
352 bundling manifests: 1 chunks
353 bundling manifests: 2 chunks
354 bundling manifests: 3 chunks
355 bundling manifests: 4 chunks
356 bundling manifests: 5 chunks
357 bundling manifests: 6 chunks
358 bundling files: b 0 chunks
359 bundling files: b 1 chunks
360 bundling files: b 2 chunks
361 bundling files: b 3 chunks
362 bundling files: b1 4 chunks
363 bundling files: b1 5 chunks
364 bundling files: b1 6 chunks
365 bundling files: b1 7 chunks
@@ -1,25 +1,25 b''
1 1 % create source repository
2 2 adding a
3 3 adding b
4 4 % clone and pull to break links
5 5 requesting all changes
6 6 adding changesets
7 7 adding manifests
8 8 adding file changes
9 9 added 1 changesets with 2 changes to 2 files
10 10 updating to branch default
11 11 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
12 12 created new head
13 13 % relink
14 14 relinking .hg/store
15 15 collected 5 candidate storage files
16 16 not linkable: 00changelog.i
17 17 not linkable: 00manifest.i
18 18 not linkable: data/b.i
19 19 pruned down to 2 probably relinkable files
20 relink: data/a.i 1/2 files (50.00%)
20 relinking: data/a.i 1/2 files (50.00%)
21 21 not linkable: data/dummy.i
22 22 relinked 1 files (136 bytes reclaimed)
23 23 % check hardlinks
24 24 repo/.hg/store/data/a.i == clone/.hg/store/data/a.i
25 25 repo/.hg/store/data/b.i != clone/.hg/store/data/b.i
@@ -1,268 +1,268 b''
1 1 % first revision, no sub
2 2 adding a
3 3 % add first sub
4 4 adding a
5 5 committing subrepository s
6 6 % add sub sub
7 7 committing subrepository s
8 8 committing subrepository ss
9 9 % bump sub rev
10 10 committing subrepository s
11 11 % leave sub dirty
12 12 committing subrepository s
13 13 changeset: 3:1c833a7a9e3a
14 14 tag: tip
15 15 user: test
16 16 date: Thu Jan 01 00:00:00 1970 +0000
17 17 summary: 4
18 18
19 19 % check caching
20 20 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
21 21 % restore
22 22 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
23 23 path s
24 24 source s
25 25 revision 1c833a7a9e3a4445c711aaf0f012379cd0d4034e
26 26 % new branch for merge tests
27 27 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
28 28 adding t/t
29 29 % 5
30 30 committing subrepository t
31 31 created new head
32 32 % 6
33 33 committing subrepository t
34 34 path s
35 35 source s
36 36 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
37 37 path t
38 38 source t
39 39 revision 6747d179aa9a688023c4b0cad32e4c92bb7f34ad
40 40 % 7
41 41 committing subrepository t
42 42 % 8
43 43 % merge tests
44 44 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
45 45 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
46 46 (branch merge, don't forget to commit)
47 47 path s
48 48 source s
49 49 revision fc627a69481fcbe5f1135069e8a3881c023e4cf5
50 50 path t
51 51 source t
52 52 revision 60ca1237c19474e7a3978b0dc1ca4e6f36d51382
53 53 created new head
54 54 searching for copies back to rev 2
55 55 resolving manifests
56 56 overwrite None partial False
57 57 ancestor 1f14a2e2d3ec local f0d2028bf86d+ remote 1831e14459c4
58 58 .hgsubstate: versions differ -> m
59 59 update: .hgsubstate 1/1 files (100.00%)
60 60 subrepo merge f0d2028bf86d+ 1831e14459c4 1f14a2e2d3ec
61 61 subrepo t: other changed, get t:6747d179aa9a688023c4b0cad32e4c92bb7f34ad:hg
62 62 getting subrepo t
63 63 resolving manifests
64 64 overwrite True partial False
65 65 ancestor 60ca1237c194+ local 60ca1237c194+ remote 6747d179aa9a
66 66 t: remote is newer -> g
67 67 update: t 1/1 files (100.00%)
68 68 getting t
69 69 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
70 70 (branch merge, don't forget to commit)
71 71 path s
72 72 source s
73 73 revision fc627a69481fcbe5f1135069e8a3881c023e4cf5
74 74 path t
75 75 source t
76 76 revision 6747d179aa9a688023c4b0cad32e4c92bb7f34ad
77 77 committing subrepository t
78 78 searching for copies back to rev 2
79 79 resolving manifests
80 80 overwrite None partial False
81 81 ancestor 1831e14459c4 local e45c8b14af55+ remote f94576341bcf
82 82 .hgsubstate: versions differ -> m
83 83 update: .hgsubstate 1/1 files (100.00%)
84 84 subrepo merge e45c8b14af55+ f94576341bcf 1831e14459c4
85 85 subrepo t: both sides changed, merge with t:7af322bc1198a32402fe903e0b7ebcfc5c9bf8f4:hg
86 86 merging subrepo t
87 87 searching for copies back to rev 2
88 88 resolving manifests
89 89 overwrite None partial False
90 90 ancestor 6747d179aa9a local 20a0db6fbf6c+ remote 7af322bc1198
91 91 t: versions differ -> m
92 92 preserving t for resolve of t
93 93 update: t 1/1 files (100.00%)
94 94 picked tool 'internal:merge' for t (binary False symlink False)
95 95 merging t
96 96 my t@20a0db6fbf6c+ other t@7af322bc1198 ancestor t@6747d179aa9a
97 97 warning: conflicts during merge.
98 98 merging t failed!
99 99 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
100 100 use 'hg resolve' to retry unresolved file merges or 'hg update -C' to abandon
101 101 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
102 102 (branch merge, don't forget to commit)
103 103 % should conflict
104 104 <<<<<<< local
105 105 conflict
106 106 =======
107 107 t3
108 108 >>>>>>> other
109 109 % clone
110 110 updating to branch default
111 111 pulling subrepo s from .../sub/t/s
112 112 requesting all changes
113 113 adding changesets
114 114 adding manifests
115 115 adding file changes
116 116 added 4 changesets with 5 changes to 3 files
117 117 pulling subrepo ss from .../sub/t/s/ss
118 118 requesting all changes
119 119 adding changesets
120 120 adding manifests
121 121 adding file changes
122 122 added 1 changesets with 1 changes to 1 files
123 123 pulling subrepo t from .../sub/t/t
124 124 requesting all changes
125 125 adding changesets
126 126 adding manifests
127 127 adding file changes
128 128 added 4 changesets with 4 changes to 1 files (+1 heads)
129 129 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
130 130 path s
131 131 source s
132 132 revision fc627a69481fcbe5f1135069e8a3881c023e4cf5
133 133 path t
134 134 source t
135 135 revision 20a0db6fbf6c3d2836e6519a642ae929bfc67c0e
136 136 % push
137 137 committing subrepository t
138 138 pushing ...sub/t
139 139 pushing ...subrepo ss
140 140 searching for changes
141 141 no changes found
142 142 pushing ...subrepo s
143 143 searching for changes
144 144 no changes found
145 145 pushing ...subrepo t
146 146 searching for changes
147 147 adding changesets
148 148 adding manifests
149 149 adding file changes
150 150 added 1 changesets with 1 changes to 1 files
151 151 searching for changes
152 152 adding changesets
153 153 adding manifests
154 154 adding file changes
155 155 added 1 changesets with 1 changes to 1 files
156 156 % push -f
157 157 committing subrepository s
158 158 abort: push creates new remote heads on branch 'default'!
159 159 pushing ...sub/t
160 160 pushing ...subrepo ss
161 161 searching for changes
162 162 no changes found
163 163 pushing ...subrepo s
164 164 searching for changes
165 165 (did you forget to merge? use push -f to force)
166 166 pushing ...subrepo t
167 167 searching for changes
168 168 no changes found
169 169 searching for changes
170 170 adding changesets
171 171 adding manifests
172 172 adding file changes
173 173 added 1 changesets with 1 changes to 1 files
174 174 pushing ...sub/t
175 175 pushing ...subrepo ss
176 176 searching for changes
177 177 no changes found
178 178 pushing ...subrepo s
179 179 searching for changes
180 180 adding changesets
181 181 adding manifests
182 182 adding file changes
183 183 added 1 changesets with 1 changes to 1 files (+1 heads)
184 184 pushing ...subrepo t
185 185 searching for changes
186 186 no changes found
187 187 searching for changes
188 188 no changes found
189 189 % update
190 190 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
191 191 committing subrepository t
192 192 % pull
193 193 pulling ...sub/t
194 194 searching for changes
195 195 adding changesets
196 196 adding manifests
197 197 adding file changes
198 198 added 1 changesets with 1 changes to 1 files
199 199 (run 'hg update' to get a working copy)
200 200 pulling subrepo t from .../sub/t/t
201 201 searching for changes
202 202 adding changesets
203 203 adding manifests
204 204 adding file changes
205 205 added 1 changesets with 1 changes to 1 files
206 206 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
207 207 blah
208 208 % bogus subrepo path aborts
209 209 abort: missing ] in subrepo source
210 210 % issue 1986
211 211 adding a
212 212 marked working directory as branch br
213 213 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
214 214 adding b
215 215 created new head
216 216 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
217 217 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
218 218 (branch merge, don't forget to commit)
219 219 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
220 220 adding c
221 221 created new head
222 222 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
223 223 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
224 224 (branch merge, don't forget to commit)
225 225 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
226 226 adding .hgsub
227 227 committing subrepository s
228 228 marked working directory as branch br
229 229 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
230 230 adding b
231 231 committing subrepository s
232 232 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
233 233 adding c
234 234 created new head
235 235 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
236 236 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
237 237 (branch merge, don't forget to commit)
238 238 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
239 239 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
240 240 adding d
241 241 committing subrepository s
242 242 created new head
243 243 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
244 244 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
245 245 adding e
246 246 committing subrepository s
247 247 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
248 248 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
249 249 (branch merge, don't forget to commit)
250 250 % test subrepo delete from .hgsubstate
251 251 adding testdelete/nested/foo
252 252 adding testdelete/nested2/foo
253 253 adding testdelete/.hgsub
254 254 committing subrepository nested2
255 255 committing subrepository nested
256 256 nested
257 257 % test repository cloning
258 258 adding nested_absolute/foo
259 259 adding nested_relative/foo2
260 260 adding main/.hgsub
261 261 committing subrepository nested_relative
262 262 committing subrepository nested_absolute
263 263 updating to branch default
264 264 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
265 265 [paths]
266 default = $HGTMP/test-subrepo/sub/mercurial/main/nested_absolute
266 default = $HGTMP/test-subrepo/sub/mercurial/nested_absolute
267 267 [paths]
268 default = $HGTMP/test-subrepo/sub/mercurial/main/nested_relative
268 default = $HGTMP/test-subrepo/sub/mercurial/main/../nested_relative
General Comments 0
You need to be logged in to leave comments. Login now