##// END OF EJS Templates
Merge with stable
Martin Geisler -
r10699:7b0a0d49 merge default
parent child Browse files
Show More
@@ -1,159 +1,159 b''
1 # Mercurial extension to provide 'hg relink' command
1 # Mercurial extension to provide 'hg relink' command
2 #
2 #
3 # Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
3 # Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """recreates hardlinks between repository clones"""
8 """recreates hardlinks between repository clones"""
9
9
10 from mercurial import cmdutil, hg, util
10 from mercurial import cmdutil, hg, util
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12 import os, stat
12 import os, stat
13
13
def relink(ui, repo, origin=None, **opts):
    """recreate hardlinks between two repositories

    When repositories are cloned locally, their data files will be
    hardlinked so that they only use the space of a single repository.

    Unfortunately, subsequent pulls into either repository will break
    hardlinks for any files touched by the new changesets, even if
    both repositories end up pulling the same changes.

    Similarly, passing --rev to "hg clone" will fail to use any
    hardlinks, falling back to a complete copy of the source
    repository.

    This command lets you recreate those hardlinks and reclaim that
    wasted space.

    This repository will be relinked to share space with ORIGIN, which
    must be on the same local disk. If ORIGIN is omitted, looks for
    "default-relink", then "default", in [paths].

    Do not attempt any read operations on this repository while the
    command is running. (Both repositories will be locked against
    writes.)
    """
    # relinking needs samefile/samedevice; platforms without hardlink
    # support do not provide them on the util module
    if not hasattr(util, 'samefile') or not hasattr(util, 'samedevice'):
        raise util.Abort(_('hardlinks are not supported on this system'))
    src = hg.repository(
        cmdutil.remoteui(repo, opts),
        ui.expandpath(origin or 'default-relink', origin or 'default'))
    if not src.local():
        # wrap in _() for translation, consistent with every other
        # abort message in this extension
        raise util.Abort(_('must specify local origin repository'))
    ui.status(_('relinking %s to %s\n') % (src.store.path, repo.store.path))
    # lock both repositories so nothing changes under us while we
    # compare and replace storage files
    locallock = repo.lock()
    try:
        remotelock = src.lock()
        try:
            candidates = sorted(collect(src.store.path, ui))
            targets = prune(candidates, src.store.path, repo.store.path, ui)
            do_relink(src.store.path, repo.store.path, targets, ui)
        finally:
            remotelock.release()
    finally:
        locallock.release()
58
58
def collect(src, ui):
    """Walk the store at src and return candidate storage files.

    Returns a list of (path relative to src, os.stat result) pairs for
    every regular file whose name ends in '.d' or '.i' (revlog data and
    index files).
    """
    seplen = len(os.path.sep)
    candidates = []
    for dirpath, dirnames, filenames in os.walk(src):
        # path of this directory relative to the store root
        relpath = dirpath[len(src) + seplen:]
        for filename in filenames:
            if filename[-2:] not in ('.d', '.i'):
                continue
            st = os.stat(os.path.join(dirpath, filename))
            # skip anything that is not a plain file (e.g. symlinks)
            if not stat.S_ISREG(st.st_mode):
                continue
            candidates.append((os.path.join(relpath, filename), st))

    ui.status(_('collected %d candidate storage files\n') % len(candidates))
    return candidates
74
74
def prune(candidates, src, dst, ui):
    """Filter candidates down to the files that look relinkable.

    A candidate survives when the destination repository has a file of
    the same relative name and size, on the same device, that is not
    already hardlinked to the source.  Aborts if src and dst live on
    different devices.  Returns a list of (relative name, size) pairs.
    """
    def linkfilter(srcpath, dstpath, st):
        # return the stat result when linkable, a falsy value otherwise
        try:
            ts = os.stat(dstpath)
        except OSError:
            # Destination doesn't have this file?
            return False
        if util.samefile(srcpath, dstpath):
            # already hardlinked, nothing to reclaim
            return False
        if not util.samedevice(srcpath, dstpath):
            # No point in continuing
            raise util.Abort(
                _('source and destination are on different devices'))
        if st.st_size != ts.st_size:
            return False
        return st

    targets = []
    for fn, st in candidates:
        srcpath = os.path.join(src, fn)
        tgt = os.path.join(dst, fn)
        ts = linkfilter(srcpath, tgt, st)
        if not ts:
            ui.debug(_('not linkable: %s\n') % fn)
            continue
        targets.append((fn, ts.st_size))

    ui.status(_('pruned down to %d probably relinkable files\n') % len(targets))
    return targets
104
104
105 def do_relink(src, dst, files, ui):
105 def do_relink(src, dst, files, ui):
106 def relinkfile(src, dst):
106 def relinkfile(src, dst):
107 bak = dst + '.bak'
107 bak = dst + '.bak'
108 os.rename(dst, bak)
108 os.rename(dst, bak)
109 try:
109 try:
110 util.os_link(src, dst)
110 util.os_link(src, dst)
111 except OSError:
111 except OSError:
112 os.rename(bak, dst)
112 os.rename(bak, dst)
113 raise
113 raise
114 os.remove(bak)
114 os.remove(bak)
115
115
116 CHUNKLEN = 65536
116 CHUNKLEN = 65536
117 relinked = 0
117 relinked = 0
118 savedbytes = 0
118 savedbytes = 0
119
119
120 pos = 0
120 pos = 0
121 total = len(files)
121 total = len(files)
122 for f, sz in files:
122 for f, sz in files:
123 pos += 1
123 pos += 1
124 source = os.path.join(src, f)
124 source = os.path.join(src, f)
125 tgt = os.path.join(dst, f)
125 tgt = os.path.join(dst, f)
126 # Binary mode, so that read() works correctly, especially on Windows
126 # Binary mode, so that read() works correctly, especially on Windows
127 sfp = file(source, 'rb')
127 sfp = file(source, 'rb')
128 dfp = file(tgt, 'rb')
128 dfp = file(tgt, 'rb')
129 sin = sfp.read(CHUNKLEN)
129 sin = sfp.read(CHUNKLEN)
130 while sin:
130 while sin:
131 din = dfp.read(CHUNKLEN)
131 din = dfp.read(CHUNKLEN)
132 if sin != din:
132 if sin != din:
133 break
133 break
134 sin = sfp.read(CHUNKLEN)
134 sin = sfp.read(CHUNKLEN)
135 sfp.close()
135 sfp.close()
136 dfp.close()
136 dfp.close()
137 if sin:
137 if sin:
138 ui.debug(_('not linkable: %s\n') % f)
138 ui.debug(_('not linkable: %s\n') % f)
139 continue
139 continue
140 try:
140 try:
141 relinkfile(source, tgt)
141 relinkfile(source, tgt)
142 ui.progress(_('relink'), pos, f, _(' files'), total)
142 ui.progress(_('relinking'), pos, f, _(' files'), total)
143 relinked += 1
143 relinked += 1
144 savedbytes += sz
144 savedbytes += sz
145 except OSError, inst:
145 except OSError, inst:
146 ui.warn('%s: %s\n' % (tgt, str(inst)))
146 ui.warn('%s: %s\n' % (tgt, str(inst)))
147
147
148 ui.progress(_('relink'), None, f, _(' files'), total)
148 ui.progress(_('relinking'), None, f, _(' files'), total)
149
149
150 ui.status(_('relinked %d files (%d bytes reclaimed)\n') %
150 ui.status(_('relinked %d files (%d bytes reclaimed)\n') %
151 (relinked, savedbytes))
151 (relinked, savedbytes))
152
152
# command table: registers 'hg relink [ORIGIN]' with no extra options
cmdtable = {
    'relink': (relink, [], _('[ORIGIN]')),
}
@@ -1,2223 +1,2223 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup, subrepo
10 import repo, changegroup, subrepo
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import lock, transaction, store, encoding
12 import lock, transaction, store, encoding
13 import util, extensions, hook, error
13 import util, extensions, hook, error
14 import match as matchmod
14 import match as matchmod
15 import merge as mergemod
15 import merge as mergemod
16 import tags as tagsmod
16 import tags as tagsmod
17 from lock import release
17 from lock import release
18 import weakref, stat, errno, os, time, inspect
18 import weakref, stat, errno, os, time, inspect
19 propertycache = util.propertycache
19 propertycache = util.propertycache
20
20
class localrepository(repo.repository):
    # protocol capabilities advertised to peer repositories
    capabilities = set(('lookup', 'changegroupsubset', 'branchmap'))
    # on-disk requirements this class knows how to open
    supported = set('revlogv1 store fncache shared'.split())
24
24
25 def __init__(self, baseui, path=None, create=0):
25 def __init__(self, baseui, path=None, create=0):
26 repo.repository.__init__(self)
26 repo.repository.__init__(self)
27 self.root = os.path.realpath(path)
27 self.root = os.path.realpath(path)
28 self.path = os.path.join(self.root, ".hg")
28 self.path = os.path.join(self.root, ".hg")
29 self.origroot = path
29 self.origroot = path
30 self.opener = util.opener(self.path)
30 self.opener = util.opener(self.path)
31 self.wopener = util.opener(self.root)
31 self.wopener = util.opener(self.root)
32 self.baseui = baseui
32 self.baseui = baseui
33 self.ui = baseui.copy()
33 self.ui = baseui.copy()
34
34
35 try:
35 try:
36 self.ui.readconfig(self.join("hgrc"), self.root)
36 self.ui.readconfig(self.join("hgrc"), self.root)
37 extensions.loadall(self.ui)
37 extensions.loadall(self.ui)
38 except IOError:
38 except IOError:
39 pass
39 pass
40
40
41 if not os.path.isdir(self.path):
41 if not os.path.isdir(self.path):
42 if create:
42 if create:
43 if not os.path.exists(path):
43 if not os.path.exists(path):
44 os.mkdir(path)
44 os.mkdir(path)
45 os.mkdir(self.path)
45 os.mkdir(self.path)
46 requirements = ["revlogv1"]
46 requirements = ["revlogv1"]
47 if self.ui.configbool('format', 'usestore', True):
47 if self.ui.configbool('format', 'usestore', True):
48 os.mkdir(os.path.join(self.path, "store"))
48 os.mkdir(os.path.join(self.path, "store"))
49 requirements.append("store")
49 requirements.append("store")
50 if self.ui.configbool('format', 'usefncache', True):
50 if self.ui.configbool('format', 'usefncache', True):
51 requirements.append("fncache")
51 requirements.append("fncache")
52 # create an invalid changelog
52 # create an invalid changelog
53 self.opener("00changelog.i", "a").write(
53 self.opener("00changelog.i", "a").write(
54 '\0\0\0\2' # represents revlogv2
54 '\0\0\0\2' # represents revlogv2
55 ' dummy changelog to prevent using the old repo layout'
55 ' dummy changelog to prevent using the old repo layout'
56 )
56 )
57 reqfile = self.opener("requires", "w")
57 reqfile = self.opener("requires", "w")
58 for r in requirements:
58 for r in requirements:
59 reqfile.write("%s\n" % r)
59 reqfile.write("%s\n" % r)
60 reqfile.close()
60 reqfile.close()
61 else:
61 else:
62 raise error.RepoError(_("repository %s not found") % path)
62 raise error.RepoError(_("repository %s not found") % path)
63 elif create:
63 elif create:
64 raise error.RepoError(_("repository %s already exists") % path)
64 raise error.RepoError(_("repository %s already exists") % path)
65 else:
65 else:
66 # find requirements
66 # find requirements
67 requirements = set()
67 requirements = set()
68 try:
68 try:
69 requirements = set(self.opener("requires").read().splitlines())
69 requirements = set(self.opener("requires").read().splitlines())
70 except IOError, inst:
70 except IOError, inst:
71 if inst.errno != errno.ENOENT:
71 if inst.errno != errno.ENOENT:
72 raise
72 raise
73 for r in requirements - self.supported:
73 for r in requirements - self.supported:
74 raise error.RepoError(_("requirement '%s' not supported") % r)
74 raise error.RepoError(_("requirement '%s' not supported") % r)
75
75
76 self.sharedpath = self.path
76 self.sharedpath = self.path
77 try:
77 try:
78 s = os.path.realpath(self.opener("sharedpath").read())
78 s = os.path.realpath(self.opener("sharedpath").read())
79 if not os.path.exists(s):
79 if not os.path.exists(s):
80 raise error.RepoError(
80 raise error.RepoError(
81 _('.hg/sharedpath points to nonexistent directory %s') % s)
81 _('.hg/sharedpath points to nonexistent directory %s') % s)
82 self.sharedpath = s
82 self.sharedpath = s
83 except IOError, inst:
83 except IOError, inst:
84 if inst.errno != errno.ENOENT:
84 if inst.errno != errno.ENOENT:
85 raise
85 raise
86
86
87 self.store = store.store(requirements, self.sharedpath, util.opener)
87 self.store = store.store(requirements, self.sharedpath, util.opener)
88 self.spath = self.store.path
88 self.spath = self.store.path
89 self.sopener = self.store.opener
89 self.sopener = self.store.opener
90 self.sjoin = self.store.join
90 self.sjoin = self.store.join
91 self.opener.createmode = self.store.createmode
91 self.opener.createmode = self.store.createmode
92 self.sopener.options = {}
92 self.sopener.options = {}
93
93
94 # These two define the set of tags for this repository. _tags
94 # These two define the set of tags for this repository. _tags
95 # maps tag name to node; _tagtypes maps tag name to 'global' or
95 # maps tag name to node; _tagtypes maps tag name to 'global' or
96 # 'local'. (Global tags are defined by .hgtags across all
96 # 'local'. (Global tags are defined by .hgtags across all
97 # heads, and local tags are defined in .hg/localtags.) They
97 # heads, and local tags are defined in .hg/localtags.) They
98 # constitute the in-memory cache of tags.
98 # constitute the in-memory cache of tags.
99 self._tags = None
99 self._tags = None
100 self._tagtypes = None
100 self._tagtypes = None
101
101
102 self._branchcache = None # in UTF-8
102 self._branchcache = None # in UTF-8
103 self._branchcachetip = None
103 self._branchcachetip = None
104 self.nodetagscache = None
104 self.nodetagscache = None
105 self.filterpats = {}
105 self.filterpats = {}
106 self._datafilters = {}
106 self._datafilters = {}
107 self._transref = self._lockref = self._wlockref = None
107 self._transref = self._lockref = self._wlockref = None
108
108
109 @propertycache
109 @propertycache
110 def changelog(self):
110 def changelog(self):
111 c = changelog.changelog(self.sopener)
111 c = changelog.changelog(self.sopener)
112 if 'HG_PENDING' in os.environ:
112 if 'HG_PENDING' in os.environ:
113 p = os.environ['HG_PENDING']
113 p = os.environ['HG_PENDING']
114 if p.startswith(self.root):
114 if p.startswith(self.root):
115 c.readpending('00changelog.i.a')
115 c.readpending('00changelog.i.a')
116 self.sopener.options['defversion'] = c.version
116 self.sopener.options['defversion'] = c.version
117 return c
117 return c
118
118
119 @propertycache
119 @propertycache
120 def manifest(self):
120 def manifest(self):
121 return manifest.manifest(self.sopener)
121 return manifest.manifest(self.sopener)
122
122
123 @propertycache
123 @propertycache
124 def dirstate(self):
124 def dirstate(self):
125 return dirstate.dirstate(self.opener, self.ui, self.root)
125 return dirstate.dirstate(self.opener, self.ui, self.root)
126
126
127 def __getitem__(self, changeid):
127 def __getitem__(self, changeid):
128 if changeid is None:
128 if changeid is None:
129 return context.workingctx(self)
129 return context.workingctx(self)
130 return context.changectx(self, changeid)
130 return context.changectx(self, changeid)
131
131
132 def __contains__(self, changeid):
132 def __contains__(self, changeid):
133 try:
133 try:
134 return bool(self.lookup(changeid))
134 return bool(self.lookup(changeid))
135 except error.RepoLookupError:
135 except error.RepoLookupError:
136 return False
136 return False
137
137
138 def __nonzero__(self):
138 def __nonzero__(self):
139 return True
139 return True
140
140
141 def __len__(self):
141 def __len__(self):
142 return len(self.changelog)
142 return len(self.changelog)
143
143
144 def __iter__(self):
144 def __iter__(self):
145 for i in xrange(len(self)):
145 for i in xrange(len(self)):
146 yield i
146 yield i
147
147
148 def url(self):
148 def url(self):
149 return 'file:' + self.root
149 return 'file:' + self.root
150
150
151 def hook(self, name, throw=False, **args):
151 def hook(self, name, throw=False, **args):
152 return hook.hook(self.ui, self, name, throw, **args)
152 return hook.hook(self.ui, self, name, throw, **args)
153
153
154 tag_disallowed = ':\r\n'
154 tag_disallowed = ':\r\n'
155
155
156 def _tag(self, names, node, message, local, user, date, extra={}):
156 def _tag(self, names, node, message, local, user, date, extra={}):
157 if isinstance(names, str):
157 if isinstance(names, str):
158 allchars = names
158 allchars = names
159 names = (names,)
159 names = (names,)
160 else:
160 else:
161 allchars = ''.join(names)
161 allchars = ''.join(names)
162 for c in self.tag_disallowed:
162 for c in self.tag_disallowed:
163 if c in allchars:
163 if c in allchars:
164 raise util.Abort(_('%r cannot be used in a tag name') % c)
164 raise util.Abort(_('%r cannot be used in a tag name') % c)
165
165
166 for name in names:
166 for name in names:
167 self.hook('pretag', throw=True, node=hex(node), tag=name,
167 self.hook('pretag', throw=True, node=hex(node), tag=name,
168 local=local)
168 local=local)
169
169
170 def writetags(fp, names, munge, prevtags):
170 def writetags(fp, names, munge, prevtags):
171 fp.seek(0, 2)
171 fp.seek(0, 2)
172 if prevtags and prevtags[-1] != '\n':
172 if prevtags and prevtags[-1] != '\n':
173 fp.write('\n')
173 fp.write('\n')
174 for name in names:
174 for name in names:
175 m = munge and munge(name) or name
175 m = munge and munge(name) or name
176 if self._tagtypes and name in self._tagtypes:
176 if self._tagtypes and name in self._tagtypes:
177 old = self._tags.get(name, nullid)
177 old = self._tags.get(name, nullid)
178 fp.write('%s %s\n' % (hex(old), m))
178 fp.write('%s %s\n' % (hex(old), m))
179 fp.write('%s %s\n' % (hex(node), m))
179 fp.write('%s %s\n' % (hex(node), m))
180 fp.close()
180 fp.close()
181
181
182 prevtags = ''
182 prevtags = ''
183 if local:
183 if local:
184 try:
184 try:
185 fp = self.opener('localtags', 'r+')
185 fp = self.opener('localtags', 'r+')
186 except IOError:
186 except IOError:
187 fp = self.opener('localtags', 'a')
187 fp = self.opener('localtags', 'a')
188 else:
188 else:
189 prevtags = fp.read()
189 prevtags = fp.read()
190
190
191 # local tags are stored in the current charset
191 # local tags are stored in the current charset
192 writetags(fp, names, None, prevtags)
192 writetags(fp, names, None, prevtags)
193 for name in names:
193 for name in names:
194 self.hook('tag', node=hex(node), tag=name, local=local)
194 self.hook('tag', node=hex(node), tag=name, local=local)
195 return
195 return
196
196
197 try:
197 try:
198 fp = self.wfile('.hgtags', 'rb+')
198 fp = self.wfile('.hgtags', 'rb+')
199 except IOError:
199 except IOError:
200 fp = self.wfile('.hgtags', 'ab')
200 fp = self.wfile('.hgtags', 'ab')
201 else:
201 else:
202 prevtags = fp.read()
202 prevtags = fp.read()
203
203
204 # committed tags are stored in UTF-8
204 # committed tags are stored in UTF-8
205 writetags(fp, names, encoding.fromlocal, prevtags)
205 writetags(fp, names, encoding.fromlocal, prevtags)
206
206
207 if '.hgtags' not in self.dirstate:
207 if '.hgtags' not in self.dirstate:
208 self.add(['.hgtags'])
208 self.add(['.hgtags'])
209
209
210 m = matchmod.exact(self.root, '', ['.hgtags'])
210 m = matchmod.exact(self.root, '', ['.hgtags'])
211 tagnode = self.commit(message, user, date, extra=extra, match=m)
211 tagnode = self.commit(message, user, date, extra=extra, match=m)
212
212
213 for name in names:
213 for name in names:
214 self.hook('tag', node=hex(node), tag=name, local=local)
214 self.hook('tag', node=hex(node), tag=name, local=local)
215
215
216 return tagnode
216 return tagnode
217
217
218 def tag(self, names, node, message, local, user, date):
218 def tag(self, names, node, message, local, user, date):
219 '''tag a revision with one or more symbolic names.
219 '''tag a revision with one or more symbolic names.
220
220
221 names is a list of strings or, when adding a single tag, names may be a
221 names is a list of strings or, when adding a single tag, names may be a
222 string.
222 string.
223
223
224 if local is True, the tags are stored in a per-repository file.
224 if local is True, the tags are stored in a per-repository file.
225 otherwise, they are stored in the .hgtags file, and a new
225 otherwise, they are stored in the .hgtags file, and a new
226 changeset is committed with the change.
226 changeset is committed with the change.
227
227
228 keyword arguments:
228 keyword arguments:
229
229
230 local: whether to store tags in non-version-controlled file
230 local: whether to store tags in non-version-controlled file
231 (default False)
231 (default False)
232
232
233 message: commit message to use if committing
233 message: commit message to use if committing
234
234
235 user: name of user to use if committing
235 user: name of user to use if committing
236
236
237 date: date tuple to use if committing'''
237 date: date tuple to use if committing'''
238
238
239 for x in self.status()[:5]:
239 for x in self.status()[:5]:
240 if '.hgtags' in x:
240 if '.hgtags' in x:
241 raise util.Abort(_('working copy of .hgtags is changed '
241 raise util.Abort(_('working copy of .hgtags is changed '
242 '(please commit .hgtags manually)'))
242 '(please commit .hgtags manually)'))
243
243
244 self.tags() # instantiate the cache
244 self.tags() # instantiate the cache
245 self._tag(names, node, message, local, user, date)
245 self._tag(names, node, message, local, user, date)
246
246
247 def tags(self):
247 def tags(self):
248 '''return a mapping of tag to node'''
248 '''return a mapping of tag to node'''
249 if self._tags is None:
249 if self._tags is None:
250 (self._tags, self._tagtypes) = self._findtags()
250 (self._tags, self._tagtypes) = self._findtags()
251
251
252 return self._tags
252 return self._tags
253
253
254 def _findtags(self):
254 def _findtags(self):
255 '''Do the hard work of finding tags. Return a pair of dicts
255 '''Do the hard work of finding tags. Return a pair of dicts
256 (tags, tagtypes) where tags maps tag name to node, and tagtypes
256 (tags, tagtypes) where tags maps tag name to node, and tagtypes
257 maps tag name to a string like \'global\' or \'local\'.
257 maps tag name to a string like \'global\' or \'local\'.
258 Subclasses or extensions are free to add their own tags, but
258 Subclasses or extensions are free to add their own tags, but
259 should be aware that the returned dicts will be retained for the
259 should be aware that the returned dicts will be retained for the
260 duration of the localrepo object.'''
260 duration of the localrepo object.'''
261
261
262 # XXX what tagtype should subclasses/extensions use? Currently
262 # XXX what tagtype should subclasses/extensions use? Currently
263 # mq and bookmarks add tags, but do not set the tagtype at all.
263 # mq and bookmarks add tags, but do not set the tagtype at all.
264 # Should each extension invent its own tag type? Should there
264 # Should each extension invent its own tag type? Should there
265 # be one tagtype for all such "virtual" tags? Or is the status
265 # be one tagtype for all such "virtual" tags? Or is the status
266 # quo fine?
266 # quo fine?
267
267
268 alltags = {} # map tag name to (node, hist)
268 alltags = {} # map tag name to (node, hist)
269 tagtypes = {}
269 tagtypes = {}
270
270
271 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
271 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
272 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
272 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
273
273
274 # Build the return dicts. Have to re-encode tag names because
274 # Build the return dicts. Have to re-encode tag names because
275 # the tags module always uses UTF-8 (in order not to lose info
275 # the tags module always uses UTF-8 (in order not to lose info
276 # writing to the cache), but the rest of Mercurial wants them in
276 # writing to the cache), but the rest of Mercurial wants them in
277 # local encoding.
277 # local encoding.
278 tags = {}
278 tags = {}
279 for (name, (node, hist)) in alltags.iteritems():
279 for (name, (node, hist)) in alltags.iteritems():
280 if node != nullid:
280 if node != nullid:
281 tags[encoding.tolocal(name)] = node
281 tags[encoding.tolocal(name)] = node
282 tags['tip'] = self.changelog.tip()
282 tags['tip'] = self.changelog.tip()
283 tagtypes = dict([(encoding.tolocal(name), value)
283 tagtypes = dict([(encoding.tolocal(name), value)
284 for (name, value) in tagtypes.iteritems()])
284 for (name, value) in tagtypes.iteritems()])
285 return (tags, tagtypes)
285 return (tags, tagtypes)
286
286
287 def tagtype(self, tagname):
287 def tagtype(self, tagname):
288 '''
288 '''
289 return the type of the given tag. result can be:
289 return the type of the given tag. result can be:
290
290
291 'local' : a local tag
291 'local' : a local tag
292 'global' : a global tag
292 'global' : a global tag
293 None : tag does not exist
293 None : tag does not exist
294 '''
294 '''
295
295
296 self.tags()
296 self.tags()
297
297
298 return self._tagtypes.get(tagname)
298 return self._tagtypes.get(tagname)
299
299
300 def tagslist(self):
300 def tagslist(self):
301 '''return a list of tags ordered by revision'''
301 '''return a list of tags ordered by revision'''
302 l = []
302 l = []
303 for t, n in self.tags().iteritems():
303 for t, n in self.tags().iteritems():
304 try:
304 try:
305 r = self.changelog.rev(n)
305 r = self.changelog.rev(n)
306 except:
306 except:
307 r = -2 # sort to the beginning of the list if unknown
307 r = -2 # sort to the beginning of the list if unknown
308 l.append((r, t, n))
308 l.append((r, t, n))
309 return [(t, n) for r, t, n in sorted(l)]
309 return [(t, n) for r, t, n in sorted(l)]
310
310
311 def nodetags(self, node):
311 def nodetags(self, node):
312 '''return the tags associated with a node'''
312 '''return the tags associated with a node'''
313 if not self.nodetagscache:
313 if not self.nodetagscache:
314 self.nodetagscache = {}
314 self.nodetagscache = {}
315 for t, n in self.tags().iteritems():
315 for t, n in self.tags().iteritems():
316 self.nodetagscache.setdefault(n, []).append(t)
316 self.nodetagscache.setdefault(n, []).append(t)
317 return self.nodetagscache.get(node, [])
317 return self.nodetagscache.get(node, [])
318
318
319 def _branchtags(self, partial, lrev):
319 def _branchtags(self, partial, lrev):
320 # TODO: rename this function?
320 # TODO: rename this function?
321 tiprev = len(self) - 1
321 tiprev = len(self) - 1
322 if lrev != tiprev:
322 if lrev != tiprev:
323 self._updatebranchcache(partial, lrev + 1, tiprev + 1)
323 self._updatebranchcache(partial, lrev + 1, tiprev + 1)
324 self._writebranchcache(partial, self.changelog.tip(), tiprev)
324 self._writebranchcache(partial, self.changelog.tip(), tiprev)
325
325
326 return partial
326 return partial
327
327
328 def branchmap(self):
328 def branchmap(self):
329 '''returns a dictionary {branch: [branchheads]}'''
329 '''returns a dictionary {branch: [branchheads]}'''
330 tip = self.changelog.tip()
330 tip = self.changelog.tip()
331 if self._branchcache is not None and self._branchcachetip == tip:
331 if self._branchcache is not None and self._branchcachetip == tip:
332 return self._branchcache
332 return self._branchcache
333
333
334 oldtip = self._branchcachetip
334 oldtip = self._branchcachetip
335 self._branchcachetip = tip
335 self._branchcachetip = tip
336 if oldtip is None or oldtip not in self.changelog.nodemap:
336 if oldtip is None or oldtip not in self.changelog.nodemap:
337 partial, last, lrev = self._readbranchcache()
337 partial, last, lrev = self._readbranchcache()
338 else:
338 else:
339 lrev = self.changelog.rev(oldtip)
339 lrev = self.changelog.rev(oldtip)
340 partial = self._branchcache
340 partial = self._branchcache
341
341
342 self._branchtags(partial, lrev)
342 self._branchtags(partial, lrev)
343 # this private cache holds all heads (not just tips)
343 # this private cache holds all heads (not just tips)
344 self._branchcache = partial
344 self._branchcache = partial
345
345
346 return self._branchcache
346 return self._branchcache
347
347
348 def branchtags(self):
348 def branchtags(self):
349 '''return a dict where branch names map to the tipmost head of
349 '''return a dict where branch names map to the tipmost head of
350 the branch, open heads come before closed'''
350 the branch, open heads come before closed'''
351 bt = {}
351 bt = {}
352 for bn, heads in self.branchmap().iteritems():
352 for bn, heads in self.branchmap().iteritems():
353 tip = heads[-1]
353 tip = heads[-1]
354 for h in reversed(heads):
354 for h in reversed(heads):
355 if 'close' not in self.changelog.read(h)[5]:
355 if 'close' not in self.changelog.read(h)[5]:
356 tip = h
356 tip = h
357 break
357 break
358 bt[bn] = tip
358 bt[bn] = tip
359 return bt
359 return bt
360
360
361
361
362 def _readbranchcache(self):
362 def _readbranchcache(self):
363 partial = {}
363 partial = {}
364 try:
364 try:
365 f = self.opener("branchheads.cache")
365 f = self.opener("branchheads.cache")
366 lines = f.read().split('\n')
366 lines = f.read().split('\n')
367 f.close()
367 f.close()
368 except (IOError, OSError):
368 except (IOError, OSError):
369 return {}, nullid, nullrev
369 return {}, nullid, nullrev
370
370
371 try:
371 try:
372 last, lrev = lines.pop(0).split(" ", 1)
372 last, lrev = lines.pop(0).split(" ", 1)
373 last, lrev = bin(last), int(lrev)
373 last, lrev = bin(last), int(lrev)
374 if lrev >= len(self) or self[lrev].node() != last:
374 if lrev >= len(self) or self[lrev].node() != last:
375 # invalidate the cache
375 # invalidate the cache
376 raise ValueError('invalidating branch cache (tip differs)')
376 raise ValueError('invalidating branch cache (tip differs)')
377 for l in lines:
377 for l in lines:
378 if not l:
378 if not l:
379 continue
379 continue
380 node, label = l.split(" ", 1)
380 node, label = l.split(" ", 1)
381 partial.setdefault(label.strip(), []).append(bin(node))
381 partial.setdefault(label.strip(), []).append(bin(node))
382 except KeyboardInterrupt:
382 except KeyboardInterrupt:
383 raise
383 raise
384 except Exception, inst:
384 except Exception, inst:
385 if self.ui.debugflag:
385 if self.ui.debugflag:
386 self.ui.warn(str(inst), '\n')
386 self.ui.warn(str(inst), '\n')
387 partial, last, lrev = {}, nullid, nullrev
387 partial, last, lrev = {}, nullid, nullrev
388 return partial, last, lrev
388 return partial, last, lrev
389
389
390 def _writebranchcache(self, branches, tip, tiprev):
390 def _writebranchcache(self, branches, tip, tiprev):
391 try:
391 try:
392 f = self.opener("branchheads.cache", "w", atomictemp=True)
392 f = self.opener("branchheads.cache", "w", atomictemp=True)
393 f.write("%s %s\n" % (hex(tip), tiprev))
393 f.write("%s %s\n" % (hex(tip), tiprev))
394 for label, nodes in branches.iteritems():
394 for label, nodes in branches.iteritems():
395 for node in nodes:
395 for node in nodes:
396 f.write("%s %s\n" % (hex(node), label))
396 f.write("%s %s\n" % (hex(node), label))
397 f.rename()
397 f.rename()
398 except (IOError, OSError):
398 except (IOError, OSError):
399 pass
399 pass
400
400
def _updatebranchcache(self, partial, start, end):
    """Fold revisions [start, end) into the branch-heads dict *partial*.

    Heads that become reachable from newer heads on the same branch are
    dropped; *partial* is updated in place.
    """
    # collect new branch entries
    newbranches = {}
    for rev in xrange(start, end):
        ctx = self[rev]
        newbranches.setdefault(ctx.branch(), []).append(ctx.node())
    # if older branchheads are reachable from new ones, they aren't
    # really branchheads. Note checking parents is insufficient:
    # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
    for branch, newnodes in newbranches.iteritems():
        bheads = partial.setdefault(branch, [])
        bheads.extend(newnodes)
        if len(bheads) < 2:
            continue
        newbheads = []
        # starting from tip means fewer passes over reachable
        while newnodes:
            latest = newnodes.pop()
            if latest not in bheads:
                continue
            # restrict the reachability walk to revisions at or above
            # the oldest current head of this branch
            minbhrev = self[min([self[bh].rev() for bh in bheads])].node()
            reachable = self.changelog.reachable(latest, minbhrev)
            bheads = [b for b in bheads if b not in reachable]
            newbheads.insert(0, latest)
        bheads.extend(newbheads)
        partial[branch] = bheads
427
427
def lookup(self, key):
    """Resolve *key* to a binary changelog node.

    Accepts, in priority order: an integer revision, the symbolic
    names '.', 'null' and 'tip', an exact node/rev string, a tag name,
    a branch name, and finally a unique node-hex prefix.

    Raises error.Abort when the key is an unknown dirstate parent
    (damaged dirstate) and error.RepoLookupError otherwise.
    """
    if isinstance(key, int):
        return self.changelog.node(key)
    if key == '.':
        return self.dirstate.parents()[0]
    if key == 'null':
        return nullid
    if key == 'tip':
        return self.changelog.tip()
    node = self.changelog._match(key)
    if node:
        return node
    if key in self.tags():
        return self.tags()[key]
    if key in self.branchtags():
        return self.branchtags()[key]
    node = self.changelog._partialmatch(key)
    if node:
        return node

    # can't find key, check if it might have come from damaged dirstate
    if key in self.dirstate.parents():
        raise error.Abort(_("working directory has unknown parent '%s'!")
                          % short(key))
    try:
        # show binary nodes as hex in the error message
        if len(key) == 20:
            key = hex(key)
    except:
        pass
    raise error.RepoLookupError(_("unknown revision '%s'") % key)
458
458
def local(self):
    """Return True: this repository object works on the local filesystem."""
    return True
461
461
def join(self, f):
    """Join *f* onto the repository's .hg directory path."""
    return os.path.join(self.path, f)
464
464
def wjoin(self, f):
    """Join *f* onto the working-directory root."""
    return os.path.join(self.root, f)
467
467
def rjoin(self, f):
    """Join *f* onto the working-directory root, normalizing path
    separators with util.pconvert first."""
    return os.path.join(self.root, util.pconvert(f))
470
470
def file(self, f):
    """Return the filelog for tracked file *f*.

    A single leading '/' is stripped so store paths stay relative.
    """
    if f[0] == '/':
        f = f[1:]
    return filelog.filelog(self.sopener, f)
475
475
def changectx(self, changeid):
    """Return the change context for *changeid* (same as repo[changeid])."""
    return self[changeid]
478
478
def parents(self, changeid=None):
    '''get list of changectxs for parents of changeid'''
    return self[changeid].parents()
482
482
def filectx(self, path, changeid=None, fileid=None):
    """changeid can be a changeset revision, node, or tag.
    fileid can be a file revision or node."""
    return context.filectx(self, path, changeid, fileid)
487
487
def getcwd(self):
    """Return the current working directory as seen by the dirstate."""
    return self.dirstate.getcwd()
490
490
def pathto(self, f, cwd=None):
    """Return *f* expressed relative to *cwd* (dirstate delegation)."""
    return self.dirstate.pathto(f, cwd)
493
493
def wfile(self, f, mode='r'):
    """Open working-directory file *f* via the working-dir opener."""
    return self.wopener(f, mode)
496
496
497 def _link(self, f):
497 def _link(self, f):
498 return os.path.islink(self.wjoin(f))
498 return os.path.islink(self.wjoin(f))
499
499
def _filter(self, filter, filename, data):
    """Run *data* for *filename* through the configured filter chain.

    *filter* names a config section ("encode"/"decode"); the first
    pattern that matches *filename* is applied. Compiled pattern lists
    are memoized in self.filterpats.
    """
    if filter not in self.filterpats:
        l = []
        for pat, cmd in self.ui.configitems(filter):
            if cmd == '!':
                continue
            mf = matchmod.match(self.root, '', [pat])
            fn = None
            params = cmd
            # a command prefixed by a registered data-filter name runs
            # in-process instead of through a shell pipe
            for name, filterfn in self._datafilters.iteritems():
                if cmd.startswith(name):
                    fn = filterfn
                    params = cmd[len(name):].lstrip()
                    break
            if not fn:
                fn = lambda s, c, **kwargs: util.filter(s, c)
            # Wrap old filters not supporting keyword arguments.
            # NOTE: bind oldfn as a default argument; a plain closure
            # would late-bind and make every wrapper call the oldfn of
            # the *last* loop iteration.
            if not inspect.getargspec(fn)[2]:
                oldfn = fn
                fn = lambda s, c, oldfn=oldfn, **kwargs: oldfn(s, c)
            l.append((mf, fn, params))
        self.filterpats[filter] = l

    for mf, fn, cmd in self.filterpats[filter]:
        if mf(filename):
            self.ui.debug("filtering %s through %s\n" % (filename, cmd))
            data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
            break

    return data
530
530
def adddatafilter(self, name, filter):
    """Register in-process data filter *filter* under *name*
    (looked up by _filter when a filter command starts with *name*)."""
    self._datafilters[name] = filter
533
533
def wread(self, filename):
    """Read *filename* from the working directory and run it through
    the "encode" filters. Symlinks yield their target string."""
    if self._link(filename):
        data = os.readlink(self.wjoin(filename))
    else:
        data = self.wopener(filename, 'r').read()
    return self._filter("encode", filename, data)
540
540
def wwrite(self, filename, data, flags):
    """Write *data* (after "decode" filtering) to working-dir file
    *filename*, honoring flags 'l' (symlink) and 'x' (executable)."""
    data = self._filter("decode", filename, data)
    # remove any stale file/symlink first; absence is fine
    try:
        os.unlink(self.wjoin(filename))
    except OSError:
        pass
    if 'l' in flags:
        self.wopener.symlink(data, filename)
    else:
        self.wopener(filename, 'w').write(data)
        if 'x' in flags:
            util.set_flags(self.wjoin(filename), False, True)
553
553
def wwritedata(self, filename, data):
    """Return *data* after "decode" filtering, without writing anything."""
    return self._filter("decode", filename, data)
556
556
def transaction(self):
    """Return a (possibly nested) store transaction.

    Saves dirstate and branch as journal files for rollback; raises
    RepoError if an abandoned journal already exists.
    """
    tr = self._transref and self._transref() or None
    if tr and tr.running():
        # reuse the transaction already in progress
        return tr.nest()

    # abort here if the journal already exists
    if os.path.exists(self.sjoin("journal")):
        raise error.RepoError(
            _("abandoned transaction found - run hg recover"))

    # save dirstate for rollback
    try:
        ds = self.opener("dirstate").read()
    except IOError:
        ds = ""
    self.opener("journal.dirstate", "w").write(ds)
    self.opener("journal.branch", "w").write(self.dirstate.branch())

    # on close, journal files become the undo files
    renames = [(self.sjoin("journal"), self.sjoin("undo")),
               (self.join("journal.dirstate"), self.join("undo.dirstate")),
               (self.join("journal.branch"), self.join("undo.branch"))]
    tr = transaction.transaction(self.ui.warn, self.sopener,
                                 self.sjoin("journal"),
                                 aftertrans(renames),
                                 self.store.createmode)
    self._transref = weakref.ref(tr)
    return tr
584
584
def recover(self):
    """Roll back an interrupted transaction, if any.

    Returns True when a journal was found and rolled back, False
    otherwise.
    """
    lock = self.lock()
    try:
        if not os.path.exists(self.sjoin("journal")):
            self.ui.warn(_("no interrupted transaction available\n"))
            return False
        self.ui.status(_("rolling back interrupted transaction\n"))
        transaction.rollback(self.sopener, self.sjoin("journal"),
                             self.ui.warn)
        self.invalidate()
        return True
    finally:
        lock.release()
599
599
def rollback(self):
    """Undo the last committed transaction, restoring dirstate and the
    recorded branch from the undo files."""
    wlock = lock = None
    try:
        wlock = self.wlock()
        lock = self.lock()
        if os.path.exists(self.sjoin("undo")):
            self.ui.status(_("rolling back last transaction\n"))
            transaction.rollback(self.sopener, self.sjoin("undo"),
                                 self.ui.warn)
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            try:
                branch = self.opener("undo.branch").read()
                self.dirstate.setbranch(branch)
            except IOError:
                self.ui.warn(_("Named branch could not be reset, "
                               "current branch still is: %s\n")
                             % encoding.tolocal(self.dirstate.branch()))
            self.invalidate()
            self.dirstate.invalidate()
            self.destroyed()
        else:
            self.ui.warn(_("no rollback information available\n"))
    finally:
        release(lock, wlock)
624
624
def invalidatecaches(self):
    """Drop all in-memory tag and branch caches so they are rebuilt
    on next access."""
    self._tags = None
    self._tagtypes = None
    self.nodetagscache = None
    self._branchcache = None # in UTF-8
    self._branchcachetip = None
631
631
def invalidate(self):
    """Drop the cached changelog/manifest instances and all derived
    caches; they will be reloaded lazily."""
    for attr in ("changelog", "manifest"):
        if attr in self.__dict__:
            delattr(self, attr)
    self.invalidatecaches()
637
637
638 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
638 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
639 try:
639 try:
640 l = lock.lock(lockname, 0, releasefn, desc=desc)
640 l = lock.lock(lockname, 0, releasefn, desc=desc)
641 except error.LockHeld, inst:
641 except error.LockHeld, inst:
642 if not wait:
642 if not wait:
643 raise
643 raise
644 self.ui.warn(_("waiting for lock on %s held by %r\n") %
644 self.ui.warn(_("waiting for lock on %s held by %r\n") %
645 (desc, inst.locker))
645 (desc, inst.locker))
646 # default to 600 seconds timeout
646 # default to 600 seconds timeout
647 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
647 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
648 releasefn, desc=desc)
648 releasefn, desc=desc)
649 if acquirefn:
649 if acquirefn:
650 acquirefn()
650 acquirefn()
651 return l
651 return l
652
652
def lock(self, wait=True):
    '''Lock the repository store (.hg/store) and return a weak reference
    to the lock. Use this before modifying the store (e.g. committing or
    stripping). If you are opening a transaction, get a lock as well.)'''
    existing = self._lockref and self._lockref()
    if existing is not None and existing.held:
        # already locked by this process: bump the recursion count
        existing.lock()
        return existing

    newlock = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
                         _('repository %s') % self.origroot)
    self._lockref = weakref.ref(newlock)
    return newlock
666
666
def wlock(self, wait=True):
    '''Lock the non-store parts of the repository (everything under
    .hg except .hg/store) and return a weak reference to the lock.
    Use this before modifying files in .hg.'''
    existing = self._wlockref and self._wlockref()
    if existing is not None and existing.held:
        # already locked by this process: bump the recursion count
        existing.lock()
        return existing

    newlock = self._lock(self.join("wlock"), wait, self.dirstate.write,
                         self.dirstate.invalidate, _('working directory of %s') %
                         self.origroot)
    self._wlockref = weakref.ref(newlock)
    return newlock
681
681
def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
    """
    commit an individual file as part of a larger transaction

    Appends the file name to *changelist* when it actually changed and
    returns the filelog node to record in the new manifest.
    """

    fname = fctx.path()
    text = fctx.data()
    flog = self.file(fname)
    fparent1 = manifest1.get(fname, nullid)
    fparent2 = fparent2o = manifest2.get(fname, nullid)

    meta = {}
    copy = fctx.renamed()
    if copy and copy[0] != fname:
        # Mark the new revision of this file as a copy of another
        # file. This copy data will effectively act as a parent
        # of this new revision. If this is a merge, the first
        # parent will be the nullid (meaning "look up the copy data")
        # and the second one will be the other parent. For example:
        #
        # 0 --- 1 --- 3 rev1 changes file foo
        # \ / rev2 renames foo to bar and changes it
        # \- 2 -/ rev3 should have bar with all changes and
        # should record that bar descends from
        # bar in rev2 and foo in rev1
        #
        # this allows this merge to succeed:
        #
        # 0 --- 1 --- 3 rev4 reverts the content change from rev2
        # \ / merging rev3 and rev4 should use bar@rev2
        # \- 2 --- 4 as the merge base
        #
        cfname = copy[0]
        crev = manifest1.get(cfname)
        newfparent = fparent2

        if manifest2: # branch merge
            if fparent2 == nullid or crev is None: # copied on remote side
                if cfname in manifest2:
                    crev = manifest2[cfname]
                    newfparent = fparent1

        # find source in nearest ancestor if we've lost track
        if not crev:
            self.ui.debug(" %s: searching for copy revision for %s\n" %
                          (fname, cfname))
            for ancestor in self['.'].ancestors():
                if cfname in ancestor:
                    crev = ancestor[cfname].filenode()
                    break

        self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
        meta["copy"] = cfname
        meta["copyrev"] = hex(crev)
        fparent1, fparent2 = nullid, newfparent
    elif fparent2 != nullid:
        # is one parent an ancestor of the other?
        fpa = flog.ancestor(fparent1, fparent2)
        if fpa == fparent1:
            fparent1, fparent2 = fparent2, nullid
        elif fpa == fparent2:
            fparent2 = nullid

    # is the file changed?
    if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
        changelist.append(fname)
        return flog.add(text, meta, tr, linkrev, fparent1, fparent2)

    # are just the flags changed during merge?
    if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
        changelist.append(fname)

    return fparent1
756
756
def commit(self, text="", user=None, date=None, match=None, force=False,
           editor=False, extra=None):
    """Add a new revision to current repository.

    Revision information is gathered from the working directory,
    match can be used to filter the committed files. If editor is
    supplied, it is called to get a commit message.

    Returns the node of the new changeset, or None when there is
    nothing to commit.
    """
    # avoid a shared mutable default argument; None means "no extra"
    if extra is None:
        extra = {}

    def fail(f, msg):
        raise util.Abort('%s: %s' % (f, msg))

    if not match:
        match = matchmod.always(self.root, '')

    if not force:
        # record visited directories and abort on bad files while
        # walking, so explicit patterns can be validated below
        vdirs = []
        match.dir = vdirs.append
        match.bad = fail

    wlock = self.wlock()
    try:
        p1, p2 = self.dirstate.parents()
        wctx = self[None]

        if (not force and p2 != nullid and match and
            (match.files() or match.anypats())):
            raise util.Abort(_('cannot partially commit a merge '
                               '(do not specify files or patterns)'))

        changes = self.status(match=match, clean=force)
        if force:
            changes[0].extend(changes[6]) # mq may commit unchanged files

        # check subrepos
        subs = []
        removedsubs = set()
        for p in wctx.parents():
            removedsubs.update(s for s in p.substate if match(s))
        for s in wctx.substate:
            removedsubs.discard(s)
            if match(s) and wctx.sub(s).dirty():
                subs.append(s)
        if (subs or removedsubs) and '.hgsubstate' not in changes[0]:
            changes[0].insert(0, '.hgsubstate')

        # make sure all explicit patterns are matched
        if not force and match.files():
            matched = set(changes[0] + changes[1] + changes[2])

            for f in match.files():
                if f == '.' or f in matched or f in wctx.substate:
                    continue
                if f in changes[3]: # missing
                    fail(f, _('file not found!'))
                if f in vdirs: # visited directory
                    d = f + '/'
                    for mf in matched:
                        if mf.startswith(d):
                            break
                    else:
                        fail(f, _("no match under directory!"))
                elif f not in self.dirstate:
                    fail(f, _("file not tracked!"))

        # nothing to commit (and not closing/creating a branch)?
        if (not force and not extra.get("close") and p2 == nullid
            and not (changes[0] or changes[1] or changes[2])
            and self[None].branch() == self['.'].branch()):
            return None

        ms = mergemod.mergestate(self)
        for f in changes[0]:
            if f in ms and ms[f] == 'u':
                raise util.Abort(_("unresolved merge conflicts "
                                   "(see hg resolve)"))

        cctx = context.workingctx(self, (p1, p2), text, user, date,
                                  extra, changes)
        if editor:
            cctx._text = editor(self, cctx, subs)
        edited = (text != cctx._text)

        # commit subs
        if subs or removedsubs:
            state = wctx.substate.copy()
            for s in subs:
                self.ui.status(_('committing subrepository %s\n') % s)
                sr = wctx.sub(s).commit(cctx._text, user, date)
                state[s] = (state[s][0], sr)
            subrepo.writestate(self, state)

        # Save commit message in case this transaction gets rolled back
        # (e.g. by a pretxncommit hook). Leave the content alone on
        # the assumption that the user will use the same editor again.
        msgfile = self.opener('last-message.txt', 'wb')
        msgfile.write(cctx._text)
        msgfile.close()

        try:
            hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
            self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
            ret = self.commitctx(cctx, True)
        except:
            # on any failure, point the user at the saved message
            if edited:
                msgfn = self.pathto(msgfile.name[len(self.root)+1:])
                self.ui.write(
                    _('note: commit message saved in %s\n') % msgfn)
            raise

        # update dirstate and mergestate
        for f in changes[0] + changes[1]:
            self.dirstate.normal(f)
        for f in changes[2]:
            self.dirstate.forget(f)
        self.dirstate.setparents(ret)
        ms.reset()
    finally:
        wlock.release()

    self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
    return ret
878
878
879 def commitctx(self, ctx, error=False):
879 def commitctx(self, ctx, error=False):
880 """Add a new revision to current repository.
880 """Add a new revision to current repository.
881 Revision information is passed via the context argument.
881 Revision information is passed via the context argument.
882 """
882 """
883
883
884 tr = lock = None
884 tr = lock = None
885 removed = ctx.removed()
885 removed = ctx.removed()
886 p1, p2 = ctx.p1(), ctx.p2()
886 p1, p2 = ctx.p1(), ctx.p2()
887 m1 = p1.manifest().copy()
887 m1 = p1.manifest().copy()
888 m2 = p2.manifest()
888 m2 = p2.manifest()
889 user = ctx.user()
889 user = ctx.user()
890
890
891 lock = self.lock()
891 lock = self.lock()
892 try:
892 try:
893 tr = self.transaction()
893 tr = self.transaction()
894 trp = weakref.proxy(tr)
894 trp = weakref.proxy(tr)
895
895
896 # check in files
896 # check in files
897 new = {}
897 new = {}
898 changed = []
898 changed = []
899 linkrev = len(self)
899 linkrev = len(self)
900 for f in sorted(ctx.modified() + ctx.added()):
900 for f in sorted(ctx.modified() + ctx.added()):
901 self.ui.note(f + "\n")
901 self.ui.note(f + "\n")
902 try:
902 try:
903 fctx = ctx[f]
903 fctx = ctx[f]
904 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
904 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
905 changed)
905 changed)
906 m1.set(f, fctx.flags())
906 m1.set(f, fctx.flags())
907 except OSError, inst:
907 except OSError, inst:
908 self.ui.warn(_("trouble committing %s!\n") % f)
908 self.ui.warn(_("trouble committing %s!\n") % f)
909 raise
909 raise
910 except IOError, inst:
910 except IOError, inst:
911 errcode = getattr(inst, 'errno', errno.ENOENT)
911 errcode = getattr(inst, 'errno', errno.ENOENT)
912 if error or errcode and errcode != errno.ENOENT:
912 if error or errcode and errcode != errno.ENOENT:
913 self.ui.warn(_("trouble committing %s!\n") % f)
913 self.ui.warn(_("trouble committing %s!\n") % f)
914 raise
914 raise
915 else:
915 else:
916 removed.append(f)
916 removed.append(f)
917
917
918 # update manifest
918 # update manifest
919 m1.update(new)
919 m1.update(new)
920 removed = [f for f in sorted(removed) if f in m1 or f in m2]
920 removed = [f for f in sorted(removed) if f in m1 or f in m2]
921 drop = [f for f in removed if f in m1]
921 drop = [f for f in removed if f in m1]
922 for f in drop:
922 for f in drop:
923 del m1[f]
923 del m1[f]
924 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
924 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
925 p2.manifestnode(), (new, drop))
925 p2.manifestnode(), (new, drop))
926
926
927 # update changelog
927 # update changelog
928 self.changelog.delayupdate()
928 self.changelog.delayupdate()
929 n = self.changelog.add(mn, changed + removed, ctx.description(),
929 n = self.changelog.add(mn, changed + removed, ctx.description(),
930 trp, p1.node(), p2.node(),
930 trp, p1.node(), p2.node(),
931 user, ctx.date(), ctx.extra().copy())
931 user, ctx.date(), ctx.extra().copy())
932 p = lambda: self.changelog.writepending() and self.root or ""
932 p = lambda: self.changelog.writepending() and self.root or ""
933 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
933 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
934 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
934 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
935 parent2=xp2, pending=p)
935 parent2=xp2, pending=p)
936 self.changelog.finalize(trp)
936 self.changelog.finalize(trp)
937 tr.close()
937 tr.close()
938
938
939 if self._branchcache:
939 if self._branchcache:
940 self.branchtags()
940 self.branchtags()
941 return n
941 return n
942 finally:
942 finally:
943 del tr
943 del tr
944 lock.release()
944 lock.release()
945
945
946 def destroyed(self):
946 def destroyed(self):
947 '''Inform the repository that nodes have been destroyed.
947 '''Inform the repository that nodes have been destroyed.
948 Intended for use by strip and rollback, so there's a common
948 Intended for use by strip and rollback, so there's a common
949 place for anything that has to be done after destroying history.'''
949 place for anything that has to be done after destroying history.'''
950 # XXX it might be nice if we could take the list of destroyed
950 # XXX it might be nice if we could take the list of destroyed
951 # nodes, but I don't see an easy way for rollback() to do that
951 # nodes, but I don't see an easy way for rollback() to do that
952
952
953 # Ensure the persistent tag cache is updated. Doing it now
953 # Ensure the persistent tag cache is updated. Doing it now
954 # means that the tag cache only has to worry about destroyed
954 # means that the tag cache only has to worry about destroyed
955 # heads immediately after a strip/rollback. That in turn
955 # heads immediately after a strip/rollback. That in turn
956 # guarantees that "cachetip == currenttip" (comparing both rev
956 # guarantees that "cachetip == currenttip" (comparing both rev
957 # and node) always means no nodes have been added or destroyed.
957 # and node) always means no nodes have been added or destroyed.
958
958
959 # XXX this is suboptimal when qrefresh'ing: we strip the current
959 # XXX this is suboptimal when qrefresh'ing: we strip the current
960 # head, refresh the tag cache, then immediately add a new head.
960 # head, refresh the tag cache, then immediately add a new head.
961 # But I think doing it this way is necessary for the "instant
961 # But I think doing it this way is necessary for the "instant
962 # tag cache retrieval" case to work.
962 # tag cache retrieval" case to work.
963 self.invalidatecaches()
963 self.invalidatecaches()
964
964
965 def walk(self, match, node=None):
965 def walk(self, match, node=None):
966 '''
966 '''
967 walk recursively through the directory tree or a given
967 walk recursively through the directory tree or a given
968 changeset, finding all files matched by the match
968 changeset, finding all files matched by the match
969 function
969 function
970 '''
970 '''
971 return self[node].walk(match)
971 return self[node].walk(match)
972
972
973 def status(self, node1='.', node2=None, match=None,
973 def status(self, node1='.', node2=None, match=None,
974 ignored=False, clean=False, unknown=False):
974 ignored=False, clean=False, unknown=False):
975 """return status of files between two nodes or node and working directory
975 """return status of files between two nodes or node and working directory
976
976
977 If node1 is None, use the first dirstate parent instead.
977 If node1 is None, use the first dirstate parent instead.
978 If node2 is None, compare node1 with working directory.
978 If node2 is None, compare node1 with working directory.
979 """
979 """
980
980
981 def mfmatches(ctx):
981 def mfmatches(ctx):
982 mf = ctx.manifest().copy()
982 mf = ctx.manifest().copy()
983 for fn in mf.keys():
983 for fn in mf.keys():
984 if not match(fn):
984 if not match(fn):
985 del mf[fn]
985 del mf[fn]
986 return mf
986 return mf
987
987
988 if isinstance(node1, context.changectx):
988 if isinstance(node1, context.changectx):
989 ctx1 = node1
989 ctx1 = node1
990 else:
990 else:
991 ctx1 = self[node1]
991 ctx1 = self[node1]
992 if isinstance(node2, context.changectx):
992 if isinstance(node2, context.changectx):
993 ctx2 = node2
993 ctx2 = node2
994 else:
994 else:
995 ctx2 = self[node2]
995 ctx2 = self[node2]
996
996
997 working = ctx2.rev() is None
997 working = ctx2.rev() is None
998 parentworking = working and ctx1 == self['.']
998 parentworking = working and ctx1 == self['.']
999 match = match or matchmod.always(self.root, self.getcwd())
999 match = match or matchmod.always(self.root, self.getcwd())
1000 listignored, listclean, listunknown = ignored, clean, unknown
1000 listignored, listclean, listunknown = ignored, clean, unknown
1001
1001
1002 # load earliest manifest first for caching reasons
1002 # load earliest manifest first for caching reasons
1003 if not working and ctx2.rev() < ctx1.rev():
1003 if not working and ctx2.rev() < ctx1.rev():
1004 ctx2.manifest()
1004 ctx2.manifest()
1005
1005
1006 if not parentworking:
1006 if not parentworking:
1007 def bad(f, msg):
1007 def bad(f, msg):
1008 if f not in ctx1:
1008 if f not in ctx1:
1009 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1009 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1010 match.bad = bad
1010 match.bad = bad
1011
1011
1012 if working: # we need to scan the working dir
1012 if working: # we need to scan the working dir
1013 subrepos = ctx1.substate.keys()
1013 subrepos = ctx1.substate.keys()
1014 s = self.dirstate.status(match, subrepos, listignored,
1014 s = self.dirstate.status(match, subrepos, listignored,
1015 listclean, listunknown)
1015 listclean, listunknown)
1016 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1016 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1017
1017
1018 # check for any possibly clean files
1018 # check for any possibly clean files
1019 if parentworking and cmp:
1019 if parentworking and cmp:
1020 fixup = []
1020 fixup = []
1021 # do a full compare of any files that might have changed
1021 # do a full compare of any files that might have changed
1022 for f in sorted(cmp):
1022 for f in sorted(cmp):
1023 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1023 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1024 or ctx1[f].cmp(ctx2[f].data())):
1024 or ctx1[f].cmp(ctx2[f].data())):
1025 modified.append(f)
1025 modified.append(f)
1026 else:
1026 else:
1027 fixup.append(f)
1027 fixup.append(f)
1028
1028
1029 if listclean:
1029 if listclean:
1030 clean += fixup
1030 clean += fixup
1031
1031
1032 # update dirstate for files that are actually clean
1032 # update dirstate for files that are actually clean
1033 if fixup:
1033 if fixup:
1034 try:
1034 try:
1035 # updating the dirstate is optional
1035 # updating the dirstate is optional
1036 # so we don't wait on the lock
1036 # so we don't wait on the lock
1037 wlock = self.wlock(False)
1037 wlock = self.wlock(False)
1038 try:
1038 try:
1039 for f in fixup:
1039 for f in fixup:
1040 self.dirstate.normal(f)
1040 self.dirstate.normal(f)
1041 finally:
1041 finally:
1042 wlock.release()
1042 wlock.release()
1043 except error.LockError:
1043 except error.LockError:
1044 pass
1044 pass
1045
1045
1046 if not parentworking:
1046 if not parentworking:
1047 mf1 = mfmatches(ctx1)
1047 mf1 = mfmatches(ctx1)
1048 if working:
1048 if working:
1049 # we are comparing working dir against non-parent
1049 # we are comparing working dir against non-parent
1050 # generate a pseudo-manifest for the working dir
1050 # generate a pseudo-manifest for the working dir
1051 mf2 = mfmatches(self['.'])
1051 mf2 = mfmatches(self['.'])
1052 for f in cmp + modified + added:
1052 for f in cmp + modified + added:
1053 mf2[f] = None
1053 mf2[f] = None
1054 mf2.set(f, ctx2.flags(f))
1054 mf2.set(f, ctx2.flags(f))
1055 for f in removed:
1055 for f in removed:
1056 if f in mf2:
1056 if f in mf2:
1057 del mf2[f]
1057 del mf2[f]
1058 else:
1058 else:
1059 # we are comparing two revisions
1059 # we are comparing two revisions
1060 deleted, unknown, ignored = [], [], []
1060 deleted, unknown, ignored = [], [], []
1061 mf2 = mfmatches(ctx2)
1061 mf2 = mfmatches(ctx2)
1062
1062
1063 modified, added, clean = [], [], []
1063 modified, added, clean = [], [], []
1064 for fn in mf2:
1064 for fn in mf2:
1065 if fn in mf1:
1065 if fn in mf1:
1066 if (mf1.flags(fn) != mf2.flags(fn) or
1066 if (mf1.flags(fn) != mf2.flags(fn) or
1067 (mf1[fn] != mf2[fn] and
1067 (mf1[fn] != mf2[fn] and
1068 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1068 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1069 modified.append(fn)
1069 modified.append(fn)
1070 elif listclean:
1070 elif listclean:
1071 clean.append(fn)
1071 clean.append(fn)
1072 del mf1[fn]
1072 del mf1[fn]
1073 else:
1073 else:
1074 added.append(fn)
1074 added.append(fn)
1075 removed = mf1.keys()
1075 removed = mf1.keys()
1076
1076
1077 r = modified, added, removed, deleted, unknown, ignored, clean
1077 r = modified, added, removed, deleted, unknown, ignored, clean
1078 [l.sort() for l in r]
1078 [l.sort() for l in r]
1079 return r
1079 return r
1080
1080
1081 def add(self, list):
1081 def add(self, list):
1082 wlock = self.wlock()
1082 wlock = self.wlock()
1083 try:
1083 try:
1084 rejected = []
1084 rejected = []
1085 for f in list:
1085 for f in list:
1086 p = self.wjoin(f)
1086 p = self.wjoin(f)
1087 try:
1087 try:
1088 st = os.lstat(p)
1088 st = os.lstat(p)
1089 except:
1089 except:
1090 self.ui.warn(_("%s does not exist!\n") % f)
1090 self.ui.warn(_("%s does not exist!\n") % f)
1091 rejected.append(f)
1091 rejected.append(f)
1092 continue
1092 continue
1093 if st.st_size > 10000000:
1093 if st.st_size > 10000000:
1094 self.ui.warn(_("%s: files over 10MB may cause memory and"
1094 self.ui.warn(_("%s: files over 10MB may cause memory and"
1095 " performance problems\n"
1095 " performance problems\n"
1096 "(use 'hg revert %s' to unadd the file)\n")
1096 "(use 'hg revert %s' to unadd the file)\n")
1097 % (f, f))
1097 % (f, f))
1098 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1098 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1099 self.ui.warn(_("%s not added: only files and symlinks "
1099 self.ui.warn(_("%s not added: only files and symlinks "
1100 "supported currently\n") % f)
1100 "supported currently\n") % f)
1101 rejected.append(p)
1101 rejected.append(p)
1102 elif self.dirstate[f] in 'amn':
1102 elif self.dirstate[f] in 'amn':
1103 self.ui.warn(_("%s already tracked!\n") % f)
1103 self.ui.warn(_("%s already tracked!\n") % f)
1104 elif self.dirstate[f] == 'r':
1104 elif self.dirstate[f] == 'r':
1105 self.dirstate.normallookup(f)
1105 self.dirstate.normallookup(f)
1106 else:
1106 else:
1107 self.dirstate.add(f)
1107 self.dirstate.add(f)
1108 return rejected
1108 return rejected
1109 finally:
1109 finally:
1110 wlock.release()
1110 wlock.release()
1111
1111
1112 def forget(self, list):
1112 def forget(self, list):
1113 wlock = self.wlock()
1113 wlock = self.wlock()
1114 try:
1114 try:
1115 for f in list:
1115 for f in list:
1116 if self.dirstate[f] != 'a':
1116 if self.dirstate[f] != 'a':
1117 self.ui.warn(_("%s not added!\n") % f)
1117 self.ui.warn(_("%s not added!\n") % f)
1118 else:
1118 else:
1119 self.dirstate.forget(f)
1119 self.dirstate.forget(f)
1120 finally:
1120 finally:
1121 wlock.release()
1121 wlock.release()
1122
1122
1123 def remove(self, list, unlink=False):
1123 def remove(self, list, unlink=False):
1124 if unlink:
1124 if unlink:
1125 for f in list:
1125 for f in list:
1126 try:
1126 try:
1127 util.unlink(self.wjoin(f))
1127 util.unlink(self.wjoin(f))
1128 except OSError, inst:
1128 except OSError, inst:
1129 if inst.errno != errno.ENOENT:
1129 if inst.errno != errno.ENOENT:
1130 raise
1130 raise
1131 wlock = self.wlock()
1131 wlock = self.wlock()
1132 try:
1132 try:
1133 for f in list:
1133 for f in list:
1134 if unlink and os.path.exists(self.wjoin(f)):
1134 if unlink and os.path.exists(self.wjoin(f)):
1135 self.ui.warn(_("%s still exists!\n") % f)
1135 self.ui.warn(_("%s still exists!\n") % f)
1136 elif self.dirstate[f] == 'a':
1136 elif self.dirstate[f] == 'a':
1137 self.dirstate.forget(f)
1137 self.dirstate.forget(f)
1138 elif f not in self.dirstate:
1138 elif f not in self.dirstate:
1139 self.ui.warn(_("%s not tracked!\n") % f)
1139 self.ui.warn(_("%s not tracked!\n") % f)
1140 else:
1140 else:
1141 self.dirstate.remove(f)
1141 self.dirstate.remove(f)
1142 finally:
1142 finally:
1143 wlock.release()
1143 wlock.release()
1144
1144
1145 def undelete(self, list):
1145 def undelete(self, list):
1146 manifests = [self.manifest.read(self.changelog.read(p)[0])
1146 manifests = [self.manifest.read(self.changelog.read(p)[0])
1147 for p in self.dirstate.parents() if p != nullid]
1147 for p in self.dirstate.parents() if p != nullid]
1148 wlock = self.wlock()
1148 wlock = self.wlock()
1149 try:
1149 try:
1150 for f in list:
1150 for f in list:
1151 if self.dirstate[f] != 'r':
1151 if self.dirstate[f] != 'r':
1152 self.ui.warn(_("%s not removed!\n") % f)
1152 self.ui.warn(_("%s not removed!\n") % f)
1153 else:
1153 else:
1154 m = f in manifests[0] and manifests[0] or manifests[1]
1154 m = f in manifests[0] and manifests[0] or manifests[1]
1155 t = self.file(f).read(m[f])
1155 t = self.file(f).read(m[f])
1156 self.wwrite(f, t, m.flags(f))
1156 self.wwrite(f, t, m.flags(f))
1157 self.dirstate.normal(f)
1157 self.dirstate.normal(f)
1158 finally:
1158 finally:
1159 wlock.release()
1159 wlock.release()
1160
1160
1161 def copy(self, source, dest):
1161 def copy(self, source, dest):
1162 p = self.wjoin(dest)
1162 p = self.wjoin(dest)
1163 if not (os.path.exists(p) or os.path.islink(p)):
1163 if not (os.path.exists(p) or os.path.islink(p)):
1164 self.ui.warn(_("%s does not exist!\n") % dest)
1164 self.ui.warn(_("%s does not exist!\n") % dest)
1165 elif not (os.path.isfile(p) or os.path.islink(p)):
1165 elif not (os.path.isfile(p) or os.path.islink(p)):
1166 self.ui.warn(_("copy failed: %s is not a file or a "
1166 self.ui.warn(_("copy failed: %s is not a file or a "
1167 "symbolic link\n") % dest)
1167 "symbolic link\n") % dest)
1168 else:
1168 else:
1169 wlock = self.wlock()
1169 wlock = self.wlock()
1170 try:
1170 try:
1171 if self.dirstate[dest] in '?r':
1171 if self.dirstate[dest] in '?r':
1172 self.dirstate.add(dest)
1172 self.dirstate.add(dest)
1173 self.dirstate.copy(source, dest)
1173 self.dirstate.copy(source, dest)
1174 finally:
1174 finally:
1175 wlock.release()
1175 wlock.release()
1176
1176
1177 def heads(self, start=None):
1177 def heads(self, start=None):
1178 heads = self.changelog.heads(start)
1178 heads = self.changelog.heads(start)
1179 # sort the output in rev descending order
1179 # sort the output in rev descending order
1180 heads = [(-self.changelog.rev(h), h) for h in heads]
1180 heads = [(-self.changelog.rev(h), h) for h in heads]
1181 return [n for (r, n) in sorted(heads)]
1181 return [n for (r, n) in sorted(heads)]
1182
1182
1183 def branchheads(self, branch=None, start=None, closed=False):
1183 def branchheads(self, branch=None, start=None, closed=False):
1184 '''return a (possibly filtered) list of heads for the given branch
1184 '''return a (possibly filtered) list of heads for the given branch
1185
1185
1186 Heads are returned in topological order, from newest to oldest.
1186 Heads are returned in topological order, from newest to oldest.
1187 If branch is None, use the dirstate branch.
1187 If branch is None, use the dirstate branch.
1188 If start is not None, return only heads reachable from start.
1188 If start is not None, return only heads reachable from start.
1189 If closed is True, return heads that are marked as closed as well.
1189 If closed is True, return heads that are marked as closed as well.
1190 '''
1190 '''
1191 if branch is None:
1191 if branch is None:
1192 branch = self[None].branch()
1192 branch = self[None].branch()
1193 branches = self.branchmap()
1193 branches = self.branchmap()
1194 if branch not in branches:
1194 if branch not in branches:
1195 return []
1195 return []
1196 # the cache returns heads ordered lowest to highest
1196 # the cache returns heads ordered lowest to highest
1197 bheads = list(reversed(branches[branch]))
1197 bheads = list(reversed(branches[branch]))
1198 if start is not None:
1198 if start is not None:
1199 # filter out the heads that cannot be reached from startrev
1199 # filter out the heads that cannot be reached from startrev
1200 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1200 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1201 bheads = [h for h in bheads if h in fbheads]
1201 bheads = [h for h in bheads if h in fbheads]
1202 if not closed:
1202 if not closed:
1203 bheads = [h for h in bheads if
1203 bheads = [h for h in bheads if
1204 ('close' not in self.changelog.read(h)[5])]
1204 ('close' not in self.changelog.read(h)[5])]
1205 return bheads
1205 return bheads
1206
1206
1207 def branches(self, nodes):
1207 def branches(self, nodes):
1208 if not nodes:
1208 if not nodes:
1209 nodes = [self.changelog.tip()]
1209 nodes = [self.changelog.tip()]
1210 b = []
1210 b = []
1211 for n in nodes:
1211 for n in nodes:
1212 t = n
1212 t = n
1213 while 1:
1213 while 1:
1214 p = self.changelog.parents(n)
1214 p = self.changelog.parents(n)
1215 if p[1] != nullid or p[0] == nullid:
1215 if p[1] != nullid or p[0] == nullid:
1216 b.append((t, n, p[0], p[1]))
1216 b.append((t, n, p[0], p[1]))
1217 break
1217 break
1218 n = p[0]
1218 n = p[0]
1219 return b
1219 return b
1220
1220
1221 def between(self, pairs):
1221 def between(self, pairs):
1222 r = []
1222 r = []
1223
1223
1224 for top, bottom in pairs:
1224 for top, bottom in pairs:
1225 n, l, i = top, [], 0
1225 n, l, i = top, [], 0
1226 f = 1
1226 f = 1
1227
1227
1228 while n != bottom and n != nullid:
1228 while n != bottom and n != nullid:
1229 p = self.changelog.parents(n)[0]
1229 p = self.changelog.parents(n)[0]
1230 if i == f:
1230 if i == f:
1231 l.append(n)
1231 l.append(n)
1232 f = f * 2
1232 f = f * 2
1233 n = p
1233 n = p
1234 i += 1
1234 i += 1
1235
1235
1236 r.append(l)
1236 r.append(l)
1237
1237
1238 return r
1238 return r
1239
1239
1240 def findincoming(self, remote, base=None, heads=None, force=False):
1240 def findincoming(self, remote, base=None, heads=None, force=False):
1241 """Return list of roots of the subsets of missing nodes from remote
1241 """Return list of roots of the subsets of missing nodes from remote
1242
1242
1243 If base dict is specified, assume that these nodes and their parents
1243 If base dict is specified, assume that these nodes and their parents
1244 exist on the remote side and that no child of a node of base exists
1244 exist on the remote side and that no child of a node of base exists
1245 in both remote and self.
1245 in both remote and self.
1246 Furthermore base will be updated to include the nodes that exists
1246 Furthermore base will be updated to include the nodes that exists
1247 in self and remote but no children exists in self and remote.
1247 in self and remote but no children exists in self and remote.
1248 If a list of heads is specified, return only nodes which are heads
1248 If a list of heads is specified, return only nodes which are heads
1249 or ancestors of these heads.
1249 or ancestors of these heads.
1250
1250
1251 All the ancestors of base are in self and in remote.
1251 All the ancestors of base are in self and in remote.
1252 All the descendants of the list returned are missing in self.
1252 All the descendants of the list returned are missing in self.
1253 (and so we know that the rest of the nodes are missing in remote, see
1253 (and so we know that the rest of the nodes are missing in remote, see
1254 outgoing)
1254 outgoing)
1255 """
1255 """
1256 return self.findcommonincoming(remote, base, heads, force)[1]
1256 return self.findcommonincoming(remote, base, heads, force)[1]
1257
1257
1258 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1258 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1259 """Return a tuple (common, missing roots, heads) used to identify
1259 """Return a tuple (common, missing roots, heads) used to identify
1260 missing nodes from remote.
1260 missing nodes from remote.
1261
1261
1262 If base dict is specified, assume that these nodes and their parents
1262 If base dict is specified, assume that these nodes and their parents
1263 exist on the remote side and that no child of a node of base exists
1263 exist on the remote side and that no child of a node of base exists
1264 in both remote and self.
1264 in both remote and self.
1265 Furthermore base will be updated to include the nodes that exists
1265 Furthermore base will be updated to include the nodes that exists
1266 in self and remote but no children exists in self and remote.
1266 in self and remote but no children exists in self and remote.
1267 If a list of heads is specified, return only nodes which are heads
1267 If a list of heads is specified, return only nodes which are heads
1268 or ancestors of these heads.
1268 or ancestors of these heads.
1269
1269
1270 All the ancestors of base are in self and in remote.
1270 All the ancestors of base are in self and in remote.
1271 """
1271 """
1272 m = self.changelog.nodemap
1272 m = self.changelog.nodemap
1273 search = []
1273 search = []
1274 fetch = set()
1274 fetch = set()
1275 seen = set()
1275 seen = set()
1276 seenbranch = set()
1276 seenbranch = set()
1277 if base is None:
1277 if base is None:
1278 base = {}
1278 base = {}
1279
1279
1280 if not heads:
1280 if not heads:
1281 heads = remote.heads()
1281 heads = remote.heads()
1282
1282
1283 if self.changelog.tip() == nullid:
1283 if self.changelog.tip() == nullid:
1284 base[nullid] = 1
1284 base[nullid] = 1
1285 if heads != [nullid]:
1285 if heads != [nullid]:
1286 return [nullid], [nullid], list(heads)
1286 return [nullid], [nullid], list(heads)
1287 return [nullid], [], []
1287 return [nullid], [], []
1288
1288
1289 # assume we're closer to the tip than the root
1289 # assume we're closer to the tip than the root
1290 # and start by examining the heads
1290 # and start by examining the heads
1291 self.ui.status(_("searching for changes\n"))
1291 self.ui.status(_("searching for changes\n"))
1292
1292
1293 unknown = []
1293 unknown = []
1294 for h in heads:
1294 for h in heads:
1295 if h not in m:
1295 if h not in m:
1296 unknown.append(h)
1296 unknown.append(h)
1297 else:
1297 else:
1298 base[h] = 1
1298 base[h] = 1
1299
1299
1300 heads = unknown
1300 heads = unknown
1301 if not unknown:
1301 if not unknown:
1302 return base.keys(), [], []
1302 return base.keys(), [], []
1303
1303
1304 req = set(unknown)
1304 req = set(unknown)
1305 reqcnt = 0
1305 reqcnt = 0
1306
1306
1307 # search through remote branches
1307 # search through remote branches
1308 # a 'branch' here is a linear segment of history, with four parts:
1308 # a 'branch' here is a linear segment of history, with four parts:
1309 # head, root, first parent, second parent
1309 # head, root, first parent, second parent
1310 # (a branch always has two parents (or none) by definition)
1310 # (a branch always has two parents (or none) by definition)
1311 unknown = remote.branches(unknown)
1311 unknown = remote.branches(unknown)
1312 while unknown:
1312 while unknown:
1313 r = []
1313 r = []
1314 while unknown:
1314 while unknown:
1315 n = unknown.pop(0)
1315 n = unknown.pop(0)
1316 if n[0] in seen:
1316 if n[0] in seen:
1317 continue
1317 continue
1318
1318
1319 self.ui.debug("examining %s:%s\n"
1319 self.ui.debug("examining %s:%s\n"
1320 % (short(n[0]), short(n[1])))
1320 % (short(n[0]), short(n[1])))
1321 if n[0] == nullid: # found the end of the branch
1321 if n[0] == nullid: # found the end of the branch
1322 pass
1322 pass
1323 elif n in seenbranch:
1323 elif n in seenbranch:
1324 self.ui.debug("branch already found\n")
1324 self.ui.debug("branch already found\n")
1325 continue
1325 continue
1326 elif n[1] and n[1] in m: # do we know the base?
1326 elif n[1] and n[1] in m: # do we know the base?
1327 self.ui.debug("found incomplete branch %s:%s\n"
1327 self.ui.debug("found incomplete branch %s:%s\n"
1328 % (short(n[0]), short(n[1])))
1328 % (short(n[0]), short(n[1])))
1329 search.append(n[0:2]) # schedule branch range for scanning
1329 search.append(n[0:2]) # schedule branch range for scanning
1330 seenbranch.add(n)
1330 seenbranch.add(n)
1331 else:
1331 else:
1332 if n[1] not in seen and n[1] not in fetch:
1332 if n[1] not in seen and n[1] not in fetch:
1333 if n[2] in m and n[3] in m:
1333 if n[2] in m and n[3] in m:
1334 self.ui.debug("found new changeset %s\n" %
1334 self.ui.debug("found new changeset %s\n" %
1335 short(n[1]))
1335 short(n[1]))
1336 fetch.add(n[1]) # earliest unknown
1336 fetch.add(n[1]) # earliest unknown
1337 for p in n[2:4]:
1337 for p in n[2:4]:
1338 if p in m:
1338 if p in m:
1339 base[p] = 1 # latest known
1339 base[p] = 1 # latest known
1340
1340
1341 for p in n[2:4]:
1341 for p in n[2:4]:
1342 if p not in req and p not in m:
1342 if p not in req and p not in m:
1343 r.append(p)
1343 r.append(p)
1344 req.add(p)
1344 req.add(p)
1345 seen.add(n[0])
1345 seen.add(n[0])
1346
1346
1347 if r:
1347 if r:
1348 reqcnt += 1
1348 reqcnt += 1
1349 self.ui.progress(_('searching'), reqcnt, unit=_('queries'))
1349 self.ui.progress(_('searching'), reqcnt, unit=_('queries'))
1350 self.ui.debug("request %d: %s\n" %
1350 self.ui.debug("request %d: %s\n" %
1351 (reqcnt, " ".join(map(short, r))))
1351 (reqcnt, " ".join(map(short, r))))
1352 for p in xrange(0, len(r), 10):
1352 for p in xrange(0, len(r), 10):
1353 for b in remote.branches(r[p:p + 10]):
1353 for b in remote.branches(r[p:p + 10]):
1354 self.ui.debug("received %s:%s\n" %
1354 self.ui.debug("received %s:%s\n" %
1355 (short(b[0]), short(b[1])))
1355 (short(b[0]), short(b[1])))
1356 unknown.append(b)
1356 unknown.append(b)
1357
1357
1358 # do binary search on the branches we found
1358 # do binary search on the branches we found
1359 while search:
1359 while search:
1360 newsearch = []
1360 newsearch = []
1361 reqcnt += 1
1361 reqcnt += 1
1362 self.ui.progress(_('searching'), reqcnt, unit=_('queries'))
1362 self.ui.progress(_('searching'), reqcnt, unit=_('queries'))
1363 for n, l in zip(search, remote.between(search)):
1363 for n, l in zip(search, remote.between(search)):
1364 l.append(n[1])
1364 l.append(n[1])
1365 p = n[0]
1365 p = n[0]
1366 f = 1
1366 f = 1
1367 for i in l:
1367 for i in l:
1368 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1368 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1369 if i in m:
1369 if i in m:
1370 if f <= 2:
1370 if f <= 2:
1371 self.ui.debug("found new branch changeset %s\n" %
1371 self.ui.debug("found new branch changeset %s\n" %
1372 short(p))
1372 short(p))
1373 fetch.add(p)
1373 fetch.add(p)
1374 base[i] = 1
1374 base[i] = 1
1375 else:
1375 else:
1376 self.ui.debug("narrowed branch search to %s:%s\n"
1376 self.ui.debug("narrowed branch search to %s:%s\n"
1377 % (short(p), short(i)))
1377 % (short(p), short(i)))
1378 newsearch.append((p, i))
1378 newsearch.append((p, i))
1379 break
1379 break
1380 p, f = i, f * 2
1380 p, f = i, f * 2
1381 search = newsearch
1381 search = newsearch
1382
1382
1383 # sanity check our fetch list
1383 # sanity check our fetch list
1384 for f in fetch:
1384 for f in fetch:
1385 if f in m:
1385 if f in m:
1386 raise error.RepoError(_("already have changeset ")
1386 raise error.RepoError(_("already have changeset ")
1387 + short(f[:4]))
1387 + short(f[:4]))
1388
1388
1389 if base.keys() == [nullid]:
1389 if base.keys() == [nullid]:
1390 if force:
1390 if force:
1391 self.ui.warn(_("warning: repository is unrelated\n"))
1391 self.ui.warn(_("warning: repository is unrelated\n"))
1392 else:
1392 else:
1393 raise util.Abort(_("repository is unrelated"))
1393 raise util.Abort(_("repository is unrelated"))
1394
1394
1395 self.ui.debug("found new changesets starting at " +
1395 self.ui.debug("found new changesets starting at " +
1396 " ".join([short(f) for f in fetch]) + "\n")
1396 " ".join([short(f) for f in fetch]) + "\n")
1397
1397
1398 self.ui.progress(_('searching'), None, unit=_('queries'))
1398 self.ui.progress(_('searching'), None, unit=_('queries'))
1399 self.ui.debug("%d total queries\n" % reqcnt)
1399 self.ui.debug("%d total queries\n" % reqcnt)
1400
1400
1401 return base.keys(), list(fetch), heads
1401 return base.keys(), list(fetch), heads
1402
1402
1403 def findoutgoing(self, remote, base=None, heads=None, force=False):
1403 def findoutgoing(self, remote, base=None, heads=None, force=False):
1404 """Return list of nodes that are roots of subsets not in remote
1404 """Return list of nodes that are roots of subsets not in remote
1405
1405
1406 If base dict is specified, assume that these nodes and their parents
1406 If base dict is specified, assume that these nodes and their parents
1407 exist on the remote side.
1407 exist on the remote side.
1408 If a list of heads is specified, return only nodes which are heads
1408 If a list of heads is specified, return only nodes which are heads
1409 or ancestors of these heads, and return a second element which
1409 or ancestors of these heads, and return a second element which
1410 contains all remote heads which get new children.
1410 contains all remote heads which get new children.
1411 """
1411 """
1412 if base is None:
1412 if base is None:
1413 base = {}
1413 base = {}
1414 self.findincoming(remote, base, heads, force=force)
1414 self.findincoming(remote, base, heads, force=force)
1415
1415
1416 self.ui.debug("common changesets up to "
1416 self.ui.debug("common changesets up to "
1417 + " ".join(map(short, base.keys())) + "\n")
1417 + " ".join(map(short, base.keys())) + "\n")
1418
1418
1419 remain = set(self.changelog.nodemap)
1419 remain = set(self.changelog.nodemap)
1420
1420
1421 # prune everything remote has from the tree
1421 # prune everything remote has from the tree
1422 remain.remove(nullid)
1422 remain.remove(nullid)
1423 remove = base.keys()
1423 remove = base.keys()
1424 while remove:
1424 while remove:
1425 n = remove.pop(0)
1425 n = remove.pop(0)
1426 if n in remain:
1426 if n in remain:
1427 remain.remove(n)
1427 remain.remove(n)
1428 for p in self.changelog.parents(n):
1428 for p in self.changelog.parents(n):
1429 remove.append(p)
1429 remove.append(p)
1430
1430
1431 # find every node whose parents have been pruned
1431 # find every node whose parents have been pruned
1432 subset = []
1432 subset = []
1433 # find every remote head that will get new children
1433 # find every remote head that will get new children
1434 updated_heads = set()
1434 updated_heads = set()
1435 for n in remain:
1435 for n in remain:
1436 p1, p2 = self.changelog.parents(n)
1436 p1, p2 = self.changelog.parents(n)
1437 if p1 not in remain and p2 not in remain:
1437 if p1 not in remain and p2 not in remain:
1438 subset.append(n)
1438 subset.append(n)
1439 if heads:
1439 if heads:
1440 if p1 in heads:
1440 if p1 in heads:
1441 updated_heads.add(p1)
1441 updated_heads.add(p1)
1442 if p2 in heads:
1442 if p2 in heads:
1443 updated_heads.add(p2)
1443 updated_heads.add(p2)
1444
1444
1445 # this is the set of all roots we have to push
1445 # this is the set of all roots we have to push
1446 if heads:
1446 if heads:
1447 return subset, list(updated_heads)
1447 return subset, list(updated_heads)
1448 else:
1448 else:
1449 return subset
1449 return subset
1450
1450
1451 def pull(self, remote, heads=None, force=False):
1451 def pull(self, remote, heads=None, force=False):
1452 lock = self.lock()
1452 lock = self.lock()
1453 try:
1453 try:
1454 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1454 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1455 force=force)
1455 force=force)
1456 if fetch == [nullid]:
1456 if fetch == [nullid]:
1457 self.ui.status(_("requesting all changes\n"))
1457 self.ui.status(_("requesting all changes\n"))
1458
1458
1459 if not fetch:
1459 if not fetch:
1460 self.ui.status(_("no changes found\n"))
1460 self.ui.status(_("no changes found\n"))
1461 return 0
1461 return 0
1462
1462
1463 if heads is None and remote.capable('changegroupsubset'):
1463 if heads is None and remote.capable('changegroupsubset'):
1464 heads = rheads
1464 heads = rheads
1465
1465
1466 if heads is None:
1466 if heads is None:
1467 cg = remote.changegroup(fetch, 'pull')
1467 cg = remote.changegroup(fetch, 'pull')
1468 else:
1468 else:
1469 if not remote.capable('changegroupsubset'):
1469 if not remote.capable('changegroupsubset'):
1470 raise util.Abort(_("Partial pull cannot be done because "
1470 raise util.Abort(_("Partial pull cannot be done because "
1471 "other repository doesn't support "
1471 "other repository doesn't support "
1472 "changegroupsubset."))
1472 "changegroupsubset."))
1473 cg = remote.changegroupsubset(fetch, heads, 'pull')
1473 cg = remote.changegroupsubset(fetch, heads, 'pull')
1474 return self.addchangegroup(cg, 'pull', remote.url())
1474 return self.addchangegroup(cg, 'pull', remote.url())
1475 finally:
1475 finally:
1476 lock.release()
1476 lock.release()
1477
1477
1478 def push(self, remote, force=False, revs=None):
1478 def push(self, remote, force=False, revs=None):
1479 # there are two ways to push to remote repo:
1479 # there are two ways to push to remote repo:
1480 #
1480 #
1481 # addchangegroup assumes local user can lock remote
1481 # addchangegroup assumes local user can lock remote
1482 # repo (local filesystem, old ssh servers).
1482 # repo (local filesystem, old ssh servers).
1483 #
1483 #
1484 # unbundle assumes local user cannot lock remote repo (new ssh
1484 # unbundle assumes local user cannot lock remote repo (new ssh
1485 # servers, http servers).
1485 # servers, http servers).
1486
1486
1487 if remote.capable('unbundle'):
1487 if remote.capable('unbundle'):
1488 return self.push_unbundle(remote, force, revs)
1488 return self.push_unbundle(remote, force, revs)
1489 return self.push_addchangegroup(remote, force, revs)
1489 return self.push_addchangegroup(remote, force, revs)
1490
1490
1491 def prepush(self, remote, force, revs):
1491 def prepush(self, remote, force, revs):
1492 '''Analyze the local and remote repositories and determine which
1492 '''Analyze the local and remote repositories and determine which
1493 changesets need to be pushed to the remote. Return a tuple
1493 changesets need to be pushed to the remote. Return a tuple
1494 (changegroup, remoteheads). changegroup is a readable file-like
1494 (changegroup, remoteheads). changegroup is a readable file-like
1495 object whose read() returns successive changegroup chunks ready to
1495 object whose read() returns successive changegroup chunks ready to
1496 be sent over the wire. remoteheads is the list of remote heads.
1496 be sent over the wire. remoteheads is the list of remote heads.
1497 '''
1497 '''
1498 common = {}
1498 common = {}
1499 remote_heads = remote.heads()
1499 remote_heads = remote.heads()
1500 inc = self.findincoming(remote, common, remote_heads, force=force)
1500 inc = self.findincoming(remote, common, remote_heads, force=force)
1501
1501
1502 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1502 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1503 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1503 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1504
1504
1505 def checkbranch(lheads, rheads, updatelb, branchname=None):
1505 def checkbranch(lheads, rheads, updatelb, branchname=None):
1506 '''
1506 '''
1507 check whether there are more local heads than remote heads on
1507 check whether there are more local heads than remote heads on
1508 a specific branch.
1508 a specific branch.
1509
1509
1510 lheads: local branch heads
1510 lheads: local branch heads
1511 rheads: remote branch heads
1511 rheads: remote branch heads
1512 updatelb: outgoing local branch bases
1512 updatelb: outgoing local branch bases
1513 '''
1513 '''
1514
1514
1515 warn = 0
1515 warn = 0
1516
1516
1517 if not revs and len(lheads) > len(rheads):
1517 if not revs and len(lheads) > len(rheads):
1518 warn = 1
1518 warn = 1
1519 else:
1519 else:
1520 # add local heads involved in the push
1520 # add local heads involved in the push
1521 updatelheads = [self.changelog.heads(x, lheads)
1521 updatelheads = [self.changelog.heads(x, lheads)
1522 for x in updatelb]
1522 for x in updatelb]
1523 newheads = set(sum(updatelheads, [])) & set(lheads)
1523 newheads = set(sum(updatelheads, [])) & set(lheads)
1524
1524
1525 if not newheads:
1525 if not newheads:
1526 return True
1526 return True
1527
1527
1528 # add heads we don't have or that are not involved in the push
1528 # add heads we don't have or that are not involved in the push
1529 for r in rheads:
1529 for r in rheads:
1530 if r in self.changelog.nodemap:
1530 if r in self.changelog.nodemap:
1531 desc = self.changelog.heads(r, heads)
1531 desc = self.changelog.heads(r, heads)
1532 l = [h for h in heads if h in desc]
1532 l = [h for h in heads if h in desc]
1533 if not l:
1533 if not l:
1534 newheads.add(r)
1534 newheads.add(r)
1535 else:
1535 else:
1536 newheads.add(r)
1536 newheads.add(r)
1537 if len(newheads) > len(rheads):
1537 if len(newheads) > len(rheads):
1538 warn = 1
1538 warn = 1
1539
1539
1540 if warn:
1540 if warn:
1541 if branchname is not None:
1541 if branchname is not None:
1542 msg = _("abort: push creates new remote heads"
1542 msg = _("abort: push creates new remote heads"
1543 " on branch '%s'!\n") % branchname
1543 " on branch '%s'!\n") % branchname
1544 else:
1544 else:
1545 msg = _("abort: push creates new remote heads!\n")
1545 msg = _("abort: push creates new remote heads!\n")
1546 self.ui.warn(msg)
1546 self.ui.warn(msg)
1547 if len(lheads) > len(rheads):
1547 if len(lheads) > len(rheads):
1548 self.ui.status(_("(did you forget to merge?"
1548 self.ui.status(_("(did you forget to merge?"
1549 " use push -f to force)\n"))
1549 " use push -f to force)\n"))
1550 else:
1550 else:
1551 self.ui.status(_("(you should pull and merge or"
1551 self.ui.status(_("(you should pull and merge or"
1552 " use push -f to force)\n"))
1552 " use push -f to force)\n"))
1553 return False
1553 return False
1554 return True
1554 return True
1555
1555
1556 if not bases:
1556 if not bases:
1557 self.ui.status(_("no changes found\n"))
1557 self.ui.status(_("no changes found\n"))
1558 return None, 1
1558 return None, 1
1559 elif not force:
1559 elif not force:
1560 # Check for each named branch if we're creating new remote heads.
1560 # Check for each named branch if we're creating new remote heads.
1561 # To be a remote head after push, node must be either:
1561 # To be a remote head after push, node must be either:
1562 # - unknown locally
1562 # - unknown locally
1563 # - a local outgoing head descended from update
1563 # - a local outgoing head descended from update
1564 # - a remote head that's known locally and not
1564 # - a remote head that's known locally and not
1565 # ancestral to an outgoing head
1565 # ancestral to an outgoing head
1566 #
1566 #
1567 # New named branches cannot be created without --force.
1567 # New named branches cannot be created without --force.
1568
1568
1569 if remote_heads != [nullid]:
1569 if remote_heads != [nullid]:
1570 if remote.capable('branchmap'):
1570 if remote.capable('branchmap'):
1571 remotebrheads = remote.branchmap()
1571 remotebrheads = remote.branchmap()
1572
1572
1573 if not revs:
1573 if not revs:
1574 localbrheads = self.branchmap()
1574 localbrheads = self.branchmap()
1575 else:
1575 else:
1576 localbrheads = {}
1576 localbrheads = {}
1577 for n in heads:
1577 for n in heads:
1578 branch = self[n].branch()
1578 branch = self[n].branch()
1579 localbrheads.setdefault(branch, []).append(n)
1579 localbrheads.setdefault(branch, []).append(n)
1580
1580
1581 newbranches = list(set(localbrheads) - set(remotebrheads))
1581 newbranches = list(set(localbrheads) - set(remotebrheads))
1582 if newbranches: # new branch requires --force
1582 if newbranches: # new branch requires --force
1583 branchnames = ', '.join("%s" % b for b in newbranches)
1583 branchnames = ', '.join("%s" % b for b in newbranches)
1584 self.ui.warn(_("abort: push creates "
1584 self.ui.warn(_("abort: push creates "
1585 "new remote branches: %s!\n")
1585 "new remote branches: %s!\n")
1586 % branchnames)
1586 % branchnames)
1587 # propose 'push -b .' in the msg too?
1587 # propose 'push -b .' in the msg too?
1588 self.ui.status(_("(use 'hg push -f' to force)\n"))
1588 self.ui.status(_("(use 'hg push -f' to force)\n"))
1589 return None, 0
1589 return None, 0
1590 for branch, lheads in localbrheads.iteritems():
1590 for branch, lheads in localbrheads.iteritems():
1591 if branch in remotebrheads:
1591 if branch in remotebrheads:
1592 rheads = remotebrheads[branch]
1592 rheads = remotebrheads[branch]
1593 if not checkbranch(lheads, rheads, update, branch):
1593 if not checkbranch(lheads, rheads, update, branch):
1594 return None, 0
1594 return None, 0
1595 else:
1595 else:
1596 if not checkbranch(heads, remote_heads, update):
1596 if not checkbranch(heads, remote_heads, update):
1597 return None, 0
1597 return None, 0
1598
1598
1599 if inc:
1599 if inc:
1600 self.ui.warn(_("note: unsynced remote changes!\n"))
1600 self.ui.warn(_("note: unsynced remote changes!\n"))
1601
1601
1602
1602
1603 if revs is None:
1603 if revs is None:
1604 # use the fast path, no race possible on push
1604 # use the fast path, no race possible on push
1605 nodes = self.changelog.findmissing(common.keys())
1605 nodes = self.changelog.findmissing(common.keys())
1606 cg = self._changegroup(nodes, 'push')
1606 cg = self._changegroup(nodes, 'push')
1607 else:
1607 else:
1608 cg = self.changegroupsubset(update, revs, 'push')
1608 cg = self.changegroupsubset(update, revs, 'push')
1609 return cg, remote_heads
1609 return cg, remote_heads
1610
1610
1611 def push_addchangegroup(self, remote, force, revs):
1611 def push_addchangegroup(self, remote, force, revs):
1612 lock = remote.lock()
1612 lock = remote.lock()
1613 try:
1613 try:
1614 ret = self.prepush(remote, force, revs)
1614 ret = self.prepush(remote, force, revs)
1615 if ret[0] is not None:
1615 if ret[0] is not None:
1616 cg, remote_heads = ret
1616 cg, remote_heads = ret
1617 return remote.addchangegroup(cg, 'push', self.url())
1617 return remote.addchangegroup(cg, 'push', self.url())
1618 return ret[1]
1618 return ret[1]
1619 finally:
1619 finally:
1620 lock.release()
1620 lock.release()
1621
1621
1622 def push_unbundle(self, remote, force, revs):
1622 def push_unbundle(self, remote, force, revs):
1623 # local repo finds heads on server, finds out what revs it
1623 # local repo finds heads on server, finds out what revs it
1624 # must push. once revs transferred, if server finds it has
1624 # must push. once revs transferred, if server finds it has
1625 # different heads (someone else won commit/push race), server
1625 # different heads (someone else won commit/push race), server
1626 # aborts.
1626 # aborts.
1627
1627
1628 ret = self.prepush(remote, force, revs)
1628 ret = self.prepush(remote, force, revs)
1629 if ret[0] is not None:
1629 if ret[0] is not None:
1630 cg, remote_heads = ret
1630 cg, remote_heads = ret
1631 if force:
1631 if force:
1632 remote_heads = ['force']
1632 remote_heads = ['force']
1633 return remote.unbundle(cg, remote_heads, 'push')
1633 return remote.unbundle(cg, remote_heads, 'push')
1634 return ret[1]
1634 return ret[1]
1635
1635
1636 def changegroupinfo(self, nodes, source):
1636 def changegroupinfo(self, nodes, source):
1637 if self.ui.verbose or source == 'bundle':
1637 if self.ui.verbose or source == 'bundle':
1638 self.ui.status(_("%d changesets found\n") % len(nodes))
1638 self.ui.status(_("%d changesets found\n") % len(nodes))
1639 if self.ui.debugflag:
1639 if self.ui.debugflag:
1640 self.ui.debug("list of changesets:\n")
1640 self.ui.debug("list of changesets:\n")
1641 for node in nodes:
1641 for node in nodes:
1642 self.ui.debug("%s\n" % hex(node))
1642 self.ui.debug("%s\n" % hex(node))
1643
1643
1644 def changegroupsubset(self, bases, heads, source, extranodes=None):
1644 def changegroupsubset(self, bases, heads, source, extranodes=None):
1645 """Compute a changegroup consisting of all the nodes that are
1645 """Compute a changegroup consisting of all the nodes that are
1646 descendents of any of the bases and ancestors of any of the heads.
1646 descendents of any of the bases and ancestors of any of the heads.
1647 Return a chunkbuffer object whose read() method will return
1647 Return a chunkbuffer object whose read() method will return
1648 successive changegroup chunks.
1648 successive changegroup chunks.
1649
1649
1650 It is fairly complex as determining which filenodes and which
1650 It is fairly complex as determining which filenodes and which
1651 manifest nodes need to be included for the changeset to be complete
1651 manifest nodes need to be included for the changeset to be complete
1652 is non-trivial.
1652 is non-trivial.
1653
1653
1654 Another wrinkle is doing the reverse, figuring out which changeset in
1654 Another wrinkle is doing the reverse, figuring out which changeset in
1655 the changegroup a particular filenode or manifestnode belongs to.
1655 the changegroup a particular filenode or manifestnode belongs to.
1656
1656
1657 The caller can specify some nodes that must be included in the
1657 The caller can specify some nodes that must be included in the
1658 changegroup using the extranodes argument. It should be a dict
1658 changegroup using the extranodes argument. It should be a dict
1659 where the keys are the filenames (or 1 for the manifest), and the
1659 where the keys are the filenames (or 1 for the manifest), and the
1660 values are lists of (node, linknode) tuples, where node is a wanted
1660 values are lists of (node, linknode) tuples, where node is a wanted
1661 node and linknode is the changelog node that should be transmitted as
1661 node and linknode is the changelog node that should be transmitted as
1662 the linkrev.
1662 the linkrev.
1663 """
1663 """
1664
1664
1665 # Set up some initial variables
1665 # Set up some initial variables
1666 # Make it easy to refer to self.changelog
1666 # Make it easy to refer to self.changelog
1667 cl = self.changelog
1667 cl = self.changelog
1668 # msng is short for missing - compute the list of changesets in this
1668 # msng is short for missing - compute the list of changesets in this
1669 # changegroup.
1669 # changegroup.
1670 if not bases:
1670 if not bases:
1671 bases = [nullid]
1671 bases = [nullid]
1672 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1672 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1673
1673
1674 if extranodes is None:
1674 if extranodes is None:
1675 # can we go through the fast path ?
1675 # can we go through the fast path ?
1676 heads.sort()
1676 heads.sort()
1677 allheads = self.heads()
1677 allheads = self.heads()
1678 allheads.sort()
1678 allheads.sort()
1679 if heads == allheads:
1679 if heads == allheads:
1680 return self._changegroup(msng_cl_lst, source)
1680 return self._changegroup(msng_cl_lst, source)
1681
1681
1682 # slow path
1682 # slow path
1683 self.hook('preoutgoing', throw=True, source=source)
1683 self.hook('preoutgoing', throw=True, source=source)
1684
1684
1685 self.changegroupinfo(msng_cl_lst, source)
1685 self.changegroupinfo(msng_cl_lst, source)
1686 # Some bases may turn out to be superfluous, and some heads may be
1686 # Some bases may turn out to be superfluous, and some heads may be
1687 # too. nodesbetween will return the minimal set of bases and heads
1687 # too. nodesbetween will return the minimal set of bases and heads
1688 # necessary to re-create the changegroup.
1688 # necessary to re-create the changegroup.
1689
1689
1690 # Known heads are the list of heads that it is assumed the recipient
1690 # Known heads are the list of heads that it is assumed the recipient
1691 # of this changegroup will know about.
1691 # of this changegroup will know about.
1692 knownheads = set()
1692 knownheads = set()
1693 # We assume that all parents of bases are known heads.
1693 # We assume that all parents of bases are known heads.
1694 for n in bases:
1694 for n in bases:
1695 knownheads.update(cl.parents(n))
1695 knownheads.update(cl.parents(n))
1696 knownheads.discard(nullid)
1696 knownheads.discard(nullid)
1697 knownheads = list(knownheads)
1697 knownheads = list(knownheads)
1698 if knownheads:
1698 if knownheads:
1699 # Now that we know what heads are known, we can compute which
1699 # Now that we know what heads are known, we can compute which
1700 # changesets are known. The recipient must know about all
1700 # changesets are known. The recipient must know about all
1701 # changesets required to reach the known heads from the null
1701 # changesets required to reach the known heads from the null
1702 # changeset.
1702 # changeset.
1703 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1703 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1704 junk = None
1704 junk = None
1705 # Transform the list into a set.
1705 # Transform the list into a set.
1706 has_cl_set = set(has_cl_set)
1706 has_cl_set = set(has_cl_set)
1707 else:
1707 else:
1708 # If there were no known heads, the recipient cannot be assumed to
1708 # If there were no known heads, the recipient cannot be assumed to
1709 # know about any changesets.
1709 # know about any changesets.
1710 has_cl_set = set()
1710 has_cl_set = set()
1711
1711
1712 # Make it easy to refer to self.manifest
1712 # Make it easy to refer to self.manifest
1713 mnfst = self.manifest
1713 mnfst = self.manifest
1714 # We don't know which manifests are missing yet
1714 # We don't know which manifests are missing yet
1715 msng_mnfst_set = {}
1715 msng_mnfst_set = {}
1716 # Nor do we know which filenodes are missing.
1716 # Nor do we know which filenodes are missing.
1717 msng_filenode_set = {}
1717 msng_filenode_set = {}
1718
1718
1719 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1719 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1720 junk = None
1720 junk = None
1721
1721
1722 # A changeset always belongs to itself, so the changenode lookup
1722 # A changeset always belongs to itself, so the changenode lookup
1723 # function for a changenode is identity.
1723 # function for a changenode is identity.
1724 def identity(x):
1724 def identity(x):
1725 return x
1725 return x
1726
1726
1727 # If we determine that a particular file or manifest node must be a
1727 # If we determine that a particular file or manifest node must be a
1728 # node that the recipient of the changegroup will already have, we can
1728 # node that the recipient of the changegroup will already have, we can
1729 # also assume the recipient will have all the parents. This function
1729 # also assume the recipient will have all the parents. This function
1730 # prunes them from the set of missing nodes.
1730 # prunes them from the set of missing nodes.
1731 def prune_parents(revlog, hasset, msngset):
1731 def prune_parents(revlog, hasset, msngset):
1732 for r in revlog.ancestors(*[revlog.rev(n) for n in hasset]):
1732 for r in revlog.ancestors(*[revlog.rev(n) for n in hasset]):
1733 msngset.pop(revlog.node(r), None)
1733 msngset.pop(revlog.node(r), None)
1734
1734
1735 # Use the information collected in collect_manifests_and_files to say
1735 # Use the information collected in collect_manifests_and_files to say
1736 # which changenode any manifestnode belongs to.
1736 # which changenode any manifestnode belongs to.
1737 def lookup_manifest_link(mnfstnode):
1737 def lookup_manifest_link(mnfstnode):
1738 return msng_mnfst_set[mnfstnode]
1738 return msng_mnfst_set[mnfstnode]
1739
1739
1740 # A function generating function that sets up the initial environment
1740 # A function generating function that sets up the initial environment
1741 # the inner function.
1741 # the inner function.
1742 def filenode_collector(changedfiles):
1742 def filenode_collector(changedfiles):
1743 # This gathers information from each manifestnode included in the
1743 # This gathers information from each manifestnode included in the
1744 # changegroup about which filenodes the manifest node references
1744 # changegroup about which filenodes the manifest node references
1745 # so we can include those in the changegroup too.
1745 # so we can include those in the changegroup too.
1746 #
1746 #
1747 # It also remembers which changenode each filenode belongs to. It
1747 # It also remembers which changenode each filenode belongs to. It
1748 # does this by assuming the a filenode belongs to the changenode
1748 # does this by assuming the a filenode belongs to the changenode
1749 # the first manifest that references it belongs to.
1749 # the first manifest that references it belongs to.
1750 def collect_msng_filenodes(mnfstnode):
1750 def collect_msng_filenodes(mnfstnode):
1751 r = mnfst.rev(mnfstnode)
1751 r = mnfst.rev(mnfstnode)
1752 if r - 1 in mnfst.parentrevs(r):
1752 if r - 1 in mnfst.parentrevs(r):
1753 # If the previous rev is one of the parents,
1753 # If the previous rev is one of the parents,
1754 # we only need to see a diff.
1754 # we only need to see a diff.
1755 deltamf = mnfst.readdelta(mnfstnode)
1755 deltamf = mnfst.readdelta(mnfstnode)
1756 # For each line in the delta
1756 # For each line in the delta
1757 for f, fnode in deltamf.iteritems():
1757 for f, fnode in deltamf.iteritems():
1758 f = changedfiles.get(f, None)
1758 f = changedfiles.get(f, None)
1759 # And if the file is in the list of files we care
1759 # And if the file is in the list of files we care
1760 # about.
1760 # about.
1761 if f is not None:
1761 if f is not None:
1762 # Get the changenode this manifest belongs to
1762 # Get the changenode this manifest belongs to
1763 clnode = msng_mnfst_set[mnfstnode]
1763 clnode = msng_mnfst_set[mnfstnode]
1764 # Create the set of filenodes for the file if
1764 # Create the set of filenodes for the file if
1765 # there isn't one already.
1765 # there isn't one already.
1766 ndset = msng_filenode_set.setdefault(f, {})
1766 ndset = msng_filenode_set.setdefault(f, {})
1767 # And set the filenode's changelog node to the
1767 # And set the filenode's changelog node to the
1768 # manifest's if it hasn't been set already.
1768 # manifest's if it hasn't been set already.
1769 ndset.setdefault(fnode, clnode)
1769 ndset.setdefault(fnode, clnode)
1770 else:
1770 else:
1771 # Otherwise we need a full manifest.
1771 # Otherwise we need a full manifest.
1772 m = mnfst.read(mnfstnode)
1772 m = mnfst.read(mnfstnode)
1773 # For every file in we care about.
1773 # For every file in we care about.
1774 for f in changedfiles:
1774 for f in changedfiles:
1775 fnode = m.get(f, None)
1775 fnode = m.get(f, None)
1776 # If it's in the manifest
1776 # If it's in the manifest
1777 if fnode is not None:
1777 if fnode is not None:
1778 # See comments above.
1778 # See comments above.
1779 clnode = msng_mnfst_set[mnfstnode]
1779 clnode = msng_mnfst_set[mnfstnode]
1780 ndset = msng_filenode_set.setdefault(f, {})
1780 ndset = msng_filenode_set.setdefault(f, {})
1781 ndset.setdefault(fnode, clnode)
1781 ndset.setdefault(fnode, clnode)
1782 return collect_msng_filenodes
1782 return collect_msng_filenodes
1783
1783
1784 # We have a list of filenodes we think we need for a file, lets remove
1784 # We have a list of filenodes we think we need for a file, lets remove
1785 # all those we know the recipient must have.
1785 # all those we know the recipient must have.
1786 def prune_filenodes(f, filerevlog):
1786 def prune_filenodes(f, filerevlog):
1787 msngset = msng_filenode_set[f]
1787 msngset = msng_filenode_set[f]
1788 hasset = set()
1788 hasset = set()
1789 # If a 'missing' filenode thinks it belongs to a changenode we
1789 # If a 'missing' filenode thinks it belongs to a changenode we
1790 # assume the recipient must have, then the recipient must have
1790 # assume the recipient must have, then the recipient must have
1791 # that filenode.
1791 # that filenode.
1792 for n in msngset:
1792 for n in msngset:
1793 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1793 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1794 if clnode in has_cl_set:
1794 if clnode in has_cl_set:
1795 hasset.add(n)
1795 hasset.add(n)
1796 prune_parents(filerevlog, hasset, msngset)
1796 prune_parents(filerevlog, hasset, msngset)
1797
1797
1798 # A function generator function that sets up the a context for the
1798 # A function generator function that sets up the a context for the
1799 # inner function.
1799 # inner function.
1800 def lookup_filenode_link_func(fname):
1800 def lookup_filenode_link_func(fname):
1801 msngset = msng_filenode_set[fname]
1801 msngset = msng_filenode_set[fname]
1802 # Lookup the changenode the filenode belongs to.
1802 # Lookup the changenode the filenode belongs to.
1803 def lookup_filenode_link(fnode):
1803 def lookup_filenode_link(fnode):
1804 return msngset[fnode]
1804 return msngset[fnode]
1805 return lookup_filenode_link
1805 return lookup_filenode_link
1806
1806
1807 # Add the nodes that were explicitly requested.
1807 # Add the nodes that were explicitly requested.
1808 def add_extra_nodes(name, nodes):
1808 def add_extra_nodes(name, nodes):
1809 if not extranodes or name not in extranodes:
1809 if not extranodes or name not in extranodes:
1810 return
1810 return
1811
1811
1812 for node, linknode in extranodes[name]:
1812 for node, linknode in extranodes[name]:
1813 if node not in nodes:
1813 if node not in nodes:
1814 nodes[node] = linknode
1814 nodes[node] = linknode
1815
1815
1816 # Now that we have all theses utility functions to help out and
1816 # Now that we have all theses utility functions to help out and
1817 # logically divide up the task, generate the group.
1817 # logically divide up the task, generate the group.
1818 def gengroup():
1818 def gengroup():
1819 # The set of changed files starts empty.
1819 # The set of changed files starts empty.
1820 changedfiles = {}
1820 changedfiles = {}
1821 collect = changegroup.collector(cl, msng_mnfst_set, changedfiles)
1821 collect = changegroup.collector(cl, msng_mnfst_set, changedfiles)
1822
1822
1823 # Create a changenode group generator that will call our functions
1823 # Create a changenode group generator that will call our functions
1824 # back to lookup the owning changenode and collect information.
1824 # back to lookup the owning changenode and collect information.
1825 group = cl.group(msng_cl_lst, identity, collect)
1825 group = cl.group(msng_cl_lst, identity, collect)
1826 cnt = 0
1826 cnt = 0
1827 for chnk in group:
1827 for chnk in group:
1828 yield chnk
1828 yield chnk
1829 self.ui.progress(_('bundle changes'), cnt, unit=_('chunks'))
1829 self.ui.progress(_('bundling changes'), cnt, unit=_('chunks'))
1830 cnt += 1
1830 cnt += 1
1831 self.ui.progress(_('bundle changes'), None, unit=_('chunks'))
1831 self.ui.progress(_('bundling changes'), None, unit=_('chunks'))
1832
1832
1833
1833
1834 # Figure out which manifest nodes (of the ones we think might be
1834 # Figure out which manifest nodes (of the ones we think might be
1835 # part of the changegroup) the recipient must know about and
1835 # part of the changegroup) the recipient must know about and
1836 # remove them from the changegroup.
1836 # remove them from the changegroup.
1837 has_mnfst_set = set()
1837 has_mnfst_set = set()
1838 for n in msng_mnfst_set:
1838 for n in msng_mnfst_set:
1839 # If a 'missing' manifest thinks it belongs to a changenode
1839 # If a 'missing' manifest thinks it belongs to a changenode
1840 # the recipient is assumed to have, obviously the recipient
1840 # the recipient is assumed to have, obviously the recipient
1841 # must have that manifest.
1841 # must have that manifest.
1842 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1842 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1843 if linknode in has_cl_set:
1843 if linknode in has_cl_set:
1844 has_mnfst_set.add(n)
1844 has_mnfst_set.add(n)
1845 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1845 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1846 add_extra_nodes(1, msng_mnfst_set)
1846 add_extra_nodes(1, msng_mnfst_set)
1847 msng_mnfst_lst = msng_mnfst_set.keys()
1847 msng_mnfst_lst = msng_mnfst_set.keys()
1848 # Sort the manifestnodes by revision number.
1848 # Sort the manifestnodes by revision number.
1849 msng_mnfst_lst.sort(key=mnfst.rev)
1849 msng_mnfst_lst.sort(key=mnfst.rev)
1850 # Create a generator for the manifestnodes that calls our lookup
1850 # Create a generator for the manifestnodes that calls our lookup
1851 # and data collection functions back.
1851 # and data collection functions back.
1852 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1852 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1853 filenode_collector(changedfiles))
1853 filenode_collector(changedfiles))
1854 cnt = 0
1854 cnt = 0
1855 for chnk in group:
1855 for chnk in group:
1856 yield chnk
1856 yield chnk
1857 self.ui.progress(_('bundle manifests'), cnt, unit=_('chunks'))
1857 self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks'))
1858 cnt += 1
1858 cnt += 1
1859 self.ui.progress(_('bundle manifests'), None, unit=_('chunks'))
1859 self.ui.progress(_('bundling manifests'), None, unit=_('chunks'))
1860
1860
1861 # These are no longer needed, dereference and toss the memory for
1861 # These are no longer needed, dereference and toss the memory for
1862 # them.
1862 # them.
1863 msng_mnfst_lst = None
1863 msng_mnfst_lst = None
1864 msng_mnfst_set.clear()
1864 msng_mnfst_set.clear()
1865
1865
1866 if extranodes:
1866 if extranodes:
1867 for fname in extranodes:
1867 for fname in extranodes:
1868 if isinstance(fname, int):
1868 if isinstance(fname, int):
1869 continue
1869 continue
1870 msng_filenode_set.setdefault(fname, {})
1870 msng_filenode_set.setdefault(fname, {})
1871 changedfiles[fname] = 1
1871 changedfiles[fname] = 1
1872 # Go through all our files in order sorted by name.
1872 # Go through all our files in order sorted by name.
1873 cnt = 0
1873 cnt = 0
1874 for fname in sorted(changedfiles):
1874 for fname in sorted(changedfiles):
1875 filerevlog = self.file(fname)
1875 filerevlog = self.file(fname)
1876 if not len(filerevlog):
1876 if not len(filerevlog):
1877 raise util.Abort(_("empty or missing revlog for %s") % fname)
1877 raise util.Abort(_("empty or missing revlog for %s") % fname)
1878 # Toss out the filenodes that the recipient isn't really
1878 # Toss out the filenodes that the recipient isn't really
1879 # missing.
1879 # missing.
1880 if fname in msng_filenode_set:
1880 if fname in msng_filenode_set:
1881 prune_filenodes(fname, filerevlog)
1881 prune_filenodes(fname, filerevlog)
1882 add_extra_nodes(fname, msng_filenode_set[fname])
1882 add_extra_nodes(fname, msng_filenode_set[fname])
1883 msng_filenode_lst = msng_filenode_set[fname].keys()
1883 msng_filenode_lst = msng_filenode_set[fname].keys()
1884 else:
1884 else:
1885 msng_filenode_lst = []
1885 msng_filenode_lst = []
1886 # If any filenodes are left, generate the group for them,
1886 # If any filenodes are left, generate the group for them,
1887 # otherwise don't bother.
1887 # otherwise don't bother.
1888 if len(msng_filenode_lst) > 0:
1888 if len(msng_filenode_lst) > 0:
1889 yield changegroup.chunkheader(len(fname))
1889 yield changegroup.chunkheader(len(fname))
1890 yield fname
1890 yield fname
1891 # Sort the filenodes by their revision #
1891 # Sort the filenodes by their revision #
1892 msng_filenode_lst.sort(key=filerevlog.rev)
1892 msng_filenode_lst.sort(key=filerevlog.rev)
1893 # Create a group generator and only pass in a changenode
1893 # Create a group generator and only pass in a changenode
1894 # lookup function as we need to collect no information
1894 # lookup function as we need to collect no information
1895 # from filenodes.
1895 # from filenodes.
1896 group = filerevlog.group(msng_filenode_lst,
1896 group = filerevlog.group(msng_filenode_lst,
1897 lookup_filenode_link_func(fname))
1897 lookup_filenode_link_func(fname))
1898 for chnk in group:
1898 for chnk in group:
1899 self.ui.progress(
1899 self.ui.progress(
1900 _('bundle files'), cnt, item=fname, unit=_('chunks'))
1900 _('bundling files'), cnt, item=fname, unit=_('chunks'))
1901 cnt += 1
1901 cnt += 1
1902 yield chnk
1902 yield chnk
1903 if fname in msng_filenode_set:
1903 if fname in msng_filenode_set:
1904 # Don't need this anymore, toss it to free memory.
1904 # Don't need this anymore, toss it to free memory.
1905 del msng_filenode_set[fname]
1905 del msng_filenode_set[fname]
1906 # Signal that no more groups are left.
1906 # Signal that no more groups are left.
1907 yield changegroup.closechunk()
1907 yield changegroup.closechunk()
1908 self.ui.progress(_('bundle files'), None, unit=_('chunks'))
1908 self.ui.progress(_('bundling files'), None, unit=_('chunks'))
1909
1909
1910 if msng_cl_lst:
1910 if msng_cl_lst:
1911 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1911 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1912
1912
1913 return util.chunkbuffer(gengroup())
1913 return util.chunkbuffer(gengroup())
1914
1914
1915 def changegroup(self, basenodes, source):
1915 def changegroup(self, basenodes, source):
1916 # to avoid a race we use changegroupsubset() (issue1320)
1916 # to avoid a race we use changegroupsubset() (issue1320)
1917 return self.changegroupsubset(basenodes, self.heads(), source)
1917 return self.changegroupsubset(basenodes, self.heads(), source)
1918
1918
1919 def _changegroup(self, nodes, source):
1919 def _changegroup(self, nodes, source):
1920 """Compute the changegroup of all nodes that we have that a recipient
1920 """Compute the changegroup of all nodes that we have that a recipient
1921 doesn't. Return a chunkbuffer object whose read() method will return
1921 doesn't. Return a chunkbuffer object whose read() method will return
1922 successive changegroup chunks.
1922 successive changegroup chunks.
1923
1923
1924 This is much easier than the previous function as we can assume that
1924 This is much easier than the previous function as we can assume that
1925 the recipient has any changenode we aren't sending them.
1925 the recipient has any changenode we aren't sending them.
1926
1926
1927 nodes is the set of nodes to send"""
1927 nodes is the set of nodes to send"""
1928
1928
1929 self.hook('preoutgoing', throw=True, source=source)
1929 self.hook('preoutgoing', throw=True, source=source)
1930
1930
1931 cl = self.changelog
1931 cl = self.changelog
1932 revset = set([cl.rev(n) for n in nodes])
1932 revset = set([cl.rev(n) for n in nodes])
1933 self.changegroupinfo(nodes, source)
1933 self.changegroupinfo(nodes, source)
1934
1934
1935 def identity(x):
1935 def identity(x):
1936 return x
1936 return x
1937
1937
1938 def gennodelst(log):
1938 def gennodelst(log):
1939 for r in log:
1939 for r in log:
1940 if log.linkrev(r) in revset:
1940 if log.linkrev(r) in revset:
1941 yield log.node(r)
1941 yield log.node(r)
1942
1942
1943 def lookuprevlink_func(revlog):
1943 def lookuprevlink_func(revlog):
1944 def lookuprevlink(n):
1944 def lookuprevlink(n):
1945 return cl.node(revlog.linkrev(revlog.rev(n)))
1945 return cl.node(revlog.linkrev(revlog.rev(n)))
1946 return lookuprevlink
1946 return lookuprevlink
1947
1947
1948 def gengroup():
1948 def gengroup():
1949 '''yield a sequence of changegroup chunks (strings)'''
1949 '''yield a sequence of changegroup chunks (strings)'''
1950 # construct a list of all changed files
1950 # construct a list of all changed files
1951 changedfiles = {}
1951 changedfiles = {}
1952 mmfs = {}
1952 mmfs = {}
1953 collect = changegroup.collector(cl, mmfs, changedfiles)
1953 collect = changegroup.collector(cl, mmfs, changedfiles)
1954
1954
1955 cnt = 0
1955 cnt = 0
1956 for chnk in cl.group(nodes, identity, collect):
1956 for chnk in cl.group(nodes, identity, collect):
1957 self.ui.progress(_('bundle changes'), cnt, unit=_('chunks'))
1957 self.ui.progress(_('bundling changes'), cnt, unit=_('chunks'))
1958 cnt += 1
1958 cnt += 1
1959 yield chnk
1959 yield chnk
1960 self.ui.progress(_('bundle changes'), None, unit=_('chunks'))
1960 self.ui.progress(_('bundling changes'), None, unit=_('chunks'))
1961
1961
1962 mnfst = self.manifest
1962 mnfst = self.manifest
1963 nodeiter = gennodelst(mnfst)
1963 nodeiter = gennodelst(mnfst)
1964 cnt = 0
1964 cnt = 0
1965 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1965 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1966 self.ui.progress(_('bundle manifests'), cnt, unit=_('chunks'))
1966 self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks'))
1967 cnt += 1
1967 cnt += 1
1968 yield chnk
1968 yield chnk
1969 self.ui.progress(_('bundle manifests'), None, unit=_('chunks'))
1969 self.ui.progress(_('bundling manifests'), None, unit=_('chunks'))
1970
1970
1971 cnt = 0
1971 cnt = 0
1972 for fname in sorted(changedfiles):
1972 for fname in sorted(changedfiles):
1973 filerevlog = self.file(fname)
1973 filerevlog = self.file(fname)
1974 if not len(filerevlog):
1974 if not len(filerevlog):
1975 raise util.Abort(_("empty or missing revlog for %s") % fname)
1975 raise util.Abort(_("empty or missing revlog for %s") % fname)
1976 nodeiter = gennodelst(filerevlog)
1976 nodeiter = gennodelst(filerevlog)
1977 nodeiter = list(nodeiter)
1977 nodeiter = list(nodeiter)
1978 if nodeiter:
1978 if nodeiter:
1979 yield changegroup.chunkheader(len(fname))
1979 yield changegroup.chunkheader(len(fname))
1980 yield fname
1980 yield fname
1981 lookup = lookuprevlink_func(filerevlog)
1981 lookup = lookuprevlink_func(filerevlog)
1982 for chnk in filerevlog.group(nodeiter, lookup):
1982 for chnk in filerevlog.group(nodeiter, lookup):
1983 self.ui.progress(
1983 self.ui.progress(
1984 _('bundle files'), cnt, item=fname, unit=_('chunks'))
1984 _('bundling files'), cnt, item=fname, unit=_('chunks'))
1985 cnt += 1
1985 cnt += 1
1986 yield chnk
1986 yield chnk
1987 self.ui.progress(_('bundle files'), None, unit=_('chunks'))
1987 self.ui.progress(_('bundling files'), None, unit=_('chunks'))
1988
1988
1989 yield changegroup.closechunk()
1989 yield changegroup.closechunk()
1990
1990
1991 if nodes:
1991 if nodes:
1992 self.hook('outgoing', node=hex(nodes[0]), source=source)
1992 self.hook('outgoing', node=hex(nodes[0]), source=source)
1993
1993
1994 return util.chunkbuffer(gengroup())
1994 return util.chunkbuffer(gengroup())
1995
1995
1996 def addchangegroup(self, source, srctype, url, emptyok=False):
1996 def addchangegroup(self, source, srctype, url, emptyok=False):
1997 """add changegroup to repo.
1997 """add changegroup to repo.
1998
1998
1999 return values:
1999 return values:
2000 - nothing changed or no source: 0
2000 - nothing changed or no source: 0
2001 - more heads than before: 1+added heads (2..n)
2001 - more heads than before: 1+added heads (2..n)
2002 - less heads than before: -1-removed heads (-2..-n)
2002 - less heads than before: -1-removed heads (-2..-n)
2003 - number of heads stays the same: 1
2003 - number of heads stays the same: 1
2004 """
2004 """
2005 def csmap(x):
2005 def csmap(x):
2006 self.ui.debug("add changeset %s\n" % short(x))
2006 self.ui.debug("add changeset %s\n" % short(x))
2007 return len(cl)
2007 return len(cl)
2008
2008
2009 def revmap(x):
2009 def revmap(x):
2010 return cl.rev(x)
2010 return cl.rev(x)
2011
2011
2012 if not source:
2012 if not source:
2013 return 0
2013 return 0
2014
2014
2015 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2015 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2016
2016
2017 changesets = files = revisions = 0
2017 changesets = files = revisions = 0
2018
2018
2019 # write changelog data to temp files so concurrent readers will not see
2019 # write changelog data to temp files so concurrent readers will not see
2020 # inconsistent view
2020 # inconsistent view
2021 cl = self.changelog
2021 cl = self.changelog
2022 cl.delayupdate()
2022 cl.delayupdate()
2023 oldheads = len(cl.heads())
2023 oldheads = len(cl.heads())
2024
2024
2025 tr = self.transaction()
2025 tr = self.transaction()
2026 try:
2026 try:
2027 trp = weakref.proxy(tr)
2027 trp = weakref.proxy(tr)
2028 # pull off the changeset group
2028 # pull off the changeset group
2029 self.ui.status(_("adding changesets\n"))
2029 self.ui.status(_("adding changesets\n"))
2030 clstart = len(cl)
2030 clstart = len(cl)
2031 class prog(object):
2031 class prog(object):
2032 step = _('changesets')
2032 step = _('changesets')
2033 count = 1
2033 count = 1
2034 ui = self.ui
2034 ui = self.ui
2035 def __call__(self):
2035 def __call__(self):
2036 self.ui.progress(self.step, self.count, unit=_('chunks'))
2036 self.ui.progress(self.step, self.count, unit=_('chunks'))
2037 self.count += 1
2037 self.count += 1
2038 pr = prog()
2038 pr = prog()
2039 chunkiter = changegroup.chunkiter(source, progress=pr)
2039 chunkiter = changegroup.chunkiter(source, progress=pr)
2040 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
2040 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
2041 raise util.Abort(_("received changelog group is empty"))
2041 raise util.Abort(_("received changelog group is empty"))
2042 clend = len(cl)
2042 clend = len(cl)
2043 changesets = clend - clstart
2043 changesets = clend - clstart
2044 self.ui.progress(_('changesets'), None)
2044 self.ui.progress(_('changesets'), None)
2045
2045
2046 # pull off the manifest group
2046 # pull off the manifest group
2047 self.ui.status(_("adding manifests\n"))
2047 self.ui.status(_("adding manifests\n"))
2048 pr.step = _('manifests')
2048 pr.step = _('manifests')
2049 pr.count = 1
2049 pr.count = 1
2050 chunkiter = changegroup.chunkiter(source, progress=pr)
2050 chunkiter = changegroup.chunkiter(source, progress=pr)
2051 # no need to check for empty manifest group here:
2051 # no need to check for empty manifest group here:
2052 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2052 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2053 # no new manifest will be created and the manifest group will
2053 # no new manifest will be created and the manifest group will
2054 # be empty during the pull
2054 # be empty during the pull
2055 self.manifest.addgroup(chunkiter, revmap, trp)
2055 self.manifest.addgroup(chunkiter, revmap, trp)
2056 self.ui.progress(_('manifests'), None)
2056 self.ui.progress(_('manifests'), None)
2057
2057
2058 needfiles = {}
2058 needfiles = {}
2059 if self.ui.configbool('server', 'validate', default=False):
2059 if self.ui.configbool('server', 'validate', default=False):
2060 # validate incoming csets have their manifests
2060 # validate incoming csets have their manifests
2061 for cset in xrange(clstart, clend):
2061 for cset in xrange(clstart, clend):
2062 mfest = self.changelog.read(self.changelog.node(cset))[0]
2062 mfest = self.changelog.read(self.changelog.node(cset))[0]
2063 mfest = self.manifest.readdelta(mfest)
2063 mfest = self.manifest.readdelta(mfest)
2064 # store file nodes we must see
2064 # store file nodes we must see
2065 for f, n in mfest.iteritems():
2065 for f, n in mfest.iteritems():
2066 needfiles.setdefault(f, set()).add(n)
2066 needfiles.setdefault(f, set()).add(n)
2067
2067
2068 # process the files
2068 # process the files
2069 self.ui.status(_("adding file changes\n"))
2069 self.ui.status(_("adding file changes\n"))
2070 pr.step = 'files'
2070 pr.step = 'files'
2071 pr.count = 1
2071 pr.count = 1
2072 while 1:
2072 while 1:
2073 f = changegroup.getchunk(source)
2073 f = changegroup.getchunk(source)
2074 if not f:
2074 if not f:
2075 break
2075 break
2076 self.ui.debug("adding %s revisions\n" % f)
2076 self.ui.debug("adding %s revisions\n" % f)
2077 fl = self.file(f)
2077 fl = self.file(f)
2078 o = len(fl)
2078 o = len(fl)
2079 chunkiter = changegroup.chunkiter(source, progress=pr)
2079 chunkiter = changegroup.chunkiter(source, progress=pr)
2080 if fl.addgroup(chunkiter, revmap, trp) is None:
2080 if fl.addgroup(chunkiter, revmap, trp) is None:
2081 raise util.Abort(_("received file revlog group is empty"))
2081 raise util.Abort(_("received file revlog group is empty"))
2082 revisions += len(fl) - o
2082 revisions += len(fl) - o
2083 files += 1
2083 files += 1
2084 if f in needfiles:
2084 if f in needfiles:
2085 needs = needfiles[f]
2085 needs = needfiles[f]
2086 for new in xrange(o, len(fl)):
2086 for new in xrange(o, len(fl)):
2087 n = fl.node(new)
2087 n = fl.node(new)
2088 if n in needs:
2088 if n in needs:
2089 needs.remove(n)
2089 needs.remove(n)
2090 if not needs:
2090 if not needs:
2091 del needfiles[f]
2091 del needfiles[f]
2092 self.ui.progress(_('files'), None)
2092 self.ui.progress(_('files'), None)
2093
2093
2094 for f, needs in needfiles.iteritems():
2094 for f, needs in needfiles.iteritems():
2095 fl = self.file(f)
2095 fl = self.file(f)
2096 for n in needs:
2096 for n in needs:
2097 try:
2097 try:
2098 fl.rev(n)
2098 fl.rev(n)
2099 except error.LookupError:
2099 except error.LookupError:
2100 raise util.Abort(
2100 raise util.Abort(
2101 _('missing file data for %s:%s - run hg verify') %
2101 _('missing file data for %s:%s - run hg verify') %
2102 (f, hex(n)))
2102 (f, hex(n)))
2103
2103
2104 newheads = len(cl.heads())
2104 newheads = len(cl.heads())
2105 heads = ""
2105 heads = ""
2106 if oldheads and newheads != oldheads:
2106 if oldheads and newheads != oldheads:
2107 heads = _(" (%+d heads)") % (newheads - oldheads)
2107 heads = _(" (%+d heads)") % (newheads - oldheads)
2108
2108
2109 self.ui.status(_("added %d changesets"
2109 self.ui.status(_("added %d changesets"
2110 " with %d changes to %d files%s\n")
2110 " with %d changes to %d files%s\n")
2111 % (changesets, revisions, files, heads))
2111 % (changesets, revisions, files, heads))
2112
2112
2113 if changesets > 0:
2113 if changesets > 0:
2114 p = lambda: cl.writepending() and self.root or ""
2114 p = lambda: cl.writepending() and self.root or ""
2115 self.hook('pretxnchangegroup', throw=True,
2115 self.hook('pretxnchangegroup', throw=True,
2116 node=hex(cl.node(clstart)), source=srctype,
2116 node=hex(cl.node(clstart)), source=srctype,
2117 url=url, pending=p)
2117 url=url, pending=p)
2118
2118
2119 # make changelog see real files again
2119 # make changelog see real files again
2120 cl.finalize(trp)
2120 cl.finalize(trp)
2121
2121
2122 tr.close()
2122 tr.close()
2123 finally:
2123 finally:
2124 del tr
2124 del tr
2125
2125
2126 if changesets > 0:
2126 if changesets > 0:
2127 # forcefully update the on-disk branch cache
2127 # forcefully update the on-disk branch cache
2128 self.ui.debug("updating the branch cache\n")
2128 self.ui.debug("updating the branch cache\n")
2129 self.branchtags()
2129 self.branchtags()
2130 self.hook("changegroup", node=hex(cl.node(clstart)),
2130 self.hook("changegroup", node=hex(cl.node(clstart)),
2131 source=srctype, url=url)
2131 source=srctype, url=url)
2132
2132
2133 for i in xrange(clstart, clend):
2133 for i in xrange(clstart, clend):
2134 self.hook("incoming", node=hex(cl.node(i)),
2134 self.hook("incoming", node=hex(cl.node(i)),
2135 source=srctype, url=url)
2135 source=srctype, url=url)
2136
2136
2137 # never return 0 here:
2137 # never return 0 here:
2138 if newheads < oldheads:
2138 if newheads < oldheads:
2139 return newheads - oldheads - 1
2139 return newheads - oldheads - 1
2140 else:
2140 else:
2141 return newheads - oldheads + 1
2141 return newheads - oldheads + 1
2142
2142
2143
2143
2144 def stream_in(self, remote):
2144 def stream_in(self, remote):
2145 fp = remote.stream_out()
2145 fp = remote.stream_out()
2146 l = fp.readline()
2146 l = fp.readline()
2147 try:
2147 try:
2148 resp = int(l)
2148 resp = int(l)
2149 except ValueError:
2149 except ValueError:
2150 raise error.ResponseError(
2150 raise error.ResponseError(
2151 _('Unexpected response from remote server:'), l)
2151 _('Unexpected response from remote server:'), l)
2152 if resp == 1:
2152 if resp == 1:
2153 raise util.Abort(_('operation forbidden by server'))
2153 raise util.Abort(_('operation forbidden by server'))
2154 elif resp == 2:
2154 elif resp == 2:
2155 raise util.Abort(_('locking the remote repository failed'))
2155 raise util.Abort(_('locking the remote repository failed'))
2156 elif resp != 0:
2156 elif resp != 0:
2157 raise util.Abort(_('the server sent an unknown error code'))
2157 raise util.Abort(_('the server sent an unknown error code'))
2158 self.ui.status(_('streaming all changes\n'))
2158 self.ui.status(_('streaming all changes\n'))
2159 l = fp.readline()
2159 l = fp.readline()
2160 try:
2160 try:
2161 total_files, total_bytes = map(int, l.split(' ', 1))
2161 total_files, total_bytes = map(int, l.split(' ', 1))
2162 except (ValueError, TypeError):
2162 except (ValueError, TypeError):
2163 raise error.ResponseError(
2163 raise error.ResponseError(
2164 _('Unexpected response from remote server:'), l)
2164 _('Unexpected response from remote server:'), l)
2165 self.ui.status(_('%d files to transfer, %s of data\n') %
2165 self.ui.status(_('%d files to transfer, %s of data\n') %
2166 (total_files, util.bytecount(total_bytes)))
2166 (total_files, util.bytecount(total_bytes)))
2167 start = time.time()
2167 start = time.time()
2168 for i in xrange(total_files):
2168 for i in xrange(total_files):
2169 # XXX doesn't support '\n' or '\r' in filenames
2169 # XXX doesn't support '\n' or '\r' in filenames
2170 l = fp.readline()
2170 l = fp.readline()
2171 try:
2171 try:
2172 name, size = l.split('\0', 1)
2172 name, size = l.split('\0', 1)
2173 size = int(size)
2173 size = int(size)
2174 except (ValueError, TypeError):
2174 except (ValueError, TypeError):
2175 raise error.ResponseError(
2175 raise error.ResponseError(
2176 _('Unexpected response from remote server:'), l)
2176 _('Unexpected response from remote server:'), l)
2177 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2177 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2178 # for backwards compat, name was partially encoded
2178 # for backwards compat, name was partially encoded
2179 ofp = self.sopener(store.decodedir(name), 'w')
2179 ofp = self.sopener(store.decodedir(name), 'w')
2180 for chunk in util.filechunkiter(fp, limit=size):
2180 for chunk in util.filechunkiter(fp, limit=size):
2181 ofp.write(chunk)
2181 ofp.write(chunk)
2182 ofp.close()
2182 ofp.close()
2183 elapsed = time.time() - start
2183 elapsed = time.time() - start
2184 if elapsed <= 0:
2184 if elapsed <= 0:
2185 elapsed = 0.001
2185 elapsed = 0.001
2186 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2186 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2187 (util.bytecount(total_bytes), elapsed,
2187 (util.bytecount(total_bytes), elapsed,
2188 util.bytecount(total_bytes / elapsed)))
2188 util.bytecount(total_bytes / elapsed)))
2189 self.invalidate()
2189 self.invalidate()
2190 return len(self.heads()) + 1
2190 return len(self.heads()) + 1
2191
2191
2192 def clone(self, remote, heads=[], stream=False):
2192 def clone(self, remote, heads=[], stream=False):
2193 '''clone remote repository.
2193 '''clone remote repository.
2194
2194
2195 keyword arguments:
2195 keyword arguments:
2196 heads: list of revs to clone (forces use of pull)
2196 heads: list of revs to clone (forces use of pull)
2197 stream: use streaming clone if possible'''
2197 stream: use streaming clone if possible'''
2198
2198
2199 # now, all clients that can request uncompressed clones can
2199 # now, all clients that can request uncompressed clones can
2200 # read repo formats supported by all servers that can serve
2200 # read repo formats supported by all servers that can serve
2201 # them.
2201 # them.
2202
2202
2203 # if revlog format changes, client will have to check version
2203 # if revlog format changes, client will have to check version
2204 # and format flags on "stream" capability, and use
2204 # and format flags on "stream" capability, and use
2205 # uncompressed only if compatible.
2205 # uncompressed only if compatible.
2206
2206
2207 if stream and not heads and remote.capable('stream'):
2207 if stream and not heads and remote.capable('stream'):
2208 return self.stream_in(remote)
2208 return self.stream_in(remote)
2209 return self.pull(remote, heads)
2209 return self.pull(remote, heads)
2210
2210
2211 # used to avoid circular references so destructors work
2211 # used to avoid circular references so destructors work
2212 def aftertrans(files):
2212 def aftertrans(files):
2213 renamefiles = [tuple(t) for t in files]
2213 renamefiles = [tuple(t) for t in files]
2214 def a():
2214 def a():
2215 for src, dest in renamefiles:
2215 for src, dest in renamefiles:
2216 util.rename(src, dest)
2216 util.rename(src, dest)
2217 return a
2217 return a
2218
2218
2219 def instance(ui, path, create):
2219 def instance(ui, path, create):
2220 return localrepository(ui, util.drop_scheme('file', path), create)
2220 return localrepository(ui, util.drop_scheme('file', path), create)
2221
2221
2222 def islocal(path):
2222 def islocal(path):
2223 return True
2223 return True
@@ -1,370 +1,373 b''
1 # subrepo.py - sub-repository handling for Mercurial
1 # subrepo.py - sub-repository handling for Mercurial
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import errno, os, re, xml.dom.minidom, shutil
8 import errno, os, re, xml.dom.minidom, shutil
9 from i18n import _
9 from i18n import _
10 import config, util, node, error
10 import config, util, node, error
11 hg = None
11 hg = None
12
12
13 nullstate = ('', '', 'empty')
13 nullstate = ('', '', 'empty')
14
14
15 def state(ctx):
15 def state(ctx):
16 p = config.config()
16 p = config.config()
17 def read(f, sections=None, remap=None):
17 def read(f, sections=None, remap=None):
18 if f in ctx:
18 if f in ctx:
19 p.parse(f, ctx[f].data(), sections, remap, read)
19 p.parse(f, ctx[f].data(), sections, remap, read)
20 else:
20 else:
21 raise util.Abort(_("subrepo spec file %s not found") % f)
21 raise util.Abort(_("subrepo spec file %s not found") % f)
22
22
23 if '.hgsub' in ctx:
23 if '.hgsub' in ctx:
24 read('.hgsub')
24 read('.hgsub')
25
25
26 rev = {}
26 rev = {}
27 if '.hgsubstate' in ctx:
27 if '.hgsubstate' in ctx:
28 try:
28 try:
29 for l in ctx['.hgsubstate'].data().splitlines():
29 for l in ctx['.hgsubstate'].data().splitlines():
30 revision, path = l.split(" ", 1)
30 revision, path = l.split(" ", 1)
31 rev[path] = revision
31 rev[path] = revision
32 except IOError, err:
32 except IOError, err:
33 if err.errno != errno.ENOENT:
33 if err.errno != errno.ENOENT:
34 raise
34 raise
35
35
36 state = {}
36 state = {}
37 for path, src in p[''].items():
37 for path, src in p[''].items():
38 kind = 'hg'
38 kind = 'hg'
39 if src.startswith('['):
39 if src.startswith('['):
40 if ']' not in src:
40 if ']' not in src:
41 raise util.Abort(_('missing ] in subrepo source'))
41 raise util.Abort(_('missing ] in subrepo source'))
42 kind, src = src.split(']', 1)
42 kind, src = src.split(']', 1)
43 kind = kind[1:]
43 kind = kind[1:]
44 state[path] = (src.strip(), rev.get(path, ''), kind)
44 state[path] = (src.strip(), rev.get(path, ''), kind)
45
45
46 return state
46 return state
47
47
48 def writestate(repo, state):
48 def writestate(repo, state):
49 repo.wwrite('.hgsubstate',
49 repo.wwrite('.hgsubstate',
50 ''.join(['%s %s\n' % (state[s][1], s)
50 ''.join(['%s %s\n' % (state[s][1], s)
51 for s in sorted(state)]), '')
51 for s in sorted(state)]), '')
52
52
53 def submerge(repo, wctx, mctx, actx):
53 def submerge(repo, wctx, mctx, actx):
54 # working context, merging context, ancestor context
54 # working context, merging context, ancestor context
55 if mctx == actx: # backwards?
55 if mctx == actx: # backwards?
56 actx = wctx.p1()
56 actx = wctx.p1()
57 s1 = wctx.substate
57 s1 = wctx.substate
58 s2 = mctx.substate
58 s2 = mctx.substate
59 sa = actx.substate
59 sa = actx.substate
60 sm = {}
60 sm = {}
61
61
62 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
62 repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))
63
63
64 def debug(s, msg, r=""):
64 def debug(s, msg, r=""):
65 if r:
65 if r:
66 r = "%s:%s:%s" % r
66 r = "%s:%s:%s" % r
67 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
67 repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r))
68
68
69 for s, l in s1.items():
69 for s, l in s1.items():
70 if wctx != actx and wctx.sub(s).dirty():
70 if wctx != actx and wctx.sub(s).dirty():
71 l = (l[0], l[1] + "+")
71 l = (l[0], l[1] + "+")
72 a = sa.get(s, nullstate)
72 a = sa.get(s, nullstate)
73 if s in s2:
73 if s in s2:
74 r = s2[s]
74 r = s2[s]
75 if l == r or r == a: # no change or local is newer
75 if l == r or r == a: # no change or local is newer
76 sm[s] = l
76 sm[s] = l
77 continue
77 continue
78 elif l == a: # other side changed
78 elif l == a: # other side changed
79 debug(s, "other changed, get", r)
79 debug(s, "other changed, get", r)
80 wctx.sub(s).get(r)
80 wctx.sub(s).get(r)
81 sm[s] = r
81 sm[s] = r
82 elif l[0] != r[0]: # sources differ
82 elif l[0] != r[0]: # sources differ
83 if repo.ui.promptchoice(
83 if repo.ui.promptchoice(
84 _(' subrepository sources for %s differ\n'
84 _(' subrepository sources for %s differ\n'
85 'use (l)ocal source (%s) or (r)emote source (%s)?')
85 'use (l)ocal source (%s) or (r)emote source (%s)?')
86 % (s, l[0], r[0]),
86 % (s, l[0], r[0]),
87 (_('&Local'), _('&Remote')), 0):
87 (_('&Local'), _('&Remote')), 0):
88 debug(s, "prompt changed, get", r)
88 debug(s, "prompt changed, get", r)
89 wctx.sub(s).get(r)
89 wctx.sub(s).get(r)
90 sm[s] = r
90 sm[s] = r
91 elif l[1] == a[1]: # local side is unchanged
91 elif l[1] == a[1]: # local side is unchanged
92 debug(s, "other side changed, get", r)
92 debug(s, "other side changed, get", r)
93 wctx.sub(s).get(r)
93 wctx.sub(s).get(r)
94 sm[s] = r
94 sm[s] = r
95 else:
95 else:
96 debug(s, "both sides changed, merge with", r)
96 debug(s, "both sides changed, merge with", r)
97 wctx.sub(s).merge(r)
97 wctx.sub(s).merge(r)
98 sm[s] = l
98 sm[s] = l
99 elif l == a: # remote removed, local unchanged
99 elif l == a: # remote removed, local unchanged
100 debug(s, "remote removed, remove")
100 debug(s, "remote removed, remove")
101 wctx.sub(s).remove()
101 wctx.sub(s).remove()
102 else:
102 else:
103 if repo.ui.promptchoice(
103 if repo.ui.promptchoice(
104 _(' local changed subrepository %s which remote removed\n'
104 _(' local changed subrepository %s which remote removed\n'
105 'use (c)hanged version or (d)elete?') % s,
105 'use (c)hanged version or (d)elete?') % s,
106 (_('&Changed'), _('&Delete')), 0):
106 (_('&Changed'), _('&Delete')), 0):
107 debug(s, "prompt remove")
107 debug(s, "prompt remove")
108 wctx.sub(s).remove()
108 wctx.sub(s).remove()
109
109
110 for s, r in s2.items():
110 for s, r in s2.items():
111 if s in s1:
111 if s in s1:
112 continue
112 continue
113 elif s not in sa:
113 elif s not in sa:
114 debug(s, "remote added, get", r)
114 debug(s, "remote added, get", r)
115 mctx.sub(s).get(r)
115 mctx.sub(s).get(r)
116 sm[s] = r
116 sm[s] = r
117 elif r != sa[s]:
117 elif r != sa[s]:
118 if repo.ui.promptchoice(
118 if repo.ui.promptchoice(
119 _(' remote changed subrepository %s which local removed\n'
119 _(' remote changed subrepository %s which local removed\n'
120 'use (c)hanged version or (d)elete?') % s,
120 'use (c)hanged version or (d)elete?') % s,
121 (_('&Changed'), _('&Delete')), 0) == 0:
121 (_('&Changed'), _('&Delete')), 0) == 0:
122 debug(s, "prompt recreate", r)
122 debug(s, "prompt recreate", r)
123 wctx.sub(s).get(r)
123 wctx.sub(s).get(r)
124 sm[s] = r
124 sm[s] = r
125
125
126 # record merged .hgsubstate
126 # record merged .hgsubstate
127 writestate(repo, sm)
127 writestate(repo, sm)
128
128
129 def _abssource(repo, push=False):
129 def _abssource(repo, push=False):
130 if hasattr(repo, '_subparent'):
130 if hasattr(repo, '_subparent'):
131 source = repo._subsource
131 source = repo._subsource
132 if source.startswith('/') or '://' in source:
132 if source.startswith('/') or '://' in source:
133 return source
133 return source
134 parent = _abssource(repo._subparent, push)
134 parent = _abssource(repo._subparent, push)
135 if '://' in parent:
135 if '://' in parent:
136 if parent[-1] == '/':
136 if parent[-1] == '/':
137 parent = parent[:-1]
137 parent = parent[:-1]
138 return parent + '/' + source
138 return parent + '/' + source
139 return os.path.join(parent, repo._subsource)
139 return os.path.join(parent, repo._subsource)
140 if push and repo.ui.config('paths', 'default-push'):
140 if push and repo.ui.config('paths', 'default-push'):
141 return repo.ui.config('paths', 'default-push', repo.root)
141 return repo.ui.config('paths', 'default-push', repo.root)
142 return repo.ui.config('paths', 'default', repo.root)
142 return repo.ui.config('paths', 'default', repo.root)
143
143
144 def subrepo(ctx, path):
144 def subrepo(ctx, path):
145 # subrepo inherently violates our import layering rules
145 # subrepo inherently violates our import layering rules
146 # because it wants to make repo objects from deep inside the stack
146 # because it wants to make repo objects from deep inside the stack
147 # so we manually delay the circular imports to not break
147 # so we manually delay the circular imports to not break
148 # scripts that don't use our demand-loading
148 # scripts that don't use our demand-loading
149 global hg
149 global hg
150 import hg as h
150 import hg as h
151 hg = h
151 hg = h
152
152
153 util.path_auditor(ctx._repo.root)(path)
153 util.path_auditor(ctx._repo.root)(path)
154 state = ctx.substate.get(path, nullstate)
154 state = ctx.substate.get(path, nullstate)
155 if state[2] not in types:
155 if state[2] not in types:
156 raise util.Abort(_('unknown subrepo type %s') % state[2])
156 raise util.Abort(_('unknown subrepo type %s') % state[2])
157 return types[state[2]](ctx, path, state[:2])
157 return types[state[2]](ctx, path, state[:2])
158
158
159 # subrepo classes need to implement the following methods:
159 # subrepo classes need to implement the following methods:
160 # __init__(self, ctx, path, state)
160 # __init__(self, ctx, path, state)
161 # dirty(self): returns true if the dirstate of the subrepo
161 # dirty(self): returns true if the dirstate of the subrepo
162 # does not match current stored state
162 # does not match current stored state
163 # commit(self, text, user, date): commit the current changes
163 # commit(self, text, user, date): commit the current changes
164 # to the subrepo with the given log message. Use given
164 # to the subrepo with the given log message. Use given
165 # user and date if possible. Return the new state of the subrepo.
165 # user and date if possible. Return the new state of the subrepo.
166 # remove(self): remove the subrepo (should verify the dirstate
166 # remove(self): remove the subrepo (should verify the dirstate
167 # is not dirty first)
167 # is not dirty first)
168 # get(self, state): run whatever commands are needed to put the
168 # get(self, state): run whatever commands are needed to put the
169 # subrepo into this state
169 # subrepo into this state
170 # merge(self, state): merge currently-saved state with the new state.
170 # merge(self, state): merge currently-saved state with the new state.
171 # push(self, force): perform whatever action is analagous to 'hg push'
171 # push(self, force): perform whatever action is analagous to 'hg push'
172 # This may be a no-op on some systems.
172 # This may be a no-op on some systems.
173
173
174 class hgsubrepo(object):
174 class hgsubrepo(object):
175 def __init__(self, ctx, path, state):
175 def __init__(self, ctx, path, state):
176 self._path = path
176 self._path = path
177 self._state = state
177 self._state = state
178 r = ctx._repo
178 r = ctx._repo
179 root = r.wjoin(path)
179 root = r.wjoin(path)
180 create = False
180 create = False
181 if not os.path.exists(os.path.join(root, '.hg')):
181 if not os.path.exists(os.path.join(root, '.hg')):
182 create = True
182 create = True
183 util.makedirs(root)
183 util.makedirs(root)
184 self._repo = hg.repository(r.ui, root, create=create)
184 self._repo = hg.repository(r.ui, root, create=create)
185 self._repo._subparent = r
185 self._repo._subparent = r
186 self._repo._subsource = state[0]
186 self._repo._subsource = state[0]
187
187
188 if create:
188 if create:
189 fp = self._repo.opener("hgrc", "w", text=True)
189 fp = self._repo.opener("hgrc", "w", text=True)
190 fp.write('[paths]\n')
190 fp.write('[paths]\n')
191
191
192 def addpathconfig(key, value):
192 def addpathconfig(key, value):
193 fp.write('%s = %s\n' % (key, value))
193 fp.write('%s = %s\n' % (key, value))
194 self._repo.ui.setconfig('paths', key, value)
194 self._repo.ui.setconfig('paths', key, value)
195
195
196 defpath = os.path.join(_abssource(ctx._repo), path)
196 defpath = _abssource(self._repo)
197 defpushpath = _abssource(self._repo, True)
197 addpathconfig('default', defpath)
198 addpathconfig('default', defpath)
199 if defpath != defpushpath:
200 addpathconfig('default-push', defpushpath)
198 fp.close()
201 fp.close()
199
202
200 def dirty(self):
203 def dirty(self):
201 r = self._state[1]
204 r = self._state[1]
202 if r == '':
205 if r == '':
203 return True
206 return True
204 w = self._repo[None]
207 w = self._repo[None]
205 if w.p1() != self._repo[r]: # version checked out change
208 if w.p1() != self._repo[r]: # version checked out change
206 return True
209 return True
207 return w.dirty() # working directory changed
210 return w.dirty() # working directory changed
208
211
209 def commit(self, text, user, date):
212 def commit(self, text, user, date):
210 self._repo.ui.debug("committing subrepo %s\n" % self._path)
213 self._repo.ui.debug("committing subrepo %s\n" % self._path)
211 n = self._repo.commit(text, user, date)
214 n = self._repo.commit(text, user, date)
212 if not n:
215 if not n:
213 return self._repo['.'].hex() # different version checked out
216 return self._repo['.'].hex() # different version checked out
214 return node.hex(n)
217 return node.hex(n)
215
218
216 def remove(self):
219 def remove(self):
217 # we can't fully delete the repository as it may contain
220 # we can't fully delete the repository as it may contain
218 # local-only history
221 # local-only history
219 self._repo.ui.note(_('removing subrepo %s\n') % self._path)
222 self._repo.ui.note(_('removing subrepo %s\n') % self._path)
220 hg.clean(self._repo, node.nullid, False)
223 hg.clean(self._repo, node.nullid, False)
221
224
222 def _get(self, state):
225 def _get(self, state):
223 source, revision, kind = state
226 source, revision, kind = state
224 try:
227 try:
225 self._repo.lookup(revision)
228 self._repo.lookup(revision)
226 except error.RepoError:
229 except error.RepoError:
227 self._repo._subsource = source
230 self._repo._subsource = source
228 srcurl = _abssource(self._repo)
231 srcurl = _abssource(self._repo)
229 self._repo.ui.status(_('pulling subrepo %s from %s\n')
232 self._repo.ui.status(_('pulling subrepo %s from %s\n')
230 % (self._path, srcurl))
233 % (self._path, srcurl))
231 other = hg.repository(self._repo.ui, srcurl)
234 other = hg.repository(self._repo.ui, srcurl)
232 self._repo.pull(other)
235 self._repo.pull(other)
233
236
234 def get(self, state):
237 def get(self, state):
235 self._get(state)
238 self._get(state)
236 source, revision, kind = state
239 source, revision, kind = state
237 self._repo.ui.debug("getting subrepo %s\n" % self._path)
240 self._repo.ui.debug("getting subrepo %s\n" % self._path)
238 hg.clean(self._repo, revision, False)
241 hg.clean(self._repo, revision, False)
239
242
240 def merge(self, state):
243 def merge(self, state):
241 self._get(state)
244 self._get(state)
242 cur = self._repo['.']
245 cur = self._repo['.']
243 dst = self._repo[state[1]]
246 dst = self._repo[state[1]]
244 anc = dst.ancestor(cur)
247 anc = dst.ancestor(cur)
245 if anc == cur:
248 if anc == cur:
246 self._repo.ui.debug("updating subrepo %s\n" % self._path)
249 self._repo.ui.debug("updating subrepo %s\n" % self._path)
247 hg.update(self._repo, state[1])
250 hg.update(self._repo, state[1])
248 elif anc == dst:
251 elif anc == dst:
249 self._repo.ui.debug("skipping subrepo %s\n" % self._path)
252 self._repo.ui.debug("skipping subrepo %s\n" % self._path)
250 else:
253 else:
251 self._repo.ui.debug("merging subrepo %s\n" % self._path)
254 self._repo.ui.debug("merging subrepo %s\n" % self._path)
252 hg.merge(self._repo, state[1], remind=False)
255 hg.merge(self._repo, state[1], remind=False)
253
256
254 def push(self, force):
257 def push(self, force):
255 # push subrepos depth-first for coherent ordering
258 # push subrepos depth-first for coherent ordering
256 c = self._repo['']
259 c = self._repo['']
257 subs = c.substate # only repos that are committed
260 subs = c.substate # only repos that are committed
258 for s in sorted(subs):
261 for s in sorted(subs):
259 c.sub(s).push(force)
262 c.sub(s).push(force)
260
263
261 self._repo.ui.status(_('pushing subrepo %s\n') % self._path)
264 self._repo.ui.status(_('pushing subrepo %s\n') % self._path)
262 dsturl = _abssource(self._repo, True)
265 dsturl = _abssource(self._repo, True)
263 other = hg.repository(self._repo.ui, dsturl)
266 other = hg.repository(self._repo.ui, dsturl)
264 self._repo.push(other, force)
267 self._repo.push(other, force)
265
268
266 class svnsubrepo(object):
269 class svnsubrepo(object):
267 def __init__(self, ctx, path, state):
270 def __init__(self, ctx, path, state):
268 self._path = path
271 self._path = path
269 self._state = state
272 self._state = state
270 self._ctx = ctx
273 self._ctx = ctx
271 self._ui = ctx._repo.ui
274 self._ui = ctx._repo.ui
272
275
273 def _svncommand(self, commands):
276 def _svncommand(self, commands):
274 cmd = ['svn'] + commands + [self._path]
277 cmd = ['svn'] + commands + [self._path]
275 cmd = [util.shellquote(arg) for arg in cmd]
278 cmd = [util.shellquote(arg) for arg in cmd]
276 cmd = util.quotecommand(' '.join(cmd))
279 cmd = util.quotecommand(' '.join(cmd))
277 env = dict(os.environ)
280 env = dict(os.environ)
278 # Avoid localized output, preserve current locale for everything else.
281 # Avoid localized output, preserve current locale for everything else.
279 env['LC_MESSAGES'] = 'C'
282 env['LC_MESSAGES'] = 'C'
280 write, read, err = util.popen3(cmd, env=env, newlines=True)
283 write, read, err = util.popen3(cmd, env=env, newlines=True)
281 retdata = read.read()
284 retdata = read.read()
282 err = err.read().strip()
285 err = err.read().strip()
283 if err:
286 if err:
284 raise util.Abort(err)
287 raise util.Abort(err)
285 return retdata
288 return retdata
286
289
287 def _wcrev(self):
290 def _wcrev(self):
288 output = self._svncommand(['info', '--xml'])
291 output = self._svncommand(['info', '--xml'])
289 doc = xml.dom.minidom.parseString(output)
292 doc = xml.dom.minidom.parseString(output)
290 entries = doc.getElementsByTagName('entry')
293 entries = doc.getElementsByTagName('entry')
291 if not entries:
294 if not entries:
292 return 0
295 return 0
293 return int(entries[0].getAttribute('revision') or 0)
296 return int(entries[0].getAttribute('revision') or 0)
294
297
295 def _wcchanged(self):
298 def _wcchanged(self):
296 """Return (changes, extchanges) where changes is True
299 """Return (changes, extchanges) where changes is True
297 if the working directory was changed, and extchanges is
300 if the working directory was changed, and extchanges is
298 True if any of these changes concern an external entry.
301 True if any of these changes concern an external entry.
299 """
302 """
300 output = self._svncommand(['status', '--xml'])
303 output = self._svncommand(['status', '--xml'])
301 externals, changes = [], []
304 externals, changes = [], []
302 doc = xml.dom.minidom.parseString(output)
305 doc = xml.dom.minidom.parseString(output)
303 for e in doc.getElementsByTagName('entry'):
306 for e in doc.getElementsByTagName('entry'):
304 s = e.getElementsByTagName('wc-status')
307 s = e.getElementsByTagName('wc-status')
305 if not s:
308 if not s:
306 continue
309 continue
307 item = s[0].getAttribute('item')
310 item = s[0].getAttribute('item')
308 props = s[0].getAttribute('props')
311 props = s[0].getAttribute('props')
309 path = e.getAttribute('path')
312 path = e.getAttribute('path')
310 if item == 'external':
313 if item == 'external':
311 externals.append(path)
314 externals.append(path)
312 if (item not in ('', 'normal', 'unversioned', 'external')
315 if (item not in ('', 'normal', 'unversioned', 'external')
313 or props not in ('', 'none')):
316 or props not in ('', 'none')):
314 changes.append(path)
317 changes.append(path)
315 for path in changes:
318 for path in changes:
316 for ext in externals:
319 for ext in externals:
317 if path == ext or path.startswith(ext + os.sep):
320 if path == ext or path.startswith(ext + os.sep):
318 return True, True
321 return True, True
319 return bool(changes), False
322 return bool(changes), False
320
323
321 def dirty(self):
324 def dirty(self):
322 if self._wcrev() == self._state[1] and not self._wcchanged()[0]:
325 if self._wcrev() == self._state[1] and not self._wcchanged()[0]:
323 return False
326 return False
324 return True
327 return True
325
328
326 def commit(self, text, user, date):
329 def commit(self, text, user, date):
327 # user and date are out of our hands since svn is centralized
330 # user and date are out of our hands since svn is centralized
328 changed, extchanged = self._wcchanged()
331 changed, extchanged = self._wcchanged()
329 if not changed:
332 if not changed:
330 return self._wcrev()
333 return self._wcrev()
331 if extchanged:
334 if extchanged:
332 # Do not try to commit externals
335 # Do not try to commit externals
333 raise util.Abort(_('cannot commit svn externals'))
336 raise util.Abort(_('cannot commit svn externals'))
334 commitinfo = self._svncommand(['commit', '-m', text])
337 commitinfo = self._svncommand(['commit', '-m', text])
335 self._ui.status(commitinfo)
338 self._ui.status(commitinfo)
336 newrev = re.search('Committed revision ([\d]+).', commitinfo)
339 newrev = re.search('Committed revision ([\d]+).', commitinfo)
337 if not newrev:
340 if not newrev:
338 raise util.Abort(commitinfo.splitlines()[-1])
341 raise util.Abort(commitinfo.splitlines()[-1])
339 newrev = newrev.groups()[0]
342 newrev = newrev.groups()[0]
340 self._ui.status(self._svncommand(['update', '-r', newrev]))
343 self._ui.status(self._svncommand(['update', '-r', newrev]))
341 return newrev
344 return newrev
342
345
343 def remove(self):
346 def remove(self):
344 if self.dirty():
347 if self.dirty():
345 self._ui.warn(_('not removing repo %s because '
348 self._ui.warn(_('not removing repo %s because '
346 'it has changes.\n' % self._path))
349 'it has changes.\n' % self._path))
347 return
350 return
348 self._ui.note(_('removing subrepo %s\n') % self._path)
351 self._ui.note(_('removing subrepo %s\n') % self._path)
349 shutil.rmtree(self._ctx.repo.join(self._path))
352 shutil.rmtree(self._ctx.repo.join(self._path))
350
353
351 def get(self, state):
354 def get(self, state):
352 status = self._svncommand(['checkout', state[0], '--revision', state[1]])
355 status = self._svncommand(['checkout', state[0], '--revision', state[1]])
353 if not re.search('Checked out revision [\d]+.', status):
356 if not re.search('Checked out revision [\d]+.', status):
354 raise util.Abort(status.splitlines()[-1])
357 raise util.Abort(status.splitlines()[-1])
355 self._ui.status(status)
358 self._ui.status(status)
356
359
357 def merge(self, state):
360 def merge(self, state):
358 old = int(self._state[1])
361 old = int(self._state[1])
359 new = int(state[1])
362 new = int(state[1])
360 if new > old:
363 if new > old:
361 self.get(state)
364 self.get(state)
362
365
363 def push(self, force):
366 def push(self, force):
364 # nothing for svn
367 # nothing for svn
365 pass
368 pass
366
369
367 types = {
370 types = {
368 'hg': hgsubrepo,
371 'hg': hgsubrepo,
369 'svn': svnsubrepo,
372 'svn': svnsubrepo,
370 }
373 }
@@ -1,298 +1,298 b''
1 # verify.py - repository integrity checking for Mercurial
1 # verify.py - repository integrity checking for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, short
8 from node import nullid, short
9 from i18n import _
9 from i18n import _
10 import os
10 import os
11 import revlog, util, error
11 import revlog, util, error
12
12
13 def verify(repo):
13 def verify(repo):
14 lock = repo.lock()
14 lock = repo.lock()
15 try:
15 try:
16 return _verify(repo)
16 return _verify(repo)
17 finally:
17 finally:
18 lock.release()
18 lock.release()
19
19
20 def _verify(repo):
20 def _verify(repo):
21 mflinkrevs = {}
21 mflinkrevs = {}
22 filelinkrevs = {}
22 filelinkrevs = {}
23 filenodes = {}
23 filenodes = {}
24 revisions = 0
24 revisions = 0
25 badrevs = set()
25 badrevs = set()
26 errors = [0]
26 errors = [0]
27 warnings = [0]
27 warnings = [0]
28 ui = repo.ui
28 ui = repo.ui
29 cl = repo.changelog
29 cl = repo.changelog
30 mf = repo.manifest
30 mf = repo.manifest
31 lrugetctx = util.lrucachefunc(repo.changectx)
31 lrugetctx = util.lrucachefunc(repo.changectx)
32
32
33 if not repo.cancopy():
33 if not repo.cancopy():
34 raise util.Abort(_("cannot verify bundle or remote repos"))
34 raise util.Abort(_("cannot verify bundle or remote repos"))
35
35
36 def err(linkrev, msg, filename=None):
36 def err(linkrev, msg, filename=None):
37 if linkrev != None:
37 if linkrev != None:
38 badrevs.add(linkrev)
38 badrevs.add(linkrev)
39 else:
39 else:
40 linkrev = '?'
40 linkrev = '?'
41 msg = "%s: %s" % (linkrev, msg)
41 msg = "%s: %s" % (linkrev, msg)
42 if filename:
42 if filename:
43 msg = "%s@%s" % (filename, msg)
43 msg = "%s@%s" % (filename, msg)
44 ui.warn(" " + msg + "\n")
44 ui.warn(" " + msg + "\n")
45 errors[0] += 1
45 errors[0] += 1
46
46
47 def exc(linkrev, msg, inst, filename=None):
47 def exc(linkrev, msg, inst, filename=None):
48 if isinstance(inst, KeyboardInterrupt):
48 if isinstance(inst, KeyboardInterrupt):
49 ui.warn(_("interrupted"))
49 ui.warn(_("interrupted"))
50 raise
50 raise
51 err(linkrev, "%s: %s" % (msg, inst), filename)
51 err(linkrev, "%s: %s" % (msg, inst), filename)
52
52
53 def warn(msg):
53 def warn(msg):
54 ui.warn(msg + "\n")
54 ui.warn(msg + "\n")
55 warnings[0] += 1
55 warnings[0] += 1
56
56
57 def checklog(obj, name, linkrev):
57 def checklog(obj, name, linkrev):
58 if not len(obj) and (havecl or havemf):
58 if not len(obj) and (havecl or havemf):
59 err(linkrev, _("empty or missing %s") % name)
59 err(linkrev, _("empty or missing %s") % name)
60 return
60 return
61
61
62 d = obj.checksize()
62 d = obj.checksize()
63 if d[0]:
63 if d[0]:
64 err(None, _("data length off by %d bytes") % d[0], name)
64 err(None, _("data length off by %d bytes") % d[0], name)
65 if d[1]:
65 if d[1]:
66 err(None, _("index contains %d extra bytes") % d[1], name)
66 err(None, _("index contains %d extra bytes") % d[1], name)
67
67
68 if obj.version != revlog.REVLOGV0:
68 if obj.version != revlog.REVLOGV0:
69 if not revlogv1:
69 if not revlogv1:
70 warn(_("warning: `%s' uses revlog format 1") % name)
70 warn(_("warning: `%s' uses revlog format 1") % name)
71 elif revlogv1:
71 elif revlogv1:
72 warn(_("warning: `%s' uses revlog format 0") % name)
72 warn(_("warning: `%s' uses revlog format 0") % name)
73
73
74 def checkentry(obj, i, node, seen, linkrevs, f):
74 def checkentry(obj, i, node, seen, linkrevs, f):
75 lr = obj.linkrev(obj.rev(node))
75 lr = obj.linkrev(obj.rev(node))
76 if lr < 0 or (havecl and lr not in linkrevs):
76 if lr < 0 or (havecl and lr not in linkrevs):
77 if lr < 0 or lr >= len(cl):
77 if lr < 0 or lr >= len(cl):
78 msg = _("rev %d points to nonexistent changeset %d")
78 msg = _("rev %d points to nonexistent changeset %d")
79 else:
79 else:
80 msg = _("rev %d points to unexpected changeset %d")
80 msg = _("rev %d points to unexpected changeset %d")
81 err(None, msg % (i, lr), f)
81 err(None, msg % (i, lr), f)
82 if linkrevs:
82 if linkrevs:
83 if f and len(linkrevs) > 1:
83 if f and len(linkrevs) > 1:
84 try:
84 try:
85 # attempt to filter down to real linkrevs
85 # attempt to filter down to real linkrevs
86 linkrevs = [l for l in linkrevs
86 linkrevs = [l for l in linkrevs
87 if lrugetctx(l)[f].filenode() == node]
87 if lrugetctx(l)[f].filenode() == node]
88 except:
88 except:
89 pass
89 pass
90 warn(_(" (expected %s)") % " ".join(map(str, linkrevs)))
90 warn(_(" (expected %s)") % " ".join(map(str, linkrevs)))
91 lr = None # can't be trusted
91 lr = None # can't be trusted
92
92
93 try:
93 try:
94 p1, p2 = obj.parents(node)
94 p1, p2 = obj.parents(node)
95 if p1 not in seen and p1 != nullid:
95 if p1 not in seen and p1 != nullid:
96 err(lr, _("unknown parent 1 %s of %s") %
96 err(lr, _("unknown parent 1 %s of %s") %
97 (short(p1), short(n)), f)
97 (short(p1), short(n)), f)
98 if p2 not in seen and p2 != nullid:
98 if p2 not in seen and p2 != nullid:
99 err(lr, _("unknown parent 2 %s of %s") %
99 err(lr, _("unknown parent 2 %s of %s") %
100 (short(p2), short(p1)), f)
100 (short(p2), short(p1)), f)
101 except Exception, inst:
101 except Exception, inst:
102 exc(lr, _("checking parents of %s") % short(node), inst, f)
102 exc(lr, _("checking parents of %s") % short(node), inst, f)
103
103
104 if node in seen:
104 if node in seen:
105 err(lr, _("duplicate revision %d (%d)") % (i, seen[n]), f)
105 err(lr, _("duplicate revision %d (%d)") % (i, seen[n]), f)
106 seen[n] = i
106 seen[n] = i
107 return lr
107 return lr
108
108
109 if os.path.exists(repo.sjoin("journal")):
109 if os.path.exists(repo.sjoin("journal")):
110 ui.warn(_("abandoned transaction found - run hg recover\n"))
110 ui.warn(_("abandoned transaction found - run hg recover\n"))
111
111
112 revlogv1 = cl.version != revlog.REVLOGV0
112 revlogv1 = cl.version != revlog.REVLOGV0
113 if ui.verbose or not revlogv1:
113 if ui.verbose or not revlogv1:
114 ui.status(_("repository uses revlog format %d\n") %
114 ui.status(_("repository uses revlog format %d\n") %
115 (revlogv1 and 1 or 0))
115 (revlogv1 and 1 or 0))
116
116
117 havecl = len(cl) > 0
117 havecl = len(cl) > 0
118 havemf = len(mf) > 0
118 havemf = len(mf) > 0
119
119
120 ui.status(_("checking changesets\n"))
120 ui.status(_("checking changesets\n"))
121 seen = {}
121 seen = {}
122 checklog(cl, "changelog", 0)
122 checklog(cl, "changelog", 0)
123 total = len(repo)
123 total = len(repo)
124 for i in repo:
124 for i in repo:
125 ui.progress(_('changelog'), i, total=total)
125 ui.progress(_('checking'), i, total=total)
126 n = cl.node(i)
126 n = cl.node(i)
127 checkentry(cl, i, n, seen, [i], "changelog")
127 checkentry(cl, i, n, seen, [i], "changelog")
128
128
129 try:
129 try:
130 changes = cl.read(n)
130 changes = cl.read(n)
131 mflinkrevs.setdefault(changes[0], []).append(i)
131 mflinkrevs.setdefault(changes[0], []).append(i)
132 for f in changes[3]:
132 for f in changes[3]:
133 filelinkrevs.setdefault(f, []).append(i)
133 filelinkrevs.setdefault(f, []).append(i)
134 except Exception, inst:
134 except Exception, inst:
135 exc(i, _("unpacking changeset %s") % short(n), inst)
135 exc(i, _("unpacking changeset %s") % short(n), inst)
136 ui.progress(_('changelog'), None)
136 ui.progress(_('checking'), None)
137
137
138 ui.status(_("checking manifests\n"))
138 ui.status(_("checking manifests\n"))
139 seen = {}
139 seen = {}
140 checklog(mf, "manifest", 0)
140 checklog(mf, "manifest", 0)
141 total = len(mf)
141 total = len(mf)
142 for i in mf:
142 for i in mf:
143 ui.progress(_('manifests'), i, total=total)
143 ui.progress(_('checking'), i, total=total)
144 n = mf.node(i)
144 n = mf.node(i)
145 lr = checkentry(mf, i, n, seen, mflinkrevs.get(n, []), "manifest")
145 lr = checkentry(mf, i, n, seen, mflinkrevs.get(n, []), "manifest")
146 if n in mflinkrevs:
146 if n in mflinkrevs:
147 del mflinkrevs[n]
147 del mflinkrevs[n]
148 else:
148 else:
149 err(lr, _("%s not in changesets") % short(n), "manifest")
149 err(lr, _("%s not in changesets") % short(n), "manifest")
150
150
151 try:
151 try:
152 for f, fn in mf.readdelta(n).iteritems():
152 for f, fn in mf.readdelta(n).iteritems():
153 if not f:
153 if not f:
154 err(lr, _("file without name in manifest"))
154 err(lr, _("file without name in manifest"))
155 elif f != "/dev/null":
155 elif f != "/dev/null":
156 filenodes.setdefault(f, {}).setdefault(fn, lr)
156 filenodes.setdefault(f, {}).setdefault(fn, lr)
157 except Exception, inst:
157 except Exception, inst:
158 exc(lr, _("reading manifest delta %s") % short(n), inst)
158 exc(lr, _("reading manifest delta %s") % short(n), inst)
159 ui.progress(_('manifests'), None)
159 ui.progress(_('checking'), None)
160
160
161 ui.status(_("crosschecking files in changesets and manifests\n"))
161 ui.status(_("crosschecking files in changesets and manifests\n"))
162
162
163 total = len(mflinkrevs) + len(filelinkrevs) + len(filenodes)
163 total = len(mflinkrevs) + len(filelinkrevs) + len(filenodes)
164 count = 0
164 count = 0
165 if havemf:
165 if havemf:
166 for c, m in sorted([(c, m) for m in mflinkrevs
166 for c, m in sorted([(c, m) for m in mflinkrevs
167 for c in mflinkrevs[m]]):
167 for c in mflinkrevs[m]]):
168 count += 1
168 count += 1
169 ui.progress(_('crosscheck'), count, total=total)
169 ui.progress(_('crosschecking'), count, total=total)
170 err(c, _("changeset refers to unknown manifest %s") % short(m))
170 err(c, _("changeset refers to unknown manifest %s") % short(m))
171 mflinkrevs = None # del is bad here due to scope issues
171 mflinkrevs = None # del is bad here due to scope issues
172
172
173 for f in sorted(filelinkrevs):
173 for f in sorted(filelinkrevs):
174 count += 1
174 count += 1
175 ui.progress(_('crosscheck'), count, total=total)
175 ui.progress(_('crosschecking'), count, total=total)
176 if f not in filenodes:
176 if f not in filenodes:
177 lr = filelinkrevs[f][0]
177 lr = filelinkrevs[f][0]
178 err(lr, _("in changeset but not in manifest"), f)
178 err(lr, _("in changeset but not in manifest"), f)
179
179
180 if havecl:
180 if havecl:
181 for f in sorted(filenodes):
181 for f in sorted(filenodes):
182 count += 1
182 count += 1
183 ui.progress(_('crosscheck'), count, total=total)
183 ui.progress(_('crosschecking'), count, total=total)
184 if f not in filelinkrevs:
184 if f not in filelinkrevs:
185 try:
185 try:
186 fl = repo.file(f)
186 fl = repo.file(f)
187 lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]])
187 lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]])
188 except:
188 except:
189 lr = None
189 lr = None
190 err(lr, _("in manifest but not in changeset"), f)
190 err(lr, _("in manifest but not in changeset"), f)
191
191
192 ui.progress(_('crosscheck'), None)
192 ui.progress(_('crosschecking'), None)
193
193
194 ui.status(_("checking files\n"))
194 ui.status(_("checking files\n"))
195
195
196 storefiles = set()
196 storefiles = set()
197 for f, f2, size in repo.store.datafiles():
197 for f, f2, size in repo.store.datafiles():
198 if not f:
198 if not f:
199 err(None, _("cannot decode filename '%s'") % f2)
199 err(None, _("cannot decode filename '%s'") % f2)
200 elif size > 0:
200 elif size > 0:
201 storefiles.add(f)
201 storefiles.add(f)
202
202
203 files = sorted(set(filenodes) | set(filelinkrevs))
203 files = sorted(set(filenodes) | set(filelinkrevs))
204 total = len(files)
204 total = len(files)
205 for i, f in enumerate(files):
205 for i, f in enumerate(files):
206 ui.progress(_('files'), i, item=f, total=total)
206 ui.progress(_('checking'), i, item=f, total=total)
207 try:
207 try:
208 linkrevs = filelinkrevs[f]
208 linkrevs = filelinkrevs[f]
209 except KeyError:
209 except KeyError:
210 # in manifest but not in changelog
210 # in manifest but not in changelog
211 linkrevs = []
211 linkrevs = []
212
212
213 if linkrevs:
213 if linkrevs:
214 lr = linkrevs[0]
214 lr = linkrevs[0]
215 else:
215 else:
216 lr = None
216 lr = None
217
217
218 try:
218 try:
219 fl = repo.file(f)
219 fl = repo.file(f)
220 except error.RevlogError, e:
220 except error.RevlogError, e:
221 err(lr, _("broken revlog! (%s)") % e, f)
221 err(lr, _("broken revlog! (%s)") % e, f)
222 continue
222 continue
223
223
224 for ff in fl.files():
224 for ff in fl.files():
225 try:
225 try:
226 storefiles.remove(ff)
226 storefiles.remove(ff)
227 except KeyError:
227 except KeyError:
228 err(lr, _("missing revlog!"), ff)
228 err(lr, _("missing revlog!"), ff)
229
229
230 checklog(fl, f, lr)
230 checklog(fl, f, lr)
231 seen = {}
231 seen = {}
232 for i in fl:
232 for i in fl:
233 revisions += 1
233 revisions += 1
234 n = fl.node(i)
234 n = fl.node(i)
235 lr = checkentry(fl, i, n, seen, linkrevs, f)
235 lr = checkentry(fl, i, n, seen, linkrevs, f)
236 if f in filenodes:
236 if f in filenodes:
237 if havemf and n not in filenodes[f]:
237 if havemf and n not in filenodes[f]:
238 err(lr, _("%s not in manifests") % (short(n)), f)
238 err(lr, _("%s not in manifests") % (short(n)), f)
239 else:
239 else:
240 del filenodes[f][n]
240 del filenodes[f][n]
241
241
242 # verify contents
242 # verify contents
243 try:
243 try:
244 t = fl.read(n)
244 t = fl.read(n)
245 rp = fl.renamed(n)
245 rp = fl.renamed(n)
246 if len(t) != fl.size(i):
246 if len(t) != fl.size(i):
247 if len(fl.revision(n)) != fl.size(i):
247 if len(fl.revision(n)) != fl.size(i):
248 err(lr, _("unpacked size is %s, %s expected") %
248 err(lr, _("unpacked size is %s, %s expected") %
249 (len(t), fl.size(i)), f)
249 (len(t), fl.size(i)), f)
250 except Exception, inst:
250 except Exception, inst:
251 exc(lr, _("unpacking %s") % short(n), inst, f)
251 exc(lr, _("unpacking %s") % short(n), inst, f)
252
252
253 # check renames
253 # check renames
254 try:
254 try:
255 if rp:
255 if rp:
256 if lr is not None and ui.verbose:
256 if lr is not None and ui.verbose:
257 ctx = lrugetctx(lr)
257 ctx = lrugetctx(lr)
258 found = False
258 found = False
259 for pctx in ctx.parents():
259 for pctx in ctx.parents():
260 if rp[0] in pctx:
260 if rp[0] in pctx:
261 found = True
261 found = True
262 break
262 break
263 if not found:
263 if not found:
264 warn(_("warning: copy source of '%s' not"
264 warn(_("warning: copy source of '%s' not"
265 " in parents of %s") % (f, ctx))
265 " in parents of %s") % (f, ctx))
266 fl2 = repo.file(rp[0])
266 fl2 = repo.file(rp[0])
267 if not len(fl2):
267 if not len(fl2):
268 err(lr, _("empty or missing copy source revlog %s:%s")
268 err(lr, _("empty or missing copy source revlog %s:%s")
269 % (rp[0], short(rp[1])), f)
269 % (rp[0], short(rp[1])), f)
270 elif rp[1] == nullid:
270 elif rp[1] == nullid:
271 ui.note(_("warning: %s@%s: copy source"
271 ui.note(_("warning: %s@%s: copy source"
272 " revision is nullid %s:%s\n")
272 " revision is nullid %s:%s\n")
273 % (f, lr, rp[0], short(rp[1])))
273 % (f, lr, rp[0], short(rp[1])))
274 else:
274 else:
275 fl2.rev(rp[1])
275 fl2.rev(rp[1])
276 except Exception, inst:
276 except Exception, inst:
277 exc(lr, _("checking rename of %s") % short(n), inst, f)
277 exc(lr, _("checking rename of %s") % short(n), inst, f)
278
278
279 # cross-check
279 # cross-check
280 if f in filenodes:
280 if f in filenodes:
281 fns = [(lr, n) for n, lr in filenodes[f].iteritems()]
281 fns = [(lr, n) for n, lr in filenodes[f].iteritems()]
282 for lr, node in sorted(fns):
282 for lr, node in sorted(fns):
283 err(lr, _("%s in manifests not found") % short(node), f)
283 err(lr, _("%s in manifests not found") % short(node), f)
284 ui.progress(_('files'), None)
284 ui.progress(_('checking'), None)
285
285
286 for f in storefiles:
286 for f in storefiles:
287 warn(_("warning: orphan revlog '%s'") % f)
287 warn(_("warning: orphan revlog '%s'") % f)
288
288
289 ui.status(_("%d files, %d changesets, %d total revisions\n") %
289 ui.status(_("%d files, %d changesets, %d total revisions\n") %
290 (len(files), len(cl), revisions))
290 (len(files), len(cl), revisions))
291 if warnings[0]:
291 if warnings[0]:
292 ui.warn(_("%d warnings encountered!\n") % warnings[0])
292 ui.warn(_("%d warnings encountered!\n") % warnings[0])
293 if errors[0]:
293 if errors[0]:
294 ui.warn(_("%d integrity errors encountered!\n") % errors[0])
294 ui.warn(_("%d integrity errors encountered!\n") % errors[0])
295 if badrevs:
295 if badrevs:
296 ui.warn(_("(first damaged changeset appears to be %d)\n")
296 ui.warn(_("(first damaged changeset appears to be %d)\n")
297 % min(badrevs))
297 % min(badrevs))
298 return 1
298 return 1
This diff has been collapsed as it changes many lines, (896 lines changed) Show them Hide them
@@ -1,1165 +1,1165 b''
1 3:911600dab2ae
1 3:911600dab2ae
2 requesting all changes
2 requesting all changes
3 adding changesets
3 adding changesets
4 adding manifests
4 adding manifests
5 adding file changes
5 adding file changes
6 added 1 changesets with 3 changes to 3 files
6 added 1 changesets with 3 changes to 3 files
7 updating to branch default
7 updating to branch default
8 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
8 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
9
9
10 Extension disabled for lack of a hook
10 Extension disabled for lack of a hook
11 Pushing as user fred
11 Pushing as user fred
12 hgrc = """
12 hgrc = """
13 """
13 """
14 pushing to ../b
14 pushing to ../b
15 searching for changes
15 searching for changes
16 common changesets up to 6675d58eff77
16 common changesets up to 6675d58eff77
17 3 changesets found
17 3 changesets found
18 list of changesets:
18 list of changesets:
19 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
19 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
20 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
20 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
21 911600dab2ae7a9baff75958b84fe606851ce955
21 911600dab2ae7a9baff75958b84fe606851ce955
22 adding changesets
22 adding changesets
23 bundle changes: 0 chunks
23 bundling changes: 0 chunks
24 bundle changes: 1 chunks
24 bundling changes: 1 chunks
25 bundle changes: 2 chunks
25 bundling changes: 2 chunks
26 bundle changes: 3 chunks
26 bundling changes: 3 chunks
27 bundle changes: 4 chunks
27 bundling changes: 4 chunks
28 bundle changes: 5 chunks
28 bundling changes: 5 chunks
29 bundle changes: 6 chunks
29 bundling changes: 6 chunks
30 bundle changes: 7 chunks
30 bundling changes: 7 chunks
31 bundle changes: 8 chunks
31 bundling changes: 8 chunks
32 bundle changes: 9 chunks
32 bundling changes: 9 chunks
33 bundle manifests: 0 chunks
33 bundling manifests: 0 chunks
34 bundle manifests: 1 chunks
34 bundling manifests: 1 chunks
35 bundle manifests: 2 chunks
35 bundling manifests: 2 chunks
36 bundle manifests: 3 chunks
36 bundling manifests: 3 chunks
37 bundle manifests: 4 chunks
37 bundling manifests: 4 chunks
38 bundle manifests: 5 chunks
38 bundling manifests: 5 chunks
39 bundle manifests: 6 chunks
39 bundling manifests: 6 chunks
40 bundle manifests: 7 chunks
40 bundling manifests: 7 chunks
41 bundle manifests: 8 chunks
41 bundling manifests: 8 chunks
42 bundle manifests: 9 chunks
42 bundling manifests: 9 chunks
43 bundle files: foo/Bar/file.txt 0 chunks
43 bundling files: foo/Bar/file.txt 0 chunks
44 bundle files: foo/Bar/file.txt 1 chunks
44 bundling files: foo/Bar/file.txt 1 chunks
45 bundle files: foo/Bar/file.txt 2 chunks
45 bundling files: foo/Bar/file.txt 2 chunks
46 bundle files: foo/Bar/file.txt 3 chunks
46 bundling files: foo/Bar/file.txt 3 chunks
47 bundle files: foo/file.txt 4 chunks
47 bundling files: foo/file.txt 4 chunks
48 bundle files: foo/file.txt 5 chunks
48 bundling files: foo/file.txt 5 chunks
49 bundle files: foo/file.txt 6 chunks
49 bundling files: foo/file.txt 6 chunks
50 bundle files: foo/file.txt 7 chunks
50 bundling files: foo/file.txt 7 chunks
51 bundle files: quux/file.py 8 chunks
51 bundling files: quux/file.py 8 chunks
52 bundle files: quux/file.py 9 chunks
52 bundling files: quux/file.py 9 chunks
53 bundle files: quux/file.py 10 chunks
53 bundling files: quux/file.py 10 chunks
54 bundle files: quux/file.py 11 chunks
54 bundling files: quux/file.py 11 chunks
55 changesets: 1 chunks
55 changesets: 1 chunks
56 add changeset ef1ea85a6374
56 add changeset ef1ea85a6374
57 changesets: 2 chunks
57 changesets: 2 chunks
58 add changeset f9cafe1212c8
58 add changeset f9cafe1212c8
59 changesets: 3 chunks
59 changesets: 3 chunks
60 add changeset 911600dab2ae
60 add changeset 911600dab2ae
61 adding manifests
61 adding manifests
62 manifests: 1 chunks
62 manifests: 1 chunks
63 manifests: 2 chunks
63 manifests: 2 chunks
64 manifests: 3 chunks
64 manifests: 3 chunks
65 adding file changes
65 adding file changes
66 adding foo/Bar/file.txt revisions
66 adding foo/Bar/file.txt revisions
67 files: 1 chunks
67 files: 1 chunks
68 adding foo/file.txt revisions
68 adding foo/file.txt revisions
69 files: 2 chunks
69 files: 2 chunks
70 adding quux/file.py revisions
70 adding quux/file.py revisions
71 files: 3 chunks
71 files: 3 chunks
72 added 3 changesets with 3 changes to 3 files
72 added 3 changesets with 3 changes to 3 files
73 updating the branch cache
73 updating the branch cache
74 rolling back last transaction
74 rolling back last transaction
75 0:6675d58eff77
75 0:6675d58eff77
76
76
77 Extension disabled for lack of acl.sources
77 Extension disabled for lack of acl.sources
78 Pushing as user fred
78 Pushing as user fred
79 hgrc = """
79 hgrc = """
80 [hooks]
80 [hooks]
81 pretxnchangegroup.acl = python:hgext.acl.hook
81 pretxnchangegroup.acl = python:hgext.acl.hook
82 """
82 """
83 pushing to ../b
83 pushing to ../b
84 searching for changes
84 searching for changes
85 common changesets up to 6675d58eff77
85 common changesets up to 6675d58eff77
86 invalidating branch cache (tip differs)
86 invalidating branch cache (tip differs)
87 3 changesets found
87 3 changesets found
88 list of changesets:
88 list of changesets:
89 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
89 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
90 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
90 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
91 911600dab2ae7a9baff75958b84fe606851ce955
91 911600dab2ae7a9baff75958b84fe606851ce955
92 adding changesets
92 adding changesets
93 bundle changes: 0 chunks
93 bundling changes: 0 chunks
94 bundle changes: 1 chunks
94 bundling changes: 1 chunks
95 bundle changes: 2 chunks
95 bundling changes: 2 chunks
96 bundle changes: 3 chunks
96 bundling changes: 3 chunks
97 bundle changes: 4 chunks
97 bundling changes: 4 chunks
98 bundle changes: 5 chunks
98 bundling changes: 5 chunks
99 bundle changes: 6 chunks
99 bundling changes: 6 chunks
100 bundle changes: 7 chunks
100 bundling changes: 7 chunks
101 bundle changes: 8 chunks
101 bundling changes: 8 chunks
102 bundle changes: 9 chunks
102 bundling changes: 9 chunks
103 bundle manifests: 0 chunks
103 bundling manifests: 0 chunks
104 bundle manifests: 1 chunks
104 bundling manifests: 1 chunks
105 bundle manifests: 2 chunks
105 bundling manifests: 2 chunks
106 bundle manifests: 3 chunks
106 bundling manifests: 3 chunks
107 bundle manifests: 4 chunks
107 bundling manifests: 4 chunks
108 bundle manifests: 5 chunks
108 bundling manifests: 5 chunks
109 bundle manifests: 6 chunks
109 bundling manifests: 6 chunks
110 bundle manifests: 7 chunks
110 bundling manifests: 7 chunks
111 bundle manifests: 8 chunks
111 bundling manifests: 8 chunks
112 bundle manifests: 9 chunks
112 bundling manifests: 9 chunks
113 bundle files: foo/Bar/file.txt 0 chunks
113 bundling files: foo/Bar/file.txt 0 chunks
114 bundle files: foo/Bar/file.txt 1 chunks
114 bundling files: foo/Bar/file.txt 1 chunks
115 bundle files: foo/Bar/file.txt 2 chunks
115 bundling files: foo/Bar/file.txt 2 chunks
116 bundle files: foo/Bar/file.txt 3 chunks
116 bundling files: foo/Bar/file.txt 3 chunks
117 bundle files: foo/file.txt 4 chunks
117 bundling files: foo/file.txt 4 chunks
118 bundle files: foo/file.txt 5 chunks
118 bundling files: foo/file.txt 5 chunks
119 bundle files: foo/file.txt 6 chunks
119 bundling files: foo/file.txt 6 chunks
120 bundle files: foo/file.txt 7 chunks
120 bundling files: foo/file.txt 7 chunks
121 bundle files: quux/file.py 8 chunks
121 bundling files: quux/file.py 8 chunks
122 bundle files: quux/file.py 9 chunks
122 bundling files: quux/file.py 9 chunks
123 bundle files: quux/file.py 10 chunks
123 bundling files: quux/file.py 10 chunks
124 bundle files: quux/file.py 11 chunks
124 bundling files: quux/file.py 11 chunks
125 changesets: 1 chunks
125 changesets: 1 chunks
126 add changeset ef1ea85a6374
126 add changeset ef1ea85a6374
127 changesets: 2 chunks
127 changesets: 2 chunks
128 add changeset f9cafe1212c8
128 add changeset f9cafe1212c8
129 changesets: 3 chunks
129 changesets: 3 chunks
130 add changeset 911600dab2ae
130 add changeset 911600dab2ae
131 adding manifests
131 adding manifests
132 manifests: 1 chunks
132 manifests: 1 chunks
133 manifests: 2 chunks
133 manifests: 2 chunks
134 manifests: 3 chunks
134 manifests: 3 chunks
135 adding file changes
135 adding file changes
136 adding foo/Bar/file.txt revisions
136 adding foo/Bar/file.txt revisions
137 files: 1 chunks
137 files: 1 chunks
138 adding foo/file.txt revisions
138 adding foo/file.txt revisions
139 files: 2 chunks
139 files: 2 chunks
140 adding quux/file.py revisions
140 adding quux/file.py revisions
141 files: 3 chunks
141 files: 3 chunks
142 added 3 changesets with 3 changes to 3 files
142 added 3 changesets with 3 changes to 3 files
143 calling hook pretxnchangegroup.acl: hgext.acl.hook
143 calling hook pretxnchangegroup.acl: hgext.acl.hook
144 acl: changes have source "push" - skipping
144 acl: changes have source "push" - skipping
145 updating the branch cache
145 updating the branch cache
146 rolling back last transaction
146 rolling back last transaction
147 0:6675d58eff77
147 0:6675d58eff77
148
148
149 No [acl.allow]/[acl.deny]
149 No [acl.allow]/[acl.deny]
150 Pushing as user fred
150 Pushing as user fred
151 hgrc = """
151 hgrc = """
152 [hooks]
152 [hooks]
153 pretxnchangegroup.acl = python:hgext.acl.hook
153 pretxnchangegroup.acl = python:hgext.acl.hook
154 [acl]
154 [acl]
155 sources = push
155 sources = push
156 """
156 """
157 pushing to ../b
157 pushing to ../b
158 searching for changes
158 searching for changes
159 common changesets up to 6675d58eff77
159 common changesets up to 6675d58eff77
160 invalidating branch cache (tip differs)
160 invalidating branch cache (tip differs)
161 3 changesets found
161 3 changesets found
162 list of changesets:
162 list of changesets:
163 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
163 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
164 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
164 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
165 911600dab2ae7a9baff75958b84fe606851ce955
165 911600dab2ae7a9baff75958b84fe606851ce955
166 adding changesets
166 adding changesets
167 bundle changes: 0 chunks
167 bundling changes: 0 chunks
168 bundle changes: 1 chunks
168 bundling changes: 1 chunks
169 bundle changes: 2 chunks
169 bundling changes: 2 chunks
170 bundle changes: 3 chunks
170 bundling changes: 3 chunks
171 bundle changes: 4 chunks
171 bundling changes: 4 chunks
172 bundle changes: 5 chunks
172 bundling changes: 5 chunks
173 bundle changes: 6 chunks
173 bundling changes: 6 chunks
174 bundle changes: 7 chunks
174 bundling changes: 7 chunks
175 bundle changes: 8 chunks
175 bundling changes: 8 chunks
176 bundle changes: 9 chunks
176 bundling changes: 9 chunks
177 bundle manifests: 0 chunks
177 bundling manifests: 0 chunks
178 bundle manifests: 1 chunks
178 bundling manifests: 1 chunks
179 bundle manifests: 2 chunks
179 bundling manifests: 2 chunks
180 bundle manifests: 3 chunks
180 bundling manifests: 3 chunks
181 bundle manifests: 4 chunks
181 bundling manifests: 4 chunks
182 bundle manifests: 5 chunks
182 bundling manifests: 5 chunks
183 bundle manifests: 6 chunks
183 bundling manifests: 6 chunks
184 bundle manifests: 7 chunks
184 bundling manifests: 7 chunks
185 bundle manifests: 8 chunks
185 bundling manifests: 8 chunks
186 bundle manifests: 9 chunks
186 bundling manifests: 9 chunks
187 bundle files: foo/Bar/file.txt 0 chunks
187 bundling files: foo/Bar/file.txt 0 chunks
188 bundle files: foo/Bar/file.txt 1 chunks
188 bundling files: foo/Bar/file.txt 1 chunks
189 bundle files: foo/Bar/file.txt 2 chunks
189 bundling files: foo/Bar/file.txt 2 chunks
190 bundle files: foo/Bar/file.txt 3 chunks
190 bundling files: foo/Bar/file.txt 3 chunks
191 bundle files: foo/file.txt 4 chunks
191 bundling files: foo/file.txt 4 chunks
192 bundle files: foo/file.txt 5 chunks
192 bundling files: foo/file.txt 5 chunks
193 bundle files: foo/file.txt 6 chunks
193 bundling files: foo/file.txt 6 chunks
194 bundle files: foo/file.txt 7 chunks
194 bundling files: foo/file.txt 7 chunks
195 bundle files: quux/file.py 8 chunks
195 bundling files: quux/file.py 8 chunks
196 bundle files: quux/file.py 9 chunks
196 bundling files: quux/file.py 9 chunks
197 bundle files: quux/file.py 10 chunks
197 bundling files: quux/file.py 10 chunks
198 bundle files: quux/file.py 11 chunks
198 bundling files: quux/file.py 11 chunks
199 changesets: 1 chunks
199 changesets: 1 chunks
200 add changeset ef1ea85a6374
200 add changeset ef1ea85a6374
201 changesets: 2 chunks
201 changesets: 2 chunks
202 add changeset f9cafe1212c8
202 add changeset f9cafe1212c8
203 changesets: 3 chunks
203 changesets: 3 chunks
204 add changeset 911600dab2ae
204 add changeset 911600dab2ae
205 adding manifests
205 adding manifests
206 manifests: 1 chunks
206 manifests: 1 chunks
207 manifests: 2 chunks
207 manifests: 2 chunks
208 manifests: 3 chunks
208 manifests: 3 chunks
209 adding file changes
209 adding file changes
210 adding foo/Bar/file.txt revisions
210 adding foo/Bar/file.txt revisions
211 files: 1 chunks
211 files: 1 chunks
212 adding foo/file.txt revisions
212 adding foo/file.txt revisions
213 files: 2 chunks
213 files: 2 chunks
214 adding quux/file.py revisions
214 adding quux/file.py revisions
215 files: 3 chunks
215 files: 3 chunks
216 added 3 changesets with 3 changes to 3 files
216 added 3 changesets with 3 changes to 3 files
217 calling hook pretxnchangegroup.acl: hgext.acl.hook
217 calling hook pretxnchangegroup.acl: hgext.acl.hook
218 acl: acl.allow not enabled
218 acl: acl.allow not enabled
219 acl: acl.deny not enabled
219 acl: acl.deny not enabled
220 acl: allowing changeset ef1ea85a6374
220 acl: allowing changeset ef1ea85a6374
221 acl: allowing changeset f9cafe1212c8
221 acl: allowing changeset f9cafe1212c8
222 acl: allowing changeset 911600dab2ae
222 acl: allowing changeset 911600dab2ae
223 updating the branch cache
223 updating the branch cache
224 rolling back last transaction
224 rolling back last transaction
225 0:6675d58eff77
225 0:6675d58eff77
226
226
227 Empty [acl.allow]
227 Empty [acl.allow]
228 Pushing as user fred
228 Pushing as user fred
229 hgrc = """
229 hgrc = """
230 [hooks]
230 [hooks]
231 pretxnchangegroup.acl = python:hgext.acl.hook
231 pretxnchangegroup.acl = python:hgext.acl.hook
232 [acl]
232 [acl]
233 sources = push
233 sources = push
234 [acl.allow]
234 [acl.allow]
235 """
235 """
236 pushing to ../b
236 pushing to ../b
237 searching for changes
237 searching for changes
238 common changesets up to 6675d58eff77
238 common changesets up to 6675d58eff77
239 invalidating branch cache (tip differs)
239 invalidating branch cache (tip differs)
240 3 changesets found
240 3 changesets found
241 list of changesets:
241 list of changesets:
242 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
242 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
243 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
243 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
244 911600dab2ae7a9baff75958b84fe606851ce955
244 911600dab2ae7a9baff75958b84fe606851ce955
245 adding changesets
245 adding changesets
246 bundle changes: 0 chunks
246 bundling changes: 0 chunks
247 bundle changes: 1 chunks
247 bundling changes: 1 chunks
248 bundle changes: 2 chunks
248 bundling changes: 2 chunks
249 bundle changes: 3 chunks
249 bundling changes: 3 chunks
250 bundle changes: 4 chunks
250 bundling changes: 4 chunks
251 bundle changes: 5 chunks
251 bundling changes: 5 chunks
252 bundle changes: 6 chunks
252 bundling changes: 6 chunks
253 bundle changes: 7 chunks
253 bundling changes: 7 chunks
254 bundle changes: 8 chunks
254 bundling changes: 8 chunks
255 bundle changes: 9 chunks
255 bundling changes: 9 chunks
256 bundle manifests: 0 chunks
256 bundling manifests: 0 chunks
257 bundle manifests: 1 chunks
257 bundling manifests: 1 chunks
258 bundle manifests: 2 chunks
258 bundling manifests: 2 chunks
259 bundle manifests: 3 chunks
259 bundling manifests: 3 chunks
260 bundle manifests: 4 chunks
260 bundling manifests: 4 chunks
261 bundle manifests: 5 chunks
261 bundling manifests: 5 chunks
262 bundle manifests: 6 chunks
262 bundling manifests: 6 chunks
263 bundle manifests: 7 chunks
263 bundling manifests: 7 chunks
264 bundle manifests: 8 chunks
264 bundling manifests: 8 chunks
265 bundle manifests: 9 chunks
265 bundling manifests: 9 chunks
266 bundle files: foo/Bar/file.txt 0 chunks
266 bundling files: foo/Bar/file.txt 0 chunks
267 bundle files: foo/Bar/file.txt 1 chunks
267 bundling files: foo/Bar/file.txt 1 chunks
268 bundle files: foo/Bar/file.txt 2 chunks
268 bundling files: foo/Bar/file.txt 2 chunks
269 bundle files: foo/Bar/file.txt 3 chunks
269 bundling files: foo/Bar/file.txt 3 chunks
270 bundle files: foo/file.txt 4 chunks
270 bundling files: foo/file.txt 4 chunks
271 bundle files: foo/file.txt 5 chunks
271 bundling files: foo/file.txt 5 chunks
272 bundle files: foo/file.txt 6 chunks
272 bundling files: foo/file.txt 6 chunks
273 bundle files: foo/file.txt 7 chunks
273 bundling files: foo/file.txt 7 chunks
274 bundle files: quux/file.py 8 chunks
274 bundling files: quux/file.py 8 chunks
275 bundle files: quux/file.py 9 chunks
275 bundling files: quux/file.py 9 chunks
276 bundle files: quux/file.py 10 chunks
276 bundling files: quux/file.py 10 chunks
277 bundle files: quux/file.py 11 chunks
277 bundling files: quux/file.py 11 chunks
278 changesets: 1 chunks
278 changesets: 1 chunks
279 add changeset ef1ea85a6374
279 add changeset ef1ea85a6374
280 changesets: 2 chunks
280 changesets: 2 chunks
281 add changeset f9cafe1212c8
281 add changeset f9cafe1212c8
282 changesets: 3 chunks
282 changesets: 3 chunks
283 add changeset 911600dab2ae
283 add changeset 911600dab2ae
284 adding manifests
284 adding manifests
285 manifests: 1 chunks
285 manifests: 1 chunks
286 manifests: 2 chunks
286 manifests: 2 chunks
287 manifests: 3 chunks
287 manifests: 3 chunks
288 adding file changes
288 adding file changes
289 adding foo/Bar/file.txt revisions
289 adding foo/Bar/file.txt revisions
290 files: 1 chunks
290 files: 1 chunks
291 adding foo/file.txt revisions
291 adding foo/file.txt revisions
292 files: 2 chunks
292 files: 2 chunks
293 adding quux/file.py revisions
293 adding quux/file.py revisions
294 files: 3 chunks
294 files: 3 chunks
295 added 3 changesets with 3 changes to 3 files
295 added 3 changesets with 3 changes to 3 files
296 calling hook pretxnchangegroup.acl: hgext.acl.hook
296 calling hook pretxnchangegroup.acl: hgext.acl.hook
297 acl: acl.allow enabled, 0 entries for user fred
297 acl: acl.allow enabled, 0 entries for user fred
298 acl: acl.deny not enabled
298 acl: acl.deny not enabled
299 acl: user fred not allowed on foo/file.txt
299 acl: user fred not allowed on foo/file.txt
300 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
300 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
301 transaction abort!
301 transaction abort!
302 rollback completed
302 rollback completed
303 abort: acl: access denied for changeset ef1ea85a6374
303 abort: acl: access denied for changeset ef1ea85a6374
304 no rollback information available
304 no rollback information available
305 0:6675d58eff77
305 0:6675d58eff77
306
306
307 fred is allowed inside foo/
307 fred is allowed inside foo/
308 Pushing as user fred
308 Pushing as user fred
309 hgrc = """
309 hgrc = """
310 [hooks]
310 [hooks]
311 pretxnchangegroup.acl = python:hgext.acl.hook
311 pretxnchangegroup.acl = python:hgext.acl.hook
312 [acl]
312 [acl]
313 sources = push
313 sources = push
314 [acl.allow]
314 [acl.allow]
315 foo/** = fred
315 foo/** = fred
316 """
316 """
317 pushing to ../b
317 pushing to ../b
318 searching for changes
318 searching for changes
319 common changesets up to 6675d58eff77
319 common changesets up to 6675d58eff77
320 3 changesets found
320 3 changesets found
321 list of changesets:
321 list of changesets:
322 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
322 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
323 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
323 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
324 911600dab2ae7a9baff75958b84fe606851ce955
324 911600dab2ae7a9baff75958b84fe606851ce955
325 adding changesets
325 adding changesets
326 bundle changes: 0 chunks
326 bundling changes: 0 chunks
327 bundle changes: 1 chunks
327 bundling changes: 1 chunks
328 bundle changes: 2 chunks
328 bundling changes: 2 chunks
329 bundle changes: 3 chunks
329 bundling changes: 3 chunks
330 bundle changes: 4 chunks
330 bundling changes: 4 chunks
331 bundle changes: 5 chunks
331 bundling changes: 5 chunks
332 bundle changes: 6 chunks
332 bundling changes: 6 chunks
333 bundle changes: 7 chunks
333 bundling changes: 7 chunks
334 bundle changes: 8 chunks
334 bundling changes: 8 chunks
335 bundle changes: 9 chunks
335 bundling changes: 9 chunks
336 bundle manifests: 0 chunks
336 bundling manifests: 0 chunks
337 bundle manifests: 1 chunks
337 bundling manifests: 1 chunks
338 bundle manifests: 2 chunks
338 bundling manifests: 2 chunks
339 bundle manifests: 3 chunks
339 bundling manifests: 3 chunks
340 bundle manifests: 4 chunks
340 bundling manifests: 4 chunks
341 bundle manifests: 5 chunks
341 bundling manifests: 5 chunks
342 bundle manifests: 6 chunks
342 bundling manifests: 6 chunks
343 bundle manifests: 7 chunks
343 bundling manifests: 7 chunks
344 bundle manifests: 8 chunks
344 bundling manifests: 8 chunks
345 bundle manifests: 9 chunks
345 bundling manifests: 9 chunks
346 bundle files: foo/Bar/file.txt 0 chunks
346 bundling files: foo/Bar/file.txt 0 chunks
347 bundle files: foo/Bar/file.txt 1 chunks
347 bundling files: foo/Bar/file.txt 1 chunks
348 bundle files: foo/Bar/file.txt 2 chunks
348 bundling files: foo/Bar/file.txt 2 chunks
349 bundle files: foo/Bar/file.txt 3 chunks
349 bundling files: foo/Bar/file.txt 3 chunks
350 bundle files: foo/file.txt 4 chunks
350 bundling files: foo/file.txt 4 chunks
351 bundle files: foo/file.txt 5 chunks
351 bundling files: foo/file.txt 5 chunks
352 bundle files: foo/file.txt 6 chunks
352 bundling files: foo/file.txt 6 chunks
353 bundle files: foo/file.txt 7 chunks
353 bundling files: foo/file.txt 7 chunks
354 bundle files: quux/file.py 8 chunks
354 bundling files: quux/file.py 8 chunks
355 bundle files: quux/file.py 9 chunks
355 bundling files: quux/file.py 9 chunks
356 bundle files: quux/file.py 10 chunks
356 bundling files: quux/file.py 10 chunks
357 bundle files: quux/file.py 11 chunks
357 bundling files: quux/file.py 11 chunks
358 changesets: 1 chunks
358 changesets: 1 chunks
359 add changeset ef1ea85a6374
359 add changeset ef1ea85a6374
360 changesets: 2 chunks
360 changesets: 2 chunks
361 add changeset f9cafe1212c8
361 add changeset f9cafe1212c8
362 changesets: 3 chunks
362 changesets: 3 chunks
363 add changeset 911600dab2ae
363 add changeset 911600dab2ae
364 adding manifests
364 adding manifests
365 manifests: 1 chunks
365 manifests: 1 chunks
366 manifests: 2 chunks
366 manifests: 2 chunks
367 manifests: 3 chunks
367 manifests: 3 chunks
368 adding file changes
368 adding file changes
369 adding foo/Bar/file.txt revisions
369 adding foo/Bar/file.txt revisions
370 files: 1 chunks
370 files: 1 chunks
371 adding foo/file.txt revisions
371 adding foo/file.txt revisions
372 files: 2 chunks
372 files: 2 chunks
373 adding quux/file.py revisions
373 adding quux/file.py revisions
374 files: 3 chunks
374 files: 3 chunks
375 added 3 changesets with 3 changes to 3 files
375 added 3 changesets with 3 changes to 3 files
376 calling hook pretxnchangegroup.acl: hgext.acl.hook
376 calling hook pretxnchangegroup.acl: hgext.acl.hook
377 acl: acl.allow enabled, 1 entries for user fred
377 acl: acl.allow enabled, 1 entries for user fred
378 acl: acl.deny not enabled
378 acl: acl.deny not enabled
379 acl: allowing changeset ef1ea85a6374
379 acl: allowing changeset ef1ea85a6374
380 acl: allowing changeset f9cafe1212c8
380 acl: allowing changeset f9cafe1212c8
381 acl: user fred not allowed on quux/file.py
381 acl: user fred not allowed on quux/file.py
382 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
382 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
383 transaction abort!
383 transaction abort!
384 rollback completed
384 rollback completed
385 abort: acl: access denied for changeset 911600dab2ae
385 abort: acl: access denied for changeset 911600dab2ae
386 no rollback information available
386 no rollback information available
387 0:6675d58eff77
387 0:6675d58eff77
388
388
389 Empty [acl.deny]
389 Empty [acl.deny]
390 Pushing as user barney
390 Pushing as user barney
391 hgrc = """
391 hgrc = """
392 [hooks]
392 [hooks]
393 pretxnchangegroup.acl = python:hgext.acl.hook
393 pretxnchangegroup.acl = python:hgext.acl.hook
394 [acl]
394 [acl]
395 sources = push
395 sources = push
396 [acl.allow]
396 [acl.allow]
397 foo/** = fred
397 foo/** = fred
398 [acl.deny]
398 [acl.deny]
399 """
399 """
400 pushing to ../b
400 pushing to ../b
401 searching for changes
401 searching for changes
402 common changesets up to 6675d58eff77
402 common changesets up to 6675d58eff77
403 3 changesets found
403 3 changesets found
404 list of changesets:
404 list of changesets:
405 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
405 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
406 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
406 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
407 911600dab2ae7a9baff75958b84fe606851ce955
407 911600dab2ae7a9baff75958b84fe606851ce955
408 adding changesets
408 adding changesets
409 bundle changes: 0 chunks
409 bundling changes: 0 chunks
410 bundle changes: 1 chunks
410 bundling changes: 1 chunks
411 bundle changes: 2 chunks
411 bundling changes: 2 chunks
412 bundle changes: 3 chunks
412 bundling changes: 3 chunks
413 bundle changes: 4 chunks
413 bundling changes: 4 chunks
414 bundle changes: 5 chunks
414 bundling changes: 5 chunks
415 bundle changes: 6 chunks
415 bundling changes: 6 chunks
416 bundle changes: 7 chunks
416 bundling changes: 7 chunks
417 bundle changes: 8 chunks
417 bundling changes: 8 chunks
418 bundle changes: 9 chunks
418 bundling changes: 9 chunks
419 bundle manifests: 0 chunks
419 bundling manifests: 0 chunks
420 bundle manifests: 1 chunks
420 bundling manifests: 1 chunks
421 bundle manifests: 2 chunks
421 bundling manifests: 2 chunks
422 bundle manifests: 3 chunks
422 bundling manifests: 3 chunks
423 bundle manifests: 4 chunks
423 bundling manifests: 4 chunks
424 bundle manifests: 5 chunks
424 bundling manifests: 5 chunks
425 bundle manifests: 6 chunks
425 bundling manifests: 6 chunks
426 bundle manifests: 7 chunks
426 bundling manifests: 7 chunks
427 bundle manifests: 8 chunks
427 bundling manifests: 8 chunks
428 bundle manifests: 9 chunks
428 bundling manifests: 9 chunks
429 bundle files: foo/Bar/file.txt 0 chunks
429 bundling files: foo/Bar/file.txt 0 chunks
430 bundle files: foo/Bar/file.txt 1 chunks
430 bundling files: foo/Bar/file.txt 1 chunks
431 bundle files: foo/Bar/file.txt 2 chunks
431 bundling files: foo/Bar/file.txt 2 chunks
432 bundle files: foo/Bar/file.txt 3 chunks
432 bundling files: foo/Bar/file.txt 3 chunks
433 bundle files: foo/file.txt 4 chunks
433 bundling files: foo/file.txt 4 chunks
434 bundle files: foo/file.txt 5 chunks
434 bundling files: foo/file.txt 5 chunks
435 bundle files: foo/file.txt 6 chunks
435 bundling files: foo/file.txt 6 chunks
436 bundle files: foo/file.txt 7 chunks
436 bundling files: foo/file.txt 7 chunks
437 bundle files: quux/file.py 8 chunks
437 bundling files: quux/file.py 8 chunks
438 bundle files: quux/file.py 9 chunks
438 bundling files: quux/file.py 9 chunks
439 bundle files: quux/file.py 10 chunks
439 bundling files: quux/file.py 10 chunks
440 bundle files: quux/file.py 11 chunks
440 bundling files: quux/file.py 11 chunks
441 changesets: 1 chunks
441 changesets: 1 chunks
442 add changeset ef1ea85a6374
442 add changeset ef1ea85a6374
443 changesets: 2 chunks
443 changesets: 2 chunks
444 add changeset f9cafe1212c8
444 add changeset f9cafe1212c8
445 changesets: 3 chunks
445 changesets: 3 chunks
446 add changeset 911600dab2ae
446 add changeset 911600dab2ae
447 adding manifests
447 adding manifests
448 manifests: 1 chunks
448 manifests: 1 chunks
449 manifests: 2 chunks
449 manifests: 2 chunks
450 manifests: 3 chunks
450 manifests: 3 chunks
451 adding file changes
451 adding file changes
452 adding foo/Bar/file.txt revisions
452 adding foo/Bar/file.txt revisions
453 files: 1 chunks
453 files: 1 chunks
454 adding foo/file.txt revisions
454 adding foo/file.txt revisions
455 files: 2 chunks
455 files: 2 chunks
456 adding quux/file.py revisions
456 adding quux/file.py revisions
457 files: 3 chunks
457 files: 3 chunks
458 added 3 changesets with 3 changes to 3 files
458 added 3 changesets with 3 changes to 3 files
459 calling hook pretxnchangegroup.acl: hgext.acl.hook
459 calling hook pretxnchangegroup.acl: hgext.acl.hook
460 acl: acl.allow enabled, 0 entries for user barney
460 acl: acl.allow enabled, 0 entries for user barney
461 acl: acl.deny enabled, 0 entries for user barney
461 acl: acl.deny enabled, 0 entries for user barney
462 acl: user barney not allowed on foo/file.txt
462 acl: user barney not allowed on foo/file.txt
463 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
463 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
464 transaction abort!
464 transaction abort!
465 rollback completed
465 rollback completed
466 abort: acl: access denied for changeset ef1ea85a6374
466 abort: acl: access denied for changeset ef1ea85a6374
467 no rollback information available
467 no rollback information available
468 0:6675d58eff77
468 0:6675d58eff77
469
469
470 fred is allowed inside foo/, but not foo/bar/ (case matters)
470 fred is allowed inside foo/, but not foo/bar/ (case matters)
471 Pushing as user fred
471 Pushing as user fred
472 hgrc = """
472 hgrc = """
473 [hooks]
473 [hooks]
474 pretxnchangegroup.acl = python:hgext.acl.hook
474 pretxnchangegroup.acl = python:hgext.acl.hook
475 [acl]
475 [acl]
476 sources = push
476 sources = push
477 [acl.allow]
477 [acl.allow]
478 foo/** = fred
478 foo/** = fred
479 [acl.deny]
479 [acl.deny]
480 foo/bar/** = fred
480 foo/bar/** = fred
481 """
481 """
482 pushing to ../b
482 pushing to ../b
483 searching for changes
483 searching for changes
484 common changesets up to 6675d58eff77
484 common changesets up to 6675d58eff77
485 3 changesets found
485 3 changesets found
486 list of changesets:
486 list of changesets:
487 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
487 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
488 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
488 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
489 911600dab2ae7a9baff75958b84fe606851ce955
489 911600dab2ae7a9baff75958b84fe606851ce955
490 adding changesets
490 adding changesets
491 bundle changes: 0 chunks
491 bundling changes: 0 chunks
492 bundle changes: 1 chunks
492 bundling changes: 1 chunks
493 bundle changes: 2 chunks
493 bundling changes: 2 chunks
494 bundle changes: 3 chunks
494 bundling changes: 3 chunks
495 bundle changes: 4 chunks
495 bundling changes: 4 chunks
496 bundle changes: 5 chunks
496 bundling changes: 5 chunks
497 bundle changes: 6 chunks
497 bundling changes: 6 chunks
498 bundle changes: 7 chunks
498 bundling changes: 7 chunks
499 bundle changes: 8 chunks
499 bundling changes: 8 chunks
500 bundle changes: 9 chunks
500 bundling changes: 9 chunks
501 bundle manifests: 0 chunks
501 bundling manifests: 0 chunks
502 bundle manifests: 1 chunks
502 bundling manifests: 1 chunks
503 bundle manifests: 2 chunks
503 bundling manifests: 2 chunks
504 bundle manifests: 3 chunks
504 bundling manifests: 3 chunks
505 bundle manifests: 4 chunks
505 bundling manifests: 4 chunks
506 bundle manifests: 5 chunks
506 bundling manifests: 5 chunks
507 bundle manifests: 6 chunks
507 bundling manifests: 6 chunks
508 bundle manifests: 7 chunks
508 bundling manifests: 7 chunks
509 bundle manifests: 8 chunks
509 bundling manifests: 8 chunks
510 bundle manifests: 9 chunks
510 bundling manifests: 9 chunks
511 bundle files: foo/Bar/file.txt 0 chunks
511 bundling files: foo/Bar/file.txt 0 chunks
512 bundle files: foo/Bar/file.txt 1 chunks
512 bundling files: foo/Bar/file.txt 1 chunks
513 bundle files: foo/Bar/file.txt 2 chunks
513 bundling files: foo/Bar/file.txt 2 chunks
514 bundle files: foo/Bar/file.txt 3 chunks
514 bundling files: foo/Bar/file.txt 3 chunks
515 bundle files: foo/file.txt 4 chunks
515 bundling files: foo/file.txt 4 chunks
516 bundle files: foo/file.txt 5 chunks
516 bundling files: foo/file.txt 5 chunks
517 bundle files: foo/file.txt 6 chunks
517 bundling files: foo/file.txt 6 chunks
518 bundle files: foo/file.txt 7 chunks
518 bundling files: foo/file.txt 7 chunks
519 bundle files: quux/file.py 8 chunks
519 bundling files: quux/file.py 8 chunks
520 bundle files: quux/file.py 9 chunks
520 bundling files: quux/file.py 9 chunks
521 bundle files: quux/file.py 10 chunks
521 bundling files: quux/file.py 10 chunks
522 bundle files: quux/file.py 11 chunks
522 bundling files: quux/file.py 11 chunks
523 changesets: 1 chunks
523 changesets: 1 chunks
524 add changeset ef1ea85a6374
524 add changeset ef1ea85a6374
525 changesets: 2 chunks
525 changesets: 2 chunks
526 add changeset f9cafe1212c8
526 add changeset f9cafe1212c8
527 changesets: 3 chunks
527 changesets: 3 chunks
528 add changeset 911600dab2ae
528 add changeset 911600dab2ae
529 adding manifests
529 adding manifests
530 manifests: 1 chunks
530 manifests: 1 chunks
531 manifests: 2 chunks
531 manifests: 2 chunks
532 manifests: 3 chunks
532 manifests: 3 chunks
533 adding file changes
533 adding file changes
534 adding foo/Bar/file.txt revisions
534 adding foo/Bar/file.txt revisions
535 files: 1 chunks
535 files: 1 chunks
536 adding foo/file.txt revisions
536 adding foo/file.txt revisions
537 files: 2 chunks
537 files: 2 chunks
538 adding quux/file.py revisions
538 adding quux/file.py revisions
539 files: 3 chunks
539 files: 3 chunks
540 added 3 changesets with 3 changes to 3 files
540 added 3 changesets with 3 changes to 3 files
541 calling hook pretxnchangegroup.acl: hgext.acl.hook
541 calling hook pretxnchangegroup.acl: hgext.acl.hook
542 acl: acl.allow enabled, 1 entries for user fred
542 acl: acl.allow enabled, 1 entries for user fred
543 acl: acl.deny enabled, 1 entries for user fred
543 acl: acl.deny enabled, 1 entries for user fred
544 acl: allowing changeset ef1ea85a6374
544 acl: allowing changeset ef1ea85a6374
545 acl: allowing changeset f9cafe1212c8
545 acl: allowing changeset f9cafe1212c8
546 acl: user fred not allowed on quux/file.py
546 acl: user fred not allowed on quux/file.py
547 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
547 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
548 transaction abort!
548 transaction abort!
549 rollback completed
549 rollback completed
550 abort: acl: access denied for changeset 911600dab2ae
550 abort: acl: access denied for changeset 911600dab2ae
551 no rollback information available
551 no rollback information available
552 0:6675d58eff77
552 0:6675d58eff77
553
553
554 fred is allowed inside foo/, but not foo/Bar/
554 fred is allowed inside foo/, but not foo/Bar/
555 Pushing as user fred
555 Pushing as user fred
556 hgrc = """
556 hgrc = """
557 [hooks]
557 [hooks]
558 pretxnchangegroup.acl = python:hgext.acl.hook
558 pretxnchangegroup.acl = python:hgext.acl.hook
559 [acl]
559 [acl]
560 sources = push
560 sources = push
561 [acl.allow]
561 [acl.allow]
562 foo/** = fred
562 foo/** = fred
563 [acl.deny]
563 [acl.deny]
564 foo/bar/** = fred
564 foo/bar/** = fred
565 foo/Bar/** = fred
565 foo/Bar/** = fred
566 """
566 """
567 pushing to ../b
567 pushing to ../b
568 searching for changes
568 searching for changes
569 common changesets up to 6675d58eff77
569 common changesets up to 6675d58eff77
570 3 changesets found
570 3 changesets found
571 list of changesets:
571 list of changesets:
572 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
572 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
573 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
573 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
574 911600dab2ae7a9baff75958b84fe606851ce955
574 911600dab2ae7a9baff75958b84fe606851ce955
575 adding changesets
575 adding changesets
576 bundle changes: 0 chunks
576 bundling changes: 0 chunks
577 bundle changes: 1 chunks
577 bundling changes: 1 chunks
578 bundle changes: 2 chunks
578 bundling changes: 2 chunks
579 bundle changes: 3 chunks
579 bundling changes: 3 chunks
580 bundle changes: 4 chunks
580 bundling changes: 4 chunks
581 bundle changes: 5 chunks
581 bundling changes: 5 chunks
582 bundle changes: 6 chunks
582 bundling changes: 6 chunks
583 bundle changes: 7 chunks
583 bundling changes: 7 chunks
584 bundle changes: 8 chunks
584 bundling changes: 8 chunks
585 bundle changes: 9 chunks
585 bundling changes: 9 chunks
586 bundle manifests: 0 chunks
586 bundling manifests: 0 chunks
587 bundle manifests: 1 chunks
587 bundling manifests: 1 chunks
588 bundle manifests: 2 chunks
588 bundling manifests: 2 chunks
589 bundle manifests: 3 chunks
589 bundling manifests: 3 chunks
590 bundle manifests: 4 chunks
590 bundling manifests: 4 chunks
591 bundle manifests: 5 chunks
591 bundling manifests: 5 chunks
592 bundle manifests: 6 chunks
592 bundling manifests: 6 chunks
593 bundle manifests: 7 chunks
593 bundling manifests: 7 chunks
594 bundle manifests: 8 chunks
594 bundling manifests: 8 chunks
595 bundle manifests: 9 chunks
595 bundling manifests: 9 chunks
596 bundle files: foo/Bar/file.txt 0 chunks
596 bundling files: foo/Bar/file.txt 0 chunks
597 bundle files: foo/Bar/file.txt 1 chunks
597 bundling files: foo/Bar/file.txt 1 chunks
598 bundle files: foo/Bar/file.txt 2 chunks
598 bundling files: foo/Bar/file.txt 2 chunks
599 bundle files: foo/Bar/file.txt 3 chunks
599 bundling files: foo/Bar/file.txt 3 chunks
600 bundle files: foo/file.txt 4 chunks
600 bundling files: foo/file.txt 4 chunks
601 bundle files: foo/file.txt 5 chunks
601 bundling files: foo/file.txt 5 chunks
602 bundle files: foo/file.txt 6 chunks
602 bundling files: foo/file.txt 6 chunks
603 bundle files: foo/file.txt 7 chunks
603 bundling files: foo/file.txt 7 chunks
604 bundle files: quux/file.py 8 chunks
604 bundling files: quux/file.py 8 chunks
605 bundle files: quux/file.py 9 chunks
605 bundling files: quux/file.py 9 chunks
606 bundle files: quux/file.py 10 chunks
606 bundling files: quux/file.py 10 chunks
607 bundle files: quux/file.py 11 chunks
607 bundling files: quux/file.py 11 chunks
608 changesets: 1 chunks
608 changesets: 1 chunks
609 add changeset ef1ea85a6374
609 add changeset ef1ea85a6374
610 changesets: 2 chunks
610 changesets: 2 chunks
611 add changeset f9cafe1212c8
611 add changeset f9cafe1212c8
612 changesets: 3 chunks
612 changesets: 3 chunks
613 add changeset 911600dab2ae
613 add changeset 911600dab2ae
614 adding manifests
614 adding manifests
615 manifests: 1 chunks
615 manifests: 1 chunks
616 manifests: 2 chunks
616 manifests: 2 chunks
617 manifests: 3 chunks
617 manifests: 3 chunks
618 adding file changes
618 adding file changes
619 adding foo/Bar/file.txt revisions
619 adding foo/Bar/file.txt revisions
620 files: 1 chunks
620 files: 1 chunks
621 adding foo/file.txt revisions
621 adding foo/file.txt revisions
622 files: 2 chunks
622 files: 2 chunks
623 adding quux/file.py revisions
623 adding quux/file.py revisions
624 files: 3 chunks
624 files: 3 chunks
625 added 3 changesets with 3 changes to 3 files
625 added 3 changesets with 3 changes to 3 files
626 calling hook pretxnchangegroup.acl: hgext.acl.hook
626 calling hook pretxnchangegroup.acl: hgext.acl.hook
627 acl: acl.allow enabled, 1 entries for user fred
627 acl: acl.allow enabled, 1 entries for user fred
628 acl: acl.deny enabled, 2 entries for user fred
628 acl: acl.deny enabled, 2 entries for user fred
629 acl: allowing changeset ef1ea85a6374
629 acl: allowing changeset ef1ea85a6374
630 acl: user fred denied on foo/Bar/file.txt
630 acl: user fred denied on foo/Bar/file.txt
631 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset f9cafe1212c8
631 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset f9cafe1212c8
632 transaction abort!
632 transaction abort!
633 rollback completed
633 rollback completed
634 abort: acl: access denied for changeset f9cafe1212c8
634 abort: acl: access denied for changeset f9cafe1212c8
635 no rollback information available
635 no rollback information available
636 0:6675d58eff77
636 0:6675d58eff77
637
637
638 barney is not mentioned => not allowed anywhere
638 barney is not mentioned => not allowed anywhere
639 Pushing as user barney
639 Pushing as user barney
640 hgrc = """
640 hgrc = """
641 [hooks]
641 [hooks]
642 pretxnchangegroup.acl = python:hgext.acl.hook
642 pretxnchangegroup.acl = python:hgext.acl.hook
643 [acl]
643 [acl]
644 sources = push
644 sources = push
645 [acl.allow]
645 [acl.allow]
646 foo/** = fred
646 foo/** = fred
647 [acl.deny]
647 [acl.deny]
648 foo/bar/** = fred
648 foo/bar/** = fred
649 foo/Bar/** = fred
649 foo/Bar/** = fred
650 """
650 """
651 pushing to ../b
651 pushing to ../b
652 searching for changes
652 searching for changes
653 common changesets up to 6675d58eff77
653 common changesets up to 6675d58eff77
654 3 changesets found
654 3 changesets found
655 list of changesets:
655 list of changesets:
656 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
656 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
657 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
657 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
658 911600dab2ae7a9baff75958b84fe606851ce955
658 911600dab2ae7a9baff75958b84fe606851ce955
659 adding changesets
659 adding changesets
660 bundle changes: 0 chunks
660 bundling changes: 0 chunks
661 bundle changes: 1 chunks
661 bundling changes: 1 chunks
662 bundle changes: 2 chunks
662 bundling changes: 2 chunks
663 bundle changes: 3 chunks
663 bundling changes: 3 chunks
664 bundle changes: 4 chunks
664 bundling changes: 4 chunks
665 bundle changes: 5 chunks
665 bundling changes: 5 chunks
666 bundle changes: 6 chunks
666 bundling changes: 6 chunks
667 bundle changes: 7 chunks
667 bundling changes: 7 chunks
668 bundle changes: 8 chunks
668 bundling changes: 8 chunks
669 bundle changes: 9 chunks
669 bundling changes: 9 chunks
670 bundle manifests: 0 chunks
670 bundling manifests: 0 chunks
671 bundle manifests: 1 chunks
671 bundling manifests: 1 chunks
672 bundle manifests: 2 chunks
672 bundling manifests: 2 chunks
673 bundle manifests: 3 chunks
673 bundling manifests: 3 chunks
674 bundle manifests: 4 chunks
674 bundling manifests: 4 chunks
675 bundle manifests: 5 chunks
675 bundling manifests: 5 chunks
676 bundle manifests: 6 chunks
676 bundling manifests: 6 chunks
677 bundle manifests: 7 chunks
677 bundling manifests: 7 chunks
678 bundle manifests: 8 chunks
678 bundling manifests: 8 chunks
679 bundle manifests: 9 chunks
679 bundling manifests: 9 chunks
680 bundle files: foo/Bar/file.txt 0 chunks
680 bundling files: foo/Bar/file.txt 0 chunks
681 bundle files: foo/Bar/file.txt 1 chunks
681 bundling files: foo/Bar/file.txt 1 chunks
682 bundle files: foo/Bar/file.txt 2 chunks
682 bundling files: foo/Bar/file.txt 2 chunks
683 bundle files: foo/Bar/file.txt 3 chunks
683 bundling files: foo/Bar/file.txt 3 chunks
684 bundle files: foo/file.txt 4 chunks
684 bundling files: foo/file.txt 4 chunks
685 bundle files: foo/file.txt 5 chunks
685 bundling files: foo/file.txt 5 chunks
686 bundle files: foo/file.txt 6 chunks
686 bundling files: foo/file.txt 6 chunks
687 bundle files: foo/file.txt 7 chunks
687 bundling files: foo/file.txt 7 chunks
688 bundle files: quux/file.py 8 chunks
688 bundling files: quux/file.py 8 chunks
689 bundle files: quux/file.py 9 chunks
689 bundling files: quux/file.py 9 chunks
690 bundle files: quux/file.py 10 chunks
690 bundling files: quux/file.py 10 chunks
691 bundle files: quux/file.py 11 chunks
691 bundling files: quux/file.py 11 chunks
692 changesets: 1 chunks
692 changesets: 1 chunks
693 add changeset ef1ea85a6374
693 add changeset ef1ea85a6374
694 changesets: 2 chunks
694 changesets: 2 chunks
695 add changeset f9cafe1212c8
695 add changeset f9cafe1212c8
696 changesets: 3 chunks
696 changesets: 3 chunks
697 add changeset 911600dab2ae
697 add changeset 911600dab2ae
698 adding manifests
698 adding manifests
699 manifests: 1 chunks
699 manifests: 1 chunks
700 manifests: 2 chunks
700 manifests: 2 chunks
701 manifests: 3 chunks
701 manifests: 3 chunks
702 adding file changes
702 adding file changes
703 adding foo/Bar/file.txt revisions
703 adding foo/Bar/file.txt revisions
704 files: 1 chunks
704 files: 1 chunks
705 adding foo/file.txt revisions
705 adding foo/file.txt revisions
706 files: 2 chunks
706 files: 2 chunks
707 adding quux/file.py revisions
707 adding quux/file.py revisions
708 files: 3 chunks
708 files: 3 chunks
709 added 3 changesets with 3 changes to 3 files
709 added 3 changesets with 3 changes to 3 files
710 calling hook pretxnchangegroup.acl: hgext.acl.hook
710 calling hook pretxnchangegroup.acl: hgext.acl.hook
711 acl: acl.allow enabled, 0 entries for user barney
711 acl: acl.allow enabled, 0 entries for user barney
712 acl: acl.deny enabled, 0 entries for user barney
712 acl: acl.deny enabled, 0 entries for user barney
713 acl: user barney not allowed on foo/file.txt
713 acl: user barney not allowed on foo/file.txt
714 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
714 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
715 transaction abort!
715 transaction abort!
716 rollback completed
716 rollback completed
717 abort: acl: access denied for changeset ef1ea85a6374
717 abort: acl: access denied for changeset ef1ea85a6374
718 no rollback information available
718 no rollback information available
719 0:6675d58eff77
719 0:6675d58eff77
720
720
721 barney is allowed everywhere
721 barney is allowed everywhere
722 Pushing as user barney
722 Pushing as user barney
723 hgrc = """
723 hgrc = """
724 [hooks]
724 [hooks]
725 pretxnchangegroup.acl = python:hgext.acl.hook
725 pretxnchangegroup.acl = python:hgext.acl.hook
726 [acl]
726 [acl]
727 sources = push
727 sources = push
728 [acl.allow]
728 [acl.allow]
729 foo/** = fred
729 foo/** = fred
730 [acl.deny]
730 [acl.deny]
731 foo/bar/** = fred
731 foo/bar/** = fred
732 foo/Bar/** = fred
732 foo/Bar/** = fred
733 [acl.allow]
733 [acl.allow]
734 ** = barney
734 ** = barney
735 """
735 """
736 pushing to ../b
736 pushing to ../b
737 searching for changes
737 searching for changes
738 common changesets up to 6675d58eff77
738 common changesets up to 6675d58eff77
739 3 changesets found
739 3 changesets found
740 list of changesets:
740 list of changesets:
741 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
741 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
742 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
742 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
743 911600dab2ae7a9baff75958b84fe606851ce955
743 911600dab2ae7a9baff75958b84fe606851ce955
744 adding changesets
744 adding changesets
745 bundle changes: 0 chunks
745 bundling changes: 0 chunks
746 bundle changes: 1 chunks
746 bundling changes: 1 chunks
747 bundle changes: 2 chunks
747 bundling changes: 2 chunks
748 bundle changes: 3 chunks
748 bundling changes: 3 chunks
749 bundle changes: 4 chunks
749 bundling changes: 4 chunks
750 bundle changes: 5 chunks
750 bundling changes: 5 chunks
751 bundle changes: 6 chunks
751 bundling changes: 6 chunks
752 bundle changes: 7 chunks
752 bundling changes: 7 chunks
753 bundle changes: 8 chunks
753 bundling changes: 8 chunks
754 bundle changes: 9 chunks
754 bundling changes: 9 chunks
755 bundle manifests: 0 chunks
755 bundling manifests: 0 chunks
756 bundle manifests: 1 chunks
756 bundling manifests: 1 chunks
757 bundle manifests: 2 chunks
757 bundling manifests: 2 chunks
758 bundle manifests: 3 chunks
758 bundling manifests: 3 chunks
759 bundle manifests: 4 chunks
759 bundling manifests: 4 chunks
760 bundle manifests: 5 chunks
760 bundling manifests: 5 chunks
761 bundle manifests: 6 chunks
761 bundling manifests: 6 chunks
762 bundle manifests: 7 chunks
762 bundling manifests: 7 chunks
763 bundle manifests: 8 chunks
763 bundling manifests: 8 chunks
764 bundle manifests: 9 chunks
764 bundling manifests: 9 chunks
765 bundle files: foo/Bar/file.txt 0 chunks
765 bundling files: foo/Bar/file.txt 0 chunks
766 bundle files: foo/Bar/file.txt 1 chunks
766 bundling files: foo/Bar/file.txt 1 chunks
767 bundle files: foo/Bar/file.txt 2 chunks
767 bundling files: foo/Bar/file.txt 2 chunks
768 bundle files: foo/Bar/file.txt 3 chunks
768 bundling files: foo/Bar/file.txt 3 chunks
769 bundle files: foo/file.txt 4 chunks
769 bundling files: foo/file.txt 4 chunks
770 bundle files: foo/file.txt 5 chunks
770 bundling files: foo/file.txt 5 chunks
771 bundle files: foo/file.txt 6 chunks
771 bundling files: foo/file.txt 6 chunks
772 bundle files: foo/file.txt 7 chunks
772 bundling files: foo/file.txt 7 chunks
773 bundle files: quux/file.py 8 chunks
773 bundling files: quux/file.py 8 chunks
774 bundle files: quux/file.py 9 chunks
774 bundling files: quux/file.py 9 chunks
775 bundle files: quux/file.py 10 chunks
775 bundling files: quux/file.py 10 chunks
776 bundle files: quux/file.py 11 chunks
776 bundling files: quux/file.py 11 chunks
777 changesets: 1 chunks
777 changesets: 1 chunks
778 add changeset ef1ea85a6374
778 add changeset ef1ea85a6374
779 changesets: 2 chunks
779 changesets: 2 chunks
780 add changeset f9cafe1212c8
780 add changeset f9cafe1212c8
781 changesets: 3 chunks
781 changesets: 3 chunks
782 add changeset 911600dab2ae
782 add changeset 911600dab2ae
783 adding manifests
783 adding manifests
784 manifests: 1 chunks
784 manifests: 1 chunks
785 manifests: 2 chunks
785 manifests: 2 chunks
786 manifests: 3 chunks
786 manifests: 3 chunks
787 adding file changes
787 adding file changes
788 adding foo/Bar/file.txt revisions
788 adding foo/Bar/file.txt revisions
789 files: 1 chunks
789 files: 1 chunks
790 adding foo/file.txt revisions
790 adding foo/file.txt revisions
791 files: 2 chunks
791 files: 2 chunks
792 adding quux/file.py revisions
792 adding quux/file.py revisions
793 files: 3 chunks
793 files: 3 chunks
794 added 3 changesets with 3 changes to 3 files
794 added 3 changesets with 3 changes to 3 files
795 calling hook pretxnchangegroup.acl: hgext.acl.hook
795 calling hook pretxnchangegroup.acl: hgext.acl.hook
796 acl: acl.allow enabled, 1 entries for user barney
796 acl: acl.allow enabled, 1 entries for user barney
797 acl: acl.deny enabled, 0 entries for user barney
797 acl: acl.deny enabled, 0 entries for user barney
798 acl: allowing changeset ef1ea85a6374
798 acl: allowing changeset ef1ea85a6374
799 acl: allowing changeset f9cafe1212c8
799 acl: allowing changeset f9cafe1212c8
800 acl: allowing changeset 911600dab2ae
800 acl: allowing changeset 911600dab2ae
801 updating the branch cache
801 updating the branch cache
802 rolling back last transaction
802 rolling back last transaction
803 0:6675d58eff77
803 0:6675d58eff77
804
804
805 wilma can change files with a .txt extension
805 wilma can change files with a .txt extension
806 Pushing as user wilma
806 Pushing as user wilma
807 hgrc = """
807 hgrc = """
808 [hooks]
808 [hooks]
809 pretxnchangegroup.acl = python:hgext.acl.hook
809 pretxnchangegroup.acl = python:hgext.acl.hook
810 [acl]
810 [acl]
811 sources = push
811 sources = push
812 [acl.allow]
812 [acl.allow]
813 foo/** = fred
813 foo/** = fred
814 [acl.deny]
814 [acl.deny]
815 foo/bar/** = fred
815 foo/bar/** = fred
816 foo/Bar/** = fred
816 foo/Bar/** = fred
817 [acl.allow]
817 [acl.allow]
818 ** = barney
818 ** = barney
819 **/*.txt = wilma
819 **/*.txt = wilma
820 """
820 """
821 pushing to ../b
821 pushing to ../b
822 searching for changes
822 searching for changes
823 common changesets up to 6675d58eff77
823 common changesets up to 6675d58eff77
824 invalidating branch cache (tip differs)
824 invalidating branch cache (tip differs)
825 3 changesets found
825 3 changesets found
826 list of changesets:
826 list of changesets:
827 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
827 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
828 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
828 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
829 911600dab2ae7a9baff75958b84fe606851ce955
829 911600dab2ae7a9baff75958b84fe606851ce955
830 adding changesets
830 adding changesets
831 bundle changes: 0 chunks
831 bundling changes: 0 chunks
832 bundle changes: 1 chunks
832 bundling changes: 1 chunks
833 bundle changes: 2 chunks
833 bundling changes: 2 chunks
834 bundle changes: 3 chunks
834 bundling changes: 3 chunks
835 bundle changes: 4 chunks
835 bundling changes: 4 chunks
836 bundle changes: 5 chunks
836 bundling changes: 5 chunks
837 bundle changes: 6 chunks
837 bundling changes: 6 chunks
838 bundle changes: 7 chunks
838 bundling changes: 7 chunks
839 bundle changes: 8 chunks
839 bundling changes: 8 chunks
840 bundle changes: 9 chunks
840 bundling changes: 9 chunks
841 bundle manifests: 0 chunks
841 bundling manifests: 0 chunks
842 bundle manifests: 1 chunks
842 bundling manifests: 1 chunks
843 bundle manifests: 2 chunks
843 bundling manifests: 2 chunks
844 bundle manifests: 3 chunks
844 bundling manifests: 3 chunks
845 bundle manifests: 4 chunks
845 bundling manifests: 4 chunks
846 bundle manifests: 5 chunks
846 bundling manifests: 5 chunks
847 bundle manifests: 6 chunks
847 bundling manifests: 6 chunks
848 bundle manifests: 7 chunks
848 bundling manifests: 7 chunks
849 bundle manifests: 8 chunks
849 bundling manifests: 8 chunks
850 bundle manifests: 9 chunks
850 bundling manifests: 9 chunks
851 bundle files: foo/Bar/file.txt 0 chunks
851 bundling files: foo/Bar/file.txt 0 chunks
852 bundle files: foo/Bar/file.txt 1 chunks
852 bundling files: foo/Bar/file.txt 1 chunks
853 bundle files: foo/Bar/file.txt 2 chunks
853 bundling files: foo/Bar/file.txt 2 chunks
854 bundle files: foo/Bar/file.txt 3 chunks
854 bundling files: foo/Bar/file.txt 3 chunks
855 bundle files: foo/file.txt 4 chunks
855 bundling files: foo/file.txt 4 chunks
856 bundle files: foo/file.txt 5 chunks
856 bundling files: foo/file.txt 5 chunks
857 bundle files: foo/file.txt 6 chunks
857 bundling files: foo/file.txt 6 chunks
858 bundle files: foo/file.txt 7 chunks
858 bundling files: foo/file.txt 7 chunks
859 bundle files: quux/file.py 8 chunks
859 bundling files: quux/file.py 8 chunks
860 bundle files: quux/file.py 9 chunks
860 bundling files: quux/file.py 9 chunks
861 bundle files: quux/file.py 10 chunks
861 bundling files: quux/file.py 10 chunks
862 bundle files: quux/file.py 11 chunks
862 bundling files: quux/file.py 11 chunks
863 changesets: 1 chunks
863 changesets: 1 chunks
864 add changeset ef1ea85a6374
864 add changeset ef1ea85a6374
865 changesets: 2 chunks
865 changesets: 2 chunks
866 add changeset f9cafe1212c8
866 add changeset f9cafe1212c8
867 changesets: 3 chunks
867 changesets: 3 chunks
868 add changeset 911600dab2ae
868 add changeset 911600dab2ae
869 adding manifests
869 adding manifests
870 manifests: 1 chunks
870 manifests: 1 chunks
871 manifests: 2 chunks
871 manifests: 2 chunks
872 manifests: 3 chunks
872 manifests: 3 chunks
873 adding file changes
873 adding file changes
874 adding foo/Bar/file.txt revisions
874 adding foo/Bar/file.txt revisions
875 files: 1 chunks
875 files: 1 chunks
876 adding foo/file.txt revisions
876 adding foo/file.txt revisions
877 files: 2 chunks
877 files: 2 chunks
878 adding quux/file.py revisions
878 adding quux/file.py revisions
879 files: 3 chunks
879 files: 3 chunks
880 added 3 changesets with 3 changes to 3 files
880 added 3 changesets with 3 changes to 3 files
881 calling hook pretxnchangegroup.acl: hgext.acl.hook
881 calling hook pretxnchangegroup.acl: hgext.acl.hook
882 acl: acl.allow enabled, 1 entries for user wilma
882 acl: acl.allow enabled, 1 entries for user wilma
883 acl: acl.deny enabled, 0 entries for user wilma
883 acl: acl.deny enabled, 0 entries for user wilma
884 acl: allowing changeset ef1ea85a6374
884 acl: allowing changeset ef1ea85a6374
885 acl: allowing changeset f9cafe1212c8
885 acl: allowing changeset f9cafe1212c8
886 acl: user wilma not allowed on quux/file.py
886 acl: user wilma not allowed on quux/file.py
887 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
887 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
888 transaction abort!
888 transaction abort!
889 rollback completed
889 rollback completed
890 abort: acl: access denied for changeset 911600dab2ae
890 abort: acl: access denied for changeset 911600dab2ae
891 no rollback information available
891 no rollback information available
892 0:6675d58eff77
892 0:6675d58eff77
893
893
894 file specified by acl.config does not exist
894 file specified by acl.config does not exist
895 Pushing as user barney
895 Pushing as user barney
896 hgrc = """
896 hgrc = """
897 [hooks]
897 [hooks]
898 pretxnchangegroup.acl = python:hgext.acl.hook
898 pretxnchangegroup.acl = python:hgext.acl.hook
899 [acl]
899 [acl]
900 sources = push
900 sources = push
901 [acl.allow]
901 [acl.allow]
902 foo/** = fred
902 foo/** = fred
903 [acl.deny]
903 [acl.deny]
904 foo/bar/** = fred
904 foo/bar/** = fred
905 foo/Bar/** = fred
905 foo/Bar/** = fred
906 [acl.allow]
906 [acl.allow]
907 ** = barney
907 ** = barney
908 **/*.txt = wilma
908 **/*.txt = wilma
909 [acl]
909 [acl]
910 config = ../acl.config
910 config = ../acl.config
911 """
911 """
912 pushing to ../b
912 pushing to ../b
913 searching for changes
913 searching for changes
914 common changesets up to 6675d58eff77
914 common changesets up to 6675d58eff77
915 3 changesets found
915 3 changesets found
916 list of changesets:
916 list of changesets:
917 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
917 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
918 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
918 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
919 911600dab2ae7a9baff75958b84fe606851ce955
919 911600dab2ae7a9baff75958b84fe606851ce955
920 adding changesets
920 adding changesets
921 bundle changes: 0 chunks
921 bundling changes: 0 chunks
922 bundle changes: 1 chunks
922 bundling changes: 1 chunks
923 bundle changes: 2 chunks
923 bundling changes: 2 chunks
924 bundle changes: 3 chunks
924 bundling changes: 3 chunks
925 bundle changes: 4 chunks
925 bundling changes: 4 chunks
926 bundle changes: 5 chunks
926 bundling changes: 5 chunks
927 bundle changes: 6 chunks
927 bundling changes: 6 chunks
928 bundle changes: 7 chunks
928 bundling changes: 7 chunks
929 bundle changes: 8 chunks
929 bundling changes: 8 chunks
930 bundle changes: 9 chunks
930 bundling changes: 9 chunks
931 bundle manifests: 0 chunks
931 bundling manifests: 0 chunks
932 bundle manifests: 1 chunks
932 bundling manifests: 1 chunks
933 bundle manifests: 2 chunks
933 bundling manifests: 2 chunks
934 bundle manifests: 3 chunks
934 bundling manifests: 3 chunks
935 bundle manifests: 4 chunks
935 bundling manifests: 4 chunks
936 bundle manifests: 5 chunks
936 bundling manifests: 5 chunks
937 bundle manifests: 6 chunks
937 bundling manifests: 6 chunks
938 bundle manifests: 7 chunks
938 bundling manifests: 7 chunks
939 bundle manifests: 8 chunks
939 bundling manifests: 8 chunks
940 bundle manifests: 9 chunks
940 bundling manifests: 9 chunks
941 bundle files: foo/Bar/file.txt 0 chunks
941 bundling files: foo/Bar/file.txt 0 chunks
942 bundle files: foo/Bar/file.txt 1 chunks
942 bundling files: foo/Bar/file.txt 1 chunks
943 bundle files: foo/Bar/file.txt 2 chunks
943 bundling files: foo/Bar/file.txt 2 chunks
944 bundle files: foo/Bar/file.txt 3 chunks
944 bundling files: foo/Bar/file.txt 3 chunks
945 bundle files: foo/file.txt 4 chunks
945 bundling files: foo/file.txt 4 chunks
946 bundle files: foo/file.txt 5 chunks
946 bundling files: foo/file.txt 5 chunks
947 bundle files: foo/file.txt 6 chunks
947 bundling files: foo/file.txt 6 chunks
948 bundle files: foo/file.txt 7 chunks
948 bundling files: foo/file.txt 7 chunks
949 bundle files: quux/file.py 8 chunks
949 bundling files: quux/file.py 8 chunks
950 bundle files: quux/file.py 9 chunks
950 bundling files: quux/file.py 9 chunks
951 bundle files: quux/file.py 10 chunks
951 bundling files: quux/file.py 10 chunks
952 bundle files: quux/file.py 11 chunks
952 bundling files: quux/file.py 11 chunks
953 changesets: 1 chunks
953 changesets: 1 chunks
954 add changeset ef1ea85a6374
954 add changeset ef1ea85a6374
955 changesets: 2 chunks
955 changesets: 2 chunks
956 add changeset f9cafe1212c8
956 add changeset f9cafe1212c8
957 changesets: 3 chunks
957 changesets: 3 chunks
958 add changeset 911600dab2ae
958 add changeset 911600dab2ae
959 adding manifests
959 adding manifests
960 manifests: 1 chunks
960 manifests: 1 chunks
961 manifests: 2 chunks
961 manifests: 2 chunks
962 manifests: 3 chunks
962 manifests: 3 chunks
963 adding file changes
963 adding file changes
964 adding foo/Bar/file.txt revisions
964 adding foo/Bar/file.txt revisions
965 files: 1 chunks
965 files: 1 chunks
966 adding foo/file.txt revisions
966 adding foo/file.txt revisions
967 files: 2 chunks
967 files: 2 chunks
968 adding quux/file.py revisions
968 adding quux/file.py revisions
969 files: 3 chunks
969 files: 3 chunks
970 added 3 changesets with 3 changes to 3 files
970 added 3 changesets with 3 changes to 3 files
971 calling hook pretxnchangegroup.acl: hgext.acl.hook
971 calling hook pretxnchangegroup.acl: hgext.acl.hook
972 error: pretxnchangegroup.acl hook raised an exception: [Errno 2] No such file or directory: '../acl.config'
972 error: pretxnchangegroup.acl hook raised an exception: [Errno 2] No such file or directory: '../acl.config'
973 transaction abort!
973 transaction abort!
974 rollback completed
974 rollback completed
975 abort: No such file or directory: ../acl.config
975 abort: No such file or directory: ../acl.config
976 no rollback information available
976 no rollback information available
977 0:6675d58eff77
977 0:6675d58eff77
978
978
979 betty is allowed inside foo/ by a acl.config file
979 betty is allowed inside foo/ by a acl.config file
980 Pushing as user betty
980 Pushing as user betty
981 hgrc = """
981 hgrc = """
982 [hooks]
982 [hooks]
983 pretxnchangegroup.acl = python:hgext.acl.hook
983 pretxnchangegroup.acl = python:hgext.acl.hook
984 [acl]
984 [acl]
985 sources = push
985 sources = push
986 [acl.allow]
986 [acl.allow]
987 foo/** = fred
987 foo/** = fred
988 [acl.deny]
988 [acl.deny]
989 foo/bar/** = fred
989 foo/bar/** = fred
990 foo/Bar/** = fred
990 foo/Bar/** = fred
991 [acl.allow]
991 [acl.allow]
992 ** = barney
992 ** = barney
993 **/*.txt = wilma
993 **/*.txt = wilma
994 [acl]
994 [acl]
995 config = ../acl.config
995 config = ../acl.config
996 """
996 """
997 acl.config = """
997 acl.config = """
998 [acl.allow]
998 [acl.allow]
999 foo/** = betty
999 foo/** = betty
1000 """
1000 """
1001 pushing to ../b
1001 pushing to ../b
1002 searching for changes
1002 searching for changes
1003 common changesets up to 6675d58eff77
1003 common changesets up to 6675d58eff77
1004 3 changesets found
1004 3 changesets found
1005 list of changesets:
1005 list of changesets:
1006 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1006 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1007 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1007 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1008 911600dab2ae7a9baff75958b84fe606851ce955
1008 911600dab2ae7a9baff75958b84fe606851ce955
1009 adding changesets
1009 adding changesets
1010 bundle changes: 0 chunks
1010 bundling changes: 0 chunks
1011 bundle changes: 1 chunks
1011 bundling changes: 1 chunks
1012 bundle changes: 2 chunks
1012 bundling changes: 2 chunks
1013 bundle changes: 3 chunks
1013 bundling changes: 3 chunks
1014 bundle changes: 4 chunks
1014 bundling changes: 4 chunks
1015 bundle changes: 5 chunks
1015 bundling changes: 5 chunks
1016 bundle changes: 6 chunks
1016 bundling changes: 6 chunks
1017 bundle changes: 7 chunks
1017 bundling changes: 7 chunks
1018 bundle changes: 8 chunks
1018 bundling changes: 8 chunks
1019 bundle changes: 9 chunks
1019 bundling changes: 9 chunks
1020 bundle manifests: 0 chunks
1020 bundling manifests: 0 chunks
1021 bundle manifests: 1 chunks
1021 bundling manifests: 1 chunks
1022 bundle manifests: 2 chunks
1022 bundling manifests: 2 chunks
1023 bundle manifests: 3 chunks
1023 bundling manifests: 3 chunks
1024 bundle manifests: 4 chunks
1024 bundling manifests: 4 chunks
1025 bundle manifests: 5 chunks
1025 bundling manifests: 5 chunks
1026 bundle manifests: 6 chunks
1026 bundling manifests: 6 chunks
1027 bundle manifests: 7 chunks
1027 bundling manifests: 7 chunks
1028 bundle manifests: 8 chunks
1028 bundling manifests: 8 chunks
1029 bundle manifests: 9 chunks
1029 bundling manifests: 9 chunks
1030 bundle files: foo/Bar/file.txt 0 chunks
1030 bundling files: foo/Bar/file.txt 0 chunks
1031 bundle files: foo/Bar/file.txt 1 chunks
1031 bundling files: foo/Bar/file.txt 1 chunks
1032 bundle files: foo/Bar/file.txt 2 chunks
1032 bundling files: foo/Bar/file.txt 2 chunks
1033 bundle files: foo/Bar/file.txt 3 chunks
1033 bundling files: foo/Bar/file.txt 3 chunks
1034 bundle files: foo/file.txt 4 chunks
1034 bundling files: foo/file.txt 4 chunks
1035 bundle files: foo/file.txt 5 chunks
1035 bundling files: foo/file.txt 5 chunks
1036 bundle files: foo/file.txt 6 chunks
1036 bundling files: foo/file.txt 6 chunks
1037 bundle files: foo/file.txt 7 chunks
1037 bundling files: foo/file.txt 7 chunks
1038 bundle files: quux/file.py 8 chunks
1038 bundling files: quux/file.py 8 chunks
1039 bundle files: quux/file.py 9 chunks
1039 bundling files: quux/file.py 9 chunks
1040 bundle files: quux/file.py 10 chunks
1040 bundling files: quux/file.py 10 chunks
1041 bundle files: quux/file.py 11 chunks
1041 bundling files: quux/file.py 11 chunks
1042 changesets: 1 chunks
1042 changesets: 1 chunks
1043 add changeset ef1ea85a6374
1043 add changeset ef1ea85a6374
1044 changesets: 2 chunks
1044 changesets: 2 chunks
1045 add changeset f9cafe1212c8
1045 add changeset f9cafe1212c8
1046 changesets: 3 chunks
1046 changesets: 3 chunks
1047 add changeset 911600dab2ae
1047 add changeset 911600dab2ae
1048 adding manifests
1048 adding manifests
1049 manifests: 1 chunks
1049 manifests: 1 chunks
1050 manifests: 2 chunks
1050 manifests: 2 chunks
1051 manifests: 3 chunks
1051 manifests: 3 chunks
1052 adding file changes
1052 adding file changes
1053 adding foo/Bar/file.txt revisions
1053 adding foo/Bar/file.txt revisions
1054 files: 1 chunks
1054 files: 1 chunks
1055 adding foo/file.txt revisions
1055 adding foo/file.txt revisions
1056 files: 2 chunks
1056 files: 2 chunks
1057 adding quux/file.py revisions
1057 adding quux/file.py revisions
1058 files: 3 chunks
1058 files: 3 chunks
1059 added 3 changesets with 3 changes to 3 files
1059 added 3 changesets with 3 changes to 3 files
1060 calling hook pretxnchangegroup.acl: hgext.acl.hook
1060 calling hook pretxnchangegroup.acl: hgext.acl.hook
1061 acl: acl.allow enabled, 1 entries for user betty
1061 acl: acl.allow enabled, 1 entries for user betty
1062 acl: acl.deny enabled, 0 entries for user betty
1062 acl: acl.deny enabled, 0 entries for user betty
1063 acl: allowing changeset ef1ea85a6374
1063 acl: allowing changeset ef1ea85a6374
1064 acl: allowing changeset f9cafe1212c8
1064 acl: allowing changeset f9cafe1212c8
1065 acl: user betty not allowed on quux/file.py
1065 acl: user betty not allowed on quux/file.py
1066 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
1066 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
1067 transaction abort!
1067 transaction abort!
1068 rollback completed
1068 rollback completed
1069 abort: acl: access denied for changeset 911600dab2ae
1069 abort: acl: access denied for changeset 911600dab2ae
1070 no rollback information available
1070 no rollback information available
1071 0:6675d58eff77
1071 0:6675d58eff77
1072
1072
1073 acl.config can set only [acl.allow]/[acl.deny]
1073 acl.config can set only [acl.allow]/[acl.deny]
1074 Pushing as user barney
1074 Pushing as user barney
1075 hgrc = """
1075 hgrc = """
1076 [hooks]
1076 [hooks]
1077 pretxnchangegroup.acl = python:hgext.acl.hook
1077 pretxnchangegroup.acl = python:hgext.acl.hook
1078 [acl]
1078 [acl]
1079 sources = push
1079 sources = push
1080 [acl.allow]
1080 [acl.allow]
1081 foo/** = fred
1081 foo/** = fred
1082 [acl.deny]
1082 [acl.deny]
1083 foo/bar/** = fred
1083 foo/bar/** = fred
1084 foo/Bar/** = fred
1084 foo/Bar/** = fred
1085 [acl.allow]
1085 [acl.allow]
1086 ** = barney
1086 ** = barney
1087 **/*.txt = wilma
1087 **/*.txt = wilma
1088 [acl]
1088 [acl]
1089 config = ../acl.config
1089 config = ../acl.config
1090 """
1090 """
1091 acl.config = """
1091 acl.config = """
1092 [acl.allow]
1092 [acl.allow]
1093 foo/** = betty
1093 foo/** = betty
1094 [hooks]
1094 [hooks]
1095 changegroup.acl = false
1095 changegroup.acl = false
1096 """
1096 """
1097 pushing to ../b
1097 pushing to ../b
1098 searching for changes
1098 searching for changes
1099 common changesets up to 6675d58eff77
1099 common changesets up to 6675d58eff77
1100 3 changesets found
1100 3 changesets found
1101 list of changesets:
1101 list of changesets:
1102 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1102 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
1103 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1103 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
1104 911600dab2ae7a9baff75958b84fe606851ce955
1104 911600dab2ae7a9baff75958b84fe606851ce955
1105 adding changesets
1105 adding changesets
1106 bundle changes: 0 chunks
1106 bundling changes: 0 chunks
1107 bundle changes: 1 chunks
1107 bundling changes: 1 chunks
1108 bundle changes: 2 chunks
1108 bundling changes: 2 chunks
1109 bundle changes: 3 chunks
1109 bundling changes: 3 chunks
1110 bundle changes: 4 chunks
1110 bundling changes: 4 chunks
1111 bundle changes: 5 chunks
1111 bundling changes: 5 chunks
1112 bundle changes: 6 chunks
1112 bundling changes: 6 chunks
1113 bundle changes: 7 chunks
1113 bundling changes: 7 chunks
1114 bundle changes: 8 chunks
1114 bundling changes: 8 chunks
1115 bundle changes: 9 chunks
1115 bundling changes: 9 chunks
1116 bundle manifests: 0 chunks
1116 bundling manifests: 0 chunks
1117 bundle manifests: 1 chunks
1117 bundling manifests: 1 chunks
1118 bundle manifests: 2 chunks
1118 bundling manifests: 2 chunks
1119 bundle manifests: 3 chunks
1119 bundling manifests: 3 chunks
1120 bundle manifests: 4 chunks
1120 bundling manifests: 4 chunks
1121 bundle manifests: 5 chunks
1121 bundling manifests: 5 chunks
1122 bundle manifests: 6 chunks
1122 bundling manifests: 6 chunks
1123 bundle manifests: 7 chunks
1123 bundling manifests: 7 chunks
1124 bundle manifests: 8 chunks
1124 bundling manifests: 8 chunks
1125 bundle manifests: 9 chunks
1125 bundling manifests: 9 chunks
1126 bundle files: foo/Bar/file.txt 0 chunks
1126 bundling files: foo/Bar/file.txt 0 chunks
1127 bundle files: foo/Bar/file.txt 1 chunks
1127 bundling files: foo/Bar/file.txt 1 chunks
1128 bundle files: foo/Bar/file.txt 2 chunks
1128 bundling files: foo/Bar/file.txt 2 chunks
1129 bundle files: foo/Bar/file.txt 3 chunks
1129 bundling files: foo/Bar/file.txt 3 chunks
1130 bundle files: foo/file.txt 4 chunks
1130 bundling files: foo/file.txt 4 chunks
1131 bundle files: foo/file.txt 5 chunks
1131 bundling files: foo/file.txt 5 chunks
1132 bundle files: foo/file.txt 6 chunks
1132 bundling files: foo/file.txt 6 chunks
1133 bundle files: foo/file.txt 7 chunks
1133 bundling files: foo/file.txt 7 chunks
1134 bundle files: quux/file.py 8 chunks
1134 bundling files: quux/file.py 8 chunks
1135 bundle files: quux/file.py 9 chunks
1135 bundling files: quux/file.py 9 chunks
1136 bundle files: quux/file.py 10 chunks
1136 bundling files: quux/file.py 10 chunks
1137 bundle files: quux/file.py 11 chunks
1137 bundling files: quux/file.py 11 chunks
1138 changesets: 1 chunks
1138 changesets: 1 chunks
1139 add changeset ef1ea85a6374
1139 add changeset ef1ea85a6374
1140 changesets: 2 chunks
1140 changesets: 2 chunks
1141 add changeset f9cafe1212c8
1141 add changeset f9cafe1212c8
1142 changesets: 3 chunks
1142 changesets: 3 chunks
1143 add changeset 911600dab2ae
1143 add changeset 911600dab2ae
1144 adding manifests
1144 adding manifests
1145 manifests: 1 chunks
1145 manifests: 1 chunks
1146 manifests: 2 chunks
1146 manifests: 2 chunks
1147 manifests: 3 chunks
1147 manifests: 3 chunks
1148 adding file changes
1148 adding file changes
1149 adding foo/Bar/file.txt revisions
1149 adding foo/Bar/file.txt revisions
1150 files: 1 chunks
1150 files: 1 chunks
1151 adding foo/file.txt revisions
1151 adding foo/file.txt revisions
1152 files: 2 chunks
1152 files: 2 chunks
1153 adding quux/file.py revisions
1153 adding quux/file.py revisions
1154 files: 3 chunks
1154 files: 3 chunks
1155 added 3 changesets with 3 changes to 3 files
1155 added 3 changesets with 3 changes to 3 files
1156 calling hook pretxnchangegroup.acl: hgext.acl.hook
1156 calling hook pretxnchangegroup.acl: hgext.acl.hook
1157 acl: acl.allow enabled, 1 entries for user barney
1157 acl: acl.allow enabled, 1 entries for user barney
1158 acl: acl.deny enabled, 0 entries for user barney
1158 acl: acl.deny enabled, 0 entries for user barney
1159 acl: allowing changeset ef1ea85a6374
1159 acl: allowing changeset ef1ea85a6374
1160 acl: allowing changeset f9cafe1212c8
1160 acl: allowing changeset f9cafe1212c8
1161 acl: allowing changeset 911600dab2ae
1161 acl: allowing changeset 911600dab2ae
1162 updating the branch cache
1162 updating the branch cache
1163 rolling back last transaction
1163 rolling back last transaction
1164 0:6675d58eff77
1164 0:6675d58eff77
1165
1165
@@ -1,365 +1,365 b''
1 ====== Setting up test
1 ====== Setting up test
2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
3 created new head
3 created new head
4 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
4 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
5 checking changesets
5 checking changesets
6 checking manifests
6 checking manifests
7 crosschecking files in changesets and manifests
7 crosschecking files in changesets and manifests
8 checking files
8 checking files
9 4 files, 9 changesets, 7 total revisions
9 4 files, 9 changesets, 7 total revisions
10 ====== Bundle --all
10 ====== Bundle --all
11 9 changesets found
11 9 changesets found
12 ====== Bundle test to full.hg
12 ====== Bundle test to full.hg
13 searching for changes
13 searching for changes
14 9 changesets found
14 9 changesets found
15 ====== Unbundle full.hg in test
15 ====== Unbundle full.hg in test
16 adding changesets
16 adding changesets
17 adding manifests
17 adding manifests
18 adding file changes
18 adding file changes
19 added 0 changesets with 0 changes to 4 files
19 added 0 changesets with 0 changes to 4 files
20 (run 'hg update' to get a working copy)
20 (run 'hg update' to get a working copy)
21 ====== Verify empty
21 ====== Verify empty
22 checking changesets
22 checking changesets
23 checking manifests
23 checking manifests
24 crosschecking files in changesets and manifests
24 crosschecking files in changesets and manifests
25 checking files
25 checking files
26 0 files, 0 changesets, 0 total revisions
26 0 files, 0 changesets, 0 total revisions
27 ====== Pull full.hg into test (using --cwd)
27 ====== Pull full.hg into test (using --cwd)
28 pulling from ../full.hg
28 pulling from ../full.hg
29 searching for changes
29 searching for changes
30 no changes found
30 no changes found
31 ====== Pull full.hg into empty (using --cwd)
31 ====== Pull full.hg into empty (using --cwd)
32 pulling from ../full.hg
32 pulling from ../full.hg
33 requesting all changes
33 requesting all changes
34 adding changesets
34 adding changesets
35 adding manifests
35 adding manifests
36 adding file changes
36 adding file changes
37 added 9 changesets with 7 changes to 4 files (+1 heads)
37 added 9 changesets with 7 changes to 4 files (+1 heads)
38 (run 'hg heads' to see heads, 'hg merge' to merge)
38 (run 'hg heads' to see heads, 'hg merge' to merge)
39 ====== Rollback empty
39 ====== Rollback empty
40 rolling back last transaction
40 rolling back last transaction
41 ====== Pull full.hg into empty again (using --cwd)
41 ====== Pull full.hg into empty again (using --cwd)
42 pulling from ../full.hg
42 pulling from ../full.hg
43 requesting all changes
43 requesting all changes
44 adding changesets
44 adding changesets
45 adding manifests
45 adding manifests
46 adding file changes
46 adding file changes
47 added 9 changesets with 7 changes to 4 files (+1 heads)
47 added 9 changesets with 7 changes to 4 files (+1 heads)
48 (run 'hg heads' to see heads, 'hg merge' to merge)
48 (run 'hg heads' to see heads, 'hg merge' to merge)
49 ====== Pull full.hg into test (using -R)
49 ====== Pull full.hg into test (using -R)
50 pulling from full.hg
50 pulling from full.hg
51 searching for changes
51 searching for changes
52 no changes found
52 no changes found
53 ====== Pull full.hg into empty (using -R)
53 ====== Pull full.hg into empty (using -R)
54 pulling from full.hg
54 pulling from full.hg
55 searching for changes
55 searching for changes
56 no changes found
56 no changes found
57 ====== Rollback empty
57 ====== Rollback empty
58 rolling back last transaction
58 rolling back last transaction
59 ====== Pull full.hg into empty again (using -R)
59 ====== Pull full.hg into empty again (using -R)
60 pulling from full.hg
60 pulling from full.hg
61 requesting all changes
61 requesting all changes
62 adding changesets
62 adding changesets
63 adding manifests
63 adding manifests
64 adding file changes
64 adding file changes
65 added 9 changesets with 7 changes to 4 files (+1 heads)
65 added 9 changesets with 7 changes to 4 files (+1 heads)
66 (run 'hg heads' to see heads, 'hg merge' to merge)
66 (run 'hg heads' to see heads, 'hg merge' to merge)
67 ====== Log -R full.hg in fresh empty
67 ====== Log -R full.hg in fresh empty
68 changeset: 8:088ff9d6e1e1
68 changeset: 8:088ff9d6e1e1
69 tag: tip
69 tag: tip
70 parent: 3:ac69c658229d
70 parent: 3:ac69c658229d
71 user: test
71 user: test
72 date: Mon Jan 12 13:46:40 1970 +0000
72 date: Mon Jan 12 13:46:40 1970 +0000
73 summary: 0.3m
73 summary: 0.3m
74
74
75 changeset: 7:27f57c869697
75 changeset: 7:27f57c869697
76 user: test
76 user: test
77 date: Mon Jan 12 13:46:40 1970 +0000
77 date: Mon Jan 12 13:46:40 1970 +0000
78 summary: 1.3m
78 summary: 1.3m
79
79
80 changeset: 6:1e3f6b843bd6
80 changeset: 6:1e3f6b843bd6
81 user: test
81 user: test
82 date: Mon Jan 12 13:46:40 1970 +0000
82 date: Mon Jan 12 13:46:40 1970 +0000
83 summary: 1.3
83 summary: 1.3
84
84
85 changeset: 5:024e4e7df376
85 changeset: 5:024e4e7df376
86 user: test
86 user: test
87 date: Mon Jan 12 13:46:40 1970 +0000
87 date: Mon Jan 12 13:46:40 1970 +0000
88 summary: 1.2
88 summary: 1.2
89
89
90 changeset: 4:5f4f3ceb285e
90 changeset: 4:5f4f3ceb285e
91 parent: 0:5649c9d34dd8
91 parent: 0:5649c9d34dd8
92 user: test
92 user: test
93 date: Mon Jan 12 13:46:40 1970 +0000
93 date: Mon Jan 12 13:46:40 1970 +0000
94 summary: 1.1
94 summary: 1.1
95
95
96 changeset: 3:ac69c658229d
96 changeset: 3:ac69c658229d
97 user: test
97 user: test
98 date: Mon Jan 12 13:46:40 1970 +0000
98 date: Mon Jan 12 13:46:40 1970 +0000
99 summary: 0.3
99 summary: 0.3
100
100
101 changeset: 2:d62976ca1e50
101 changeset: 2:d62976ca1e50
102 user: test
102 user: test
103 date: Mon Jan 12 13:46:40 1970 +0000
103 date: Mon Jan 12 13:46:40 1970 +0000
104 summary: 0.2
104 summary: 0.2
105
105
106 changeset: 1:10b2180f755b
106 changeset: 1:10b2180f755b
107 user: test
107 user: test
108 date: Mon Jan 12 13:46:40 1970 +0000
108 date: Mon Jan 12 13:46:40 1970 +0000
109 summary: 0.1
109 summary: 0.1
110
110
111 changeset: 0:5649c9d34dd8
111 changeset: 0:5649c9d34dd8
112 user: test
112 user: test
113 date: Mon Jan 12 13:46:40 1970 +0000
113 date: Mon Jan 12 13:46:40 1970 +0000
114 summary: 0.0
114 summary: 0.0
115
115
116 ====== Pull ../full.hg into empty (with hook)
116 ====== Pull ../full.hg into empty (with hook)
117 changegroup hook: HG_NODE=5649c9d34dd87d0ecb5fd39672128376e83b22e1 HG_SOURCE=pull HG_URL=bundle:../full.hg
117 changegroup hook: HG_NODE=5649c9d34dd87d0ecb5fd39672128376e83b22e1 HG_SOURCE=pull HG_URL=bundle:../full.hg
118 pulling from bundle://../full.hg
118 pulling from bundle://../full.hg
119 requesting all changes
119 requesting all changes
120 adding changesets
120 adding changesets
121 adding manifests
121 adding manifests
122 adding file changes
122 adding file changes
123 added 9 changesets with 7 changes to 4 files (+1 heads)
123 added 9 changesets with 7 changes to 4 files (+1 heads)
124 (run 'hg heads' to see heads, 'hg merge' to merge)
124 (run 'hg heads' to see heads, 'hg merge' to merge)
125 ====== Rollback empty
125 ====== Rollback empty
126 rolling back last transaction
126 rolling back last transaction
127 ====== Log -R bundle:empty+full.hg
127 ====== Log -R bundle:empty+full.hg
128 8 7 6 5 4 3 2 1 0
128 8 7 6 5 4 3 2 1 0
129 ====== Pull full.hg into empty again (using -R; with hook)
129 ====== Pull full.hg into empty again (using -R; with hook)
130 changegroup hook: HG_NODE=5649c9d34dd87d0ecb5fd39672128376e83b22e1 HG_SOURCE=pull HG_URL=bundle:empty+full.hg
130 changegroup hook: HG_NODE=5649c9d34dd87d0ecb5fd39672128376e83b22e1 HG_SOURCE=pull HG_URL=bundle:empty+full.hg
131 pulling from full.hg
131 pulling from full.hg
132 requesting all changes
132 requesting all changes
133 adding changesets
133 adding changesets
134 adding manifests
134 adding manifests
135 adding file changes
135 adding file changes
136 added 9 changesets with 7 changes to 4 files (+1 heads)
136 added 9 changesets with 7 changes to 4 files (+1 heads)
137 (run 'hg heads' to see heads, 'hg merge' to merge)
137 (run 'hg heads' to see heads, 'hg merge' to merge)
138 ====== Create partial clones
138 ====== Create partial clones
139 requesting all changes
139 requesting all changes
140 adding changesets
140 adding changesets
141 adding manifests
141 adding manifests
142 adding file changes
142 adding file changes
143 added 4 changesets with 4 changes to 1 files
143 added 4 changesets with 4 changes to 1 files
144 updating to branch default
144 updating to branch default
145 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
145 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
146 updating to branch default
146 updating to branch default
147 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
147 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
148 ====== Log -R full.hg in partial
148 ====== Log -R full.hg in partial
149 changeset: 8:088ff9d6e1e1
149 changeset: 8:088ff9d6e1e1
150 tag: tip
150 tag: tip
151 parent: 3:ac69c658229d
151 parent: 3:ac69c658229d
152 user: test
152 user: test
153 date: Mon Jan 12 13:46:40 1970 +0000
153 date: Mon Jan 12 13:46:40 1970 +0000
154 summary: 0.3m
154 summary: 0.3m
155
155
156 changeset: 7:27f57c869697
156 changeset: 7:27f57c869697
157 user: test
157 user: test
158 date: Mon Jan 12 13:46:40 1970 +0000
158 date: Mon Jan 12 13:46:40 1970 +0000
159 summary: 1.3m
159 summary: 1.3m
160
160
161 changeset: 6:1e3f6b843bd6
161 changeset: 6:1e3f6b843bd6
162 user: test
162 user: test
163 date: Mon Jan 12 13:46:40 1970 +0000
163 date: Mon Jan 12 13:46:40 1970 +0000
164 summary: 1.3
164 summary: 1.3
165
165
166 changeset: 5:024e4e7df376
166 changeset: 5:024e4e7df376
167 user: test
167 user: test
168 date: Mon Jan 12 13:46:40 1970 +0000
168 date: Mon Jan 12 13:46:40 1970 +0000
169 summary: 1.2
169 summary: 1.2
170
170
171 changeset: 4:5f4f3ceb285e
171 changeset: 4:5f4f3ceb285e
172 parent: 0:5649c9d34dd8
172 parent: 0:5649c9d34dd8
173 user: test
173 user: test
174 date: Mon Jan 12 13:46:40 1970 +0000
174 date: Mon Jan 12 13:46:40 1970 +0000
175 summary: 1.1
175 summary: 1.1
176
176
177 changeset: 3:ac69c658229d
177 changeset: 3:ac69c658229d
178 user: test
178 user: test
179 date: Mon Jan 12 13:46:40 1970 +0000
179 date: Mon Jan 12 13:46:40 1970 +0000
180 summary: 0.3
180 summary: 0.3
181
181
182 changeset: 2:d62976ca1e50
182 changeset: 2:d62976ca1e50
183 user: test
183 user: test
184 date: Mon Jan 12 13:46:40 1970 +0000
184 date: Mon Jan 12 13:46:40 1970 +0000
185 summary: 0.2
185 summary: 0.2
186
186
187 changeset: 1:10b2180f755b
187 changeset: 1:10b2180f755b
188 user: test
188 user: test
189 date: Mon Jan 12 13:46:40 1970 +0000
189 date: Mon Jan 12 13:46:40 1970 +0000
190 summary: 0.1
190 summary: 0.1
191
191
192 changeset: 0:5649c9d34dd8
192 changeset: 0:5649c9d34dd8
193 user: test
193 user: test
194 date: Mon Jan 12 13:46:40 1970 +0000
194 date: Mon Jan 12 13:46:40 1970 +0000
195 summary: 0.0
195 summary: 0.0
196
196
197 ====== Incoming full.hg in partial
197 ====== Incoming full.hg in partial
198 comparing with bundle://../full.hg
198 comparing with bundle://../full.hg
199 searching for changes
199 searching for changes
200 changeset: 4:5f4f3ceb285e
200 changeset: 4:5f4f3ceb285e
201 parent: 0:5649c9d34dd8
201 parent: 0:5649c9d34dd8
202 user: test
202 user: test
203 date: Mon Jan 12 13:46:40 1970 +0000
203 date: Mon Jan 12 13:46:40 1970 +0000
204 summary: 1.1
204 summary: 1.1
205
205
206 changeset: 5:024e4e7df376
206 changeset: 5:024e4e7df376
207 user: test
207 user: test
208 date: Mon Jan 12 13:46:40 1970 +0000
208 date: Mon Jan 12 13:46:40 1970 +0000
209 summary: 1.2
209 summary: 1.2
210
210
211 changeset: 6:1e3f6b843bd6
211 changeset: 6:1e3f6b843bd6
212 user: test
212 user: test
213 date: Mon Jan 12 13:46:40 1970 +0000
213 date: Mon Jan 12 13:46:40 1970 +0000
214 summary: 1.3
214 summary: 1.3
215
215
216 changeset: 7:27f57c869697
216 changeset: 7:27f57c869697
217 user: test
217 user: test
218 date: Mon Jan 12 13:46:40 1970 +0000
218 date: Mon Jan 12 13:46:40 1970 +0000
219 summary: 1.3m
219 summary: 1.3m
220
220
221 changeset: 8:088ff9d6e1e1
221 changeset: 8:088ff9d6e1e1
222 tag: tip
222 tag: tip
223 parent: 3:ac69c658229d
223 parent: 3:ac69c658229d
224 user: test
224 user: test
225 date: Mon Jan 12 13:46:40 1970 +0000
225 date: Mon Jan 12 13:46:40 1970 +0000
226 summary: 0.3m
226 summary: 0.3m
227
227
228 ====== Outgoing -R full.hg vs partial2 in partial
228 ====== Outgoing -R full.hg vs partial2 in partial
229 comparing with ../partial2
229 comparing with ../partial2
230 searching for changes
230 searching for changes
231 changeset: 4:5f4f3ceb285e
231 changeset: 4:5f4f3ceb285e
232 parent: 0:5649c9d34dd8
232 parent: 0:5649c9d34dd8
233 user: test
233 user: test
234 date: Mon Jan 12 13:46:40 1970 +0000
234 date: Mon Jan 12 13:46:40 1970 +0000
235 summary: 1.1
235 summary: 1.1
236
236
237 changeset: 5:024e4e7df376
237 changeset: 5:024e4e7df376
238 user: test
238 user: test
239 date: Mon Jan 12 13:46:40 1970 +0000
239 date: Mon Jan 12 13:46:40 1970 +0000
240 summary: 1.2
240 summary: 1.2
241
241
242 changeset: 6:1e3f6b843bd6
242 changeset: 6:1e3f6b843bd6
243 user: test
243 user: test
244 date: Mon Jan 12 13:46:40 1970 +0000
244 date: Mon Jan 12 13:46:40 1970 +0000
245 summary: 1.3
245 summary: 1.3
246
246
247 changeset: 7:27f57c869697
247 changeset: 7:27f57c869697
248 user: test
248 user: test
249 date: Mon Jan 12 13:46:40 1970 +0000
249 date: Mon Jan 12 13:46:40 1970 +0000
250 summary: 1.3m
250 summary: 1.3m
251
251
252 changeset: 8:088ff9d6e1e1
252 changeset: 8:088ff9d6e1e1
253 tag: tip
253 tag: tip
254 parent: 3:ac69c658229d
254 parent: 3:ac69c658229d
255 user: test
255 user: test
256 date: Mon Jan 12 13:46:40 1970 +0000
256 date: Mon Jan 12 13:46:40 1970 +0000
257 summary: 0.3m
257 summary: 0.3m
258
258
259 ====== Outgoing -R does-not-exist.hg vs partial2 in partial
259 ====== Outgoing -R does-not-exist.hg vs partial2 in partial
260 abort: No such file or directory: ../does-not-exist.hg
260 abort: No such file or directory: ../does-not-exist.hg
261 ====== Direct clone from bundle (all-history)
261 ====== Direct clone from bundle (all-history)
262 requesting all changes
262 requesting all changes
263 adding changesets
263 adding changesets
264 adding manifests
264 adding manifests
265 adding file changes
265 adding file changes
266 added 9 changesets with 7 changes to 4 files (+1 heads)
266 added 9 changesets with 7 changes to 4 files (+1 heads)
267 updating to branch default
267 updating to branch default
268 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
268 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
269 changeset: 8:088ff9d6e1e1
269 changeset: 8:088ff9d6e1e1
270 tag: tip
270 tag: tip
271 parent: 3:ac69c658229d
271 parent: 3:ac69c658229d
272 user: test
272 user: test
273 date: Mon Jan 12 13:46:40 1970 +0000
273 date: Mon Jan 12 13:46:40 1970 +0000
274 summary: 0.3m
274 summary: 0.3m
275
275
276 changeset: 7:27f57c869697
276 changeset: 7:27f57c869697
277 user: test
277 user: test
278 date: Mon Jan 12 13:46:40 1970 +0000
278 date: Mon Jan 12 13:46:40 1970 +0000
279 summary: 1.3m
279 summary: 1.3m
280
280
281 ====== Unbundle incremental bundles into fresh empty in one go
281 ====== Unbundle incremental bundles into fresh empty in one go
282 1 changesets found
282 1 changesets found
283 1 changesets found
283 1 changesets found
284 adding changesets
284 adding changesets
285 adding manifests
285 adding manifests
286 adding file changes
286 adding file changes
287 added 1 changesets with 1 changes to 1 files
287 added 1 changesets with 1 changes to 1 files
288 adding changesets
288 adding changesets
289 adding manifests
289 adding manifests
290 adding file changes
290 adding file changes
291 added 1 changesets with 1 changes to 1 files
291 added 1 changesets with 1 changes to 1 files
292 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
292 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
293 ====== test for 540d1059c802
293 ====== test for 540d1059c802
294 updating to branch default
294 updating to branch default
295 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
295 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
296 searching for changes
296 searching for changes
297 1 changesets found
297 1 changesets found
298 comparing with ../bundle.hg
298 comparing with ../bundle.hg
299 searching for changes
299 searching for changes
300 changeset: 2:ed1b79f46b9a
300 changeset: 2:ed1b79f46b9a
301 tag: tip
301 tag: tip
302 parent: 0:bbd179dfa0a7
302 parent: 0:bbd179dfa0a7
303 user: test
303 user: test
304 date: Thu Jan 01 00:00:00 1970 +0000
304 date: Thu Jan 01 00:00:00 1970 +0000
305 summary: change foo
305 summary: change foo
306
306
307 ===== test that verify bundle does not traceback
307 ===== test that verify bundle does not traceback
308 abort: 00changelog.i@bbd179dfa0a7: unknown parent!
308 abort: 00changelog.i@bbd179dfa0a7: unknown parent!
309 abort: cannot verify bundle or remote repos
309 abort: cannot verify bundle or remote repos
310 checking changesets
310 checking changesets
311 checking manifests
311 checking manifests
312 crosschecking files in changesets and manifests
312 crosschecking files in changesets and manifests
313 checking files
313 checking files
314 2 files, 2 changesets, 2 total revisions
314 2 files, 2 changesets, 2 total revisions
315 ====== diff against bundle
315 ====== diff against bundle
316 diff -r 088ff9d6e1e1 anotherfile
316 diff -r 088ff9d6e1e1 anotherfile
317 --- a/anotherfile Mon Jan 12 13:46:40 1970 +0000
317 --- a/anotherfile Mon Jan 12 13:46:40 1970 +0000
318 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
318 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
319 @@ -1,4 +0,0 @@
319 @@ -1,4 +0,0 @@
320 -0
320 -0
321 -1
321 -1
322 -2
322 -2
323 -3
323 -3
324 ====== bundle single branch
324 ====== bundle single branch
325 adding a
325 adding a
326 adding b
326 adding b
327 adding b1
327 adding b1
328 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
328 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
329 adding c
329 adding c
330 created new head
330 created new head
331 adding c1
331 adding c1
332 == bundling via incoming
332 == bundling via incoming
333 comparing with .
333 comparing with .
334 searching for changes
334 searching for changes
335 d2ae7f538514cd87c17547b0de4cea71fe1af9fb
335 d2ae7f538514cd87c17547b0de4cea71fe1af9fb
336 5ece8e77363e2b5269e27c66828b72da29e4341a
336 5ece8e77363e2b5269e27c66828b72da29e4341a
337 == bundling
337 == bundling
338 searching for changes
338 searching for changes
339 common changesets up to c0025332f9ed
339 common changesets up to c0025332f9ed
340 2 changesets found
340 2 changesets found
341 list of changesets:
341 list of changesets:
342 d2ae7f538514cd87c17547b0de4cea71fe1af9fb
342 d2ae7f538514cd87c17547b0de4cea71fe1af9fb
343 5ece8e77363e2b5269e27c66828b72da29e4341a
343 5ece8e77363e2b5269e27c66828b72da29e4341a
344 bundle changes: 0 chunks
344 bundling changes: 0 chunks
345 bundle changes: 1 chunks
345 bundling changes: 1 chunks
346 bundle changes: 2 chunks
346 bundling changes: 2 chunks
347 bundle changes: 3 chunks
347 bundling changes: 3 chunks
348 bundle changes: 4 chunks
348 bundling changes: 4 chunks
349 bundle changes: 5 chunks
349 bundling changes: 5 chunks
350 bundle changes: 6 chunks
350 bundling changes: 6 chunks
351 bundle manifests: 0 chunks
351 bundling manifests: 0 chunks
352 bundle manifests: 1 chunks
352 bundling manifests: 1 chunks
353 bundle manifests: 2 chunks
353 bundling manifests: 2 chunks
354 bundle manifests: 3 chunks
354 bundling manifests: 3 chunks
355 bundle manifests: 4 chunks
355 bundling manifests: 4 chunks
356 bundle manifests: 5 chunks
356 bundling manifests: 5 chunks
357 bundle manifests: 6 chunks
357 bundling manifests: 6 chunks
358 bundle files: b 0 chunks
358 bundling files: b 0 chunks
359 bundle files: b 1 chunks
359 bundling files: b 1 chunks
360 bundle files: b 2 chunks
360 bundling files: b 2 chunks
361 bundle files: b 3 chunks
361 bundling files: b 3 chunks
362 bundle files: b1 4 chunks
362 bundling files: b1 4 chunks
363 bundle files: b1 5 chunks
363 bundling files: b1 5 chunks
364 bundle files: b1 6 chunks
364 bundling files: b1 6 chunks
365 bundle files: b1 7 chunks
365 bundling files: b1 7 chunks
@@ -1,25 +1,25 b''
1 % create source repository
1 % create source repository
2 adding a
2 adding a
3 adding b
3 adding b
4 % clone and pull to break links
4 % clone and pull to break links
5 requesting all changes
5 requesting all changes
6 adding changesets
6 adding changesets
7 adding manifests
7 adding manifests
8 adding file changes
8 adding file changes
9 added 1 changesets with 2 changes to 2 files
9 added 1 changesets with 2 changes to 2 files
10 updating to branch default
10 updating to branch default
11 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
11 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
12 created new head
12 created new head
13 % relink
13 % relink
14 relinking .hg/store
14 relinking .hg/store
15 collected 5 candidate storage files
15 collected 5 candidate storage files
16 not linkable: 00changelog.i
16 not linkable: 00changelog.i
17 not linkable: 00manifest.i
17 not linkable: 00manifest.i
18 not linkable: data/b.i
18 not linkable: data/b.i
19 pruned down to 2 probably relinkable files
19 pruned down to 2 probably relinkable files
20 relink: data/a.i 1/2 files (50.00%)
20 relinking: data/a.i 1/2 files (50.00%)
21 not linkable: data/dummy.i
21 not linkable: data/dummy.i
22 relinked 1 files (136 bytes reclaimed)
22 relinked 1 files (136 bytes reclaimed)
23 % check hardlinks
23 % check hardlinks
24 repo/.hg/store/data/a.i == clone/.hg/store/data/a.i
24 repo/.hg/store/data/a.i == clone/.hg/store/data/a.i
25 repo/.hg/store/data/b.i != clone/.hg/store/data/b.i
25 repo/.hg/store/data/b.i != clone/.hg/store/data/b.i
@@ -1,268 +1,268 b''
1 % first revision, no sub
1 % first revision, no sub
2 adding a
2 adding a
3 % add first sub
3 % add first sub
4 adding a
4 adding a
5 committing subrepository s
5 committing subrepository s
6 % add sub sub
6 % add sub sub
7 committing subrepository s
7 committing subrepository s
8 committing subrepository ss
8 committing subrepository ss
9 % bump sub rev
9 % bump sub rev
10 committing subrepository s
10 committing subrepository s
11 % leave sub dirty
11 % leave sub dirty
12 committing subrepository s
12 committing subrepository s
13 changeset: 3:1c833a7a9e3a
13 changeset: 3:1c833a7a9e3a
14 tag: tip
14 tag: tip
15 user: test
15 user: test
16 date: Thu Jan 01 00:00:00 1970 +0000
16 date: Thu Jan 01 00:00:00 1970 +0000
17 summary: 4
17 summary: 4
18
18
19 % check caching
19 % check caching
20 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
20 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
21 % restore
21 % restore
22 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
22 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
23 path s
23 path s
24 source s
24 source s
25 revision 1c833a7a9e3a4445c711aaf0f012379cd0d4034e
25 revision 1c833a7a9e3a4445c711aaf0f012379cd0d4034e
26 % new branch for merge tests
26 % new branch for merge tests
27 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
27 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
28 adding t/t
28 adding t/t
29 % 5
29 % 5
30 committing subrepository t
30 committing subrepository t
31 created new head
31 created new head
32 % 6
32 % 6
33 committing subrepository t
33 committing subrepository t
34 path s
34 path s
35 source s
35 source s
36 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
36 revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4
37 path t
37 path t
38 source t
38 source t
39 revision 6747d179aa9a688023c4b0cad32e4c92bb7f34ad
39 revision 6747d179aa9a688023c4b0cad32e4c92bb7f34ad
40 % 7
40 % 7
41 committing subrepository t
41 committing subrepository t
42 % 8
42 % 8
43 % merge tests
43 % merge tests
44 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
44 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
45 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
45 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
46 (branch merge, don't forget to commit)
46 (branch merge, don't forget to commit)
47 path s
47 path s
48 source s
48 source s
49 revision fc627a69481fcbe5f1135069e8a3881c023e4cf5
49 revision fc627a69481fcbe5f1135069e8a3881c023e4cf5
50 path t
50 path t
51 source t
51 source t
52 revision 60ca1237c19474e7a3978b0dc1ca4e6f36d51382
52 revision 60ca1237c19474e7a3978b0dc1ca4e6f36d51382
53 created new head
53 created new head
54 searching for copies back to rev 2
54 searching for copies back to rev 2
55 resolving manifests
55 resolving manifests
56 overwrite None partial False
56 overwrite None partial False
57 ancestor 1f14a2e2d3ec local f0d2028bf86d+ remote 1831e14459c4
57 ancestor 1f14a2e2d3ec local f0d2028bf86d+ remote 1831e14459c4
58 .hgsubstate: versions differ -> m
58 .hgsubstate: versions differ -> m
59 update: .hgsubstate 1/1 files (100.00%)
59 update: .hgsubstate 1/1 files (100.00%)
60 subrepo merge f0d2028bf86d+ 1831e14459c4 1f14a2e2d3ec
60 subrepo merge f0d2028bf86d+ 1831e14459c4 1f14a2e2d3ec
61 subrepo t: other changed, get t:6747d179aa9a688023c4b0cad32e4c92bb7f34ad:hg
61 subrepo t: other changed, get t:6747d179aa9a688023c4b0cad32e4c92bb7f34ad:hg
62 getting subrepo t
62 getting subrepo t
63 resolving manifests
63 resolving manifests
64 overwrite True partial False
64 overwrite True partial False
65 ancestor 60ca1237c194+ local 60ca1237c194+ remote 6747d179aa9a
65 ancestor 60ca1237c194+ local 60ca1237c194+ remote 6747d179aa9a
66 t: remote is newer -> g
66 t: remote is newer -> g
67 update: t 1/1 files (100.00%)
67 update: t 1/1 files (100.00%)
68 getting t
68 getting t
69 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
69 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
70 (branch merge, don't forget to commit)
70 (branch merge, don't forget to commit)
71 path s
71 path s
72 source s
72 source s
73 revision fc627a69481fcbe5f1135069e8a3881c023e4cf5
73 revision fc627a69481fcbe5f1135069e8a3881c023e4cf5
74 path t
74 path t
75 source t
75 source t
76 revision 6747d179aa9a688023c4b0cad32e4c92bb7f34ad
76 revision 6747d179aa9a688023c4b0cad32e4c92bb7f34ad
77 committing subrepository t
77 committing subrepository t
78 searching for copies back to rev 2
78 searching for copies back to rev 2
79 resolving manifests
79 resolving manifests
80 overwrite None partial False
80 overwrite None partial False
81 ancestor 1831e14459c4 local e45c8b14af55+ remote f94576341bcf
81 ancestor 1831e14459c4 local e45c8b14af55+ remote f94576341bcf
82 .hgsubstate: versions differ -> m
82 .hgsubstate: versions differ -> m
83 update: .hgsubstate 1/1 files (100.00%)
83 update: .hgsubstate 1/1 files (100.00%)
84 subrepo merge e45c8b14af55+ f94576341bcf 1831e14459c4
84 subrepo merge e45c8b14af55+ f94576341bcf 1831e14459c4
85 subrepo t: both sides changed, merge with t:7af322bc1198a32402fe903e0b7ebcfc5c9bf8f4:hg
85 subrepo t: both sides changed, merge with t:7af322bc1198a32402fe903e0b7ebcfc5c9bf8f4:hg
86 merging subrepo t
86 merging subrepo t
87 searching for copies back to rev 2
87 searching for copies back to rev 2
88 resolving manifests
88 resolving manifests
89 overwrite None partial False
89 overwrite None partial False
90 ancestor 6747d179aa9a local 20a0db6fbf6c+ remote 7af322bc1198
90 ancestor 6747d179aa9a local 20a0db6fbf6c+ remote 7af322bc1198
91 t: versions differ -> m
91 t: versions differ -> m
92 preserving t for resolve of t
92 preserving t for resolve of t
93 update: t 1/1 files (100.00%)
93 update: t 1/1 files (100.00%)
94 picked tool 'internal:merge' for t (binary False symlink False)
94 picked tool 'internal:merge' for t (binary False symlink False)
95 merging t
95 merging t
96 my t@20a0db6fbf6c+ other t@7af322bc1198 ancestor t@6747d179aa9a
96 my t@20a0db6fbf6c+ other t@7af322bc1198 ancestor t@6747d179aa9a
97 warning: conflicts during merge.
97 warning: conflicts during merge.
98 merging t failed!
98 merging t failed!
99 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
99 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
100 use 'hg resolve' to retry unresolved file merges or 'hg update -C' to abandon
100 use 'hg resolve' to retry unresolved file merges or 'hg update -C' to abandon
101 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
101 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
102 (branch merge, don't forget to commit)
102 (branch merge, don't forget to commit)
103 % should conflict
103 % should conflict
104 <<<<<<< local
104 <<<<<<< local
105 conflict
105 conflict
106 =======
106 =======
107 t3
107 t3
108 >>>>>>> other
108 >>>>>>> other
109 % clone
109 % clone
110 updating to branch default
110 updating to branch default
111 pulling subrepo s from .../sub/t/s
111 pulling subrepo s from .../sub/t/s
112 requesting all changes
112 requesting all changes
113 adding changesets
113 adding changesets
114 adding manifests
114 adding manifests
115 adding file changes
115 adding file changes
116 added 4 changesets with 5 changes to 3 files
116 added 4 changesets with 5 changes to 3 files
117 pulling subrepo ss from .../sub/t/s/ss
117 pulling subrepo ss from .../sub/t/s/ss
118 requesting all changes
118 requesting all changes
119 adding changesets
119 adding changesets
120 adding manifests
120 adding manifests
121 adding file changes
121 adding file changes
122 added 1 changesets with 1 changes to 1 files
122 added 1 changesets with 1 changes to 1 files
123 pulling subrepo t from .../sub/t/t
123 pulling subrepo t from .../sub/t/t
124 requesting all changes
124 requesting all changes
125 adding changesets
125 adding changesets
126 adding manifests
126 adding manifests
127 adding file changes
127 adding file changes
128 added 4 changesets with 4 changes to 1 files (+1 heads)
128 added 4 changesets with 4 changes to 1 files (+1 heads)
129 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
129 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
130 path s
130 path s
131 source s
131 source s
132 revision fc627a69481fcbe5f1135069e8a3881c023e4cf5
132 revision fc627a69481fcbe5f1135069e8a3881c023e4cf5
133 path t
133 path t
134 source t
134 source t
135 revision 20a0db6fbf6c3d2836e6519a642ae929bfc67c0e
135 revision 20a0db6fbf6c3d2836e6519a642ae929bfc67c0e
136 % push
136 % push
137 committing subrepository t
137 committing subrepository t
138 pushing ...sub/t
138 pushing ...sub/t
139 pushing ...subrepo ss
139 pushing ...subrepo ss
140 searching for changes
140 searching for changes
141 no changes found
141 no changes found
142 pushing ...subrepo s
142 pushing ...subrepo s
143 searching for changes
143 searching for changes
144 no changes found
144 no changes found
145 pushing ...subrepo t
145 pushing ...subrepo t
146 searching for changes
146 searching for changes
147 adding changesets
147 adding changesets
148 adding manifests
148 adding manifests
149 adding file changes
149 adding file changes
150 added 1 changesets with 1 changes to 1 files
150 added 1 changesets with 1 changes to 1 files
151 searching for changes
151 searching for changes
152 adding changesets
152 adding changesets
153 adding manifests
153 adding manifests
154 adding file changes
154 adding file changes
155 added 1 changesets with 1 changes to 1 files
155 added 1 changesets with 1 changes to 1 files
156 % push -f
156 % push -f
157 committing subrepository s
157 committing subrepository s
158 abort: push creates new remote heads on branch 'default'!
158 abort: push creates new remote heads on branch 'default'!
159 pushing ...sub/t
159 pushing ...sub/t
160 pushing ...subrepo ss
160 pushing ...subrepo ss
161 searching for changes
161 searching for changes
162 no changes found
162 no changes found
163 pushing ...subrepo s
163 pushing ...subrepo s
164 searching for changes
164 searching for changes
165 (did you forget to merge? use push -f to force)
165 (did you forget to merge? use push -f to force)
166 pushing ...subrepo t
166 pushing ...subrepo t
167 searching for changes
167 searching for changes
168 no changes found
168 no changes found
169 searching for changes
169 searching for changes
170 adding changesets
170 adding changesets
171 adding manifests
171 adding manifests
172 adding file changes
172 adding file changes
173 added 1 changesets with 1 changes to 1 files
173 added 1 changesets with 1 changes to 1 files
174 pushing ...sub/t
174 pushing ...sub/t
175 pushing ...subrepo ss
175 pushing ...subrepo ss
176 searching for changes
176 searching for changes
177 no changes found
177 no changes found
178 pushing ...subrepo s
178 pushing ...subrepo s
179 searching for changes
179 searching for changes
180 adding changesets
180 adding changesets
181 adding manifests
181 adding manifests
182 adding file changes
182 adding file changes
183 added 1 changesets with 1 changes to 1 files (+1 heads)
183 added 1 changesets with 1 changes to 1 files (+1 heads)
184 pushing ...subrepo t
184 pushing ...subrepo t
185 searching for changes
185 searching for changes
186 no changes found
186 no changes found
187 searching for changes
187 searching for changes
188 no changes found
188 no changes found
189 % update
189 % update
190 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
190 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
191 committing subrepository t
191 committing subrepository t
192 % pull
192 % pull
193 pulling ...sub/t
193 pulling ...sub/t
194 searching for changes
194 searching for changes
195 adding changesets
195 adding changesets
196 adding manifests
196 adding manifests
197 adding file changes
197 adding file changes
198 added 1 changesets with 1 changes to 1 files
198 added 1 changesets with 1 changes to 1 files
199 (run 'hg update' to get a working copy)
199 (run 'hg update' to get a working copy)
200 pulling subrepo t from .../sub/t/t
200 pulling subrepo t from .../sub/t/t
201 searching for changes
201 searching for changes
202 adding changesets
202 adding changesets
203 adding manifests
203 adding manifests
204 adding file changes
204 adding file changes
205 added 1 changesets with 1 changes to 1 files
205 added 1 changesets with 1 changes to 1 files
206 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
206 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
207 blah
207 blah
208 % bogus subrepo path aborts
208 % bogus subrepo path aborts
209 abort: missing ] in subrepo source
209 abort: missing ] in subrepo source
210 % issue 1986
210 % issue 1986
211 adding a
211 adding a
212 marked working directory as branch br
212 marked working directory as branch br
213 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
213 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
214 adding b
214 adding b
215 created new head
215 created new head
216 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
216 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
217 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
217 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
218 (branch merge, don't forget to commit)
218 (branch merge, don't forget to commit)
219 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
219 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
220 adding c
220 adding c
221 created new head
221 created new head
222 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
222 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
223 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
223 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
224 (branch merge, don't forget to commit)
224 (branch merge, don't forget to commit)
225 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
225 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
226 adding .hgsub
226 adding .hgsub
227 committing subrepository s
227 committing subrepository s
228 marked working directory as branch br
228 marked working directory as branch br
229 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
229 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
230 adding b
230 adding b
231 committing subrepository s
231 committing subrepository s
232 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
232 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
233 adding c
233 adding c
234 created new head
234 created new head
235 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
235 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
236 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
236 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
237 (branch merge, don't forget to commit)
237 (branch merge, don't forget to commit)
238 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
238 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
239 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
239 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
240 adding d
240 adding d
241 committing subrepository s
241 committing subrepository s
242 created new head
242 created new head
243 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
243 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
244 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
244 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
245 adding e
245 adding e
246 committing subrepository s
246 committing subrepository s
247 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
247 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
248 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
248 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
249 (branch merge, don't forget to commit)
249 (branch merge, don't forget to commit)
250 % test subrepo delete from .hgsubstate
250 % test subrepo delete from .hgsubstate
251 adding testdelete/nested/foo
251 adding testdelete/nested/foo
252 adding testdelete/nested2/foo
252 adding testdelete/nested2/foo
253 adding testdelete/.hgsub
253 adding testdelete/.hgsub
254 committing subrepository nested2
254 committing subrepository nested2
255 committing subrepository nested
255 committing subrepository nested
256 nested
256 nested
257 % test repository cloning
257 % test repository cloning
258 adding nested_absolute/foo
258 adding nested_absolute/foo
259 adding nested_relative/foo2
259 adding nested_relative/foo2
260 adding main/.hgsub
260 adding main/.hgsub
261 committing subrepository nested_relative
261 committing subrepository nested_relative
262 committing subrepository nested_absolute
262 committing subrepository nested_absolute
263 updating to branch default
263 updating to branch default
264 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
264 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
265 [paths]
265 [paths]
266 default = $HGTMP/test-subrepo/sub/mercurial/main/nested_absolute
266 default = $HGTMP/test-subrepo/sub/mercurial/nested_absolute
267 [paths]
267 [paths]
268 default = $HGTMP/test-subrepo/sub/mercurial/main/nested_relative
268 default = $HGTMP/test-subrepo/sub/mercurial/main/../nested_relative
General Comments 0
You need to be logged in to leave comments. Login now