##// END OF EJS Templates
Show number (-v) and list (--debug) of changesets with bundle/pull/push etc.
Thomas Arendsen Hein -
r3513:9383af6f default
parent child Browse files
Show More
@@ -1,1832 +1,1841 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
class localrepository(repo.repository):
    # feature names this repository implementation advertises;
    # presumably probed by remote peers during protocol negotiation —
    # NOTE(review): consumers of this tuple are not visible in this file
    capabilities = ('lookup', 'changegroupsubset')
19
19
    def __del__(self):
        # drop the transaction handle at teardown; presumably breaks a
        # repo <-> transaction reference cycle — TODO confirm
        self.transhandle = None
    def __init__(self, parentui, path=None, create=0):
        """Open (or, with create=1, initialize) the repository at path.

        If path is None, walk upward from the current directory until a
        '.hg' directory is found.  Raises repo.RepoError when no
        repository exists, or (with create) when one already does.
        """
        repo.repository.__init__(self)
        if not path:
            # locate the repository root by searching parent directories
            p = os.getcwd()
            while not os.path.isdir(os.path.join(p, ".hg")):
                oldp = p
                p = os.path.dirname(p)
                if p == oldp:
                    # reached the filesystem root without finding .hg
                    raise repo.RepoError(_("There is no Mercurial repository"
                                           " here (.hg not found)"))
            path = p
        self.path = os.path.join(path, ".hg")

        if not os.path.isdir(self.path):
            if create:
                if not os.path.exists(path):
                    os.mkdir(path)
                os.mkdir(self.path)
                os.mkdir(self.join("data"))
            else:
                raise repo.RepoError(_("repository %s not found") % path)
        elif create:
            raise repo.RepoError(_("repository %s already exists") % path)

        self.root = os.path.abspath(path)
        self.origroot = path
        self.ui = ui.ui(parentui=parentui)
        # three file openers: .hg metadata, the store (currently the same
        # directory as .hg), and the working directory
        self.opener = util.opener(self.path)
        self.sopener = util.opener(self.path)
        self.wopener = util.opener(self.root)

        try:
            self.ui.readconfig(self.join("hgrc"), self.root)
        except IOError:
            # a repository without a per-repo hgrc is valid
            pass

        # derive the revlog format version and feature flags from config
        v = self.ui.configrevlog()
        self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
        self.revlogv1 = self.revlogversion != revlog.REVLOGV0
        fl = v.get('flags', None)
        flags = 0
        if fl != None:
            for x in fl.split():
                flags |= revlog.flagstr(x)
        elif self.revlogv1:
            flags = revlog.REVLOG_DEFAULT_FLAGS

        v = self.revlogversion | flags
        self.manifest = manifest.manifest(self.sopener, v)
        self.changelog = changelog.changelog(self.sopener, v)

        # the changelog might not have the inline index flag
        # on. If the format of the changelog is the same as found in
        # .hgrc, apply any flags found in the .hgrc as well.
        # Otherwise, just version from the changelog
        v = self.changelog.version
        if v == self.revlogversion:
            v |= flags
        self.revlogversion = v

        # lazily populated caches; None means "not computed yet"
        self.tagscache = None
        self.branchcache = None
        self.nodetagscache = None
        self.encodepats = None
        self.decodepats = None
        self.transhandle = None

        self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
90
90
91 def url(self):
91 def url(self):
92 return 'file:' + self.root
92 return 'file:' + self.root
93
93
    def hook(self, name, throw=False, **args):
        """Run every configured hook whose config name matches 'name'.

        Hooks come from the [hooks] config section; both 'name' and
        'name.suffix' entries match.  Python hooks ('python:mod.func')
        and shell hooks are supported.  Results are or-combined; with
        throw=True a failing hook raises util.Abort instead of warning.
        """
        def callhook(hname, funcname):
            '''call python hook. hook is callable object, looked up as
            name in python module. if callable returns "true", hook
            fails, else passes. if hook raises exception, treated as
            hook failure. exception propagates if throw is "true".

            reason for "true" meaning "hook failed" is so that
            unmodified commands (e.g. mercurial.commands.update) can
            be run as hooks without wrappers to convert return values.'''

            self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
            d = funcname.rfind('.')
            if d == -1:
                raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
                                 % (hname, funcname))
            modname = funcname[:d]
            try:
                obj = __import__(modname)
            except ImportError:
                try:
                    # extensions are loaded with hgext_ prefix
                    obj = __import__("hgext_%s" % modname)
                except ImportError:
                    raise util.Abort(_('%s hook is invalid '
                                       '(import of "%s" failed)') %
                                     (hname, modname))
            try:
                # walk the dotted path down to the hook callable
                for p in funcname.split('.')[1:]:
                    obj = getattr(obj, p)
            except AttributeError, err:
                raise util.Abort(_('%s hook is invalid '
                                   '("%s" is not defined)') %
                                 (hname, funcname))
            if not callable(obj):
                raise util.Abort(_('%s hook is invalid '
                                   '("%s" is not callable)') %
                                 (hname, funcname))
            try:
                r = obj(ui=self.ui, repo=self, hooktype=name, **args)
            except (KeyboardInterrupt, util.SignalInterrupt):
                # never swallow interrupts
                raise
            except Exception, exc:
                if isinstance(exc, util.Abort):
                    self.ui.warn(_('error: %s hook failed: %s\n') %
                                 (hname, exc.args[0]))
                else:
                    self.ui.warn(_('error: %s hook raised an exception: '
                                   '%s\n') % (hname, exc))
                if throw:
                    raise
                self.ui.print_exc()
                return True
            if r:
                if throw:
                    raise util.Abort(_('%s hook failed') % hname)
                self.ui.warn(_('warning: %s hook failed\n') % hname)
            return r

        def runhook(name, cmd):
            # run a shell hook; args are exported as HG_* environment
            # variables, and a non-zero exit status means failure
            self.ui.note(_("running hook %s: %s\n") % (name, cmd))
            env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
            r = util.system(cmd, environ=env, cwd=self.root)
            if r:
                desc, r = util.explain_exit(r)
                if throw:
                    raise util.Abort(_('%s hook %s') % (name, desc))
                self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
            return r

        r = False
        # collect matching hooks and run them in sorted config-name order
        hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
                 if hname.split(".", 1)[0] == name and cmd]
        hooks.sort()
        for hname, cmd in hooks:
            if cmd.startswith('python:'):
                r = callhook(hname, cmd[7:].strip()) or r
            else:
                r = runhook(hname, cmd) or r
        return r
174
174
    # characters that may never appear in a tag name
    tag_disallowed = ':\r\n'
176
176
    def tag(self, name, node, message, local, user, date):
        '''tag a revision with a symbolic name.

        if local is True, the tag is stored in a per-repository file.
        otherwise, it is stored in the .hgtags file, and a new
        changeset is committed with the change.

        keyword arguments:

        local: whether to store tag in non-version-controlled file
        (default False)

        message: commit message to use if committing

        user: name of user to use if committing

        date: date tuple to use if committing'''

        # reject names containing any disallowed character
        for c in self.tag_disallowed:
            if c in name:
                raise util.Abort(_('%r cannot be used in a tag name') % c)

        # give pretag hooks a chance to veto
        self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)

        if local:
            # local tags live in .hg/localtags and are not revisioned
            self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
            self.hook('tag', node=hex(node), tag=name, local=local)
            return

        # a global tag is committed to .hgtags; refuse if the working
        # copy of .hgtags already has uncommitted changes
        for x in self.status()[:5]:
            if '.hgtags' in x:
                raise util.Abort(_('working copy of .hgtags is changed '
                                   '(please commit .hgtags manually)'))

        self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name))
        if self.dirstate.state('.hgtags') == '?':
            self.add(['.hgtags'])

        self.commit(['.hgtags'], message, user, date)
        self.hook('tag', node=hex(node), tag=name, local=local)
217
217
218 def tags(self):
218 def tags(self):
219 '''return a mapping of tag to node'''
219 '''return a mapping of tag to node'''
220 if not self.tagscache:
220 if not self.tagscache:
221 self.tagscache = {}
221 self.tagscache = {}
222
222
223 def parsetag(line, context):
223 def parsetag(line, context):
224 if not line:
224 if not line:
225 return
225 return
226 s = l.split(" ", 1)
226 s = l.split(" ", 1)
227 if len(s) != 2:
227 if len(s) != 2:
228 self.ui.warn(_("%s: cannot parse entry\n") % context)
228 self.ui.warn(_("%s: cannot parse entry\n") % context)
229 return
229 return
230 node, key = s
230 node, key = s
231 key = key.strip()
231 key = key.strip()
232 try:
232 try:
233 bin_n = bin(node)
233 bin_n = bin(node)
234 except TypeError:
234 except TypeError:
235 self.ui.warn(_("%s: node '%s' is not well formed\n") %
235 self.ui.warn(_("%s: node '%s' is not well formed\n") %
236 (context, node))
236 (context, node))
237 return
237 return
238 if bin_n not in self.changelog.nodemap:
238 if bin_n not in self.changelog.nodemap:
239 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
239 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
240 (context, key))
240 (context, key))
241 return
241 return
242 self.tagscache[key] = bin_n
242 self.tagscache[key] = bin_n
243
243
244 # read the tags file from each head, ending with the tip,
244 # read the tags file from each head, ending with the tip,
245 # and add each tag found to the map, with "newer" ones
245 # and add each tag found to the map, with "newer" ones
246 # taking precedence
246 # taking precedence
247 heads = self.heads()
247 heads = self.heads()
248 heads.reverse()
248 heads.reverse()
249 seen = {}
249 seen = {}
250 for node in heads:
250 for node in heads:
251 f = self.filectx('.hgtags', node)
251 f = self.filectx('.hgtags', node)
252 if not f or f.filerev() in seen: continue
252 if not f or f.filerev() in seen: continue
253 seen[f.filerev()] = 1
253 seen[f.filerev()] = 1
254 count = 0
254 count = 0
255 for l in f.data().splitlines():
255 for l in f.data().splitlines():
256 count += 1
256 count += 1
257 parsetag(l, _("%s, line %d") % (str(f), count))
257 parsetag(l, _("%s, line %d") % (str(f), count))
258
258
259 try:
259 try:
260 f = self.opener("localtags")
260 f = self.opener("localtags")
261 count = 0
261 count = 0
262 for l in f:
262 for l in f:
263 count += 1
263 count += 1
264 parsetag(l, _("localtags, line %d") % count)
264 parsetag(l, _("localtags, line %d") % count)
265 except IOError:
265 except IOError:
266 pass
266 pass
267
267
268 self.tagscache['tip'] = self.changelog.tip()
268 self.tagscache['tip'] = self.changelog.tip()
269
269
270 return self.tagscache
270 return self.tagscache
271
271
272 def tagslist(self):
272 def tagslist(self):
273 '''return a list of tags ordered by revision'''
273 '''return a list of tags ordered by revision'''
274 l = []
274 l = []
275 for t, n in self.tags().items():
275 for t, n in self.tags().items():
276 try:
276 try:
277 r = self.changelog.rev(n)
277 r = self.changelog.rev(n)
278 except:
278 except:
279 r = -2 # sort to the beginning of the list if unknown
279 r = -2 # sort to the beginning of the list if unknown
280 l.append((r, t, n))
280 l.append((r, t, n))
281 l.sort()
281 l.sort()
282 return [(t, n) for r, t, n in l]
282 return [(t, n) for r, t, n in l]
283
283
284 def nodetags(self, node):
284 def nodetags(self, node):
285 '''return the tags associated with a node'''
285 '''return the tags associated with a node'''
286 if not self.nodetagscache:
286 if not self.nodetagscache:
287 self.nodetagscache = {}
287 self.nodetagscache = {}
288 for t, n in self.tags().items():
288 for t, n in self.tags().items():
289 self.nodetagscache.setdefault(n, []).append(t)
289 self.nodetagscache.setdefault(n, []).append(t)
290 return self.nodetagscache.get(node, [])
290 return self.nodetagscache.get(node, [])
291
291
    def branchtags(self):
        """Return a map of branch name -> most recent node on the branch.

        The map is cached in self.branchcache and kept in sync with the
        on-disk branches.cache file.
        """
        if self.branchcache != None:
            return self.branchcache

        # must assign an empty dict BEFORE walking revisions:
        # _updatebranchcache creates changectx objects which may call
        # back into branchtags
        self.branchcache = {} # avoid recursion in changectx

        partial, last, lrev = self._readbranchcache()

        # bring the cached data up to date with the current tip and
        # persist the refreshed cache
        tiprev = self.changelog.count() - 1
        if lrev != tiprev:
            self._updatebranchcache(partial, lrev+1, tiprev+1)
            self._writebranchcache(partial, self.changelog.tip(), tiprev)

        self.branchcache = partial
        return self.branchcache
307
307
    def _readbranchcache(self):
        """Read .hg/branches.cache from disk.

        Returns (partial, last, lrev): partial maps branch label -> node;
        last/lrev are the tip node and revision the cache was valid for.
        On a missing or stale cache, returns ({}, nullid, -1) so the
        caller rebuilds from scratch.
        """
        partial = {}
        try:
            f = self.opener("branches.cache")
            # first line records "<hex tip node> <tip rev>" of the state
            # the rest of the file describes
            last, lrev = f.readline().rstrip().split(" ", 1)
            last, lrev = bin(last), int(lrev)
            if (lrev < self.changelog.count() and
                self.changelog.node(lrev) == last): # sanity check
                # remaining lines: "<hex node> <branch label>"
                for l in f:
                    node, label = l.rstrip().split(" ", 1)
                    partial[label] = bin(node)
            else: # invalidate the cache
                last, lrev = nullid, -1
            f.close()
        except IOError:
            last, lrev = nullid, -1
        return partial, last, lrev
325
325
326 def _writebranchcache(self, branches, tip, tiprev):
326 def _writebranchcache(self, branches, tip, tiprev):
327 try:
327 try:
328 f = self.opener("branches.cache", "w")
328 f = self.opener("branches.cache", "w")
329 f.write("%s %s\n" % (hex(tip), tiprev))
329 f.write("%s %s\n" % (hex(tip), tiprev))
330 for label, node in branches.iteritems():
330 for label, node in branches.iteritems():
331 f.write("%s %s\n" % (hex(node), label))
331 f.write("%s %s\n" % (hex(node), label))
332 except IOError:
332 except IOError:
333 pass
333 pass
334
334
335 def _updatebranchcache(self, partial, start, end):
335 def _updatebranchcache(self, partial, start, end):
336 for r in xrange(start, end):
336 for r in xrange(start, end):
337 c = self.changectx(r)
337 c = self.changectx(r)
338 b = c.branch()
338 b = c.branch()
339 if b:
339 if b:
340 partial[b] = c.node()
340 partial[b] = c.node()
341
341
342 def lookup(self, key):
342 def lookup(self, key):
343 if key == '.':
343 if key == '.':
344 key = self.dirstate.parents()[0]
344 key = self.dirstate.parents()[0]
345 if key == nullid:
345 if key == nullid:
346 raise repo.RepoError(_("no revision checked out"))
346 raise repo.RepoError(_("no revision checked out"))
347 n = self.changelog._match(key)
347 n = self.changelog._match(key)
348 if n:
348 if n:
349 return n
349 return n
350 if key in self.tags():
350 if key in self.tags():
351 return self.tags()[key]
351 return self.tags()[key]
352 if key in self.branchtags():
352 if key in self.branchtags():
353 return self.branchtags()[key]
353 return self.branchtags()[key]
354 n = self.changelog._partialmatch(key)
354 n = self.changelog._partialmatch(key)
355 if n:
355 if n:
356 return n
356 return n
357 raise repo.RepoError(_("unknown revision '%s'") % key)
357 raise repo.RepoError(_("unknown revision '%s'") % key)
358
358
359 def dev(self):
359 def dev(self):
360 return os.lstat(self.path).st_dev
360 return os.lstat(self.path).st_dev
361
361
    def local(self):
        # True: this repository lives on the local filesystem
        # (presumably remote repository proxies return False — the
        # other repository classes are not visible in this file)
        return True
364
364
    def join(self, f):
        """Return the path of f inside the .hg directory."""
        return os.path.join(self.path, f)
367
367
    def sjoin(self, f):
        """Return the path of f inside the store.

        Currently identical to join() because the store is the .hg
        directory itself; callers use sjoin for store files so the two
        locations can diverge later.
        """
        return os.path.join(self.path, f)
370
370
    def wjoin(self, f):
        """Return the path of f inside the working directory."""
        return os.path.join(self.root, f)
373
373
374 def file(self, f):
374 def file(self, f):
375 if f[0] == '/':
375 if f[0] == '/':
376 f = f[1:]
376 f = f[1:]
377 return filelog.filelog(self.sopener, f, self.revlogversion)
377 return filelog.filelog(self.sopener, f, self.revlogversion)
378
378
    def changectx(self, changeid=None):
        # changeid may be a revision, node or tag; the semantics of
        # changeid=None are defined by context.changectx — see context.py
        return context.changectx(self, changeid)
381
381
    def workingctx(self):
        """Return a context object for the working directory."""
        return context.workingctx(self)
384
384
385 def parents(self, changeid=None):
385 def parents(self, changeid=None):
386 '''
386 '''
387 get list of changectxs for parents of changeid or working directory
387 get list of changectxs for parents of changeid or working directory
388 '''
388 '''
389 if changeid is None:
389 if changeid is None:
390 pl = self.dirstate.parents()
390 pl = self.dirstate.parents()
391 else:
391 else:
392 n = self.changelog.lookup(changeid)
392 n = self.changelog.lookup(changeid)
393 pl = self.changelog.parents(n)
393 pl = self.changelog.parents(n)
394 if pl[1] == nullid:
394 if pl[1] == nullid:
395 return [self.changectx(pl[0])]
395 return [self.changectx(pl[0])]
396 return [self.changectx(pl[0]), self.changectx(pl[1])]
396 return [self.changectx(pl[0]), self.changectx(pl[1])]
397
397
    def filectx(self, path, changeid=None, fileid=None):
        """Return a filectx for path.

        changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)
402
402
    def getcwd(self):
        # delegate to dirstate; see dirstate.getcwd for exact semantics
        return self.dirstate.getcwd()
405
405
    def wfile(self, f, mode='r'):
        """Open file f from the working directory in the given mode."""
        return self.wopener(f, mode)
408
408
409 def wread(self, filename):
409 def wread(self, filename):
410 if self.encodepats == None:
410 if self.encodepats == None:
411 l = []
411 l = []
412 for pat, cmd in self.ui.configitems("encode"):
412 for pat, cmd in self.ui.configitems("encode"):
413 mf = util.matcher(self.root, "", [pat], [], [])[1]
413 mf = util.matcher(self.root, "", [pat], [], [])[1]
414 l.append((mf, cmd))
414 l.append((mf, cmd))
415 self.encodepats = l
415 self.encodepats = l
416
416
417 data = self.wopener(filename, 'r').read()
417 data = self.wopener(filename, 'r').read()
418
418
419 for mf, cmd in self.encodepats:
419 for mf, cmd in self.encodepats:
420 if mf(filename):
420 if mf(filename):
421 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
421 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
422 data = util.filter(data, cmd)
422 data = util.filter(data, cmd)
423 break
423 break
424
424
425 return data
425 return data
426
426
427 def wwrite(self, filename, data, fd=None):
427 def wwrite(self, filename, data, fd=None):
428 if self.decodepats == None:
428 if self.decodepats == None:
429 l = []
429 l = []
430 for pat, cmd in self.ui.configitems("decode"):
430 for pat, cmd in self.ui.configitems("decode"):
431 mf = util.matcher(self.root, "", [pat], [], [])[1]
431 mf = util.matcher(self.root, "", [pat], [], [])[1]
432 l.append((mf, cmd))
432 l.append((mf, cmd))
433 self.decodepats = l
433 self.decodepats = l
434
434
435 for mf, cmd in self.decodepats:
435 for mf, cmd in self.decodepats:
436 if mf(filename):
436 if mf(filename):
437 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
437 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
438 data = util.filter(data, cmd)
438 data = util.filter(data, cmd)
439 break
439 break
440
440
441 if fd:
441 if fd:
442 return fd.write(data)
442 return fd.write(data)
443 return self.wopener(filename, 'w').write(data)
443 return self.wopener(filename, 'w').write(data)
444
444
    def transaction(self):
        """Start a transaction, or nest into the running one.

        Returns the transaction handle; the dirstate is journalled first
        so rollback can restore it.
        """
        tr = self.transhandle
        if tr != None and tr.running():
            # a transaction is already active: hand out a nested handle
            return tr.nest()

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            # no dirstate yet (e.g. a freshly created repository)
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)

        # note: the store journal goes through sopener/sjoin, while the
        # dirstate journal above goes through the plain opener
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(self.path))
        self.transhandle = tr
        return tr
462
462
    def recover(self):
        """Roll back an interrupted transaction, if one is present.

        Returns True when a journal was found and rolled back, False
        otherwise.
        """
        l = self.lock()
        if os.path.exists(self.sjoin("journal")):
            self.ui.status(_("rolling back interrupted transaction\n"))
            transaction.rollback(self.sopener, self.sjoin("journal"))
            # refresh in-memory state after the on-disk rollback
            self.reload()
            return True
        else:
            self.ui.warn(_("no interrupted transaction available\n"))
            return False
473
473
    def rollback(self, wlock=None):
        """Undo the last committed transaction via the 'undo' journal.

        Also restores the saved dirstate.  An already-held wlock may be
        passed in; otherwise one is acquired here.
        """
        if not wlock:
            wlock = self.wlock()
        l = self.lock()
        if os.path.exists(self.sjoin("undo")):
            self.ui.status(_("rolling back last transaction\n"))
            transaction.rollback(self.sopener, self.sjoin("undo"))
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            # refresh both store and working-dir in-memory state
            self.reload()
            self.wreload()
        else:
            self.ui.warn(_("no rollback information available\n"))
486
486
    def wreload(self):
        # re-read working-directory state (dirstate) from disk
        self.dirstate.read()
489
489
    def reload(self):
        # re-read revision data from disk and drop tag caches derived
        # from it
        self.changelog.load()
        self.manifest.load()
        self.tagscache = None
        self.nodetagscache = None
        # NOTE(review): branchcache is not invalidated here — confirm
        # whether that is intentional
495
495
    def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
                desc=None):
        """Acquire the lock file at lockname.

        A non-blocking attempt is made first; when the lock is held and
        wait is true, a second attempt with the configured ui.timeout
        (default 600 seconds) is made.  releasefn is invoked when the
        lock is released, acquirefn immediately after acquisition.
        Returns the lock object.
        """
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %s\n") %
                         (desc, inst.args[0]))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
511
511
    def lock(self, wait=1):
        """Acquire the repository (store) lock; reload() runs on acquire."""
        return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
                            desc=_('repository %s') % self.origroot)
515
515
    def wlock(self, wait=1):
        """Acquire the working-directory lock.

        The dirstate is written back on release and re-read on acquire.
        """
        return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
                            self.wreload,
                            desc=_('working directory of %s') % self.origroot)
520
520
    def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
        """
        commit an individual file as part of a larger transaction

        fn is appended to changelist only when a new filelog entry is
        actually created; the new (or reused) file node is returned.
        """

        t = self.wread(fn)
        fl = self.file(fn)
        fp1 = manifest1.get(fn, nullid)  # file node in first parent
        fp2 = manifest2.get(fn, nullid)  # file node in second parent (merge)

        meta = {}
        cp = self.dirstate.copied(fn)
        if cp:
            # the file is a copy/rename: record the source path and the
            # source file revision in the filelog metadata, and drop the
            # parent that is superseded by the copy information
            meta["copy"] = cp
            if not manifest2: # not a branch merge
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
                fp2 = nullid
            elif fp2 != nullid: # copied on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            else: # copied on local side, reversed
                meta["copyrev"] = hex(manifest2.get(cp))
                fp2 = nullid
            self.ui.debug(_(" %s: copy %s:%s\n") %
                          (fn, cp, meta["copyrev"]))
            fp1 = nullid
        elif fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = fl.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and not fl.cmp(fp1, t):
            return fp1

        changelist.append(fn)
        return fl.add(t, meta, transaction, linkrev, fp1, fp2)
560
560
561 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
561 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
562 orig_parent = self.dirstate.parents()[0] or nullid
562 orig_parent = self.dirstate.parents()[0] or nullid
563 p1 = p1 or self.dirstate.parents()[0] or nullid
563 p1 = p1 or self.dirstate.parents()[0] or nullid
564 p2 = p2 or self.dirstate.parents()[1] or nullid
564 p2 = p2 or self.dirstate.parents()[1] or nullid
565 c1 = self.changelog.read(p1)
565 c1 = self.changelog.read(p1)
566 c2 = self.changelog.read(p2)
566 c2 = self.changelog.read(p2)
567 m1 = self.manifest.read(c1[0]).copy()
567 m1 = self.manifest.read(c1[0]).copy()
568 m2 = self.manifest.read(c2[0])
568 m2 = self.manifest.read(c2[0])
569 changed = []
569 changed = []
570 removed = []
570 removed = []
571
571
572 if orig_parent == p1:
572 if orig_parent == p1:
573 update_dirstate = 1
573 update_dirstate = 1
574 else:
574 else:
575 update_dirstate = 0
575 update_dirstate = 0
576
576
577 if not wlock:
577 if not wlock:
578 wlock = self.wlock()
578 wlock = self.wlock()
579 l = self.lock()
579 l = self.lock()
580 tr = self.transaction()
580 tr = self.transaction()
581 linkrev = self.changelog.count()
581 linkrev = self.changelog.count()
582 for f in files:
582 for f in files:
583 try:
583 try:
584 m1[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
584 m1[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
585 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
585 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
586 except IOError:
586 except IOError:
587 try:
587 try:
588 del m1[f]
588 del m1[f]
589 if update_dirstate:
589 if update_dirstate:
590 self.dirstate.forget([f])
590 self.dirstate.forget([f])
591 removed.append(f)
591 removed.append(f)
592 except:
592 except:
593 # deleted from p2?
593 # deleted from p2?
594 pass
594 pass
595
595
596 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
596 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
597 user = user or self.ui.username()
597 user = user or self.ui.username()
598 n = self.changelog.add(mnode, changed + removed, text,
598 n = self.changelog.add(mnode, changed + removed, text,
599 tr, p1, p2, user, date)
599 tr, p1, p2, user, date)
600 tr.close()
600 tr.close()
601 if update_dirstate:
601 if update_dirstate:
602 self.dirstate.setparents(n, nullid)
602 self.dirstate.setparents(n, nullid)
603
603
    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, lock=None, wlock=None,
               force_editor=False):
        """Commit changes and return the new changeset node, or None
        when nothing changed or the user supplied an empty message.

        files - commit only these files (otherwise everything status
                reports as modified/added/removed under match)
        text  - commit message; an editor is spawned when it is empty
                or force_editor is set
        force - allow a commit even when nothing changed
        lock/wlock - pre-acquired locks; taken here when not supplied
        """
        commit = []
        remove = []
        changed = []

        if files:
            # explicit file list: classify each by dirstate state
            for f in files:
                s = self.dirstate.state(f)
                if s in 'nmai':
                    commit.append(f)
                elif s == 'r':
                    remove.append(f)
                else:
                    self.ui.warn(_("%s not tracked!\n") % f)
        else:
            modified, added, removed, deleted, unknown = self.status(match=match)[:5]
            commit = modified + added
            remove = removed

        p1, p2 = self.dirstate.parents()
        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0]).copy()
        m2 = self.manifest.read(c2[0])

        branchname = self.workingctx().branch()
        oldname = c1[5].get("branch", "")

        # a branch name change alone is enough to justify a commit
        if not commit and not remove and not force and p2 == nullid and \
           branchname == oldname:
            self.ui.status(_("nothing changed\n"))
            return None

        xp1 = hex(p1)
        if p2 == nullid: xp2 = ''
        else: xp2 = hex(p2)

        self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

        if not wlock:
            wlock = self.wlock()
        if not lock:
            lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        for f in commit:
            self.ui.note(f + "\n")
            try:
                new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
                m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
            except IOError:
                self.ui.warn(_("trouble committing %s!\n") % f)
                raise

        # update manifest
        m1.update(new)
        for f in remove:
            if f in m1:
                del m1[f]
        mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))

        # add changeset
        new = new.keys()
        new.sort()

        user = user or self.ui.username()
        if not text or force_editor:
            edittext = []
            if text:
                edittext.append(text)
            edittext.append("")
            if p2 != nullid:
                edittext.append("HG: branch merge")
            edittext.extend(["HG: changed %s" % f for f in changed])
            edittext.extend(["HG: removed %s" % f for f in remove])
            if not changed and not remove:
                edittext.append("HG: no files changed")
            edittext.append("")
            # run editor in the repository root
            olddir = os.getcwd()
            os.chdir(self.root)
            text = self.ui.edit("\n".join(edittext), user)
            os.chdir(olddir)

        # strip trailing whitespace and leading blank lines; an empty
        # result aborts the commit
        lines = [line.rstrip() for line in text.rstrip().splitlines()]
        while lines and not lines[0]:
            del lines[0]
        if not lines:
            return None
        text = '\n'.join(lines)
        extra = {}
        if branchname:
            extra["branch"] = branchname
        n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
                               user, date, extra)
        self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                  parent2=xp2)
        tr.close()

        self.dirstate.setparents(n)
        self.dirstate.update(new, "n")
        self.dirstate.forget(remove)

        self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
        return n
715
715
716 def walk(self, node=None, files=[], match=util.always, badmatch=None):
716 def walk(self, node=None, files=[], match=util.always, badmatch=None):
717 if node:
717 if node:
718 fdict = dict.fromkeys(files)
718 fdict = dict.fromkeys(files)
719 for fn in self.manifest.read(self.changelog.read(node)[0]):
719 for fn in self.manifest.read(self.changelog.read(node)[0]):
720 for ffn in fdict:
720 for ffn in fdict:
721 # match if the file is the exact name or a directory
721 # match if the file is the exact name or a directory
722 if ffn == fn or fn.startswith("%s/" % ffn):
722 if ffn == fn or fn.startswith("%s/" % ffn):
723 del fdict[ffn]
723 del fdict[ffn]
724 break
724 break
725 if match(fn):
725 if match(fn):
726 yield 'm', fn
726 yield 'm', fn
727 for fn in fdict:
727 for fn in fdict:
728 if badmatch and badmatch(fn):
728 if badmatch and badmatch(fn):
729 if match(fn):
729 if match(fn):
730 yield 'b', fn
730 yield 'b', fn
731 else:
731 else:
732 self.ui.warn(_('%s: No such file in rev %s\n') % (
732 self.ui.warn(_('%s: No such file in rev %s\n') % (
733 util.pathto(self.getcwd(), fn), short(node)))
733 util.pathto(self.getcwd(), fn), short(node)))
734 else:
734 else:
735 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
735 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
736 yield src, fn
736 yield src, fn
737
737
    def status(self, node1=None, node2=None, files=[], match=util.always,
               wlock=None, list_ignored=False, list_clean=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns a 7-tuple of sorted file lists:
        (modified, added, removed, deleted, unknown, ignored, clean);
        ignored and clean are only populated when list_ignored /
        list_clean are true.
        """

        def fcmp(fn, mf):
            # true when the working copy of fn differs from mf's version
            t1 = self.wread(fn)
            return self.file(fn).cmp(mf.get(fn, nullid), t1)

        def mfmatches(node):
            # manifest of node restricted to files accepted by match
            change = self.changelog.read(node)
            mf = self.manifest.read(change[0]).copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        modified, added, removed, deleted, unknown = [], [], [], [], []
        ignored, clean = [], []

        compareworking = False
        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
            compareworking = True

        if not compareworking:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        # are we comparing the working directory?
        if not node2:
            if not wlock:
                try:
                    wlock = self.wlock(wait=0)
                except lock.LockException:
                    # couldn't lock: proceed, but skip dirstate fixups
                    wlock = None
            (lookup, modified, added, removed, deleted, unknown,
             ignored, clean) = self.dirstate.status(files, match,
                                                    list_ignored, list_clean)

            # are we comparing working dir against its parent?
            if compareworking:
                if lookup:
                    # do a full compare of any files that might have changed
                    mf2 = mfmatches(self.dirstate.parents()[0])
                    for f in lookup:
                        if fcmp(f, mf2):
                            modified.append(f)
                        else:
                            clean.append(f)
                            if wlock is not None:
                                # record the file as clean so the next
                                # status call can skip the content check
                                self.dirstate.update([f], "n")
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                # XXX: create it in dirstate.py ?
                mf2 = mfmatches(self.dirstate.parents()[0])
                for f in lookup + modified + added:
                    mf2[f] = ""
                    mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
                for f in removed:
                    if f in mf2:
                        del mf2[f]
        else:
            # we are comparing two revisions
            mf2 = mfmatches(node2)

        if not compareworking:
            # flush lists from dirstate before comparing manifests
            modified, added, clean = [], [], []

            # make sure to sort the files so we talk to the disk in a
            # reasonable order
            mf2keys = mf2.keys()
            mf2keys.sort()
            for fn in mf2keys:
                if mf1.has_key(fn):
                    # "" in mf2 marks a working-dir file whose content
                    # must be compared rather than its node
                    if mf1.flags(fn) != mf2.flags(fn) or \
                       (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
                        modified.append(fn)
                    elif list_clean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)

            # anything left in mf1 is absent from mf2, hence removed
            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored, clean:
            l.sort()
        return (modified, added, removed, deleted, unknown, ignored, clean)
834
834
835 def add(self, list, wlock=None):
835 def add(self, list, wlock=None):
836 if not wlock:
836 if not wlock:
837 wlock = self.wlock()
837 wlock = self.wlock()
838 for f in list:
838 for f in list:
839 p = self.wjoin(f)
839 p = self.wjoin(f)
840 if not os.path.exists(p):
840 if not os.path.exists(p):
841 self.ui.warn(_("%s does not exist!\n") % f)
841 self.ui.warn(_("%s does not exist!\n") % f)
842 elif not os.path.isfile(p):
842 elif not os.path.isfile(p):
843 self.ui.warn(_("%s not added: only files supported currently\n")
843 self.ui.warn(_("%s not added: only files supported currently\n")
844 % f)
844 % f)
845 elif self.dirstate.state(f) in 'an':
845 elif self.dirstate.state(f) in 'an':
846 self.ui.warn(_("%s already tracked!\n") % f)
846 self.ui.warn(_("%s already tracked!\n") % f)
847 else:
847 else:
848 self.dirstate.update([f], "a")
848 self.dirstate.update([f], "a")
849
849
850 def forget(self, list, wlock=None):
850 def forget(self, list, wlock=None):
851 if not wlock:
851 if not wlock:
852 wlock = self.wlock()
852 wlock = self.wlock()
853 for f in list:
853 for f in list:
854 if self.dirstate.state(f) not in 'ai':
854 if self.dirstate.state(f) not in 'ai':
855 self.ui.warn(_("%s not added!\n") % f)
855 self.ui.warn(_("%s not added!\n") % f)
856 else:
856 else:
857 self.dirstate.forget([f])
857 self.dirstate.forget([f])
858
858
859 def remove(self, list, unlink=False, wlock=None):
859 def remove(self, list, unlink=False, wlock=None):
860 if unlink:
860 if unlink:
861 for f in list:
861 for f in list:
862 try:
862 try:
863 util.unlink(self.wjoin(f))
863 util.unlink(self.wjoin(f))
864 except OSError, inst:
864 except OSError, inst:
865 if inst.errno != errno.ENOENT:
865 if inst.errno != errno.ENOENT:
866 raise
866 raise
867 if not wlock:
867 if not wlock:
868 wlock = self.wlock()
868 wlock = self.wlock()
869 for f in list:
869 for f in list:
870 p = self.wjoin(f)
870 p = self.wjoin(f)
871 if os.path.exists(p):
871 if os.path.exists(p):
872 self.ui.warn(_("%s still exists!\n") % f)
872 self.ui.warn(_("%s still exists!\n") % f)
873 elif self.dirstate.state(f) == 'a':
873 elif self.dirstate.state(f) == 'a':
874 self.dirstate.forget([f])
874 self.dirstate.forget([f])
875 elif f not in self.dirstate:
875 elif f not in self.dirstate:
876 self.ui.warn(_("%s not tracked!\n") % f)
876 self.ui.warn(_("%s not tracked!\n") % f)
877 else:
877 else:
878 self.dirstate.update([f], "r")
878 self.dirstate.update([f], "r")
879
879
880 def undelete(self, list, wlock=None):
880 def undelete(self, list, wlock=None):
881 p = self.dirstate.parents()[0]
881 p = self.dirstate.parents()[0]
882 mn = self.changelog.read(p)[0]
882 mn = self.changelog.read(p)[0]
883 m = self.manifest.read(mn)
883 m = self.manifest.read(mn)
884 if not wlock:
884 if not wlock:
885 wlock = self.wlock()
885 wlock = self.wlock()
886 for f in list:
886 for f in list:
887 if self.dirstate.state(f) not in "r":
887 if self.dirstate.state(f) not in "r":
888 self.ui.warn("%s not removed!\n" % f)
888 self.ui.warn("%s not removed!\n" % f)
889 else:
889 else:
890 t = self.file(f).read(m[f])
890 t = self.file(f).read(m[f])
891 self.wwrite(f, t)
891 self.wwrite(f, t)
892 util.set_exec(self.wjoin(f), m.execf(f))
892 util.set_exec(self.wjoin(f), m.execf(f))
893 self.dirstate.update([f], "n")
893 self.dirstate.update([f], "n")
894
894
895 def copy(self, source, dest, wlock=None):
895 def copy(self, source, dest, wlock=None):
896 p = self.wjoin(dest)
896 p = self.wjoin(dest)
897 if not os.path.exists(p):
897 if not os.path.exists(p):
898 self.ui.warn(_("%s does not exist!\n") % dest)
898 self.ui.warn(_("%s does not exist!\n") % dest)
899 elif not os.path.isfile(p):
899 elif not os.path.isfile(p):
900 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
900 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
901 else:
901 else:
902 if not wlock:
902 if not wlock:
903 wlock = self.wlock()
903 wlock = self.wlock()
904 if self.dirstate.state(dest) == '?':
904 if self.dirstate.state(dest) == '?':
905 self.dirstate.update([dest], "a")
905 self.dirstate.update([dest], "a")
906 self.dirstate.copy(source, dest)
906 self.dirstate.copy(source, dest)
907
907
908 def heads(self, start=None):
908 def heads(self, start=None):
909 heads = self.changelog.heads(start)
909 heads = self.changelog.heads(start)
910 # sort the output in rev descending order
910 # sort the output in rev descending order
911 heads = [(-self.changelog.rev(h), h) for h in heads]
911 heads = [(-self.changelog.rev(h), h) for h in heads]
912 heads.sort()
912 heads.sort()
913 return [n for (r, n) in heads]
913 return [n for (r, n) in heads]
914
914
    # branchlookup returns a dict giving a list of branches for
    # each head. A branch is defined as the tag of a node or
    # the branch of the node's parents. If a node has multiple
    # branch tags, tags are eliminated if they are visible from other
    # branch tags.
    #
    # So, for this graph:  a->b->c->d->e
    #                       \         /
    #                        aa -----/
    # a has tag 2.6.12
    # d has tag 2.6.13
    # e would have branch tags for 2.6.12 and 2.6.13. Because the node
    # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
    # from the list.
    #
    # It is possible that more than one head will have the same branch tag.
    # callers need to check the result for multiple heads under the same
    # branch tag if that is a problem for them (ie checkout of a specific
    # branch).
    #
    # passing in a specific branch will limit the depth of the search
    # through the parents. It won't limit the branches returned in the
    # result though.
    def branchlookup(self, heads=None, branch=None):
        """Map each head node to the list of branch tags visible from
        it, with tags shadowed by other visible tags eliminated (see
        the comment block above for the full semantics)."""
        if not heads:
            heads = self.heads()
        headt = [ h for h in heads ]
        chlog = self.changelog
        branches = {}
        merges = []
        seenmerge = {}

        # traverse the tree once for each head, recording in the branches
        # dict which tags are visible from this head. The branches
        # dict also records which tags are visible from each tag
        # while we traverse.
        while headt or merges:
            if merges:
                n, found = merges.pop()
                visit = [n]
            else:
                h = headt.pop()
                visit = [h]
                found = [h]
                seen = {}
            while visit:
                n = visit.pop()
                if n in seen:
                    continue
                pp = chlog.parents(n)
                tags = self.nodetags(n)
                if tags:
                    for x in tags:
                        if x == 'tip':
                            continue
                        for f in found:
                            branches.setdefault(f, {})[n] = 1
                        branches.setdefault(n, {})[n] = 1
                        break
                    if n not in found:
                        found.append(n)
                    if branch in tags:
                        continue
                seen[n] = 1
                if pp[1] != nullid and n not in seenmerge:
                    # remember the second parent so the merge branch is
                    # traversed later with the tags found so far
                    merges.append((pp[1], [x for x in found]))
                    seenmerge[n] = 1
                if pp[0] != nullid:
                    visit.append(pp[0])
        # traverse the branches dict, eliminating branch tags from each
        # head that are visible from another branch tag for that head.
        out = {}
        viscache = {}
        for h in heads:
            def visible(node):
                # set of branch-tag nodes reachable from node (memoized)
                if node in viscache:
                    return viscache[node]
                ret = {}
                visit = [node]
                while visit:
                    x = visit.pop()
                    if x in viscache:
                        ret.update(viscache[x])
                    elif x not in ret:
                        ret[x] = 1
                        if x in branches:
                            visit[len(visit):] = branches[x].keys()
                viscache[node] = ret
                return ret
            if h not in branches:
                continue
            # O(n^2), but somewhat limited. This only searches the
            # tags visible from a specific head, not all the tags in the
            # whole repo.
            for b in branches[h]:
                vis = False
                for bb in branches[h].keys():
                    if b != bb:
                        if b in visible(bb):
                            vis = True
                            break
                if not vis:
                    l = out.setdefault(h, [])
                    l[len(l):] = self.nodetags(b)
        return out
1020
1020
1021 def branches(self, nodes):
1021 def branches(self, nodes):
1022 if not nodes:
1022 if not nodes:
1023 nodes = [self.changelog.tip()]
1023 nodes = [self.changelog.tip()]
1024 b = []
1024 b = []
1025 for n in nodes:
1025 for n in nodes:
1026 t = n
1026 t = n
1027 while 1:
1027 while 1:
1028 p = self.changelog.parents(n)
1028 p = self.changelog.parents(n)
1029 if p[1] != nullid or p[0] == nullid:
1029 if p[1] != nullid or p[0] == nullid:
1030 b.append((t, n, p[0], p[1]))
1030 b.append((t, n, p[0], p[1]))
1031 break
1031 break
1032 n = p[0]
1032 n = p[0]
1033 return b
1033 return b
1034
1034
1035 def between(self, pairs):
1035 def between(self, pairs):
1036 r = []
1036 r = []
1037
1037
1038 for top, bottom in pairs:
1038 for top, bottom in pairs:
1039 n, l, i = top, [], 0
1039 n, l, i = top, [], 0
1040 f = 1
1040 f = 1
1041
1041
1042 while n != bottom:
1042 while n != bottom:
1043 p = self.changelog.parents(n)[0]
1043 p = self.changelog.parents(n)[0]
1044 if i == f:
1044 if i == f:
1045 l.append(n)
1045 l.append(n)
1046 f = f * 2
1046 f = f * 2
1047 n = p
1047 n = p
1048 i += 1
1048 i += 1
1049
1049
1050 r.append(l)
1050 r.append(l)
1051
1051
1052 return r
1052 return r
1053
1053
1054 def findincoming(self, remote, base=None, heads=None, force=False):
1054 def findincoming(self, remote, base=None, heads=None, force=False):
1055 """Return list of roots of the subsets of missing nodes from remote
1055 """Return list of roots of the subsets of missing nodes from remote
1056
1056
1057 If base dict is specified, assume that these nodes and their parents
1057 If base dict is specified, assume that these nodes and their parents
1058 exist on the remote side and that no child of a node of base exists
1058 exist on the remote side and that no child of a node of base exists
1059 in both remote and self.
1059 in both remote and self.
1060 Furthermore base will be updated to include the nodes that exists
1060 Furthermore base will be updated to include the nodes that exists
1061 in self and remote but no children exists in self and remote.
1061 in self and remote but no children exists in self and remote.
1062 If a list of heads is specified, return only nodes which are heads
1062 If a list of heads is specified, return only nodes which are heads
1063 or ancestors of these heads.
1063 or ancestors of these heads.
1064
1064
1065 All the ancestors of base are in self and in remote.
1065 All the ancestors of base are in self and in remote.
1066 All the descendants of the list returned are missing in self.
1066 All the descendants of the list returned are missing in self.
1067 (and so we know that the rest of the nodes are missing in remote, see
1067 (and so we know that the rest of the nodes are missing in remote, see
1068 outgoing)
1068 outgoing)
1069 """
1069 """
1070 m = self.changelog.nodemap
1070 m = self.changelog.nodemap
1071 search = []
1071 search = []
1072 fetch = {}
1072 fetch = {}
1073 seen = {}
1073 seen = {}
1074 seenbranch = {}
1074 seenbranch = {}
1075 if base == None:
1075 if base == None:
1076 base = {}
1076 base = {}
1077
1077
1078 if not heads:
1078 if not heads:
1079 heads = remote.heads()
1079 heads = remote.heads()
1080
1080
1081 if self.changelog.tip() == nullid:
1081 if self.changelog.tip() == nullid:
1082 base[nullid] = 1
1082 base[nullid] = 1
1083 if heads != [nullid]:
1083 if heads != [nullid]:
1084 return [nullid]
1084 return [nullid]
1085 return []
1085 return []
1086
1086
1087 # assume we're closer to the tip than the root
1087 # assume we're closer to the tip than the root
1088 # and start by examining the heads
1088 # and start by examining the heads
1089 self.ui.status(_("searching for changes\n"))
1089 self.ui.status(_("searching for changes\n"))
1090
1090
1091 unknown = []
1091 unknown = []
1092 for h in heads:
1092 for h in heads:
1093 if h not in m:
1093 if h not in m:
1094 unknown.append(h)
1094 unknown.append(h)
1095 else:
1095 else:
1096 base[h] = 1
1096 base[h] = 1
1097
1097
1098 if not unknown:
1098 if not unknown:
1099 return []
1099 return []
1100
1100
1101 req = dict.fromkeys(unknown)
1101 req = dict.fromkeys(unknown)
1102 reqcnt = 0
1102 reqcnt = 0
1103
1103
1104 # search through remote branches
1104 # search through remote branches
1105 # a 'branch' here is a linear segment of history, with four parts:
1105 # a 'branch' here is a linear segment of history, with four parts:
1106 # head, root, first parent, second parent
1106 # head, root, first parent, second parent
1107 # (a branch always has two parents (or none) by definition)
1107 # (a branch always has two parents (or none) by definition)
1108 unknown = remote.branches(unknown)
1108 unknown = remote.branches(unknown)
1109 while unknown:
1109 while unknown:
1110 r = []
1110 r = []
1111 while unknown:
1111 while unknown:
1112 n = unknown.pop(0)
1112 n = unknown.pop(0)
1113 if n[0] in seen:
1113 if n[0] in seen:
1114 continue
1114 continue
1115
1115
1116 self.ui.debug(_("examining %s:%s\n")
1116 self.ui.debug(_("examining %s:%s\n")
1117 % (short(n[0]), short(n[1])))
1117 % (short(n[0]), short(n[1])))
1118 if n[0] == nullid: # found the end of the branch
1118 if n[0] == nullid: # found the end of the branch
1119 pass
1119 pass
1120 elif n in seenbranch:
1120 elif n in seenbranch:
1121 self.ui.debug(_("branch already found\n"))
1121 self.ui.debug(_("branch already found\n"))
1122 continue
1122 continue
1123 elif n[1] and n[1] in m: # do we know the base?
1123 elif n[1] and n[1] in m: # do we know the base?
1124 self.ui.debug(_("found incomplete branch %s:%s\n")
1124 self.ui.debug(_("found incomplete branch %s:%s\n")
1125 % (short(n[0]), short(n[1])))
1125 % (short(n[0]), short(n[1])))
1126 search.append(n) # schedule branch range for scanning
1126 search.append(n) # schedule branch range for scanning
1127 seenbranch[n] = 1
1127 seenbranch[n] = 1
1128 else:
1128 else:
1129 if n[1] not in seen and n[1] not in fetch:
1129 if n[1] not in seen and n[1] not in fetch:
1130 if n[2] in m and n[3] in m:
1130 if n[2] in m and n[3] in m:
1131 self.ui.debug(_("found new changeset %s\n") %
1131 self.ui.debug(_("found new changeset %s\n") %
1132 short(n[1]))
1132 short(n[1]))
1133 fetch[n[1]] = 1 # earliest unknown
1133 fetch[n[1]] = 1 # earliest unknown
1134 for p in n[2:4]:
1134 for p in n[2:4]:
1135 if p in m:
1135 if p in m:
1136 base[p] = 1 # latest known
1136 base[p] = 1 # latest known
1137
1137
1138 for p in n[2:4]:
1138 for p in n[2:4]:
1139 if p not in req and p not in m:
1139 if p not in req and p not in m:
1140 r.append(p)
1140 r.append(p)
1141 req[p] = 1
1141 req[p] = 1
1142 seen[n[0]] = 1
1142 seen[n[0]] = 1
1143
1143
1144 if r:
1144 if r:
1145 reqcnt += 1
1145 reqcnt += 1
1146 self.ui.debug(_("request %d: %s\n") %
1146 self.ui.debug(_("request %d: %s\n") %
1147 (reqcnt, " ".join(map(short, r))))
1147 (reqcnt, " ".join(map(short, r))))
1148 for p in xrange(0, len(r), 10):
1148 for p in xrange(0, len(r), 10):
1149 for b in remote.branches(r[p:p+10]):
1149 for b in remote.branches(r[p:p+10]):
1150 self.ui.debug(_("received %s:%s\n") %
1150 self.ui.debug(_("received %s:%s\n") %
1151 (short(b[0]), short(b[1])))
1151 (short(b[0]), short(b[1])))
1152 unknown.append(b)
1152 unknown.append(b)
1153
1153
1154 # do binary search on the branches we found
1154 # do binary search on the branches we found
1155 while search:
1155 while search:
1156 n = search.pop(0)
1156 n = search.pop(0)
1157 reqcnt += 1
1157 reqcnt += 1
1158 l = remote.between([(n[0], n[1])])[0]
1158 l = remote.between([(n[0], n[1])])[0]
1159 l.append(n[1])
1159 l.append(n[1])
1160 p = n[0]
1160 p = n[0]
1161 f = 1
1161 f = 1
1162 for i in l:
1162 for i in l:
1163 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1163 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1164 if i in m:
1164 if i in m:
1165 if f <= 2:
1165 if f <= 2:
1166 self.ui.debug(_("found new branch changeset %s\n") %
1166 self.ui.debug(_("found new branch changeset %s\n") %
1167 short(p))
1167 short(p))
1168 fetch[p] = 1
1168 fetch[p] = 1
1169 base[i] = 1
1169 base[i] = 1
1170 else:
1170 else:
1171 self.ui.debug(_("narrowed branch search to %s:%s\n")
1171 self.ui.debug(_("narrowed branch search to %s:%s\n")
1172 % (short(p), short(i)))
1172 % (short(p), short(i)))
1173 search.append((p, i))
1173 search.append((p, i))
1174 break
1174 break
1175 p, f = i, f * 2
1175 p, f = i, f * 2
1176
1176
1177 # sanity check our fetch list
1177 # sanity check our fetch list
1178 for f in fetch.keys():
1178 for f in fetch.keys():
1179 if f in m:
1179 if f in m:
1180 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1180 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1181
1181
1182 if base.keys() == [nullid]:
1182 if base.keys() == [nullid]:
1183 if force:
1183 if force:
1184 self.ui.warn(_("warning: repository is unrelated\n"))
1184 self.ui.warn(_("warning: repository is unrelated\n"))
1185 else:
1185 else:
1186 raise util.Abort(_("repository is unrelated"))
1186 raise util.Abort(_("repository is unrelated"))
1187
1187
1188 self.ui.debug(_("found new changesets starting at ") +
1188 self.ui.debug(_("found new changesets starting at ") +
1189 " ".join([short(f) for f in fetch]) + "\n")
1189 " ".join([short(f) for f in fetch]) + "\n")
1190
1190
1191 self.ui.debug(_("%d total queries\n") % reqcnt)
1191 self.ui.debug(_("%d total queries\n") % reqcnt)
1192
1192
1193 return fetch.keys()
1193 return fetch.keys()
1194
1194
1195 def findoutgoing(self, remote, base=None, heads=None, force=False):
1195 def findoutgoing(self, remote, base=None, heads=None, force=False):
1196 """Return list of nodes that are roots of subsets not in remote
1196 """Return list of nodes that are roots of subsets not in remote
1197
1197
1198 If base dict is specified, assume that these nodes and their parents
1198 If base dict is specified, assume that these nodes and their parents
1199 exist on the remote side.
1199 exist on the remote side.
1200 If a list of heads is specified, return only nodes which are heads
1200 If a list of heads is specified, return only nodes which are heads
1201 or ancestors of these heads, and return a second element which
1201 or ancestors of these heads, and return a second element which
1202 contains all remote heads which get new children.
1202 contains all remote heads which get new children.
1203 """
1203 """
1204 if base == None:
1204 if base == None:
1205 base = {}
1205 base = {}
1206 self.findincoming(remote, base, heads, force=force)
1206 self.findincoming(remote, base, heads, force=force)
1207
1207
1208 self.ui.debug(_("common changesets up to ")
1208 self.ui.debug(_("common changesets up to ")
1209 + " ".join(map(short, base.keys())) + "\n")
1209 + " ".join(map(short, base.keys())) + "\n")
1210
1210
1211 remain = dict.fromkeys(self.changelog.nodemap)
1211 remain = dict.fromkeys(self.changelog.nodemap)
1212
1212
1213 # prune everything remote has from the tree
1213 # prune everything remote has from the tree
1214 del remain[nullid]
1214 del remain[nullid]
1215 remove = base.keys()
1215 remove = base.keys()
1216 while remove:
1216 while remove:
1217 n = remove.pop(0)
1217 n = remove.pop(0)
1218 if n in remain:
1218 if n in remain:
1219 del remain[n]
1219 del remain[n]
1220 for p in self.changelog.parents(n):
1220 for p in self.changelog.parents(n):
1221 remove.append(p)
1221 remove.append(p)
1222
1222
1223 # find every node whose parents have been pruned
1223 # find every node whose parents have been pruned
1224 subset = []
1224 subset = []
1225 # find every remote head that will get new children
1225 # find every remote head that will get new children
1226 updated_heads = {}
1226 updated_heads = {}
1227 for n in remain:
1227 for n in remain:
1228 p1, p2 = self.changelog.parents(n)
1228 p1, p2 = self.changelog.parents(n)
1229 if p1 not in remain and p2 not in remain:
1229 if p1 not in remain and p2 not in remain:
1230 subset.append(n)
1230 subset.append(n)
1231 if heads:
1231 if heads:
1232 if p1 in heads:
1232 if p1 in heads:
1233 updated_heads[p1] = True
1233 updated_heads[p1] = True
1234 if p2 in heads:
1234 if p2 in heads:
1235 updated_heads[p2] = True
1235 updated_heads[p2] = True
1236
1236
1237 # this is the set of all roots we have to push
1237 # this is the set of all roots we have to push
1238 if heads:
1238 if heads:
1239 return subset, updated_heads.keys()
1239 return subset, updated_heads.keys()
1240 else:
1240 else:
1241 return subset
1241 return subset
1242
1242
1243 def pull(self, remote, heads=None, force=False, lock=None):
1243 def pull(self, remote, heads=None, force=False, lock=None):
1244 mylock = False
1244 mylock = False
1245 if not lock:
1245 if not lock:
1246 lock = self.lock()
1246 lock = self.lock()
1247 mylock = True
1247 mylock = True
1248
1248
1249 try:
1249 try:
1250 fetch = self.findincoming(remote, force=force)
1250 fetch = self.findincoming(remote, force=force)
1251 if fetch == [nullid]:
1251 if fetch == [nullid]:
1252 self.ui.status(_("requesting all changes\n"))
1252 self.ui.status(_("requesting all changes\n"))
1253
1253
1254 if not fetch:
1254 if not fetch:
1255 self.ui.status(_("no changes found\n"))
1255 self.ui.status(_("no changes found\n"))
1256 return 0
1256 return 0
1257
1257
1258 if heads is None:
1258 if heads is None:
1259 cg = remote.changegroup(fetch, 'pull')
1259 cg = remote.changegroup(fetch, 'pull')
1260 else:
1260 else:
1261 if 'changegroupsubset' not in remote.capabilities:
1261 if 'changegroupsubset' not in remote.capabilities:
1262 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1262 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1263 cg = remote.changegroupsubset(fetch, heads, 'pull')
1263 cg = remote.changegroupsubset(fetch, heads, 'pull')
1264 return self.addchangegroup(cg, 'pull', remote.url())
1264 return self.addchangegroup(cg, 'pull', remote.url())
1265 finally:
1265 finally:
1266 if mylock:
1266 if mylock:
1267 lock.release()
1267 lock.release()
1268
1268
1269 def push(self, remote, force=False, revs=None):
1269 def push(self, remote, force=False, revs=None):
1270 # there are two ways to push to remote repo:
1270 # there are two ways to push to remote repo:
1271 #
1271 #
1272 # addchangegroup assumes local user can lock remote
1272 # addchangegroup assumes local user can lock remote
1273 # repo (local filesystem, old ssh servers).
1273 # repo (local filesystem, old ssh servers).
1274 #
1274 #
1275 # unbundle assumes local user cannot lock remote repo (new ssh
1275 # unbundle assumes local user cannot lock remote repo (new ssh
1276 # servers, http servers).
1276 # servers, http servers).
1277
1277
1278 if remote.capable('unbundle'):
1278 if remote.capable('unbundle'):
1279 return self.push_unbundle(remote, force, revs)
1279 return self.push_unbundle(remote, force, revs)
1280 return self.push_addchangegroup(remote, force, revs)
1280 return self.push_addchangegroup(remote, force, revs)
1281
1281
1282 def prepush(self, remote, force, revs):
1282 def prepush(self, remote, force, revs):
1283 base = {}
1283 base = {}
1284 remote_heads = remote.heads()
1284 remote_heads = remote.heads()
1285 inc = self.findincoming(remote, base, remote_heads, force=force)
1285 inc = self.findincoming(remote, base, remote_heads, force=force)
1286 if not force and inc:
1286 if not force and inc:
1287 self.ui.warn(_("abort: unsynced remote changes!\n"))
1287 self.ui.warn(_("abort: unsynced remote changes!\n"))
1288 self.ui.status(_("(did you forget to sync?"
1288 self.ui.status(_("(did you forget to sync?"
1289 " use push -f to force)\n"))
1289 " use push -f to force)\n"))
1290 return None, 1
1290 return None, 1
1291
1291
1292 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1292 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1293 if revs is not None:
1293 if revs is not None:
1294 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1294 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1295 else:
1295 else:
1296 bases, heads = update, self.changelog.heads()
1296 bases, heads = update, self.changelog.heads()
1297
1297
1298 if not bases:
1298 if not bases:
1299 self.ui.status(_("no changes found\n"))
1299 self.ui.status(_("no changes found\n"))
1300 return None, 1
1300 return None, 1
1301 elif not force:
1301 elif not force:
1302 # FIXME we don't properly detect creation of new heads
1302 # FIXME we don't properly detect creation of new heads
1303 # in the push -r case, assume the user knows what he's doing
1303 # in the push -r case, assume the user knows what he's doing
1304 if not revs and len(remote_heads) < len(heads) \
1304 if not revs and len(remote_heads) < len(heads) \
1305 and remote_heads != [nullid]:
1305 and remote_heads != [nullid]:
1306 self.ui.warn(_("abort: push creates new remote branches!\n"))
1306 self.ui.warn(_("abort: push creates new remote branches!\n"))
1307 self.ui.status(_("(did you forget to merge?"
1307 self.ui.status(_("(did you forget to merge?"
1308 " use push -f to force)\n"))
1308 " use push -f to force)\n"))
1309 return None, 1
1309 return None, 1
1310
1310
1311 if revs is None:
1311 if revs is None:
1312 cg = self.changegroup(update, 'push')
1312 cg = self.changegroup(update, 'push')
1313 else:
1313 else:
1314 cg = self.changegroupsubset(update, revs, 'push')
1314 cg = self.changegroupsubset(update, revs, 'push')
1315 return cg, remote_heads
1315 return cg, remote_heads
1316
1316
1317 def push_addchangegroup(self, remote, force, revs):
1317 def push_addchangegroup(self, remote, force, revs):
1318 lock = remote.lock()
1318 lock = remote.lock()
1319
1319
1320 ret = self.prepush(remote, force, revs)
1320 ret = self.prepush(remote, force, revs)
1321 if ret[0] is not None:
1321 if ret[0] is not None:
1322 cg, remote_heads = ret
1322 cg, remote_heads = ret
1323 return remote.addchangegroup(cg, 'push', self.url())
1323 return remote.addchangegroup(cg, 'push', self.url())
1324 return ret[1]
1324 return ret[1]
1325
1325
1326 def push_unbundle(self, remote, force, revs):
1326 def push_unbundle(self, remote, force, revs):
1327 # local repo finds heads on server, finds out what revs it
1327 # local repo finds heads on server, finds out what revs it
1328 # must push. once revs transferred, if server finds it has
1328 # must push. once revs transferred, if server finds it has
1329 # different heads (someone else won commit/push race), server
1329 # different heads (someone else won commit/push race), server
1330 # aborts.
1330 # aborts.
1331
1331
1332 ret = self.prepush(remote, force, revs)
1332 ret = self.prepush(remote, force, revs)
1333 if ret[0] is not None:
1333 if ret[0] is not None:
1334 cg, remote_heads = ret
1334 cg, remote_heads = ret
1335 if force: remote_heads = ['force']
1335 if force: remote_heads = ['force']
1336 return remote.unbundle(cg, remote_heads, 'push')
1336 return remote.unbundle(cg, remote_heads, 'push')
1337 return ret[1]
1337 return ret[1]
1338
1338
1339 def changegroupinfo(self, nodes):
1340 self.ui.note(_("%d changesets found\n") % len(nodes))
1341 if self.ui.debugflag:
1342 self.ui.debug(_("List of changesets:\n"))
1343 for node in nodes:
1344 self.ui.debug("%s\n" % hex(node))
1345
1339 def changegroupsubset(self, bases, heads, source):
1346 def changegroupsubset(self, bases, heads, source):
1340 """This function generates a changegroup consisting of all the nodes
1347 """This function generates a changegroup consisting of all the nodes
1341 that are descendents of any of the bases, and ancestors of any of
1348 that are descendents of any of the bases, and ancestors of any of
1342 the heads.
1349 the heads.
1343
1350
1344 It is fairly complex as determining which filenodes and which
1351 It is fairly complex as determining which filenodes and which
1345 manifest nodes need to be included for the changeset to be complete
1352 manifest nodes need to be included for the changeset to be complete
1346 is non-trivial.
1353 is non-trivial.
1347
1354
1348 Another wrinkle is doing the reverse, figuring out which changeset in
1355 Another wrinkle is doing the reverse, figuring out which changeset in
1349 the changegroup a particular filenode or manifestnode belongs to."""
1356 the changegroup a particular filenode or manifestnode belongs to."""
1350
1357
1351 self.hook('preoutgoing', throw=True, source=source)
1358 self.hook('preoutgoing', throw=True, source=source)
1352
1359
1353 # Set up some initial variables
1360 # Set up some initial variables
1354 # Make it easy to refer to self.changelog
1361 # Make it easy to refer to self.changelog
1355 cl = self.changelog
1362 cl = self.changelog
1356 # msng is short for missing - compute the list of changesets in this
1363 # msng is short for missing - compute the list of changesets in this
1357 # changegroup.
1364 # changegroup.
1358 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1365 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1366 self.changegroupinfo(msng_cl_lst)
1359 # Some bases may turn out to be superfluous, and some heads may be
1367 # Some bases may turn out to be superfluous, and some heads may be
1360 # too. nodesbetween will return the minimal set of bases and heads
1368 # too. nodesbetween will return the minimal set of bases and heads
1361 # necessary to re-create the changegroup.
1369 # necessary to re-create the changegroup.
1362
1370
1363 # Known heads are the list of heads that it is assumed the recipient
1371 # Known heads are the list of heads that it is assumed the recipient
1364 # of this changegroup will know about.
1372 # of this changegroup will know about.
1365 knownheads = {}
1373 knownheads = {}
1366 # We assume that all parents of bases are known heads.
1374 # We assume that all parents of bases are known heads.
1367 for n in bases:
1375 for n in bases:
1368 for p in cl.parents(n):
1376 for p in cl.parents(n):
1369 if p != nullid:
1377 if p != nullid:
1370 knownheads[p] = 1
1378 knownheads[p] = 1
1371 knownheads = knownheads.keys()
1379 knownheads = knownheads.keys()
1372 if knownheads:
1380 if knownheads:
1373 # Now that we know what heads are known, we can compute which
1381 # Now that we know what heads are known, we can compute which
1374 # changesets are known. The recipient must know about all
1382 # changesets are known. The recipient must know about all
1375 # changesets required to reach the known heads from the null
1383 # changesets required to reach the known heads from the null
1376 # changeset.
1384 # changeset.
1377 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1385 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1378 junk = None
1386 junk = None
1379 # Transform the list into an ersatz set.
1387 # Transform the list into an ersatz set.
1380 has_cl_set = dict.fromkeys(has_cl_set)
1388 has_cl_set = dict.fromkeys(has_cl_set)
1381 else:
1389 else:
1382 # If there were no known heads, the recipient cannot be assumed to
1390 # If there were no known heads, the recipient cannot be assumed to
1383 # know about any changesets.
1391 # know about any changesets.
1384 has_cl_set = {}
1392 has_cl_set = {}
1385
1393
1386 # Make it easy to refer to self.manifest
1394 # Make it easy to refer to self.manifest
1387 mnfst = self.manifest
1395 mnfst = self.manifest
1388 # We don't know which manifests are missing yet
1396 # We don't know which manifests are missing yet
1389 msng_mnfst_set = {}
1397 msng_mnfst_set = {}
1390 # Nor do we know which filenodes are missing.
1398 # Nor do we know which filenodes are missing.
1391 msng_filenode_set = {}
1399 msng_filenode_set = {}
1392
1400
1393 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1401 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1394 junk = None
1402 junk = None
1395
1403
1396 # A changeset always belongs to itself, so the changenode lookup
1404 # A changeset always belongs to itself, so the changenode lookup
1397 # function for a changenode is identity.
1405 # function for a changenode is identity.
1398 def identity(x):
1406 def identity(x):
1399 return x
1407 return x
1400
1408
1401 # A function generating function. Sets up an environment for the
1409 # A function generating function. Sets up an environment for the
1402 # inner function.
1410 # inner function.
1403 def cmp_by_rev_func(revlog):
1411 def cmp_by_rev_func(revlog):
1404 # Compare two nodes by their revision number in the environment's
1412 # Compare two nodes by their revision number in the environment's
1405 # revision history. Since the revision number both represents the
1413 # revision history. Since the revision number both represents the
1406 # most efficient order to read the nodes in, and represents a
1414 # most efficient order to read the nodes in, and represents a
1407 # topological sorting of the nodes, this function is often useful.
1415 # topological sorting of the nodes, this function is often useful.
1408 def cmp_by_rev(a, b):
1416 def cmp_by_rev(a, b):
1409 return cmp(revlog.rev(a), revlog.rev(b))
1417 return cmp(revlog.rev(a), revlog.rev(b))
1410 return cmp_by_rev
1418 return cmp_by_rev
1411
1419
1412 # If we determine that a particular file or manifest node must be a
1420 # If we determine that a particular file or manifest node must be a
1413 # node that the recipient of the changegroup will already have, we can
1421 # node that the recipient of the changegroup will already have, we can
1414 # also assume the recipient will have all the parents. This function
1422 # also assume the recipient will have all the parents. This function
1415 # prunes them from the set of missing nodes.
1423 # prunes them from the set of missing nodes.
1416 def prune_parents(revlog, hasset, msngset):
1424 def prune_parents(revlog, hasset, msngset):
1417 haslst = hasset.keys()
1425 haslst = hasset.keys()
1418 haslst.sort(cmp_by_rev_func(revlog))
1426 haslst.sort(cmp_by_rev_func(revlog))
1419 for node in haslst:
1427 for node in haslst:
1420 parentlst = [p for p in revlog.parents(node) if p != nullid]
1428 parentlst = [p for p in revlog.parents(node) if p != nullid]
1421 while parentlst:
1429 while parentlst:
1422 n = parentlst.pop()
1430 n = parentlst.pop()
1423 if n not in hasset:
1431 if n not in hasset:
1424 hasset[n] = 1
1432 hasset[n] = 1
1425 p = [p for p in revlog.parents(n) if p != nullid]
1433 p = [p for p in revlog.parents(n) if p != nullid]
1426 parentlst.extend(p)
1434 parentlst.extend(p)
1427 for n in hasset:
1435 for n in hasset:
1428 msngset.pop(n, None)
1436 msngset.pop(n, None)
1429
1437
1430 # This is a function generating function used to set up an environment
1438 # This is a function generating function used to set up an environment
1431 # for the inner function to execute in.
1439 # for the inner function to execute in.
1432 def manifest_and_file_collector(changedfileset):
1440 def manifest_and_file_collector(changedfileset):
1433 # This is an information gathering function that gathers
1441 # This is an information gathering function that gathers
1434 # information from each changeset node that goes out as part of
1442 # information from each changeset node that goes out as part of
1435 # the changegroup. The information gathered is a list of which
1443 # the changegroup. The information gathered is a list of which
1436 # manifest nodes are potentially required (the recipient may
1444 # manifest nodes are potentially required (the recipient may
1437 # already have them) and total list of all files which were
1445 # already have them) and total list of all files which were
1438 # changed in any changeset in the changegroup.
1446 # changed in any changeset in the changegroup.
1439 #
1447 #
1440 # We also remember the first changenode we saw any manifest
1448 # We also remember the first changenode we saw any manifest
1441 # referenced by so we can later determine which changenode 'owns'
1449 # referenced by so we can later determine which changenode 'owns'
1442 # the manifest.
1450 # the manifest.
1443 def collect_manifests_and_files(clnode):
1451 def collect_manifests_and_files(clnode):
1444 c = cl.read(clnode)
1452 c = cl.read(clnode)
1445 for f in c[3]:
1453 for f in c[3]:
1446 # This is to make sure we only have one instance of each
1454 # This is to make sure we only have one instance of each
1447 # filename string for each filename.
1455 # filename string for each filename.
1448 changedfileset.setdefault(f, f)
1456 changedfileset.setdefault(f, f)
1449 msng_mnfst_set.setdefault(c[0], clnode)
1457 msng_mnfst_set.setdefault(c[0], clnode)
1450 return collect_manifests_and_files
1458 return collect_manifests_and_files
1451
1459
1452 # Figure out which manifest nodes (of the ones we think might be part
1460 # Figure out which manifest nodes (of the ones we think might be part
1453 # of the changegroup) the recipient must know about and remove them
1461 # of the changegroup) the recipient must know about and remove them
1454 # from the changegroup.
1462 # from the changegroup.
1455 def prune_manifests():
1463 def prune_manifests():
1456 has_mnfst_set = {}
1464 has_mnfst_set = {}
1457 for n in msng_mnfst_set:
1465 for n in msng_mnfst_set:
1458 # If a 'missing' manifest thinks it belongs to a changenode
1466 # If a 'missing' manifest thinks it belongs to a changenode
1459 # the recipient is assumed to have, obviously the recipient
1467 # the recipient is assumed to have, obviously the recipient
1460 # must have that manifest.
1468 # must have that manifest.
1461 linknode = cl.node(mnfst.linkrev(n))
1469 linknode = cl.node(mnfst.linkrev(n))
1462 if linknode in has_cl_set:
1470 if linknode in has_cl_set:
1463 has_mnfst_set[n] = 1
1471 has_mnfst_set[n] = 1
1464 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1472 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1465
1473
1466 # Use the information collected in collect_manifests_and_files to say
1474 # Use the information collected in collect_manifests_and_files to say
1467 # which changenode any manifestnode belongs to.
1475 # which changenode any manifestnode belongs to.
1468 def lookup_manifest_link(mnfstnode):
1476 def lookup_manifest_link(mnfstnode):
1469 return msng_mnfst_set[mnfstnode]
1477 return msng_mnfst_set[mnfstnode]
1470
1478
1471 # A function generating function that sets up the initial environment
1479 # A function generating function that sets up the initial environment
1472 # the inner function.
1480 # the inner function.
1473 def filenode_collector(changedfiles):
1481 def filenode_collector(changedfiles):
1474 next_rev = [0]
1482 next_rev = [0]
1475 # This gathers information from each manifestnode included in the
1483 # This gathers information from each manifestnode included in the
1476 # changegroup about which filenodes the manifest node references
1484 # changegroup about which filenodes the manifest node references
1477 # so we can include those in the changegroup too.
1485 # so we can include those in the changegroup too.
1478 #
1486 #
1479 # It also remembers which changenode each filenode belongs to. It
1487 # It also remembers which changenode each filenode belongs to. It
1480 # does this by assuming the a filenode belongs to the changenode
1488 # does this by assuming the a filenode belongs to the changenode
1481 # the first manifest that references it belongs to.
1489 # the first manifest that references it belongs to.
1482 def collect_msng_filenodes(mnfstnode):
1490 def collect_msng_filenodes(mnfstnode):
1483 r = mnfst.rev(mnfstnode)
1491 r = mnfst.rev(mnfstnode)
1484 if r == next_rev[0]:
1492 if r == next_rev[0]:
1485 # If the last rev we looked at was the one just previous,
1493 # If the last rev we looked at was the one just previous,
1486 # we only need to see a diff.
1494 # we only need to see a diff.
1487 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1495 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1488 # For each line in the delta
1496 # For each line in the delta
1489 for dline in delta.splitlines():
1497 for dline in delta.splitlines():
1490 # get the filename and filenode for that line
1498 # get the filename and filenode for that line
1491 f, fnode = dline.split('\0')
1499 f, fnode = dline.split('\0')
1492 fnode = bin(fnode[:40])
1500 fnode = bin(fnode[:40])
1493 f = changedfiles.get(f, None)
1501 f = changedfiles.get(f, None)
1494 # And if the file is in the list of files we care
1502 # And if the file is in the list of files we care
1495 # about.
1503 # about.
1496 if f is not None:
1504 if f is not None:
1497 # Get the changenode this manifest belongs to
1505 # Get the changenode this manifest belongs to
1498 clnode = msng_mnfst_set[mnfstnode]
1506 clnode = msng_mnfst_set[mnfstnode]
1499 # Create the set of filenodes for the file if
1507 # Create the set of filenodes for the file if
1500 # there isn't one already.
1508 # there isn't one already.
1501 ndset = msng_filenode_set.setdefault(f, {})
1509 ndset = msng_filenode_set.setdefault(f, {})
1502 # And set the filenode's changelog node to the
1510 # And set the filenode's changelog node to the
1503 # manifest's if it hasn't been set already.
1511 # manifest's if it hasn't been set already.
1504 ndset.setdefault(fnode, clnode)
1512 ndset.setdefault(fnode, clnode)
1505 else:
1513 else:
1506 # Otherwise we need a full manifest.
1514 # Otherwise we need a full manifest.
1507 m = mnfst.read(mnfstnode)
1515 m = mnfst.read(mnfstnode)
1508 # For every file in we care about.
1516 # For every file in we care about.
1509 for f in changedfiles:
1517 for f in changedfiles:
1510 fnode = m.get(f, None)
1518 fnode = m.get(f, None)
1511 # If it's in the manifest
1519 # If it's in the manifest
1512 if fnode is not None:
1520 if fnode is not None:
1513 # See comments above.
1521 # See comments above.
1514 clnode = msng_mnfst_set[mnfstnode]
1522 clnode = msng_mnfst_set[mnfstnode]
1515 ndset = msng_filenode_set.setdefault(f, {})
1523 ndset = msng_filenode_set.setdefault(f, {})
1516 ndset.setdefault(fnode, clnode)
1524 ndset.setdefault(fnode, clnode)
1517 # Remember the revision we hope to see next.
1525 # Remember the revision we hope to see next.
1518 next_rev[0] = r + 1
1526 next_rev[0] = r + 1
1519 return collect_msng_filenodes
1527 return collect_msng_filenodes
1520
1528
1521 # We have a list of filenodes we think we need for a file, lets remove
1529 # We have a list of filenodes we think we need for a file, lets remove
1522 # all those we now the recipient must have.
1530 # all those we now the recipient must have.
1523 def prune_filenodes(f, filerevlog):
1531 def prune_filenodes(f, filerevlog):
1524 msngset = msng_filenode_set[f]
1532 msngset = msng_filenode_set[f]
1525 hasset = {}
1533 hasset = {}
1526 # If a 'missing' filenode thinks it belongs to a changenode we
1534 # If a 'missing' filenode thinks it belongs to a changenode we
1527 # assume the recipient must have, then the recipient must have
1535 # assume the recipient must have, then the recipient must have
1528 # that filenode.
1536 # that filenode.
1529 for n in msngset:
1537 for n in msngset:
1530 clnode = cl.node(filerevlog.linkrev(n))
1538 clnode = cl.node(filerevlog.linkrev(n))
1531 if clnode in has_cl_set:
1539 if clnode in has_cl_set:
1532 hasset[n] = 1
1540 hasset[n] = 1
1533 prune_parents(filerevlog, hasset, msngset)
1541 prune_parents(filerevlog, hasset, msngset)
1534
1542
1535 # A function generator function that sets up the a context for the
1543 # A function generator function that sets up the a context for the
1536 # inner function.
1544 # inner function.
1537 def lookup_filenode_link_func(fname):
1545 def lookup_filenode_link_func(fname):
1538 msngset = msng_filenode_set[fname]
1546 msngset = msng_filenode_set[fname]
1539 # Lookup the changenode the filenode belongs to.
1547 # Lookup the changenode the filenode belongs to.
1540 def lookup_filenode_link(fnode):
1548 def lookup_filenode_link(fnode):
1541 return msngset[fnode]
1549 return msngset[fnode]
1542 return lookup_filenode_link
1550 return lookup_filenode_link
1543
1551
1544 # Now that we have all theses utility functions to help out and
1552 # Now that we have all theses utility functions to help out and
1545 # logically divide up the task, generate the group.
1553 # logically divide up the task, generate the group.
1546 def gengroup():
1554 def gengroup():
1547 # The set of changed files starts empty.
1555 # The set of changed files starts empty.
1548 changedfiles = {}
1556 changedfiles = {}
1549 # Create a changenode group generator that will call our functions
1557 # Create a changenode group generator that will call our functions
1550 # back to lookup the owning changenode and collect information.
1558 # back to lookup the owning changenode and collect information.
1551 group = cl.group(msng_cl_lst, identity,
1559 group = cl.group(msng_cl_lst, identity,
1552 manifest_and_file_collector(changedfiles))
1560 manifest_and_file_collector(changedfiles))
1553 for chnk in group:
1561 for chnk in group:
1554 yield chnk
1562 yield chnk
1555
1563
1556 # The list of manifests has been collected by the generator
1564 # The list of manifests has been collected by the generator
1557 # calling our functions back.
1565 # calling our functions back.
1558 prune_manifests()
1566 prune_manifests()
1559 msng_mnfst_lst = msng_mnfst_set.keys()
1567 msng_mnfst_lst = msng_mnfst_set.keys()
1560 # Sort the manifestnodes by revision number.
1568 # Sort the manifestnodes by revision number.
1561 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1569 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1562 # Create a generator for the manifestnodes that calls our lookup
1570 # Create a generator for the manifestnodes that calls our lookup
1563 # and data collection functions back.
1571 # and data collection functions back.
1564 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1572 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1565 filenode_collector(changedfiles))
1573 filenode_collector(changedfiles))
1566 for chnk in group:
1574 for chnk in group:
1567 yield chnk
1575 yield chnk
1568
1576
1569 # These are no longer needed, dereference and toss the memory for
1577 # These are no longer needed, dereference and toss the memory for
1570 # them.
1578 # them.
1571 msng_mnfst_lst = None
1579 msng_mnfst_lst = None
1572 msng_mnfst_set.clear()
1580 msng_mnfst_set.clear()
1573
1581
1574 changedfiles = changedfiles.keys()
1582 changedfiles = changedfiles.keys()
1575 changedfiles.sort()
1583 changedfiles.sort()
1576 # Go through all our files in order sorted by name.
1584 # Go through all our files in order sorted by name.
1577 for fname in changedfiles:
1585 for fname in changedfiles:
1578 filerevlog = self.file(fname)
1586 filerevlog = self.file(fname)
1579 # Toss out the filenodes that the recipient isn't really
1587 # Toss out the filenodes that the recipient isn't really
1580 # missing.
1588 # missing.
1581 if msng_filenode_set.has_key(fname):
1589 if msng_filenode_set.has_key(fname):
1582 prune_filenodes(fname, filerevlog)
1590 prune_filenodes(fname, filerevlog)
1583 msng_filenode_lst = msng_filenode_set[fname].keys()
1591 msng_filenode_lst = msng_filenode_set[fname].keys()
1584 else:
1592 else:
1585 msng_filenode_lst = []
1593 msng_filenode_lst = []
1586 # If any filenodes are left, generate the group for them,
1594 # If any filenodes are left, generate the group for them,
1587 # otherwise don't bother.
1595 # otherwise don't bother.
1588 if len(msng_filenode_lst) > 0:
1596 if len(msng_filenode_lst) > 0:
1589 yield changegroup.genchunk(fname)
1597 yield changegroup.genchunk(fname)
1590 # Sort the filenodes by their revision #
1598 # Sort the filenodes by their revision #
1591 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1599 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1592 # Create a group generator and only pass in a changenode
1600 # Create a group generator and only pass in a changenode
1593 # lookup function as we need to collect no information
1601 # lookup function as we need to collect no information
1594 # from filenodes.
1602 # from filenodes.
1595 group = filerevlog.group(msng_filenode_lst,
1603 group = filerevlog.group(msng_filenode_lst,
1596 lookup_filenode_link_func(fname))
1604 lookup_filenode_link_func(fname))
1597 for chnk in group:
1605 for chnk in group:
1598 yield chnk
1606 yield chnk
1599 if msng_filenode_set.has_key(fname):
1607 if msng_filenode_set.has_key(fname):
1600 # Don't need this anymore, toss it to free memory.
1608 # Don't need this anymore, toss it to free memory.
1601 del msng_filenode_set[fname]
1609 del msng_filenode_set[fname]
1602 # Signal that no more groups are left.
1610 # Signal that no more groups are left.
1603 yield changegroup.closechunk()
1611 yield changegroup.closechunk()
1604
1612
1605 if msng_cl_lst:
1613 if msng_cl_lst:
1606 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1614 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1607
1615
1608 return util.chunkbuffer(gengroup())
1616 return util.chunkbuffer(gengroup())
1609
1617
1610 def changegroup(self, basenodes, source):
1618 def changegroup(self, basenodes, source):
1611 """Generate a changegroup of all nodes that we have that a recipient
1619 """Generate a changegroup of all nodes that we have that a recipient
1612 doesn't.
1620 doesn't.
1613
1621
1614 This is much easier than the previous function as we can assume that
1622 This is much easier than the previous function as we can assume that
1615 the recipient has any changenode we aren't sending them."""
1623 the recipient has any changenode we aren't sending them."""
1616
1624
1617 self.hook('preoutgoing', throw=True, source=source)
1625 self.hook('preoutgoing', throw=True, source=source)
1618
1626
1619 cl = self.changelog
1627 cl = self.changelog
1620 nodes = cl.nodesbetween(basenodes, None)[0]
1628 nodes = cl.nodesbetween(basenodes, None)[0]
1621 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1629 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1630 self.changegroupinfo(nodes)
1622
1631
1623 def identity(x):
1632 def identity(x):
1624 return x
1633 return x
1625
1634
1626 def gennodelst(revlog):
1635 def gennodelst(revlog):
1627 for r in xrange(0, revlog.count()):
1636 for r in xrange(0, revlog.count()):
1628 n = revlog.node(r)
1637 n = revlog.node(r)
1629 if revlog.linkrev(n) in revset:
1638 if revlog.linkrev(n) in revset:
1630 yield n
1639 yield n
1631
1640
1632 def changed_file_collector(changedfileset):
1641 def changed_file_collector(changedfileset):
1633 def collect_changed_files(clnode):
1642 def collect_changed_files(clnode):
1634 c = cl.read(clnode)
1643 c = cl.read(clnode)
1635 for fname in c[3]:
1644 for fname in c[3]:
1636 changedfileset[fname] = 1
1645 changedfileset[fname] = 1
1637 return collect_changed_files
1646 return collect_changed_files
1638
1647
1639 def lookuprevlink_func(revlog):
1648 def lookuprevlink_func(revlog):
1640 def lookuprevlink(n):
1649 def lookuprevlink(n):
1641 return cl.node(revlog.linkrev(n))
1650 return cl.node(revlog.linkrev(n))
1642 return lookuprevlink
1651 return lookuprevlink
1643
1652
1644 def gengroup():
1653 def gengroup():
1645 # construct a list of all changed files
1654 # construct a list of all changed files
1646 changedfiles = {}
1655 changedfiles = {}
1647
1656
1648 for chnk in cl.group(nodes, identity,
1657 for chnk in cl.group(nodes, identity,
1649 changed_file_collector(changedfiles)):
1658 changed_file_collector(changedfiles)):
1650 yield chnk
1659 yield chnk
1651 changedfiles = changedfiles.keys()
1660 changedfiles = changedfiles.keys()
1652 changedfiles.sort()
1661 changedfiles.sort()
1653
1662
1654 mnfst = self.manifest
1663 mnfst = self.manifest
1655 nodeiter = gennodelst(mnfst)
1664 nodeiter = gennodelst(mnfst)
1656 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1665 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1657 yield chnk
1666 yield chnk
1658
1667
1659 for fname in changedfiles:
1668 for fname in changedfiles:
1660 filerevlog = self.file(fname)
1669 filerevlog = self.file(fname)
1661 nodeiter = gennodelst(filerevlog)
1670 nodeiter = gennodelst(filerevlog)
1662 nodeiter = list(nodeiter)
1671 nodeiter = list(nodeiter)
1663 if nodeiter:
1672 if nodeiter:
1664 yield changegroup.genchunk(fname)
1673 yield changegroup.genchunk(fname)
1665 lookup = lookuprevlink_func(filerevlog)
1674 lookup = lookuprevlink_func(filerevlog)
1666 for chnk in filerevlog.group(nodeiter, lookup):
1675 for chnk in filerevlog.group(nodeiter, lookup):
1667 yield chnk
1676 yield chnk
1668
1677
1669 yield changegroup.closechunk()
1678 yield changegroup.closechunk()
1670
1679
1671 if nodes:
1680 if nodes:
1672 self.hook('outgoing', node=hex(nodes[0]), source=source)
1681 self.hook('outgoing', node=hex(nodes[0]), source=source)
1673
1682
1674 return util.chunkbuffer(gengroup())
1683 return util.chunkbuffer(gengroup())
1675
1684
1676 def addchangegroup(self, source, srctype, url):
1685 def addchangegroup(self, source, srctype, url):
1677 """add changegroup to repo.
1686 """add changegroup to repo.
1678 returns number of heads modified or added + 1."""
1687 returns number of heads modified or added + 1."""
1679
1688
1680 def csmap(x):
1689 def csmap(x):
1681 self.ui.debug(_("add changeset %s\n") % short(x))
1690 self.ui.debug(_("add changeset %s\n") % short(x))
1682 return cl.count()
1691 return cl.count()
1683
1692
1684 def revmap(x):
1693 def revmap(x):
1685 return cl.rev(x)
1694 return cl.rev(x)
1686
1695
1687 if not source:
1696 if not source:
1688 return 0
1697 return 0
1689
1698
1690 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1699 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1691
1700
1692 changesets = files = revisions = 0
1701 changesets = files = revisions = 0
1693
1702
1694 tr = self.transaction()
1703 tr = self.transaction()
1695
1704
1696 # write changelog data to temp files so concurrent readers will not see
1705 # write changelog data to temp files so concurrent readers will not see
1697 # inconsistent view
1706 # inconsistent view
1698 cl = None
1707 cl = None
1699 try:
1708 try:
1700 cl = appendfile.appendchangelog(self.sopener,
1709 cl = appendfile.appendchangelog(self.sopener,
1701 self.changelog.version)
1710 self.changelog.version)
1702
1711
1703 oldheads = len(cl.heads())
1712 oldheads = len(cl.heads())
1704
1713
1705 # pull off the changeset group
1714 # pull off the changeset group
1706 self.ui.status(_("adding changesets\n"))
1715 self.ui.status(_("adding changesets\n"))
1707 cor = cl.count() - 1
1716 cor = cl.count() - 1
1708 chunkiter = changegroup.chunkiter(source)
1717 chunkiter = changegroup.chunkiter(source)
1709 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1718 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1710 raise util.Abort(_("received changelog group is empty"))
1719 raise util.Abort(_("received changelog group is empty"))
1711 cnr = cl.count() - 1
1720 cnr = cl.count() - 1
1712 changesets = cnr - cor
1721 changesets = cnr - cor
1713
1722
1714 # pull off the manifest group
1723 # pull off the manifest group
1715 self.ui.status(_("adding manifests\n"))
1724 self.ui.status(_("adding manifests\n"))
1716 chunkiter = changegroup.chunkiter(source)
1725 chunkiter = changegroup.chunkiter(source)
1717 # no need to check for empty manifest group here:
1726 # no need to check for empty manifest group here:
1718 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1727 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1719 # no new manifest will be created and the manifest group will
1728 # no new manifest will be created and the manifest group will
1720 # be empty during the pull
1729 # be empty during the pull
1721 self.manifest.addgroup(chunkiter, revmap, tr)
1730 self.manifest.addgroup(chunkiter, revmap, tr)
1722
1731
1723 # process the files
1732 # process the files
1724 self.ui.status(_("adding file changes\n"))
1733 self.ui.status(_("adding file changes\n"))
1725 while 1:
1734 while 1:
1726 f = changegroup.getchunk(source)
1735 f = changegroup.getchunk(source)
1727 if not f:
1736 if not f:
1728 break
1737 break
1729 self.ui.debug(_("adding %s revisions\n") % f)
1738 self.ui.debug(_("adding %s revisions\n") % f)
1730 fl = self.file(f)
1739 fl = self.file(f)
1731 o = fl.count()
1740 o = fl.count()
1732 chunkiter = changegroup.chunkiter(source)
1741 chunkiter = changegroup.chunkiter(source)
1733 if fl.addgroup(chunkiter, revmap, tr) is None:
1742 if fl.addgroup(chunkiter, revmap, tr) is None:
1734 raise util.Abort(_("received file revlog group is empty"))
1743 raise util.Abort(_("received file revlog group is empty"))
1735 revisions += fl.count() - o
1744 revisions += fl.count() - o
1736 files += 1
1745 files += 1
1737
1746
1738 cl.writedata()
1747 cl.writedata()
1739 finally:
1748 finally:
1740 if cl:
1749 if cl:
1741 cl.cleanup()
1750 cl.cleanup()
1742
1751
1743 # make changelog see real files again
1752 # make changelog see real files again
1744 self.changelog = changelog.changelog(self.sopener,
1753 self.changelog = changelog.changelog(self.sopener,
1745 self.changelog.version)
1754 self.changelog.version)
1746 self.changelog.checkinlinesize(tr)
1755 self.changelog.checkinlinesize(tr)
1747
1756
1748 newheads = len(self.changelog.heads())
1757 newheads = len(self.changelog.heads())
1749 heads = ""
1758 heads = ""
1750 if oldheads and newheads != oldheads:
1759 if oldheads and newheads != oldheads:
1751 heads = _(" (%+d heads)") % (newheads - oldheads)
1760 heads = _(" (%+d heads)") % (newheads - oldheads)
1752
1761
1753 self.ui.status(_("added %d changesets"
1762 self.ui.status(_("added %d changesets"
1754 " with %d changes to %d files%s\n")
1763 " with %d changes to %d files%s\n")
1755 % (changesets, revisions, files, heads))
1764 % (changesets, revisions, files, heads))
1756
1765
1757 if changesets > 0:
1766 if changesets > 0:
1758 self.hook('pretxnchangegroup', throw=True,
1767 self.hook('pretxnchangegroup', throw=True,
1759 node=hex(self.changelog.node(cor+1)), source=srctype,
1768 node=hex(self.changelog.node(cor+1)), source=srctype,
1760 url=url)
1769 url=url)
1761
1770
1762 tr.close()
1771 tr.close()
1763
1772
1764 if changesets > 0:
1773 if changesets > 0:
1765 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1774 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1766 source=srctype, url=url)
1775 source=srctype, url=url)
1767
1776
1768 for i in xrange(cor + 1, cnr + 1):
1777 for i in xrange(cor + 1, cnr + 1):
1769 self.hook("incoming", node=hex(self.changelog.node(i)),
1778 self.hook("incoming", node=hex(self.changelog.node(i)),
1770 source=srctype, url=url)
1779 source=srctype, url=url)
1771
1780
1772 return newheads - oldheads + 1
1781 return newheads - oldheads + 1
1773
1782
1774
1783
1775 def stream_in(self, remote):
1784 def stream_in(self, remote):
1776 fp = remote.stream_out()
1785 fp = remote.stream_out()
1777 resp = int(fp.readline())
1786 resp = int(fp.readline())
1778 if resp != 0:
1787 if resp != 0:
1779 raise util.Abort(_('operation forbidden by server'))
1788 raise util.Abort(_('operation forbidden by server'))
1780 self.ui.status(_('streaming all changes\n'))
1789 self.ui.status(_('streaming all changes\n'))
1781 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
1790 total_files, total_bytes = map(int, fp.readline().split(' ', 1))
1782 self.ui.status(_('%d files to transfer, %s of data\n') %
1791 self.ui.status(_('%d files to transfer, %s of data\n') %
1783 (total_files, util.bytecount(total_bytes)))
1792 (total_files, util.bytecount(total_bytes)))
1784 start = time.time()
1793 start = time.time()
1785 for i in xrange(total_files):
1794 for i in xrange(total_files):
1786 name, size = fp.readline().split('\0', 1)
1795 name, size = fp.readline().split('\0', 1)
1787 size = int(size)
1796 size = int(size)
1788 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1797 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1789 ofp = self.sopener(name, 'w')
1798 ofp = self.sopener(name, 'w')
1790 for chunk in util.filechunkiter(fp, limit=size):
1799 for chunk in util.filechunkiter(fp, limit=size):
1791 ofp.write(chunk)
1800 ofp.write(chunk)
1792 ofp.close()
1801 ofp.close()
1793 elapsed = time.time() - start
1802 elapsed = time.time() - start
1794 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1803 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1795 (util.bytecount(total_bytes), elapsed,
1804 (util.bytecount(total_bytes), elapsed,
1796 util.bytecount(total_bytes / elapsed)))
1805 util.bytecount(total_bytes / elapsed)))
1797 self.reload()
1806 self.reload()
1798 return len(self.heads()) + 1
1807 return len(self.heads()) + 1
1799
1808
1800 def clone(self, remote, heads=[], stream=False):
1809 def clone(self, remote, heads=[], stream=False):
1801 '''clone remote repository.
1810 '''clone remote repository.
1802
1811
1803 keyword arguments:
1812 keyword arguments:
1804 heads: list of revs to clone (forces use of pull)
1813 heads: list of revs to clone (forces use of pull)
1805 stream: use streaming clone if possible'''
1814 stream: use streaming clone if possible'''
1806
1815
1807 # now, all clients that can request uncompressed clones can
1816 # now, all clients that can request uncompressed clones can
1808 # read repo formats supported by all servers that can serve
1817 # read repo formats supported by all servers that can serve
1809 # them.
1818 # them.
1810
1819
1811 # if revlog format changes, client will have to check version
1820 # if revlog format changes, client will have to check version
1812 # and format flags on "stream" capability, and use
1821 # and format flags on "stream" capability, and use
1813 # uncompressed only if compatible.
1822 # uncompressed only if compatible.
1814
1823
1815 if stream and not heads and remote.capable('stream'):
1824 if stream and not heads and remote.capable('stream'):
1816 return self.stream_in(remote)
1825 return self.stream_in(remote)
1817 return self.pull(remote, heads)
1826 return self.pull(remote, heads)
1818
1827
1819 # used to avoid circular references so destructors work
1828 # used to avoid circular references so destructors work
1820 def aftertrans(base):
1829 def aftertrans(base):
1821 p = base
1830 p = base
1822 def a():
1831 def a():
1823 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1832 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1824 util.rename(os.path.join(p, "journal.dirstate"),
1833 util.rename(os.path.join(p, "journal.dirstate"),
1825 os.path.join(p, "undo.dirstate"))
1834 os.path.join(p, "undo.dirstate"))
1826 return a
1835 return a
1827
1836
1828 def instance(ui, path, create):
1837 def instance(ui, path, create):
1829 return localrepository(ui, util.drop_scheme('file', path), create)
1838 return localrepository(ui, util.drop_scheme('file', path), create)
1830
1839
1831 def islocal(path):
1840 def islocal(path):
1832 return True
1841 return True
@@ -1,517 +1,587 b''
1 adding foo/Bar/file.txt
1 adding foo/Bar/file.txt
2 adding foo/file.txt
2 adding foo/file.txt
3 adding quux/file.py
3 adding quux/file.py
4 3:911600dab2ae
4 3:911600dab2ae
5 requesting all changes
5 requesting all changes
6 adding changesets
6 adding changesets
7 adding manifests
7 adding manifests
8 adding file changes
8 adding file changes
9 added 1 changesets with 3 changes to 3 files
9 added 1 changesets with 3 changes to 3 files
10 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
10 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
11
11
12 Extension disabled for lack of a hook
12 Extension disabled for lack of a hook
13 Pushing as user fred
13 Pushing as user fred
14 hgrc = """
14 hgrc = """
15 """
15 """
16 pushing to ../b
16 pushing to ../b
17 searching for changes
17 searching for changes
18 common changesets up to 6675d58eff77
18 common changesets up to 6675d58eff77
19 3 changesets found
20 List of changesets:
21 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
22 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
23 911600dab2ae7a9baff75958b84fe606851ce955
19 adding changesets
24 adding changesets
20 add changeset ef1ea85a6374
25 add changeset ef1ea85a6374
21 add changeset f9cafe1212c8
26 add changeset f9cafe1212c8
22 add changeset 911600dab2ae
27 add changeset 911600dab2ae
23 adding manifests
28 adding manifests
24 adding file changes
29 adding file changes
25 adding foo/Bar/file.txt revisions
30 adding foo/Bar/file.txt revisions
26 adding foo/file.txt revisions
31 adding foo/file.txt revisions
27 adding quux/file.py revisions
32 adding quux/file.py revisions
28 added 3 changesets with 3 changes to 3 files
33 added 3 changesets with 3 changes to 3 files
29 rolling back last transaction
34 rolling back last transaction
30 0:6675d58eff77
35 0:6675d58eff77
31
36
32 Extension disabled for lack of acl.sources
37 Extension disabled for lack of acl.sources
33 Pushing as user fred
38 Pushing as user fred
34 hgrc = """
39 hgrc = """
35 [hooks]
40 [hooks]
36 pretxnchangegroup.acl = python:hgext.acl.hook
41 pretxnchangegroup.acl = python:hgext.acl.hook
37 """
42 """
38 pushing to ../b
43 pushing to ../b
39 searching for changes
44 searching for changes
40 common changesets up to 6675d58eff77
45 common changesets up to 6675d58eff77
46 3 changesets found
47 List of changesets:
48 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
49 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
50 911600dab2ae7a9baff75958b84fe606851ce955
41 adding changesets
51 adding changesets
42 add changeset ef1ea85a6374
52 add changeset ef1ea85a6374
43 add changeset f9cafe1212c8
53 add changeset f9cafe1212c8
44 add changeset 911600dab2ae
54 add changeset 911600dab2ae
45 adding manifests
55 adding manifests
46 adding file changes
56 adding file changes
47 adding foo/Bar/file.txt revisions
57 adding foo/Bar/file.txt revisions
48 adding foo/file.txt revisions
58 adding foo/file.txt revisions
49 adding quux/file.py revisions
59 adding quux/file.py revisions
50 added 3 changesets with 3 changes to 3 files
60 added 3 changesets with 3 changes to 3 files
51 calling hook pretxnchangegroup.acl: hgext.acl.hook
61 calling hook pretxnchangegroup.acl: hgext.acl.hook
52 acl: acl.allow not enabled
62 acl: acl.allow not enabled
53 acl: acl.deny not enabled
63 acl: acl.deny not enabled
54 acl: changes have source "push" - skipping
64 acl: changes have source "push" - skipping
55 rolling back last transaction
65 rolling back last transaction
56 0:6675d58eff77
66 0:6675d58eff77
57
67
58 No [acl.allow]/[acl.deny]
68 No [acl.allow]/[acl.deny]
59 Pushing as user fred
69 Pushing as user fred
60 hgrc = """
70 hgrc = """
61 [hooks]
71 [hooks]
62 pretxnchangegroup.acl = python:hgext.acl.hook
72 pretxnchangegroup.acl = python:hgext.acl.hook
63 [acl]
73 [acl]
64 sources = push
74 sources = push
65 """
75 """
66 pushing to ../b
76 pushing to ../b
67 searching for changes
77 searching for changes
68 common changesets up to 6675d58eff77
78 common changesets up to 6675d58eff77
79 3 changesets found
80 List of changesets:
81 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
82 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
83 911600dab2ae7a9baff75958b84fe606851ce955
69 adding changesets
84 adding changesets
70 add changeset ef1ea85a6374
85 add changeset ef1ea85a6374
71 add changeset f9cafe1212c8
86 add changeset f9cafe1212c8
72 add changeset 911600dab2ae
87 add changeset 911600dab2ae
73 adding manifests
88 adding manifests
74 adding file changes
89 adding file changes
75 adding foo/Bar/file.txt revisions
90 adding foo/Bar/file.txt revisions
76 adding foo/file.txt revisions
91 adding foo/file.txt revisions
77 adding quux/file.py revisions
92 adding quux/file.py revisions
78 added 3 changesets with 3 changes to 3 files
93 added 3 changesets with 3 changes to 3 files
79 calling hook pretxnchangegroup.acl: hgext.acl.hook
94 calling hook pretxnchangegroup.acl: hgext.acl.hook
80 acl: acl.allow not enabled
95 acl: acl.allow not enabled
81 acl: acl.deny not enabled
96 acl: acl.deny not enabled
82 acl: allowing changeset ef1ea85a6374
97 acl: allowing changeset ef1ea85a6374
83 acl: allowing changeset f9cafe1212c8
98 acl: allowing changeset f9cafe1212c8
84 acl: allowing changeset 911600dab2ae
99 acl: allowing changeset 911600dab2ae
85 rolling back last transaction
100 rolling back last transaction
86 0:6675d58eff77
101 0:6675d58eff77
87
102
88 Empty [acl.allow]
103 Empty [acl.allow]
89 Pushing as user fred
104 Pushing as user fred
90 hgrc = """
105 hgrc = """
91 [hooks]
106 [hooks]
92 pretxnchangegroup.acl = python:hgext.acl.hook
107 pretxnchangegroup.acl = python:hgext.acl.hook
93 [acl]
108 [acl]
94 sources = push
109 sources = push
95 [acl.allow]
110 [acl.allow]
96 """
111 """
97 pushing to ../b
112 pushing to ../b
98 searching for changes
113 searching for changes
99 common changesets up to 6675d58eff77
114 common changesets up to 6675d58eff77
115 3 changesets found
116 List of changesets:
117 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
118 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
119 911600dab2ae7a9baff75958b84fe606851ce955
100 adding changesets
120 adding changesets
101 add changeset ef1ea85a6374
121 add changeset ef1ea85a6374
102 add changeset f9cafe1212c8
122 add changeset f9cafe1212c8
103 add changeset 911600dab2ae
123 add changeset 911600dab2ae
104 adding manifests
124 adding manifests
105 adding file changes
125 adding file changes
106 adding foo/Bar/file.txt revisions
126 adding foo/Bar/file.txt revisions
107 adding foo/file.txt revisions
127 adding foo/file.txt revisions
108 adding quux/file.py revisions
128 adding quux/file.py revisions
109 added 3 changesets with 3 changes to 3 files
129 added 3 changesets with 3 changes to 3 files
110 calling hook pretxnchangegroup.acl: hgext.acl.hook
130 calling hook pretxnchangegroup.acl: hgext.acl.hook
111 acl: acl.allow enabled, 0 entries for user fred
131 acl: acl.allow enabled, 0 entries for user fred
112 acl: acl.deny not enabled
132 acl: acl.deny not enabled
113 acl: user fred not allowed on foo/file.txt
133 acl: user fred not allowed on foo/file.txt
114 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
134 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
115 abort: acl: access denied for changeset ef1ea85a6374
135 abort: acl: access denied for changeset ef1ea85a6374
116 transaction abort!
136 transaction abort!
117 rollback completed
137 rollback completed
118 no rollback information available
138 no rollback information available
119 0:6675d58eff77
139 0:6675d58eff77
120
140
121 fred is allowed inside foo/
141 fred is allowed inside foo/
122 Pushing as user fred
142 Pushing as user fred
123 hgrc = """
143 hgrc = """
124 [hooks]
144 [hooks]
125 pretxnchangegroup.acl = python:hgext.acl.hook
145 pretxnchangegroup.acl = python:hgext.acl.hook
126 [acl]
146 [acl]
127 sources = push
147 sources = push
128 [acl.allow]
148 [acl.allow]
129 foo/** = fred
149 foo/** = fred
130 """
150 """
131 pushing to ../b
151 pushing to ../b
132 searching for changes
152 searching for changes
133 common changesets up to 6675d58eff77
153 common changesets up to 6675d58eff77
154 3 changesets found
155 List of changesets:
156 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
157 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
158 911600dab2ae7a9baff75958b84fe606851ce955
134 adding changesets
159 adding changesets
135 add changeset ef1ea85a6374
160 add changeset ef1ea85a6374
136 add changeset f9cafe1212c8
161 add changeset f9cafe1212c8
137 add changeset 911600dab2ae
162 add changeset 911600dab2ae
138 adding manifests
163 adding manifests
139 adding file changes
164 adding file changes
140 adding foo/Bar/file.txt revisions
165 adding foo/Bar/file.txt revisions
141 adding foo/file.txt revisions
166 adding foo/file.txt revisions
142 adding quux/file.py revisions
167 adding quux/file.py revisions
143 added 3 changesets with 3 changes to 3 files
168 added 3 changesets with 3 changes to 3 files
144 calling hook pretxnchangegroup.acl: hgext.acl.hook
169 calling hook pretxnchangegroup.acl: hgext.acl.hook
145 acl: acl.allow enabled, 1 entries for user fred
170 acl: acl.allow enabled, 1 entries for user fred
146 acl: acl.deny not enabled
171 acl: acl.deny not enabled
147 acl: allowing changeset ef1ea85a6374
172 acl: allowing changeset ef1ea85a6374
148 acl: allowing changeset f9cafe1212c8
173 acl: allowing changeset f9cafe1212c8
149 acl: user fred not allowed on quux/file.py
174 acl: user fred not allowed on quux/file.py
150 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
175 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
151 abort: acl: access denied for changeset 911600dab2ae
176 abort: acl: access denied for changeset 911600dab2ae
152 transaction abort!
177 transaction abort!
153 rollback completed
178 rollback completed
154 no rollback information available
179 no rollback information available
155 0:6675d58eff77
180 0:6675d58eff77
156
181
157 Empty [acl.deny]
182 Empty [acl.deny]
158 Pushing as user barney
183 Pushing as user barney
159 hgrc = """
184 hgrc = """
160 [hooks]
185 [hooks]
161 pretxnchangegroup.acl = python:hgext.acl.hook
186 pretxnchangegroup.acl = python:hgext.acl.hook
162 [acl]
187 [acl]
163 sources = push
188 sources = push
164 [acl.allow]
189 [acl.allow]
165 foo/** = fred
190 foo/** = fred
166 [acl.deny]
191 [acl.deny]
167 """
192 """
168 pushing to ../b
193 pushing to ../b
169 searching for changes
194 searching for changes
170 common changesets up to 6675d58eff77
195 common changesets up to 6675d58eff77
196 3 changesets found
197 List of changesets:
198 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
199 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
200 911600dab2ae7a9baff75958b84fe606851ce955
171 adding changesets
201 adding changesets
172 add changeset ef1ea85a6374
202 add changeset ef1ea85a6374
173 add changeset f9cafe1212c8
203 add changeset f9cafe1212c8
174 add changeset 911600dab2ae
204 add changeset 911600dab2ae
175 adding manifests
205 adding manifests
176 adding file changes
206 adding file changes
177 adding foo/Bar/file.txt revisions
207 adding foo/Bar/file.txt revisions
178 adding foo/file.txt revisions
208 adding foo/file.txt revisions
179 adding quux/file.py revisions
209 adding quux/file.py revisions
180 added 3 changesets with 3 changes to 3 files
210 added 3 changesets with 3 changes to 3 files
181 calling hook pretxnchangegroup.acl: hgext.acl.hook
211 calling hook pretxnchangegroup.acl: hgext.acl.hook
182 acl: acl.allow enabled, 0 entries for user barney
212 acl: acl.allow enabled, 0 entries for user barney
183 acl: acl.deny enabled, 0 entries for user barney
213 acl: acl.deny enabled, 0 entries for user barney
184 acl: user barney not allowed on foo/file.txt
214 acl: user barney not allowed on foo/file.txt
185 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
215 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
186 abort: acl: access denied for changeset ef1ea85a6374
216 abort: acl: access denied for changeset ef1ea85a6374
187 transaction abort!
217 transaction abort!
188 rollback completed
218 rollback completed
189 no rollback information available
219 no rollback information available
190 0:6675d58eff77
220 0:6675d58eff77
191
221
192 fred is allowed inside foo/, but not foo/bar/ (case matters)
222 fred is allowed inside foo/, but not foo/bar/ (case matters)
193 Pushing as user fred
223 Pushing as user fred
194 hgrc = """
224 hgrc = """
195 [hooks]
225 [hooks]
196 pretxnchangegroup.acl = python:hgext.acl.hook
226 pretxnchangegroup.acl = python:hgext.acl.hook
197 [acl]
227 [acl]
198 sources = push
228 sources = push
199 [acl.allow]
229 [acl.allow]
200 foo/** = fred
230 foo/** = fred
201 [acl.deny]
231 [acl.deny]
202 foo/bar/** = fred
232 foo/bar/** = fred
203 """
233 """
204 pushing to ../b
234 pushing to ../b
205 searching for changes
235 searching for changes
206 common changesets up to 6675d58eff77
236 common changesets up to 6675d58eff77
237 3 changesets found
238 List of changesets:
239 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
240 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
241 911600dab2ae7a9baff75958b84fe606851ce955
207 adding changesets
242 adding changesets
208 add changeset ef1ea85a6374
243 add changeset ef1ea85a6374
209 add changeset f9cafe1212c8
244 add changeset f9cafe1212c8
210 add changeset 911600dab2ae
245 add changeset 911600dab2ae
211 adding manifests
246 adding manifests
212 adding file changes
247 adding file changes
213 adding foo/Bar/file.txt revisions
248 adding foo/Bar/file.txt revisions
214 adding foo/file.txt revisions
249 adding foo/file.txt revisions
215 adding quux/file.py revisions
250 adding quux/file.py revisions
216 added 3 changesets with 3 changes to 3 files
251 added 3 changesets with 3 changes to 3 files
217 calling hook pretxnchangegroup.acl: hgext.acl.hook
252 calling hook pretxnchangegroup.acl: hgext.acl.hook
218 acl: acl.allow enabled, 1 entries for user fred
253 acl: acl.allow enabled, 1 entries for user fred
219 acl: acl.deny enabled, 1 entries for user fred
254 acl: acl.deny enabled, 1 entries for user fred
220 acl: allowing changeset ef1ea85a6374
255 acl: allowing changeset ef1ea85a6374
221 acl: allowing changeset f9cafe1212c8
256 acl: allowing changeset f9cafe1212c8
222 acl: user fred not allowed on quux/file.py
257 acl: user fred not allowed on quux/file.py
223 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
258 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
224 abort: acl: access denied for changeset 911600dab2ae
259 abort: acl: access denied for changeset 911600dab2ae
225 transaction abort!
260 transaction abort!
226 rollback completed
261 rollback completed
227 no rollback information available
262 no rollback information available
228 0:6675d58eff77
263 0:6675d58eff77
229
264
230 fred is allowed inside foo/, but not foo/Bar/
265 fred is allowed inside foo/, but not foo/Bar/
231 Pushing as user fred
266 Pushing as user fred
232 hgrc = """
267 hgrc = """
233 [hooks]
268 [hooks]
234 pretxnchangegroup.acl = python:hgext.acl.hook
269 pretxnchangegroup.acl = python:hgext.acl.hook
235 [acl]
270 [acl]
236 sources = push
271 sources = push
237 [acl.allow]
272 [acl.allow]
238 foo/** = fred
273 foo/** = fred
239 [acl.deny]
274 [acl.deny]
240 foo/bar/** = fred
275 foo/bar/** = fred
241 foo/Bar/** = fred
276 foo/Bar/** = fred
242 """
277 """
243 pushing to ../b
278 pushing to ../b
244 searching for changes
279 searching for changes
245 common changesets up to 6675d58eff77
280 common changesets up to 6675d58eff77
281 3 changesets found
282 List of changesets:
283 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
284 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
285 911600dab2ae7a9baff75958b84fe606851ce955
246 adding changesets
286 adding changesets
247 add changeset ef1ea85a6374
287 add changeset ef1ea85a6374
248 add changeset f9cafe1212c8
288 add changeset f9cafe1212c8
249 add changeset 911600dab2ae
289 add changeset 911600dab2ae
250 adding manifests
290 adding manifests
251 adding file changes
291 adding file changes
252 adding foo/Bar/file.txt revisions
292 adding foo/Bar/file.txt revisions
253 adding foo/file.txt revisions
293 adding foo/file.txt revisions
254 adding quux/file.py revisions
294 adding quux/file.py revisions
255 added 3 changesets with 3 changes to 3 files
295 added 3 changesets with 3 changes to 3 files
256 calling hook pretxnchangegroup.acl: hgext.acl.hook
296 calling hook pretxnchangegroup.acl: hgext.acl.hook
257 acl: acl.allow enabled, 1 entries for user fred
297 acl: acl.allow enabled, 1 entries for user fred
258 acl: acl.deny enabled, 2 entries for user fred
298 acl: acl.deny enabled, 2 entries for user fred
259 acl: allowing changeset ef1ea85a6374
299 acl: allowing changeset ef1ea85a6374
260 acl: user fred denied on foo/Bar/file.txt
300 acl: user fred denied on foo/Bar/file.txt
261 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset f9cafe1212c8
301 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset f9cafe1212c8
262 abort: acl: access denied for changeset f9cafe1212c8
302 abort: acl: access denied for changeset f9cafe1212c8
263 transaction abort!
303 transaction abort!
264 rollback completed
304 rollback completed
265 no rollback information available
305 no rollback information available
266 0:6675d58eff77
306 0:6675d58eff77
267
307
268 barney is not mentioned => not allowed anywhere
308 barney is not mentioned => not allowed anywhere
269 Pushing as user barney
309 Pushing as user barney
270 hgrc = """
310 hgrc = """
271 [hooks]
311 [hooks]
272 pretxnchangegroup.acl = python:hgext.acl.hook
312 pretxnchangegroup.acl = python:hgext.acl.hook
273 [acl]
313 [acl]
274 sources = push
314 sources = push
275 [acl.allow]
315 [acl.allow]
276 foo/** = fred
316 foo/** = fred
277 [acl.deny]
317 [acl.deny]
278 foo/bar/** = fred
318 foo/bar/** = fred
279 foo/Bar/** = fred
319 foo/Bar/** = fred
280 """
320 """
281 pushing to ../b
321 pushing to ../b
282 searching for changes
322 searching for changes
283 common changesets up to 6675d58eff77
323 common changesets up to 6675d58eff77
324 3 changesets found
325 List of changesets:
326 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
327 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
328 911600dab2ae7a9baff75958b84fe606851ce955
284 adding changesets
329 adding changesets
285 add changeset ef1ea85a6374
330 add changeset ef1ea85a6374
286 add changeset f9cafe1212c8
331 add changeset f9cafe1212c8
287 add changeset 911600dab2ae
332 add changeset 911600dab2ae
288 adding manifests
333 adding manifests
289 adding file changes
334 adding file changes
290 adding foo/Bar/file.txt revisions
335 adding foo/Bar/file.txt revisions
291 adding foo/file.txt revisions
336 adding foo/file.txt revisions
292 adding quux/file.py revisions
337 adding quux/file.py revisions
293 added 3 changesets with 3 changes to 3 files
338 added 3 changesets with 3 changes to 3 files
294 calling hook pretxnchangegroup.acl: hgext.acl.hook
339 calling hook pretxnchangegroup.acl: hgext.acl.hook
295 acl: acl.allow enabled, 0 entries for user barney
340 acl: acl.allow enabled, 0 entries for user barney
296 acl: acl.deny enabled, 0 entries for user barney
341 acl: acl.deny enabled, 0 entries for user barney
297 acl: user barney not allowed on foo/file.txt
342 acl: user barney not allowed on foo/file.txt
298 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
343 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset ef1ea85a6374
299 abort: acl: access denied for changeset ef1ea85a6374
344 abort: acl: access denied for changeset ef1ea85a6374
300 transaction abort!
345 transaction abort!
301 rollback completed
346 rollback completed
302 no rollback information available
347 no rollback information available
303 0:6675d58eff77
348 0:6675d58eff77
304
349
305 barney is allowed everywhere
350 barney is allowed everywhere
306 Pushing as user barney
351 Pushing as user barney
307 hgrc = """
352 hgrc = """
308 [hooks]
353 [hooks]
309 pretxnchangegroup.acl = python:hgext.acl.hook
354 pretxnchangegroup.acl = python:hgext.acl.hook
310 [acl]
355 [acl]
311 sources = push
356 sources = push
312 [acl.allow]
357 [acl.allow]
313 foo/** = fred
358 foo/** = fred
314 [acl.deny]
359 [acl.deny]
315 foo/bar/** = fred
360 foo/bar/** = fred
316 foo/Bar/** = fred
361 foo/Bar/** = fred
317 [acl.allow]
362 [acl.allow]
318 ** = barney
363 ** = barney
319 """
364 """
320 pushing to ../b
365 pushing to ../b
321 searching for changes
366 searching for changes
322 common changesets up to 6675d58eff77
367 common changesets up to 6675d58eff77
368 3 changesets found
369 List of changesets:
370 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
371 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
372 911600dab2ae7a9baff75958b84fe606851ce955
323 adding changesets
373 adding changesets
324 add changeset ef1ea85a6374
374 add changeset ef1ea85a6374
325 add changeset f9cafe1212c8
375 add changeset f9cafe1212c8
326 add changeset 911600dab2ae
376 add changeset 911600dab2ae
327 adding manifests
377 adding manifests
328 adding file changes
378 adding file changes
329 adding foo/Bar/file.txt revisions
379 adding foo/Bar/file.txt revisions
330 adding foo/file.txt revisions
380 adding foo/file.txt revisions
331 adding quux/file.py revisions
381 adding quux/file.py revisions
332 added 3 changesets with 3 changes to 3 files
382 added 3 changesets with 3 changes to 3 files
333 calling hook pretxnchangegroup.acl: hgext.acl.hook
383 calling hook pretxnchangegroup.acl: hgext.acl.hook
334 acl: acl.allow enabled, 1 entries for user barney
384 acl: acl.allow enabled, 1 entries for user barney
335 acl: acl.deny enabled, 0 entries for user barney
385 acl: acl.deny enabled, 0 entries for user barney
336 acl: allowing changeset ef1ea85a6374
386 acl: allowing changeset ef1ea85a6374
337 acl: allowing changeset f9cafe1212c8
387 acl: allowing changeset f9cafe1212c8
338 acl: allowing changeset 911600dab2ae
388 acl: allowing changeset 911600dab2ae
339 rolling back last transaction
389 rolling back last transaction
340 0:6675d58eff77
390 0:6675d58eff77
341
391
342 wilma can change files with a .txt extension
392 wilma can change files with a .txt extension
343 Pushing as user wilma
393 Pushing as user wilma
344 hgrc = """
394 hgrc = """
345 [hooks]
395 [hooks]
346 pretxnchangegroup.acl = python:hgext.acl.hook
396 pretxnchangegroup.acl = python:hgext.acl.hook
347 [acl]
397 [acl]
348 sources = push
398 sources = push
349 [acl.allow]
399 [acl.allow]
350 foo/** = fred
400 foo/** = fred
351 [acl.deny]
401 [acl.deny]
352 foo/bar/** = fred
402 foo/bar/** = fred
353 foo/Bar/** = fred
403 foo/Bar/** = fred
354 [acl.allow]
404 [acl.allow]
355 ** = barney
405 ** = barney
356 **/*.txt = wilma
406 **/*.txt = wilma
357 """
407 """
358 pushing to ../b
408 pushing to ../b
359 searching for changes
409 searching for changes
360 common changesets up to 6675d58eff77
410 common changesets up to 6675d58eff77
411 3 changesets found
412 List of changesets:
413 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
414 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
415 911600dab2ae7a9baff75958b84fe606851ce955
361 adding changesets
416 adding changesets
362 add changeset ef1ea85a6374
417 add changeset ef1ea85a6374
363 add changeset f9cafe1212c8
418 add changeset f9cafe1212c8
364 add changeset 911600dab2ae
419 add changeset 911600dab2ae
365 adding manifests
420 adding manifests
366 adding file changes
421 adding file changes
367 adding foo/Bar/file.txt revisions
422 adding foo/Bar/file.txt revisions
368 adding foo/file.txt revisions
423 adding foo/file.txt revisions
369 adding quux/file.py revisions
424 adding quux/file.py revisions
370 added 3 changesets with 3 changes to 3 files
425 added 3 changesets with 3 changes to 3 files
371 calling hook pretxnchangegroup.acl: hgext.acl.hook
426 calling hook pretxnchangegroup.acl: hgext.acl.hook
372 acl: acl.allow enabled, 1 entries for user wilma
427 acl: acl.allow enabled, 1 entries for user wilma
373 acl: acl.deny enabled, 0 entries for user wilma
428 acl: acl.deny enabled, 0 entries for user wilma
374 acl: allowing changeset ef1ea85a6374
429 acl: allowing changeset ef1ea85a6374
375 acl: allowing changeset f9cafe1212c8
430 acl: allowing changeset f9cafe1212c8
376 acl: user wilma not allowed on quux/file.py
431 acl: user wilma not allowed on quux/file.py
377 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
432 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
378 abort: acl: access denied for changeset 911600dab2ae
433 abort: acl: access denied for changeset 911600dab2ae
379 transaction abort!
434 transaction abort!
380 rollback completed
435 rollback completed
381 no rollback information available
436 no rollback information available
382 0:6675d58eff77
437 0:6675d58eff77
383
438
384 file specified by acl.config does not exist
439 file specified by acl.config does not exist
385 Pushing as user barney
440 Pushing as user barney
386 hgrc = """
441 hgrc = """
387 [hooks]
442 [hooks]
388 pretxnchangegroup.acl = python:hgext.acl.hook
443 pretxnchangegroup.acl = python:hgext.acl.hook
389 [acl]
444 [acl]
390 sources = push
445 sources = push
391 [acl.allow]
446 [acl.allow]
392 foo/** = fred
447 foo/** = fred
393 [acl.deny]
448 [acl.deny]
394 foo/bar/** = fred
449 foo/bar/** = fred
395 foo/Bar/** = fred
450 foo/Bar/** = fred
396 [acl.allow]
451 [acl.allow]
397 ** = barney
452 ** = barney
398 **/*.txt = wilma
453 **/*.txt = wilma
399 [acl]
454 [acl]
400 config = ../acl.config
455 config = ../acl.config
401 """
456 """
402 pushing to ../b
457 pushing to ../b
403 searching for changes
458 searching for changes
404 common changesets up to 6675d58eff77
459 common changesets up to 6675d58eff77
460 3 changesets found
461 List of changesets:
462 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
463 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
464 911600dab2ae7a9baff75958b84fe606851ce955
405 adding changesets
465 adding changesets
406 add changeset ef1ea85a6374
466 add changeset ef1ea85a6374
407 add changeset f9cafe1212c8
467 add changeset f9cafe1212c8
408 add changeset 911600dab2ae
468 add changeset 911600dab2ae
409 adding manifests
469 adding manifests
410 adding file changes
470 adding file changes
411 adding foo/Bar/file.txt revisions
471 adding foo/Bar/file.txt revisions
412 adding foo/file.txt revisions
472 adding foo/file.txt revisions
413 adding quux/file.py revisions
473 adding quux/file.py revisions
414 added 3 changesets with 3 changes to 3 files
474 added 3 changesets with 3 changes to 3 files
415 calling hook pretxnchangegroup.acl: hgext.acl.hook
475 calling hook pretxnchangegroup.acl: hgext.acl.hook
416 acl: acl.allow enabled, 1 entries for user barney
476 acl: acl.allow enabled, 1 entries for user barney
417 acl: acl.deny enabled, 0 entries for user barney
477 acl: acl.deny enabled, 0 entries for user barney
418 acl: allowing changeset ef1ea85a6374
478 acl: allowing changeset ef1ea85a6374
419 acl: allowing changeset f9cafe1212c8
479 acl: allowing changeset f9cafe1212c8
420 acl: allowing changeset 911600dab2ae
480 acl: allowing changeset 911600dab2ae
421 rolling back last transaction
481 rolling back last transaction
422 0:6675d58eff77
482 0:6675d58eff77
423
483
424 betty is allowed inside foo/ by a acl.config file
484 betty is allowed inside foo/ by a acl.config file
425 Pushing as user betty
485 Pushing as user betty
426 hgrc = """
486 hgrc = """
427 [hooks]
487 [hooks]
428 pretxnchangegroup.acl = python:hgext.acl.hook
488 pretxnchangegroup.acl = python:hgext.acl.hook
429 [acl]
489 [acl]
430 sources = push
490 sources = push
431 [acl.allow]
491 [acl.allow]
432 foo/** = fred
492 foo/** = fred
433 [acl.deny]
493 [acl.deny]
434 foo/bar/** = fred
494 foo/bar/** = fred
435 foo/Bar/** = fred
495 foo/Bar/** = fred
436 [acl.allow]
496 [acl.allow]
437 ** = barney
497 ** = barney
438 **/*.txt = wilma
498 **/*.txt = wilma
439 [acl]
499 [acl]
440 config = ../acl.config
500 config = ../acl.config
441 """
501 """
442 acl.config = """
502 acl.config = """
443 [acl.allow]
503 [acl.allow]
444 foo/** = betty
504 foo/** = betty
445 """
505 """
446 pushing to ../b
506 pushing to ../b
447 searching for changes
507 searching for changes
448 common changesets up to 6675d58eff77
508 common changesets up to 6675d58eff77
509 3 changesets found
510 List of changesets:
511 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
512 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
513 911600dab2ae7a9baff75958b84fe606851ce955
449 adding changesets
514 adding changesets
450 add changeset ef1ea85a6374
515 add changeset ef1ea85a6374
451 add changeset f9cafe1212c8
516 add changeset f9cafe1212c8
452 add changeset 911600dab2ae
517 add changeset 911600dab2ae
453 adding manifests
518 adding manifests
454 adding file changes
519 adding file changes
455 adding foo/Bar/file.txt revisions
520 adding foo/Bar/file.txt revisions
456 adding foo/file.txt revisions
521 adding foo/file.txt revisions
457 adding quux/file.py revisions
522 adding quux/file.py revisions
458 added 3 changesets with 3 changes to 3 files
523 added 3 changesets with 3 changes to 3 files
459 calling hook pretxnchangegroup.acl: hgext.acl.hook
524 calling hook pretxnchangegroup.acl: hgext.acl.hook
460 acl: acl.allow enabled, 1 entries for user betty
525 acl: acl.allow enabled, 1 entries for user betty
461 acl: acl.deny enabled, 0 entries for user betty
526 acl: acl.deny enabled, 0 entries for user betty
462 acl: allowing changeset ef1ea85a6374
527 acl: allowing changeset ef1ea85a6374
463 acl: allowing changeset f9cafe1212c8
528 acl: allowing changeset f9cafe1212c8
464 acl: user betty not allowed on quux/file.py
529 acl: user betty not allowed on quux/file.py
465 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
530 error: pretxnchangegroup.acl hook failed: acl: access denied for changeset 911600dab2ae
466 abort: acl: access denied for changeset 911600dab2ae
531 abort: acl: access denied for changeset 911600dab2ae
467 transaction abort!
532 transaction abort!
468 rollback completed
533 rollback completed
469 no rollback information available
534 no rollback information available
470 0:6675d58eff77
535 0:6675d58eff77
471
536
472 acl.config can set only [acl.allow]/[acl.deny]
537 acl.config can set only [acl.allow]/[acl.deny]
473 Pushing as user barney
538 Pushing as user barney
474 hgrc = """
539 hgrc = """
475 [hooks]
540 [hooks]
476 pretxnchangegroup.acl = python:hgext.acl.hook
541 pretxnchangegroup.acl = python:hgext.acl.hook
477 [acl]
542 [acl]
478 sources = push
543 sources = push
479 [acl.allow]
544 [acl.allow]
480 foo/** = fred
545 foo/** = fred
481 [acl.deny]
546 [acl.deny]
482 foo/bar/** = fred
547 foo/bar/** = fred
483 foo/Bar/** = fred
548 foo/Bar/** = fred
484 [acl.allow]
549 [acl.allow]
485 ** = barney
550 ** = barney
486 **/*.txt = wilma
551 **/*.txt = wilma
487 [acl]
552 [acl]
488 config = ../acl.config
553 config = ../acl.config
489 """
554 """
490 acl.config = """
555 acl.config = """
491 [acl.allow]
556 [acl.allow]
492 foo/** = betty
557 foo/** = betty
493 [hooks]
558 [hooks]
494 changegroup.acl = false
559 changegroup.acl = false
495 """
560 """
496 pushing to ../b
561 pushing to ../b
497 searching for changes
562 searching for changes
498 common changesets up to 6675d58eff77
563 common changesets up to 6675d58eff77
564 3 changesets found
565 List of changesets:
566 ef1ea85a6374b77d6da9dcda9541f498f2d17df7
567 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
568 911600dab2ae7a9baff75958b84fe606851ce955
499 adding changesets
569 adding changesets
500 add changeset ef1ea85a6374
570 add changeset ef1ea85a6374
501 add changeset f9cafe1212c8
571 add changeset f9cafe1212c8
502 add changeset 911600dab2ae
572 add changeset 911600dab2ae
503 adding manifests
573 adding manifests
504 adding file changes
574 adding file changes
505 adding foo/Bar/file.txt revisions
575 adding foo/Bar/file.txt revisions
506 adding foo/file.txt revisions
576 adding foo/file.txt revisions
507 adding quux/file.py revisions
577 adding quux/file.py revisions
508 added 3 changesets with 3 changes to 3 files
578 added 3 changesets with 3 changes to 3 files
509 calling hook pretxnchangegroup.acl: hgext.acl.hook
579 calling hook pretxnchangegroup.acl: hgext.acl.hook
510 acl: acl.allow enabled, 1 entries for user barney
580 acl: acl.allow enabled, 1 entries for user barney
511 acl: acl.deny enabled, 0 entries for user barney
581 acl: acl.deny enabled, 0 entries for user barney
512 acl: allowing changeset ef1ea85a6374
582 acl: allowing changeset ef1ea85a6374
513 acl: allowing changeset f9cafe1212c8
583 acl: allowing changeset f9cafe1212c8
514 acl: allowing changeset 911600dab2ae
584 acl: allowing changeset 911600dab2ae
515 rolling back last transaction
585 rolling back last transaction
516 0:6675d58eff77
586 0:6675d58eff77
517
587
General Comments 0
You need to be logged in to leave comments. Login now