##// END OF EJS Templates
Handle functions as the value of a hooks.<name> config variable...
Alexis S. L. Carvalho -
r4070:961ccb61 default
parent child Browse files
Show More
@@ -1,1903 +1,1907 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, appendfile, changegroup
10 import repo, appendfile, changegroup
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 import os, revlog, time, util
13 import os, revlog, time, util
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = ('lookup', 'changegroupsubset')
16 capabilities = ('lookup', 'changegroupsubset')
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
19 def __del__(self):
19 def __del__(self):
20 self.transhandle = None
20 self.transhandle = None
21 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
22 repo.repository.__init__(self)
22 repo.repository.__init__(self)
23 if not path:
23 if not path:
24 p = os.getcwd()
24 p = os.getcwd()
25 while not os.path.isdir(os.path.join(p, ".hg")):
25 while not os.path.isdir(os.path.join(p, ".hg")):
26 oldp = p
26 oldp = p
27 p = os.path.dirname(p)
27 p = os.path.dirname(p)
28 if p == oldp:
28 if p == oldp:
29 raise repo.RepoError(_("There is no Mercurial repository"
29 raise repo.RepoError(_("There is no Mercurial repository"
30 " here (.hg not found)"))
30 " here (.hg not found)"))
31 path = p
31 path = p
32
32
33 self.path = os.path.join(path, ".hg")
33 self.path = os.path.join(path, ".hg")
34 self.root = os.path.realpath(path)
34 self.root = os.path.realpath(path)
35 self.origroot = path
35 self.origroot = path
36 self.opener = util.opener(self.path)
36 self.opener = util.opener(self.path)
37 self.wopener = util.opener(self.root)
37 self.wopener = util.opener(self.root)
38
38
39 if not os.path.isdir(self.path):
39 if not os.path.isdir(self.path):
40 if create:
40 if create:
41 if not os.path.exists(path):
41 if not os.path.exists(path):
42 os.mkdir(path)
42 os.mkdir(path)
43 os.mkdir(self.path)
43 os.mkdir(self.path)
44 os.mkdir(os.path.join(self.path, "store"))
44 os.mkdir(os.path.join(self.path, "store"))
45 requirements = ("revlogv1", "store")
45 requirements = ("revlogv1", "store")
46 reqfile = self.opener("requires", "w")
46 reqfile = self.opener("requires", "w")
47 for r in requirements:
47 for r in requirements:
48 reqfile.write("%s\n" % r)
48 reqfile.write("%s\n" % r)
49 reqfile.close()
49 reqfile.close()
50 # create an invalid changelog
50 # create an invalid changelog
51 self.opener("00changelog.i", "a").write(
51 self.opener("00changelog.i", "a").write(
52 '\0\0\0\2' # represents revlogv2
52 '\0\0\0\2' # represents revlogv2
53 ' dummy changelog to prevent using the old repo layout'
53 ' dummy changelog to prevent using the old repo layout'
54 )
54 )
55 else:
55 else:
56 raise repo.RepoError(_("repository %s not found") % path)
56 raise repo.RepoError(_("repository %s not found") % path)
57 elif create:
57 elif create:
58 raise repo.RepoError(_("repository %s already exists") % path)
58 raise repo.RepoError(_("repository %s already exists") % path)
59 else:
59 else:
60 # find requirements
60 # find requirements
61 try:
61 try:
62 requirements = self.opener("requires").read().splitlines()
62 requirements = self.opener("requires").read().splitlines()
63 except IOError, inst:
63 except IOError, inst:
64 if inst.errno != errno.ENOENT:
64 if inst.errno != errno.ENOENT:
65 raise
65 raise
66 requirements = []
66 requirements = []
67 # check them
67 # check them
68 for r in requirements:
68 for r in requirements:
69 if r not in self.supported:
69 if r not in self.supported:
70 raise repo.RepoError(_("requirement '%s' not supported") % r)
70 raise repo.RepoError(_("requirement '%s' not supported") % r)
71
71
72 # setup store
72 # setup store
73 if "store" in requirements:
73 if "store" in requirements:
74 self.encodefn = util.encodefilename
74 self.encodefn = util.encodefilename
75 self.decodefn = util.decodefilename
75 self.decodefn = util.decodefilename
76 self.spath = os.path.join(self.path, "store")
76 self.spath = os.path.join(self.path, "store")
77 else:
77 else:
78 self.encodefn = lambda x: x
78 self.encodefn = lambda x: x
79 self.decodefn = lambda x: x
79 self.decodefn = lambda x: x
80 self.spath = self.path
80 self.spath = self.path
81 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
81 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
82
82
83 self.ui = ui.ui(parentui=parentui)
83 self.ui = ui.ui(parentui=parentui)
84 try:
84 try:
85 self.ui.readconfig(self.join("hgrc"), self.root)
85 self.ui.readconfig(self.join("hgrc"), self.root)
86 except IOError:
86 except IOError:
87 pass
87 pass
88
88
89 v = self.ui.configrevlog()
89 v = self.ui.configrevlog()
90 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
90 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
91 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
91 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
92 fl = v.get('flags', None)
92 fl = v.get('flags', None)
93 flags = 0
93 flags = 0
94 if fl != None:
94 if fl != None:
95 for x in fl.split():
95 for x in fl.split():
96 flags |= revlog.flagstr(x)
96 flags |= revlog.flagstr(x)
97 elif self.revlogv1:
97 elif self.revlogv1:
98 flags = revlog.REVLOG_DEFAULT_FLAGS
98 flags = revlog.REVLOG_DEFAULT_FLAGS
99
99
100 v = self.revlogversion | flags
100 v = self.revlogversion | flags
101 self.manifest = manifest.manifest(self.sopener, v)
101 self.manifest = manifest.manifest(self.sopener, v)
102 self.changelog = changelog.changelog(self.sopener, v)
102 self.changelog = changelog.changelog(self.sopener, v)
103
103
104 fallback = self.ui.config('ui', 'fallbackencoding')
104 fallback = self.ui.config('ui', 'fallbackencoding')
105 if fallback:
105 if fallback:
106 util._fallbackencoding = fallback
106 util._fallbackencoding = fallback
107
107
108 # the changelog might not have the inline index flag
108 # the changelog might not have the inline index flag
109 # on. If the format of the changelog is the same as found in
109 # on. If the format of the changelog is the same as found in
110 # .hgrc, apply any flags found in the .hgrc as well.
110 # .hgrc, apply any flags found in the .hgrc as well.
111 # Otherwise, just version from the changelog
111 # Otherwise, just version from the changelog
112 v = self.changelog.version
112 v = self.changelog.version
113 if v == self.revlogversion:
113 if v == self.revlogversion:
114 v |= flags
114 v |= flags
115 self.revlogversion = v
115 self.revlogversion = v
116
116
117 self.tagscache = None
117 self.tagscache = None
118 self.branchcache = None
118 self.branchcache = None
119 self.nodetagscache = None
119 self.nodetagscache = None
120 self.filterpats = {}
120 self.filterpats = {}
121 self.transhandle = None
121 self.transhandle = None
122
122
123 self._link = lambda x: False
123 self._link = lambda x: False
124 if util.checklink(self.root):
124 if util.checklink(self.root):
125 r = self.root # avoid circular reference in lambda
125 r = self.root # avoid circular reference in lambda
126 self._link = lambda x: util.is_link(os.path.join(r, x))
126 self._link = lambda x: util.is_link(os.path.join(r, x))
127
127
128 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
128 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
129
129
130 def url(self):
130 def url(self):
131 return 'file:' + self.root
131 return 'file:' + self.root
132
132
133 def hook(self, name, throw=False, **args):
133 def hook(self, name, throw=False, **args):
134 def callhook(hname, funcname):
134 def callhook(hname, funcname):
135 '''call python hook. hook is callable object, looked up as
135 '''call python hook. hook is callable object, looked up as
136 name in python module. if callable returns "true", hook
136 name in python module. if callable returns "true", hook
137 fails, else passes. if hook raises exception, treated as
137 fails, else passes. if hook raises exception, treated as
138 hook failure. exception propagates if throw is "true".
138 hook failure. exception propagates if throw is "true".
139
139
140 reason for "true" meaning "hook failed" is so that
140 reason for "true" meaning "hook failed" is so that
141 unmodified commands (e.g. mercurial.commands.update) can
141 unmodified commands (e.g. mercurial.commands.update) can
142 be run as hooks without wrappers to convert return values.'''
142 be run as hooks without wrappers to convert return values.'''
143
143
144 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
144 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
145 d = funcname.rfind('.')
145 obj = funcname
146 if d == -1:
146 if not callable(obj):
147 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
147 d = funcname.rfind('.')
148 % (hname, funcname))
148 if d == -1:
149 modname = funcname[:d]
149 raise util.Abort(_('%s hook is invalid ("%s" not in '
150 try:
150 'a module)') % (hname, funcname))
151 obj = __import__(modname)
151 modname = funcname[:d]
152 except ImportError:
153 try:
152 try:
154 # extensions are loaded with hgext_ prefix
153 obj = __import__(modname)
155 obj = __import__("hgext_%s" % modname)
156 except ImportError:
154 except ImportError:
155 try:
156 # extensions are loaded with hgext_ prefix
157 obj = __import__("hgext_%s" % modname)
158 except ImportError:
159 raise util.Abort(_('%s hook is invalid '
160 '(import of "%s" failed)') %
161 (hname, modname))
162 try:
163 for p in funcname.split('.')[1:]:
164 obj = getattr(obj, p)
165 except AttributeError, err:
157 raise util.Abort(_('%s hook is invalid '
166 raise util.Abort(_('%s hook is invalid '
158 '(import of "%s" failed)') %
167 '("%s" is not defined)') %
159 (hname, modname))
168 (hname, funcname))
160 try:
169 if not callable(obj):
161 for p in funcname.split('.')[1:]:
170 raise util.Abort(_('%s hook is invalid '
162 obj = getattr(obj, p)
171 '("%s" is not callable)') %
163 except AttributeError, err:
172 (hname, funcname))
164 raise util.Abort(_('%s hook is invalid '
165 '("%s" is not defined)') %
166 (hname, funcname))
167 if not callable(obj):
168 raise util.Abort(_('%s hook is invalid '
169 '("%s" is not callable)') %
170 (hname, funcname))
171 try:
173 try:
172 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
174 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
173 except (KeyboardInterrupt, util.SignalInterrupt):
175 except (KeyboardInterrupt, util.SignalInterrupt):
174 raise
176 raise
175 except Exception, exc:
177 except Exception, exc:
176 if isinstance(exc, util.Abort):
178 if isinstance(exc, util.Abort):
177 self.ui.warn(_('error: %s hook failed: %s\n') %
179 self.ui.warn(_('error: %s hook failed: %s\n') %
178 (hname, exc.args[0]))
180 (hname, exc.args[0]))
179 else:
181 else:
180 self.ui.warn(_('error: %s hook raised an exception: '
182 self.ui.warn(_('error: %s hook raised an exception: '
181 '%s\n') % (hname, exc))
183 '%s\n') % (hname, exc))
182 if throw:
184 if throw:
183 raise
185 raise
184 self.ui.print_exc()
186 self.ui.print_exc()
185 return True
187 return True
186 if r:
188 if r:
187 if throw:
189 if throw:
188 raise util.Abort(_('%s hook failed') % hname)
190 raise util.Abort(_('%s hook failed') % hname)
189 self.ui.warn(_('warning: %s hook failed\n') % hname)
191 self.ui.warn(_('warning: %s hook failed\n') % hname)
190 return r
192 return r
191
193
192 def runhook(name, cmd):
194 def runhook(name, cmd):
193 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
195 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
194 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
196 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
195 r = util.system(cmd, environ=env, cwd=self.root)
197 r = util.system(cmd, environ=env, cwd=self.root)
196 if r:
198 if r:
197 desc, r = util.explain_exit(r)
199 desc, r = util.explain_exit(r)
198 if throw:
200 if throw:
199 raise util.Abort(_('%s hook %s') % (name, desc))
201 raise util.Abort(_('%s hook %s') % (name, desc))
200 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
202 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
201 return r
203 return r
202
204
203 r = False
205 r = False
204 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
206 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
205 if hname.split(".", 1)[0] == name and cmd]
207 if hname.split(".", 1)[0] == name and cmd]
206 hooks.sort()
208 hooks.sort()
207 for hname, cmd in hooks:
209 for hname, cmd in hooks:
208 if cmd.startswith('python:'):
210 if callable(cmd):
211 r = callhook(hname, cmd) or r
212 elif cmd.startswith('python:'):
209 r = callhook(hname, cmd[7:].strip()) or r
213 r = callhook(hname, cmd[7:].strip()) or r
210 else:
214 else:
211 r = runhook(hname, cmd) or r
215 r = runhook(hname, cmd) or r
212 return r
216 return r
213
217
214 tag_disallowed = ':\r\n'
218 tag_disallowed = ':\r\n'
215
219
216 def tag(self, name, node, message, local, user, date):
220 def tag(self, name, node, message, local, user, date):
217 '''tag a revision with a symbolic name.
221 '''tag a revision with a symbolic name.
218
222
219 if local is True, the tag is stored in a per-repository file.
223 if local is True, the tag is stored in a per-repository file.
220 otherwise, it is stored in the .hgtags file, and a new
224 otherwise, it is stored in the .hgtags file, and a new
221 changeset is committed with the change.
225 changeset is committed with the change.
222
226
223 keyword arguments:
227 keyword arguments:
224
228
225 local: whether to store tag in non-version-controlled file
229 local: whether to store tag in non-version-controlled file
226 (default False)
230 (default False)
227
231
228 message: commit message to use if committing
232 message: commit message to use if committing
229
233
230 user: name of user to use if committing
234 user: name of user to use if committing
231
235
232 date: date tuple to use if committing'''
236 date: date tuple to use if committing'''
233
237
234 for c in self.tag_disallowed:
238 for c in self.tag_disallowed:
235 if c in name:
239 if c in name:
236 raise util.Abort(_('%r cannot be used in a tag name') % c)
240 raise util.Abort(_('%r cannot be used in a tag name') % c)
237
241
238 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
242 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
239
243
240 if local:
244 if local:
241 # local tags are stored in the current charset
245 # local tags are stored in the current charset
242 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
246 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
243 self.hook('tag', node=hex(node), tag=name, local=local)
247 self.hook('tag', node=hex(node), tag=name, local=local)
244 return
248 return
245
249
246 for x in self.status()[:5]:
250 for x in self.status()[:5]:
247 if '.hgtags' in x:
251 if '.hgtags' in x:
248 raise util.Abort(_('working copy of .hgtags is changed '
252 raise util.Abort(_('working copy of .hgtags is changed '
249 '(please commit .hgtags manually)'))
253 '(please commit .hgtags manually)'))
250
254
251 # committed tags are stored in UTF-8
255 # committed tags are stored in UTF-8
252 line = '%s %s\n' % (hex(node), util.fromlocal(name))
256 line = '%s %s\n' % (hex(node), util.fromlocal(name))
253 self.wfile('.hgtags', 'ab').write(line)
257 self.wfile('.hgtags', 'ab').write(line)
254 if self.dirstate.state('.hgtags') == '?':
258 if self.dirstate.state('.hgtags') == '?':
255 self.add(['.hgtags'])
259 self.add(['.hgtags'])
256
260
257 self.commit(['.hgtags'], message, user, date)
261 self.commit(['.hgtags'], message, user, date)
258 self.hook('tag', node=hex(node), tag=name, local=local)
262 self.hook('tag', node=hex(node), tag=name, local=local)
259
263
260 def tags(self):
264 def tags(self):
261 '''return a mapping of tag to node'''
265 '''return a mapping of tag to node'''
262 if not self.tagscache:
266 if not self.tagscache:
263 self.tagscache = {}
267 self.tagscache = {}
264
268
265 def parsetag(line, context):
269 def parsetag(line, context):
266 if not line:
270 if not line:
267 return
271 return
268 s = l.split(" ", 1)
272 s = l.split(" ", 1)
269 if len(s) != 2:
273 if len(s) != 2:
270 self.ui.warn(_("%s: cannot parse entry\n") % context)
274 self.ui.warn(_("%s: cannot parse entry\n") % context)
271 return
275 return
272 node, key = s
276 node, key = s
273 key = util.tolocal(key.strip()) # stored in UTF-8
277 key = util.tolocal(key.strip()) # stored in UTF-8
274 try:
278 try:
275 bin_n = bin(node)
279 bin_n = bin(node)
276 except TypeError:
280 except TypeError:
277 self.ui.warn(_("%s: node '%s' is not well formed\n") %
281 self.ui.warn(_("%s: node '%s' is not well formed\n") %
278 (context, node))
282 (context, node))
279 return
283 return
280 if bin_n not in self.changelog.nodemap:
284 if bin_n not in self.changelog.nodemap:
281 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
285 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
282 (context, key))
286 (context, key))
283 return
287 return
284 self.tagscache[key] = bin_n
288 self.tagscache[key] = bin_n
285
289
286 # read the tags file from each head, ending with the tip,
290 # read the tags file from each head, ending with the tip,
287 # and add each tag found to the map, with "newer" ones
291 # and add each tag found to the map, with "newer" ones
288 # taking precedence
292 # taking precedence
289 f = None
293 f = None
290 for rev, node, fnode in self._hgtagsnodes():
294 for rev, node, fnode in self._hgtagsnodes():
291 f = (f and f.filectx(fnode) or
295 f = (f and f.filectx(fnode) or
292 self.filectx('.hgtags', fileid=fnode))
296 self.filectx('.hgtags', fileid=fnode))
293 count = 0
297 count = 0
294 for l in f.data().splitlines():
298 for l in f.data().splitlines():
295 count += 1
299 count += 1
296 parsetag(l, _("%s, line %d") % (str(f), count))
300 parsetag(l, _("%s, line %d") % (str(f), count))
297
301
298 try:
302 try:
299 f = self.opener("localtags")
303 f = self.opener("localtags")
300 count = 0
304 count = 0
301 for l in f:
305 for l in f:
302 # localtags are stored in the local character set
306 # localtags are stored in the local character set
303 # while the internal tag table is stored in UTF-8
307 # while the internal tag table is stored in UTF-8
304 l = util.fromlocal(l)
308 l = util.fromlocal(l)
305 count += 1
309 count += 1
306 parsetag(l, _("localtags, line %d") % count)
310 parsetag(l, _("localtags, line %d") % count)
307 except IOError:
311 except IOError:
308 pass
312 pass
309
313
310 self.tagscache['tip'] = self.changelog.tip()
314 self.tagscache['tip'] = self.changelog.tip()
311
315
312 return self.tagscache
316 return self.tagscache
313
317
314 def _hgtagsnodes(self):
318 def _hgtagsnodes(self):
315 heads = self.heads()
319 heads = self.heads()
316 heads.reverse()
320 heads.reverse()
317 last = {}
321 last = {}
318 ret = []
322 ret = []
319 for node in heads:
323 for node in heads:
320 c = self.changectx(node)
324 c = self.changectx(node)
321 rev = c.rev()
325 rev = c.rev()
322 try:
326 try:
323 fnode = c.filenode('.hgtags')
327 fnode = c.filenode('.hgtags')
324 except revlog.LookupError:
328 except revlog.LookupError:
325 continue
329 continue
326 ret.append((rev, node, fnode))
330 ret.append((rev, node, fnode))
327 if fnode in last:
331 if fnode in last:
328 ret[last[fnode]] = None
332 ret[last[fnode]] = None
329 last[fnode] = len(ret) - 1
333 last[fnode] = len(ret) - 1
330 return [item for item in ret if item]
334 return [item for item in ret if item]
331
335
332 def tagslist(self):
336 def tagslist(self):
333 '''return a list of tags ordered by revision'''
337 '''return a list of tags ordered by revision'''
334 l = []
338 l = []
335 for t, n in self.tags().items():
339 for t, n in self.tags().items():
336 try:
340 try:
337 r = self.changelog.rev(n)
341 r = self.changelog.rev(n)
338 except:
342 except:
339 r = -2 # sort to the beginning of the list if unknown
343 r = -2 # sort to the beginning of the list if unknown
340 l.append((r, t, n))
344 l.append((r, t, n))
341 l.sort()
345 l.sort()
342 return [(t, n) for r, t, n in l]
346 return [(t, n) for r, t, n in l]
343
347
344 def nodetags(self, node):
348 def nodetags(self, node):
345 '''return the tags associated with a node'''
349 '''return the tags associated with a node'''
346 if not self.nodetagscache:
350 if not self.nodetagscache:
347 self.nodetagscache = {}
351 self.nodetagscache = {}
348 for t, n in self.tags().items():
352 for t, n in self.tags().items():
349 self.nodetagscache.setdefault(n, []).append(t)
353 self.nodetagscache.setdefault(n, []).append(t)
350 return self.nodetagscache.get(node, [])
354 return self.nodetagscache.get(node, [])
351
355
352 def _branchtags(self):
356 def _branchtags(self):
353 partial, last, lrev = self._readbranchcache()
357 partial, last, lrev = self._readbranchcache()
354
358
355 tiprev = self.changelog.count() - 1
359 tiprev = self.changelog.count() - 1
356 if lrev != tiprev:
360 if lrev != tiprev:
357 self._updatebranchcache(partial, lrev+1, tiprev+1)
361 self._updatebranchcache(partial, lrev+1, tiprev+1)
358 self._writebranchcache(partial, self.changelog.tip(), tiprev)
362 self._writebranchcache(partial, self.changelog.tip(), tiprev)
359
363
360 return partial
364 return partial
361
365
362 def branchtags(self):
366 def branchtags(self):
363 if self.branchcache is not None:
367 if self.branchcache is not None:
364 return self.branchcache
368 return self.branchcache
365
369
366 self.branchcache = {} # avoid recursion in changectx
370 self.branchcache = {} # avoid recursion in changectx
367 partial = self._branchtags()
371 partial = self._branchtags()
368
372
369 # the branch cache is stored on disk as UTF-8, but in the local
373 # the branch cache is stored on disk as UTF-8, but in the local
370 # charset internally
374 # charset internally
371 for k, v in partial.items():
375 for k, v in partial.items():
372 self.branchcache[util.tolocal(k)] = v
376 self.branchcache[util.tolocal(k)] = v
373 return self.branchcache
377 return self.branchcache
374
378
375 def _readbranchcache(self):
379 def _readbranchcache(self):
376 partial = {}
380 partial = {}
377 try:
381 try:
378 f = self.opener("branches.cache")
382 f = self.opener("branches.cache")
379 lines = f.read().split('\n')
383 lines = f.read().split('\n')
380 f.close()
384 f.close()
381 last, lrev = lines.pop(0).rstrip().split(" ", 1)
385 last, lrev = lines.pop(0).rstrip().split(" ", 1)
382 last, lrev = bin(last), int(lrev)
386 last, lrev = bin(last), int(lrev)
383 if not (lrev < self.changelog.count() and
387 if not (lrev < self.changelog.count() and
384 self.changelog.node(lrev) == last): # sanity check
388 self.changelog.node(lrev) == last): # sanity check
385 # invalidate the cache
389 # invalidate the cache
386 raise ValueError('Invalid branch cache: unknown tip')
390 raise ValueError('Invalid branch cache: unknown tip')
387 for l in lines:
391 for l in lines:
388 if not l: continue
392 if not l: continue
389 node, label = l.rstrip().split(" ", 1)
393 node, label = l.rstrip().split(" ", 1)
390 partial[label] = bin(node)
394 partial[label] = bin(node)
391 except (KeyboardInterrupt, util.SignalInterrupt):
395 except (KeyboardInterrupt, util.SignalInterrupt):
392 raise
396 raise
393 except Exception, inst:
397 except Exception, inst:
394 if self.ui.debugflag:
398 if self.ui.debugflag:
395 self.ui.warn(str(inst), '\n')
399 self.ui.warn(str(inst), '\n')
396 partial, last, lrev = {}, nullid, nullrev
400 partial, last, lrev = {}, nullid, nullrev
397 return partial, last, lrev
401 return partial, last, lrev
398
402
399 def _writebranchcache(self, branches, tip, tiprev):
403 def _writebranchcache(self, branches, tip, tiprev):
400 try:
404 try:
401 f = self.opener("branches.cache", "w")
405 f = self.opener("branches.cache", "w")
402 f.write("%s %s\n" % (hex(tip), tiprev))
406 f.write("%s %s\n" % (hex(tip), tiprev))
403 for label, node in branches.iteritems():
407 for label, node in branches.iteritems():
404 f.write("%s %s\n" % (hex(node), label))
408 f.write("%s %s\n" % (hex(node), label))
405 except IOError:
409 except IOError:
406 pass
410 pass
407
411
408 def _updatebranchcache(self, partial, start, end):
412 def _updatebranchcache(self, partial, start, end):
409 for r in xrange(start, end):
413 for r in xrange(start, end):
410 c = self.changectx(r)
414 c = self.changectx(r)
411 b = c.branch()
415 b = c.branch()
412 if b:
416 if b:
413 partial[b] = c.node()
417 partial[b] = c.node()
414
418
415 def lookup(self, key):
419 def lookup(self, key):
416 if key == '.':
420 if key == '.':
417 key = self.dirstate.parents()[0]
421 key = self.dirstate.parents()[0]
418 if key == nullid:
422 if key == nullid:
419 raise repo.RepoError(_("no revision checked out"))
423 raise repo.RepoError(_("no revision checked out"))
420 elif key == 'null':
424 elif key == 'null':
421 return nullid
425 return nullid
422 n = self.changelog._match(key)
426 n = self.changelog._match(key)
423 if n:
427 if n:
424 return n
428 return n
425 if key in self.tags():
429 if key in self.tags():
426 return self.tags()[key]
430 return self.tags()[key]
427 if key in self.branchtags():
431 if key in self.branchtags():
428 return self.branchtags()[key]
432 return self.branchtags()[key]
429 n = self.changelog._partialmatch(key)
433 n = self.changelog._partialmatch(key)
430 if n:
434 if n:
431 return n
435 return n
432 raise repo.RepoError(_("unknown revision '%s'") % key)
436 raise repo.RepoError(_("unknown revision '%s'") % key)
433
437
434 def dev(self):
438 def dev(self):
435 return os.lstat(self.path).st_dev
439 return os.lstat(self.path).st_dev
436
440
437 def local(self):
441 def local(self):
438 return True
442 return True
439
443
440 def join(self, f):
444 def join(self, f):
441 return os.path.join(self.path, f)
445 return os.path.join(self.path, f)
442
446
443 def sjoin(self, f):
447 def sjoin(self, f):
444 f = self.encodefn(f)
448 f = self.encodefn(f)
445 return os.path.join(self.spath, f)
449 return os.path.join(self.spath, f)
446
450
447 def wjoin(self, f):
451 def wjoin(self, f):
448 return os.path.join(self.root, f)
452 return os.path.join(self.root, f)
449
453
450 def file(self, f):
454 def file(self, f):
451 if f[0] == '/':
455 if f[0] == '/':
452 f = f[1:]
456 f = f[1:]
453 return filelog.filelog(self.sopener, f, self.revlogversion)
457 return filelog.filelog(self.sopener, f, self.revlogversion)
454
458
455 def changectx(self, changeid=None):
459 def changectx(self, changeid=None):
456 return context.changectx(self, changeid)
460 return context.changectx(self, changeid)
457
461
458 def workingctx(self):
462 def workingctx(self):
459 return context.workingctx(self)
463 return context.workingctx(self)
460
464
461 def parents(self, changeid=None):
465 def parents(self, changeid=None):
462 '''
466 '''
463 get list of changectxs for parents of changeid or working directory
467 get list of changectxs for parents of changeid or working directory
464 '''
468 '''
465 if changeid is None:
469 if changeid is None:
466 pl = self.dirstate.parents()
470 pl = self.dirstate.parents()
467 else:
471 else:
468 n = self.changelog.lookup(changeid)
472 n = self.changelog.lookup(changeid)
469 pl = self.changelog.parents(n)
473 pl = self.changelog.parents(n)
470 if pl[1] == nullid:
474 if pl[1] == nullid:
471 return [self.changectx(pl[0])]
475 return [self.changectx(pl[0])]
472 return [self.changectx(pl[0]), self.changectx(pl[1])]
476 return [self.changectx(pl[0]), self.changectx(pl[1])]
473
477
474 def filectx(self, path, changeid=None, fileid=None):
478 def filectx(self, path, changeid=None, fileid=None):
475 """changeid can be a changeset revision, node, or tag.
479 """changeid can be a changeset revision, node, or tag.
476 fileid can be a file revision or node."""
480 fileid can be a file revision or node."""
477 return context.filectx(self, path, changeid, fileid)
481 return context.filectx(self, path, changeid, fileid)
478
482
479 def getcwd(self):
483 def getcwd(self):
480 return self.dirstate.getcwd()
484 return self.dirstate.getcwd()
481
485
482 def wfile(self, f, mode='r'):
486 def wfile(self, f, mode='r'):
483 return self.wopener(f, mode)
487 return self.wopener(f, mode)
484
488
485 def _filter(self, filter, filename, data):
489 def _filter(self, filter, filename, data):
486 if filter not in self.filterpats:
490 if filter not in self.filterpats:
487 l = []
491 l = []
488 for pat, cmd in self.ui.configitems(filter):
492 for pat, cmd in self.ui.configitems(filter):
489 mf = util.matcher(self.root, "", [pat], [], [])[1]
493 mf = util.matcher(self.root, "", [pat], [], [])[1]
490 l.append((mf, cmd))
494 l.append((mf, cmd))
491 self.filterpats[filter] = l
495 self.filterpats[filter] = l
492
496
493 for mf, cmd in self.filterpats[filter]:
497 for mf, cmd in self.filterpats[filter]:
494 if mf(filename):
498 if mf(filename):
495 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
499 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
496 data = util.filter(data, cmd)
500 data = util.filter(data, cmd)
497 break
501 break
498
502
499 return data
503 return data
500
504
501 def wread(self, filename):
505 def wread(self, filename):
502 if self._link(filename):
506 if self._link(filename):
503 data = os.readlink(self.wjoin(filename))
507 data = os.readlink(self.wjoin(filename))
504 else:
508 else:
505 data = self.wopener(filename, 'r').read()
509 data = self.wopener(filename, 'r').read()
506 return self._filter("encode", filename, data)
510 return self._filter("encode", filename, data)
507
511
508 def wwrite(self, filename, data, flags):
512 def wwrite(self, filename, data, flags):
509 data = self._filter("decode", filename, data)
513 data = self._filter("decode", filename, data)
510 if "l" in flags:
514 if "l" in flags:
511 try:
515 try:
512 os.unlink(self.wjoin(filename))
516 os.unlink(self.wjoin(filename))
513 except OSError:
517 except OSError:
514 pass
518 pass
515 os.symlink(data, self.wjoin(filename))
519 os.symlink(data, self.wjoin(filename))
516 else:
520 else:
517 try:
521 try:
518 if self._link(filename):
522 if self._link(filename):
519 os.unlink(self.wjoin(filename))
523 os.unlink(self.wjoin(filename))
520 except OSError:
524 except OSError:
521 pass
525 pass
522 self.wopener(filename, 'w').write(data)
526 self.wopener(filename, 'w').write(data)
523 util.set_exec(self.wjoin(filename), "x" in flags)
527 util.set_exec(self.wjoin(filename), "x" in flags)
524
528
525 def wwritedata(self, filename, data):
529 def wwritedata(self, filename, data):
526 return self._filter("decode", filename, data)
530 return self._filter("decode", filename, data)
527
531
    def transaction(self):
        """Return a transaction for this repository.

        If a transaction is already running, return a nested handle on
        it instead of starting a new one.  Otherwise snapshot the
        dirstate so rollback() can restore it later.
        """
        # reuse an in-progress transaction by nesting inside it
        tr = self.transhandle
        if tr != None and tr.running():
            return tr.nest()

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            # no dirstate yet (e.g. brand-new repository)
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)

        # on successful close the journal files are renamed to "undo"
        # files, which is what rollback() later consumes
        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                       self.sjoin("journal"),
                                       aftertrans(renames))
        self.transhandle = tr
        return tr
547
551
548 def recover(self):
552 def recover(self):
549 l = self.lock()
553 l = self.lock()
550 if os.path.exists(self.sjoin("journal")):
554 if os.path.exists(self.sjoin("journal")):
551 self.ui.status(_("rolling back interrupted transaction\n"))
555 self.ui.status(_("rolling back interrupted transaction\n"))
552 transaction.rollback(self.sopener, self.sjoin("journal"))
556 transaction.rollback(self.sopener, self.sjoin("journal"))
553 self.reload()
557 self.reload()
554 return True
558 return True
555 else:
559 else:
556 self.ui.warn(_("no interrupted transaction available\n"))
560 self.ui.warn(_("no interrupted transaction available\n"))
557 return False
561 return False
558
562
559 def rollback(self, wlock=None):
563 def rollback(self, wlock=None):
560 if not wlock:
564 if not wlock:
561 wlock = self.wlock()
565 wlock = self.wlock()
562 l = self.lock()
566 l = self.lock()
563 if os.path.exists(self.sjoin("undo")):
567 if os.path.exists(self.sjoin("undo")):
564 self.ui.status(_("rolling back last transaction\n"))
568 self.ui.status(_("rolling back last transaction\n"))
565 transaction.rollback(self.sopener, self.sjoin("undo"))
569 transaction.rollback(self.sopener, self.sjoin("undo"))
566 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
570 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
567 self.reload()
571 self.reload()
568 self.wreload()
572 self.wreload()
569 else:
573 else:
570 self.ui.warn(_("no rollback information available\n"))
574 self.ui.warn(_("no rollback information available\n"))
571
575
    def wreload(self):
        """Re-read working-directory state (the dirstate) from disk."""
        self.dirstate.read()
574
578
575 def reload(self):
579 def reload(self):
576 self.changelog.load()
580 self.changelog.load()
577 self.manifest.load()
581 self.manifest.load()
578 self.tagscache = None
582 self.tagscache = None
579 self.nodetagscache = None
583 self.nodetagscache = None
580
584
    def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
                desc=None):
        """Acquire the lock file `lockname'.

        A non-blocking acquire is tried first; if the lock is held and
        `wait' is true, warn the user who holds it and retry with the
        configured ui.timeout (default 600 seconds).  `releasefn' runs
        when the lock is released, `acquirefn' right after acquisition.
        Returns the lock object; the lock is dropped when it is
        garbage-collected or released.
        """
        try:
            # zero timeout: fail immediately so we can report the holder
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
596
600
597 def lock(self, wait=1):
601 def lock(self, wait=1):
598 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
602 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
599 desc=_('repository %s') % self.origroot)
603 desc=_('repository %s') % self.origroot)
600
604
601 def wlock(self, wait=1):
605 def wlock(self, wait=1):
602 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
606 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
603 self.wreload,
607 self.wreload,
604 desc=_('working directory of %s') % self.origroot)
608 desc=_('working directory of %s') % self.origroot)
605
609
    def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
        """
        commit an individual file as part of a larger transaction

        fn is the file name; manifest1/manifest2 are the manifests of
        the commit's parents; linkrev is the changelog revision being
        created.  If the file content actually changed, fn is appended
        to changelist and the new filelog node is returned; otherwise
        the existing parent filelog node is returned unchanged.
        """

        t = self.wread(fn)
        fl = self.file(fn)
        # filelog parents; nullid when the file is absent from a parent
        fp1 = manifest1.get(fn, nullid)
        fp2 = manifest2.get(fn, nullid)

        meta = {}
        cp = self.dirstate.copied(fn)
        if cp:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4        as the merge base
            #
            meta["copy"] = cp
            if not manifest2: # not a branch merge
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
                fp2 = nullid
            elif fp2 != nullid: # copied on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            elif fp1 != nullid: # copied on local side, reversed
                meta["copyrev"] = hex(manifest2.get(cp))
                fp2 = fp1
            else: # directory rename
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
                self.ui.debug(_(" %s: copy %s:%s\n") %
                              (fn, cp, meta["copyrev"]))
                fp1 = nullid
        elif fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = fl.ancestor(fp1, fp2)
            if fpa == fp1:
                # fp1 is an ancestor of fp2: record only the newer parent
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and not fl.cmp(fp1, t):
            return fp1

        changelist.append(fn)
        return fl.add(t, meta, transaction, linkrev, fp1, fp2)
665
669
666 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
670 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
667 if p1 is None:
671 if p1 is None:
668 p1, p2 = self.dirstate.parents()
672 p1, p2 = self.dirstate.parents()
669 return self.commit(files=files, text=text, user=user, date=date,
673 return self.commit(files=files, text=text, user=user, date=date,
670 p1=p1, p2=p2, wlock=wlock, extra=extra)
674 p1=p1, p2=p2, wlock=wlock, extra=extra)
671
675
    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, lock=None, wlock=None,
               force_editor=False, p1=None, p2=None, extra={}):
        """Create a new changeset and return its node.

        With p1 unset, files/match are resolved against the dirstate;
        with p1 given (the rawcommit path) files are committed as-is.
        Returns None when there is nothing to commit or the user
        supplied an empty commit message.
        """
        # NOTE: local name `commit' shadows this method and `lock'
        # shadows the lock module for the rest of this function
        commit = []
        remove = []
        changed = []
        use_dirstate = (p1 is None) # not rawcommit
        extra = extra.copy()

        if use_dirstate:
            if files:
                # explicit file list: classify each by dirstate status
                for f in files:
                    s = self.dirstate.state(f)
                    if s in 'nmai':
                        commit.append(f)
                    elif s == 'r':
                        remove.append(f)
                    else:
                        self.ui.warn(_("%s not tracked!\n") % f)
            else:
                # no file list: commit everything status reports
                changes = self.status(match=match)[:5]
                modified, added, removed, deleted, unknown = changes
                commit = modified + added
                remove = removed
        else:
            commit = files

        if use_dirstate:
            p1, p2 = self.dirstate.parents()
            update_dirstate = True
        else:
            p1, p2 = p1, p2 or nullid
            # only move the dirstate if it was already sitting on p1
            update_dirstate = (self.dirstate.parents()[0] == p1)

        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0]).copy()
        m2 = self.manifest.read(c2[0])

        if use_dirstate:
            branchname = self.workingctx().branch()
            try:
                # round-trip to validate the branch name is UTF-8
                branchname = branchname.decode('UTF-8').encode('UTF-8')
            except UnicodeDecodeError:
                raise util.Abort(_('branch name not in UTF-8!'))
        else:
            branchname = ""

        if use_dirstate:
            oldname = c1[5].get("branch", "") # stored in UTF-8
            # a branch-name change alone is enough to allow a commit
            if not commit and not remove and not force and p2 == nullid and \
                   branchname == oldname:
                self.ui.status(_("nothing changed\n"))
                return None

        xp1 = hex(p1)
        if p2 == nullid: xp2 = ''
        else: xp2 = hex(p2)

        self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

        if not wlock:
            wlock = self.wlock()
        if not lock:
            lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        is_exec = util.execfunc(self.root, m1.execf)
        is_link = util.linkfunc(self.root, m1.linkf)
        for f in commit:
            self.ui.note(f + "\n")
            try:
                new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
                m1.set(f, is_exec(f), is_link(f))
            except (OSError, IOError):
                if use_dirstate:
                    self.ui.warn(_("trouble committing %s!\n") % f)
                    raise
                else:
                    # rawcommit path: an unreadable file becomes a removal
                    remove.append(f)

        # update manifest
        m1.update(new)
        remove.sort()
        removed = []

        for f in remove:
            if f in m1:
                del m1[f]
                removed.append(f)
        mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))

        # add changeset
        new = new.keys()
        new.sort()

        user = user or self.ui.username()
        if not text or force_editor:
            # build the HG: template shown in the user's editor
            edittext = []
            if text:
                edittext.append(text)
            edittext.append("")
            edittext.append("HG: user: %s" % user)
            if p2 != nullid:
                edittext.append("HG: branch merge")
            if branchname:
                edittext.append("HG: branch %s" % util.tolocal(branchname))
            edittext.extend(["HG: changed %s" % f for f in changed])
            edittext.extend(["HG: removed %s" % f for f in removed])
            if not changed and not remove:
                edittext.append("HG: no files changed")
            edittext.append("")
            # run editor in the repository root
            olddir = os.getcwd()
            os.chdir(self.root)
            text = self.ui.edit("\n".join(edittext), user)
            os.chdir(olddir)

        # strip trailing whitespace and leading blank lines; an empty
        # message aborts the commit
        lines = [line.rstrip() for line in text.rstrip().splitlines()]
        while lines and not lines[0]:
            del lines[0]
        if not lines:
            return None
        text = '\n'.join(lines)
        if branchname:
            extra["branch"] = branchname
        n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
                               user, date, extra)
        self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                  parent2=xp2)
        tr.close()

        if self.branchcache and "branch" in extra:
            self.branchcache[util.tolocal(extra["branch"])] = n

        if use_dirstate or update_dirstate:
            self.dirstate.setparents(n)
            if use_dirstate:
                self.dirstate.update(new, "n")
                self.dirstate.forget(removed)

        self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
        return n
820
824
    def walk(self, node=None, files=[], match=util.always, badmatch=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function

        results are yielded in a tuple (src, filename), where src
        is one of:
        'f' the file was found in the directory tree
        'm' the file was only in the dirstate and not in the tree
        'b' file was not found and matched badmatch
        '''

        if node:
            # walk the manifest of the given changeset
            fdict = dict.fromkeys(files)
            for fn in self.manifest.read(self.changelog.read(node)[0]):
                for ffn in fdict:
                    # match if the file is the exact name or a directory
                    if ffn == fn or fn.startswith("%s/" % ffn):
                        # requested name was found; stop tracking it
                        # (break immediately, so deleting while
                        # iterating fdict is safe here)
                        del fdict[ffn]
                        break
                if match(fn):
                    yield 'm', fn
            # anything left in fdict was requested but never matched a
            # manifest entry
            for fn in fdict:
                if badmatch and badmatch(fn):
                    if match(fn):
                        yield 'b', fn
                else:
                    self.ui.warn(_('%s: No such file in rev %s\n') % (
                        util.pathto(self.getcwd(), fn), short(node)))
        else:
            # no node: walk the working directory via the dirstate
            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
                yield src, fn
854
858
    def status(self, node1=None, node2=None, files=[], match=util.always,
               wlock=None, list_ignored=False, list_clean=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns a 7-tuple of sorted file lists:
        (modified, added, removed, deleted, unknown, ignored, clean);
        ignored and clean are only populated when the corresponding
        list_* flag is set.
        """

        def fcmp(fn, mf):
            # compare working-directory contents of fn against the
            # manifest entry; true when they differ
            t1 = self.wread(fn)
            return self.file(fn).cmp(mf.get(fn, nullid), t1)

        def mfmatches(node):
            # manifest of `node' restricted to files accepted by match()
            change = self.changelog.read(node)
            mf = self.manifest.read(change[0]).copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        modified, added, removed, deleted, unknown = [], [], [], [], []
        ignored, clean = [], []

        compareworking = False
        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
            compareworking = True

        if not compareworking:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        # are we comparing the working directory?
        if not node2:
            if not wlock:
                try:
                    # take the lock if we can so we may fix up the
                    # dirstate below; fall back to read-only otherwise
                    wlock = self.wlock(wait=0)
                except lock.LockException:
                    wlock = None
            (lookup, modified, added, removed, deleted, unknown,
             ignored, clean) = self.dirstate.status(files, match,
                                                    list_ignored, list_clean)

            # are we comparing working dir against its parent?
            if compareworking:
                if lookup:
                    # do a full compare of any files that might have changed
                    mf2 = mfmatches(self.dirstate.parents()[0])
                    for f in lookup:
                        if fcmp(f, mf2):
                            modified.append(f)
                        else:
                            clean.append(f)
                            if wlock is not None:
                                # record the file as clean so the next
                                # status call can skip the content check
                                self.dirstate.update([f], "n")
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                # XXX: create it in dirstate.py ?
                mf2 = mfmatches(self.dirstate.parents()[0])
                is_exec = util.execfunc(self.root, mf2.execf)
                is_link = util.linkfunc(self.root, mf2.linkf)
                for f in lookup + modified + added:
                    # "" marks entries whose content must be compared
                    # against the working directory later on
                    mf2[f] = ""
                    mf2.set(f, is_exec(f), is_link(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]
        else:
            # we are comparing two revisions
            mf2 = mfmatches(node2)

        if not compareworking:
            # flush lists from dirstate before comparing manifests
            modified, added, clean = [], [], []

            # make sure to sort the files so we talk to the disk in a
            # reasonable order
            mf2keys = mf2.keys()
            mf2keys.sort()
            for fn in mf2keys:
                if mf1.has_key(fn):
                    if mf1.flags(fn) != mf2.flags(fn) or \
                       (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
                        modified.append(fn)
                    elif list_clean:
                        clean.append(fn)
                    # consume mf1 entries as we match them ...
                    del mf1[fn]
                else:
                    added.append(fn)

            # ... so whatever is left in mf1 was removed
            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored, clean:
            l.sort()
        return (modified, added, removed, deleted, unknown, ignored, clean)
953
957
954 def add(self, list, wlock=None):
958 def add(self, list, wlock=None):
955 if not wlock:
959 if not wlock:
956 wlock = self.wlock()
960 wlock = self.wlock()
957 for f in list:
961 for f in list:
958 p = self.wjoin(f)
962 p = self.wjoin(f)
959 islink = os.path.islink(p)
963 islink = os.path.islink(p)
960 if not islink and not os.path.exists(p):
964 if not islink and not os.path.exists(p):
961 self.ui.warn(_("%s does not exist!\n") % f)
965 self.ui.warn(_("%s does not exist!\n") % f)
962 elif not islink and not os.path.isfile(p):
966 elif not islink and not os.path.isfile(p):
963 self.ui.warn(_("%s not added: only files and symlinks "
967 self.ui.warn(_("%s not added: only files and symlinks "
964 "supported currently\n") % f)
968 "supported currently\n") % f)
965 elif self.dirstate.state(f) in 'an':
969 elif self.dirstate.state(f) in 'an':
966 self.ui.warn(_("%s already tracked!\n") % f)
970 self.ui.warn(_("%s already tracked!\n") % f)
967 else:
971 else:
968 self.dirstate.update([f], "a")
972 self.dirstate.update([f], "a")
969
973
970 def forget(self, list, wlock=None):
974 def forget(self, list, wlock=None):
971 if not wlock:
975 if not wlock:
972 wlock = self.wlock()
976 wlock = self.wlock()
973 for f in list:
977 for f in list:
974 if self.dirstate.state(f) not in 'ai':
978 if self.dirstate.state(f) not in 'ai':
975 self.ui.warn(_("%s not added!\n") % f)
979 self.ui.warn(_("%s not added!\n") % f)
976 else:
980 else:
977 self.dirstate.forget([f])
981 self.dirstate.forget([f])
978
982
979 def remove(self, list, unlink=False, wlock=None):
983 def remove(self, list, unlink=False, wlock=None):
980 if unlink:
984 if unlink:
981 for f in list:
985 for f in list:
982 try:
986 try:
983 util.unlink(self.wjoin(f))
987 util.unlink(self.wjoin(f))
984 except OSError, inst:
988 except OSError, inst:
985 if inst.errno != errno.ENOENT:
989 if inst.errno != errno.ENOENT:
986 raise
990 raise
987 if not wlock:
991 if not wlock:
988 wlock = self.wlock()
992 wlock = self.wlock()
989 for f in list:
993 for f in list:
990 p = self.wjoin(f)
994 p = self.wjoin(f)
991 if os.path.exists(p):
995 if os.path.exists(p):
992 self.ui.warn(_("%s still exists!\n") % f)
996 self.ui.warn(_("%s still exists!\n") % f)
993 elif self.dirstate.state(f) == 'a':
997 elif self.dirstate.state(f) == 'a':
994 self.dirstate.forget([f])
998 self.dirstate.forget([f])
995 elif f not in self.dirstate:
999 elif f not in self.dirstate:
996 self.ui.warn(_("%s not tracked!\n") % f)
1000 self.ui.warn(_("%s not tracked!\n") % f)
997 else:
1001 else:
998 self.dirstate.update([f], "r")
1002 self.dirstate.update([f], "r")
999
1003
1000 def undelete(self, list, wlock=None):
1004 def undelete(self, list, wlock=None):
1001 p = self.dirstate.parents()[0]
1005 p = self.dirstate.parents()[0]
1002 mn = self.changelog.read(p)[0]
1006 mn = self.changelog.read(p)[0]
1003 m = self.manifest.read(mn)
1007 m = self.manifest.read(mn)
1004 if not wlock:
1008 if not wlock:
1005 wlock = self.wlock()
1009 wlock = self.wlock()
1006 for f in list:
1010 for f in list:
1007 if self.dirstate.state(f) not in "r":
1011 if self.dirstate.state(f) not in "r":
1008 self.ui.warn("%s not removed!\n" % f)
1012 self.ui.warn("%s not removed!\n" % f)
1009 else:
1013 else:
1010 t = self.file(f).read(m[f])
1014 t = self.file(f).read(m[f])
1011 self.wwrite(f, t, m.flags(f))
1015 self.wwrite(f, t, m.flags(f))
1012 self.dirstate.update([f], "n")
1016 self.dirstate.update([f], "n")
1013
1017
1014 def copy(self, source, dest, wlock=None):
1018 def copy(self, source, dest, wlock=None):
1015 p = self.wjoin(dest)
1019 p = self.wjoin(dest)
1016 if not os.path.exists(p):
1020 if not os.path.exists(p):
1017 self.ui.warn(_("%s does not exist!\n") % dest)
1021 self.ui.warn(_("%s does not exist!\n") % dest)
1018 elif not os.path.isfile(p):
1022 elif not os.path.isfile(p):
1019 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
1023 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
1020 else:
1024 else:
1021 if not wlock:
1025 if not wlock:
1022 wlock = self.wlock()
1026 wlock = self.wlock()
1023 if self.dirstate.state(dest) == '?':
1027 if self.dirstate.state(dest) == '?':
1024 self.dirstate.update([dest], "a")
1028 self.dirstate.update([dest], "a")
1025 self.dirstate.copy(source, dest)
1029 self.dirstate.copy(source, dest)
1026
1030
1027 def heads(self, start=None):
1031 def heads(self, start=None):
1028 heads = self.changelog.heads(start)
1032 heads = self.changelog.heads(start)
1029 # sort the output in rev descending order
1033 # sort the output in rev descending order
1030 heads = [(-self.changelog.rev(h), h) for h in heads]
1034 heads = [(-self.changelog.rev(h), h) for h in heads]
1031 heads.sort()
1035 heads.sort()
1032 return [n for (r, n) in heads]
1036 return [n for (r, n) in heads]
1033
1037
1034 def branches(self, nodes):
1038 def branches(self, nodes):
1035 if not nodes:
1039 if not nodes:
1036 nodes = [self.changelog.tip()]
1040 nodes = [self.changelog.tip()]
1037 b = []
1041 b = []
1038 for n in nodes:
1042 for n in nodes:
1039 t = n
1043 t = n
1040 while 1:
1044 while 1:
1041 p = self.changelog.parents(n)
1045 p = self.changelog.parents(n)
1042 if p[1] != nullid or p[0] == nullid:
1046 if p[1] != nullid or p[0] == nullid:
1043 b.append((t, n, p[0], p[1]))
1047 b.append((t, n, p[0], p[1]))
1044 break
1048 break
1045 n = p[0]
1049 n = p[0]
1046 return b
1050 return b
1047
1051
1048 def between(self, pairs):
1052 def between(self, pairs):
1049 r = []
1053 r = []
1050
1054
1051 for top, bottom in pairs:
1055 for top, bottom in pairs:
1052 n, l, i = top, [], 0
1056 n, l, i = top, [], 0
1053 f = 1
1057 f = 1
1054
1058
1055 while n != bottom:
1059 while n != bottom:
1056 p = self.changelog.parents(n)[0]
1060 p = self.changelog.parents(n)[0]
1057 if i == f:
1061 if i == f:
1058 l.append(n)
1062 l.append(n)
1059 f = f * 2
1063 f = f * 2
1060 n = p
1064 n = p
1061 i += 1
1065 i += 1
1062
1066
1063 r.append(l)
1067 r.append(l)
1064
1068
1065 return r
1069 return r
1066
1070
1067 def findincoming(self, remote, base=None, heads=None, force=False):
1071 def findincoming(self, remote, base=None, heads=None, force=False):
1068 """Return list of roots of the subsets of missing nodes from remote
1072 """Return list of roots of the subsets of missing nodes from remote
1069
1073
1070 If base dict is specified, assume that these nodes and their parents
1074 If base dict is specified, assume that these nodes and their parents
1071 exist on the remote side and that no child of a node of base exists
1075 exist on the remote side and that no child of a node of base exists
1072 in both remote and self.
1076 in both remote and self.
1073 Furthermore base will be updated to include the nodes that exists
1077 Furthermore base will be updated to include the nodes that exists
1074 in self and remote but no children exists in self and remote.
1078 in self and remote but no children exists in self and remote.
1075 If a list of heads is specified, return only nodes which are heads
1079 If a list of heads is specified, return only nodes which are heads
1076 or ancestors of these heads.
1080 or ancestors of these heads.
1077
1081
1078 All the ancestors of base are in self and in remote.
1082 All the ancestors of base are in self and in remote.
1079 All the descendants of the list returned are missing in self.
1083 All the descendants of the list returned are missing in self.
1080 (and so we know that the rest of the nodes are missing in remote, see
1084 (and so we know that the rest of the nodes are missing in remote, see
1081 outgoing)
1085 outgoing)
1082 """
1086 """
1083 m = self.changelog.nodemap
1087 m = self.changelog.nodemap
1084 search = []
1088 search = []
1085 fetch = {}
1089 fetch = {}
1086 seen = {}
1090 seen = {}
1087 seenbranch = {}
1091 seenbranch = {}
1088 if base == None:
1092 if base == None:
1089 base = {}
1093 base = {}
1090
1094
1091 if not heads:
1095 if not heads:
1092 heads = remote.heads()
1096 heads = remote.heads()
1093
1097
1094 if self.changelog.tip() == nullid:
1098 if self.changelog.tip() == nullid:
1095 base[nullid] = 1
1099 base[nullid] = 1
1096 if heads != [nullid]:
1100 if heads != [nullid]:
1097 return [nullid]
1101 return [nullid]
1098 return []
1102 return []
1099
1103
1100 # assume we're closer to the tip than the root
1104 # assume we're closer to the tip than the root
1101 # and start by examining the heads
1105 # and start by examining the heads
1102 self.ui.status(_("searching for changes\n"))
1106 self.ui.status(_("searching for changes\n"))
1103
1107
1104 unknown = []
1108 unknown = []
1105 for h in heads:
1109 for h in heads:
1106 if h not in m:
1110 if h not in m:
1107 unknown.append(h)
1111 unknown.append(h)
1108 else:
1112 else:
1109 base[h] = 1
1113 base[h] = 1
1110
1114
1111 if not unknown:
1115 if not unknown:
1112 return []
1116 return []
1113
1117
1114 req = dict.fromkeys(unknown)
1118 req = dict.fromkeys(unknown)
1115 reqcnt = 0
1119 reqcnt = 0
1116
1120
1117 # search through remote branches
1121 # search through remote branches
1118 # a 'branch' here is a linear segment of history, with four parts:
1122 # a 'branch' here is a linear segment of history, with four parts:
1119 # head, root, first parent, second parent
1123 # head, root, first parent, second parent
1120 # (a branch always has two parents (or none) by definition)
1124 # (a branch always has two parents (or none) by definition)
1121 unknown = remote.branches(unknown)
1125 unknown = remote.branches(unknown)
1122 while unknown:
1126 while unknown:
1123 r = []
1127 r = []
1124 while unknown:
1128 while unknown:
1125 n = unknown.pop(0)
1129 n = unknown.pop(0)
1126 if n[0] in seen:
1130 if n[0] in seen:
1127 continue
1131 continue
1128
1132
1129 self.ui.debug(_("examining %s:%s\n")
1133 self.ui.debug(_("examining %s:%s\n")
1130 % (short(n[0]), short(n[1])))
1134 % (short(n[0]), short(n[1])))
1131 if n[0] == nullid: # found the end of the branch
1135 if n[0] == nullid: # found the end of the branch
1132 pass
1136 pass
1133 elif n in seenbranch:
1137 elif n in seenbranch:
1134 self.ui.debug(_("branch already found\n"))
1138 self.ui.debug(_("branch already found\n"))
1135 continue
1139 continue
1136 elif n[1] and n[1] in m: # do we know the base?
1140 elif n[1] and n[1] in m: # do we know the base?
1137 self.ui.debug(_("found incomplete branch %s:%s\n")
1141 self.ui.debug(_("found incomplete branch %s:%s\n")
1138 % (short(n[0]), short(n[1])))
1142 % (short(n[0]), short(n[1])))
1139 search.append(n) # schedule branch range for scanning
1143 search.append(n) # schedule branch range for scanning
1140 seenbranch[n] = 1
1144 seenbranch[n] = 1
1141 else:
1145 else:
1142 if n[1] not in seen and n[1] not in fetch:
1146 if n[1] not in seen and n[1] not in fetch:
1143 if n[2] in m and n[3] in m:
1147 if n[2] in m and n[3] in m:
1144 self.ui.debug(_("found new changeset %s\n") %
1148 self.ui.debug(_("found new changeset %s\n") %
1145 short(n[1]))
1149 short(n[1]))
1146 fetch[n[1]] = 1 # earliest unknown
1150 fetch[n[1]] = 1 # earliest unknown
1147 for p in n[2:4]:
1151 for p in n[2:4]:
1148 if p in m:
1152 if p in m:
1149 base[p] = 1 # latest known
1153 base[p] = 1 # latest known
1150
1154
1151 for p in n[2:4]:
1155 for p in n[2:4]:
1152 if p not in req and p not in m:
1156 if p not in req and p not in m:
1153 r.append(p)
1157 r.append(p)
1154 req[p] = 1
1158 req[p] = 1
1155 seen[n[0]] = 1
1159 seen[n[0]] = 1
1156
1160
1157 if r:
1161 if r:
1158 reqcnt += 1
1162 reqcnt += 1
1159 self.ui.debug(_("request %d: %s\n") %
1163 self.ui.debug(_("request %d: %s\n") %
1160 (reqcnt, " ".join(map(short, r))))
1164 (reqcnt, " ".join(map(short, r))))
1161 for p in xrange(0, len(r), 10):
1165 for p in xrange(0, len(r), 10):
1162 for b in remote.branches(r[p:p+10]):
1166 for b in remote.branches(r[p:p+10]):
1163 self.ui.debug(_("received %s:%s\n") %
1167 self.ui.debug(_("received %s:%s\n") %
1164 (short(b[0]), short(b[1])))
1168 (short(b[0]), short(b[1])))
1165 unknown.append(b)
1169 unknown.append(b)
1166
1170
1167 # do binary search on the branches we found
1171 # do binary search on the branches we found
1168 while search:
1172 while search:
1169 n = search.pop(0)
1173 n = search.pop(0)
1170 reqcnt += 1
1174 reqcnt += 1
1171 l = remote.between([(n[0], n[1])])[0]
1175 l = remote.between([(n[0], n[1])])[0]
1172 l.append(n[1])
1176 l.append(n[1])
1173 p = n[0]
1177 p = n[0]
1174 f = 1
1178 f = 1
1175 for i in l:
1179 for i in l:
1176 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1180 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1177 if i in m:
1181 if i in m:
1178 if f <= 2:
1182 if f <= 2:
1179 self.ui.debug(_("found new branch changeset %s\n") %
1183 self.ui.debug(_("found new branch changeset %s\n") %
1180 short(p))
1184 short(p))
1181 fetch[p] = 1
1185 fetch[p] = 1
1182 base[i] = 1
1186 base[i] = 1
1183 else:
1187 else:
1184 self.ui.debug(_("narrowed branch search to %s:%s\n")
1188 self.ui.debug(_("narrowed branch search to %s:%s\n")
1185 % (short(p), short(i)))
1189 % (short(p), short(i)))
1186 search.append((p, i))
1190 search.append((p, i))
1187 break
1191 break
1188 p, f = i, f * 2
1192 p, f = i, f * 2
1189
1193
1190 # sanity check our fetch list
1194 # sanity check our fetch list
1191 for f in fetch.keys():
1195 for f in fetch.keys():
1192 if f in m:
1196 if f in m:
1193 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1197 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1194
1198
1195 if base.keys() == [nullid]:
1199 if base.keys() == [nullid]:
1196 if force:
1200 if force:
1197 self.ui.warn(_("warning: repository is unrelated\n"))
1201 self.ui.warn(_("warning: repository is unrelated\n"))
1198 else:
1202 else:
1199 raise util.Abort(_("repository is unrelated"))
1203 raise util.Abort(_("repository is unrelated"))
1200
1204
1201 self.ui.debug(_("found new changesets starting at ") +
1205 self.ui.debug(_("found new changesets starting at ") +
1202 " ".join([short(f) for f in fetch]) + "\n")
1206 " ".join([short(f) for f in fetch]) + "\n")
1203
1207
1204 self.ui.debug(_("%d total queries\n") % reqcnt)
1208 self.ui.debug(_("%d total queries\n") % reqcnt)
1205
1209
1206 return fetch.keys()
1210 return fetch.keys()
1207
1211
1208 def findoutgoing(self, remote, base=None, heads=None, force=False):
1212 def findoutgoing(self, remote, base=None, heads=None, force=False):
1209 """Return list of nodes that are roots of subsets not in remote
1213 """Return list of nodes that are roots of subsets not in remote
1210
1214
1211 If base dict is specified, assume that these nodes and their parents
1215 If base dict is specified, assume that these nodes and their parents
1212 exist on the remote side.
1216 exist on the remote side.
1213 If a list of heads is specified, return only nodes which are heads
1217 If a list of heads is specified, return only nodes which are heads
1214 or ancestors of these heads, and return a second element which
1218 or ancestors of these heads, and return a second element which
1215 contains all remote heads which get new children.
1219 contains all remote heads which get new children.
1216 """
1220 """
1217 if base == None:
1221 if base == None:
1218 base = {}
1222 base = {}
1219 self.findincoming(remote, base, heads, force=force)
1223 self.findincoming(remote, base, heads, force=force)
1220
1224
1221 self.ui.debug(_("common changesets up to ")
1225 self.ui.debug(_("common changesets up to ")
1222 + " ".join(map(short, base.keys())) + "\n")
1226 + " ".join(map(short, base.keys())) + "\n")
1223
1227
1224 remain = dict.fromkeys(self.changelog.nodemap)
1228 remain = dict.fromkeys(self.changelog.nodemap)
1225
1229
1226 # prune everything remote has from the tree
1230 # prune everything remote has from the tree
1227 del remain[nullid]
1231 del remain[nullid]
1228 remove = base.keys()
1232 remove = base.keys()
1229 while remove:
1233 while remove:
1230 n = remove.pop(0)
1234 n = remove.pop(0)
1231 if n in remain:
1235 if n in remain:
1232 del remain[n]
1236 del remain[n]
1233 for p in self.changelog.parents(n):
1237 for p in self.changelog.parents(n):
1234 remove.append(p)
1238 remove.append(p)
1235
1239
1236 # find every node whose parents have been pruned
1240 # find every node whose parents have been pruned
1237 subset = []
1241 subset = []
1238 # find every remote head that will get new children
1242 # find every remote head that will get new children
1239 updated_heads = {}
1243 updated_heads = {}
1240 for n in remain:
1244 for n in remain:
1241 p1, p2 = self.changelog.parents(n)
1245 p1, p2 = self.changelog.parents(n)
1242 if p1 not in remain and p2 not in remain:
1246 if p1 not in remain and p2 not in remain:
1243 subset.append(n)
1247 subset.append(n)
1244 if heads:
1248 if heads:
1245 if p1 in heads:
1249 if p1 in heads:
1246 updated_heads[p1] = True
1250 updated_heads[p1] = True
1247 if p2 in heads:
1251 if p2 in heads:
1248 updated_heads[p2] = True
1252 updated_heads[p2] = True
1249
1253
1250 # this is the set of all roots we have to push
1254 # this is the set of all roots we have to push
1251 if heads:
1255 if heads:
1252 return subset, updated_heads.keys()
1256 return subset, updated_heads.keys()
1253 else:
1257 else:
1254 return subset
1258 return subset
1255
1259
1256 def pull(self, remote, heads=None, force=False, lock=None):
1260 def pull(self, remote, heads=None, force=False, lock=None):
1257 mylock = False
1261 mylock = False
1258 if not lock:
1262 if not lock:
1259 lock = self.lock()
1263 lock = self.lock()
1260 mylock = True
1264 mylock = True
1261
1265
1262 try:
1266 try:
1263 fetch = self.findincoming(remote, force=force)
1267 fetch = self.findincoming(remote, force=force)
1264 if fetch == [nullid]:
1268 if fetch == [nullid]:
1265 self.ui.status(_("requesting all changes\n"))
1269 self.ui.status(_("requesting all changes\n"))
1266
1270
1267 if not fetch:
1271 if not fetch:
1268 self.ui.status(_("no changes found\n"))
1272 self.ui.status(_("no changes found\n"))
1269 return 0
1273 return 0
1270
1274
1271 if heads is None:
1275 if heads is None:
1272 cg = remote.changegroup(fetch, 'pull')
1276 cg = remote.changegroup(fetch, 'pull')
1273 else:
1277 else:
1274 if 'changegroupsubset' not in remote.capabilities:
1278 if 'changegroupsubset' not in remote.capabilities:
1275 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1279 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1276 cg = remote.changegroupsubset(fetch, heads, 'pull')
1280 cg = remote.changegroupsubset(fetch, heads, 'pull')
1277 return self.addchangegroup(cg, 'pull', remote.url())
1281 return self.addchangegroup(cg, 'pull', remote.url())
1278 finally:
1282 finally:
1279 if mylock:
1283 if mylock:
1280 lock.release()
1284 lock.release()
1281
1285
1282 def push(self, remote, force=False, revs=None):
1286 def push(self, remote, force=False, revs=None):
1283 # there are two ways to push to remote repo:
1287 # there are two ways to push to remote repo:
1284 #
1288 #
1285 # addchangegroup assumes local user can lock remote
1289 # addchangegroup assumes local user can lock remote
1286 # repo (local filesystem, old ssh servers).
1290 # repo (local filesystem, old ssh servers).
1287 #
1291 #
1288 # unbundle assumes local user cannot lock remote repo (new ssh
1292 # unbundle assumes local user cannot lock remote repo (new ssh
1289 # servers, http servers).
1293 # servers, http servers).
1290
1294
1291 if remote.capable('unbundle'):
1295 if remote.capable('unbundle'):
1292 return self.push_unbundle(remote, force, revs)
1296 return self.push_unbundle(remote, force, revs)
1293 return self.push_addchangegroup(remote, force, revs)
1297 return self.push_addchangegroup(remote, force, revs)
1294
1298
1295 def prepush(self, remote, force, revs):
1299 def prepush(self, remote, force, revs):
1296 base = {}
1300 base = {}
1297 remote_heads = remote.heads()
1301 remote_heads = remote.heads()
1298 inc = self.findincoming(remote, base, remote_heads, force=force)
1302 inc = self.findincoming(remote, base, remote_heads, force=force)
1299
1303
1300 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1304 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1301 if revs is not None:
1305 if revs is not None:
1302 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1306 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1303 else:
1307 else:
1304 bases, heads = update, self.changelog.heads()
1308 bases, heads = update, self.changelog.heads()
1305
1309
1306 if not bases:
1310 if not bases:
1307 self.ui.status(_("no changes found\n"))
1311 self.ui.status(_("no changes found\n"))
1308 return None, 1
1312 return None, 1
1309 elif not force:
1313 elif not force:
1310 # check if we're creating new remote heads
1314 # check if we're creating new remote heads
1311 # to be a remote head after push, node must be either
1315 # to be a remote head after push, node must be either
1312 # - unknown locally
1316 # - unknown locally
1313 # - a local outgoing head descended from update
1317 # - a local outgoing head descended from update
1314 # - a remote head that's known locally and not
1318 # - a remote head that's known locally and not
1315 # ancestral to an outgoing head
1319 # ancestral to an outgoing head
1316
1320
1317 warn = 0
1321 warn = 0
1318
1322
1319 if remote_heads == [nullid]:
1323 if remote_heads == [nullid]:
1320 warn = 0
1324 warn = 0
1321 elif not revs and len(heads) > len(remote_heads):
1325 elif not revs and len(heads) > len(remote_heads):
1322 warn = 1
1326 warn = 1
1323 else:
1327 else:
1324 newheads = list(heads)
1328 newheads = list(heads)
1325 for r in remote_heads:
1329 for r in remote_heads:
1326 if r in self.changelog.nodemap:
1330 if r in self.changelog.nodemap:
1327 desc = self.changelog.heads(r, heads)
1331 desc = self.changelog.heads(r, heads)
1328 l = [h for h in heads if h in desc]
1332 l = [h for h in heads if h in desc]
1329 if not l:
1333 if not l:
1330 newheads.append(r)
1334 newheads.append(r)
1331 else:
1335 else:
1332 newheads.append(r)
1336 newheads.append(r)
1333 if len(newheads) > len(remote_heads):
1337 if len(newheads) > len(remote_heads):
1334 warn = 1
1338 warn = 1
1335
1339
1336 if warn:
1340 if warn:
1337 self.ui.warn(_("abort: push creates new remote branches!\n"))
1341 self.ui.warn(_("abort: push creates new remote branches!\n"))
1338 self.ui.status(_("(did you forget to merge?"
1342 self.ui.status(_("(did you forget to merge?"
1339 " use push -f to force)\n"))
1343 " use push -f to force)\n"))
1340 return None, 1
1344 return None, 1
1341 elif inc:
1345 elif inc:
1342 self.ui.warn(_("note: unsynced remote changes!\n"))
1346 self.ui.warn(_("note: unsynced remote changes!\n"))
1343
1347
1344
1348
1345 if revs is None:
1349 if revs is None:
1346 cg = self.changegroup(update, 'push')
1350 cg = self.changegroup(update, 'push')
1347 else:
1351 else:
1348 cg = self.changegroupsubset(update, revs, 'push')
1352 cg = self.changegroupsubset(update, revs, 'push')
1349 return cg, remote_heads
1353 return cg, remote_heads
1350
1354
1351 def push_addchangegroup(self, remote, force, revs):
1355 def push_addchangegroup(self, remote, force, revs):
1352 lock = remote.lock()
1356 lock = remote.lock()
1353
1357
1354 ret = self.prepush(remote, force, revs)
1358 ret = self.prepush(remote, force, revs)
1355 if ret[0] is not None:
1359 if ret[0] is not None:
1356 cg, remote_heads = ret
1360 cg, remote_heads = ret
1357 return remote.addchangegroup(cg, 'push', self.url())
1361 return remote.addchangegroup(cg, 'push', self.url())
1358 return ret[1]
1362 return ret[1]
1359
1363
1360 def push_unbundle(self, remote, force, revs):
1364 def push_unbundle(self, remote, force, revs):
1361 # local repo finds heads on server, finds out what revs it
1365 # local repo finds heads on server, finds out what revs it
1362 # must push. once revs transferred, if server finds it has
1366 # must push. once revs transferred, if server finds it has
1363 # different heads (someone else won commit/push race), server
1367 # different heads (someone else won commit/push race), server
1364 # aborts.
1368 # aborts.
1365
1369
1366 ret = self.prepush(remote, force, revs)
1370 ret = self.prepush(remote, force, revs)
1367 if ret[0] is not None:
1371 if ret[0] is not None:
1368 cg, remote_heads = ret
1372 cg, remote_heads = ret
1369 if force: remote_heads = ['force']
1373 if force: remote_heads = ['force']
1370 return remote.unbundle(cg, remote_heads, 'push')
1374 return remote.unbundle(cg, remote_heads, 'push')
1371 return ret[1]
1375 return ret[1]
1372
1376
1373 def changegroupinfo(self, nodes):
1377 def changegroupinfo(self, nodes):
1374 self.ui.note(_("%d changesets found\n") % len(nodes))
1378 self.ui.note(_("%d changesets found\n") % len(nodes))
1375 if self.ui.debugflag:
1379 if self.ui.debugflag:
1376 self.ui.debug(_("List of changesets:\n"))
1380 self.ui.debug(_("List of changesets:\n"))
1377 for node in nodes:
1381 for node in nodes:
1378 self.ui.debug("%s\n" % hex(node))
1382 self.ui.debug("%s\n" % hex(node))
1379
1383
1380 def changegroupsubset(self, bases, heads, source):
1384 def changegroupsubset(self, bases, heads, source):
1381 """This function generates a changegroup consisting of all the nodes
1385 """This function generates a changegroup consisting of all the nodes
1382 that are descendents of any of the bases, and ancestors of any of
1386 that are descendents of any of the bases, and ancestors of any of
1383 the heads.
1387 the heads.
1384
1388
1385 It is fairly complex as determining which filenodes and which
1389 It is fairly complex as determining which filenodes and which
1386 manifest nodes need to be included for the changeset to be complete
1390 manifest nodes need to be included for the changeset to be complete
1387 is non-trivial.
1391 is non-trivial.
1388
1392
1389 Another wrinkle is doing the reverse, figuring out which changeset in
1393 Another wrinkle is doing the reverse, figuring out which changeset in
1390 the changegroup a particular filenode or manifestnode belongs to."""
1394 the changegroup a particular filenode or manifestnode belongs to."""
1391
1395
1392 self.hook('preoutgoing', throw=True, source=source)
1396 self.hook('preoutgoing', throw=True, source=source)
1393
1397
1394 # Set up some initial variables
1398 # Set up some initial variables
1395 # Make it easy to refer to self.changelog
1399 # Make it easy to refer to self.changelog
1396 cl = self.changelog
1400 cl = self.changelog
1397 # msng is short for missing - compute the list of changesets in this
1401 # msng is short for missing - compute the list of changesets in this
1398 # changegroup.
1402 # changegroup.
1399 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1403 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1400 self.changegroupinfo(msng_cl_lst)
1404 self.changegroupinfo(msng_cl_lst)
1401 # Some bases may turn out to be superfluous, and some heads may be
1405 # Some bases may turn out to be superfluous, and some heads may be
1402 # too. nodesbetween will return the minimal set of bases and heads
1406 # too. nodesbetween will return the minimal set of bases and heads
1403 # necessary to re-create the changegroup.
1407 # necessary to re-create the changegroup.
1404
1408
1405 # Known heads are the list of heads that it is assumed the recipient
1409 # Known heads are the list of heads that it is assumed the recipient
1406 # of this changegroup will know about.
1410 # of this changegroup will know about.
1407 knownheads = {}
1411 knownheads = {}
1408 # We assume that all parents of bases are known heads.
1412 # We assume that all parents of bases are known heads.
1409 for n in bases:
1413 for n in bases:
1410 for p in cl.parents(n):
1414 for p in cl.parents(n):
1411 if p != nullid:
1415 if p != nullid:
1412 knownheads[p] = 1
1416 knownheads[p] = 1
1413 knownheads = knownheads.keys()
1417 knownheads = knownheads.keys()
1414 if knownheads:
1418 if knownheads:
1415 # Now that we know what heads are known, we can compute which
1419 # Now that we know what heads are known, we can compute which
1416 # changesets are known. The recipient must know about all
1420 # changesets are known. The recipient must know about all
1417 # changesets required to reach the known heads from the null
1421 # changesets required to reach the known heads from the null
1418 # changeset.
1422 # changeset.
1419 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1423 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1420 junk = None
1424 junk = None
1421 # Transform the list into an ersatz set.
1425 # Transform the list into an ersatz set.
1422 has_cl_set = dict.fromkeys(has_cl_set)
1426 has_cl_set = dict.fromkeys(has_cl_set)
1423 else:
1427 else:
1424 # If there were no known heads, the recipient cannot be assumed to
1428 # If there were no known heads, the recipient cannot be assumed to
1425 # know about any changesets.
1429 # know about any changesets.
1426 has_cl_set = {}
1430 has_cl_set = {}
1427
1431
1428 # Make it easy to refer to self.manifest
1432 # Make it easy to refer to self.manifest
1429 mnfst = self.manifest
1433 mnfst = self.manifest
1430 # We don't know which manifests are missing yet
1434 # We don't know which manifests are missing yet
1431 msng_mnfst_set = {}
1435 msng_mnfst_set = {}
1432 # Nor do we know which filenodes are missing.
1436 # Nor do we know which filenodes are missing.
1433 msng_filenode_set = {}
1437 msng_filenode_set = {}
1434
1438
1435 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1439 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1436 junk = None
1440 junk = None
1437
1441
1438 # A changeset always belongs to itself, so the changenode lookup
1442 # A changeset always belongs to itself, so the changenode lookup
1439 # function for a changenode is identity.
1443 # function for a changenode is identity.
1440 def identity(x):
1444 def identity(x):
1441 return x
1445 return x
1442
1446
1443 # A function generating function. Sets up an environment for the
1447 # A function generating function. Sets up an environment for the
1444 # inner function.
1448 # inner function.
1445 def cmp_by_rev_func(revlog):
1449 def cmp_by_rev_func(revlog):
1446 # Compare two nodes by their revision number in the environment's
1450 # Compare two nodes by their revision number in the environment's
1447 # revision history. Since the revision number both represents the
1451 # revision history. Since the revision number both represents the
1448 # most efficient order to read the nodes in, and represents a
1452 # most efficient order to read the nodes in, and represents a
1449 # topological sorting of the nodes, this function is often useful.
1453 # topological sorting of the nodes, this function is often useful.
1450 def cmp_by_rev(a, b):
1454 def cmp_by_rev(a, b):
1451 return cmp(revlog.rev(a), revlog.rev(b))
1455 return cmp(revlog.rev(a), revlog.rev(b))
1452 return cmp_by_rev
1456 return cmp_by_rev
1453
1457
1454 # If we determine that a particular file or manifest node must be a
1458 # If we determine that a particular file or manifest node must be a
1455 # node that the recipient of the changegroup will already have, we can
1459 # node that the recipient of the changegroup will already have, we can
1456 # also assume the recipient will have all the parents. This function
1460 # also assume the recipient will have all the parents. This function
1457 # prunes them from the set of missing nodes.
1461 # prunes them from the set of missing nodes.
1458 def prune_parents(revlog, hasset, msngset):
1462 def prune_parents(revlog, hasset, msngset):
1459 haslst = hasset.keys()
1463 haslst = hasset.keys()
1460 haslst.sort(cmp_by_rev_func(revlog))
1464 haslst.sort(cmp_by_rev_func(revlog))
1461 for node in haslst:
1465 for node in haslst:
1462 parentlst = [p for p in revlog.parents(node) if p != nullid]
1466 parentlst = [p for p in revlog.parents(node) if p != nullid]
1463 while parentlst:
1467 while parentlst:
1464 n = parentlst.pop()
1468 n = parentlst.pop()
1465 if n not in hasset:
1469 if n not in hasset:
1466 hasset[n] = 1
1470 hasset[n] = 1
1467 p = [p for p in revlog.parents(n) if p != nullid]
1471 p = [p for p in revlog.parents(n) if p != nullid]
1468 parentlst.extend(p)
1472 parentlst.extend(p)
1469 for n in hasset:
1473 for n in hasset:
1470 msngset.pop(n, None)
1474 msngset.pop(n, None)
1471
1475
1472 # This is a function generating function used to set up an environment
1476 # This is a function generating function used to set up an environment
1473 # for the inner function to execute in.
1477 # for the inner function to execute in.
1474 def manifest_and_file_collector(changedfileset):
1478 def manifest_and_file_collector(changedfileset):
1475 # This is an information gathering function that gathers
1479 # This is an information gathering function that gathers
1476 # information from each changeset node that goes out as part of
1480 # information from each changeset node that goes out as part of
1477 # the changegroup. The information gathered is a list of which
1481 # the changegroup. The information gathered is a list of which
1478 # manifest nodes are potentially required (the recipient may
1482 # manifest nodes are potentially required (the recipient may
1479 # already have them) and total list of all files which were
1483 # already have them) and total list of all files which were
1480 # changed in any changeset in the changegroup.
1484 # changed in any changeset in the changegroup.
1481 #
1485 #
1482 # We also remember the first changenode we saw any manifest
1486 # We also remember the first changenode we saw any manifest
1483 # referenced by so we can later determine which changenode 'owns'
1487 # referenced by so we can later determine which changenode 'owns'
1484 # the manifest.
1488 # the manifest.
1485 def collect_manifests_and_files(clnode):
1489 def collect_manifests_and_files(clnode):
1486 c = cl.read(clnode)
1490 c = cl.read(clnode)
1487 for f in c[3]:
1491 for f in c[3]:
1488 # This is to make sure we only have one instance of each
1492 # This is to make sure we only have one instance of each
1489 # filename string for each filename.
1493 # filename string for each filename.
1490 changedfileset.setdefault(f, f)
1494 changedfileset.setdefault(f, f)
1491 msng_mnfst_set.setdefault(c[0], clnode)
1495 msng_mnfst_set.setdefault(c[0], clnode)
1492 return collect_manifests_and_files
1496 return collect_manifests_and_files
1493
1497
1494 # Figure out which manifest nodes (of the ones we think might be part
1498 # Figure out which manifest nodes (of the ones we think might be part
1495 # of the changegroup) the recipient must know about and remove them
1499 # of the changegroup) the recipient must know about and remove them
1496 # from the changegroup.
1500 # from the changegroup.
1497 def prune_manifests():
1501 def prune_manifests():
1498 has_mnfst_set = {}
1502 has_mnfst_set = {}
1499 for n in msng_mnfst_set:
1503 for n in msng_mnfst_set:
1500 # If a 'missing' manifest thinks it belongs to a changenode
1504 # If a 'missing' manifest thinks it belongs to a changenode
1501 # the recipient is assumed to have, obviously the recipient
1505 # the recipient is assumed to have, obviously the recipient
1502 # must have that manifest.
1506 # must have that manifest.
1503 linknode = cl.node(mnfst.linkrev(n))
1507 linknode = cl.node(mnfst.linkrev(n))
1504 if linknode in has_cl_set:
1508 if linknode in has_cl_set:
1505 has_mnfst_set[n] = 1
1509 has_mnfst_set[n] = 1
1506 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1510 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1507
1511
1508 # Use the information collected in collect_manifests_and_files to say
1512 # Use the information collected in collect_manifests_and_files to say
1509 # which changenode any manifestnode belongs to.
1513 # which changenode any manifestnode belongs to.
1510 def lookup_manifest_link(mnfstnode):
1514 def lookup_manifest_link(mnfstnode):
1511 return msng_mnfst_set[mnfstnode]
1515 return msng_mnfst_set[mnfstnode]
1512
1516
1513 # A function generating function that sets up the initial environment
1517 # A function generating function that sets up the initial environment
1514 # the inner function.
1518 # the inner function.
1515 def filenode_collector(changedfiles):
1519 def filenode_collector(changedfiles):
1516 next_rev = [0]
1520 next_rev = [0]
1517 # This gathers information from each manifestnode included in the
1521 # This gathers information from each manifestnode included in the
1518 # changegroup about which filenodes the manifest node references
1522 # changegroup about which filenodes the manifest node references
1519 # so we can include those in the changegroup too.
1523 # so we can include those in the changegroup too.
1520 #
1524 #
1521 # It also remembers which changenode each filenode belongs to. It
1525 # It also remembers which changenode each filenode belongs to. It
1522 # does this by assuming the a filenode belongs to the changenode
1526 # does this by assuming the a filenode belongs to the changenode
1523 # the first manifest that references it belongs to.
1527 # the first manifest that references it belongs to.
1524 def collect_msng_filenodes(mnfstnode):
1528 def collect_msng_filenodes(mnfstnode):
1525 r = mnfst.rev(mnfstnode)
1529 r = mnfst.rev(mnfstnode)
1526 if r == next_rev[0]:
1530 if r == next_rev[0]:
1527 # If the last rev we looked at was the one just previous,
1531 # If the last rev we looked at was the one just previous,
1528 # we only need to see a diff.
1532 # we only need to see a diff.
1529 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1533 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1530 # For each line in the delta
1534 # For each line in the delta
1531 for dline in delta.splitlines():
1535 for dline in delta.splitlines():
1532 # get the filename and filenode for that line
1536 # get the filename and filenode for that line
1533 f, fnode = dline.split('\0')
1537 f, fnode = dline.split('\0')
1534 fnode = bin(fnode[:40])
1538 fnode = bin(fnode[:40])
1535 f = changedfiles.get(f, None)
1539 f = changedfiles.get(f, None)
1536 # And if the file is in the list of files we care
1540 # And if the file is in the list of files we care
1537 # about.
1541 # about.
1538 if f is not None:
1542 if f is not None:
1539 # Get the changenode this manifest belongs to
1543 # Get the changenode this manifest belongs to
1540 clnode = msng_mnfst_set[mnfstnode]
1544 clnode = msng_mnfst_set[mnfstnode]
1541 # Create the set of filenodes for the file if
1545 # Create the set of filenodes for the file if
1542 # there isn't one already.
1546 # there isn't one already.
1543 ndset = msng_filenode_set.setdefault(f, {})
1547 ndset = msng_filenode_set.setdefault(f, {})
1544 # And set the filenode's changelog node to the
1548 # And set the filenode's changelog node to the
1545 # manifest's if it hasn't been set already.
1549 # manifest's if it hasn't been set already.
1546 ndset.setdefault(fnode, clnode)
1550 ndset.setdefault(fnode, clnode)
1547 else:
1551 else:
1548 # Otherwise we need a full manifest.
1552 # Otherwise we need a full manifest.
1549 m = mnfst.read(mnfstnode)
1553 m = mnfst.read(mnfstnode)
1550 # For every file in we care about.
1554 # For every file in we care about.
1551 for f in changedfiles:
1555 for f in changedfiles:
1552 fnode = m.get(f, None)
1556 fnode = m.get(f, None)
1553 # If it's in the manifest
1557 # If it's in the manifest
1554 if fnode is not None:
1558 if fnode is not None:
1555 # See comments above.
1559 # See comments above.
1556 clnode = msng_mnfst_set[mnfstnode]
1560 clnode = msng_mnfst_set[mnfstnode]
1557 ndset = msng_filenode_set.setdefault(f, {})
1561 ndset = msng_filenode_set.setdefault(f, {})
1558 ndset.setdefault(fnode, clnode)
1562 ndset.setdefault(fnode, clnode)
1559 # Remember the revision we hope to see next.
1563 # Remember the revision we hope to see next.
1560 next_rev[0] = r + 1
1564 next_rev[0] = r + 1
1561 return collect_msng_filenodes
1565 return collect_msng_filenodes
1562
1566
1563 # We have a list of filenodes we think we need for a file, lets remove
1567 # We have a list of filenodes we think we need for a file, lets remove
1564 # all those we now the recipient must have.
1568 # all those we now the recipient must have.
1565 def prune_filenodes(f, filerevlog):
1569 def prune_filenodes(f, filerevlog):
1566 msngset = msng_filenode_set[f]
1570 msngset = msng_filenode_set[f]
1567 hasset = {}
1571 hasset = {}
1568 # If a 'missing' filenode thinks it belongs to a changenode we
1572 # If a 'missing' filenode thinks it belongs to a changenode we
1569 # assume the recipient must have, then the recipient must have
1573 # assume the recipient must have, then the recipient must have
1570 # that filenode.
1574 # that filenode.
1571 for n in msngset:
1575 for n in msngset:
1572 clnode = cl.node(filerevlog.linkrev(n))
1576 clnode = cl.node(filerevlog.linkrev(n))
1573 if clnode in has_cl_set:
1577 if clnode in has_cl_set:
1574 hasset[n] = 1
1578 hasset[n] = 1
1575 prune_parents(filerevlog, hasset, msngset)
1579 prune_parents(filerevlog, hasset, msngset)
1576
1580
1577 # A function generator function that sets up the a context for the
1581 # A function generator function that sets up the a context for the
1578 # inner function.
1582 # inner function.
1579 def lookup_filenode_link_func(fname):
1583 def lookup_filenode_link_func(fname):
1580 msngset = msng_filenode_set[fname]
1584 msngset = msng_filenode_set[fname]
1581 # Lookup the changenode the filenode belongs to.
1585 # Lookup the changenode the filenode belongs to.
1582 def lookup_filenode_link(fnode):
1586 def lookup_filenode_link(fnode):
1583 return msngset[fnode]
1587 return msngset[fnode]
1584 return lookup_filenode_link
1588 return lookup_filenode_link
1585
1589
1586 # Now that we have all theses utility functions to help out and
1590 # Now that we have all theses utility functions to help out and
1587 # logically divide up the task, generate the group.
1591 # logically divide up the task, generate the group.
1588 def gengroup():
1592 def gengroup():
1589 # The set of changed files starts empty.
1593 # The set of changed files starts empty.
1590 changedfiles = {}
1594 changedfiles = {}
1591 # Create a changenode group generator that will call our functions
1595 # Create a changenode group generator that will call our functions
1592 # back to lookup the owning changenode and collect information.
1596 # back to lookup the owning changenode and collect information.
1593 group = cl.group(msng_cl_lst, identity,
1597 group = cl.group(msng_cl_lst, identity,
1594 manifest_and_file_collector(changedfiles))
1598 manifest_and_file_collector(changedfiles))
1595 for chnk in group:
1599 for chnk in group:
1596 yield chnk
1600 yield chnk
1597
1601
1598 # The list of manifests has been collected by the generator
1602 # The list of manifests has been collected by the generator
1599 # calling our functions back.
1603 # calling our functions back.
1600 prune_manifests()
1604 prune_manifests()
1601 msng_mnfst_lst = msng_mnfst_set.keys()
1605 msng_mnfst_lst = msng_mnfst_set.keys()
1602 # Sort the manifestnodes by revision number.
1606 # Sort the manifestnodes by revision number.
1603 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1607 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1604 # Create a generator for the manifestnodes that calls our lookup
1608 # Create a generator for the manifestnodes that calls our lookup
1605 # and data collection functions back.
1609 # and data collection functions back.
1606 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1610 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1607 filenode_collector(changedfiles))
1611 filenode_collector(changedfiles))
1608 for chnk in group:
1612 for chnk in group:
1609 yield chnk
1613 yield chnk
1610
1614
1611 # These are no longer needed, dereference and toss the memory for
1615 # These are no longer needed, dereference and toss the memory for
1612 # them.
1616 # them.
1613 msng_mnfst_lst = None
1617 msng_mnfst_lst = None
1614 msng_mnfst_set.clear()
1618 msng_mnfst_set.clear()
1615
1619
1616 changedfiles = changedfiles.keys()
1620 changedfiles = changedfiles.keys()
1617 changedfiles.sort()
1621 changedfiles.sort()
1618 # Go through all our files in order sorted by name.
1622 # Go through all our files in order sorted by name.
1619 for fname in changedfiles:
1623 for fname in changedfiles:
1620 filerevlog = self.file(fname)
1624 filerevlog = self.file(fname)
1621 # Toss out the filenodes that the recipient isn't really
1625 # Toss out the filenodes that the recipient isn't really
1622 # missing.
1626 # missing.
1623 if msng_filenode_set.has_key(fname):
1627 if msng_filenode_set.has_key(fname):
1624 prune_filenodes(fname, filerevlog)
1628 prune_filenodes(fname, filerevlog)
1625 msng_filenode_lst = msng_filenode_set[fname].keys()
1629 msng_filenode_lst = msng_filenode_set[fname].keys()
1626 else:
1630 else:
1627 msng_filenode_lst = []
1631 msng_filenode_lst = []
1628 # If any filenodes are left, generate the group for them,
1632 # If any filenodes are left, generate the group for them,
1629 # otherwise don't bother.
1633 # otherwise don't bother.
1630 if len(msng_filenode_lst) > 0:
1634 if len(msng_filenode_lst) > 0:
1631 yield changegroup.genchunk(fname)
1635 yield changegroup.genchunk(fname)
1632 # Sort the filenodes by their revision #
1636 # Sort the filenodes by their revision #
1633 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1637 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1634 # Create a group generator and only pass in a changenode
1638 # Create a group generator and only pass in a changenode
1635 # lookup function as we need to collect no information
1639 # lookup function as we need to collect no information
1636 # from filenodes.
1640 # from filenodes.
1637 group = filerevlog.group(msng_filenode_lst,
1641 group = filerevlog.group(msng_filenode_lst,
1638 lookup_filenode_link_func(fname))
1642 lookup_filenode_link_func(fname))
1639 for chnk in group:
1643 for chnk in group:
1640 yield chnk
1644 yield chnk
1641 if msng_filenode_set.has_key(fname):
1645 if msng_filenode_set.has_key(fname):
1642 # Don't need this anymore, toss it to free memory.
1646 # Don't need this anymore, toss it to free memory.
1643 del msng_filenode_set[fname]
1647 del msng_filenode_set[fname]
1644 # Signal that no more groups are left.
1648 # Signal that no more groups are left.
1645 yield changegroup.closechunk()
1649 yield changegroup.closechunk()
1646
1650
1647 if msng_cl_lst:
1651 if msng_cl_lst:
1648 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1652 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1649
1653
1650 return util.chunkbuffer(gengroup())
1654 return util.chunkbuffer(gengroup())
1651
1655
1652 def changegroup(self, basenodes, source):
1656 def changegroup(self, basenodes, source):
1653 """Generate a changegroup of all nodes that we have that a recipient
1657 """Generate a changegroup of all nodes that we have that a recipient
1654 doesn't.
1658 doesn't.
1655
1659
1656 This is much easier than the previous function as we can assume that
1660 This is much easier than the previous function as we can assume that
1657 the recipient has any changenode we aren't sending them."""
1661 the recipient has any changenode we aren't sending them."""
1658
1662
1659 self.hook('preoutgoing', throw=True, source=source)
1663 self.hook('preoutgoing', throw=True, source=source)
1660
1664
1661 cl = self.changelog
1665 cl = self.changelog
1662 nodes = cl.nodesbetween(basenodes, None)[0]
1666 nodes = cl.nodesbetween(basenodes, None)[0]
1663 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1667 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1664 self.changegroupinfo(nodes)
1668 self.changegroupinfo(nodes)
1665
1669
1666 def identity(x):
1670 def identity(x):
1667 return x
1671 return x
1668
1672
1669 def gennodelst(revlog):
1673 def gennodelst(revlog):
1670 for r in xrange(0, revlog.count()):
1674 for r in xrange(0, revlog.count()):
1671 n = revlog.node(r)
1675 n = revlog.node(r)
1672 if revlog.linkrev(n) in revset:
1676 if revlog.linkrev(n) in revset:
1673 yield n
1677 yield n
1674
1678
1675 def changed_file_collector(changedfileset):
1679 def changed_file_collector(changedfileset):
1676 def collect_changed_files(clnode):
1680 def collect_changed_files(clnode):
1677 c = cl.read(clnode)
1681 c = cl.read(clnode)
1678 for fname in c[3]:
1682 for fname in c[3]:
1679 changedfileset[fname] = 1
1683 changedfileset[fname] = 1
1680 return collect_changed_files
1684 return collect_changed_files
1681
1685
1682 def lookuprevlink_func(revlog):
1686 def lookuprevlink_func(revlog):
1683 def lookuprevlink(n):
1687 def lookuprevlink(n):
1684 return cl.node(revlog.linkrev(n))
1688 return cl.node(revlog.linkrev(n))
1685 return lookuprevlink
1689 return lookuprevlink
1686
1690
1687 def gengroup():
1691 def gengroup():
1688 # construct a list of all changed files
1692 # construct a list of all changed files
1689 changedfiles = {}
1693 changedfiles = {}
1690
1694
1691 for chnk in cl.group(nodes, identity,
1695 for chnk in cl.group(nodes, identity,
1692 changed_file_collector(changedfiles)):
1696 changed_file_collector(changedfiles)):
1693 yield chnk
1697 yield chnk
1694 changedfiles = changedfiles.keys()
1698 changedfiles = changedfiles.keys()
1695 changedfiles.sort()
1699 changedfiles.sort()
1696
1700
1697 mnfst = self.manifest
1701 mnfst = self.manifest
1698 nodeiter = gennodelst(mnfst)
1702 nodeiter = gennodelst(mnfst)
1699 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1703 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1700 yield chnk
1704 yield chnk
1701
1705
1702 for fname in changedfiles:
1706 for fname in changedfiles:
1703 filerevlog = self.file(fname)
1707 filerevlog = self.file(fname)
1704 nodeiter = gennodelst(filerevlog)
1708 nodeiter = gennodelst(filerevlog)
1705 nodeiter = list(nodeiter)
1709 nodeiter = list(nodeiter)
1706 if nodeiter:
1710 if nodeiter:
1707 yield changegroup.genchunk(fname)
1711 yield changegroup.genchunk(fname)
1708 lookup = lookuprevlink_func(filerevlog)
1712 lookup = lookuprevlink_func(filerevlog)
1709 for chnk in filerevlog.group(nodeiter, lookup):
1713 for chnk in filerevlog.group(nodeiter, lookup):
1710 yield chnk
1714 yield chnk
1711
1715
1712 yield changegroup.closechunk()
1716 yield changegroup.closechunk()
1713
1717
1714 if nodes:
1718 if nodes:
1715 self.hook('outgoing', node=hex(nodes[0]), source=source)
1719 self.hook('outgoing', node=hex(nodes[0]), source=source)
1716
1720
1717 return util.chunkbuffer(gengroup())
1721 return util.chunkbuffer(gengroup())
1718
1722
1719 def addchangegroup(self, source, srctype, url):
1723 def addchangegroup(self, source, srctype, url):
1720 """add changegroup to repo.
1724 """add changegroup to repo.
1721
1725
1722 return values:
1726 return values:
1723 - nothing changed or no source: 0
1727 - nothing changed or no source: 0
1724 - more heads than before: 1+added heads (2..n)
1728 - more heads than before: 1+added heads (2..n)
1725 - less heads than before: -1-removed heads (-2..-n)
1729 - less heads than before: -1-removed heads (-2..-n)
1726 - number of heads stays the same: 1
1730 - number of heads stays the same: 1
1727 """
1731 """
1728 def csmap(x):
1732 def csmap(x):
1729 self.ui.debug(_("add changeset %s\n") % short(x))
1733 self.ui.debug(_("add changeset %s\n") % short(x))
1730 return cl.count()
1734 return cl.count()
1731
1735
1732 def revmap(x):
1736 def revmap(x):
1733 return cl.rev(x)
1737 return cl.rev(x)
1734
1738
1735 if not source:
1739 if not source:
1736 return 0
1740 return 0
1737
1741
1738 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1742 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1739
1743
1740 changesets = files = revisions = 0
1744 changesets = files = revisions = 0
1741
1745
1742 tr = self.transaction()
1746 tr = self.transaction()
1743
1747
1744 # write changelog data to temp files so concurrent readers will not see
1748 # write changelog data to temp files so concurrent readers will not see
1745 # inconsistent view
1749 # inconsistent view
1746 cl = None
1750 cl = None
1747 try:
1751 try:
1748 cl = appendfile.appendchangelog(self.sopener,
1752 cl = appendfile.appendchangelog(self.sopener,
1749 self.changelog.version)
1753 self.changelog.version)
1750
1754
1751 oldheads = len(cl.heads())
1755 oldheads = len(cl.heads())
1752
1756
1753 # pull off the changeset group
1757 # pull off the changeset group
1754 self.ui.status(_("adding changesets\n"))
1758 self.ui.status(_("adding changesets\n"))
1755 cor = cl.count() - 1
1759 cor = cl.count() - 1
1756 chunkiter = changegroup.chunkiter(source)
1760 chunkiter = changegroup.chunkiter(source)
1757 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1761 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1758 raise util.Abort(_("received changelog group is empty"))
1762 raise util.Abort(_("received changelog group is empty"))
1759 cnr = cl.count() - 1
1763 cnr = cl.count() - 1
1760 changesets = cnr - cor
1764 changesets = cnr - cor
1761
1765
1762 # pull off the manifest group
1766 # pull off the manifest group
1763 self.ui.status(_("adding manifests\n"))
1767 self.ui.status(_("adding manifests\n"))
1764 chunkiter = changegroup.chunkiter(source)
1768 chunkiter = changegroup.chunkiter(source)
1765 # no need to check for empty manifest group here:
1769 # no need to check for empty manifest group here:
1766 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1770 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1767 # no new manifest will be created and the manifest group will
1771 # no new manifest will be created and the manifest group will
1768 # be empty during the pull
1772 # be empty during the pull
1769 self.manifest.addgroup(chunkiter, revmap, tr)
1773 self.manifest.addgroup(chunkiter, revmap, tr)
1770
1774
1771 # process the files
1775 # process the files
1772 self.ui.status(_("adding file changes\n"))
1776 self.ui.status(_("adding file changes\n"))
1773 while 1:
1777 while 1:
1774 f = changegroup.getchunk(source)
1778 f = changegroup.getchunk(source)
1775 if not f:
1779 if not f:
1776 break
1780 break
1777 self.ui.debug(_("adding %s revisions\n") % f)
1781 self.ui.debug(_("adding %s revisions\n") % f)
1778 fl = self.file(f)
1782 fl = self.file(f)
1779 o = fl.count()
1783 o = fl.count()
1780 chunkiter = changegroup.chunkiter(source)
1784 chunkiter = changegroup.chunkiter(source)
1781 if fl.addgroup(chunkiter, revmap, tr) is None:
1785 if fl.addgroup(chunkiter, revmap, tr) is None:
1782 raise util.Abort(_("received file revlog group is empty"))
1786 raise util.Abort(_("received file revlog group is empty"))
1783 revisions += fl.count() - o
1787 revisions += fl.count() - o
1784 files += 1
1788 files += 1
1785
1789
1786 cl.writedata()
1790 cl.writedata()
1787 finally:
1791 finally:
1788 if cl:
1792 if cl:
1789 cl.cleanup()
1793 cl.cleanup()
1790
1794
1791 # make changelog see real files again
1795 # make changelog see real files again
1792 self.changelog = changelog.changelog(self.sopener,
1796 self.changelog = changelog.changelog(self.sopener,
1793 self.changelog.version)
1797 self.changelog.version)
1794 self.changelog.checkinlinesize(tr)
1798 self.changelog.checkinlinesize(tr)
1795
1799
1796 newheads = len(self.changelog.heads())
1800 newheads = len(self.changelog.heads())
1797 heads = ""
1801 heads = ""
1798 if oldheads and newheads != oldheads:
1802 if oldheads and newheads != oldheads:
1799 heads = _(" (%+d heads)") % (newheads - oldheads)
1803 heads = _(" (%+d heads)") % (newheads - oldheads)
1800
1804
1801 self.ui.status(_("added %d changesets"
1805 self.ui.status(_("added %d changesets"
1802 " with %d changes to %d files%s\n")
1806 " with %d changes to %d files%s\n")
1803 % (changesets, revisions, files, heads))
1807 % (changesets, revisions, files, heads))
1804
1808
1805 if changesets > 0:
1809 if changesets > 0:
1806 self.hook('pretxnchangegroup', throw=True,
1810 self.hook('pretxnchangegroup', throw=True,
1807 node=hex(self.changelog.node(cor+1)), source=srctype,
1811 node=hex(self.changelog.node(cor+1)), source=srctype,
1808 url=url)
1812 url=url)
1809
1813
1810 tr.close()
1814 tr.close()
1811
1815
1812 if changesets > 0:
1816 if changesets > 0:
1813 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1817 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1814 source=srctype, url=url)
1818 source=srctype, url=url)
1815
1819
1816 for i in xrange(cor + 1, cnr + 1):
1820 for i in xrange(cor + 1, cnr + 1):
1817 self.hook("incoming", node=hex(self.changelog.node(i)),
1821 self.hook("incoming", node=hex(self.changelog.node(i)),
1818 source=srctype, url=url)
1822 source=srctype, url=url)
1819
1823
1820 # never return 0 here:
1824 # never return 0 here:
1821 if newheads < oldheads:
1825 if newheads < oldheads:
1822 return newheads - oldheads - 1
1826 return newheads - oldheads - 1
1823 else:
1827 else:
1824 return newheads - oldheads + 1
1828 return newheads - oldheads + 1
1825
1829
1826
1830
1827 def stream_in(self, remote):
1831 def stream_in(self, remote):
1828 fp = remote.stream_out()
1832 fp = remote.stream_out()
1829 l = fp.readline()
1833 l = fp.readline()
1830 try:
1834 try:
1831 resp = int(l)
1835 resp = int(l)
1832 except ValueError:
1836 except ValueError:
1833 raise util.UnexpectedOutput(
1837 raise util.UnexpectedOutput(
1834 _('Unexpected response from remote server:'), l)
1838 _('Unexpected response from remote server:'), l)
1835 if resp == 1:
1839 if resp == 1:
1836 raise util.Abort(_('operation forbidden by server'))
1840 raise util.Abort(_('operation forbidden by server'))
1837 elif resp == 2:
1841 elif resp == 2:
1838 raise util.Abort(_('locking the remote repository failed'))
1842 raise util.Abort(_('locking the remote repository failed'))
1839 elif resp != 0:
1843 elif resp != 0:
1840 raise util.Abort(_('the server sent an unknown error code'))
1844 raise util.Abort(_('the server sent an unknown error code'))
1841 self.ui.status(_('streaming all changes\n'))
1845 self.ui.status(_('streaming all changes\n'))
1842 l = fp.readline()
1846 l = fp.readline()
1843 try:
1847 try:
1844 total_files, total_bytes = map(int, l.split(' ', 1))
1848 total_files, total_bytes = map(int, l.split(' ', 1))
1845 except ValueError, TypeError:
1849 except ValueError, TypeError:
1846 raise util.UnexpectedOutput(
1850 raise util.UnexpectedOutput(
1847 _('Unexpected response from remote server:'), l)
1851 _('Unexpected response from remote server:'), l)
1848 self.ui.status(_('%d files to transfer, %s of data\n') %
1852 self.ui.status(_('%d files to transfer, %s of data\n') %
1849 (total_files, util.bytecount(total_bytes)))
1853 (total_files, util.bytecount(total_bytes)))
1850 start = time.time()
1854 start = time.time()
1851 for i in xrange(total_files):
1855 for i in xrange(total_files):
1852 # XXX doesn't support '\n' or '\r' in filenames
1856 # XXX doesn't support '\n' or '\r' in filenames
1853 l = fp.readline()
1857 l = fp.readline()
1854 try:
1858 try:
1855 name, size = l.split('\0', 1)
1859 name, size = l.split('\0', 1)
1856 size = int(size)
1860 size = int(size)
1857 except ValueError, TypeError:
1861 except ValueError, TypeError:
1858 raise util.UnexpectedOutput(
1862 raise util.UnexpectedOutput(
1859 _('Unexpected response from remote server:'), l)
1863 _('Unexpected response from remote server:'), l)
1860 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1864 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1861 ofp = self.sopener(name, 'w')
1865 ofp = self.sopener(name, 'w')
1862 for chunk in util.filechunkiter(fp, limit=size):
1866 for chunk in util.filechunkiter(fp, limit=size):
1863 ofp.write(chunk)
1867 ofp.write(chunk)
1864 ofp.close()
1868 ofp.close()
1865 elapsed = time.time() - start
1869 elapsed = time.time() - start
1866 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1870 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1867 (util.bytecount(total_bytes), elapsed,
1871 (util.bytecount(total_bytes), elapsed,
1868 util.bytecount(total_bytes / elapsed)))
1872 util.bytecount(total_bytes / elapsed)))
1869 self.reload()
1873 self.reload()
1870 return len(self.heads()) + 1
1874 return len(self.heads()) + 1
1871
1875
1872 def clone(self, remote, heads=[], stream=False):
1876 def clone(self, remote, heads=[], stream=False):
1873 '''clone remote repository.
1877 '''clone remote repository.
1874
1878
1875 keyword arguments:
1879 keyword arguments:
1876 heads: list of revs to clone (forces use of pull)
1880 heads: list of revs to clone (forces use of pull)
1877 stream: use streaming clone if possible'''
1881 stream: use streaming clone if possible'''
1878
1882
1879 # now, all clients that can request uncompressed clones can
1883 # now, all clients that can request uncompressed clones can
1880 # read repo formats supported by all servers that can serve
1884 # read repo formats supported by all servers that can serve
1881 # them.
1885 # them.
1882
1886
1883 # if revlog format changes, client will have to check version
1887 # if revlog format changes, client will have to check version
1884 # and format flags on "stream" capability, and use
1888 # and format flags on "stream" capability, and use
1885 # uncompressed only if compatible.
1889 # uncompressed only if compatible.
1886
1890
1887 if stream and not heads and remote.capable('stream'):
1891 if stream and not heads and remote.capable('stream'):
1888 return self.stream_in(remote)
1892 return self.stream_in(remote)
1889 return self.pull(remote, heads)
1893 return self.pull(remote, heads)
1890
1894
1891 # used to avoid circular references so destructors work
1895 # used to avoid circular references so destructors work
1892 def aftertrans(files):
1896 def aftertrans(files):
1893 renamefiles = [tuple(t) for t in files]
1897 renamefiles = [tuple(t) for t in files]
1894 def a():
1898 def a():
1895 for src, dest in renamefiles:
1899 for src, dest in renamefiles:
1896 util.rename(src, dest)
1900 util.rename(src, dest)
1897 return a
1901 return a
1898
1902
1899 def instance(ui, path, create):
1903 def instance(ui, path, create):
1900 return localrepository(ui, util.drop_scheme('file', path), create)
1904 return localrepository(ui, util.drop_scheme('file', path), create)
1901
1905
1902 def islocal(path):
1906 def islocal(path):
1903 return True
1907 return True
@@ -1,186 +1,206 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 # commit hooks can see env vars
3 # commit hooks can see env vars
4 hg init a
4 hg init a
5 cd a
5 cd a
6 echo "[hooks]" > .hg/hgrc
6 echo "[hooks]" > .hg/hgrc
7 echo 'commit = echo commit hook: n=$HG_NODE p1=$HG_PARENT1 p2=$HG_PARENT2' >> .hg/hgrc
7 echo 'commit = echo commit hook: n=$HG_NODE p1=$HG_PARENT1 p2=$HG_PARENT2' >> .hg/hgrc
8 echo 'commit.b = echo commit hook b' >> .hg/hgrc
8 echo 'commit.b = echo commit hook b' >> .hg/hgrc
9 echo 'precommit = echo precommit hook: p1=$HG_PARENT1 p2=$HG_PARENT2' >> .hg/hgrc
9 echo 'precommit = echo precommit hook: p1=$HG_PARENT1 p2=$HG_PARENT2' >> .hg/hgrc
10 echo 'pretxncommit = echo pretxncommit hook: n=$HG_NODE p1=$HG_PARENT1 p2=$HG_PARENT2; hg -q tip' >> .hg/hgrc
10 echo 'pretxncommit = echo pretxncommit hook: n=$HG_NODE p1=$HG_PARENT1 p2=$HG_PARENT2; hg -q tip' >> .hg/hgrc
11 echo a > a
11 echo a > a
12 hg add a
12 hg add a
13 hg commit -m a -d "1000000 0"
13 hg commit -m a -d "1000000 0"
14
14
15 hg clone . ../b
15 hg clone . ../b
16 cd ../b
16 cd ../b
17
17
18 # changegroup hooks can see env vars
18 # changegroup hooks can see env vars
19 echo '[hooks]' > .hg/hgrc
19 echo '[hooks]' > .hg/hgrc
20 echo 'prechangegroup = echo prechangegroup hook: u=`echo $HG_URL | sed s,file:.*,file:,`' >> .hg/hgrc
20 echo 'prechangegroup = echo prechangegroup hook: u=`echo $HG_URL | sed s,file:.*,file:,`' >> .hg/hgrc
21 echo 'changegroup = echo changegroup hook: n=$HG_NODE u=`echo $HG_URL | sed s,file:.*,file:,`' >> .hg/hgrc
21 echo 'changegroup = echo changegroup hook: n=$HG_NODE u=`echo $HG_URL | sed s,file:.*,file:,`' >> .hg/hgrc
22 echo 'incoming = echo incoming hook: n=$HG_NODE u=`echo $HG_URL | sed s,file:.*,file:,`' >> .hg/hgrc
22 echo 'incoming = echo incoming hook: n=$HG_NODE u=`echo $HG_URL | sed s,file:.*,file:,`' >> .hg/hgrc
23
23
24 # pretxncommit and commit hooks can see both parents of merge
24 # pretxncommit and commit hooks can see both parents of merge
25 cd ../a
25 cd ../a
26 echo b >> a
26 echo b >> a
27 hg commit -m a1 -d "1 0"
27 hg commit -m a1 -d "1 0"
28 hg update -C 0
28 hg update -C 0
29 echo b > b
29 echo b > b
30 hg add b
30 hg add b
31 hg commit -m b -d '1 0'
31 hg commit -m b -d '1 0'
32 hg merge 1
32 hg merge 1
33 hg commit -m merge -d '2 0'
33 hg commit -m merge -d '2 0'
34
34
35 cd ../b
35 cd ../b
36 hg pull ../a
36 hg pull ../a
37
37
38 # tag hooks can see env vars
38 # tag hooks can see env vars
39 cd ../a
39 cd ../a
40 echo 'pretag = echo pretag hook: t=$HG_TAG n=$HG_NODE l=$HG_LOCAL' >> .hg/hgrc
40 echo 'pretag = echo pretag hook: t=$HG_TAG n=$HG_NODE l=$HG_LOCAL' >> .hg/hgrc
41 echo 'tag = echo tag hook: t=$HG_TAG n=$HG_NODE l=$HG_LOCAL' >> .hg/hgrc
41 echo 'tag = echo tag hook: t=$HG_TAG n=$HG_NODE l=$HG_LOCAL' >> .hg/hgrc
42 hg tag -d '3 0' a
42 hg tag -d '3 0' a
43 hg tag -l la
43 hg tag -l la
44
44
45 # pretag hook can forbid tagging
45 # pretag hook can forbid tagging
46 echo 'pretag.forbid = echo pretag.forbid hook; exit 1' >> .hg/hgrc
46 echo 'pretag.forbid = echo pretag.forbid hook; exit 1' >> .hg/hgrc
47 hg tag -d '4 0' fa
47 hg tag -d '4 0' fa
48 hg tag -l fla
48 hg tag -l fla
49
49
50 # pretxncommit hook can see changeset, can roll back txn, changeset
50 # pretxncommit hook can see changeset, can roll back txn, changeset
51 # no more there after
51 # no more there after
52 echo 'pretxncommit.forbid = echo pretxncommit.forbid hook: tip=`hg -q tip`; exit 1' >> .hg/hgrc
52 echo 'pretxncommit.forbid = echo pretxncommit.forbid hook: tip=`hg -q tip`; exit 1' >> .hg/hgrc
53 echo z > z
53 echo z > z
54 hg add z
54 hg add z
55 hg -q tip
55 hg -q tip
56 hg commit -m 'fail' -d '4 0'
56 hg commit -m 'fail' -d '4 0'
57 hg -q tip
57 hg -q tip
58
58
59 # precommit hook can prevent commit
59 # precommit hook can prevent commit
60 echo 'precommit.forbid = echo precommit.forbid hook; exit 1' >> .hg/hgrc
60 echo 'precommit.forbid = echo precommit.forbid hook; exit 1' >> .hg/hgrc
61 hg commit -m 'fail' -d '4 0'
61 hg commit -m 'fail' -d '4 0'
62 hg -q tip
62 hg -q tip
63
63
64 # preupdate hook can prevent update
64 # preupdate hook can prevent update
65 echo 'preupdate = echo preupdate hook: p1=$HG_PARENT1 p2=$HG_PARENT2' >> .hg/hgrc
65 echo 'preupdate = echo preupdate hook: p1=$HG_PARENT1 p2=$HG_PARENT2' >> .hg/hgrc
66 hg update 1
66 hg update 1
67
67
68 # update hook
68 # update hook
69 echo 'update = echo update hook: p1=$HG_PARENT1 p2=$HG_PARENT2 err=$HG_ERROR' >> .hg/hgrc
69 echo 'update = echo update hook: p1=$HG_PARENT1 p2=$HG_PARENT2 err=$HG_ERROR' >> .hg/hgrc
70 hg update
70 hg update
71
71
72 # prechangegroup hook can prevent incoming changes
72 # prechangegroup hook can prevent incoming changes
73 cd ../b
73 cd ../b
74 hg -q tip
74 hg -q tip
75 echo '[hooks]' > .hg/hgrc
75 echo '[hooks]' > .hg/hgrc
76 echo 'prechangegroup.forbid = echo prechangegroup.forbid hook; exit 1' >> .hg/hgrc
76 echo 'prechangegroup.forbid = echo prechangegroup.forbid hook; exit 1' >> .hg/hgrc
77 hg pull ../a
77 hg pull ../a
78
78
79 # pretxnchangegroup hook can see incoming changes, can roll back txn,
79 # pretxnchangegroup hook can see incoming changes, can roll back txn,
80 # incoming changes no longer there after
80 # incoming changes no longer there after
81 echo '[hooks]' > .hg/hgrc
81 echo '[hooks]' > .hg/hgrc
82 echo 'pretxnchangegroup.forbid = echo pretxnchangegroup.forbid hook: tip=`hg -q tip`; exit 1' >> .hg/hgrc
82 echo 'pretxnchangegroup.forbid = echo pretxnchangegroup.forbid hook: tip=`hg -q tip`; exit 1' >> .hg/hgrc
83 hg pull ../a
83 hg pull ../a
84 hg -q tip
84 hg -q tip
85
85
86 # outgoing hooks can see env vars
86 # outgoing hooks can see env vars
87 rm .hg/hgrc
87 rm .hg/hgrc
88 echo '[hooks]' > ../a/.hg/hgrc
88 echo '[hooks]' > ../a/.hg/hgrc
89 echo 'preoutgoing = echo preoutgoing hook: s=$HG_SOURCE' >> ../a/.hg/hgrc
89 echo 'preoutgoing = echo preoutgoing hook: s=$HG_SOURCE' >> ../a/.hg/hgrc
90 echo 'outgoing = echo outgoing hook: n=$HG_NODE s=$HG_SOURCE' >> ../a/.hg/hgrc
90 echo 'outgoing = echo outgoing hook: n=$HG_NODE s=$HG_SOURCE' >> ../a/.hg/hgrc
91 hg pull ../a
91 hg pull ../a
92 hg rollback
92 hg rollback
93
93
94 # preoutgoing hook can prevent outgoing changes
94 # preoutgoing hook can prevent outgoing changes
95 echo 'preoutgoing.forbid = echo preoutgoing.forbid hook; exit 1' >> ../a/.hg/hgrc
95 echo 'preoutgoing.forbid = echo preoutgoing.forbid hook; exit 1' >> ../a/.hg/hgrc
96 hg pull ../a
96 hg pull ../a
97
97
98 cat > hooktests.py <<EOF
98 cat > hooktests.py <<EOF
99 from mercurial import util
99 from mercurial import util
100
100
101 uncallable = 0
101 uncallable = 0
102
102
103 def printargs(args):
103 def printargs(args):
104 args.pop('ui', None)
104 args.pop('ui', None)
105 args.pop('repo', None)
105 args.pop('repo', None)
106 a = list(args.items())
106 a = list(args.items())
107 a.sort()
107 a.sort()
108 print 'hook args:'
108 print 'hook args:'
109 for k, v in a:
109 for k, v in a:
110 print ' ', k, v
110 print ' ', k, v
111
111
112 def passhook(**args):
112 def passhook(**args):
113 printargs(args)
113 printargs(args)
114
114
115 def failhook(**args):
115 def failhook(**args):
116 printargs(args)
116 printargs(args)
117 return True
117 return True
118
118
119 class LocalException(Exception):
119 class LocalException(Exception):
120 pass
120 pass
121
121
122 def raisehook(**args):
122 def raisehook(**args):
123 raise LocalException('exception from hook')
123 raise LocalException('exception from hook')
124
124
125 def aborthook(**args):
125 def aborthook(**args):
126 raise util.Abort('raise abort from hook')
126 raise util.Abort('raise abort from hook')
127
127
128 def brokenhook(**args):
128 def brokenhook(**args):
129 return 1 + {}
129 return 1 + {}
130
130
131 class container:
131 class container:
132 unreachable = 1
132 unreachable = 1
133 EOF
133 EOF
134
134
135 echo '# test python hooks'
135 echo '# test python hooks'
136 PYTHONPATH="`pwd`:$PYTHONPATH"
136 PYTHONPATH="`pwd`:$PYTHONPATH"
137 export PYTHONPATH
137 export PYTHONPATH
138
138
139 echo '[hooks]' > ../a/.hg/hgrc
139 echo '[hooks]' > ../a/.hg/hgrc
140 echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
140 echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc
141 hg pull ../a 2>&1 | grep 'raised an exception'
141 hg pull ../a 2>&1 | grep 'raised an exception'
142
142
143 echo '[hooks]' > ../a/.hg/hgrc
143 echo '[hooks]' > ../a/.hg/hgrc
144 echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
144 echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc
145 hg pull ../a 2>&1 | grep 'raised an exception'
145 hg pull ../a 2>&1 | grep 'raised an exception'
146
146
147 echo '[hooks]' > ../a/.hg/hgrc
147 echo '[hooks]' > ../a/.hg/hgrc
148 echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
148 echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc
149 hg pull ../a
149 hg pull ../a
150
150
151 echo '[hooks]' > ../a/.hg/hgrc
151 echo '[hooks]' > ../a/.hg/hgrc
152 echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
152 echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc
153 hg pull ../a
153 hg pull ../a
154
154
155 echo '[hooks]' > ../a/.hg/hgrc
155 echo '[hooks]' > ../a/.hg/hgrc
156 echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
156 echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc
157 hg pull ../a
157 hg pull ../a
158
158
159 echo '[hooks]' > ../a/.hg/hgrc
159 echo '[hooks]' > ../a/.hg/hgrc
160 echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
160 echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc
161 hg pull ../a
161 hg pull ../a
162
162
163 echo '[hooks]' > ../a/.hg/hgrc
163 echo '[hooks]' > ../a/.hg/hgrc
164 echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
164 echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc
165 hg pull ../a
165 hg pull ../a
166
166
167 echo '[hooks]' > ../a/.hg/hgrc
167 echo '[hooks]' > ../a/.hg/hgrc
168 echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
168 echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc
169 hg pull ../a
169 hg pull ../a
170
170
171 echo '[hooks]' > ../a/.hg/hgrc
171 echo '[hooks]' > ../a/.hg/hgrc
172 echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
172 echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc
173 hg pull ../a
173 hg pull ../a
174
174
175 echo '[hooks]' > ../a/.hg/hgrc
175 echo '[hooks]' > ../a/.hg/hgrc
176 echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
176 echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc
177 hg pull ../a
177 hg pull ../a
178
178
179 echo '# make sure --traceback works'
179 echo '# make sure --traceback works'
180 echo '[hooks]' > .hg/hgrc
180 echo '[hooks]' > .hg/hgrc
181 echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
181 echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc
182
182
183 echo a >> a
183 echo a >> a
184 hg --traceback commit -A -m a 2>&1 | grep '^Traceback'
184 hg --traceback commit -A -m a 2>&1 | grep '^Traceback'
185
185
186 cd ..
187 hg init c
188 cd c
189
190 cat > hookext.py <<EOF
191 def autohook(**args):
192 print "Automatically installed hook"
193
194 def reposetup(ui, repo):
195 repo.ui.setconfig("hooks", "commit.auto", autohook)
196 EOF
197 echo '[extensions]' >> .hg/hgrc
198 echo 'hookext = hookext.py' >> .hg/hgrc
199
200 touch foo
201 hg add foo
202 hg ci -m 'add foo'
203 echo >> foo
204 hg ci --debug -m 'change foo' | sed -e 's/ at .*>/>/'
205
186 exit 0
206 exit 0
@@ -1,140 +1,144 b''
1 precommit hook: p1=0000000000000000000000000000000000000000 p2=
1 precommit hook: p1=0000000000000000000000000000000000000000 p2=
2 pretxncommit hook: n=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p1=0000000000000000000000000000000000000000 p2=
2 pretxncommit hook: n=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p1=0000000000000000000000000000000000000000 p2=
3 0:29b62aeb769f
3 0:29b62aeb769f
4 commit hook: n=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p1=0000000000000000000000000000000000000000 p2=
4 commit hook: n=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p1=0000000000000000000000000000000000000000 p2=
5 commit hook b
5 commit hook b
6 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
6 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
7 precommit hook: p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
7 precommit hook: p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
8 pretxncommit hook: n=b702efe9688826e3a91283852b328b84dbf37bc2 p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
8 pretxncommit hook: n=b702efe9688826e3a91283852b328b84dbf37bc2 p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
9 1:b702efe96888
9 1:b702efe96888
10 commit hook: n=b702efe9688826e3a91283852b328b84dbf37bc2 p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
10 commit hook: n=b702efe9688826e3a91283852b328b84dbf37bc2 p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
11 commit hook b
11 commit hook b
12 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
12 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
13 precommit hook: p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
13 precommit hook: p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
14 pretxncommit hook: n=1324a5531bac09b329c3845d35ae6a7526874edb p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
14 pretxncommit hook: n=1324a5531bac09b329c3845d35ae6a7526874edb p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
15 2:1324a5531bac
15 2:1324a5531bac
16 commit hook: n=1324a5531bac09b329c3845d35ae6a7526874edb p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
16 commit hook: n=1324a5531bac09b329c3845d35ae6a7526874edb p1=29b62aeb769fdf78d8d9c5f28b017f76d7ef824b p2=
17 commit hook b
17 commit hook b
18 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
18 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
19 (branch merge, don't forget to commit)
19 (branch merge, don't forget to commit)
20 precommit hook: p1=1324a5531bac09b329c3845d35ae6a7526874edb p2=b702efe9688826e3a91283852b328b84dbf37bc2
20 precommit hook: p1=1324a5531bac09b329c3845d35ae6a7526874edb p2=b702efe9688826e3a91283852b328b84dbf37bc2
21 pretxncommit hook: n=4c52fb2e402287dd5dc052090682536c8406c321 p1=1324a5531bac09b329c3845d35ae6a7526874edb p2=b702efe9688826e3a91283852b328b84dbf37bc2
21 pretxncommit hook: n=4c52fb2e402287dd5dc052090682536c8406c321 p1=1324a5531bac09b329c3845d35ae6a7526874edb p2=b702efe9688826e3a91283852b328b84dbf37bc2
22 3:4c52fb2e4022
22 3:4c52fb2e4022
23 commit hook: n=4c52fb2e402287dd5dc052090682536c8406c321 p1=1324a5531bac09b329c3845d35ae6a7526874edb p2=b702efe9688826e3a91283852b328b84dbf37bc2
23 commit hook: n=4c52fb2e402287dd5dc052090682536c8406c321 p1=1324a5531bac09b329c3845d35ae6a7526874edb p2=b702efe9688826e3a91283852b328b84dbf37bc2
24 commit hook b
24 commit hook b
25 prechangegroup hook: u=file:
25 prechangegroup hook: u=file:
26 changegroup hook: n=b702efe9688826e3a91283852b328b84dbf37bc2 u=file:
26 changegroup hook: n=b702efe9688826e3a91283852b328b84dbf37bc2 u=file:
27 incoming hook: n=b702efe9688826e3a91283852b328b84dbf37bc2 u=file:
27 incoming hook: n=b702efe9688826e3a91283852b328b84dbf37bc2 u=file:
28 incoming hook: n=1324a5531bac09b329c3845d35ae6a7526874edb u=file:
28 incoming hook: n=1324a5531bac09b329c3845d35ae6a7526874edb u=file:
29 incoming hook: n=4c52fb2e402287dd5dc052090682536c8406c321 u=file:
29 incoming hook: n=4c52fb2e402287dd5dc052090682536c8406c321 u=file:
30 pulling from ../a
30 pulling from ../a
31 searching for changes
31 searching for changes
32 adding changesets
32 adding changesets
33 adding manifests
33 adding manifests
34 adding file changes
34 adding file changes
35 added 3 changesets with 2 changes to 2 files
35 added 3 changesets with 2 changes to 2 files
36 (run 'hg update' to get a working copy)
36 (run 'hg update' to get a working copy)
37 pretag hook: t=a n=4c52fb2e402287dd5dc052090682536c8406c321 l=0
37 pretag hook: t=a n=4c52fb2e402287dd5dc052090682536c8406c321 l=0
38 precommit hook: p1=4c52fb2e402287dd5dc052090682536c8406c321 p2=
38 precommit hook: p1=4c52fb2e402287dd5dc052090682536c8406c321 p2=
39 pretxncommit hook: n=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 p1=4c52fb2e402287dd5dc052090682536c8406c321 p2=
39 pretxncommit hook: n=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 p1=4c52fb2e402287dd5dc052090682536c8406c321 p2=
40 4:8ea2ef7ad3e8
40 4:8ea2ef7ad3e8
41 commit hook: n=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 p1=4c52fb2e402287dd5dc052090682536c8406c321 p2=
41 commit hook: n=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 p1=4c52fb2e402287dd5dc052090682536c8406c321 p2=
42 commit hook b
42 commit hook b
43 tag hook: t=a n=4c52fb2e402287dd5dc052090682536c8406c321 l=0
43 tag hook: t=a n=4c52fb2e402287dd5dc052090682536c8406c321 l=0
44 pretag hook: t=la n=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 l=1
44 pretag hook: t=la n=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 l=1
45 tag hook: t=la n=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 l=1
45 tag hook: t=la n=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 l=1
46 pretag hook: t=fa n=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 l=0
46 pretag hook: t=fa n=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 l=0
47 pretag.forbid hook
47 pretag.forbid hook
48 abort: pretag.forbid hook exited with status 1
48 abort: pretag.forbid hook exited with status 1
49 pretag hook: t=fla n=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 l=1
49 pretag hook: t=fla n=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 l=1
50 pretag.forbid hook
50 pretag.forbid hook
51 abort: pretag.forbid hook exited with status 1
51 abort: pretag.forbid hook exited with status 1
52 4:8ea2ef7ad3e8
52 4:8ea2ef7ad3e8
53 precommit hook: p1=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 p2=
53 precommit hook: p1=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 p2=
54 pretxncommit hook: n=fad284daf8c032148abaffcd745dafeceefceb61 p1=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 p2=
54 pretxncommit hook: n=fad284daf8c032148abaffcd745dafeceefceb61 p1=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 p2=
55 5:fad284daf8c0
55 5:fad284daf8c0
56 pretxncommit.forbid hook: tip=5:fad284daf8c0
56 pretxncommit.forbid hook: tip=5:fad284daf8c0
57 abort: pretxncommit.forbid hook exited with status 1
57 abort: pretxncommit.forbid hook exited with status 1
58 transaction abort!
58 transaction abort!
59 rollback completed
59 rollback completed
60 4:8ea2ef7ad3e8
60 4:8ea2ef7ad3e8
61 precommit hook: p1=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 p2=
61 precommit hook: p1=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 p2=
62 precommit.forbid hook
62 precommit.forbid hook
63 abort: precommit.forbid hook exited with status 1
63 abort: precommit.forbid hook exited with status 1
64 4:8ea2ef7ad3e8
64 4:8ea2ef7ad3e8
65 preupdate hook: p1=b702efe96888 p2=
65 preupdate hook: p1=b702efe96888 p2=
66 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
66 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
67 preupdate hook: p1=8ea2ef7ad3e8 p2=
67 preupdate hook: p1=8ea2ef7ad3e8 p2=
68 update hook: p1=8ea2ef7ad3e8 p2= err=0
68 update hook: p1=8ea2ef7ad3e8 p2= err=0
69 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
69 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
70 3:4c52fb2e4022
70 3:4c52fb2e4022
71 prechangegroup.forbid hook
71 prechangegroup.forbid hook
72 pulling from ../a
72 pulling from ../a
73 searching for changes
73 searching for changes
74 abort: prechangegroup.forbid hook exited with status 1
74 abort: prechangegroup.forbid hook exited with status 1
75 pretxnchangegroup.forbid hook: tip=4:8ea2ef7ad3e8
75 pretxnchangegroup.forbid hook: tip=4:8ea2ef7ad3e8
76 pulling from ../a
76 pulling from ../a
77 searching for changes
77 searching for changes
78 adding changesets
78 adding changesets
79 adding manifests
79 adding manifests
80 adding file changes
80 adding file changes
81 added 1 changesets with 1 changes to 1 files
81 added 1 changesets with 1 changes to 1 files
82 abort: pretxnchangegroup.forbid hook exited with status 1
82 abort: pretxnchangegroup.forbid hook exited with status 1
83 transaction abort!
83 transaction abort!
84 rollback completed
84 rollback completed
85 3:4c52fb2e4022
85 3:4c52fb2e4022
86 preoutgoing hook: s=pull
86 preoutgoing hook: s=pull
87 outgoing hook: n=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 s=pull
87 outgoing hook: n=8ea2ef7ad3e8cac946c72f1e0c79d6aebc301198 s=pull
88 pulling from ../a
88 pulling from ../a
89 searching for changes
89 searching for changes
90 adding changesets
90 adding changesets
91 adding manifests
91 adding manifests
92 adding file changes
92 adding file changes
93 added 1 changesets with 1 changes to 1 files
93 added 1 changesets with 1 changes to 1 files
94 (run 'hg update' to get a working copy)
94 (run 'hg update' to get a working copy)
95 rolling back last transaction
95 rolling back last transaction
96 preoutgoing hook: s=pull
96 preoutgoing hook: s=pull
97 preoutgoing.forbid hook
97 preoutgoing.forbid hook
98 pulling from ../a
98 pulling from ../a
99 searching for changes
99 searching for changes
100 abort: preoutgoing.forbid hook exited with status 1
100 abort: preoutgoing.forbid hook exited with status 1
101 # test python hooks
101 # test python hooks
102 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
102 error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict'
103 error: preoutgoing.raise hook raised an exception: exception from hook
103 error: preoutgoing.raise hook raised an exception: exception from hook
104 pulling from ../a
104 pulling from ../a
105 searching for changes
105 searching for changes
106 error: preoutgoing.abort hook failed: raise abort from hook
106 error: preoutgoing.abort hook failed: raise abort from hook
107 abort: raise abort from hook
107 abort: raise abort from hook
108 pulling from ../a
108 pulling from ../a
109 searching for changes
109 searching for changes
110 hook args:
110 hook args:
111 hooktype preoutgoing
111 hooktype preoutgoing
112 source pull
112 source pull
113 abort: preoutgoing.fail hook failed
113 abort: preoutgoing.fail hook failed
114 pulling from ../a
114 pulling from ../a
115 searching for changes
115 searching for changes
116 abort: preoutgoing.uncallable hook is invalid ("hooktests.uncallable" is not callable)
116 abort: preoutgoing.uncallable hook is invalid ("hooktests.uncallable" is not callable)
117 pulling from ../a
117 pulling from ../a
118 searching for changes
118 searching for changes
119 abort: preoutgoing.nohook hook is invalid ("hooktests.nohook" is not defined)
119 abort: preoutgoing.nohook hook is invalid ("hooktests.nohook" is not defined)
120 pulling from ../a
120 pulling from ../a
121 searching for changes
121 searching for changes
122 abort: preoutgoing.nomodule hook is invalid ("nomodule" not in a module)
122 abort: preoutgoing.nomodule hook is invalid ("nomodule" not in a module)
123 pulling from ../a
123 pulling from ../a
124 searching for changes
124 searching for changes
125 abort: preoutgoing.badmodule hook is invalid (import of "nomodule" failed)
125 abort: preoutgoing.badmodule hook is invalid (import of "nomodule" failed)
126 pulling from ../a
126 pulling from ../a
127 searching for changes
127 searching for changes
128 abort: preoutgoing.unreachable hook is invalid (import of "hooktests.container" failed)
128 abort: preoutgoing.unreachable hook is invalid (import of "hooktests.container" failed)
129 pulling from ../a
129 pulling from ../a
130 searching for changes
130 searching for changes
131 hook args:
131 hook args:
132 hooktype preoutgoing
132 hooktype preoutgoing
133 source pull
133 source pull
134 adding changesets
134 adding changesets
135 adding manifests
135 adding manifests
136 adding file changes
136 adding file changes
137 added 1 changesets with 1 changes to 1 files
137 added 1 changesets with 1 changes to 1 files
138 (run 'hg update' to get a working copy)
138 (run 'hg update' to get a working copy)
139 # make sure --traceback works
139 # make sure --traceback works
140 Traceback (most recent call last):
140 Traceback (most recent call last):
141 Automatically installed hook
142 foo
143 calling hook commit.auto: <function autohook>
144 Automatically installed hook
General Comments 0
You need to be logged in to leave comments. Login now