Show repo's revlog format on verify only if it doesn't match the default format....
Thomas Arendsen Hein
r2152:57729c56 default
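Editor's note (sketch, not part of the changeset): the constructor in the diff below reads the hgrc [revlog] section ('format' and 'flags' keys, as shown in the code) to choose the revlog format, and this changeset adds a self.revlogv1 attribute so that, per the commit message, verify can report the format only when it differs from the default. A rough Python sketch of that selection, assuming revlog.REVLOGV0 == 0 and using a stand-in for revlog.flagstr:

def pick_revlog_version(revlogopts, changelog_version, flagstr, REVLOGV0=0):
    # sketch only: mirrors the __init__ logic shown in the diff
    version = int(revlogopts.get('format', REVLOGV0))
    revlogv1 = version != REVLOGV0          # attribute added by this changeset
    flags = 0
    for name in revlogopts.get('flags', "").split():
        flags |= flagstr(name)
    # the changelog on disk wins; hgrc flags apply only if the formats agree
    if changelog_version == version:
        return changelog_version | flags, revlogv1
    return changelog_version, revlogv1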
@@ -1,1998 +1,2000 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import os, util
8 import os, util
9 import filelog, manifest, changelog, dirstate, repo
9 import filelog, manifest, changelog, dirstate, repo
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "appendfile changegroup")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui revlog")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui revlog")
15
15
16 class localrepository(object):
16 class localrepository(object):
17 def __del__(self):
17 def __del__(self):
18 self.transhandle = None
18 self.transhandle = None
19 def __init__(self, parentui, path=None, create=0):
19 def __init__(self, parentui, path=None, create=0):
20 if not path:
20 if not path:
21 p = os.getcwd()
21 p = os.getcwd()
22 while not os.path.isdir(os.path.join(p, ".hg")):
22 while not os.path.isdir(os.path.join(p, ".hg")):
23 oldp = p
23 oldp = p
24 p = os.path.dirname(p)
24 p = os.path.dirname(p)
25 if p == oldp:
25 if p == oldp:
26 raise repo.RepoError(_("no repo found"))
26 raise repo.RepoError(_("no repo found"))
27 path = p
27 path = p
28 self.path = os.path.join(path, ".hg")
28 self.path = os.path.join(path, ".hg")
29
29
30 if not create and not os.path.isdir(self.path):
30 if not create and not os.path.isdir(self.path):
31 raise repo.RepoError(_("repository %s not found") % path)
31 raise repo.RepoError(_("repository %s not found") % path)
32
32
33 self.root = os.path.abspath(path)
33 self.root = os.path.abspath(path)
34 self.origroot = path
34 self.origroot = path
35 self.ui = ui.ui(parentui=parentui)
35 self.ui = ui.ui(parentui=parentui)
36 self.opener = util.opener(self.path)
36 self.opener = util.opener(self.path)
37 self.wopener = util.opener(self.root)
37 self.wopener = util.opener(self.root)
38
38
39 try:
39 try:
40 self.ui.readconfig(self.join("hgrc"), self.root)
40 self.ui.readconfig(self.join("hgrc"), self.root)
41 except IOError:
41 except IOError:
42 pass
42 pass
43
43
44 v = self.ui.revlogopts
44 v = self.ui.revlogopts
45 self.revlogversion = int(v.get('format', revlog.REVLOGV0))
45 self.revlogversion = int(v.get('format', revlog.REVLOGV0))
46 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
46 flags = 0
47 flags = 0
47 for x in v.get('flags', "").split():
48 for x in v.get('flags', "").split():
48 flags |= revlog.flagstr(x)
49 flags |= revlog.flagstr(x)
49
50
50 v = self.revlogversion | flags
51 v = self.revlogversion | flags
51 self.manifest = manifest.manifest(self.opener, v)
52 self.manifest = manifest.manifest(self.opener, v)
52 self.changelog = changelog.changelog(self.opener, v)
53 self.changelog = changelog.changelog(self.opener, v)
53
54
54 # the changelog might not have the inline index flag
55 # the changelog might not have the inline index flag
55 # on. If the format of the changelog is the same as found in
56 # on. If the format of the changelog is the same as found in
56 # .hgrc, apply any flags found in the .hgrc as well.
57 # .hgrc, apply any flags found in the .hgrc as well.
57 # Otherwise, just version from the changelog
58 # Otherwise, just version from the changelog
58 v = self.changelog.version
59 v = self.changelog.version
59 if v == self.revlogversion:
60 if v == self.revlogversion:
60 v |= flags
61 v |= flags
61 self.revlogversion = v
62 self.revlogversion = v
62
63
63 self.tagscache = None
64 self.tagscache = None
64 self.nodetagscache = None
65 self.nodetagscache = None
65 self.encodepats = None
66 self.encodepats = None
66 self.decodepats = None
67 self.decodepats = None
67 self.transhandle = None
68 self.transhandle = None
68
69
69 if create:
70 if create:
70 os.mkdir(self.path)
71 os.mkdir(self.path)
71 os.mkdir(self.join("data"))
72 os.mkdir(self.join("data"))
72
73
73 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
74 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
74 def hook(self, name, throw=False, **args):
75 def hook(self, name, throw=False, **args):
75 def runhook(name, cmd):
76 def runhook(name, cmd):
76 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
77 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
77 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
78 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()] +
78 [(k.upper(), v) for k, v in args.iteritems()])
79 [(k.upper(), v) for k, v in args.iteritems()])
79 r = util.system(cmd, environ=env, cwd=self.root)
80 r = util.system(cmd, environ=env, cwd=self.root)
80 if r:
81 if r:
81 desc, r = util.explain_exit(r)
82 desc, r = util.explain_exit(r)
82 if throw:
83 if throw:
83 raise util.Abort(_('%s hook %s') % (name, desc))
84 raise util.Abort(_('%s hook %s') % (name, desc))
84 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
85 self.ui.warn(_('error: %s hook %s\n') % (name, desc))
85 return False
86 return False
86 return True
87 return True
87
88
88 r = True
89 r = True
89 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
90 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
90 if hname.split(".", 1)[0] == name and cmd]
91 if hname.split(".", 1)[0] == name and cmd]
91 hooks.sort()
92 hooks.sort()
92 for hname, cmd in hooks:
93 for hname, cmd in hooks:
93 r = runhook(hname, cmd) and r
94 r = runhook(hname, cmd) and r
94 return r
95 return r
95
96
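# Editor's sketch (not part of the changeset): hook() above runs every hgrc
# [hooks] entry whose name (before an optional ".suffix") matches the hook
# name, passing the keyword arguments as HG_* (and bare uppercase) environment
# variables. A hypothetical configuration:
#
#   [hooks]
#   commit = hg tip
#   commit.notify = python notify.py    # hypothetical helper script
#
# For the later call self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
# both commands would run with HG_NODE, HG_PARENT1 and HG_PARENT2 set.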
96 def tags(self):
97 def tags(self):
97 '''return a mapping of tag to node'''
98 '''return a mapping of tag to node'''
98 if not self.tagscache:
99 if not self.tagscache:
99 self.tagscache = {}
100 self.tagscache = {}
100
101
101 def parsetag(line, context):
102 def parsetag(line, context):
102 if not line:
103 if not line:
103 return
104 return
104 s = l.split(" ", 1)
105 s = l.split(" ", 1)
105 if len(s) != 2:
106 if len(s) != 2:
106 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
107 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
107 return
108 return
108 node, key = s
109 node, key = s
109 try:
110 try:
110 bin_n = bin(node)
111 bin_n = bin(node)
111 except TypeError:
112 except TypeError:
112 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
113 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
113 return
114 return
114 if bin_n not in self.changelog.nodemap:
115 if bin_n not in self.changelog.nodemap:
115 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
116 self.ui.warn(_("%s: ignoring invalid tag\n") % context)
116 return
117 return
117 self.tagscache[key.strip()] = bin_n
118 self.tagscache[key.strip()] = bin_n
118
119
119 # read each head of the tags file, ending with the tip
120 # read each head of the tags file, ending with the tip
120 # and add each tag found to the map, with "newer" ones
121 # and add each tag found to the map, with "newer" ones
121 # taking precedence
122 # taking precedence
122 fl = self.file(".hgtags")
123 fl = self.file(".hgtags")
123 h = fl.heads()
124 h = fl.heads()
124 h.reverse()
125 h.reverse()
125 for r in h:
126 for r in h:
126 count = 0
127 count = 0
127 for l in fl.read(r).splitlines():
128 for l in fl.read(r).splitlines():
128 count += 1
129 count += 1
129 parsetag(l, ".hgtags:%d" % count)
130 parsetag(l, ".hgtags:%d" % count)
130
131
131 try:
132 try:
132 f = self.opener("localtags")
133 f = self.opener("localtags")
133 count = 0
134 count = 0
134 for l in f:
135 for l in f:
135 count += 1
136 count += 1
136 parsetag(l, "localtags:%d" % count)
137 parsetag(l, "localtags:%d" % count)
137 except IOError:
138 except IOError:
138 pass
139 pass
139
140
140 self.tagscache['tip'] = self.changelog.tip()
141 self.tagscache['tip'] = self.changelog.tip()
141
142
142 return self.tagscache
143 return self.tagscache
143
144
144 def tagslist(self):
145 def tagslist(self):
145 '''return a list of tags ordered by revision'''
146 '''return a list of tags ordered by revision'''
146 l = []
147 l = []
147 for t, n in self.tags().items():
148 for t, n in self.tags().items():
148 try:
149 try:
149 r = self.changelog.rev(n)
150 r = self.changelog.rev(n)
150 except:
151 except:
151 r = -2 # sort to the beginning of the list if unknown
152 r = -2 # sort to the beginning of the list if unknown
152 l.append((r, t, n))
153 l.append((r, t, n))
153 l.sort()
154 l.sort()
154 return [(t, n) for r, t, n in l]
155 return [(t, n) for r, t, n in l]
155
156
156 def nodetags(self, node):
157 def nodetags(self, node):
157 '''return the tags associated with a node'''
158 '''return the tags associated with a node'''
158 if not self.nodetagscache:
159 if not self.nodetagscache:
159 self.nodetagscache = {}
160 self.nodetagscache = {}
160 for t, n in self.tags().items():
161 for t, n in self.tags().items():
161 self.nodetagscache.setdefault(n, []).append(t)
162 self.nodetagscache.setdefault(n, []).append(t)
162 return self.nodetagscache.get(node, [])
163 return self.nodetagscache.get(node, [])
163
164
164 def lookup(self, key):
165 def lookup(self, key):
165 try:
166 try:
166 return self.tags()[key]
167 return self.tags()[key]
167 except KeyError:
168 except KeyError:
168 try:
169 try:
169 return self.changelog.lookup(key)
170 return self.changelog.lookup(key)
170 except:
171 except:
171 raise repo.RepoError(_("unknown revision '%s'") % key)
172 raise repo.RepoError(_("unknown revision '%s'") % key)
172
173
173 def dev(self):
174 def dev(self):
174 return os.stat(self.path).st_dev
175 return os.stat(self.path).st_dev
175
176
176 def local(self):
177 def local(self):
177 return True
178 return True
178
179
179 def join(self, f):
180 def join(self, f):
180 return os.path.join(self.path, f)
181 return os.path.join(self.path, f)
181
182
182 def wjoin(self, f):
183 def wjoin(self, f):
183 return os.path.join(self.root, f)
184 return os.path.join(self.root, f)
184
185
185 def file(self, f):
186 def file(self, f):
186 if f[0] == '/':
187 if f[0] == '/':
187 f = f[1:]
188 f = f[1:]
188 return filelog.filelog(self.opener, f, self.revlogversion)
189 return filelog.filelog(self.opener, f, self.revlogversion)
189
190
190 def getcwd(self):
191 def getcwd(self):
191 return self.dirstate.getcwd()
192 return self.dirstate.getcwd()
192
193
193 def wfile(self, f, mode='r'):
194 def wfile(self, f, mode='r'):
194 return self.wopener(f, mode)
195 return self.wopener(f, mode)
195
196
196 def wread(self, filename):
197 def wread(self, filename):
197 if self.encodepats == None:
198 if self.encodepats == None:
198 l = []
199 l = []
199 for pat, cmd in self.ui.configitems("encode"):
200 for pat, cmd in self.ui.configitems("encode"):
200 mf = util.matcher(self.root, "", [pat], [], [])[1]
201 mf = util.matcher(self.root, "", [pat], [], [])[1]
201 l.append((mf, cmd))
202 l.append((mf, cmd))
202 self.encodepats = l
203 self.encodepats = l
203
204
204 data = self.wopener(filename, 'r').read()
205 data = self.wopener(filename, 'r').read()
205
206
206 for mf, cmd in self.encodepats:
207 for mf, cmd in self.encodepats:
207 if mf(filename):
208 if mf(filename):
208 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
209 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
209 data = util.filter(data, cmd)
210 data = util.filter(data, cmd)
210 break
211 break
211
212
212 return data
213 return data
213
214
214 def wwrite(self, filename, data, fd=None):
215 def wwrite(self, filename, data, fd=None):
215 if self.decodepats == None:
216 if self.decodepats == None:
216 l = []
217 l = []
217 for pat, cmd in self.ui.configitems("decode"):
218 for pat, cmd in self.ui.configitems("decode"):
218 mf = util.matcher(self.root, "", [pat], [], [])[1]
219 mf = util.matcher(self.root, "", [pat], [], [])[1]
219 l.append((mf, cmd))
220 l.append((mf, cmd))
220 self.decodepats = l
221 self.decodepats = l
221
222
222 for mf, cmd in self.decodepats:
223 for mf, cmd in self.decodepats:
223 if mf(filename):
224 if mf(filename):
224 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
225 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
225 data = util.filter(data, cmd)
226 data = util.filter(data, cmd)
226 break
227 break
227
228
228 if fd:
229 if fd:
229 return fd.write(data)
230 return fd.write(data)
230 return self.wopener(filename, 'w').write(data)
231 return self.wopener(filename, 'w').write(data)
231
232
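# Editor's sketch (not part of the changeset): wread()/wwrite() above apply the
# hgrc [encode] and [decode] sections, where each entry maps a file pattern
# (matched by util.matcher against the repository root) to a shell command run
# over the file contents via util.filter. A hypothetical pair:
#
#   [encode]
#   *.txt = dos2unix    # hypothetical command: normalize line endings on read
#   [decode]
#   *.txt = unix2dos    # hypothetical command: convert back on write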
232 def transaction(self):
233 def transaction(self):
233 tr = self.transhandle
234 tr = self.transhandle
234 if tr != None and tr.running():
235 if tr != None and tr.running():
235 return tr.nest()
236 return tr.nest()
236
237
237 # save dirstate for undo
238 # save dirstate for undo
238 try:
239 try:
239 ds = self.opener("dirstate").read()
240 ds = self.opener("dirstate").read()
240 except IOError:
241 except IOError:
241 ds = ""
242 ds = ""
242 self.opener("journal.dirstate", "w").write(ds)
243 self.opener("journal.dirstate", "w").write(ds)
243
244
244 tr = transaction.transaction(self.ui.warn, self.opener,
245 tr = transaction.transaction(self.ui.warn, self.opener,
245 self.join("journal"),
246 self.join("journal"),
246 aftertrans(self.path))
247 aftertrans(self.path))
247 self.transhandle = tr
248 self.transhandle = tr
248 return tr
249 return tr
249
250
250 def recover(self):
251 def recover(self):
251 l = self.lock()
252 l = self.lock()
252 if os.path.exists(self.join("journal")):
253 if os.path.exists(self.join("journal")):
253 self.ui.status(_("rolling back interrupted transaction\n"))
254 self.ui.status(_("rolling back interrupted transaction\n"))
254 transaction.rollback(self.opener, self.join("journal"))
255 transaction.rollback(self.opener, self.join("journal"))
255 self.reload()
256 self.reload()
256 return True
257 return True
257 else:
258 else:
258 self.ui.warn(_("no interrupted transaction available\n"))
259 self.ui.warn(_("no interrupted transaction available\n"))
259 return False
260 return False
260
261
261 def undo(self, wlock=None):
262 def undo(self, wlock=None):
262 if not wlock:
263 if not wlock:
263 wlock = self.wlock()
264 wlock = self.wlock()
264 l = self.lock()
265 l = self.lock()
265 if os.path.exists(self.join("undo")):
266 if os.path.exists(self.join("undo")):
266 self.ui.status(_("rolling back last transaction\n"))
267 self.ui.status(_("rolling back last transaction\n"))
267 transaction.rollback(self.opener, self.join("undo"))
268 transaction.rollback(self.opener, self.join("undo"))
268 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
269 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
269 self.reload()
270 self.reload()
270 self.wreload()
271 self.wreload()
271 else:
272 else:
272 self.ui.warn(_("no undo information available\n"))
273 self.ui.warn(_("no undo information available\n"))
273
274
274 def wreload(self):
275 def wreload(self):
275 self.dirstate.read()
276 self.dirstate.read()
276
277
277 def reload(self):
278 def reload(self):
278 self.changelog.load()
279 self.changelog.load()
279 self.manifest.load()
280 self.manifest.load()
280 self.tagscache = None
281 self.tagscache = None
281 self.nodetagscache = None
282 self.nodetagscache = None
282
283
283 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
284 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
284 desc=None):
285 desc=None):
285 try:
286 try:
286 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
287 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
287 except lock.LockHeld, inst:
288 except lock.LockHeld, inst:
288 if not wait:
289 if not wait:
289 raise
290 raise
290 self.ui.warn(_("waiting for lock on %s held by %s\n") %
291 self.ui.warn(_("waiting for lock on %s held by %s\n") %
291 (desc, inst.args[0]))
292 (desc, inst.args[0]))
292 # default to 600 seconds timeout
293 # default to 600 seconds timeout
293 l = lock.lock(self.join(lockname),
294 l = lock.lock(self.join(lockname),
294 int(self.ui.config("ui", "timeout") or 600),
295 int(self.ui.config("ui", "timeout") or 600),
295 releasefn, desc=desc)
296 releasefn, desc=desc)
296 if acquirefn:
297 if acquirefn:
297 acquirefn()
298 acquirefn()
298 return l
299 return l
299
300
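# Editor's note (sketch): when a lock is already held, do_lock() above retries
# with a timeout taken from ui.timeout (600 seconds if unset), so the wait can
# be tuned from hgrc, e.g.:
#
#   [ui]
#   timeout = 30    # assumed example value, in seconds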
300 def lock(self, wait=1):
301 def lock(self, wait=1):
301 return self.do_lock("lock", wait, acquirefn=self.reload,
302 return self.do_lock("lock", wait, acquirefn=self.reload,
302 desc=_('repository %s') % self.origroot)
303 desc=_('repository %s') % self.origroot)
303
304
304 def wlock(self, wait=1):
305 def wlock(self, wait=1):
305 return self.do_lock("wlock", wait, self.dirstate.write,
306 return self.do_lock("wlock", wait, self.dirstate.write,
306 self.wreload,
307 self.wreload,
307 desc=_('working directory of %s') % self.origroot)
308 desc=_('working directory of %s') % self.origroot)
308
309
309 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
310 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
310 "determine whether a new filenode is needed"
311 "determine whether a new filenode is needed"
311 fp1 = manifest1.get(filename, nullid)
312 fp1 = manifest1.get(filename, nullid)
312 fp2 = manifest2.get(filename, nullid)
313 fp2 = manifest2.get(filename, nullid)
313
314
314 if fp2 != nullid:
315 if fp2 != nullid:
315 # is one parent an ancestor of the other?
316 # is one parent an ancestor of the other?
316 fpa = filelog.ancestor(fp1, fp2)
317 fpa = filelog.ancestor(fp1, fp2)
317 if fpa == fp1:
318 if fpa == fp1:
318 fp1, fp2 = fp2, nullid
319 fp1, fp2 = fp2, nullid
319 elif fpa == fp2:
320 elif fpa == fp2:
320 fp2 = nullid
321 fp2 = nullid
321
322
322 # is the file unmodified from the parent? report existing entry
323 # is the file unmodified from the parent? report existing entry
323 if fp2 == nullid and text == filelog.read(fp1):
324 if fp2 == nullid and text == filelog.read(fp1):
324 return (fp1, None, None)
325 return (fp1, None, None)
325
326
326 return (None, fp1, fp2)
327 return (None, fp1, fp2)
327
328
328 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
329 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
329 orig_parent = self.dirstate.parents()[0] or nullid
330 orig_parent = self.dirstate.parents()[0] or nullid
330 p1 = p1 or self.dirstate.parents()[0] or nullid
331 p1 = p1 or self.dirstate.parents()[0] or nullid
331 p2 = p2 or self.dirstate.parents()[1] or nullid
332 p2 = p2 or self.dirstate.parents()[1] or nullid
332 c1 = self.changelog.read(p1)
333 c1 = self.changelog.read(p1)
333 c2 = self.changelog.read(p2)
334 c2 = self.changelog.read(p2)
334 m1 = self.manifest.read(c1[0])
335 m1 = self.manifest.read(c1[0])
335 mf1 = self.manifest.readflags(c1[0])
336 mf1 = self.manifest.readflags(c1[0])
336 m2 = self.manifest.read(c2[0])
337 m2 = self.manifest.read(c2[0])
337 changed = []
338 changed = []
338
339
339 if orig_parent == p1:
340 if orig_parent == p1:
340 update_dirstate = 1
341 update_dirstate = 1
341 else:
342 else:
342 update_dirstate = 0
343 update_dirstate = 0
343
344
344 if not wlock:
345 if not wlock:
345 wlock = self.wlock()
346 wlock = self.wlock()
346 l = self.lock()
347 l = self.lock()
347 tr = self.transaction()
348 tr = self.transaction()
348 mm = m1.copy()
349 mm = m1.copy()
349 mfm = mf1.copy()
350 mfm = mf1.copy()
350 linkrev = self.changelog.count()
351 linkrev = self.changelog.count()
351 for f in files:
352 for f in files:
352 try:
353 try:
353 t = self.wread(f)
354 t = self.wread(f)
354 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
355 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
355 r = self.file(f)
356 r = self.file(f)
356 mfm[f] = tm
357 mfm[f] = tm
357
358
358 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
359 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
359 if entry:
360 if entry:
360 mm[f] = entry
361 mm[f] = entry
361 continue
362 continue
362
363
363 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
364 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
364 changed.append(f)
365 changed.append(f)
365 if update_dirstate:
366 if update_dirstate:
366 self.dirstate.update([f], "n")
367 self.dirstate.update([f], "n")
367 except IOError:
368 except IOError:
368 try:
369 try:
369 del mm[f]
370 del mm[f]
370 del mfm[f]
371 del mfm[f]
371 if update_dirstate:
372 if update_dirstate:
372 self.dirstate.forget([f])
373 self.dirstate.forget([f])
373 except:
374 except:
374 # deleted from p2?
375 # deleted from p2?
375 pass
376 pass
376
377
377 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
378 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
378 user = user or self.ui.username()
379 user = user or self.ui.username()
379 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
380 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
380 tr.close()
381 tr.close()
381 if update_dirstate:
382 if update_dirstate:
382 self.dirstate.setparents(n, nullid)
383 self.dirstate.setparents(n, nullid)
383
384
384 def commit(self, files=None, text="", user=None, date=None,
385 def commit(self, files=None, text="", user=None, date=None,
385 match=util.always, force=False, lock=None, wlock=None):
386 match=util.always, force=False, lock=None, wlock=None):
386 commit = []
387 commit = []
387 remove = []
388 remove = []
388 changed = []
389 changed = []
389
390
390 if files:
391 if files:
391 for f in files:
392 for f in files:
392 s = self.dirstate.state(f)
393 s = self.dirstate.state(f)
393 if s in 'nmai':
394 if s in 'nmai':
394 commit.append(f)
395 commit.append(f)
395 elif s == 'r':
396 elif s == 'r':
396 remove.append(f)
397 remove.append(f)
397 else:
398 else:
398 self.ui.warn(_("%s not tracked!\n") % f)
399 self.ui.warn(_("%s not tracked!\n") % f)
399 else:
400 else:
400 modified, added, removed, deleted, unknown = self.changes(match=match)
401 modified, added, removed, deleted, unknown = self.changes(match=match)
401 commit = modified + added
402 commit = modified + added
402 remove = removed
403 remove = removed
403
404
404 p1, p2 = self.dirstate.parents()
405 p1, p2 = self.dirstate.parents()
405 c1 = self.changelog.read(p1)
406 c1 = self.changelog.read(p1)
406 c2 = self.changelog.read(p2)
407 c2 = self.changelog.read(p2)
407 m1 = self.manifest.read(c1[0])
408 m1 = self.manifest.read(c1[0])
408 mf1 = self.manifest.readflags(c1[0])
409 mf1 = self.manifest.readflags(c1[0])
409 m2 = self.manifest.read(c2[0])
410 m2 = self.manifest.read(c2[0])
410
411
411 if not commit and not remove and not force and p2 == nullid:
412 if not commit and not remove and not force and p2 == nullid:
412 self.ui.status(_("nothing changed\n"))
413 self.ui.status(_("nothing changed\n"))
413 return None
414 return None
414
415
415 xp1 = hex(p1)
416 xp1 = hex(p1)
416 if p2 == nullid: xp2 = ''
417 if p2 == nullid: xp2 = ''
417 else: xp2 = hex(p2)
418 else: xp2 = hex(p2)
418
419
419 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
420 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
420
421
421 if not wlock:
422 if not wlock:
422 wlock = self.wlock()
423 wlock = self.wlock()
423 if not lock:
424 if not lock:
424 lock = self.lock()
425 lock = self.lock()
425 tr = self.transaction()
426 tr = self.transaction()
426
427
427 # check in files
428 # check in files
428 new = {}
429 new = {}
429 linkrev = self.changelog.count()
430 linkrev = self.changelog.count()
430 commit.sort()
431 commit.sort()
431 for f in commit:
432 for f in commit:
432 self.ui.note(f + "\n")
433 self.ui.note(f + "\n")
433 try:
434 try:
434 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
435 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
435 t = self.wread(f)
436 t = self.wread(f)
436 except IOError:
437 except IOError:
437 self.ui.warn(_("trouble committing %s!\n") % f)
438 self.ui.warn(_("trouble committing %s!\n") % f)
438 raise
439 raise
439
440
440 r = self.file(f)
441 r = self.file(f)
441
442
442 meta = {}
443 meta = {}
443 cp = self.dirstate.copied(f)
444 cp = self.dirstate.copied(f)
444 if cp:
445 if cp:
445 meta["copy"] = cp
446 meta["copy"] = cp
446 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
447 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
447 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
448 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
448 fp1, fp2 = nullid, nullid
449 fp1, fp2 = nullid, nullid
449 else:
450 else:
450 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
451 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
451 if entry:
452 if entry:
452 new[f] = entry
453 new[f] = entry
453 continue
454 continue
454
455
455 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
456 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
456 # remember what we've added so that we can later calculate
457 # remember what we've added so that we can later calculate
457 # the files to pull from a set of changesets
458 # the files to pull from a set of changesets
458 changed.append(f)
459 changed.append(f)
459
460
460 # update manifest
461 # update manifest
461 m1 = m1.copy()
462 m1 = m1.copy()
462 m1.update(new)
463 m1.update(new)
463 for f in remove:
464 for f in remove:
464 if f in m1:
465 if f in m1:
465 del m1[f]
466 del m1[f]
466 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
467 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
467 (new, remove))
468 (new, remove))
468
469
469 # add changeset
470 # add changeset
470 new = new.keys()
471 new = new.keys()
471 new.sort()
472 new.sort()
472
473
473 user = user or self.ui.username()
474 user = user or self.ui.username()
474 if not text:
475 if not text:
475 edittext = [""]
476 edittext = [""]
476 if p2 != nullid:
477 if p2 != nullid:
477 edittext.append("HG: branch merge")
478 edittext.append("HG: branch merge")
478 edittext.extend(["HG: changed %s" % f for f in changed])
479 edittext.extend(["HG: changed %s" % f for f in changed])
479 edittext.extend(["HG: removed %s" % f for f in remove])
480 edittext.extend(["HG: removed %s" % f for f in remove])
480 if not changed and not remove:
481 if not changed and not remove:
481 edittext.append("HG: no files changed")
482 edittext.append("HG: no files changed")
482 edittext.append("")
483 edittext.append("")
483 # run editor in the repository root
484 # run editor in the repository root
484 olddir = os.getcwd()
485 olddir = os.getcwd()
485 os.chdir(self.root)
486 os.chdir(self.root)
486 edittext = self.ui.edit("\n".join(edittext), user)
487 edittext = self.ui.edit("\n".join(edittext), user)
487 os.chdir(olddir)
488 os.chdir(olddir)
488 if not edittext.rstrip():
489 if not edittext.rstrip():
489 return None
490 return None
490 text = edittext
491 text = edittext
491
492
492 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
493 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
493 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
494 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
494 parent2=xp2)
495 parent2=xp2)
495 tr.close()
496 tr.close()
496
497
497 self.dirstate.setparents(n)
498 self.dirstate.setparents(n)
498 self.dirstate.update(new, "n")
499 self.dirstate.update(new, "n")
499 self.dirstate.forget(remove)
500 self.dirstate.forget(remove)
500
501
501 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
502 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
502 return n
503 return n
503
504
504 def walk(self, node=None, files=[], match=util.always, badmatch=None):
505 def walk(self, node=None, files=[], match=util.always, badmatch=None):
505 if node:
506 if node:
506 fdict = dict.fromkeys(files)
507 fdict = dict.fromkeys(files)
507 for fn in self.manifest.read(self.changelog.read(node)[0]):
508 for fn in self.manifest.read(self.changelog.read(node)[0]):
508 fdict.pop(fn, None)
509 fdict.pop(fn, None)
509 if match(fn):
510 if match(fn):
510 yield 'm', fn
511 yield 'm', fn
511 for fn in fdict:
512 for fn in fdict:
512 if badmatch and badmatch(fn):
513 if badmatch and badmatch(fn):
513 if match(fn):
514 if match(fn):
514 yield 'b', fn
515 yield 'b', fn
515 else:
516 else:
516 self.ui.warn(_('%s: No such file in rev %s\n') % (
517 self.ui.warn(_('%s: No such file in rev %s\n') % (
517 util.pathto(self.getcwd(), fn), short(node)))
518 util.pathto(self.getcwd(), fn), short(node)))
518 else:
519 else:
519 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
520 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
520 yield src, fn
521 yield src, fn
521
522
522 def changes(self, node1=None, node2=None, files=[], match=util.always,
523 def changes(self, node1=None, node2=None, files=[], match=util.always,
523 wlock=None, show_ignored=None):
524 wlock=None, show_ignored=None):
524 """return changes between two nodes or node and working directory
525 """return changes between two nodes or node and working directory
525
526
526 If node1 is None, use the first dirstate parent instead.
527 If node1 is None, use the first dirstate parent instead.
527 If node2 is None, compare node1 with working directory.
528 If node2 is None, compare node1 with working directory.
528 """
529 """
529
530
530 def fcmp(fn, mf):
531 def fcmp(fn, mf):
531 t1 = self.wread(fn)
532 t1 = self.wread(fn)
532 t2 = self.file(fn).read(mf.get(fn, nullid))
533 t2 = self.file(fn).read(mf.get(fn, nullid))
533 return cmp(t1, t2)
534 return cmp(t1, t2)
534
535
535 def mfmatches(node):
536 def mfmatches(node):
536 change = self.changelog.read(node)
537 change = self.changelog.read(node)
537 mf = dict(self.manifest.read(change[0]))
538 mf = dict(self.manifest.read(change[0]))
538 for fn in mf.keys():
539 for fn in mf.keys():
539 if not match(fn):
540 if not match(fn):
540 del mf[fn]
541 del mf[fn]
541 return mf
542 return mf
542
543
543 if node1:
544 if node1:
544 # read the manifest from node1 before the manifest from node2,
545 # read the manifest from node1 before the manifest from node2,
545 # so that we'll hit the manifest cache if we're going through
546 # so that we'll hit the manifest cache if we're going through
546 # all the revisions in parent->child order.
547 # all the revisions in parent->child order.
547 mf1 = mfmatches(node1)
548 mf1 = mfmatches(node1)
548
549
549 # are we comparing the working directory?
550 # are we comparing the working directory?
550 if not node2:
551 if not node2:
551 if not wlock:
552 if not wlock:
552 try:
553 try:
553 wlock = self.wlock(wait=0)
554 wlock = self.wlock(wait=0)
554 except lock.LockException:
555 except lock.LockException:
555 wlock = None
556 wlock = None
556 lookup, modified, added, removed, deleted, unknown, ignored = (
557 lookup, modified, added, removed, deleted, unknown, ignored = (
557 self.dirstate.changes(files, match, show_ignored))
558 self.dirstate.changes(files, match, show_ignored))
558
559
559 # are we comparing working dir against its parent?
560 # are we comparing working dir against its parent?
560 if not node1:
561 if not node1:
561 if lookup:
562 if lookup:
562 # do a full compare of any files that might have changed
563 # do a full compare of any files that might have changed
563 mf2 = mfmatches(self.dirstate.parents()[0])
564 mf2 = mfmatches(self.dirstate.parents()[0])
564 for f in lookup:
565 for f in lookup:
565 if fcmp(f, mf2):
566 if fcmp(f, mf2):
566 modified.append(f)
567 modified.append(f)
567 elif wlock is not None:
568 elif wlock is not None:
568 self.dirstate.update([f], "n")
569 self.dirstate.update([f], "n")
569 else:
570 else:
570 # we are comparing working dir against non-parent
571 # we are comparing working dir against non-parent
571 # generate a pseudo-manifest for the working dir
572 # generate a pseudo-manifest for the working dir
572 mf2 = mfmatches(self.dirstate.parents()[0])
573 mf2 = mfmatches(self.dirstate.parents()[0])
573 for f in lookup + modified + added:
574 for f in lookup + modified + added:
574 mf2[f] = ""
575 mf2[f] = ""
575 for f in removed:
576 for f in removed:
576 if f in mf2:
577 if f in mf2:
577 del mf2[f]
578 del mf2[f]
578 else:
579 else:
579 # we are comparing two revisions
580 # we are comparing two revisions
580 deleted, unknown, ignored = [], [], []
581 deleted, unknown, ignored = [], [], []
581 mf2 = mfmatches(node2)
582 mf2 = mfmatches(node2)
582
583
583 if node1:
584 if node1:
584 # flush lists from dirstate before comparing manifests
585 # flush lists from dirstate before comparing manifests
585 modified, added = [], []
586 modified, added = [], []
586
587
587 for fn in mf2:
588 for fn in mf2:
588 if mf1.has_key(fn):
589 if mf1.has_key(fn):
589 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
590 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
590 modified.append(fn)
591 modified.append(fn)
591 del mf1[fn]
592 del mf1[fn]
592 else:
593 else:
593 added.append(fn)
594 added.append(fn)
594
595
595 removed = mf1.keys()
596 removed = mf1.keys()
596
597
597 # sort and return results:
598 # sort and return results:
598 for l in modified, added, removed, deleted, unknown, ignored:
599 for l in modified, added, removed, deleted, unknown, ignored:
599 l.sort()
600 l.sort()
600 if show_ignored is None:
601 if show_ignored is None:
601 return (modified, added, removed, deleted, unknown)
602 return (modified, added, removed, deleted, unknown)
602 else:
603 else:
603 return (modified, added, removed, deleted, unknown, ignored)
604 return (modified, added, removed, deleted, unknown, ignored)
604
605
605 def add(self, list, wlock=None):
606 def add(self, list, wlock=None):
606 if not wlock:
607 if not wlock:
607 wlock = self.wlock()
608 wlock = self.wlock()
608 for f in list:
609 for f in list:
609 p = self.wjoin(f)
610 p = self.wjoin(f)
610 if not os.path.exists(p):
611 if not os.path.exists(p):
611 self.ui.warn(_("%s does not exist!\n") % f)
612 self.ui.warn(_("%s does not exist!\n") % f)
612 elif not os.path.isfile(p):
613 elif not os.path.isfile(p):
613 self.ui.warn(_("%s not added: only files supported currently\n")
614 self.ui.warn(_("%s not added: only files supported currently\n")
614 % f)
615 % f)
615 elif self.dirstate.state(f) in 'an':
616 elif self.dirstate.state(f) in 'an':
616 self.ui.warn(_("%s already tracked!\n") % f)
617 self.ui.warn(_("%s already tracked!\n") % f)
617 else:
618 else:
618 self.dirstate.update([f], "a")
619 self.dirstate.update([f], "a")
619
620
620 def forget(self, list, wlock=None):
621 def forget(self, list, wlock=None):
621 if not wlock:
622 if not wlock:
622 wlock = self.wlock()
623 wlock = self.wlock()
623 for f in list:
624 for f in list:
624 if self.dirstate.state(f) not in 'ai':
625 if self.dirstate.state(f) not in 'ai':
625 self.ui.warn(_("%s not added!\n") % f)
626 self.ui.warn(_("%s not added!\n") % f)
626 else:
627 else:
627 self.dirstate.forget([f])
628 self.dirstate.forget([f])
628
629
629 def remove(self, list, unlink=False, wlock=None):
630 def remove(self, list, unlink=False, wlock=None):
630 if unlink:
631 if unlink:
631 for f in list:
632 for f in list:
632 try:
633 try:
633 util.unlink(self.wjoin(f))
634 util.unlink(self.wjoin(f))
634 except OSError, inst:
635 except OSError, inst:
635 if inst.errno != errno.ENOENT:
636 if inst.errno != errno.ENOENT:
636 raise
637 raise
637 if not wlock:
638 if not wlock:
638 wlock = self.wlock()
639 wlock = self.wlock()
639 for f in list:
640 for f in list:
640 p = self.wjoin(f)
641 p = self.wjoin(f)
641 if os.path.exists(p):
642 if os.path.exists(p):
642 self.ui.warn(_("%s still exists!\n") % f)
643 self.ui.warn(_("%s still exists!\n") % f)
643 elif self.dirstate.state(f) == 'a':
644 elif self.dirstate.state(f) == 'a':
644 self.dirstate.forget([f])
645 self.dirstate.forget([f])
645 elif f not in self.dirstate:
646 elif f not in self.dirstate:
646 self.ui.warn(_("%s not tracked!\n") % f)
647 self.ui.warn(_("%s not tracked!\n") % f)
647 else:
648 else:
648 self.dirstate.update([f], "r")
649 self.dirstate.update([f], "r")
649
650
650 def undelete(self, list, wlock=None):
651 def undelete(self, list, wlock=None):
651 p = self.dirstate.parents()[0]
652 p = self.dirstate.parents()[0]
652 mn = self.changelog.read(p)[0]
653 mn = self.changelog.read(p)[0]
653 mf = self.manifest.readflags(mn)
654 mf = self.manifest.readflags(mn)
654 m = self.manifest.read(mn)
655 m = self.manifest.read(mn)
655 if not wlock:
656 if not wlock:
656 wlock = self.wlock()
657 wlock = self.wlock()
657 for f in list:
658 for f in list:
658 if self.dirstate.state(f) not in "r":
659 if self.dirstate.state(f) not in "r":
659 self.ui.warn("%s not removed!\n" % f)
660 self.ui.warn("%s not removed!\n" % f)
660 else:
661 else:
661 t = self.file(f).read(m[f])
662 t = self.file(f).read(m[f])
662 self.wwrite(f, t)
663 self.wwrite(f, t)
663 util.set_exec(self.wjoin(f), mf[f])
664 util.set_exec(self.wjoin(f), mf[f])
664 self.dirstate.update([f], "n")
665 self.dirstate.update([f], "n")
665
666
666 def copy(self, source, dest, wlock=None):
667 def copy(self, source, dest, wlock=None):
667 p = self.wjoin(dest)
668 p = self.wjoin(dest)
668 if not os.path.exists(p):
669 if not os.path.exists(p):
669 self.ui.warn(_("%s does not exist!\n") % dest)
670 self.ui.warn(_("%s does not exist!\n") % dest)
670 elif not os.path.isfile(p):
671 elif not os.path.isfile(p):
671 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
672 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
672 else:
673 else:
673 if not wlock:
674 if not wlock:
674 wlock = self.wlock()
675 wlock = self.wlock()
675 if self.dirstate.state(dest) == '?':
676 if self.dirstate.state(dest) == '?':
676 self.dirstate.update([dest], "a")
677 self.dirstate.update([dest], "a")
677 self.dirstate.copy(source, dest)
678 self.dirstate.copy(source, dest)
678
679
679 def heads(self, start=None):
680 def heads(self, start=None):
680 heads = self.changelog.heads(start)
681 heads = self.changelog.heads(start)
681 # sort the output in rev descending order
682 # sort the output in rev descending order
682 heads = [(-self.changelog.rev(h), h) for h in heads]
683 heads = [(-self.changelog.rev(h), h) for h in heads]
683 heads.sort()
684 heads.sort()
684 return [n for (r, n) in heads]
685 return [n for (r, n) in heads]
685
686
686 # branchlookup returns a dict giving a list of branches for
687 # branchlookup returns a dict giving a list of branches for
687 # each head. A branch is defined as the tag of a node or
688 # each head. A branch is defined as the tag of a node or
688 # the branch of the node's parents. If a node has multiple
689 # the branch of the node's parents. If a node has multiple
689 # branch tags, tags are eliminated if they are visible from other
690 # branch tags, tags are eliminated if they are visible from other
690 # branch tags.
691 # branch tags.
691 #
692 #
692 # So, for this graph: a->b->c->d->e
693 # So, for this graph: a->b->c->d->e
693 # \ /
694 # \ /
694 # aa -----/
695 # aa -----/
695 # a has tag 2.6.12
696 # a has tag 2.6.12
696 # d has tag 2.6.13
697 # d has tag 2.6.13
697 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
698 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
698 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
699 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
699 # from the list.
700 # from the list.
700 #
701 #
701 # It is possible that more than one head will have the same branch tag.
702 # It is possible that more than one head will have the same branch tag.
702 # callers need to check the result for multiple heads under the same
703 # callers need to check the result for multiple heads under the same
703 # branch tag if that is a problem for them (ie checkout of a specific
704 # branch tag if that is a problem for them (ie checkout of a specific
704 # branch).
705 # branch).
705 #
706 #
706 # passing in a specific branch will limit the depth of the search
707 # passing in a specific branch will limit the depth of the search
707 # through the parents. It won't limit the branches returned in the
708 # through the parents. It won't limit the branches returned in the
708 # result though.
709 # result though.
709 def branchlookup(self, heads=None, branch=None):
710 def branchlookup(self, heads=None, branch=None):
710 if not heads:
711 if not heads:
711 heads = self.heads()
712 heads = self.heads()
712 headt = [ h for h in heads ]
713 headt = [ h for h in heads ]
713 chlog = self.changelog
714 chlog = self.changelog
714 branches = {}
715 branches = {}
715 merges = []
716 merges = []
716 seenmerge = {}
717 seenmerge = {}
717
718
718 # traverse the tree once for each head, recording in the branches
719 # traverse the tree once for each head, recording in the branches
719 # dict which tags are visible from this head. The branches
720 # dict which tags are visible from this head. The branches
720 # dict also records which tags are visible from each tag
721 # dict also records which tags are visible from each tag
721 # while we traverse.
722 # while we traverse.
722 while headt or merges:
723 while headt or merges:
723 if merges:
724 if merges:
724 n, found = merges.pop()
725 n, found = merges.pop()
725 visit = [n]
726 visit = [n]
726 else:
727 else:
727 h = headt.pop()
728 h = headt.pop()
728 visit = [h]
729 visit = [h]
729 found = [h]
730 found = [h]
730 seen = {}
731 seen = {}
731 while visit:
732 while visit:
732 n = visit.pop()
733 n = visit.pop()
733 if n in seen:
734 if n in seen:
734 continue
735 continue
735 pp = chlog.parents(n)
736 pp = chlog.parents(n)
736 tags = self.nodetags(n)
737 tags = self.nodetags(n)
737 if tags:
738 if tags:
738 for x in tags:
739 for x in tags:
739 if x == 'tip':
740 if x == 'tip':
740 continue
741 continue
741 for f in found:
742 for f in found:
742 branches.setdefault(f, {})[n] = 1
743 branches.setdefault(f, {})[n] = 1
743 branches.setdefault(n, {})[n] = 1
744 branches.setdefault(n, {})[n] = 1
744 break
745 break
745 if n not in found:
746 if n not in found:
746 found.append(n)
747 found.append(n)
747 if branch in tags:
748 if branch in tags:
748 continue
749 continue
749 seen[n] = 1
750 seen[n] = 1
750 if pp[1] != nullid and n not in seenmerge:
751 if pp[1] != nullid and n not in seenmerge:
751 merges.append((pp[1], [x for x in found]))
752 merges.append((pp[1], [x for x in found]))
752 seenmerge[n] = 1
753 seenmerge[n] = 1
753 if pp[0] != nullid:
754 if pp[0] != nullid:
754 visit.append(pp[0])
755 visit.append(pp[0])
755 # traverse the branches dict, eliminating branch tags from each
756 # traverse the branches dict, eliminating branch tags from each
756 # head that are visible from another branch tag for that head.
757 # head that are visible from another branch tag for that head.
757 out = {}
758 out = {}
758 viscache = {}
759 viscache = {}
759 for h in heads:
760 for h in heads:
760 def visible(node):
761 def visible(node):
761 if node in viscache:
762 if node in viscache:
762 return viscache[node]
763 return viscache[node]
763 ret = {}
764 ret = {}
764 visit = [node]
765 visit = [node]
765 while visit:
766 while visit:
766 x = visit.pop()
767 x = visit.pop()
767 if x in viscache:
768 if x in viscache:
768 ret.update(viscache[x])
769 ret.update(viscache[x])
769 elif x not in ret:
770 elif x not in ret:
770 ret[x] = 1
771 ret[x] = 1
771 if x in branches:
772 if x in branches:
772 visit[len(visit):] = branches[x].keys()
773 visit[len(visit):] = branches[x].keys()
773 viscache[node] = ret
774 viscache[node] = ret
774 return ret
775 return ret
775 if h not in branches:
776 if h not in branches:
776 continue
777 continue
777 # O(n^2), but somewhat limited. This only searches the
778 # O(n^2), but somewhat limited. This only searches the
778 # tags visible from a specific head, not all the tags in the
779 # tags visible from a specific head, not all the tags in the
779 # whole repo.
780 # whole repo.
780 for b in branches[h]:
781 for b in branches[h]:
781 vis = False
782 vis = False
782 for bb in branches[h].keys():
783 for bb in branches[h].keys():
783 if b != bb:
784 if b != bb:
784 if b in visible(bb):
785 if b in visible(bb):
785 vis = True
786 vis = True
786 break
787 break
787 if not vis:
788 if not vis:
788 l = out.setdefault(h, [])
789 l = out.setdefault(h, [])
789 l[len(l):] = self.nodetags(b)
790 l[len(l):] = self.nodetags(b)
790 return out
791 return out
791
792
792 def branches(self, nodes):
793 def branches(self, nodes):
793 if not nodes:
794 if not nodes:
794 nodes = [self.changelog.tip()]
795 nodes = [self.changelog.tip()]
795 b = []
796 b = []
796 for n in nodes:
797 for n in nodes:
797 t = n
798 t = n
798 while n:
799 while n:
799 p = self.changelog.parents(n)
800 p = self.changelog.parents(n)
800 if p[1] != nullid or p[0] == nullid:
801 if p[1] != nullid or p[0] == nullid:
801 b.append((t, n, p[0], p[1]))
802 b.append((t, n, p[0], p[1]))
802 break
803 break
803 n = p[0]
804 n = p[0]
804 return b
805 return b
805
806
806 def between(self, pairs):
807 def between(self, pairs):
807 r = []
808 r = []
808
809
809 for top, bottom in pairs:
810 for top, bottom in pairs:
810 n, l, i = top, [], 0
811 n, l, i = top, [], 0
811 f = 1
812 f = 1
812
813
813 while n != bottom:
814 while n != bottom:
814 p = self.changelog.parents(n)[0]
815 p = self.changelog.parents(n)[0]
815 if i == f:
816 if i == f:
816 l.append(n)
817 l.append(n)
817 f = f * 2
818 f = f * 2
818 n = p
819 n = p
819 i += 1
820 i += 1
820
821
821 r.append(l)
822 r.append(l)
822
823
823 return r
824 return r
824
825
825 def findincoming(self, remote, base=None, heads=None, force=False):
826 def findincoming(self, remote, base=None, heads=None, force=False):
826 m = self.changelog.nodemap
827 m = self.changelog.nodemap
827 search = []
828 search = []
828 fetch = {}
829 fetch = {}
829 seen = {}
830 seen = {}
830 seenbranch = {}
831 seenbranch = {}
831 if base == None:
832 if base == None:
832 base = {}
833 base = {}
833
834
834 if not heads:
835 if not heads:
835 heads = remote.heads()
836 heads = remote.heads()
836
837
837 if self.changelog.tip() == nullid:
838 if self.changelog.tip() == nullid:
838 if heads != [nullid]:
839 if heads != [nullid]:
839 return [nullid]
840 return [nullid]
840 return []
841 return []
841
842
842 # assume we're closer to the tip than the root
843 # assume we're closer to the tip than the root
843 # and start by examining the heads
844 # and start by examining the heads
844 self.ui.status(_("searching for changes\n"))
845 self.ui.status(_("searching for changes\n"))
845
846
846 unknown = []
847 unknown = []
847 for h in heads:
848 for h in heads:
848 if h not in m:
849 if h not in m:
849 unknown.append(h)
850 unknown.append(h)
850 else:
851 else:
851 base[h] = 1
852 base[h] = 1
852
853
853 if not unknown:
854 if not unknown:
854 return []
855 return []
855
856
856 rep = {}
857 rep = {}
857 reqcnt = 0
858 reqcnt = 0
858
859
859 # search through remote branches
860 # search through remote branches
860 # a 'branch' here is a linear segment of history, with four parts:
861 # a 'branch' here is a linear segment of history, with four parts:
861 # head, root, first parent, second parent
862 # head, root, first parent, second parent
862 # (a branch always has two parents (or none) by definition)
863 # (a branch always has two parents (or none) by definition)
863 unknown = remote.branches(unknown)
864 unknown = remote.branches(unknown)
864 while unknown:
865 while unknown:
865 r = []
866 r = []
866 while unknown:
867 while unknown:
867 n = unknown.pop(0)
868 n = unknown.pop(0)
868 if n[0] in seen:
869 if n[0] in seen:
869 continue
870 continue
870
871
871 self.ui.debug(_("examining %s:%s\n")
872 self.ui.debug(_("examining %s:%s\n")
872 % (short(n[0]), short(n[1])))
873 % (short(n[0]), short(n[1])))
873 if n[0] == nullid:
874 if n[0] == nullid:
874 break
875 break
875 if n in seenbranch:
876 if n in seenbranch:
876 self.ui.debug(_("branch already found\n"))
877 self.ui.debug(_("branch already found\n"))
877 continue
878 continue
878 if n[1] and n[1] in m: # do we know the base?
879 if n[1] and n[1] in m: # do we know the base?
879 self.ui.debug(_("found incomplete branch %s:%s\n")
880 self.ui.debug(_("found incomplete branch %s:%s\n")
880 % (short(n[0]), short(n[1])))
881 % (short(n[0]), short(n[1])))
881 search.append(n) # schedule branch range for scanning
882 search.append(n) # schedule branch range for scanning
882 seenbranch[n] = 1
883 seenbranch[n] = 1
883 else:
884 else:
884 if n[1] not in seen and n[1] not in fetch:
885 if n[1] not in seen and n[1] not in fetch:
885 if n[2] in m and n[3] in m:
886 if n[2] in m and n[3] in m:
886 self.ui.debug(_("found new changeset %s\n") %
887 self.ui.debug(_("found new changeset %s\n") %
887 short(n[1]))
888 short(n[1]))
888 fetch[n[1]] = 1 # earliest unknown
889 fetch[n[1]] = 1 # earliest unknown
889 base[n[2]] = 1 # latest known
890 base[n[2]] = 1 # latest known
890 continue
891 continue
891
892
892 for a in n[2:4]:
893 for a in n[2:4]:
893 if a not in rep:
894 if a not in rep:
894 r.append(a)
895 r.append(a)
895 rep[a] = 1
896 rep[a] = 1
896
897
897 seen[n[0]] = 1
898 seen[n[0]] = 1
898
899
899 if r:
900 if r:
900 reqcnt += 1
901 reqcnt += 1
901 self.ui.debug(_("request %d: %s\n") %
902 self.ui.debug(_("request %d: %s\n") %
902 (reqcnt, " ".join(map(short, r))))
903 (reqcnt, " ".join(map(short, r))))
903 for p in range(0, len(r), 10):
904 for p in range(0, len(r), 10):
904 for b in remote.branches(r[p:p+10]):
905 for b in remote.branches(r[p:p+10]):
905 self.ui.debug(_("received %s:%s\n") %
906 self.ui.debug(_("received %s:%s\n") %
906 (short(b[0]), short(b[1])))
907 (short(b[0]), short(b[1])))
907 if b[0] in m:
908 if b[0] in m:
908 self.ui.debug(_("found base node %s\n")
909 self.ui.debug(_("found base node %s\n")
909 % short(b[0]))
910 % short(b[0]))
910 base[b[0]] = 1
911 base[b[0]] = 1
911 elif b[0] not in seen:
912 elif b[0] not in seen:
912 unknown.append(b)
913 unknown.append(b)
913
914
914 # do binary search on the branches we found
915 # do binary search on the branches we found
915 while search:
916 while search:
916 n = search.pop(0)
917 n = search.pop(0)
917 reqcnt += 1
918 reqcnt += 1
918 l = remote.between([(n[0], n[1])])[0]
919 l = remote.between([(n[0], n[1])])[0]
919 l.append(n[1])
920 l.append(n[1])
920 p = n[0]
921 p = n[0]
921 f = 1
922 f = 1
922 for i in l:
923 for i in l:
923 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
924 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
924 if i in m:
925 if i in m:
925 if f <= 2:
926 if f <= 2:
926 self.ui.debug(_("found new branch changeset %s\n") %
927 self.ui.debug(_("found new branch changeset %s\n") %
927 short(p))
928 short(p))
928 fetch[p] = 1
929 fetch[p] = 1
929 base[i] = 1
930 base[i] = 1
930 else:
931 else:
931 self.ui.debug(_("narrowed branch search to %s:%s\n")
932 self.ui.debug(_("narrowed branch search to %s:%s\n")
932 % (short(p), short(i)))
933 % (short(p), short(i)))
933 search.append((p, i))
934 search.append((p, i))
934 break
935 break
935 p, f = i, f * 2
936 p, f = i, f * 2
936
937
937 # sanity check our fetch list
938 # sanity check our fetch list
938 for f in fetch.keys():
939 for f in fetch.keys():
939 if f in m:
940 if f in m:
940 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
941 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
941
942
942 if base.keys() == [nullid]:
943 if base.keys() == [nullid]:
943 if force:
944 if force:
944 self.ui.warn(_("warning: repository is unrelated\n"))
945 self.ui.warn(_("warning: repository is unrelated\n"))
945 else:
946 else:
946 raise util.Abort(_("repository is unrelated"))
947 raise util.Abort(_("repository is unrelated"))
947
948
948 self.ui.note(_("found new changesets starting at ") +
949 self.ui.note(_("found new changesets starting at ") +
949 " ".join([short(f) for f in fetch]) + "\n")
950 " ".join([short(f) for f in fetch]) + "\n")
950
951
951 self.ui.debug(_("%d total queries\n") % reqcnt)
952 self.ui.debug(_("%d total queries\n") % reqcnt)
952
953
953 return fetch.keys()
954 return fetch.keys()
954
955
955 def findoutgoing(self, remote, base=None, heads=None, force=False):
956 def findoutgoing(self, remote, base=None, heads=None, force=False):
956 """Return list of nodes that are roots of subsets not in remote
957 """Return list of nodes that are roots of subsets not in remote
957
958
958 If base dict is specified, assume that these nodes and their parents
959 If base dict is specified, assume that these nodes and their parents
959 exist on the remote side.
960 exist on the remote side.
960 If a list of heads is specified, return only nodes which are heads
961 If a list of heads is specified, return only nodes which are heads
961 or ancestors of these heads, and return a second element which
962 or ancestors of these heads, and return a second element which
962 contains all remote heads which get new children.
963 contains all remote heads which get new children.
963 """
964 """
964 if base == None:
965 if base == None:
965 base = {}
966 base = {}
966 self.findincoming(remote, base, heads, force=force)
967 self.findincoming(remote, base, heads, force=force)
967
968
968 self.ui.debug(_("common changesets up to ")
969 self.ui.debug(_("common changesets up to ")
969 + " ".join(map(short, base.keys())) + "\n")
970 + " ".join(map(short, base.keys())) + "\n")
970
971
971 remain = dict.fromkeys(self.changelog.nodemap)
972 remain = dict.fromkeys(self.changelog.nodemap)
972
973
973 # prune everything remote has from the tree
974 # prune everything remote has from the tree
974 del remain[nullid]
975 del remain[nullid]
975 remove = base.keys()
976 remove = base.keys()
976 while remove:
977 while remove:
977 n = remove.pop(0)
978 n = remove.pop(0)
978 if n in remain:
979 if n in remain:
979 del remain[n]
980 del remain[n]
980 for p in self.changelog.parents(n):
981 for p in self.changelog.parents(n):
981 remove.append(p)
982 remove.append(p)
982
983
983 # find every node whose parents have been pruned
984 # find every node whose parents have been pruned
984 subset = []
985 subset = []
985 # find every remote head that will get new children
986 # find every remote head that will get new children
986 updated_heads = {}
987 updated_heads = {}
987 for n in remain:
988 for n in remain:
988 p1, p2 = self.changelog.parents(n)
989 p1, p2 = self.changelog.parents(n)
989 if p1 not in remain and p2 not in remain:
990 if p1 not in remain and p2 not in remain:
990 subset.append(n)
991 subset.append(n)
991 if heads:
992 if heads:
992 if p1 in heads:
993 if p1 in heads:
993 updated_heads[p1] = True
994 updated_heads[p1] = True
994 if p2 in heads:
995 if p2 in heads:
995 updated_heads[p2] = True
996 updated_heads[p2] = True
996
997
997 # this is the set of all roots we have to push
998 # this is the set of all roots we have to push
998 if heads:
999 if heads:
999 return subset, updated_heads.keys()
1000 return subset, updated_heads.keys()
1000 else:
1001 else:
1001 return subset
1002 return subset
1002
1003
1003 def pull(self, remote, heads=None, force=False):
1004 def pull(self, remote, heads=None, force=False):
1004 l = self.lock()
1005 l = self.lock()
1005
1006
1006 fetch = self.findincoming(remote, force=force)
1007 fetch = self.findincoming(remote, force=force)
1007 if fetch == [nullid]:
1008 if fetch == [nullid]:
1008 self.ui.status(_("requesting all changes\n"))
1009 self.ui.status(_("requesting all changes\n"))
1009
1010
1010 if not fetch:
1011 if not fetch:
1011 self.ui.status(_("no changes found\n"))
1012 self.ui.status(_("no changes found\n"))
1012 return 0
1013 return 0
1013
1014
1014 if heads is None:
1015 if heads is None:
1015 cg = remote.changegroup(fetch, 'pull')
1016 cg = remote.changegroup(fetch, 'pull')
1016 else:
1017 else:
1017 cg = remote.changegroupsubset(fetch, heads, 'pull')
1018 cg = remote.changegroupsubset(fetch, heads, 'pull')
1018 return self.addchangegroup(cg)
1019 return self.addchangegroup(cg)
1019
1020
1020 def push(self, remote, force=False, revs=None):
1021 def push(self, remote, force=False, revs=None):
1021 lock = remote.lock()
1022 lock = remote.lock()
1022
1023
1023 base = {}
1024 base = {}
1024 remote_heads = remote.heads()
1025 remote_heads = remote.heads()
1025 inc = self.findincoming(remote, base, remote_heads, force=force)
1026 inc = self.findincoming(remote, base, remote_heads, force=force)
1026 if not force and inc:
1027 if not force and inc:
1027 self.ui.warn(_("abort: unsynced remote changes!\n"))
1028 self.ui.warn(_("abort: unsynced remote changes!\n"))
1028 self.ui.status(_("(did you forget to sync?"
1029 self.ui.status(_("(did you forget to sync?"
1029 " use push -f to force)\n"))
1030 " use push -f to force)\n"))
1030 return 1
1031 return 1
1031
1032
1032 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1033 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1033 if revs is not None:
1034 if revs is not None:
1034 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1035 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1035 else:
1036 else:
1036 bases, heads = update, self.changelog.heads()
1037 bases, heads = update, self.changelog.heads()
1037
1038
1038 if not bases:
1039 if not bases:
1039 self.ui.status(_("no changes found\n"))
1040 self.ui.status(_("no changes found\n"))
1040 return 1
1041 return 1
1041 elif not force:
1042 elif not force:
1042 # FIXME we don't properly detect creation of new heads
1043 # FIXME we don't properly detect creation of new heads
1043 # in the push -r case, assume the user knows what he's doing
1044 # in the push -r case, assume the user knows what he's doing
1044 if not revs and len(remote_heads) < len(heads) \
1045 if not revs and len(remote_heads) < len(heads) \
1045 and remote_heads != [nullid]:
1046 and remote_heads != [nullid]:
1046 self.ui.warn(_("abort: push creates new remote branches!\n"))
1047 self.ui.warn(_("abort: push creates new remote branches!\n"))
1047 self.ui.status(_("(did you forget to merge?"
1048 self.ui.status(_("(did you forget to merge?"
1048 " use push -f to force)\n"))
1049 " use push -f to force)\n"))
1049 return 1
1050 return 1
1050
1051
1051 if revs is None:
1052 if revs is None:
1052 cg = self.changegroup(update, 'push')
1053 cg = self.changegroup(update, 'push')
1053 else:
1054 else:
1054 cg = self.changegroupsubset(update, revs, 'push')
1055 cg = self.changegroupsubset(update, revs, 'push')
1055 return remote.addchangegroup(cg)
1056 return remote.addchangegroup(cg)
1056
1057
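# Illustrative sketch (not part of localrepo.py): the head-count heuristic
# used by push() above, written out on its own.  Without --force (and without
# -r, where the check is skipped, as the FIXME notes), a push is refused when
# the local repository has more heads than the remote, unless the remote is
# empty (its only head is nullid).  The '<nullid>' placeholder stands in for
# the real null node.
def _push_would_add_heads(local_heads, remote_heads, null='<nullid>'):
    return remote_heads != [null] and len(remote_heads) < len(local_heads)

# _push_would_add_heads(['h1', 'h2'], ['h1'])  -> True   (push refused)
# _push_would_add_heads(['h1'], ['h1'])        -> False  (push allowed)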
1057 def changegroupsubset(self, bases, heads, source):
1058 def changegroupsubset(self, bases, heads, source):
1058 """This function generates a changegroup consisting of all the nodes
1059 """This function generates a changegroup consisting of all the nodes
1059 that are descendants of any of the bases, and ancestors of any of
1060 that are descendants of any of the bases, and ancestors of any of
1060 the heads.
1061 the heads.
1061
1062
1062 It is fairly complex as determining which filenodes and which
1063 It is fairly complex as determining which filenodes and which
1063 manifest nodes need to be included for the changeset to be complete
1064 manifest nodes need to be included for the changeset to be complete
1064 is non-trivial.
1065 is non-trivial.
1065
1066
1066 Another wrinkle is doing the reverse, figuring out which changeset in
1067 Another wrinkle is doing the reverse, figuring out which changeset in
1067 the changegroup a particular filenode or manifestnode belongs to."""
1068 the changegroup a particular filenode or manifestnode belongs to."""
1068
1069
1069 self.hook('preoutgoing', throw=True, source=source)
1070 self.hook('preoutgoing', throw=True, source=source)
1070
1071
1071 # Set up some initial variables
1072 # Set up some initial variables
1072 # Make it easy to refer to self.changelog
1073 # Make it easy to refer to self.changelog
1073 cl = self.changelog
1074 cl = self.changelog
1074 # msng is short for missing - compute the list of changesets in this
1075 # msng is short for missing - compute the list of changesets in this
1075 # changegroup.
1076 # changegroup.
1076 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1077 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1077 # Some bases may turn out to be superfluous, and some heads may be
1078 # Some bases may turn out to be superfluous, and some heads may be
1078 # too. nodesbetween will return the minimal set of bases and heads
1079 # too. nodesbetween will return the minimal set of bases and heads
1079 # necessary to re-create the changegroup.
1080 # necessary to re-create the changegroup.
1080
1081
1081 # Known heads are the list of heads that it is assumed the recipient
1082 # Known heads are the list of heads that it is assumed the recipient
1082 # of this changegroup will know about.
1083 # of this changegroup will know about.
1083 knownheads = {}
1084 knownheads = {}
1084 # We assume that all parents of bases are known heads.
1085 # We assume that all parents of bases are known heads.
1085 for n in bases:
1086 for n in bases:
1086 for p in cl.parents(n):
1087 for p in cl.parents(n):
1087 if p != nullid:
1088 if p != nullid:
1088 knownheads[p] = 1
1089 knownheads[p] = 1
1089 knownheads = knownheads.keys()
1090 knownheads = knownheads.keys()
1090 if knownheads:
1091 if knownheads:
1091 # Now that we know what heads are known, we can compute which
1092 # Now that we know what heads are known, we can compute which
1092 # changesets are known. The recipient must know about all
1093 # changesets are known. The recipient must know about all
1093 # changesets required to reach the known heads from the null
1094 # changesets required to reach the known heads from the null
1094 # changeset.
1095 # changeset.
1095 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1096 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1096 junk = None
1097 junk = None
1097 # Transform the list into an ersatz set.
1098 # Transform the list into an ersatz set.
1098 has_cl_set = dict.fromkeys(has_cl_set)
1099 has_cl_set = dict.fromkeys(has_cl_set)
1099 else:
1100 else:
1100 # If there were no known heads, the recipient cannot be assumed to
1101 # If there were no known heads, the recipient cannot be assumed to
1101 # know about any changesets.
1102 # know about any changesets.
1102 has_cl_set = {}
1103 has_cl_set = {}
1103
1104
1104 # Make it easy to refer to self.manifest
1105 # Make it easy to refer to self.manifest
1105 mnfst = self.manifest
1106 mnfst = self.manifest
1106 # We don't know which manifests are missing yet
1107 # We don't know which manifests are missing yet
1107 msng_mnfst_set = {}
1108 msng_mnfst_set = {}
1108 # Nor do we know which filenodes are missing.
1109 # Nor do we know which filenodes are missing.
1109 msng_filenode_set = {}
1110 msng_filenode_set = {}
1110
1111
1111 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1112 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1112 junk = None
1113 junk = None
1113
1114
1114 # A changeset always belongs to itself, so the changenode lookup
1115 # A changeset always belongs to itself, so the changenode lookup
1115 # function for a changenode is identity.
1116 # function for a changenode is identity.
1116 def identity(x):
1117 def identity(x):
1117 return x
1118 return x
1118
1119
1119 # A function generating function. Sets up an environment for the
1120 # A function generating function. Sets up an environment for the
1120 # inner function.
1121 # inner function.
1121 def cmp_by_rev_func(revlog):
1122 def cmp_by_rev_func(revlog):
1122 # Compare two nodes by their revision number in the environment's
1123 # Compare two nodes by their revision number in the environment's
1123 # revision history. Since the revision number both represents the
1124 # revision history. Since the revision number both represents the
1124 # most efficient order to read the nodes in, and represents a
1125 # most efficient order to read the nodes in, and represents a
1125 # topological sorting of the nodes, this function is often useful.
1126 # topological sorting of the nodes, this function is often useful.
1126 def cmp_by_rev(a, b):
1127 def cmp_by_rev(a, b):
1127 return cmp(revlog.rev(a), revlog.rev(b))
1128 return cmp(revlog.rev(a), revlog.rev(b))
1128 return cmp_by_rev
1129 return cmp_by_rev
1129
1130
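# Illustrative sketch (not part of localrepo.py): using the comparator factory
# above to put nodes into revlog (storage/topological) order.  _FakeRevlog and
# the node names are invented for the example; only the rev() method matters
# to cmp_by_rev_func().
class _FakeRevlog(object):
    def __init__(self, order):
        self._rev = dict((n, i) for i, n in enumerate(order))
    def rev(self, node):
        return self._rev[node]

_rl = _FakeRevlog(['n0', 'n1', 'n2', 'n3'])
_nodes = ['n3', 'n0', 'n2']
_nodes.sort(cmp_by_rev_func(_rl))          # -> ['n0', 'n2', 'n3']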
1130 # If we determine that a particular file or manifest node must be a
1131 # If we determine that a particular file or manifest node must be a
1131 # node that the recipient of the changegroup will already have, we can
1132 # node that the recipient of the changegroup will already have, we can
1132 # also assume the recipient will have all the parents. This function
1133 # also assume the recipient will have all the parents. This function
1133 # prunes them from the set of missing nodes.
1134 # prunes them from the set of missing nodes.
1134 def prune_parents(revlog, hasset, msngset):
1135 def prune_parents(revlog, hasset, msngset):
1135 haslst = hasset.keys()
1136 haslst = hasset.keys()
1136 haslst.sort(cmp_by_rev_func(revlog))
1137 haslst.sort(cmp_by_rev_func(revlog))
1137 for node in haslst:
1138 for node in haslst:
1138 parentlst = [p for p in revlog.parents(node) if p != nullid]
1139 parentlst = [p for p in revlog.parents(node) if p != nullid]
1139 while parentlst:
1140 while parentlst:
1140 n = parentlst.pop()
1141 n = parentlst.pop()
1141 if n not in hasset:
1142 if n not in hasset:
1142 hasset[n] = 1
1143 hasset[n] = 1
1143 p = [p for p in revlog.parents(n) if p != nullid]
1144 p = [p for p in revlog.parents(n) if p != nullid]
1144 parentlst.extend(p)
1145 parentlst.extend(p)
1145 for n in hasset:
1146 for n in hasset:
1146 msngset.pop(n, None)
1147 msngset.pop(n, None)
1147
1148
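# Illustrative sketch (not part of localrepo.py): the ancestor pruning that
# prune_parents() performs, restated as standalone code.  With linear history
# a <- b <- c <- d, knowing the recipient has c implies it also has a and b,
# so only d remains missing.
def _prune_ancestors(parents, hasset, msngset):
    stack = list(hasset)
    while stack:
        for p in parents[stack.pop()]:
            if p not in hasset:
                hasset.add(p)
                stack.append(p)
    for n in hasset:
        msngset.discard(n)

# parents = {'a': (), 'b': ('a',), 'c': ('b',), 'd': ('c',)}
# hasset, msngset = {'c'}, {'a', 'b', 'c', 'd'}
# _prune_ancestors(parents, hasset, msngset)   ->   msngset == {'d'}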
1148 # This is a function generating function used to set up an environment
1149 # This is a function generating function used to set up an environment
1149 # for the inner function to execute in.
1150 # for the inner function to execute in.
1150 def manifest_and_file_collector(changedfileset):
1151 def manifest_and_file_collector(changedfileset):
1151 # This is an information gathering function that gathers
1152 # This is an information gathering function that gathers
1152 # information from each changeset node that goes out as part of
1153 # information from each changeset node that goes out as part of
1153 # the changegroup. The information gathered is a list of which
1154 # the changegroup. The information gathered is a list of which
1154 # manifest nodes are potentially required (the recipient may
1155 # manifest nodes are potentially required (the recipient may
1155 # already have them) and the total list of all files which were
1156 # already have them) and the total list of all files which were
1156 # changed in any changeset in the changegroup.
1157 # changed in any changeset in the changegroup.
1157 #
1158 #
1158 # We also remember the first changenode we saw any manifest
1159 # We also remember the first changenode we saw any manifest
1159 # referenced by so we can later determine which changenode 'owns'
1160 # referenced by so we can later determine which changenode 'owns'
1160 # the manifest.
1161 # the manifest.
1161 def collect_manifests_and_files(clnode):
1162 def collect_manifests_and_files(clnode):
1162 c = cl.read(clnode)
1163 c = cl.read(clnode)
1163 for f in c[3]:
1164 for f in c[3]:
1164 # This is to make sure we only have one instance of each
1165 # This is to make sure we only have one instance of each
1165 # filename string for each filename.
1166 # filename string for each filename.
1166 changedfileset.setdefault(f, f)
1167 changedfileset.setdefault(f, f)
1167 msng_mnfst_set.setdefault(c[0], clnode)
1168 msng_mnfst_set.setdefault(c[0], clnode)
1168 return collect_manifests_and_files
1169 return collect_manifests_and_files
1169
1170
1170 # Figure out which manifest nodes (of the ones we think might be part
1171 # Figure out which manifest nodes (of the ones we think might be part
1171 # of the changegroup) the recipient must know about and remove them
1172 # of the changegroup) the recipient must know about and remove them
1172 # from the changegroup.
1173 # from the changegroup.
1173 def prune_manifests():
1174 def prune_manifests():
1174 has_mnfst_set = {}
1175 has_mnfst_set = {}
1175 for n in msng_mnfst_set:
1176 for n in msng_mnfst_set:
1176 # If a 'missing' manifest thinks it belongs to a changenode
1177 # If a 'missing' manifest thinks it belongs to a changenode
1177 # the recipient is assumed to have, obviously the recipient
1178 # the recipient is assumed to have, obviously the recipient
1178 # must have that manifest.
1179 # must have that manifest.
1179 linknode = cl.node(mnfst.linkrev(n))
1180 linknode = cl.node(mnfst.linkrev(n))
1180 if linknode in has_cl_set:
1181 if linknode in has_cl_set:
1181 has_mnfst_set[n] = 1
1182 has_mnfst_set[n] = 1
1182 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1183 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1183
1184
1184 # Use the information collected in collect_manifests_and_files to say
1185 # Use the information collected in collect_manifests_and_files to say
1185 # which changenode any manifestnode belongs to.
1186 # which changenode any manifestnode belongs to.
1186 def lookup_manifest_link(mnfstnode):
1187 def lookup_manifest_link(mnfstnode):
1187 return msng_mnfst_set[mnfstnode]
1188 return msng_mnfst_set[mnfstnode]
1188
1189
1189 # A function generating function that sets up the initial environment
1190 # A function generating function that sets up the initial environment
1190 # for the inner function.
1191 # for the inner function.
1191 def filenode_collector(changedfiles):
1192 def filenode_collector(changedfiles):
1192 next_rev = [0]
1193 next_rev = [0]
1193 # This gathers information from each manifestnode included in the
1194 # This gathers information from each manifestnode included in the
1194 # changegroup about which filenodes the manifest node references
1195 # changegroup about which filenodes the manifest node references
1195 # so we can include those in the changegroup too.
1196 # so we can include those in the changegroup too.
1196 #
1197 #
1197 # It also remembers which changenode each filenode belongs to. It
1198 # It also remembers which changenode each filenode belongs to. It
1198 # does this by assuming that a filenode belongs to the same changenode
1199 # does this by assuming that a filenode belongs to the same changenode
1199 # as the first manifest that references it.
1200 # as the first manifest that references it.
1200 def collect_msng_filenodes(mnfstnode):
1201 def collect_msng_filenodes(mnfstnode):
1201 r = mnfst.rev(mnfstnode)
1202 r = mnfst.rev(mnfstnode)
1202 if r == next_rev[0]:
1203 if r == next_rev[0]:
1203 # If the last rev we looked at was the one just previous,
1204 # If the last rev we looked at was the one just previous,
1204 # we only need to see a diff.
1205 # we only need to see a diff.
1205 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1206 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1206 # For each line in the delta
1207 # For each line in the delta
1207 for dline in delta.splitlines():
1208 for dline in delta.splitlines():
1208 # get the filename and filenode for that line
1209 # get the filename and filenode for that line
1209 f, fnode = dline.split('\0')
1210 f, fnode = dline.split('\0')
1210 fnode = bin(fnode[:40])
1211 fnode = bin(fnode[:40])
1211 f = changedfiles.get(f, None)
1212 f = changedfiles.get(f, None)
1212 # And if the file is in the list of files we care
1213 # And if the file is in the list of files we care
1213 # about.
1214 # about.
1214 if f is not None:
1215 if f is not None:
1215 # Get the changenode this manifest belongs to
1216 # Get the changenode this manifest belongs to
1216 clnode = msng_mnfst_set[mnfstnode]
1217 clnode = msng_mnfst_set[mnfstnode]
1217 # Create the set of filenodes for the file if
1218 # Create the set of filenodes for the file if
1218 # there isn't one already.
1219 # there isn't one already.
1219 ndset = msng_filenode_set.setdefault(f, {})
1220 ndset = msng_filenode_set.setdefault(f, {})
1220 # And set the filenode's changelog node to the
1221 # And set the filenode's changelog node to the
1221 # manifest's if it hasn't been set already.
1222 # manifest's if it hasn't been set already.
1222 ndset.setdefault(fnode, clnode)
1223 ndset.setdefault(fnode, clnode)
1223 else:
1224 else:
1224 # Otherwise we need a full manifest.
1225 # Otherwise we need a full manifest.
1225 m = mnfst.read(mnfstnode)
1226 m = mnfst.read(mnfstnode)
1226 # For every file we care about.
1227 # For every file we care about.
1227 for f in changedfiles:
1228 for f in changedfiles:
1228 fnode = m.get(f, None)
1229 fnode = m.get(f, None)
1229 # If it's in the manifest
1230 # If it's in the manifest
1230 if fnode is not None:
1231 if fnode is not None:
1231 # See comments above.
1232 # See comments above.
1232 clnode = msng_mnfst_set[mnfstnode]
1233 clnode = msng_mnfst_set[mnfstnode]
1233 ndset = msng_filenode_set.setdefault(f, {})
1234 ndset = msng_filenode_set.setdefault(f, {})
1234 ndset.setdefault(fnode, clnode)
1235 ndset.setdefault(fnode, clnode)
1235 # Remember the revision we hope to see next.
1236 # Remember the revision we hope to see next.
1236 next_rev[0] = r + 1
1237 next_rev[0] = r + 1
1237 return collect_msng_filenodes
1238 return collect_msng_filenodes
1238
1239
1239 # We have a list of filenodes we think we need for a file, let's remove
1240 # We have a list of filenodes we think we need for a file, let's remove
1240 # all those we know the recipient must have.
1241 # all those we know the recipient must have.
1241 def prune_filenodes(f, filerevlog):
1242 def prune_filenodes(f, filerevlog):
1242 msngset = msng_filenode_set[f]
1243 msngset = msng_filenode_set[f]
1243 hasset = {}
1244 hasset = {}
1244 # If a 'missing' filenode thinks it belongs to a changenode we
1245 # If a 'missing' filenode thinks it belongs to a changenode we
1245 # assume the recipient must have, then the recipient must have
1246 # assume the recipient must have, then the recipient must have
1246 # that filenode.
1247 # that filenode.
1247 for n in msngset:
1248 for n in msngset:
1248 clnode = cl.node(filerevlog.linkrev(n))
1249 clnode = cl.node(filerevlog.linkrev(n))
1249 if clnode in has_cl_set:
1250 if clnode in has_cl_set:
1250 hasset[n] = 1
1251 hasset[n] = 1
1251 prune_parents(filerevlog, hasset, msngset)
1252 prune_parents(filerevlog, hasset, msngset)
1252
1253
1253 # A function generating function that sets up a context for the
1254 # A function generating function that sets up a context for the
1254 # inner function.
1255 # inner function.
1255 def lookup_filenode_link_func(fname):
1256 def lookup_filenode_link_func(fname):
1256 msngset = msng_filenode_set[fname]
1257 msngset = msng_filenode_set[fname]
1257 # Lookup the changenode the filenode belongs to.
1258 # Lookup the changenode the filenode belongs to.
1258 def lookup_filenode_link(fnode):
1259 def lookup_filenode_link(fnode):
1259 return msngset[fnode]
1260 return msngset[fnode]
1260 return lookup_filenode_link
1261 return lookup_filenode_link
1261
1262
1262 # Now that we have all these utility functions to help out and
1263 # Now that we have all these utility functions to help out and
1263 # logically divide up the task, generate the group.
1264 # logically divide up the task, generate the group.
1264 def gengroup():
1265 def gengroup():
1265 # The set of changed files starts empty.
1266 # The set of changed files starts empty.
1266 changedfiles = {}
1267 changedfiles = {}
1267 # Create a changenode group generator that will call our functions
1268 # Create a changenode group generator that will call our functions
1268 # back to lookup the owning changenode and collect information.
1269 # back to lookup the owning changenode and collect information.
1269 group = cl.group(msng_cl_lst, identity,
1270 group = cl.group(msng_cl_lst, identity,
1270 manifest_and_file_collector(changedfiles))
1271 manifest_and_file_collector(changedfiles))
1271 for chnk in group:
1272 for chnk in group:
1272 yield chnk
1273 yield chnk
1273
1274
1274 # The list of manifests has been collected by the generator
1275 # The list of manifests has been collected by the generator
1275 # calling our functions back.
1276 # calling our functions back.
1276 prune_manifests()
1277 prune_manifests()
1277 msng_mnfst_lst = msng_mnfst_set.keys()
1278 msng_mnfst_lst = msng_mnfst_set.keys()
1278 # Sort the manifestnodes by revision number.
1279 # Sort the manifestnodes by revision number.
1279 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1280 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1280 # Create a generator for the manifestnodes that calls our lookup
1281 # Create a generator for the manifestnodes that calls our lookup
1281 # and data collection functions back.
1282 # and data collection functions back.
1282 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1283 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1283 filenode_collector(changedfiles))
1284 filenode_collector(changedfiles))
1284 for chnk in group:
1285 for chnk in group:
1285 yield chnk
1286 yield chnk
1286
1287
1287 # These are no longer needed, dereference and toss the memory for
1288 # These are no longer needed, dereference and toss the memory for
1288 # them.
1289 # them.
1289 msng_mnfst_lst = None
1290 msng_mnfst_lst = None
1290 msng_mnfst_set.clear()
1291 msng_mnfst_set.clear()
1291
1292
1292 changedfiles = changedfiles.keys()
1293 changedfiles = changedfiles.keys()
1293 changedfiles.sort()
1294 changedfiles.sort()
1294 # Go through all our files in order sorted by name.
1295 # Go through all our files in order sorted by name.
1295 for fname in changedfiles:
1296 for fname in changedfiles:
1296 filerevlog = self.file(fname)
1297 filerevlog = self.file(fname)
1297 # Toss out the filenodes that the recipient isn't really
1298 # Toss out the filenodes that the recipient isn't really
1298 # missing.
1299 # missing.
1299 if msng_filenode_set.has_key(fname):
1300 if msng_filenode_set.has_key(fname):
1300 prune_filenodes(fname, filerevlog)
1301 prune_filenodes(fname, filerevlog)
1301 msng_filenode_lst = msng_filenode_set[fname].keys()
1302 msng_filenode_lst = msng_filenode_set[fname].keys()
1302 else:
1303 else:
1303 msng_filenode_lst = []
1304 msng_filenode_lst = []
1304 # If any filenodes are left, generate the group for them,
1305 # If any filenodes are left, generate the group for them,
1305 # otherwise don't bother.
1306 # otherwise don't bother.
1306 if len(msng_filenode_lst) > 0:
1307 if len(msng_filenode_lst) > 0:
1307 yield changegroup.genchunk(fname)
1308 yield changegroup.genchunk(fname)
1308 # Sort the filenodes by their revision #
1309 # Sort the filenodes by their revision #
1309 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1310 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1310 # Create a group generator and only pass in a changenode
1311 # Create a group generator and only pass in a changenode
1311 # lookup function as we need to collect no information
1312 # lookup function as we need to collect no information
1312 # from filenodes.
1313 # from filenodes.
1313 group = filerevlog.group(msng_filenode_lst,
1314 group = filerevlog.group(msng_filenode_lst,
1314 lookup_filenode_link_func(fname))
1315 lookup_filenode_link_func(fname))
1315 for chnk in group:
1316 for chnk in group:
1316 yield chnk
1317 yield chnk
1317 if msng_filenode_set.has_key(fname):
1318 if msng_filenode_set.has_key(fname):
1318 # Don't need this anymore, toss it to free memory.
1319 # Don't need this anymore, toss it to free memory.
1319 del msng_filenode_set[fname]
1320 del msng_filenode_set[fname]
1320 # Signal that no more groups are left.
1321 # Signal that no more groups are left.
1321 yield changegroup.closechunk()
1322 yield changegroup.closechunk()
1322
1323
1323 if msng_cl_lst:
1324 if msng_cl_lst:
1324 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1325 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1325
1326
1326 return util.chunkbuffer(gengroup())
1327 return util.chunkbuffer(gengroup())
1327
1328
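# Editor's note (not part of localrepo.py): the stream assembled by gengroup()
# above has a fixed shape that addchangegroup() later relies on:
#   1. the changelog chunks for the missing changesets,
#   2. the manifest chunks (after prune_manifests() has trimmed them),
#   3. for each changed file, in sorted order: a chunk carrying the file name
#      followed by that file's filenode chunks (files with nothing missing are
#      skipped entirely),
#   4. a closing chunk from changegroup.closechunk() to signal the end.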
1328 def changegroup(self, basenodes, source):
1329 def changegroup(self, basenodes, source):
1329 """Generate a changegroup of all nodes that we have that a recipient
1330 """Generate a changegroup of all nodes that we have that a recipient
1330 doesn't.
1331 doesn't.
1331
1332
1332 This is much easier than the previous function as we can assume that
1333 This is much easier than the previous function as we can assume that
1333 the recipient has any changenode we aren't sending them."""
1334 the recipient has any changenode we aren't sending them."""
1334
1335
1335 self.hook('preoutgoing', throw=True, source=source)
1336 self.hook('preoutgoing', throw=True, source=source)
1336
1337
1337 cl = self.changelog
1338 cl = self.changelog
1338 nodes = cl.nodesbetween(basenodes, None)[0]
1339 nodes = cl.nodesbetween(basenodes, None)[0]
1339 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1340 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1340
1341
1341 def identity(x):
1342 def identity(x):
1342 return x
1343 return x
1343
1344
1344 def gennodelst(revlog):
1345 def gennodelst(revlog):
1345 for r in xrange(0, revlog.count()):
1346 for r in xrange(0, revlog.count()):
1346 n = revlog.node(r)
1347 n = revlog.node(r)
1347 if revlog.linkrev(n) in revset:
1348 if revlog.linkrev(n) in revset:
1348 yield n
1349 yield n
1349
1350
1350 def changed_file_collector(changedfileset):
1351 def changed_file_collector(changedfileset):
1351 def collect_changed_files(clnode):
1352 def collect_changed_files(clnode):
1352 c = cl.read(clnode)
1353 c = cl.read(clnode)
1353 for fname in c[3]:
1354 for fname in c[3]:
1354 changedfileset[fname] = 1
1355 changedfileset[fname] = 1
1355 return collect_changed_files
1356 return collect_changed_files
1356
1357
1357 def lookuprevlink_func(revlog):
1358 def lookuprevlink_func(revlog):
1358 def lookuprevlink(n):
1359 def lookuprevlink(n):
1359 return cl.node(revlog.linkrev(n))
1360 return cl.node(revlog.linkrev(n))
1360 return lookuprevlink
1361 return lookuprevlink
1361
1362
1362 def gengroup():
1363 def gengroup():
1363 # construct a list of all changed files
1364 # construct a list of all changed files
1364 changedfiles = {}
1365 changedfiles = {}
1365
1366
1366 for chnk in cl.group(nodes, identity,
1367 for chnk in cl.group(nodes, identity,
1367 changed_file_collector(changedfiles)):
1368 changed_file_collector(changedfiles)):
1368 yield chnk
1369 yield chnk
1369 changedfiles = changedfiles.keys()
1370 changedfiles = changedfiles.keys()
1370 changedfiles.sort()
1371 changedfiles.sort()
1371
1372
1372 mnfst = self.manifest
1373 mnfst = self.manifest
1373 nodeiter = gennodelst(mnfst)
1374 nodeiter = gennodelst(mnfst)
1374 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1375 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1375 yield chnk
1376 yield chnk
1376
1377
1377 for fname in changedfiles:
1378 for fname in changedfiles:
1378 filerevlog = self.file(fname)
1379 filerevlog = self.file(fname)
1379 nodeiter = gennodelst(filerevlog)
1380 nodeiter = gennodelst(filerevlog)
1380 nodeiter = list(nodeiter)
1381 nodeiter = list(nodeiter)
1381 if nodeiter:
1382 if nodeiter:
1382 yield changegroup.genchunk(fname)
1383 yield changegroup.genchunk(fname)
1383 lookup = lookuprevlink_func(filerevlog)
1384 lookup = lookuprevlink_func(filerevlog)
1384 for chnk in filerevlog.group(nodeiter, lookup):
1385 for chnk in filerevlog.group(nodeiter, lookup):
1385 yield chnk
1386 yield chnk
1386
1387
1387 yield changegroup.closechunk()
1388 yield changegroup.closechunk()
1388
1389
1389 if nodes:
1390 if nodes:
1390 self.hook('outgoing', node=hex(nodes[0]), source=source)
1391 self.hook('outgoing', node=hex(nodes[0]), source=source)
1391
1392
1392 return util.chunkbuffer(gengroup())
1393 return util.chunkbuffer(gengroup())
1393
1394
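# Illustrative sketch (not part of localrepo.py): the byte stream produced by
# gengroup() is a sequence of length-prefixed chunks.  In the classic
# changegroup framing, each chunk starts with a 4-byte big-endian length that
# includes the length field itself, and an all-zero length ends a group (which
# is what changegroup.closechunk() stands for).  A sketch under that
# assumption:
import struct

def _genchunk(data):
    return struct.pack(">l", len(data) + 4) + data

def _closechunk():
    return struct.pack(">l", 0)

# _genchunk("abc")  -> '\x00\x00\x00\x07' + 'abc'
# _closechunk()     -> '\x00\x00\x00\x00'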
1394 def addchangegroup(self, source):
1395 def addchangegroup(self, source):
1395 """add changegroup to repo.
1396 """add changegroup to repo.
1396 returns number of heads modified or added + 1."""
1397 returns number of heads modified or added + 1."""
1397
1398
1398 def csmap(x):
1399 def csmap(x):
1399 self.ui.debug(_("add changeset %s\n") % short(x))
1400 self.ui.debug(_("add changeset %s\n") % short(x))
1400 return cl.count()
1401 return cl.count()
1401
1402
1402 def revmap(x):
1403 def revmap(x):
1403 return cl.rev(x)
1404 return cl.rev(x)
1404
1405
1405 if not source:
1406 if not source:
1406 return 0
1407 return 0
1407
1408
1408 self.hook('prechangegroup', throw=True)
1409 self.hook('prechangegroup', throw=True)
1409
1410
1410 changesets = files = revisions = 0
1411 changesets = files = revisions = 0
1411
1412
1412 tr = self.transaction()
1413 tr = self.transaction()
1413
1414
1414 # write changelog and manifest data to temp files so
1415 # write changelog and manifest data to temp files so
1415 # concurrent readers will not see inconsistent view
1416 # concurrent readers will not see inconsistent view
1416 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1417 cl = appendfile.appendchangelog(self.opener, self.changelog.version)
1417
1418
1418 oldheads = len(cl.heads())
1419 oldheads = len(cl.heads())
1419
1420
1420 # pull off the changeset group
1421 # pull off the changeset group
1421 self.ui.status(_("adding changesets\n"))
1422 self.ui.status(_("adding changesets\n"))
1422 co = cl.tip()
1423 co = cl.tip()
1423 chunkiter = changegroup.chunkiter(source)
1424 chunkiter = changegroup.chunkiter(source)
1424 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1425 cn = cl.addgroup(chunkiter, csmap, tr, 1) # unique
1425 cnr, cor = map(cl.rev, (cn, co))
1426 cnr, cor = map(cl.rev, (cn, co))
1426 if cn == nullid:
1427 if cn == nullid:
1427 cnr = cor
1428 cnr = cor
1428 changesets = cnr - cor
1429 changesets = cnr - cor
1429
1430
1430 mf = appendfile.appendmanifest(self.opener, self.manifest.version)
1431 mf = appendfile.appendmanifest(self.opener, self.manifest.version)
1431
1432
1432 # pull off the manifest group
1433 # pull off the manifest group
1433 self.ui.status(_("adding manifests\n"))
1434 self.ui.status(_("adding manifests\n"))
1434 mm = mf.tip()
1435 mm = mf.tip()
1435 chunkiter = changegroup.chunkiter(source)
1436 chunkiter = changegroup.chunkiter(source)
1436 mo = mf.addgroup(chunkiter, revmap, tr)
1437 mo = mf.addgroup(chunkiter, revmap, tr)
1437
1438
1438 # process the files
1439 # process the files
1439 self.ui.status(_("adding file changes\n"))
1440 self.ui.status(_("adding file changes\n"))
1440 while 1:
1441 while 1:
1441 f = changegroup.getchunk(source)
1442 f = changegroup.getchunk(source)
1442 if not f:
1443 if not f:
1443 break
1444 break
1444 self.ui.debug(_("adding %s revisions\n") % f)
1445 self.ui.debug(_("adding %s revisions\n") % f)
1445 fl = self.file(f)
1446 fl = self.file(f)
1446 o = fl.count()
1447 o = fl.count()
1447 chunkiter = changegroup.chunkiter(source)
1448 chunkiter = changegroup.chunkiter(source)
1448 n = fl.addgroup(chunkiter, revmap, tr)
1449 n = fl.addgroup(chunkiter, revmap, tr)
1449 revisions += fl.count() - o
1450 revisions += fl.count() - o
1450 files += 1
1451 files += 1
1451
1452
1452 # write order here is important so concurrent readers will see
1453 # write order here is important so concurrent readers will see
1453 # consistent view of repo
1454 # consistent view of repo
1454 mf.writedata()
1455 mf.writedata()
1455 cl.writedata()
1456 cl.writedata()
1456
1457
1457 # make changelog and manifest see real files again
1458 # make changelog and manifest see real files again
1458 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1459 self.changelog = changelog.changelog(self.opener, self.changelog.version)
1459 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1460 self.manifest = manifest.manifest(self.opener, self.manifest.version)
1460 self.changelog.checkinlinesize(tr)
1461 self.changelog.checkinlinesize(tr)
1461 self.manifest.checkinlinesize(tr)
1462 self.manifest.checkinlinesize(tr)
1462
1463
1463 newheads = len(self.changelog.heads())
1464 newheads = len(self.changelog.heads())
1464 heads = ""
1465 heads = ""
1465 if oldheads and newheads > oldheads:
1466 if oldheads and newheads > oldheads:
1466 heads = _(" (+%d heads)") % (newheads - oldheads)
1467 heads = _(" (+%d heads)") % (newheads - oldheads)
1467
1468
1468 self.ui.status(_("added %d changesets"
1469 self.ui.status(_("added %d changesets"
1469 " with %d changes to %d files%s\n")
1470 " with %d changes to %d files%s\n")
1470 % (changesets, revisions, files, heads))
1471 % (changesets, revisions, files, heads))
1471
1472
1472 self.hook('pretxnchangegroup', throw=True,
1473 self.hook('pretxnchangegroup', throw=True,
1473 node=hex(self.changelog.node(cor+1)))
1474 node=hex(self.changelog.node(cor+1)))
1474
1475
1475 tr.close()
1476 tr.close()
1476
1477
1477 if changesets > 0:
1478 if changesets > 0:
1478 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1479 self.hook("changegroup", node=hex(self.changelog.node(cor+1)))
1479
1480
1480 for i in range(cor + 1, cnr + 1):
1481 for i in range(cor + 1, cnr + 1):
1481 self.hook("incoming", node=hex(self.changelog.node(i)))
1482 self.hook("incoming", node=hex(self.changelog.node(i)))
1482
1483
1483 return newheads - oldheads + 1
1484 return newheads - oldheads + 1
1484
1485
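# Illustrative sketch (not part of localrepo.py): one way a caller such as
# pull() could read the return value documented above ("number of heads
# modified or added + 1"); 0 roughly means there was nothing to add.
def _describe_addchangegroup(ret):
    if ret == 0:
        return "no changegroup received"
    if ret == 1:
        return "changesets added without changing the number of heads"
    return "changesets added, head count changed by %+d" % (ret - 1)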
1485 def update(self, node, allow=False, force=False, choose=None,
1486 def update(self, node, allow=False, force=False, choose=None,
1486 moddirstate=True, forcemerge=False, wlock=None):
1487 moddirstate=True, forcemerge=False, wlock=None):
1487 pl = self.dirstate.parents()
1488 pl = self.dirstate.parents()
1488 if not force and pl[1] != nullid:
1489 if not force and pl[1] != nullid:
1489 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1490 self.ui.warn(_("aborting: outstanding uncommitted merges\n"))
1490 return 1
1491 return 1
1491
1492
1492 err = False
1493 err = False
1493
1494
1494 p1, p2 = pl[0], node
1495 p1, p2 = pl[0], node
1495 pa = self.changelog.ancestor(p1, p2)
1496 pa = self.changelog.ancestor(p1, p2)
1496 m1n = self.changelog.read(p1)[0]
1497 m1n = self.changelog.read(p1)[0]
1497 m2n = self.changelog.read(p2)[0]
1498 m2n = self.changelog.read(p2)[0]
1498 man = self.manifest.ancestor(m1n, m2n)
1499 man = self.manifest.ancestor(m1n, m2n)
1499 m1 = self.manifest.read(m1n)
1500 m1 = self.manifest.read(m1n)
1500 mf1 = self.manifest.readflags(m1n)
1501 mf1 = self.manifest.readflags(m1n)
1501 m2 = self.manifest.read(m2n).copy()
1502 m2 = self.manifest.read(m2n).copy()
1502 mf2 = self.manifest.readflags(m2n)
1503 mf2 = self.manifest.readflags(m2n)
1503 ma = self.manifest.read(man)
1504 ma = self.manifest.read(man)
1504 mfa = self.manifest.readflags(man)
1505 mfa = self.manifest.readflags(man)
1505
1506
1506 modified, added, removed, deleted, unknown = self.changes()
1507 modified, added, removed, deleted, unknown = self.changes()
1507
1508
1508 # is this a jump, or a merge? i.e. is there a linear path
1509 # is this a jump, or a merge? i.e. is there a linear path
1509 # from p1 to p2?
1510 # from p1 to p2?
1510 linear_path = (pa == p1 or pa == p2)
1511 linear_path = (pa == p1 or pa == p2)
1511
1512
1512 if allow and linear_path:
1513 if allow and linear_path:
1513 raise util.Abort(_("there is nothing to merge, "
1514 raise util.Abort(_("there is nothing to merge, "
1514 "just use 'hg update'"))
1515 "just use 'hg update'"))
1515 if allow and not forcemerge:
1516 if allow and not forcemerge:
1516 if modified or added or removed:
1517 if modified or added or removed:
1517 raise util.Abort(_("outstanding uncommitted changes"))
1518 raise util.Abort(_("outstanding uncommitted changes"))
1518 if not forcemerge and not force:
1519 if not forcemerge and not force:
1519 for f in unknown:
1520 for f in unknown:
1520 if f in m2:
1521 if f in m2:
1521 t1 = self.wread(f)
1522 t1 = self.wread(f)
1522 t2 = self.file(f).read(m2[f])
1523 t2 = self.file(f).read(m2[f])
1523 if cmp(t1, t2) != 0:
1524 if cmp(t1, t2) != 0:
1524 raise util.Abort(_("'%s' already exists in the working"
1525 raise util.Abort(_("'%s' already exists in the working"
1525 " dir and differs from remote") % f)
1526 " dir and differs from remote") % f)
1526
1527
1527 # resolve the manifest to determine which files
1528 # resolve the manifest to determine which files
1528 # we care about merging
1529 # we care about merging
1529 self.ui.note(_("resolving manifests\n"))
1530 self.ui.note(_("resolving manifests\n"))
1530 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1531 self.ui.debug(_(" force %s allow %s moddirstate %s linear %s\n") %
1531 (force, allow, moddirstate, linear_path))
1532 (force, allow, moddirstate, linear_path))
1532 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1533 self.ui.debug(_(" ancestor %s local %s remote %s\n") %
1533 (short(man), short(m1n), short(m2n)))
1534 (short(man), short(m1n), short(m2n)))
1534
1535
1535 merge = {}
1536 merge = {}
1536 get = {}
1537 get = {}
1537 remove = []
1538 remove = []
1538
1539
1539 # construct a working dir manifest
1540 # construct a working dir manifest
1540 mw = m1.copy()
1541 mw = m1.copy()
1541 mfw = mf1.copy()
1542 mfw = mf1.copy()
1542 umap = dict.fromkeys(unknown)
1543 umap = dict.fromkeys(unknown)
1543
1544
1544 for f in added + modified + unknown:
1545 for f in added + modified + unknown:
1545 mw[f] = ""
1546 mw[f] = ""
1546 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1547 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1547
1548
1548 if moddirstate and not wlock:
1549 if moddirstate and not wlock:
1549 wlock = self.wlock()
1550 wlock = self.wlock()
1550
1551
1551 for f in deleted + removed:
1552 for f in deleted + removed:
1552 if f in mw:
1553 if f in mw:
1553 del mw[f]
1554 del mw[f]
1554
1555
1555 # If we're jumping between revisions (as opposed to merging),
1556 # If we're jumping between revisions (as opposed to merging),
1556 # and if neither the working directory nor the target rev has
1557 # and if neither the working directory nor the target rev has
1557 # the file, then we need to remove it from the dirstate, to
1558 # the file, then we need to remove it from the dirstate, to
1558 # prevent the dirstate from listing the file when it is no
1559 # prevent the dirstate from listing the file when it is no
1559 # longer in the manifest.
1560 # longer in the manifest.
1560 if moddirstate and linear_path and f not in m2:
1561 if moddirstate and linear_path and f not in m2:
1561 self.dirstate.forget((f,))
1562 self.dirstate.forget((f,))
1562
1563
1563 # Compare manifests
1564 # Compare manifests
1564 for f, n in mw.iteritems():
1565 for f, n in mw.iteritems():
1565 if choose and not choose(f):
1566 if choose and not choose(f):
1566 continue
1567 continue
1567 if f in m2:
1568 if f in m2:
1568 s = 0
1569 s = 0
1569
1570
1570 # is the wfile new since m1, and does it match m2?
1571 # is the wfile new since m1, and does it match m2?
1571 if f not in m1:
1572 if f not in m1:
1572 t1 = self.wread(f)
1573 t1 = self.wread(f)
1573 t2 = self.file(f).read(m2[f])
1574 t2 = self.file(f).read(m2[f])
1574 if cmp(t1, t2) == 0:
1575 if cmp(t1, t2) == 0:
1575 n = m2[f]
1576 n = m2[f]
1576 del t1, t2
1577 del t1, t2
1577
1578
1578 # are files different?
1579 # are files different?
1579 if n != m2[f]:
1580 if n != m2[f]:
1580 a = ma.get(f, nullid)
1581 a = ma.get(f, nullid)
1581 # are both different from the ancestor?
1582 # are both different from the ancestor?
1582 if n != a and m2[f] != a:
1583 if n != a and m2[f] != a:
1583 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1584 self.ui.debug(_(" %s versions differ, resolve\n") % f)
1584 # merge executable bits
1585 # merge executable bits
1585 # "if we changed or they changed, change in merge"
1586 # "if we changed or they changed, change in merge"
1586 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1587 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1587 mode = ((a^b) | (a^c)) ^ a
1588 mode = ((a^b) | (a^c)) ^ a
1588 merge[f] = (m1.get(f, nullid), m2[f], mode)
1589 merge[f] = (m1.get(f, nullid), m2[f], mode)
1589 s = 1
1590 s = 1
1590 # are we clobbering?
1591 # are we clobbering?
1591 # is remote's version newer?
1592 # is remote's version newer?
1592 # or are we going back in time?
1593 # or are we going back in time?
1593 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1594 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1594 self.ui.debug(_(" remote %s is newer, get\n") % f)
1595 self.ui.debug(_(" remote %s is newer, get\n") % f)
1595 get[f] = m2[f]
1596 get[f] = m2[f]
1596 s = 1
1597 s = 1
1597 elif f in umap or f in added:
1598 elif f in umap or f in added:
1598 # this unknown file is the same as the checkout
1599 # this unknown file is the same as the checkout
1599 # we need to reset the dirstate if the file was added
1600 # we need to reset the dirstate if the file was added
1600 get[f] = m2[f]
1601 get[f] = m2[f]
1601
1602
1602 if not s and mfw[f] != mf2[f]:
1603 if not s and mfw[f] != mf2[f]:
1603 if force:
1604 if force:
1604 self.ui.debug(_(" updating permissions for %s\n") % f)
1605 self.ui.debug(_(" updating permissions for %s\n") % f)
1605 util.set_exec(self.wjoin(f), mf2[f])
1606 util.set_exec(self.wjoin(f), mf2[f])
1606 else:
1607 else:
1607 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1608 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1608 mode = ((a^b) | (a^c)) ^ a
1609 mode = ((a^b) | (a^c)) ^ a
1609 if mode != b:
1610 if mode != b:
1610 self.ui.debug(_(" updating permissions for %s\n")
1611 self.ui.debug(_(" updating permissions for %s\n")
1611 % f)
1612 % f)
1612 util.set_exec(self.wjoin(f), mode)
1613 util.set_exec(self.wjoin(f), mode)
1613 del m2[f]
1614 del m2[f]
1614 elif f in ma:
1615 elif f in ma:
1615 if n != ma[f]:
1616 if n != ma[f]:
1616 r = _("d")
1617 r = _("d")
1617 if not force and (linear_path or allow):
1618 if not force and (linear_path or allow):
1618 r = self.ui.prompt(
1619 r = self.ui.prompt(
1619 (_(" local changed %s which remote deleted\n") % f) +
1620 (_(" local changed %s which remote deleted\n") % f) +
1620 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1621 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1621 if r == _("d"):
1622 if r == _("d"):
1622 remove.append(f)
1623 remove.append(f)
1623 else:
1624 else:
1624 self.ui.debug(_("other deleted %s\n") % f)
1625 self.ui.debug(_("other deleted %s\n") % f)
1625 remove.append(f) # other deleted it
1626 remove.append(f) # other deleted it
1626 else:
1627 else:
1627 # file is created on branch or in working directory
1628 # file is created on branch or in working directory
1628 if force and f not in umap:
1629 if force and f not in umap:
1629 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1630 self.ui.debug(_("remote deleted %s, clobbering\n") % f)
1630 remove.append(f)
1631 remove.append(f)
1631 elif n == m1.get(f, nullid): # same as parent
1632 elif n == m1.get(f, nullid): # same as parent
1632 if p2 == pa: # going backwards?
1633 if p2 == pa: # going backwards?
1633 self.ui.debug(_("remote deleted %s\n") % f)
1634 self.ui.debug(_("remote deleted %s\n") % f)
1634 remove.append(f)
1635 remove.append(f)
1635 else:
1636 else:
1636 self.ui.debug(_("local modified %s, keeping\n") % f)
1637 self.ui.debug(_("local modified %s, keeping\n") % f)
1637 else:
1638 else:
1638 self.ui.debug(_("working dir created %s, keeping\n") % f)
1639 self.ui.debug(_("working dir created %s, keeping\n") % f)
1639
1640
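# Illustrative sketch (not part of localrepo.py): the xor expression used
# above for the executable bit, mode = ((a^b) | (a^c)) ^ a, encodes "if we
# changed it or they changed it, keep the change" (a = ancestor, b = working
# copy, c = remote, each 0 or 1).
def _merged_exec_bit(a, b, c):
    return ((a ^ b) | (a ^ c)) ^ a

# _merged_exec_bit(0, 0, 0) -> 0   nobody touched the bit
# _merged_exec_bit(0, 1, 0) -> 1   only we set it: keep ours
# _merged_exec_bit(1, 1, 0) -> 0   only they cleared it: take theirs
# _merged_exec_bit(0, 1, 1) -> 1   both made the same change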
1640 for f, n in m2.iteritems():
1641 for f, n in m2.iteritems():
1641 if choose and not choose(f):
1642 if choose and not choose(f):
1642 continue
1643 continue
1643 if f[0] == "/":
1644 if f[0] == "/":
1644 continue
1645 continue
1645 if f in ma and n != ma[f]:
1646 if f in ma and n != ma[f]:
1646 r = _("k")
1647 r = _("k")
1647 if not force and (linear_path or allow):
1648 if not force and (linear_path or allow):
1648 r = self.ui.prompt(
1649 r = self.ui.prompt(
1649 (_("remote changed %s which local deleted\n") % f) +
1650 (_("remote changed %s which local deleted\n") % f) +
1650 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1651 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
1651 if r == _("k"):
1652 if r == _("k"):
1652 get[f] = n
1653 get[f] = n
1653 elif f not in ma:
1654 elif f not in ma:
1654 self.ui.debug(_("remote created %s\n") % f)
1655 self.ui.debug(_("remote created %s\n") % f)
1655 get[f] = n
1656 get[f] = n
1656 else:
1657 else:
1657 if force or p2 == pa: # going backwards?
1658 if force or p2 == pa: # going backwards?
1658 self.ui.debug(_("local deleted %s, recreating\n") % f)
1659 self.ui.debug(_("local deleted %s, recreating\n") % f)
1659 get[f] = n
1660 get[f] = n
1660 else:
1661 else:
1661 self.ui.debug(_("local deleted %s\n") % f)
1662 self.ui.debug(_("local deleted %s\n") % f)
1662
1663
1663 del mw, m1, m2, ma
1664 del mw, m1, m2, ma
1664
1665
1665 if force:
1666 if force:
1666 for f in merge:
1667 for f in merge:
1667 get[f] = merge[f][1]
1668 get[f] = merge[f][1]
1668 merge = {}
1669 merge = {}
1669
1670
1670 if linear_path or force:
1671 if linear_path or force:
1671 # we don't need to do any magic, just jump to the new rev
1672 # we don't need to do any magic, just jump to the new rev
1672 branch_merge = False
1673 branch_merge = False
1673 p1, p2 = p2, nullid
1674 p1, p2 = p2, nullid
1674 else:
1675 else:
1675 if not allow:
1676 if not allow:
1676 self.ui.status(_("this update spans a branch"
1677 self.ui.status(_("this update spans a branch"
1677 " affecting the following files:\n"))
1678 " affecting the following files:\n"))
1678 fl = merge.keys() + get.keys()
1679 fl = merge.keys() + get.keys()
1679 fl.sort()
1680 fl.sort()
1680 for f in fl:
1681 for f in fl:
1681 cf = ""
1682 cf = ""
1682 if f in merge:
1683 if f in merge:
1683 cf = _(" (resolve)")
1684 cf = _(" (resolve)")
1684 self.ui.status(" %s%s\n" % (f, cf))
1685 self.ui.status(" %s%s\n" % (f, cf))
1685 self.ui.warn(_("aborting update spanning branches!\n"))
1686 self.ui.warn(_("aborting update spanning branches!\n"))
1686 self.ui.status(_("(use 'hg merge' to merge across branches"
1687 self.ui.status(_("(use 'hg merge' to merge across branches"
1687 " or 'hg update -C' to lose changes)\n"))
1688 " or 'hg update -C' to lose changes)\n"))
1688 return 1
1689 return 1
1689 branch_merge = True
1690 branch_merge = True
1690
1691
1691 # get the files we don't need to change
1692 # get the files we don't need to change
1692 files = get.keys()
1693 files = get.keys()
1693 files.sort()
1694 files.sort()
1694 for f in files:
1695 for f in files:
1695 if f[0] == "/":
1696 if f[0] == "/":
1696 continue
1697 continue
1697 self.ui.note(_("getting %s\n") % f)
1698 self.ui.note(_("getting %s\n") % f)
1698 t = self.file(f).read(get[f])
1699 t = self.file(f).read(get[f])
1699 self.wwrite(f, t)
1700 self.wwrite(f, t)
1700 util.set_exec(self.wjoin(f), mf2[f])
1701 util.set_exec(self.wjoin(f), mf2[f])
1701 if moddirstate:
1702 if moddirstate:
1702 if branch_merge:
1703 if branch_merge:
1703 self.dirstate.update([f], 'n', st_mtime=-1)
1704 self.dirstate.update([f], 'n', st_mtime=-1)
1704 else:
1705 else:
1705 self.dirstate.update([f], 'n')
1706 self.dirstate.update([f], 'n')
1706
1707
1707 # merge the tricky bits
1708 # merge the tricky bits
1708 failedmerge = []
1709 failedmerge = []
1709 files = merge.keys()
1710 files = merge.keys()
1710 files.sort()
1711 files.sort()
1711 xp1 = hex(p1)
1712 xp1 = hex(p1)
1712 xp2 = hex(p2)
1713 xp2 = hex(p2)
1713 for f in files:
1714 for f in files:
1714 self.ui.status(_("merging %s\n") % f)
1715 self.ui.status(_("merging %s\n") % f)
1715 my, other, flag = merge[f]
1716 my, other, flag = merge[f]
1716 ret = self.merge3(f, my, other, xp1, xp2)
1717 ret = self.merge3(f, my, other, xp1, xp2)
1717 if ret:
1718 if ret:
1718 err = True
1719 err = True
1719 failedmerge.append(f)
1720 failedmerge.append(f)
1720 util.set_exec(self.wjoin(f), flag)
1721 util.set_exec(self.wjoin(f), flag)
1721 if moddirstate:
1722 if moddirstate:
1722 if branch_merge:
1723 if branch_merge:
1723 # We've done a branch merge, mark this file as merged
1724 # We've done a branch merge, mark this file as merged
1724 # so that we properly record the merger later
1725 # so that we properly record the merger later
1725 self.dirstate.update([f], 'm')
1726 self.dirstate.update([f], 'm')
1726 else:
1727 else:
1727 # We've update-merged a locally modified file, so
1728 # We've update-merged a locally modified file, so
1728 # we set the dirstate to emulate a normal checkout
1729 # we set the dirstate to emulate a normal checkout
1729 # of that file some time in the past. Thus our
1730 # of that file some time in the past. Thus our
1730 # merge will appear as a normal local file
1731 # merge will appear as a normal local file
1731 # modification.
1732 # modification.
1732 f_len = len(self.file(f).read(other))
1733 f_len = len(self.file(f).read(other))
1733 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1734 self.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
1734
1735
1735 remove.sort()
1736 remove.sort()
1736 for f in remove:
1737 for f in remove:
1737 self.ui.note(_("removing %s\n") % f)
1738 self.ui.note(_("removing %s\n") % f)
1738 util.audit_path(f)
1739 util.audit_path(f)
1739 try:
1740 try:
1740 util.unlink(self.wjoin(f))
1741 util.unlink(self.wjoin(f))
1741 except OSError, inst:
1742 except OSError, inst:
1742 if inst.errno != errno.ENOENT:
1743 if inst.errno != errno.ENOENT:
1743 self.ui.warn(_("update failed to remove %s: %s!\n") %
1744 self.ui.warn(_("update failed to remove %s: %s!\n") %
1744 (f, inst.strerror))
1745 (f, inst.strerror))
1745 if moddirstate:
1746 if moddirstate:
1746 if branch_merge:
1747 if branch_merge:
1747 self.dirstate.update(remove, 'r')
1748 self.dirstate.update(remove, 'r')
1748 else:
1749 else:
1749 self.dirstate.forget(remove)
1750 self.dirstate.forget(remove)
1750
1751
1751 if moddirstate:
1752 if moddirstate:
1752 self.dirstate.setparents(p1, p2)
1753 self.dirstate.setparents(p1, p2)
1753
1754
1754 stat = ((len(get), _("updated")),
1755 stat = ((len(get), _("updated")),
1755 (len(merge) - len(failedmerge), _("merged")),
1756 (len(merge) - len(failedmerge), _("merged")),
1756 (len(remove), _("removed")),
1757 (len(remove), _("removed")),
1757 (len(failedmerge), _("unresolved")))
1758 (len(failedmerge), _("unresolved")))
1758 note = ", ".join([_("%d files %s") % s for s in stat])
1759 note = ", ".join([_("%d files %s") % s for s in stat])
1759 self.ui.note("%s\n" % note)
1760 self.ui.note("%s\n" % note)
1760 if moddirstate and branch_merge:
1761 if moddirstate and branch_merge:
1761 self.ui.note(_("(branch merge, don't forget to commit)\n"))
1762 self.ui.note(_("(branch merge, don't forget to commit)\n"))
1762
1763
1763 return err
1764 return err
1764
1765
1765 def merge3(self, fn, my, other, p1, p2):
1766 def merge3(self, fn, my, other, p1, p2):
1766 """perform a 3-way merge in the working directory"""
1767 """perform a 3-way merge in the working directory"""
1767
1768
1768 def temp(prefix, node):
1769 def temp(prefix, node):
1769 pre = "%s~%s." % (os.path.basename(fn), prefix)
1770 pre = "%s~%s." % (os.path.basename(fn), prefix)
1770 (fd, name) = tempfile.mkstemp("", pre)
1771 (fd, name) = tempfile.mkstemp("", pre)
1771 f = os.fdopen(fd, "wb")
1772 f = os.fdopen(fd, "wb")
1772 self.wwrite(fn, fl.read(node), f)
1773 self.wwrite(fn, fl.read(node), f)
1773 f.close()
1774 f.close()
1774 return name
1775 return name
1775
1776
1776 fl = self.file(fn)
1777 fl = self.file(fn)
1777 base = fl.ancestor(my, other)
1778 base = fl.ancestor(my, other)
1778 a = self.wjoin(fn)
1779 a = self.wjoin(fn)
1779 b = temp("base", base)
1780 b = temp("base", base)
1780 c = temp("other", other)
1781 c = temp("other", other)
1781
1782
1782 self.ui.note(_("resolving %s\n") % fn)
1783 self.ui.note(_("resolving %s\n") % fn)
1783 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1784 self.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
1784 (fn, short(my), short(other), short(base)))
1785 (fn, short(my), short(other), short(base)))
1785
1786
1786 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1787 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1787 or "hgmerge")
1788 or "hgmerge")
1788 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1789 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=self.root,
1789 environ={'HG_FILE': fn,
1790 environ={'HG_FILE': fn,
1790 'HG_MY_NODE': p1,
1791 'HG_MY_NODE': p1,
1791 'HG_OTHER_NODE': p2,
1792 'HG_OTHER_NODE': p2,
1792 'HG_FILE_MY_NODE': hex(my),
1793 'HG_FILE_MY_NODE': hex(my),
1793 'HG_FILE_OTHER_NODE': hex(other),
1794 'HG_FILE_OTHER_NODE': hex(other),
1794 'HG_FILE_BASE_NODE': hex(base)})
1795 'HG_FILE_BASE_NODE': hex(base)})
1795 if r:
1796 if r:
1796 self.ui.warn(_("merging %s failed!\n") % fn)
1797 self.ui.warn(_("merging %s failed!\n") % fn)
1797
1798
1798 os.unlink(b)
1799 os.unlink(b)
1799 os.unlink(c)
1800 os.unlink(c)
1800 return r
1801 return r
1801
1802
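# Editor's note (not part of localrepo.py): merge3() resolves a conflict by
# handing three versions of the file to an external program: the working copy
# (local version), a temporary copy of the common ancestor, and a temporary
# copy of the other parent's version; a non-zero exit status marks the file as
# unresolved.  The program defaults to "hgmerge" and can be overridden via the
# HGMERGE environment variable or the "ui.merge" setting, for example:
#
#   [ui]
#   merge = kdiff3
#
# The resulting invocation is roughly (node values are placeholders):
#   HG_FILE=<fn> HG_MY_NODE=<p1> HG_OTHER_NODE=<p2> ... \
#       <cmd> "<working copy>" "<fn>~base.XXXX" "<fn>~other.XXXX"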
1802 def verify(self):
1803 def verify(self):
1803 filelinkrevs = {}
1804 filelinkrevs = {}
1804 filenodes = {}
1805 filenodes = {}
1805 changesets = revisions = files = 0
1806 changesets = revisions = files = 0
1806 errors = [0]
1807 errors = [0]
1807 warnings = [0]
1808 warnings = [0]
1808 neededmanifests = {}
1809 neededmanifests = {}
1809
1810
1810 def err(msg):
1811 def err(msg):
1811 self.ui.warn(msg + "\n")
1812 self.ui.warn(msg + "\n")
1812 errors[0] += 1
1813 errors[0] += 1
1813
1814
1814 def warn(msg):
1815 def warn(msg):
1815 self.ui.warn(msg + "\n")
1816 self.ui.warn(msg + "\n")
1816 warnings[0] += 1
1817 warnings[0] += 1
1817
1818
1818 def checksize(obj, name):
1819 def checksize(obj, name):
1819 d = obj.checksize()
1820 d = obj.checksize()
1820 if d[0]:
1821 if d[0]:
1821 err(_("%s data length off by %d bytes") % (name, d[0]))
1822 err(_("%s data length off by %d bytes") % (name, d[0]))
1822 if d[1]:
1823 if d[1]:
1823 err(_("%s index contains %d extra bytes") % (name, d[1]))
1824 err(_("%s index contains %d extra bytes") % (name, d[1]))
1824
1825
1825 def checkversion(obj, name):
1826 def checkversion(obj, name):
1826 if obj.version != revlog.REVLOGV0:
1827 if obj.version != revlog.REVLOGV0:
1827 if not revlogv1:
1828 if not revlogv1:
1828 warn(_("warning: `%s' uses revlog format 1") % name)
1829 warn(_("warning: `%s' uses revlog format 1") % name)
1829 elif revlogv1:
1830 elif revlogv1:
1830 warn(_("warning: `%s' uses revlog format 0") % name)
1831 warn(_("warning: `%s' uses revlog format 0") % name)
1831
1832
1832 revlogv1 = self.revlogversion != revlog.REVLOGV0
1833 revlogv1 = self.revlogversion != revlog.REVLOGV0
1833 self.ui.status(_("repository uses revlog format %d\n") %
1834 (revlogv1 and 1 or 0))
1835
1834 if self.ui.verbose or revlogv1 != self.revlogv1:
1835 self.ui.status(_("repository uses revlog format %d\n") %
1836 (revlogv1 and 1 or 0))
1837
1836 seen = {}
1838 seen = {}
1837 self.ui.status(_("checking changesets\n"))
1839 self.ui.status(_("checking changesets\n"))
1838 checksize(self.changelog, "changelog")
1840 checksize(self.changelog, "changelog")
1839
1841
1840 for i in range(self.changelog.count()):
1842 for i in range(self.changelog.count()):
1841 changesets += 1
1843 changesets += 1
1842 n = self.changelog.node(i)
1844 n = self.changelog.node(i)
1843 l = self.changelog.linkrev(n)
1845 l = self.changelog.linkrev(n)
1844 if l != i:
1846 if l != i:
1845 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1847 err(_("incorrect link (%d) for changeset revision %d") %(l, i))
1846 if n in seen:
1848 if n in seen:
1847 err(_("duplicate changeset at revision %d") % i)
1849 err(_("duplicate changeset at revision %d") % i)
1848 seen[n] = 1
1850 seen[n] = 1
1849
1851
1850 for p in self.changelog.parents(n):
1852 for p in self.changelog.parents(n):
1851 if p not in self.changelog.nodemap:
1853 if p not in self.changelog.nodemap:
1852 err(_("changeset %s has unknown parent %s") %
1854 err(_("changeset %s has unknown parent %s") %
1853 (short(n), short(p)))
1855 (short(n), short(p)))
1854 try:
1856 try:
1855 changes = self.changelog.read(n)
1857 changes = self.changelog.read(n)
1856 except KeyboardInterrupt:
1858 except KeyboardInterrupt:
1857 self.ui.warn(_("interrupted"))
1859 self.ui.warn(_("interrupted"))
1858 raise
1860 raise
1859 except Exception, inst:
1861 except Exception, inst:
1860 err(_("unpacking changeset %s: %s") % (short(n), inst))
1862 err(_("unpacking changeset %s: %s") % (short(n), inst))
1861 continue
1863 continue
1862
1864
1863 neededmanifests[changes[0]] = n
1865 neededmanifests[changes[0]] = n
1864
1866
1865 for f in changes[3]:
1867 for f in changes[3]:
1866 filelinkrevs.setdefault(f, []).append(i)
1868 filelinkrevs.setdefault(f, []).append(i)
1867
1869
1868 seen = {}
1870 seen = {}
1869 self.ui.status(_("checking manifests\n"))
1871 self.ui.status(_("checking manifests\n"))
1870 checkversion(self.manifest, "manifest")
1872 checkversion(self.manifest, "manifest")
1871 checksize(self.manifest, "manifest")
1873 checksize(self.manifest, "manifest")
1872
1874
1873 for i in range(self.manifest.count()):
1875 for i in range(self.manifest.count()):
1874 n = self.manifest.node(i)
1876 n = self.manifest.node(i)
1875 l = self.manifest.linkrev(n)
1877 l = self.manifest.linkrev(n)
1876
1878
1877 if l < 0 or l >= self.changelog.count():
1879 if l < 0 or l >= self.changelog.count():
1878 err(_("bad manifest link (%d) at revision %d") % (l, i))
1880 err(_("bad manifest link (%d) at revision %d") % (l, i))
1879
1881
1880 if n in neededmanifests:
1882 if n in neededmanifests:
1881 del neededmanifests[n]
1883 del neededmanifests[n]
1882
1884
1883 if n in seen:
1885 if n in seen:
1884 err(_("duplicate manifest at revision %d") % i)
1886 err(_("duplicate manifest at revision %d") % i)
1885
1887
1886 seen[n] = 1
1888 seen[n] = 1
1887
1889
1888 for p in self.manifest.parents(n):
1890 for p in self.manifest.parents(n):
1889 if p not in self.manifest.nodemap:
1891 if p not in self.manifest.nodemap:
1890 err(_("manifest %s has unknown parent %s") %
1892 err(_("manifest %s has unknown parent %s") %
1891 (short(n), short(p)))
1893 (short(n), short(p)))
1892
1894
1893 try:
1895 try:
1894 delta = mdiff.patchtext(self.manifest.delta(n))
1896 delta = mdiff.patchtext(self.manifest.delta(n))
1895 except KeyboardInterrupt:
1897 except KeyboardInterrupt:
1896 self.ui.warn(_("interrupted"))
1898 self.ui.warn(_("interrupted"))
1897 raise
1899 raise
1898 except Exception, inst:
1900 except Exception, inst:
1899 err(_("unpacking manifest %s: %s") % (short(n), inst))
1901 err(_("unpacking manifest %s: %s") % (short(n), inst))
1900 continue
1902 continue
1901
1903
1902 try:
1904 try:
1903 ff = [ l.split('\0') for l in delta.splitlines() ]
1905 ff = [ l.split('\0') for l in delta.splitlines() ]
1904 for f, fn in ff:
1906 for f, fn in ff:
1905 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1907 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1906 except (ValueError, TypeError), inst:
1908 except (ValueError, TypeError), inst:
1907 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1909 err(_("broken delta in manifest %s: %s") % (short(n), inst))
1908
1910
1909 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1911 self.ui.status(_("crosschecking files in changesets and manifests\n"))
1910
1912
1911 for m, c in neededmanifests.items():
1913 for m, c in neededmanifests.items():
1912 err(_("Changeset %s refers to unknown manifest %s") %
1914 err(_("Changeset %s refers to unknown manifest %s") %
1913 (short(m), short(c)))
1915 (short(m), short(c)))
1914 del neededmanifests
1916 del neededmanifests
1915
1917
1916 for f in filenodes:
1918 for f in filenodes:
1917 if f not in filelinkrevs:
1919 if f not in filelinkrevs:
1918 err(_("file %s in manifest but not in changesets") % f)
1920 err(_("file %s in manifest but not in changesets") % f)
1919
1921
1920 for f in filelinkrevs:
1922 for f in filelinkrevs:
1921 if f not in filenodes:
1923 if f not in filenodes:
1922 err(_("file %s in changeset but not in manifest") % f)
1924 err(_("file %s in changeset but not in manifest") % f)
1923
1925
1924 self.ui.status(_("checking files\n"))
1926 self.ui.status(_("checking files\n"))
1925 ff = filenodes.keys()
1927 ff = filenodes.keys()
1926 ff.sort()
1928 ff.sort()
1927 for f in ff:
1929 for f in ff:
1928 if f == "/dev/null":
1930 if f == "/dev/null":
1929 continue
1931 continue
1930 files += 1
1932 files += 1
1931 if not f:
1933 if not f:
1932 err(_("file without name in manifest %s") % short(n))
1934 err(_("file without name in manifest %s") % short(n))
1933 continue
1935 continue
1934 fl = self.file(f)
1936 fl = self.file(f)
1935 checkversion(fl, f)
1937 checkversion(fl, f)
1936 checksize(fl, f)
1938 checksize(fl, f)
1937
1939
1938 nodes = {nullid: 1}
1940 nodes = {nullid: 1}
1939 seen = {}
1941 seen = {}
1940 for i in range(fl.count()):
1942 for i in range(fl.count()):
1941 revisions += 1
1943 revisions += 1
1942 n = fl.node(i)
1944 n = fl.node(i)
1943
1945
1944 if n in seen:
1946 if n in seen:
1945 err(_("%s: duplicate revision %d") % (f, i))
1947 err(_("%s: duplicate revision %d") % (f, i))
1946 if n not in filenodes[f]:
1948 if n not in filenodes[f]:
1947 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1949 err(_("%s: %d:%s not in manifests") % (f, i, short(n)))
1948 else:
1950 else:
1949 del filenodes[f][n]
1951 del filenodes[f][n]
1950
1952
1951 flr = fl.linkrev(n)
1953 flr = fl.linkrev(n)
1952 if flr not in filelinkrevs.get(f, []):
1954 if flr not in filelinkrevs.get(f, []):
1953 err(_("%s:%s points to unexpected changeset %d")
1955 err(_("%s:%s points to unexpected changeset %d")
1954 % (f, short(n), flr))
1956 % (f, short(n), flr))
1955 else:
1957 else:
1956 filelinkrevs[f].remove(flr)
1958 filelinkrevs[f].remove(flr)
1957
1959
1958 # verify contents
1960 # verify contents
1959 try:
1961 try:
1960 t = fl.read(n)
1962 t = fl.read(n)
1961 except KeyboardInterrupt:
1963 except KeyboardInterrupt:
1962 self.ui.warn(_("interrupted"))
1964 self.ui.warn(_("interrupted"))
1963 raise
1965 raise
1964 except Exception, inst:
1966 except Exception, inst:
1965 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1967 err(_("unpacking file %s %s: %s") % (f, short(n), inst))
1966
1968
1967 # verify parents
1969 # verify parents
1968 (p1, p2) = fl.parents(n)
1970 (p1, p2) = fl.parents(n)
1969 if p1 not in nodes:
1971 if p1 not in nodes:
1970 err(_("file %s:%s unknown parent 1 %s") %
1972 err(_("file %s:%s unknown parent 1 %s") %
1971 (f, short(n), short(p1)))
1973 (f, short(n), short(p1)))
1972 if p2 not in nodes:
1974 if p2 not in nodes:
1973 err(_("file %s:%s unknown parent 2 %s") %
1975 err(_("file %s:%s unknown parent 2 %s") %
1974 (f, short(n), short(p1)))
1976 (f, short(n), short(p1)))
1975 nodes[n] = 1
1977 nodes[n] = 1
1976
1978
1977 # cross-check
1979 # cross-check
1978 for node in filenodes[f]:
1980 for node in filenodes[f]:
1979 err(_("node %s in manifests not in %s") % (hex(node), f))
1981 err(_("node %s in manifests not in %s") % (hex(node), f))
1980
1982
1981 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1983 self.ui.status(_("%d files, %d changesets, %d total revisions\n") %
1982 (files, changesets, revisions))
1984 (files, changesets, revisions))
1983
1985
1984 if warnings[0]:
1986 if warnings[0]:
1985 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
1987 self.ui.warn(_("%d warnings encountered!\n") % warnings[0])
1986 if errors[0]:
1988 if errors[0]:
1987 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1989 self.ui.warn(_("%d integrity errors encountered!\n") % errors[0])
1988 return 1
1990 return 1
1989
1991
1990 # used to avoid circular references so destructors work
1992 # used to avoid circular references so destructors work
1991 def aftertrans(base):
1993 def aftertrans(base):
1992 p = base
1994 p = base
1993 def a():
1995 def a():
1994 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1996 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1995 util.rename(os.path.join(p, "journal.dirstate"),
1997 util.rename(os.path.join(p, "journal.dirstate"),
1996 os.path.join(p, "undo.dirstate"))
1998 os.path.join(p, "undo.dirstate"))
1997 return a
1999 return a
1998
2000
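With the change above, verify() reports the revlog format only when it is informative: the status line is printed when verbose output is requested or when the repository's format does not match the default format the configuration would pick. A minimal stand-alone sketch of that rule follows; the function and parameter names (report_revlog_format, repo_is_v1, default_is_v1) are illustrative stand-ins for ui.verbose and the repository/default format attributes, not code from the patch itself.

import sys

# Hedged sketch of the new reporting rule, not the actual localrepo.py code:
# emit the format line only when asked for (verbose) or when the repository
# format differs from the configured default.
def report_revlog_format(verbose, repo_is_v1, default_is_v1,
                         write=sys.stdout.write):
    if verbose or repo_is_v1 != default_is_v1:
        write("repository uses revlog format %d\n" % (repo_is_v1 and 1 or 0))

# A quiet verify of a format-0 repository with format 0 as the default now
# prints nothing, which is why each expected test output below loses its
# "repository uses revlog format 0" line.
report_revlog_format(verbose=False, repo_is_v1=False, default_is_v1=False)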
@@ -1,14 +1,13 @@
 changeset: 0:0acdaf898367
 tag: tip
 user: test
 date: Mon Jan 12 13:46:40 1970 +0000
 summary: test

 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 a
 a
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 1 changesets, 1 total revisions
@@ -1,16 +1,15 @@
 pulling from ../source
 abort: pretxncommit hook exited with status 1
 transaction abort!
 rollback completed
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
 (run 'hg update' to get a working copy)
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 2 changesets, 2 total revisions
@@ -1,137 +1,126 @@
 rev offset length base linkrev nodeid p1 p2
 0 0 3 0 0 362fef284ce2 000000000000 000000000000
 1 3 5 1 1 125144f7e028 362fef284ce2 000000000000
 2 8 7 2 2 4c982badb186 125144f7e028 000000000000
 3 15 9 3 3 19b1fc555737 4c982badb186 000000000000
 rev offset length base linkrev nodeid p1 p2
 0 0 75 0 7 905359268f77 000000000000 000000000000
 rev offset length base linkrev nodeid p1 p2
 0 0 75 0 8 905359268f77 000000000000 000000000000
 rev offset length base linkrev nodeid p1 p2
 0 0 8 0 6 12ab3bcc5ea4 000000000000 000000000000
 rev offset length base linkrev nodeid p1 p2
 0 0 48 0 0 43eadb1d2d06 000000000000 000000000000
 1 48 48 1 1 8b89697eba2c 43eadb1d2d06 000000000000
 2 96 48 2 2 626a32663c2f 8b89697eba2c 000000000000
 3 144 48 3 3 f54c32f13478 626a32663c2f 000000000000
 4 192 58 3 6 de68e904d169 626a32663c2f 000000000000
 5 250 68 3 7 3b45cc2ab868 de68e904d169 000000000000
 6 318 54 6 8 24d86153a002 f54c32f13478 000000000000
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 4 files, 9 changesets, 7 total revisions
 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 1 changesets, 1 total revisions
 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 2 changesets with 2 changes to 1 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 2 changesets, 2 total revisions
 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 3 changesets with 3 changes to 1 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 3 changesets, 3 total revisions
 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 4 changesets with 4 changes to 1 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 4 changesets, 4 total revisions
 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 2 changesets with 2 changes to 1 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 2 changesets, 2 total revisions
 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 3 changesets with 3 changes to 1 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 3 changesets, 3 total revisions
 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 4 changesets with 5 changes to 2 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 2 files, 4 changesets, 5 total revisions
 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 5 changesets with 6 changes to 3 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 3 files, 5 changesets, 6 total revisions
 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 5 changesets with 5 changes to 2 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 2 files, 5 changesets, 5 total revisions
 pulling from ../test-7
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 4 changesets with 2 changes to 3 files (+1 heads)
 (run 'hg heads' to see heads, 'hg merge' to merge)
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 4 files, 9 changesets, 7 total revisions
@@ -1,15 +1,13 @@
 a
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 1 changesets, 1 total revisions
 a not present
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 1 changesets, 1 total revisions
 a
@@ -1,52 +1,51 @@
 A b
 b
 b: copy a:b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
 we should see two history entries
 changeset: 1:386a3cc01532710ca78aed9a54fa2f459c04f29c
 tag: tip
 user: test
 date: Mon Jan 12 13:46:40 1970 +0000
 files: b
 description:
 2


 changeset: 0:33aaa84a386bd609094aeb21a97c09436c482ef1
 user: test
 date: Mon Jan 12 13:46:40 1970 +0000
 files: a
 description:
 1


 we should see one log entry for a
 changeset: 0:33aaa84a386b
 user: test
 date: Mon Jan 12 13:46:40 1970 +0000
 summary: 1

 this should show a revision linked to changeset 0
 rev offset length base linkrev nodeid p1 p2
 0 0 3 0 0 b789fdd96dc2 000000000000 000000000000
 we should see one log entry for b
 changeset: 1:386a3cc01532
 tag: tip
 user: test
 date: Mon Jan 12 13:46:40 1970 +0000
 summary: 2

 this should show a revision linked to changeset 1
 rev offset length base linkrev nodeid p1 p2
 0 0 65 0 1 9a263dd772e0 000000000000 000000000000
 this should show the rename information in the metadata
 copyrev: b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3
 copy: a
 566e338d09a089ba737c21e0d3759980 .hg/data/b.d
 60b725f10c9c85c70d97880dfe8191b3 bsum
 60b725f10c9c85c70d97880dfe8191b3 asum
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 2 files, 2 changesets, 2 total revisions
@@ -1,6 +1,5 @@
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 0 files, 0 changesets, 0 total revisions
@@ -1,60 +1,59 @@
 changeset: 4:f6c172c6198c
 tag: tip
 parent: 1:448a8c5e42f1
 parent: 2:7c5dc2e857f2
 user: test
 date: Mon Jan 12 13:46:40 1970 +0000
 summary: merge a/b -> blah

 changeset: 3:13d875a22764
 parent: 2:7c5dc2e857f2
 parent: 1:448a8c5e42f1
 user: test
 date: Mon Jan 12 13:46:40 1970 +0000
 summary: merge b/a -> blah

 changeset: 2:7c5dc2e857f2
 parent: 0:dc1751ec2e9d
 user: test
 date: Mon Jan 12 13:46:40 1970 +0000
 summary: branch b

 changeset: 1:448a8c5e42f1
 user: test
 date: Mon Jan 12 13:46:40 1970 +0000
 summary: branch a

 changeset: 0:dc1751ec2e9d
 user: test
 date: Mon Jan 12 13:46:40 1970 +0000
 summary: test

 rev offset length base linkrev nodeid p1 p2
 0 0 64 0 0 dc1751ec2e9d 000000000000 000000000000
 1 64 68 1 1 448a8c5e42f1 dc1751ec2e9d 000000000000
 2 132 68 2 2 7c5dc2e857f2 dc1751ec2e9d 000000000000
 3 200 75 3 3 13d875a22764 7c5dc2e857f2 448a8c5e42f1
 4 275 29 3 4 f6c172c6198c 448a8c5e42f1 7c5dc2e857f2

 1
 79d7492df40aa0fa093ec4209be78043c181f094 644 a
 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 b
 2
 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 a
 79d7492df40aa0fa093ec4209be78043c181f094 644 b
 3
 79d7492df40aa0fa093ec4209be78043c181f094 644 a
 79d7492df40aa0fa093ec4209be78043c181f094 644 b
 4
 79d7492df40aa0fa093ec4209be78043c181f094 644 a
 79d7492df40aa0fa093ec4209be78043c181f094 644 b

 rev offset length base linkrev nodeid p1 p2
 0 0 5 0 0 2ed2a3912a0b 000000000000 000000000000
 1 5 6 1 1 79d7492df40a 2ed2a3912a0b 000000000000
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 2 files, 5 changesets, 4 total revisions
@@ -1,73 +1,72 @@
 creating base
 creating branch a
 creating branch b
 we shouldn't have anything but n state here
 n 644 2 bar
 n 644 3 baz
 n 644 3 foo
 n 644 2 quux
 merging
 pulling from ../a
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 2 changes to 2 files (+1 heads)
 (run 'hg heads' to see heads, 'hg merge' to merge)
 merging for foo
 resolving manifests
 getting bar
 merging foo
 resolving foo
 1 files updated, 1 files merged, 0 files removed, 0 files unresolved
 (branch merge, don't forget to commit)
 we shouldn't have anything but foo in merge state here
 m 644 3 foo
 main: we should have a merge here
 rev offset length base linkrev nodeid p1 p2
 0 0 77 0 0 c36078bec30d 000000000000 000000000000
 1 77 73 1 1 182b283965f1 c36078bec30d 000000000000
 2 150 71 2 2 a6aef98656b7 c36078bec30d 000000000000
 3 221 72 3 3 0c2cc6fc80e2 182b283965f1 a6aef98656b7
 log should show foo and quux changed
 changeset: 3:0c2cc6fc80e2d4ee289bb658dbbe9ad932380fe9
 tag: tip
 parent: 1:182b283965f1069c0112784e30e7755ad1c0dd52
 parent: 2:a6aef98656b71154cae9d87408abe6d0218c8045
 user: test
 date: Mon Jan 12 13:46:40 1970 +0000
 files: foo quux
 description:
 merge


 foo: we should have a merge here
 rev offset length base linkrev nodeid p1 p2
 0 0 3 0 0 b8e02f643373 000000000000 000000000000
 1 3 4 1 1 2ffeddde1b65 b8e02f643373 000000000000
 2 7 4 2 2 33d1fb69067a b8e02f643373 000000000000
 3 11 4 3 3 aa27919ee430 2ffeddde1b65 33d1fb69067a
 bar: we shouldn't have a merge here
 rev offset length base linkrev nodeid p1 p2
 0 0 3 0 0 b8e02f643373 000000000000 000000000000
 1 3 4 1 2 33d1fb69067a b8e02f643373 000000000000
 baz: we shouldn't have a merge here
 rev offset length base linkrev nodeid p1 p2
 0 0 3 0 0 b8e02f643373 000000000000 000000000000
 1 3 4 1 1 2ffeddde1b65 b8e02f643373 000000000000
 quux: we shouldn't have a merge here
 rev offset length base linkrev nodeid p1 p2
 0 0 3 0 0 b8e02f643373 000000000000 000000000000
 1 3 5 1 3 6128c0f33108 b8e02f643373 000000000000
 manifest entries should match tips of all files
 33d1fb69067a0139622a3fa3b7ba1cdb1367972e 644 bar
 2ffeddde1b65b4827f6746174a145474129fa2ce 644 baz
 aa27919ee4303cfd575e1fb932dd64d75aa08be4 644 foo
 6128c0f33108e8cfbb4e0824d13ae48b466d7280 644 quux
 everything should be clean now
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 4 files, 4 changesets, 10 total revisions
@@ -1,19 +1,16 @@
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 1 changesets, 1 total revisions
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 verify failed
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 1 changesets, 1 total revisions
 commit failed
@@ -1,11 +1,10 @@
 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 1 changesets, 1 total revisions
@@ -1,25 +1,24 @@
 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
 pulling from ../source2
 pulling from ../source1
 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 10 changesets with 10 changes to 1 files
 (run 'hg update' to get a working copy)
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files (+1 heads)
 (run 'hg heads' to see heads, 'hg merge' to merge)
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 11 changesets, 11 total revisions
@@ -1,24 +1,22 @@
 pulling from source1
 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 10 changesets with 10 changes to 1 files
 (run 'hg update' to get a working copy)
 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 10 changesets with 10 changes to 1 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 10 changesets, 10 total revisions
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 10 changesets, 10 total revisions
@@ -1,23 +1,21 @@
 adding foo
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 1 changesets, 1 total revisions
 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 1 changesets, 1 total revisions
 foo
 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 foo
 pulling from http://localhost:20059/
 searching for changes
 no changes found
@@ -1,146 +1,135 @@
 rev offset length base linkrev nodeid p1 p2
 0 0 3 0 0 362fef284ce2 000000000000 000000000000
 1 3 5 1 1 125144f7e028 362fef284ce2 000000000000
 2 8 7 2 2 4c982badb186 125144f7e028 000000000000
 3 15 9 3 3 19b1fc555737 4c982badb186 000000000000
 rev offset length base linkrev nodeid p1 p2
 0 0 75 0 7 905359268f77 000000000000 000000000000
 rev offset length base linkrev nodeid p1 p2
 0 0 75 0 8 905359268f77 000000000000 000000000000
 rev offset length base linkrev nodeid p1 p2
 0 0 8 0 6 12ab3bcc5ea4 000000000000 000000000000
 rev offset length base linkrev nodeid p1 p2
 0 0 48 0 0 43eadb1d2d06 000000000000 000000000000
 1 48 48 1 1 8b89697eba2c 43eadb1d2d06 000000000000
 2 96 48 2 2 626a32663c2f 8b89697eba2c 000000000000
 3 144 48 3 3 f54c32f13478 626a32663c2f 000000000000
 4 192 58 3 6 de68e904d169 626a32663c2f 000000000000
 5 250 68 3 7 3b45cc2ab868 de68e904d169 000000000000
 6 318 54 6 8 24d86153a002 f54c32f13478 000000000000
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 4 files, 9 changesets, 7 total revisions
 pushing to test-0
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 1 changesets, 1 total revisions
 pushing to test-1
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 2 changesets with 2 changes to 1 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 2 changesets, 2 total revisions
 pushing to test-2
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 3 changesets with 3 changes to 1 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 3 changesets, 3 total revisions
 pushing to test-3
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 4 changesets with 4 changes to 1 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 4 changesets, 4 total revisions
 pushing to test-4
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 2 changesets with 2 changes to 1 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 2 changesets, 2 total revisions
 pushing to test-5
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 3 changesets with 3 changes to 1 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 3 changesets, 3 total revisions
 pushing to test-6
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 4 changesets with 5 changes to 2 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 2 files, 4 changesets, 5 total revisions
 pushing to test-7
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 5 changesets with 6 changes to 3 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 3 files, 5 changesets, 6 total revisions
 pushing to test-8
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 5 changesets with 5 changes to 2 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 2 files, 5 changesets, 5 total revisions
 pulling from ../test-7
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 4 changesets with 2 changes to 3 files (+1 heads)
 (run 'hg heads' to see heads, 'hg merge' to merge)
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 4 files, 9 changesets, 7 total revisions
@@ -1,23 +1,21 @@
 adding foo
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 1 changesets, 1 total revisions
 pulling from ../branch
 searching for changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
 (run 'hg update' to get a working copy)
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 2 changesets, 2 total revisions
 foo
 bar
 6f4310b00b9a147241b071a60c28a650827fb03d 644 foo
@@ -1,63 +1,61 @@
 # creating 'remote'
 # clone remote
 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
 # verify
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 1 changesets, 1 total revisions
 # empty default pull
 default = ssh://user@dummy/remote
 pulling from ssh://user@dummy/remote
 searching for changes
 no changes found
 # local change
 # updating rc
 # find outgoing
 searching for changes
 changeset: 1:c54836a570be
 tag: tip
 user: test
 date: Mon Jan 12 13:46:40 1970 +0000
 summary: add

 # find incoming on the remote side
 searching for changes
 changeset: 1:c54836a570be
 tag: tip
 user: test
 date: Mon Jan 12 13:46:40 1970 +0000
 summary: add

 # push
 pushing to ssh://user@dummy/remote
 searching for changes
 remote: adding changesets
 remote: adding manifests
 remote: adding file changes
 remote: added 1 changesets with 1 changes to 1 files
 # check remote tip
 changeset: 1:c54836a570be
 tag: tip
 user: test
 date: Mon Jan 12 13:46:40 1970 +0000
 summary: add

-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 2 changesets, 2 total revisions
 bleah
 Got arguments 1:user@dummy 2:hg -R remote serve --stdio 3: 4: 5:
 Got arguments 1:user@dummy 2:hg -R remote serve --stdio 3: 4: 5:
 Got arguments 1:user@dummy 2:hg -R remote serve --stdio 3: 4: 5:
 Got arguments 1:user@dummy 2:hg -R local serve --stdio 3: 4: 5:
 Got arguments 1:user@dummy 2:hg -R remote serve --stdio 3: 4: 5:
@@ -1,24 +1,23 @@
 abort: Connection refused
 255
 copy: No such file or directory
 changeset: 0:53e17d176ae6
 tag: tip
 user: test
 date: Mon Jan 12 13:46:40 1970 +0000
 summary: test

 requesting all changes
 adding changesets
 adding manifests
 adding file changes
 added 1 changesets with 1 changes to 1 files
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 1 changesets, 1 total revisions
 foo
 pulling from old-http://localhost:20059/remote
 searching for changes
 no changes found
@@ -1,20 +1,18 @@
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 1 files, 1 changesets, 1 total revisions
 changeset: 0:0acdaf898367
 tag: tip
 user: test
 date: Mon Jan 12 13:46:40 1970 +0000
 summary: test

 rolling back last transaction
-repository uses revlog format 0
 checking changesets
 checking manifests
 crosschecking files in changesets and manifests
 checking files
 0 files, 0 changesets, 0 total revisions
 A a