Actually warn on pulling from an unrelated repository...
mpm@selenic.com
r579:ffeb2c3a default
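In short, this changeset makes pull-side discovery notice when the local and remote repositories share no history at all. localrepository.findincoming() now collects, in a new `base` dict, the locally known changesets that the remote branches attach to; if the only attachment point ever recorded is the null revision, the repositories are unrelated and the warning is printed (the pull itself still proceeds). A minimal sketch of that final check, using assumed helper names rather than the real hg API:

    import sys

    nullid = "\0" * 20          # Mercurial's null revision id

    def check_unrelated(base, warn=sys.stderr.write):
        # base: locally known nodes that remote history attached to during
        # discovery; warn: the reporting callable (ui.warn in hg itself).
        if list(base.keys()) == [nullid]:
            warn("warning: pulling from an unrelated repository!\n")

    # Discovery that only ever attached to the null revision => unrelated.
    check_unrelated({nullid: 1})

The test added below builds two independent repositories and pulls one into the other, checking that the warning shows up while the changesets are still transferred.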
@@ -0,0 +1,17 b''
1 mkdir a
2 cd a
3 hg init
4 echo 123 > a
5 hg add a
6 hg commit -t "a" -u a -d "0 0"
7
8 cd ..
9 mkdir b
10 cd b
11 hg init
12 echo 321 > b
13 hg add b
14 hg commit -t "b" -u b -d "0 0"
15
16 hg pull ../a
17 hg heads
@@ -0,0 +1,19 b''
1 pulling from ../a
2 searching for changes
3 warning: pulling from an unrelated repository!
4 adding changesets
5 adding manifests
6 adding file revisions
7 modified 1 files, added 1 changesets and 1 new revisions
8 (run 'hg update' to get a working copy)
9 changeset: 1:9a79c33a9db37480e40fbd2a65d62ebd2a3c441c
10 tag: tip
11 user: a
12 date: Thu Jan 1 00:00:00 1970
13 summary: a
14
15 changeset: 0:01f8062b2de51c0fa6428c5db1d1b3ea780189df
16 user: b
17 date: Thu Jan 1 00:00:00 1970
18 summary: b
19
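The mercurial/hg.py hunk that follows is mostly unchanged context; the functional change lives in localrepository.findincoming(). For each remote branch (head, root, first parent, second parent) that discovery can attach to local history, it records the earliest unknown node in `fetch` and, new in this change, the latest known node in `base`, both in the direct case and in the binary-search narrowing loop. A condensed sketch of that per-branch bookkeeping, simplified and not the real method:

    # branch is (head, root, p1, p2); `known` stands in for the local
    # changelog's nodemap.  Simplified: the real code also handles already
    # seen branches, partially known roots, and binary search over long
    # branches.
    def classify_branch(branch, known, fetch, base):
        head, root, p1, p2 = branch
        if p1 in known and p2 in known:
            fetch.append(root)   # earliest changeset we do not have yet
            base[p1] = 1         # latest changeset we do have (new here)
            return True          # nothing more to ask about this branch
        return False             # keep asking the remote about its parents

The warning condition then switches from inspecting fetch to inspecting base: fetch holds real unknown changesets even in the unrelated case, so the old `fetch == [nullid]` test effectively never fired, whereas base ends up containing only the null node exactly when the two histories never meet.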
@@ -1,1534 +1,1544 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import sys, struct, os
8 import sys, struct, os
9 import util
9 import util
10 from revlog import *
10 from revlog import *
11 from demandload import *
11 from demandload import *
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 demandload(globals(), "tempfile httprangereader bdiff")
13 demandload(globals(), "tempfile httprangereader bdiff")
14
14
15 class filelog(revlog):
15 class filelog(revlog):
16 def __init__(self, opener, path):
16 def __init__(self, opener, path):
17 revlog.__init__(self, opener,
17 revlog.__init__(self, opener,
18 os.path.join("data", path + ".i"),
18 os.path.join("data", path + ".i"),
19 os.path.join("data", path + ".d"))
19 os.path.join("data", path + ".d"))
20
20
21 def read(self, node):
21 def read(self, node):
22 t = self.revision(node)
22 t = self.revision(node)
23 if t[:2] != '\1\n':
23 if t[:2] != '\1\n':
24 return t
24 return t
25 s = t.find('\1\n', 2)
25 s = t.find('\1\n', 2)
26 return t[s+2:]
26 return t[s+2:]
27
27
28 def readmeta(self, node):
28 def readmeta(self, node):
29 t = self.revision(node)
29 t = self.revision(node)
30 if t[:2] != '\1\n':
30 if t[:2] != '\1\n':
31 return t
31 return t
32 s = t.find('\1\n', 2)
32 s = t.find('\1\n', 2)
33 mt = t[2:s]
33 mt = t[2:s]
34 for l in mt.splitlines():
34 for l in mt.splitlines():
35 k, v = l.split(": ", 1)
35 k, v = l.split(": ", 1)
36 m[k] = v
36 m[k] = v
37 return m
37 return m
38
38
39 def add(self, text, meta, transaction, link, p1=None, p2=None):
39 def add(self, text, meta, transaction, link, p1=None, p2=None):
40 if meta or text[:2] == '\1\n':
40 if meta or text[:2] == '\1\n':
41 mt = ""
41 mt = ""
42 if meta:
42 if meta:
43 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
43 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
44 text = "\1\n" + "".join(mt) + "\1\n" + text
44 text = "\1\n" + "".join(mt) + "\1\n" + text
45 return self.addrevision(text, transaction, link, p1, p2)
45 return self.addrevision(text, transaction, link, p1, p2)
46
46
47 def annotate(self, node):
47 def annotate(self, node):
48
48
49 def decorate(text, rev):
49 def decorate(text, rev):
50 return ([rev] * len(text.splitlines()), text)
50 return ([rev] * len(text.splitlines()), text)
51
51
52 def pair(parent, child):
52 def pair(parent, child):
53 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
53 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
54 child[0][b1:b2] = parent[0][a1:a2]
54 child[0][b1:b2] = parent[0][a1:a2]
55 return child
55 return child
56
56
57 # find all ancestors
57 # find all ancestors
58 needed = {node:1}
58 needed = {node:1}
59 visit = [node]
59 visit = [node]
60 while visit:
60 while visit:
61 n = visit.pop(0)
61 n = visit.pop(0)
62 for p in self.parents(n):
62 for p in self.parents(n):
63 if p not in needed:
63 if p not in needed:
64 needed[p] = 1
64 needed[p] = 1
65 visit.append(p)
65 visit.append(p)
66 else:
66 else:
67 # count how many times we'll use this
67 # count how many times we'll use this
68 needed[p] += 1
68 needed[p] += 1
69
69
70 # sort by revision which is a topological order
70 # sort by revision which is a topological order
71 visit = [ (self.rev(n), n) for n in needed.keys() ]
71 visit = [ (self.rev(n), n) for n in needed.keys() ]
72 visit.sort()
72 visit.sort()
73 hist = {}
73 hist = {}
74
74
75 for r,n in visit:
75 for r,n in visit:
76 curr = decorate(self.read(n), self.linkrev(n))
76 curr = decorate(self.read(n), self.linkrev(n))
77 for p in self.parents(n):
77 for p in self.parents(n):
78 if p != nullid:
78 if p != nullid:
79 curr = pair(hist[p], curr)
79 curr = pair(hist[p], curr)
80 # trim the history of unneeded revs
80 # trim the history of unneeded revs
81 needed[p] -= 1
81 needed[p] -= 1
82 if not needed[p]:
82 if not needed[p]:
83 del hist[p]
83 del hist[p]
84 hist[n] = curr
84 hist[n] = curr
85
85
86 return zip(hist[n][0], hist[n][1].splitlines(1))
86 return zip(hist[n][0], hist[n][1].splitlines(1))
87
87
88 class manifest(revlog):
88 class manifest(revlog):
89 def __init__(self, opener):
89 def __init__(self, opener):
90 self.mapcache = None
90 self.mapcache = None
91 self.listcache = None
91 self.listcache = None
92 self.addlist = None
92 self.addlist = None
93 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
93 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
94
94
95 def read(self, node):
95 def read(self, node):
96 if node == nullid: return {} # don't upset local cache
96 if node == nullid: return {} # don't upset local cache
97 if self.mapcache and self.mapcache[0] == node:
97 if self.mapcache and self.mapcache[0] == node:
98 return self.mapcache[1]
98 return self.mapcache[1]
99 text = self.revision(node)
99 text = self.revision(node)
100 map = {}
100 map = {}
101 flag = {}
101 flag = {}
102 self.listcache = (text, text.splitlines(1))
102 self.listcache = (text, text.splitlines(1))
103 for l in self.listcache[1]:
103 for l in self.listcache[1]:
104 (f, n) = l.split('\0')
104 (f, n) = l.split('\0')
105 map[f] = bin(n[:40])
105 map[f] = bin(n[:40])
106 flag[f] = (n[40:-1] == "x")
106 flag[f] = (n[40:-1] == "x")
107 self.mapcache = (node, map, flag)
107 self.mapcache = (node, map, flag)
108 return map
108 return map
109
109
110 def readflags(self, node):
110 def readflags(self, node):
111 if node == nullid: return {} # don't upset local cache
111 if node == nullid: return {} # don't upset local cache
112 if not self.mapcache or self.mapcache[0] != node:
112 if not self.mapcache or self.mapcache[0] != node:
113 self.read(node)
113 self.read(node)
114 return self.mapcache[2]
114 return self.mapcache[2]
115
115
116 def diff(self, a, b):
116 def diff(self, a, b):
117 # this is sneaky, as we're not actually using a and b
117 # this is sneaky, as we're not actually using a and b
118 if self.listcache and self.addlist and self.listcache[0] == a:
118 if self.listcache and self.addlist and self.listcache[0] == a:
119 d = mdiff.diff(self.listcache[1], self.addlist, 1)
119 d = mdiff.diff(self.listcache[1], self.addlist, 1)
120 if mdiff.patch(a, d) != b:
120 if mdiff.patch(a, d) != b:
121 sys.stderr.write("*** sortdiff failed, falling back ***\n")
121 sys.stderr.write("*** sortdiff failed, falling back ***\n")
122 return mdiff.textdiff(a, b)
122 return mdiff.textdiff(a, b)
123 return d
123 return d
124 else:
124 else:
125 return mdiff.textdiff(a, b)
125 return mdiff.textdiff(a, b)
126
126
127 def add(self, map, flags, transaction, link, p1=None, p2=None):
127 def add(self, map, flags, transaction, link, p1=None, p2=None):
128 files = map.keys()
128 files = map.keys()
129 files.sort()
129 files.sort()
130
130
131 self.addlist = ["%s\000%s%s\n" %
131 self.addlist = ["%s\000%s%s\n" %
132 (f, hex(map[f]), flags[f] and "x" or '')
132 (f, hex(map[f]), flags[f] and "x" or '')
133 for f in files]
133 for f in files]
134 text = "".join(self.addlist)
134 text = "".join(self.addlist)
135
135
136 n = self.addrevision(text, transaction, link, p1, p2)
136 n = self.addrevision(text, transaction, link, p1, p2)
137 self.mapcache = (n, map, flags)
137 self.mapcache = (n, map, flags)
138 self.listcache = (text, self.addlist)
138 self.listcache = (text, self.addlist)
139 self.addlist = None
139 self.addlist = None
140
140
141 return n
141 return n
142
142
143 class changelog(revlog):
143 class changelog(revlog):
144 def __init__(self, opener):
144 def __init__(self, opener):
145 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
145 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
146
146
147 def extract(self, text):
147 def extract(self, text):
148 if not text:
148 if not text:
149 return (nullid, "", "0", [], "")
149 return (nullid, "", "0", [], "")
150 last = text.index("\n\n")
150 last = text.index("\n\n")
151 desc = text[last + 2:]
151 desc = text[last + 2:]
152 l = text[:last].splitlines()
152 l = text[:last].splitlines()
153 manifest = bin(l[0])
153 manifest = bin(l[0])
154 user = l[1]
154 user = l[1]
155 date = l[2]
155 date = l[2]
156 files = l[3:]
156 files = l[3:]
157 return (manifest, user, date, files, desc)
157 return (manifest, user, date, files, desc)
158
158
159 def read(self, node):
159 def read(self, node):
160 return self.extract(self.revision(node))
160 return self.extract(self.revision(node))
161
161
162 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
162 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
163 user=None, date=None):
163 user=None, date=None):
164 user = (user or
164 user = (user or
165 os.environ.get("HGUSER") or
165 os.environ.get("HGUSER") or
166 os.environ.get("EMAIL") or
166 os.environ.get("EMAIL") or
167 (os.environ.get("LOGNAME",
167 (os.environ.get("LOGNAME",
168 os.environ.get("USERNAME", "unknown"))
168 os.environ.get("USERNAME", "unknown"))
169 + '@' + socket.getfqdn()))
169 + '@' + socket.getfqdn()))
170 date = date or "%d %d" % (time.time(), time.timezone)
170 date = date or "%d %d" % (time.time(), time.timezone)
171 list.sort()
171 list.sort()
172 l = [hex(manifest), user, date] + list + ["", desc]
172 l = [hex(manifest), user, date] + list + ["", desc]
173 text = "\n".join(l)
173 text = "\n".join(l)
174 return self.addrevision(text, transaction, self.count(), p1, p2)
174 return self.addrevision(text, transaction, self.count(), p1, p2)
175
175
176 class dirstate:
176 class dirstate:
177 def __init__(self, opener, ui, root):
177 def __init__(self, opener, ui, root):
178 self.opener = opener
178 self.opener = opener
179 self.root = root
179 self.root = root
180 self.dirty = 0
180 self.dirty = 0
181 self.ui = ui
181 self.ui = ui
182 self.map = None
182 self.map = None
183 self.pl = None
183 self.pl = None
184 self.copies = {}
184 self.copies = {}
185
185
186 def __del__(self):
186 def __del__(self):
187 if self.dirty:
187 if self.dirty:
188 self.write()
188 self.write()
189
189
190 def __getitem__(self, key):
190 def __getitem__(self, key):
191 try:
191 try:
192 return self.map[key]
192 return self.map[key]
193 except TypeError:
193 except TypeError:
194 self.read()
194 self.read()
195 return self[key]
195 return self[key]
196
196
197 def __contains__(self, key):
197 def __contains__(self, key):
198 if not self.map: self.read()
198 if not self.map: self.read()
199 return key in self.map
199 return key in self.map
200
200
201 def parents(self):
201 def parents(self):
202 if not self.pl:
202 if not self.pl:
203 self.read()
203 self.read()
204 return self.pl
204 return self.pl
205
205
206 def setparents(self, p1, p2 = nullid):
206 def setparents(self, p1, p2 = nullid):
207 self.dirty = 1
207 self.dirty = 1
208 self.pl = p1, p2
208 self.pl = p1, p2
209
209
210 def state(self, key):
210 def state(self, key):
211 try:
211 try:
212 return self[key][0]
212 return self[key][0]
213 except KeyError:
213 except KeyError:
214 return "?"
214 return "?"
215
215
216 def read(self):
216 def read(self):
217 if self.map is not None: return self.map
217 if self.map is not None: return self.map
218
218
219 self.map = {}
219 self.map = {}
220 self.pl = [nullid, nullid]
220 self.pl = [nullid, nullid]
221 try:
221 try:
222 st = self.opener("dirstate").read()
222 st = self.opener("dirstate").read()
223 if not st: return
223 if not st: return
224 except: return
224 except: return
225
225
226 self.pl = [st[:20], st[20: 40]]
226 self.pl = [st[:20], st[20: 40]]
227
227
228 pos = 40
228 pos = 40
229 while pos < len(st):
229 while pos < len(st):
230 e = struct.unpack(">cllll", st[pos:pos+17])
230 e = struct.unpack(">cllll", st[pos:pos+17])
231 l = e[4]
231 l = e[4]
232 pos += 17
232 pos += 17
233 f = st[pos:pos + l]
233 f = st[pos:pos + l]
234 if '\0' in f:
234 if '\0' in f:
235 f, c = f.split('\0')
235 f, c = f.split('\0')
236 self.copies[f] = c
236 self.copies[f] = c
237 self.map[f] = e[:4]
237 self.map[f] = e[:4]
238 pos += l
238 pos += l
239
239
240 def copy(self, source, dest):
240 def copy(self, source, dest):
241 self.read()
241 self.read()
242 self.dirty = 1
242 self.dirty = 1
243 self.copies[dest] = source
243 self.copies[dest] = source
244
244
245 def copied(self, file):
245 def copied(self, file):
246 return self.copies.get(file, None)
246 return self.copies.get(file, None)
247
247
248 def update(self, files, state):
248 def update(self, files, state):
249 ''' current states:
249 ''' current states:
250 n normal
250 n normal
251 m needs merging
251 m needs merging
252 r marked for removal
252 r marked for removal
253 a marked for addition'''
253 a marked for addition'''
254
254
255 if not files: return
255 if not files: return
256 self.read()
256 self.read()
257 self.dirty = 1
257 self.dirty = 1
258 for f in files:
258 for f in files:
259 if state == "r":
259 if state == "r":
260 self.map[f] = ('r', 0, 0, 0)
260 self.map[f] = ('r', 0, 0, 0)
261 else:
261 else:
262 s = os.stat(os.path.join(self.root, f))
262 s = os.stat(os.path.join(self.root, f))
263 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
263 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
264
264
265 def forget(self, files):
265 def forget(self, files):
266 if not files: return
266 if not files: return
267 self.read()
267 self.read()
268 self.dirty = 1
268 self.dirty = 1
269 for f in files:
269 for f in files:
270 try:
270 try:
271 del self.map[f]
271 del self.map[f]
272 except KeyError:
272 except KeyError:
273 self.ui.warn("not in dirstate: %s!\n" % f)
273 self.ui.warn("not in dirstate: %s!\n" % f)
274 pass
274 pass
275
275
276 def clear(self):
276 def clear(self):
277 self.map = {}
277 self.map = {}
278 self.dirty = 1
278 self.dirty = 1
279
279
280 def write(self):
280 def write(self):
281 st = self.opener("dirstate", "w")
281 st = self.opener("dirstate", "w")
282 st.write("".join(self.pl))
282 st.write("".join(self.pl))
283 for f, e in self.map.items():
283 for f, e in self.map.items():
284 c = self.copied(f)
284 c = self.copied(f)
285 if c:
285 if c:
286 f = f + "\0" + c
286 f = f + "\0" + c
287 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
287 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
288 st.write(e + f)
288 st.write(e + f)
289 self.dirty = 0
289 self.dirty = 0
290
290
291 def changes(self, files, ignore):
291 def changes(self, files, ignore):
292 self.read()
292 self.read()
293 dc = self.map.copy()
293 dc = self.map.copy()
294 lookup, changed, added, unknown = [], [], [], []
294 lookup, changed, added, unknown = [], [], [], []
295
295
296 # compare all files by default
296 # compare all files by default
297 if not files: files = [self.root]
297 if not files: files = [self.root]
298
298
299 # recursive generator of all files listed
299 # recursive generator of all files listed
300 def walk(files):
300 def walk(files):
301 for f in util.unique(files):
301 for f in util.unique(files):
302 f = os.path.join(self.root, f)
302 f = os.path.join(self.root, f)
303 if os.path.isdir(f):
303 if os.path.isdir(f):
304 for dir, subdirs, fl in os.walk(f):
304 for dir, subdirs, fl in os.walk(f):
305 d = dir[len(self.root) + 1:]
305 d = dir[len(self.root) + 1:]
306 if ".hg" in subdirs: subdirs.remove(".hg")
306 if ".hg" in subdirs: subdirs.remove(".hg")
307 for fn in fl:
307 for fn in fl:
308 fn = util.pconvert(os.path.join(d, fn))
308 fn = util.pconvert(os.path.join(d, fn))
309 yield fn
309 yield fn
310 else:
310 else:
311 yield f[len(self.root) + 1:]
311 yield f[len(self.root) + 1:]
312
312
313 for fn in util.unique(walk(files)):
313 for fn in util.unique(walk(files)):
314 try: s = os.stat(os.path.join(self.root, fn))
314 try: s = os.stat(os.path.join(self.root, fn))
315 except: continue
315 except: continue
316
316
317 if fn in dc:
317 if fn in dc:
318 c = dc[fn]
318 c = dc[fn]
319 del dc[fn]
319 del dc[fn]
320
320
321 if c[0] == 'm':
321 if c[0] == 'm':
322 changed.append(fn)
322 changed.append(fn)
323 elif c[0] == 'a':
323 elif c[0] == 'a':
324 added.append(fn)
324 added.append(fn)
325 elif c[0] == 'r':
325 elif c[0] == 'r':
326 unknown.append(fn)
326 unknown.append(fn)
327 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
327 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
328 changed.append(fn)
328 changed.append(fn)
329 elif c[1] != s.st_mode or c[3] != s.st_mtime:
329 elif c[1] != s.st_mode or c[3] != s.st_mtime:
330 lookup.append(fn)
330 lookup.append(fn)
331 else:
331 else:
332 if not ignore(fn): unknown.append(fn)
332 if not ignore(fn): unknown.append(fn)
333
333
334 return (lookup, changed, added, dc.keys(), unknown)
334 return (lookup, changed, added, dc.keys(), unknown)
335
335
336 # used to avoid circular references so destructors work
336 # used to avoid circular references so destructors work
337 def opener(base):
337 def opener(base):
338 p = base
338 p = base
339 def o(path, mode="r"):
339 def o(path, mode="r"):
340 if p[:7] == "http://":
340 if p[:7] == "http://":
341 f = os.path.join(p, urllib.quote(path))
341 f = os.path.join(p, urllib.quote(path))
342 return httprangereader.httprangereader(f)
342 return httprangereader.httprangereader(f)
343
343
344 f = os.path.join(p, path)
344 f = os.path.join(p, path)
345
345
346 mode += "b" # for that other OS
346 mode += "b" # for that other OS
347
347
348 if mode[0] != "r":
348 if mode[0] != "r":
349 try:
349 try:
350 s = os.stat(f)
350 s = os.stat(f)
351 except OSError:
351 except OSError:
352 d = os.path.dirname(f)
352 d = os.path.dirname(f)
353 if not os.path.isdir(d):
353 if not os.path.isdir(d):
354 os.makedirs(d)
354 os.makedirs(d)
355 else:
355 else:
356 if s.st_nlink > 1:
356 if s.st_nlink > 1:
357 file(f + ".tmp", "wb").write(file(f, "rb").read())
357 file(f + ".tmp", "wb").write(file(f, "rb").read())
358 util.rename(f+".tmp", f)
358 util.rename(f+".tmp", f)
359
359
360 return file(f, mode)
360 return file(f, mode)
361
361
362 return o
362 return o
363
363
364 class RepoError(Exception): pass
364 class RepoError(Exception): pass
365
365
366 class localrepository:
366 class localrepository:
367 def __init__(self, ui, path=None, create=0):
367 def __init__(self, ui, path=None, create=0):
368 self.remote = 0
368 self.remote = 0
369 if path and path[:7] == "http://":
369 if path and path[:7] == "http://":
370 self.remote = 1
370 self.remote = 1
371 self.path = path
371 self.path = path
372 else:
372 else:
373 if not path:
373 if not path:
374 p = os.getcwd()
374 p = os.getcwd()
375 while not os.path.isdir(os.path.join(p, ".hg")):
375 while not os.path.isdir(os.path.join(p, ".hg")):
376 oldp = p
376 oldp = p
377 p = os.path.dirname(p)
377 p = os.path.dirname(p)
378 if p == oldp: raise RepoError("no repo found")
378 if p == oldp: raise RepoError("no repo found")
379 path = p
379 path = p
380 self.path = os.path.join(path, ".hg")
380 self.path = os.path.join(path, ".hg")
381
381
382 if not create and not os.path.isdir(self.path):
382 if not create and not os.path.isdir(self.path):
383 raise RepoError("repository %s not found" % self.path)
383 raise RepoError("repository %s not found" % self.path)
384
384
385 self.root = path
385 self.root = path
386 self.ui = ui
386 self.ui = ui
387
387
388 if create:
388 if create:
389 os.mkdir(self.path)
389 os.mkdir(self.path)
390 os.mkdir(self.join("data"))
390 os.mkdir(self.join("data"))
391
391
392 self.opener = opener(self.path)
392 self.opener = opener(self.path)
393 self.wopener = opener(self.root)
393 self.wopener = opener(self.root)
394 self.manifest = manifest(self.opener)
394 self.manifest = manifest(self.opener)
395 self.changelog = changelog(self.opener)
395 self.changelog = changelog(self.opener)
396 self.ignorelist = None
396 self.ignorelist = None
397 self.tagscache = None
397 self.tagscache = None
398 self.nodetagscache = None
398 self.nodetagscache = None
399
399
400 if not self.remote:
400 if not self.remote:
401 self.dirstate = dirstate(self.opener, ui, self.root)
401 self.dirstate = dirstate(self.opener, ui, self.root)
402 try:
402 try:
403 self.ui.readconfig(self.opener("hgrc"))
403 self.ui.readconfig(self.opener("hgrc"))
404 except IOError: pass
404 except IOError: pass
405
405
406 def ignore(self, f):
406 def ignore(self, f):
407 if self.ignorelist is None:
407 if self.ignorelist is None:
408 self.ignorelist = []
408 self.ignorelist = []
409 try:
409 try:
410 l = file(self.wjoin(".hgignore"))
410 l = file(self.wjoin(".hgignore"))
411 for pat in l:
411 for pat in l:
412 if pat != "\n":
412 if pat != "\n":
413 self.ignorelist.append(re.compile(util.pconvert(pat[:-1])))
413 self.ignorelist.append(re.compile(util.pconvert(pat[:-1])))
414 except IOError: pass
414 except IOError: pass
415 for pat in self.ignorelist:
415 for pat in self.ignorelist:
416 if pat.search(f): return True
416 if pat.search(f): return True
417 return False
417 return False
418
418
419 def hook(self, name, **args):
419 def hook(self, name, **args):
420 s = self.ui.config("hooks", name)
420 s = self.ui.config("hooks", name)
421 if s:
421 if s:
422 self.ui.note("running hook %s: %s\n" % (name, s))
422 self.ui.note("running hook %s: %s\n" % (name, s))
423 old = {}
423 old = {}
424 for k, v in args.items():
424 for k, v in args.items():
425 k = k.upper()
425 k = k.upper()
426 old[k] = os.environ.get(k, None)
426 old[k] = os.environ.get(k, None)
427 os.environ[k] = v
427 os.environ[k] = v
428
428
429 r = os.system(s)
429 r = os.system(s)
430
430
431 for k, v in old.items():
431 for k, v in old.items():
432 if v != None:
432 if v != None:
433 os.environ[k] = v
433 os.environ[k] = v
434 else:
434 else:
435 del os.environ[k]
435 del os.environ[k]
436
436
437 if r:
437 if r:
438 self.ui.warn("abort: %s hook failed with status %d!\n" %
438 self.ui.warn("abort: %s hook failed with status %d!\n" %
439 (name, r))
439 (name, r))
440 return False
440 return False
441 return True
441 return True
442
442
443 def tags(self):
443 def tags(self):
444 '''return a mapping of tag to node'''
444 '''return a mapping of tag to node'''
445 if not self.tagscache:
445 if not self.tagscache:
446 self.tagscache = {}
446 self.tagscache = {}
447 try:
447 try:
448 # read each head of the tags file, ending with the tip
448 # read each head of the tags file, ending with the tip
449 # and add each tag found to the map, with "newer" ones
449 # and add each tag found to the map, with "newer" ones
450 # taking precedence
450 # taking precedence
451 fl = self.file(".hgtags")
451 fl = self.file(".hgtags")
452 h = fl.heads()
452 h = fl.heads()
453 h.reverse()
453 h.reverse()
454 for r in h:
454 for r in h:
455 for l in fl.revision(r).splitlines():
455 for l in fl.revision(r).splitlines():
456 if l:
456 if l:
457 n, k = l.split(" ", 1)
457 n, k = l.split(" ", 1)
458 try:
458 try:
459 bin_n = bin(n)
459 bin_n = bin(n)
460 except TypeError:
460 except TypeError:
461 bin_n = ''
461 bin_n = ''
462 self.tagscache[k.strip()] = bin_n
462 self.tagscache[k.strip()] = bin_n
463 except KeyError:
463 except KeyError:
464 pass
464 pass
465 for k, n in self.ui.configitems("tags"):
465 for k, n in self.ui.configitems("tags"):
466 try:
466 try:
467 bin_n = bin(n)
467 bin_n = bin(n)
468 except TypeError:
468 except TypeError:
469 bin_n = ''
469 bin_n = ''
470 self.tagscache[k] = bin_n
470 self.tagscache[k] = bin_n
471
471
472 self.tagscache['tip'] = self.changelog.tip()
472 self.tagscache['tip'] = self.changelog.tip()
473
473
474 return self.tagscache
474 return self.tagscache
475
475
476 def tagslist(self):
476 def tagslist(self):
477 '''return a list of tags ordered by revision'''
477 '''return a list of tags ordered by revision'''
478 l = []
478 l = []
479 for t, n in self.tags().items():
479 for t, n in self.tags().items():
480 try:
480 try:
481 r = self.changelog.rev(n)
481 r = self.changelog.rev(n)
482 except:
482 except:
483 r = -2 # sort to the beginning of the list if unknown
483 r = -2 # sort to the beginning of the list if unknown
484 l.append((r,t,n))
484 l.append((r,t,n))
485 l.sort()
485 l.sort()
486 return [(t,n) for r,t,n in l]
486 return [(t,n) for r,t,n in l]
487
487
488 def nodetags(self, node):
488 def nodetags(self, node):
489 '''return the tags associated with a node'''
489 '''return the tags associated with a node'''
490 if not self.nodetagscache:
490 if not self.nodetagscache:
491 self.nodetagscache = {}
491 self.nodetagscache = {}
492 for t,n in self.tags().items():
492 for t,n in self.tags().items():
493 self.nodetagscache.setdefault(n,[]).append(t)
493 self.nodetagscache.setdefault(n,[]).append(t)
494 return self.nodetagscache.get(node, [])
494 return self.nodetagscache.get(node, [])
495
495
496 def lookup(self, key):
496 def lookup(self, key):
497 try:
497 try:
498 return self.tags()[key]
498 return self.tags()[key]
499 except KeyError:
499 except KeyError:
500 return self.changelog.lookup(key)
500 return self.changelog.lookup(key)
501
501
502 def join(self, f):
502 def join(self, f):
503 return os.path.join(self.path, f)
503 return os.path.join(self.path, f)
504
504
505 def wjoin(self, f):
505 def wjoin(self, f):
506 return os.path.join(self.root, f)
506 return os.path.join(self.root, f)
507
507
508 def file(self, f):
508 def file(self, f):
509 if f[0] == '/': f = f[1:]
509 if f[0] == '/': f = f[1:]
510 return filelog(self.opener, f)
510 return filelog(self.opener, f)
511
511
512 def wfile(self, f, mode='r'):
512 def wfile(self, f, mode='r'):
513 return self.wopener(f, mode)
513 return self.wopener(f, mode)
514
514
515 def transaction(self):
515 def transaction(self):
516 # save dirstate for undo
516 # save dirstate for undo
517 try:
517 try:
518 ds = self.opener("dirstate").read()
518 ds = self.opener("dirstate").read()
519 except IOError:
519 except IOError:
520 ds = ""
520 ds = ""
521 self.opener("undo.dirstate", "w").write(ds)
521 self.opener("undo.dirstate", "w").write(ds)
522
522
523 return transaction.transaction(self.opener, self.join("journal"),
523 return transaction.transaction(self.opener, self.join("journal"),
524 self.join("undo"))
524 self.join("undo"))
525
525
526 def recover(self):
526 def recover(self):
527 lock = self.lock()
527 lock = self.lock()
528 if os.path.exists(self.join("journal")):
528 if os.path.exists(self.join("journal")):
529 self.ui.status("rolling back interrupted transaction\n")
529 self.ui.status("rolling back interrupted transaction\n")
530 return transaction.rollback(self.opener, self.join("journal"))
530 return transaction.rollback(self.opener, self.join("journal"))
531 else:
531 else:
532 self.ui.warn("no interrupted transaction available\n")
532 self.ui.warn("no interrupted transaction available\n")
533
533
534 def undo(self):
534 def undo(self):
535 lock = self.lock()
535 lock = self.lock()
536 if os.path.exists(self.join("undo")):
536 if os.path.exists(self.join("undo")):
537 self.ui.status("rolling back last transaction\n")
537 self.ui.status("rolling back last transaction\n")
538 transaction.rollback(self.opener, self.join("undo"))
538 transaction.rollback(self.opener, self.join("undo"))
539 self.dirstate = None
539 self.dirstate = None
540 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
540 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
541 self.dirstate = dirstate(self.opener, self.ui, self.root)
541 self.dirstate = dirstate(self.opener, self.ui, self.root)
542 else:
542 else:
543 self.ui.warn("no undo information available\n")
543 self.ui.warn("no undo information available\n")
544
544
545 def lock(self, wait = 1):
545 def lock(self, wait = 1):
546 try:
546 try:
547 return lock.lock(self.join("lock"), 0)
547 return lock.lock(self.join("lock"), 0)
548 except lock.LockHeld, inst:
548 except lock.LockHeld, inst:
549 if wait:
549 if wait:
550 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
550 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
551 return lock.lock(self.join("lock"), wait)
551 return lock.lock(self.join("lock"), wait)
552 raise inst
552 raise inst
553
553
554 def rawcommit(self, files, text, user, date, p1=None, p2=None):
554 def rawcommit(self, files, text, user, date, p1=None, p2=None):
555 orig_parent = self.dirstate.parents()[0] or nullid
555 orig_parent = self.dirstate.parents()[0] or nullid
556 p1 = p1 or self.dirstate.parents()[0] or nullid
556 p1 = p1 or self.dirstate.parents()[0] or nullid
557 p2 = p2 or self.dirstate.parents()[1] or nullid
557 p2 = p2 or self.dirstate.parents()[1] or nullid
558 c1 = self.changelog.read(p1)
558 c1 = self.changelog.read(p1)
559 c2 = self.changelog.read(p2)
559 c2 = self.changelog.read(p2)
560 m1 = self.manifest.read(c1[0])
560 m1 = self.manifest.read(c1[0])
561 mf1 = self.manifest.readflags(c1[0])
561 mf1 = self.manifest.readflags(c1[0])
562 m2 = self.manifest.read(c2[0])
562 m2 = self.manifest.read(c2[0])
563
563
564 if orig_parent == p1:
564 if orig_parent == p1:
565 update_dirstate = 1
565 update_dirstate = 1
566 else:
566 else:
567 update_dirstate = 0
567 update_dirstate = 0
568
568
569 tr = self.transaction()
569 tr = self.transaction()
570 mm = m1.copy()
570 mm = m1.copy()
571 mfm = mf1.copy()
571 mfm = mf1.copy()
572 linkrev = self.changelog.count()
572 linkrev = self.changelog.count()
573 for f in files:
573 for f in files:
574 try:
574 try:
575 t = self.wfile(f).read()
575 t = self.wfile(f).read()
576 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
576 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
577 r = self.file(f)
577 r = self.file(f)
578 mfm[f] = tm
578 mfm[f] = tm
579 mm[f] = r.add(t, {}, tr, linkrev,
579 mm[f] = r.add(t, {}, tr, linkrev,
580 m1.get(f, nullid), m2.get(f, nullid))
580 m1.get(f, nullid), m2.get(f, nullid))
581 if update_dirstate:
581 if update_dirstate:
582 self.dirstate.update([f], "n")
582 self.dirstate.update([f], "n")
583 except IOError:
583 except IOError:
584 try:
584 try:
585 del mm[f]
585 del mm[f]
586 del mfm[f]
586 del mfm[f]
587 if update_dirstate:
587 if update_dirstate:
588 self.dirstate.forget([f])
588 self.dirstate.forget([f])
589 except:
589 except:
590 # deleted from p2?
590 # deleted from p2?
591 pass
591 pass
592
592
593 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
593 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
594 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
594 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
595 tr.close()
595 tr.close()
596 if update_dirstate:
596 if update_dirstate:
597 self.dirstate.setparents(n, nullid)
597 self.dirstate.setparents(n, nullid)
598
598
599 def commit(self, files = None, text = "", user = None, date = None):
599 def commit(self, files = None, text = "", user = None, date = None):
600 commit = []
600 commit = []
601 remove = []
601 remove = []
602 if files:
602 if files:
603 for f in files:
603 for f in files:
604 s = self.dirstate.state(f)
604 s = self.dirstate.state(f)
605 if s in 'nmai':
605 if s in 'nmai':
606 commit.append(f)
606 commit.append(f)
607 elif s == 'r':
607 elif s == 'r':
608 remove.append(f)
608 remove.append(f)
609 else:
609 else:
610 self.ui.warn("%s not tracked!\n" % f)
610 self.ui.warn("%s not tracked!\n" % f)
611 else:
611 else:
612 (c, a, d, u) = self.changes(None, None)
612 (c, a, d, u) = self.changes(None, None)
613 commit = c + a
613 commit = c + a
614 remove = d
614 remove = d
615
615
616 if not commit and not remove:
616 if not commit and not remove:
617 self.ui.status("nothing changed\n")
617 self.ui.status("nothing changed\n")
618 return
618 return
619
619
620 if not self.hook("precommit"):
620 if not self.hook("precommit"):
621 return 1
621 return 1
622
622
623 p1, p2 = self.dirstate.parents()
623 p1, p2 = self.dirstate.parents()
624 c1 = self.changelog.read(p1)
624 c1 = self.changelog.read(p1)
625 c2 = self.changelog.read(p2)
625 c2 = self.changelog.read(p2)
626 m1 = self.manifest.read(c1[0])
626 m1 = self.manifest.read(c1[0])
627 mf1 = self.manifest.readflags(c1[0])
627 mf1 = self.manifest.readflags(c1[0])
628 m2 = self.manifest.read(c2[0])
628 m2 = self.manifest.read(c2[0])
629 lock = self.lock()
629 lock = self.lock()
630 tr = self.transaction()
630 tr = self.transaction()
631
631
632 # check in files
632 # check in files
633 new = {}
633 new = {}
634 linkrev = self.changelog.count()
634 linkrev = self.changelog.count()
635 commit.sort()
635 commit.sort()
636 for f in commit:
636 for f in commit:
637 self.ui.note(f + "\n")
637 self.ui.note(f + "\n")
638 try:
638 try:
639 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
639 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
640 t = self.wfile(f).read()
640 t = self.wfile(f).read()
641 except IOError:
641 except IOError:
642 self.warn("trouble committing %s!\n" % f)
642 self.warn("trouble committing %s!\n" % f)
643 raise
643 raise
644
644
645 meta = {}
645 meta = {}
646 cp = self.dirstate.copied(f)
646 cp = self.dirstate.copied(f)
647 if cp:
647 if cp:
648 meta["copy"] = cp
648 meta["copy"] = cp
649 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
649 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
650 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
650 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
651
651
652 r = self.file(f)
652 r = self.file(f)
653 fp1 = m1.get(f, nullid)
653 fp1 = m1.get(f, nullid)
654 fp2 = m2.get(f, nullid)
654 fp2 = m2.get(f, nullid)
655 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
655 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
656
656
657 # update manifest
657 # update manifest
658 m1.update(new)
658 m1.update(new)
659 for f in remove:
659 for f in remove:
660 if f in m1:
660 if f in m1:
661 del m1[f]
661 del m1[f]
662 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0])
662 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0])
663
663
664 # add changeset
664 # add changeset
665 new = new.keys()
665 new = new.keys()
666 new.sort()
666 new.sort()
667
667
668 if not text:
668 if not text:
669 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
669 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
670 edittext += "".join(["HG: changed %s\n" % f for f in new])
670 edittext += "".join(["HG: changed %s\n" % f for f in new])
671 edittext += "".join(["HG: removed %s\n" % f for f in remove])
671 edittext += "".join(["HG: removed %s\n" % f for f in remove])
672 edittext = self.ui.edit(edittext)
672 edittext = self.ui.edit(edittext)
673 if not edittext.rstrip():
673 if not edittext.rstrip():
674 return 1
674 return 1
675 text = edittext
675 text = edittext
676
676
677 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
677 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
678
678
679 if not self.hook("commit", node=hex(n)):
679 if not self.hook("commit", node=hex(n)):
680 return 1
680 return 1
681
681
682 tr.close()
682 tr.close()
683
683
684 self.dirstate.setparents(n)
684 self.dirstate.setparents(n)
685 self.dirstate.update(new, "n")
685 self.dirstate.update(new, "n")
686 self.dirstate.forget(remove)
686 self.dirstate.forget(remove)
687
687
688 def changes(self, node1, node2, files=None):
688 def changes(self, node1, node2, files=None):
689 mf2, u = None, []
689 mf2, u = None, []
690
690
691 def fcmp(fn, mf):
691 def fcmp(fn, mf):
692 t1 = self.wfile(fn).read()
692 t1 = self.wfile(fn).read()
693 t2 = self.file(fn).revision(mf[fn])
693 t2 = self.file(fn).revision(mf[fn])
694 return cmp(t1, t2)
694 return cmp(t1, t2)
695
695
696 # are we comparing the working directory?
696 # are we comparing the working directory?
697 if not node2:
697 if not node2:
698 l, c, a, d, u = self.dirstate.changes(files, self.ignore)
698 l, c, a, d, u = self.dirstate.changes(files, self.ignore)
699
699
700 # are we comparing working dir against its parent?
700 # are we comparing working dir against its parent?
701 if not node1:
701 if not node1:
702 if l:
702 if l:
703 # do a full compare of any files that might have changed
703 # do a full compare of any files that might have changed
704 change = self.changelog.read(self.dirstate.parents()[0])
704 change = self.changelog.read(self.dirstate.parents()[0])
705 mf2 = self.manifest.read(change[0])
705 mf2 = self.manifest.read(change[0])
706 for f in l:
706 for f in l:
707 if fcmp(f, mf2):
707 if fcmp(f, mf2):
708 c.append(f)
708 c.append(f)
709
709
710 for l in c, a, d, u:
710 for l in c, a, d, u:
711 l.sort()
711 l.sort()
712
712
713 return (c, a, d, u)
713 return (c, a, d, u)
714
714
715 # are we comparing working dir against non-tip?
715 # are we comparing working dir against non-tip?
716 # generate a pseudo-manifest for the working dir
716 # generate a pseudo-manifest for the working dir
717 if not node2:
717 if not node2:
718 if not mf2:
718 if not mf2:
719 change = self.changelog.read(self.dirstate.parents()[0])
719 change = self.changelog.read(self.dirstate.parents()[0])
720 mf2 = self.manifest.read(change[0]).copy()
720 mf2 = self.manifest.read(change[0]).copy()
721 for f in a + c + l:
721 for f in a + c + l:
722 mf2[f] = ""
722 mf2[f] = ""
723 for f in d:
723 for f in d:
724 if f in mf2: del mf2[f]
724 if f in mf2: del mf2[f]
725 else:
725 else:
726 change = self.changelog.read(node2)
726 change = self.changelog.read(node2)
727 mf2 = self.manifest.read(change[0])
727 mf2 = self.manifest.read(change[0])
728
728
729 # flush lists from dirstate before comparing manifests
729 # flush lists from dirstate before comparing manifests
730 c, a = [], []
730 c, a = [], []
731
731
732 change = self.changelog.read(node1)
732 change = self.changelog.read(node1)
733 mf1 = self.manifest.read(change[0]).copy()
733 mf1 = self.manifest.read(change[0]).copy()
734
734
735 for fn in mf2:
735 for fn in mf2:
736 if mf1.has_key(fn):
736 if mf1.has_key(fn):
737 if mf1[fn] != mf2[fn]:
737 if mf1[fn] != mf2[fn]:
738 if mf2[fn] != "" or fcmp(fn, mf1):
738 if mf2[fn] != "" or fcmp(fn, mf1):
739 c.append(fn)
739 c.append(fn)
740 del mf1[fn]
740 del mf1[fn]
741 else:
741 else:
742 a.append(fn)
742 a.append(fn)
743
743
744 d = mf1.keys()
744 d = mf1.keys()
745
745
746 for l in c, a, d, u:
746 for l in c, a, d, u:
747 l.sort()
747 l.sort()
748
748
749 return (c, a, d, u)
749 return (c, a, d, u)
750
750
751 def add(self, list):
751 def add(self, list):
752 for f in list:
752 for f in list:
753 p = self.wjoin(f)
753 p = self.wjoin(f)
754 if not os.path.isfile(p):
754 if not os.path.isfile(p):
755 self.ui.warn("%s does not exist!\n" % f)
755 self.ui.warn("%s does not exist!\n" % f)
756 elif self.dirstate.state(f) == 'n':
756 elif self.dirstate.state(f) == 'n':
757 self.ui.warn("%s already tracked!\n" % f)
757 self.ui.warn("%s already tracked!\n" % f)
758 else:
758 else:
759 self.dirstate.update([f], "a")
759 self.dirstate.update([f], "a")
760
760
761 def forget(self, list):
761 def forget(self, list):
762 for f in list:
762 for f in list:
763 if self.dirstate.state(f) not in 'ai':
763 if self.dirstate.state(f) not in 'ai':
764 self.ui.warn("%s not added!\n" % f)
764 self.ui.warn("%s not added!\n" % f)
765 else:
765 else:
766 self.dirstate.forget([f])
766 self.dirstate.forget([f])
767
767
768 def remove(self, list):
768 def remove(self, list):
769 for f in list:
769 for f in list:
770 p = self.wjoin(f)
770 p = self.wjoin(f)
771 if os.path.isfile(p):
771 if os.path.isfile(p):
772 self.ui.warn("%s still exists!\n" % f)
772 self.ui.warn("%s still exists!\n" % f)
773 elif self.dirstate.state(f) == 'a':
773 elif self.dirstate.state(f) == 'a':
774 self.ui.warn("%s never committed!\n" % f)
774 self.ui.warn("%s never committed!\n" % f)
775 self.dirstate.forget(f)
775 self.dirstate.forget(f)
776 elif f not in self.dirstate:
776 elif f not in self.dirstate:
777 self.ui.warn("%s not tracked!\n" % f)
777 self.ui.warn("%s not tracked!\n" % f)
778 else:
778 else:
779 self.dirstate.update([f], "r")
779 self.dirstate.update([f], "r")
780
780
781 def copy(self, source, dest):
781 def copy(self, source, dest):
782 p = self.wjoin(dest)
782 p = self.wjoin(dest)
783 if not os.path.isfile(dest):
783 if not os.path.isfile(dest):
784 self.ui.warn("%s does not exist!\n" % dest)
784 self.ui.warn("%s does not exist!\n" % dest)
785 else:
785 else:
786 if self.dirstate.state(dest) == '?':
786 if self.dirstate.state(dest) == '?':
787 self.dirstate.update([dest], "a")
787 self.dirstate.update([dest], "a")
788 self.dirstate.copy(source, dest)
788 self.dirstate.copy(source, dest)
789
789
790 def heads(self):
790 def heads(self):
791 return self.changelog.heads()
791 return self.changelog.heads()
792
792
793 def branches(self, nodes):
793 def branches(self, nodes):
794 if not nodes: nodes = [self.changelog.tip()]
794 if not nodes: nodes = [self.changelog.tip()]
795 b = []
795 b = []
796 for n in nodes:
796 for n in nodes:
797 t = n
797 t = n
798 while n:
798 while n:
799 p = self.changelog.parents(n)
799 p = self.changelog.parents(n)
800 if p[1] != nullid or p[0] == nullid:
800 if p[1] != nullid or p[0] == nullid:
801 b.append((t, n, p[0], p[1]))
801 b.append((t, n, p[0], p[1]))
802 break
802 break
803 n = p[0]
803 n = p[0]
804 return b
804 return b
805
805
806 def between(self, pairs):
806 def between(self, pairs):
807 r = []
807 r = []
808
808
809 for top, bottom in pairs:
809 for top, bottom in pairs:
810 n, l, i = top, [], 0
810 n, l, i = top, [], 0
811 f = 1
811 f = 1
812
812
813 while n != bottom:
813 while n != bottom:
814 p = self.changelog.parents(n)[0]
814 p = self.changelog.parents(n)[0]
815 if i == f:
815 if i == f:
816 l.append(n)
816 l.append(n)
817 f = f * 2
817 f = f * 2
818 n = p
818 n = p
819 i += 1
819 i += 1
820
820
821 r.append(l)
821 r.append(l)
822
822
823 return r
823 return r
824
824
825 def newer(self, nodes):
825 def newer(self, nodes):
826 m = {}
826 m = {}
827 nl = []
827 nl = []
828 pm = {}
828 pm = {}
829 cl = self.changelog
829 cl = self.changelog
830 t = l = cl.count()
830 t = l = cl.count()
831
831
832 # find the lowest numbered node
832 # find the lowest numbered node
833 for n in nodes:
833 for n in nodes:
834 l = min(l, cl.rev(n))
834 l = min(l, cl.rev(n))
835 m[n] = 1
835 m[n] = 1
836
836
837 for i in xrange(l, t):
837 for i in xrange(l, t):
838 n = cl.node(i)
838 n = cl.node(i)
839 if n in m: # explicitly listed
839 if n in m: # explicitly listed
840 pm[n] = 1
840 pm[n] = 1
841 nl.append(n)
841 nl.append(n)
842 continue
842 continue
843 for p in cl.parents(n):
843 for p in cl.parents(n):
844 if p in pm: # parent listed
844 if p in pm: # parent listed
845 pm[n] = 1
845 pm[n] = 1
846 nl.append(n)
846 nl.append(n)
847 break
847 break
848
848
849 return nl
849 return nl
850
850
851 def findincoming(self, remote):
851 def findincoming(self, remote):
852 m = self.changelog.nodemap
852 m = self.changelog.nodemap
853 search = []
853 search = []
854 fetch = []
854 fetch = []
855 base = {}
855 seen = {}
856 seen = {}
856 seenbranch = {}
857 seenbranch = {}
857
858
858 # if we have an empty repo, fetch everything
859 # if we have an empty repo, fetch everything
859 if self.changelog.tip() == nullid:
860 if self.changelog.tip() == nullid:
860 self.ui.status("requesting all changes\n")
861 self.ui.status("requesting all changes\n")
861 return [nullid]
862 return [nullid]
862
863
863 # otherwise, assume we're closer to the tip than the root
864 # otherwise, assume we're closer to the tip than the root
865 # and start by examining the heads
864 self.ui.status("searching for changes\n")
866 self.ui.status("searching for changes\n")
865 heads = remote.heads()
867 heads = remote.heads()
866 unknown = []
868 unknown = []
867 for h in heads:
869 for h in heads:
868 if h not in m:
870 if h not in m:
869 unknown.append(h)
871 unknown.append(h)
870
872
871 if not unknown:
873 if not unknown:
872 return None
874 return None
873
875
874 rep = {}
876 rep = {}
875 reqcnt = 0
877 reqcnt = 0
876
878
879 # search through remote branches
880 # a 'branch' here is a linear segment of history, with four parts:
881 # head, root, first parent, second parent
882 # (a branch always has two parents (or none) by definition)
877 unknown = remote.branches(unknown)
883 unknown = remote.branches(unknown)
878 while unknown:
884 while unknown:
879 r = []
885 r = []
880 while unknown:
886 while unknown:
881 n = unknown.pop(0)
887 n = unknown.pop(0)
882 if n[0] in seen:
888 if n[0] in seen:
883 continue
889 continue
884
890
885 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
891 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
886 if n[0] == nullid:
892 if n[0] == nullid:
887 break
893 break
888 if n in seenbranch:
894 if n in seenbranch:
889 self.ui.debug("branch already found\n")
895 self.ui.debug("branch already found\n")
890 continue
896 continue
891 if n[1] and n[1] in m: # do we know the base?
897 if n[1] and n[1] in m: # do we know the base?
892 self.ui.debug("found incomplete branch %s:%s\n"
898 self.ui.debug("found incomplete branch %s:%s\n"
893 % (short(n[0]), short(n[1])))
899 % (short(n[0]), short(n[1])))
894 search.append(n) # schedule branch range for scanning
900 search.append(n) # schedule branch range for scanning
895 seenbranch[n] = 1
901 seenbranch[n] = 1
896 else:
902 else:
897 if n[1] not in seen and n[1] not in fetch:
903 if n[1] not in seen and n[1] not in fetch:
898 if n[2] in m and n[3] in m:
904 if n[2] in m and n[3] in m:
899 self.ui.debug("found new changeset %s\n" %
905 self.ui.debug("found new changeset %s\n" %
900 short(n[1]))
906 short(n[1]))
901 fetch.append(n[1]) # earliest unknown
907 fetch.append(n[1]) # earliest unknown
908 base[n[2]] = 1 # latest known
902 continue
909 continue
903
910
904 for a in n[2:4]:
911 for a in n[2:4]:
905 if a not in rep:
912 if a not in rep:
906 r.append(a)
913 r.append(a)
907 rep[a] = 1
914 rep[a] = 1
908
915
909 seen[n[0]] = 1
916 seen[n[0]] = 1
910
917
911 if r:
918 if r:
912 reqcnt += 1
919 reqcnt += 1
913 self.ui.debug("request %d: %s\n" %
920 self.ui.debug("request %d: %s\n" %
914 (reqcnt, " ".join(map(short, r))))
921 (reqcnt, " ".join(map(short, r))))
915 for p in range(0, len(r), 10):
922 for p in range(0, len(r), 10):
916 for b in remote.branches(r[p:p+10]):
923 for b in remote.branches(r[p:p+10]):
917 self.ui.debug("received %s:%s\n" %
924 self.ui.debug("received %s:%s\n" %
918 (short(b[0]), short(b[1])))
925 (short(b[0]), short(b[1])))
919 if b[0] not in m and b[0] not in seen:
926 if b[0] not in m and b[0] not in seen:
920 unknown.append(b)
927 unknown.append(b)
921
928
929 # do binary search on the branches we found
922 while search:
930 while search:
923 n = search.pop(0)
931 n = search.pop(0)
924 reqcnt += 1
932 reqcnt += 1
925 l = remote.between([(n[0], n[1])])[0]
933 l = remote.between([(n[0], n[1])])[0]
926 l.append(n[1])
934 l.append(n[1])
927 p = n[0]
935 p = n[0]
928 f = 1
936 f = 1
929 for i in l:
937 for i in l:
930 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
938 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
931 if i in m:
939 if i in m:
932 if f <= 2:
940 if f <= 2:
933 self.ui.debug("found new branch changeset %s\n" %
941 self.ui.debug("found new branch changeset %s\n" %
934 short(p))
942 short(p))
935 fetch.append(p)
943 fetch.append(p)
944 base[i] = 1
936 else:
945 else:
937 self.ui.debug("narrowed branch search to %s:%s\n"
946 self.ui.debug("narrowed branch search to %s:%s\n"
938 % (short(p), short(i)))
947 % (short(p), short(i)))
939 search.append((p, i))
948 search.append((p, i))
940 break
949 break
941 p, f = i, f * 2
950 p, f = i, f * 2
942
951
952 # sanity check our fetch list
943 for f in fetch:
953 for f in fetch:
944 if f in m:
954 if f in m:
945 raise RepoError("already have changeset " + short(f[:4]))
955 raise RepoError("already have changeset " + short(f[:4]))
946
956
947 if fetch == [nullid]:
957 if base.keys() == [nullid]:
948 self.ui.warn("warning: pulling from an unrelated repository!\n")
958 self.ui.warn("warning: pulling from an unrelated repository!\n")
949
959
950 self.ui.note("adding new changesets starting at " +
960 self.ui.note("adding new changesets starting at " +
951 " ".join([short(f) for f in fetch]) + "\n")
961 " ".join([short(f) for f in fetch]) + "\n")
952
962
953 self.ui.debug("%d total queries\n" % reqcnt)
963 self.ui.debug("%d total queries\n" % reqcnt)
954
964
955 return fetch
965 return fetch
956
966
957 def changegroup(self, basenodes):
967 def changegroup(self, basenodes):
958 nodes = self.newer(basenodes)
968 nodes = self.newer(basenodes)
959
969
960 # construct the link map
970 # construct the link map
961 linkmap = {}
971 linkmap = {}
962 for n in nodes:
972 for n in nodes:
963 linkmap[self.changelog.rev(n)] = n
973 linkmap[self.changelog.rev(n)] = n
964
974
965 # construct a list of all changed files
975 # construct a list of all changed files
966 changed = {}
976 changed = {}
967 for n in nodes:
977 for n in nodes:
968 c = self.changelog.read(n)
978 c = self.changelog.read(n)
969 for f in c[3]:
979 for f in c[3]:
970 changed[f] = 1
980 changed[f] = 1
971 changed = changed.keys()
981 changed = changed.keys()
972 changed.sort()
982 changed.sort()
973
983
974 # the changegroup is changesets + manifests + all file revs
984 # the changegroup is changesets + manifests + all file revs
975 revs = [ self.changelog.rev(n) for n in nodes ]
985 revs = [ self.changelog.rev(n) for n in nodes ]
976
986
977 for y in self.changelog.group(linkmap): yield y
987 for y in self.changelog.group(linkmap): yield y
978 for y in self.manifest.group(linkmap): yield y
988 for y in self.manifest.group(linkmap): yield y
979 for f in changed:
989 for f in changed:
980 yield struct.pack(">l", len(f) + 4) + f
990 yield struct.pack(">l", len(f) + 4) + f
981 g = self.file(f).group(linkmap)
991 g = self.file(f).group(linkmap)
982 for y in g:
992 for y in g:
983 yield y
993 yield y
984
994
985 def addchangegroup(self, generator):
995 def addchangegroup(self, generator):
986
996
987 class genread:
997 class genread:
988 def __init__(self, generator):
998 def __init__(self, generator):
989 self.g = generator
999 self.g = generator
990 self.buf = ""
1000 self.buf = ""
991 def read(self, l):
1001 def read(self, l):
992 while l > len(self.buf):
1002 while l > len(self.buf):
993 try:
1003 try:
994 self.buf += self.g.next()
1004 self.buf += self.g.next()
995 except StopIteration:
1005 except StopIteration:
996 break
1006 break
997 d, self.buf = self.buf[:l], self.buf[l:]
1007 d, self.buf = self.buf[:l], self.buf[l:]
998 return d
1008 return d
999
1009
1000 def getchunk():
1010 def getchunk():
1001 d = source.read(4)
1011 d = source.read(4)
1002 if not d: return ""
1012 if not d: return ""
1003 l = struct.unpack(">l", d)[0]
1013 l = struct.unpack(">l", d)[0]
1004 if l <= 4: return ""
1014 if l <= 4: return ""
1005 return source.read(l - 4)
1015 return source.read(l - 4)
1006
1016
1007 def getgroup():
1017 def getgroup():
1008 while 1:
1018 while 1:
1009 c = getchunk()
1019 c = getchunk()
1010 if not c: break
1020 if not c: break
1011 yield c
1021 yield c
1012
1022
1013 def csmap(x):
1023 def csmap(x):
1014 self.ui.debug("add changeset %s\n" % short(x))
1024 self.ui.debug("add changeset %s\n" % short(x))
1015 return self.changelog.count()
1025 return self.changelog.count()
1016
1026
1017 def revmap(x):
1027 def revmap(x):
1018 return self.changelog.rev(x)
1028 return self.changelog.rev(x)
1019
1029
1020 if not generator: return
1030 if not generator: return
1021 changesets = files = revisions = 0
1031 changesets = files = revisions = 0
1022
1032
1023 source = genread(generator)
1033 source = genread(generator)
1024 lock = self.lock()
1034 lock = self.lock()
1025 tr = self.transaction()
1035 tr = self.transaction()
1026
1036
1027 # pull off the changeset group
1037 # pull off the changeset group
1028 self.ui.status("adding changesets\n")
1038 self.ui.status("adding changesets\n")
1029 co = self.changelog.tip()
1039 co = self.changelog.tip()
1030 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1040 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1031 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1041 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1032
1042
1033 # pull off the manifest group
1043 # pull off the manifest group
1034 self.ui.status("adding manifests\n")
1044 self.ui.status("adding manifests\n")
1035 mm = self.manifest.tip()
1045 mm = self.manifest.tip()
1036 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1046 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1037
1047
1038 # process the files
1048 # process the files
1039 self.ui.status("adding file revisions\n")
1049 self.ui.status("adding file revisions\n")
1040 while 1:
1050 while 1:
1041 f = getchunk()
1051 f = getchunk()
1042 if not f: break
1052 if not f: break
1043 self.ui.debug("adding %s revisions\n" % f)
1053 self.ui.debug("adding %s revisions\n" % f)
1044 fl = self.file(f)
1054 fl = self.file(f)
1045 o = fl.count()
1055 o = fl.count()
1046 n = fl.addgroup(getgroup(), revmap, tr)
1056 n = fl.addgroup(getgroup(), revmap, tr)
1047 revisions += fl.count() - o
1057 revisions += fl.count() - o
1048 files += 1
1058 files += 1
1049
1059
1050 self.ui.status(("modified %d files, added %d changesets" +
1060 self.ui.status(("modified %d files, added %d changesets" +
1051 " and %d new revisions\n")
1061 " and %d new revisions\n")
1052 % (files, changesets, revisions))
1062 % (files, changesets, revisions))
1053
1063
1054 tr.close()
1064 tr.close()
1055 return
1065 return
1056
1066
1057 def update(self, node, allow=False, force=False):
1067 def update(self, node, allow=False, force=False):
1058 pl = self.dirstate.parents()
1068 pl = self.dirstate.parents()
1059 if not force and pl[1] != nullid:
1069 if not force and pl[1] != nullid:
1060 self.ui.warn("aborting: outstanding uncommitted merges\n")
1070 self.ui.warn("aborting: outstanding uncommitted merges\n")
1061 return
1071 return
1062
1072
1063 p1, p2 = pl[0], node
1073 p1, p2 = pl[0], node
1064 pa = self.changelog.ancestor(p1, p2)
1074 pa = self.changelog.ancestor(p1, p2)
1065 m1n = self.changelog.read(p1)[0]
1075 m1n = self.changelog.read(p1)[0]
1066 m2n = self.changelog.read(p2)[0]
1076 m2n = self.changelog.read(p2)[0]
1067 man = self.manifest.ancestor(m1n, m2n)
1077 man = self.manifest.ancestor(m1n, m2n)
1068 m1 = self.manifest.read(m1n)
1078 m1 = self.manifest.read(m1n)
1069 mf1 = self.manifest.readflags(m1n)
1079 mf1 = self.manifest.readflags(m1n)
1070 m2 = self.manifest.read(m2n)
1080 m2 = self.manifest.read(m2n)
1071 mf2 = self.manifest.readflags(m2n)
1081 mf2 = self.manifest.readflags(m2n)
1072 ma = self.manifest.read(man)
1082 ma = self.manifest.read(man)
1073 mfa = self.manifest.readflags(man)
1083 mfa = self.manifest.readflags(man)
1074
1084
1075 (c, a, d, u) = self.changes(None, None)
1085 (c, a, d, u) = self.changes(None, None)
1076
1086
1077 # is this a jump, or a merge? i.e. is there a linear path
1087 # is this a jump, or a merge? i.e. is there a linear path
1078 # from p1 to p2?
1088 # from p1 to p2?
1079 linear_path = (pa == p1 or pa == p2)
1089 linear_path = (pa == p1 or pa == p2)
1080
1090
1081 # resolve the manifest to determine which files
1091 # resolve the manifest to determine which files
1082 # we care about merging
1092 # we care about merging
1083 self.ui.note("resolving manifests\n")
1093 self.ui.note("resolving manifests\n")
1084 self.ui.debug(" ancestor %s local %s remote %s\n" %
1094 self.ui.debug(" ancestor %s local %s remote %s\n" %
1085 (short(man), short(m1n), short(m2n)))
1095 (short(man), short(m1n), short(m2n)))
1086
1096
1087 merge = {}
1097 merge = {}
1088 get = {}
1098 get = {}
1089 remove = []
1099 remove = []
1090 mark = {}
1100 mark = {}
1091
1101
1092 # construct a working dir manifest
1102 # construct a working dir manifest
1093 mw = m1.copy()
1103 mw = m1.copy()
1094 mfw = mf1.copy()
1104 mfw = mf1.copy()
1095 umap = dict.fromkeys(u)
1105 umap = dict.fromkeys(u)
1096
1106
1097 for f in a + c + u:
1107 for f in a + c + u:
1098 mw[f] = ""
1108 mw[f] = ""
1099 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1109 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1100
1110
1101 for f in d:
1111 for f in d:
1102 if f in mw: del mw[f]
1112 if f in mw: del mw[f]
1103
1113
1104 # If we're jumping between revisions (as opposed to merging),
1114 # If we're jumping between revisions (as opposed to merging),
1105 # and if neither the working directory nor the target rev has
1115 # and if neither the working directory nor the target rev has
1106 # the file, then we need to remove it from the dirstate, to
1116 # the file, then we need to remove it from the dirstate, to
1107 # prevent the dirstate from listing the file when it is no
1117 # prevent the dirstate from listing the file when it is no
1108 # longer in the manifest.
1118 # longer in the manifest.
1109 if linear_path and f not in m2:
1119 if linear_path and f not in m2:
1110 self.dirstate.forget((f,))
1120 self.dirstate.forget((f,))
1111
1121
1112 # Compare manifests
1122 # Compare manifests
1113 for f, n in mw.iteritems():
1123 for f, n in mw.iteritems():
1114 if f in m2:
1124 if f in m2:
1115 s = 0
1125 s = 0
1116
1126
1117 # is the wfile new since m1, and does it match m2?
1127 # is the wfile new since m1, and does it match m2?
1118 if f not in m1:
1128 if f not in m1:
1119 t1 = self.wfile(f).read()
1129 t1 = self.wfile(f).read()
1120 t2 = self.file(f).revision(m2[f])
1130 t2 = self.file(f).revision(m2[f])
1121 if cmp(t1, t2) == 0:
1131 if cmp(t1, t2) == 0:
1122 mark[f] = 1
1132 mark[f] = 1
1123 n = m2[f]
1133 n = m2[f]
1124 del t1, t2
1134 del t1, t2
1125
1135
1126 # are files different?
1136 # are files different?
1127 if n != m2[f]:
1137 if n != m2[f]:
1128 a = ma.get(f, nullid)
1138 a = ma.get(f, nullid)
1129 # are both different from the ancestor?
1139 # are both different from the ancestor?
1130 if n != a and m2[f] != a:
1140 if n != a and m2[f] != a:
1131 self.ui.debug(" %s versions differ, resolve\n" % f)
1141 self.ui.debug(" %s versions differ, resolve\n" % f)
1132 # merge executable bits
1142 # merge executable bits
1133 # "if we changed or they changed, change in merge"
1143 # "if we changed or they changed, change in merge"
1134 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1144 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
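# the xor expression below keeps the ancestor's bit unless one side
# flipped it: whichever of the working copy or the remote manifest
# differs from the ancestor wins, so an exec-flag change made on
# either side survives the merge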
1135 mode = ((a^b) | (a^c)) ^ a
1145 mode = ((a^b) | (a^c)) ^ a
1136 merge[f] = (m1.get(f, nullid), m2[f], mode)
1146 merge[f] = (m1.get(f, nullid), m2[f], mode)
1137 s = 1
1147 s = 1
1138 # are we clobbering?
1148 # are we clobbering?
1139 # is remote's version newer?
1149 # is remote's version newer?
1140 # or are we going back in time?
1150 # or are we going back in time?
1141 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1151 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1142 self.ui.debug(" remote %s is newer, get\n" % f)
1152 self.ui.debug(" remote %s is newer, get\n" % f)
1143 get[f] = m2[f]
1153 get[f] = m2[f]
1144 s = 1
1154 s = 1
1145 else:
1155 else:
1146 mark[f] = 1
1156 mark[f] = 1
1147 elif f in umap:
1157 elif f in umap:
1148 # this unknown file is the same as the checkout
1158 # this unknown file is the same as the checkout
1149 get[f] = m2[f]
1159 get[f] = m2[f]
1150
1160
1151 if not s and mfw[f] != mf2[f]:
1161 if not s and mfw[f] != mf2[f]:
1152 if force:
1162 if force:
1153 self.ui.debug(" updating permissions for %s\n" % f)
1163 self.ui.debug(" updating permissions for %s\n" % f)
1154 util.set_exec(self.wjoin(f), mf2[f])
1164 util.set_exec(self.wjoin(f), mf2[f])
1155 else:
1165 else:
1156 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1166 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1157 mode = ((a^b) | (a^c)) ^ a
1167 mode = ((a^b) | (a^c)) ^ a
1158 if mode != b:
1168 if mode != b:
1159 self.ui.debug(" updating permissions for %s\n" % f)
1169 self.ui.debug(" updating permissions for %s\n" % f)
1160 util.set_exec(self.wjoin(f), mode)
1170 util.set_exec(self.wjoin(f), mode)
1161 mark[f] = 1
1171 mark[f] = 1
1162 del m2[f]
1172 del m2[f]
1163 elif f in ma:
1173 elif f in ma:
1164 if not force and n != ma[f]:
1174 if not force and n != ma[f]:
1165 r = ""
1175 r = ""
1166 if linear_path or allow:
1176 if linear_path or allow:
1167 r = self.ui.prompt(
1177 r = self.ui.prompt(
1168 (" local changed %s which remote deleted\n" % f) +
1178 (" local changed %s which remote deleted\n" % f) +
1169 "(k)eep or (d)elete?", "[kd]", "k")
1179 "(k)eep or (d)elete?", "[kd]", "k")
1170 if r == "d":
1180 if r == "d":
1171 remove.append(f)
1181 remove.append(f)
1172 else:
1182 else:
1173 self.ui.debug("other deleted %s\n" % f)
1183 self.ui.debug("other deleted %s\n" % f)
1174 remove.append(f) # other deleted it
1184 remove.append(f) # other deleted it
1175 else:
1185 else:
1176 if n == m1.get(f, nullid): # same as parent
1186 if n == m1.get(f, nullid): # same as parent
1177 if p2 == pa: # going backwards?
1187 if p2 == pa: # going backwards?
1178 self.ui.debug("remote deleted %s\n" % f)
1188 self.ui.debug("remote deleted %s\n" % f)
1179 remove.append(f)
1189 remove.append(f)
1180 else:
1190 else:
1181 self.ui.debug("local created %s, keeping\n" % f)
1191 self.ui.debug("local created %s, keeping\n" % f)
1182 else:
1192 else:
1183 self.ui.debug("working dir created %s, keeping\n" % f)
1193 self.ui.debug("working dir created %s, keeping\n" % f)
1184
1194
1185 for f, n in m2.iteritems():
1195 for f, n in m2.iteritems():
1186 if f[0] == "/": continue
1196 if f[0] == "/": continue
1187 if not force and f in ma and n != ma[f]:
1197 if not force and f in ma and n != ma[f]:
1188 r = ""
1198 r = ""
1189 if linear_path or allow:
1199 if linear_path or allow:
1190 r = self.ui.prompt(
1200 r = self.ui.prompt(
1191 ("remote changed %s which local deleted\n" % f) +
1201 ("remote changed %s which local deleted\n" % f) +
1192 "(k)eep or (d)elete?", "[kd]", "k")
1202 "(k)eep or (d)elete?", "[kd]", "k")
1193 if r == "d": remove.append(f)
1203 if r == "d": remove.append(f)
1194 else:
1204 else:
1195 self.ui.debug("remote created %s\n" % f)
1205 self.ui.debug("remote created %s\n" % f)
1196 get[f] = n
1206 get[f] = n
1197
1207
1198 del mw, m1, m2, ma
1208 del mw, m1, m2, ma
1199
1209
1200 if force:
1210 if force:
1201 for f in merge:
1211 for f in merge:
1202 get[f] = merge[f][1]
1212 get[f] = merge[f][1]
1203 merge = {}
1213 merge = {}
1204
1214
1205 if linear_path:
1215 if linear_path:
1206 # we don't need to do any magic, just jump to the new rev
1216 # we don't need to do any magic, just jump to the new rev
1207 mode = 'n'
1217 mode = 'n'
1208 p1, p2 = p2, nullid
1218 p1, p2 = p2, nullid
1209 else:
1219 else:
1210 if not allow:
1220 if not allow:
1211 self.ui.status("this update spans a branch" +
1221 self.ui.status("this update spans a branch" +
1212 " affecting the following files:\n")
1222 " affecting the following files:\n")
1213 fl = merge.keys() + get.keys()
1223 fl = merge.keys() + get.keys()
1214 fl.sort()
1224 fl.sort()
1215 for f in fl:
1225 for f in fl:
1216 cf = ""
1226 cf = ""
1217 if f in merge: cf = " (resolve)"
1227 if f in merge: cf = " (resolve)"
1218 self.ui.status(" %s%s\n" % (f, cf))
1228 self.ui.status(" %s%s\n" % (f, cf))
1219 self.ui.warn("aborting update spanning branches!\n")
1229 self.ui.warn("aborting update spanning branches!\n")
1220 self.ui.status("(use update -m to perform a branch merge)\n")
1230 self.ui.status("(use update -m to perform a branch merge)\n")
1221 return 1
1231 return 1
1222 # we have to remember what files we needed to get/change
1232 # we have to remember what files we needed to get/change
1223 # because any file that's different from either one of its
1233 # because any file that's different from either one of its
1224 # parents must be in the changeset
1234 # parents must be in the changeset
1225 mode = 'm'
1235 mode = 'm'
1226 self.dirstate.update(mark.keys(), "m")
1236 self.dirstate.update(mark.keys(), "m")
1227
1237
1228 self.dirstate.setparents(p1, p2)
1238 self.dirstate.setparents(p1, p2)
1229
1239
1230 # get the files we don't need to change
1240 # get the files we don't need to change
1231 files = get.keys()
1241 files = get.keys()
1232 files.sort()
1242 files.sort()
1233 for f in files:
1243 for f in files:
1234 if f[0] == "/": continue
1244 if f[0] == "/": continue
1235 self.ui.note("getting %s\n" % f)
1245 self.ui.note("getting %s\n" % f)
1236 t = self.file(f).read(get[f])
1246 t = self.file(f).read(get[f])
1237 try:
1247 try:
1238 self.wfile(f, "w").write(t)
1248 self.wfile(f, "w").write(t)
1239 except IOError:
1249 except IOError:
1240 os.makedirs(os.path.dirname(self.wjoin(f)))
1250 os.makedirs(os.path.dirname(self.wjoin(f)))
1241 self.wfile(f, "w").write(t)
1251 self.wfile(f, "w").write(t)
1242 util.set_exec(self.wjoin(f), mf2[f])
1252 util.set_exec(self.wjoin(f), mf2[f])
1243 self.dirstate.update([f], mode)
1253 self.dirstate.update([f], mode)
1244
1254
1245 # merge the tricky bits
1255 # merge the tricky bits
1246 files = merge.keys()
1256 files = merge.keys()
1247 files.sort()
1257 files.sort()
1248 for f in files:
1258 for f in files:
1249 self.ui.status("merging %s\n" % f)
1259 self.ui.status("merging %s\n" % f)
1250 m, o, flag = merge[f]
1260 m, o, flag = merge[f]
1251 self.merge3(f, m, o)
1261 self.merge3(f, m, o)
1252 util.set_exec(self.wjoin(f), flag)
1262 util.set_exec(self.wjoin(f), flag)
1253 self.dirstate.update([f], 'm')
1263 self.dirstate.update([f], 'm')
1254
1264
1255 for f in remove:
1265 for f in remove:
1256 self.ui.note("removing %s\n" % f)
1266 self.ui.note("removing %s\n" % f)
1257 os.unlink(f)
1267 os.unlink(f)
1258 # try removing directories that might now be empty
1268 # try removing directories that might now be empty
1259 try: os.removedirs(os.path.dirname(f))
1269 try: os.removedirs(os.path.dirname(f))
1260 except: pass
1270 except: pass
1261 if mode == 'n':
1271 if mode == 'n':
1262 self.dirstate.forget(remove)
1272 self.dirstate.forget(remove)
1263 else:
1273 else:
1264 self.dirstate.update(remove, 'r')
1274 self.dirstate.update(remove, 'r')
1265
1275
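# The "jump or merge" test in update() above boils down to an ancestry
# check; a minimal standalone sketch (hypothetical helper, not part of
# hg.py), assuming a changelog object with the ancestor() method used
# above:
def is_linear(changelog, p1, p2):
    # a plain jump is possible when one revision is an ancestor of the
    # other, i.e. their common ancestor is p1 or p2 itself
    pa = changelog.ancestor(p1, p2)
    return pa == p1 or pa == p2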
1266 def merge3(self, fn, my, other):
1276 def merge3(self, fn, my, other):
1267 """perform a 3-way merge in the working directory"""
1277 """perform a 3-way merge in the working directory"""
1268
1278
1269 def temp(prefix, node):
1279 def temp(prefix, node):
1270 pre = "%s~%s." % (os.path.basename(fn), prefix)
1280 pre = "%s~%s." % (os.path.basename(fn), prefix)
1271 (fd, name) = tempfile.mkstemp("", pre)
1281 (fd, name) = tempfile.mkstemp("", pre)
1272 f = os.fdopen(fd, "wb")
1282 f = os.fdopen(fd, "wb")
1273 f.write(fl.revision(node))
1283 f.write(fl.revision(node))
1274 f.close()
1284 f.close()
1275 return name
1285 return name
1276
1286
1277 fl = self.file(fn)
1287 fl = self.file(fn)
1278 base = fl.ancestor(my, other)
1288 base = fl.ancestor(my, other)
1279 a = self.wjoin(fn)
1289 a = self.wjoin(fn)
1280 b = temp("base", base)
1290 b = temp("base", base)
1281 c = temp("other", other)
1291 c = temp("other", other)
1282
1292
1283 self.ui.note("resolving %s\n" % fn)
1293 self.ui.note("resolving %s\n" % fn)
1284 self.ui.debug("file %s: other %s ancestor %s\n" %
1294 self.ui.debug("file %s: other %s ancestor %s\n" %
1285 (fn, short(other), short(base)))
1295 (fn, short(other), short(base)))
1286
1296
1287 cmd = os.environ.get("HGMERGE", "hgmerge")
1297 cmd = os.environ.get("HGMERGE", "hgmerge")
1288 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1298 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1289 if r:
1299 if r:
1290 self.ui.warn("merging %s failed!\n" % fn)
1300 self.ui.warn("merging %s failed!\n" % fn)
1291
1301
1292 os.unlink(b)
1302 os.unlink(b)
1293 os.unlink(c)
1303 os.unlink(c)
1294
1304
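# merge3() above delegates the actual 3-way merge to an external tool;
# a rough sketch of the same lookup and call convention (file names are
# placeholders), assuming a POSIX shell:
import os

def run_merge_tool(local, base, other):
    # the tool comes from $HGMERGE, falling back to an "hgmerge"
    # script on the PATH; it is passed the local, base and other paths
    # and reports failure through a non-zero exit status
    cmd = os.environ.get("HGMERGE", "hgmerge")
    return os.system("%s %s %s %s" % (cmd, local, base, other))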
1295 def verify(self):
1305 def verify(self):
1296 filelinkrevs = {}
1306 filelinkrevs = {}
1297 filenodes = {}
1307 filenodes = {}
1298 changesets = revisions = files = 0
1308 changesets = revisions = files = 0
1299 errors = 0
1309 errors = 0
1300
1310
1301 seen = {}
1311 seen = {}
1302 self.ui.status("checking changesets\n")
1312 self.ui.status("checking changesets\n")
1303 for i in range(self.changelog.count()):
1313 for i in range(self.changelog.count()):
1304 changesets += 1
1314 changesets += 1
1305 n = self.changelog.node(i)
1315 n = self.changelog.node(i)
1306 if n in seen:
1316 if n in seen:
1307 self.ui.warn("duplicate changeset at revision %d\n" % i)
1317 self.ui.warn("duplicate changeset at revision %d\n" % i)
1308 errors += 1
1318 errors += 1
1309 seen[n] = 1
1319 seen[n] = 1
1310
1320
1311 for p in self.changelog.parents(n):
1321 for p in self.changelog.parents(n):
1312 if p not in self.changelog.nodemap:
1322 if p not in self.changelog.nodemap:
1313 self.ui.warn("changeset %s has unknown parent %s\n" %
1323 self.ui.warn("changeset %s has unknown parent %s\n" %
1314 (short(n), short(p)))
1324 (short(n), short(p)))
1315 errors += 1
1325 errors += 1
1316 try:
1326 try:
1317 changes = self.changelog.read(n)
1327 changes = self.changelog.read(n)
1318 except Exception, inst:
1328 except Exception, inst:
1319 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1329 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1320 errors += 1
1330 errors += 1
1321
1331
1322 for f in changes[3]:
1332 for f in changes[3]:
1323 filelinkrevs.setdefault(f, []).append(i)
1333 filelinkrevs.setdefault(f, []).append(i)
1324
1334
1325 seen = {}
1335 seen = {}
1326 self.ui.status("checking manifests\n")
1336 self.ui.status("checking manifests\n")
1327 for i in range(self.manifest.count()):
1337 for i in range(self.manifest.count()):
1328 n = self.manifest.node(i)
1338 n = self.manifest.node(i)
1329 if n in seen:
1339 if n in seen:
1330 self.ui.warn("duplicate manifest at revision %d\n" % i)
1340 self.ui.warn("duplicate manifest at revision %d\n" % i)
1331 errors += 1
1341 errors += 1
1332 seen[n] = 1
1342 seen[n] = 1
1333
1343
1334 for p in self.manifest.parents(n):
1344 for p in self.manifest.parents(n):
1335 if p not in self.manifest.nodemap:
1345 if p not in self.manifest.nodemap:
1336 self.ui.warn("manifest %s has unknown parent %s\n" %
1346 self.ui.warn("manifest %s has unknown parent %s\n" %
1337 (short(n), short(p)))
1347 (short(n), short(p)))
1338 errors += 1
1348 errors += 1
1339
1349
1340 try:
1350 try:
1341 delta = mdiff.patchtext(self.manifest.delta(n))
1351 delta = mdiff.patchtext(self.manifest.delta(n))
1342 except KeyboardInterrupt:
1352 except KeyboardInterrupt:
1343 print "aborted"
1353 print "aborted"
1344 sys.exit(0)
1354 sys.exit(0)
1345 except Exception, inst:
1355 except Exception, inst:
1346 self.ui.warn("unpacking manifest %s: %s\n"
1356 self.ui.warn("unpacking manifest %s: %s\n"
1347 % (short(n), inst))
1357 % (short(n), inst))
1348 errors += 1
1358 errors += 1
1349
1359
1350 ff = [ l.split('\0') for l in delta.splitlines() ]
1360 ff = [ l.split('\0') for l in delta.splitlines() ]
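# each manifest line is "<path>\0<hex file node>"; the fn[:40] slice
# below keeps just the 40-character hex node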
1351 for f, fn in ff:
1361 for f, fn in ff:
1352 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1362 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1353
1363
1354 self.ui.status("crosschecking files in changesets and manifests\n")
1364 self.ui.status("crosschecking files in changesets and manifests\n")
1355 for f in filenodes:
1365 for f in filenodes:
1356 if f not in filelinkrevs:
1366 if f not in filelinkrevs:
1357 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1367 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1358 errors += 1
1368 errors += 1
1359
1369
1360 for f in filelinkrevs:
1370 for f in filelinkrevs:
1361 if f not in filenodes:
1371 if f not in filenodes:
1362 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1372 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1363 errors += 1
1373 errors += 1
1364
1374
1365 self.ui.status("checking files\n")
1375 self.ui.status("checking files\n")
1366 ff = filenodes.keys()
1376 ff = filenodes.keys()
1367 ff.sort()
1377 ff.sort()
1368 for f in ff:
1378 for f in ff:
1369 if f == "/dev/null": continue
1379 if f == "/dev/null": continue
1370 files += 1
1380 files += 1
1371 fl = self.file(f)
1381 fl = self.file(f)
1372 nodes = { nullid: 1 }
1382 nodes = { nullid: 1 }
1373 seen = {}
1383 seen = {}
1374 for i in range(fl.count()):
1384 for i in range(fl.count()):
1375 revisions += 1
1385 revisions += 1
1376 n = fl.node(i)
1386 n = fl.node(i)
1377
1387
1378 if n in seen:
1388 if n in seen:
1379 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1389 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1380 errors += 1
1390 errors += 1
1381
1391
1382 if n not in filenodes[f]:
1392 if n not in filenodes[f]:
1383 self.ui.warn("%s: %d:%s not in manifests\n"
1393 self.ui.warn("%s: %d:%s not in manifests\n"
1384 % (f, i, short(n)))
1394 % (f, i, short(n)))
1385 print len(filenodes[f].keys()), fl.count(), f
1395 print len(filenodes[f].keys()), fl.count(), f
1386 errors += 1
1396 errors += 1
1387 else:
1397 else:
1388 del filenodes[f][n]
1398 del filenodes[f][n]
1389
1399
1390 flr = fl.linkrev(n)
1400 flr = fl.linkrev(n)
1391 if flr not in filelinkrevs[f]:
1401 if flr not in filelinkrevs[f]:
1392 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1402 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1393 % (f, short(n), fl.linkrev(n)))
1403 % (f, short(n), fl.linkrev(n)))
1394 errors += 1
1404 errors += 1
1395 else:
1405 else:
1396 filelinkrevs[f].remove(flr)
1406 filelinkrevs[f].remove(flr)
1397
1407
1398 # verify contents
1408 # verify contents
1399 try:
1409 try:
1400 t = fl.read(n)
1410 t = fl.read(n)
1401 except Exception, inst:
1411 except Exception, inst:
1402 self.ui.warn("unpacking file %s %s: %s\n"
1412 self.ui.warn("unpacking file %s %s: %s\n"
1403 % (f, short(n), inst))
1413 % (f, short(n), inst))
1404 errors += 1
1414 errors += 1
1405
1415
1406 # verify parents
1416 # verify parents
1407 (p1, p2) = fl.parents(n)
1417 (p1, p2) = fl.parents(n)
1408 if p1 not in nodes:
1418 if p1 not in nodes:
1409 self.ui.warn("file %s:%s unknown parent 1 %s" %
1419 self.ui.warn("file %s:%s unknown parent 1 %s" %
1410 (f, short(n), short(p1)))
1420 (f, short(n), short(p1)))
1411 errors += 1
1421 errors += 1
1412 if p2 not in nodes:
1422 if p2 not in nodes:
1413 self.ui.warn("file %s:%s unknown parent 2 %s" %
1423 self.ui.warn("file %s:%s unknown parent 2 %s" %
1414 (f, short(n), short(p1)))
1424 (f, short(n), short(p1)))
1415 errors += 1
1425 errors += 1
1416 nodes[n] = 1
1426 nodes[n] = 1
1417
1427
1418 # cross-check
1428 # cross-check
1419 for node in filenodes[f]:
1429 for node in filenodes[f]:
1420 self.ui.warn("node %s in manifests not in %s\n"
1430 self.ui.warn("node %s in manifests not in %s\n"
1421 % (hex(node), f))
1431 % (hex(node), f))
1422 errors += 1
1432 errors += 1
1423
1433
1424 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1434 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1425 (files, changesets, revisions))
1435 (files, changesets, revisions))
1426
1436
1427 if errors:
1437 if errors:
1428 self.ui.warn("%d integrity errors encountered!\n" % errors)
1438 self.ui.warn("%d integrity errors encountered!\n" % errors)
1429 return 1
1439 return 1
1430
1440
1431 class remoterepository:
1441 class remoterepository:
1432 def __init__(self, ui, path):
1442 def __init__(self, ui, path):
1433 self.url = path
1443 self.url = path
1434 self.ui = ui
1444 self.ui = ui
1435 no_list = [ "localhost", "127.0.0.1" ]
1445 no_list = [ "localhost", "127.0.0.1" ]
1436 host = ui.config("http_proxy", "host")
1446 host = ui.config("http_proxy", "host")
1437 if host is None:
1447 if host is None:
1438 host = os.environ.get("http_proxy")
1448 host = os.environ.get("http_proxy")
1439 if host and host.startswith('http://'):
1449 if host and host.startswith('http://'):
1440 host = host[7:]
1450 host = host[7:]
1441 user = ui.config("http_proxy", "user")
1451 user = ui.config("http_proxy", "user")
1442 passwd = ui.config("http_proxy", "passwd")
1452 passwd = ui.config("http_proxy", "passwd")
1443 no = ui.config("http_proxy", "no")
1453 no = ui.config("http_proxy", "no")
1444 if no is None:
1454 if no is None:
1445 no = os.environ.get("no_proxy")
1455 no = os.environ.get("no_proxy")
1446 if no:
1456 if no:
1447 no_list = no_list + no.split(",")
1457 no_list = no_list + no.split(",")
1448
1458
1449 no_proxy = 0
1459 no_proxy = 0
1450 for h in no_list:
1460 for h in no_list:
1451 if (path.startswith("http://" + h + "/") or
1461 if (path.startswith("http://" + h + "/") or
1452 path.startswith("http://" + h + ":") or
1462 path.startswith("http://" + h + ":") or
1453 path == "http://" + h):
1463 path == "http://" + h):
1454 no_proxy = 1
1464 no_proxy = 1
1455
1465
1456 # Note: urllib2 takes proxy values from the environment and those will
1466 # Note: urllib2 takes proxy values from the environment and those will
1457 # take precedence
1467 # take precedence
1458 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1468 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1459 if os.environ.has_key(env):
1469 if os.environ.has_key(env):
1460 del os.environ[env]
1470 del os.environ[env]
1461
1471
1462 proxy_handler = urllib2.BaseHandler()
1472 proxy_handler = urllib2.BaseHandler()
1463 if host and not no_proxy:
1473 if host and not no_proxy:
1464 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1474 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1465
1475
1466 authinfo = None
1476 authinfo = None
1467 if user and passwd:
1477 if user and passwd:
1468 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1478 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1469 passmgr.add_password(None, host, user, passwd)
1479 passmgr.add_password(None, host, user, passwd)
1470 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1480 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1471
1481
1472 opener = urllib2.build_opener(proxy_handler, authinfo)
1482 opener = urllib2.build_opener(proxy_handler, authinfo)
1473 urllib2.install_opener(opener)
1483 urllib2.install_opener(opener)
1474
1484
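# the proxy setup above prefers the [http_proxy] section of the hgrc
# and only then the conventional environment variable; a condensed
# sketch of that lookup, assuming a ui object with the same config()
# interface:
import os

def proxy_host(ui):
    host = ui.config("http_proxy", "host")
    if host is None:
        host = os.environ.get("http_proxy")
    if host and host.startswith("http://"):
        host = host[7:]          # strip the scheme, keep host[:port]
    return host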
1475 def do_cmd(self, cmd, **args):
1485 def do_cmd(self, cmd, **args):
1476 self.ui.debug("sending %s command\n" % cmd)
1486 self.ui.debug("sending %s command\n" % cmd)
1477 q = {"cmd": cmd}
1487 q = {"cmd": cmd}
1478 q.update(args)
1488 q.update(args)
1479 qs = urllib.urlencode(q)
1489 qs = urllib.urlencode(q)
1480 cu = "%s?%s" % (self.url, qs)
1490 cu = "%s?%s" % (self.url, qs)
1481 return urllib2.urlopen(cu)
1491 return urllib2.urlopen(cu)
1482
1492
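# do_cmd() simply appends a query string to the repository URL: for a
# hypothetical remote at http://example.com/hg, heads() requests
# roughly http://example.com/hg?cmd=heads, while between() adds the
# hex node pairs joined with "-" as the "pairs" parameter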
1483 def heads(self):
1493 def heads(self):
1484 d = self.do_cmd("heads").read()
1494 d = self.do_cmd("heads").read()
1485 try:
1495 try:
1486 return map(bin, d[:-1].split(" "))
1496 return map(bin, d[:-1].split(" "))
1487 except:
1497 except:
1488 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1498 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1489 raise
1499 raise
1490
1500
1491 def branches(self, nodes):
1501 def branches(self, nodes):
1492 n = " ".join(map(hex, nodes))
1502 n = " ".join(map(hex, nodes))
1493 d = self.do_cmd("branches", nodes=n).read()
1503 d = self.do_cmd("branches", nodes=n).read()
1494 try:
1504 try:
1495 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1505 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1496 return br
1506 return br
1497 except:
1507 except:
1498 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1508 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1499 raise
1509 raise
1500
1510
1501 def between(self, pairs):
1511 def between(self, pairs):
1502 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1512 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1503 d = self.do_cmd("between", pairs=n).read()
1513 d = self.do_cmd("between", pairs=n).read()
1504 try:
1514 try:
1505 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1515 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1506 return p
1516 return p
1507 except:
1517 except:
1508 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1518 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1509 raise
1519 raise
1510
1520
1511 def changegroup(self, nodes):
1521 def changegroup(self, nodes):
1512 n = " ".join(map(hex, nodes))
1522 n = " ".join(map(hex, nodes))
1513 zd = zlib.decompressobj()
1523 zd = zlib.decompressobj()
1514 f = self.do_cmd("changegroup", roots=n)
1524 f = self.do_cmd("changegroup", roots=n)
1515 bytes = 0
1525 bytes = 0
1516 while 1:
1526 while 1:
1517 d = f.read(4096)
1527 d = f.read(4096)
1518 bytes += len(d)
1528 bytes += len(d)
1519 if not d:
1529 if not d:
1520 yield zd.flush()
1530 yield zd.flush()
1521 break
1531 break
1522 yield zd.decompress(d)
1532 yield zd.decompress(d)
1523 self.ui.note("%d bytes of data transfered\n" % bytes)
1533 self.ui.note("%d bytes of data transfered\n" % bytes)
1524
1534
1525 def repository(ui, path=None, create=0):
1535 def repository(ui, path=None, create=0):
1526 if path and path[:7] == "http://":
1536 if path and path[:7] == "http://":
1527 return remoterepository(ui, path)
1537 return remoterepository(ui, path)
1528 if path and path[:5] == "hg://":
1538 if path and path[:5] == "hg://":
1529 return remoterepository(ui, path.replace("hg://", "http://"))
1539 return remoterepository(ui, path.replace("hg://", "http://"))
1530 if path and path[:11] == "old-http://":
1540 if path and path[:11] == "old-http://":
1531 return localrepository(ui, path.replace("old-http://", "http://"))
1541 return localrepository(ui, path.replace("old-http://", "http://"))
1532 else:
1542 else:
1533 return localrepository(ui, path, create)
1543 return localrepository(ui, path, create)
1534
1544
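# a quick illustration of the dispatch above (URLs and paths are
# placeholders):
#   repository(ui, "http://example.com/hg")     -> remoterepository
#   repository(ui, "hg://example.com/hg")       -> remoterepository, with
#                                                  hg:// rewritten to http://
#   repository(ui, "old-http://example.com/hg") -> localrepository, with
#                                                  old-http:// rewritten to http://
#   repository(ui, "/path/to/repo")             -> localrepository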