Warn on pushing unsynced repo or adding new heads...
mpm@selenic.com
r816:8674b780 default
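In outline, this change makes push abort in two situations unless it is forced: when the remote repository contains changesets that have not been pulled yet ("unsynced remote changes"), and when the push would leave the remote with more heads than it currently has ("push creates new remote branches"). Below is a minimal sketch of the guard logic the hg.py diff adds to localrepository.push(); guarded_push, local and remote are illustrative stand-ins, and the real code reports through self.ui.warn/status rather than print:

def guarded_push(local, remote, force=False):
    # Guard 1: does the remote have history we have not pulled yet?
    base = {}
    remote_heads = remote.heads()
    incoming = local.findincoming(remote, base, remote_heads)
    if incoming and not force:
        print("abort: unsynced remote changes! (pull first, or use push -f)")
        return 1

    # Anything to push at all?
    outgoing = local.findoutgoing(remote, base)
    if not outgoing:
        print("no changes found")
        return 1

    # Guard 2: would the push give the remote more heads than it has now?
    if not force and len(remote_heads) < len(local.changelog.heads()):
        print("abort: push creates new remote branches! (merge first, or use push -f)")
        return 1

    return remote.addchangegroup(local.changegroup(outgoing))

The shell script in the first hunk exercises both aborts: the first push fails because the remote gained a commit that was never pulled, the second fails after the pull because the local repository now has two heads, and only after 'hg up -m' merges them and the merge is committed does the final push go through.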
@@ -0,0 +1,28 b''
1 #!/bin/sh
2
3 mkdir a
4 cd a
5 hg init
6 echo foo > t1
7 hg add t1
8 hg commit -m "1" -d "0 0"
9
10 cd ..
11 hg clone a b
12
13 cd a
14 echo foo > t2
15 hg add t2
16 hg commit -m "2" -d "0 0"
17
18 cd ../b
19 echo foo > t3
20 hg add t3
21 hg commit -m "3" -d "0 0"
22
23 hg push ../a
24 hg pull ../a
25 hg push ../a
26 hg up -m
27 hg commit -m "4" -d "0 0"
28 hg push ../a
@@ -0,0 +1,35 b''
1 + hg init
2 + hg add t1
3 + hg commit -m 1 -d 0 0
4 + hg clone a b
5 + hg add t2
6 + hg commit -m 2 -d 0 0
7 + hg add t3
8 + hg commit -m 3 -d 0 0
9 + hg push ../a
10 pushing to ../a
11 searching for changes
12 abort: unsynced remote changes!
13 (did you forget to sync? use push -f to force)
14 + hg pull ../a
15 pulling from ../a
16 searching for changes
17 adding changesets
18 adding manifests
19 adding file changes
20 added 1 changesets with 1 changes to 1 files
21 (run 'hg update' to get a working copy)
22 + hg push ../a
23 pushing to ../a
24 searching for changes
25 abort: push creates new remote branches!
26 (did you forget to merge? use push -f to force)
27 + hg up -m
28 + hg commit -m 4 -d 0 0
29 + hg push ../a
30 pushing to ../a
31 searching for changes
32 adding changesets
33 adding manifests
34 adding file changes
35 added 2 changesets with 2 changes to 2 files
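Besides the new checks in push(), the hg.py diff below also changes the signatures of findincoming and findoutgoing: the mutable default base={} becomes base=None with an explicit "if base == None: base = {}", and both functions grow an optional heads argument so push() can reuse the remote heads it has already fetched. Switching to None is the usual way around Python's shared mutable default argument; a minimal illustration of that pitfall (accumulate is a made-up name, used only for the demonstration):

def accumulate(item, seen={}):          # the {} is created once and shared by every call
    seen[item] = 1
    return seen

accumulate("a")   # {'a': 1}
accumulate("b")   # {'a': 1, 'b': 1}  -- 'a' leaked in from the previous call

def accumulate_fixed(item, seen=None):  # the pattern the diff switches to
    if seen is None:
        seen = {}
    seen[item] = 1
    return seen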
@@ -1,1981 +1,2003 b''
1 1 # hg.py - repository classes for mercurial
2 2 #
3 3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import sys, struct, os
9 9 import util
10 10 from revlog import *
11 11 from demandload import *
12 12 demandload(globals(), "re lock urllib urllib2 transaction time socket")
13 13 demandload(globals(), "tempfile httprangereader bdiff urlparse")
14 14 demandload(globals(), "bisect select")
15 15
16 16 class filelog(revlog):
17 17 def __init__(self, opener, path):
18 18 revlog.__init__(self, opener,
19 19 os.path.join("data", self.encodedir(path + ".i")),
20 20 os.path.join("data", self.encodedir(path + ".d")))
21 21
22 22 # This avoids a collision between a file named foo and a dir named
23 23 # foo.i or foo.d
24 24 def encodedir(self, path):
25 25 path.replace(".hg/", ".hg.hg/")
26 26 path.replace(".i/", ".i.hg/")
27 27 path.replace(".d/", ".i.hg/")
28 28 return path
29 29
30 30 def decodedir(self, path):
31 31 path.replace(".d.hg/", ".d/")
32 32 path.replace(".i.hg/", ".i/")
33 33 path.replace(".hg.hg/", ".hg/")
34 34 return path
35 35
36 36 def read(self, node):
37 37 t = self.revision(node)
38 38 if not t.startswith('\1\n'):
39 39 return t
40 40 s = t.find('\1\n', 2)
41 41 return t[s+2:]
42 42
43 43 def readmeta(self, node):
44 44 t = self.revision(node)
45 45 if not t.startswith('\1\n'):
46 46 return t
47 47 s = t.find('\1\n', 2)
48 48 mt = t[2:s]
49 49 for l in mt.splitlines():
50 50 k, v = l.split(": ", 1)
51 51 m[k] = v
52 52 return m
53 53
54 54 def add(self, text, meta, transaction, link, p1=None, p2=None):
55 55 if meta or text.startswith('\1\n'):
56 56 mt = ""
57 57 if meta:
58 58 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
59 59 text = "\1\n" + "".join(mt) + "\1\n" + text
60 60 return self.addrevision(text, transaction, link, p1, p2)
61 61
62 62 def annotate(self, node):
63 63
64 64 def decorate(text, rev):
65 65 return ([rev] * len(text.splitlines()), text)
66 66
67 67 def pair(parent, child):
68 68 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
69 69 child[0][b1:b2] = parent[0][a1:a2]
70 70 return child
71 71
72 72 # find all ancestors
73 73 needed = {node:1}
74 74 visit = [node]
75 75 while visit:
76 76 n = visit.pop(0)
77 77 for p in self.parents(n):
78 78 if p not in needed:
79 79 needed[p] = 1
80 80 visit.append(p)
81 81 else:
82 82 # count how many times we'll use this
83 83 needed[p] += 1
84 84
85 85 # sort by revision which is a topological order
86 86 visit = [ (self.rev(n), n) for n in needed.keys() ]
87 87 visit.sort()
88 88 hist = {}
89 89
90 90 for r,n in visit:
91 91 curr = decorate(self.read(n), self.linkrev(n))
92 92 for p in self.parents(n):
93 93 if p != nullid:
94 94 curr = pair(hist[p], curr)
95 95 # trim the history of unneeded revs
96 96 needed[p] -= 1
97 97 if not needed[p]:
98 98 del hist[p]
99 99 hist[n] = curr
100 100
101 101 return zip(hist[n][0], hist[n][1].splitlines(1))
102 102
103 103 class manifest(revlog):
104 104 def __init__(self, opener):
105 105 self.mapcache = None
106 106 self.listcache = None
107 107 self.addlist = None
108 108 revlog.__init__(self, opener, "00manifest.i", "00manifest.d")
109 109
110 110 def read(self, node):
111 111 if node == nullid: return {} # don't upset local cache
112 112 if self.mapcache and self.mapcache[0] == node:
113 113 return self.mapcache[1]
114 114 text = self.revision(node)
115 115 map = {}
116 116 flag = {}
117 117 self.listcache = (text, text.splitlines(1))
118 118 for l in self.listcache[1]:
119 119 (f, n) = l.split('\0')
120 120 map[f] = bin(n[:40])
121 121 flag[f] = (n[40:-1] == "x")
122 122 self.mapcache = (node, map, flag)
123 123 return map
124 124
125 125 def readflags(self, node):
126 126 if node == nullid: return {} # don't upset local cache
127 127 if not self.mapcache or self.mapcache[0] != node:
128 128 self.read(node)
129 129 return self.mapcache[2]
130 130
131 131 def diff(self, a, b):
132 132 # this is sneaky, as we're not actually using a and b
133 133 if self.listcache and self.addlist and self.listcache[0] == a:
134 134 d = mdiff.diff(self.listcache[1], self.addlist, 1)
135 135 if mdiff.patch(a, d) != b:
136 136 sys.stderr.write("*** sortdiff failed, falling back ***\n")
137 137 return mdiff.textdiff(a, b)
138 138 return d
139 139 else:
140 140 return mdiff.textdiff(a, b)
141 141
142 142 def add(self, map, flags, transaction, link, p1=None, p2=None,
143 143 changed=None):
144 144 # directly generate the mdiff delta from the data collected during
145 145 # the bisect loop below
146 146 def gendelta(delta):
147 147 i = 0
148 148 result = []
149 149 while i < len(delta):
150 150 start = delta[i][2]
151 151 end = delta[i][3]
152 152 l = delta[i][4]
153 153 if l == None:
154 154 l = ""
155 155 while i < len(delta) - 1 and start <= delta[i+1][2] \
156 156 and end >= delta[i+1][2]:
157 157 if delta[i+1][3] > end:
158 158 end = delta[i+1][3]
159 159 if delta[i+1][4]:
160 160 l += delta[i+1][4]
161 161 i += 1
162 162 result.append(struct.pack(">lll", start, end, len(l)) + l)
163 163 i += 1
164 164 return result
165 165
166 166 # apply the changes collected during the bisect loop to our addlist
167 167 def addlistdelta(addlist, delta):
168 168 # apply the deltas to the addlist. start from the bottom up
169 169 # so changes to the offsets don't mess things up.
170 170 i = len(delta)
171 171 while i > 0:
172 172 i -= 1
173 173 start = delta[i][0]
174 174 end = delta[i][1]
175 175 if delta[i][4]:
176 176 addlist[start:end] = [delta[i][4]]
177 177 else:
178 178 del addlist[start:end]
179 179 return addlist
180 180
181 181 # calculate the byte offset of the start of each line in the
182 182 # manifest
183 183 def calcoffsets(addlist):
184 184 offsets = [0] * (len(addlist) + 1)
185 185 offset = 0
186 186 i = 0
187 187 while i < len(addlist):
188 188 offsets[i] = offset
189 189 offset += len(addlist[i])
190 190 i += 1
191 191 offsets[i] = offset
192 192 return offsets
193 193
194 194 # if we're using the listcache, make sure it is valid and
195 195 # parented by the same node we're diffing against
196 196 if not changed or not self.listcache or not p1 or \
197 197 self.mapcache[0] != p1:
198 198 files = map.keys()
199 199 files.sort()
200 200
201 201 self.addlist = ["%s\000%s%s\n" %
202 202 (f, hex(map[f]), flags[f] and "x" or '')
203 203 for f in files]
204 204 cachedelta = None
205 205 else:
206 206 addlist = self.listcache[1]
207 207
208 208 # find the starting offset for each line in the add list
209 209 offsets = calcoffsets(addlist)
210 210
211 211 # combine the changed lists into one list for sorting
212 212 work = [[x, 0] for x in changed[0]]
213 213 work[len(work):] = [[x, 1] for x in changed[1]]
214 214 work.sort()
215 215
216 216 delta = []
217 217 bs = 0
218 218
219 219 for w in work:
220 220 f = w[0]
221 221 # bs will either be the index of the item or the insert point
222 222 bs = bisect.bisect(addlist, f, bs)
223 223 if bs < len(addlist):
224 224 fn = addlist[bs][:addlist[bs].index('\0')]
225 225 else:
226 226 fn = None
227 227 if w[1] == 0:
228 228 l = "%s\000%s%s\n" % (f, hex(map[f]),
229 229 flags[f] and "x" or '')
230 230 else:
231 231 l = None
232 232 start = bs
233 233 if fn != f:
234 234 # item not found, insert a new one
235 235 end = bs
236 236 if w[1] == 1:
237 237 sys.stderr.write("failed to remove %s from manifest\n"
238 238 % f)
239 239 sys.exit(1)
240 240 else:
241 241 # item is found, replace/delete the existing line
242 242 end = bs + 1
243 243 delta.append([start, end, offsets[start], offsets[end], l])
244 244
245 245 self.addlist = addlistdelta(addlist, delta)
246 246 if self.mapcache[0] == self.tip():
247 247 cachedelta = "".join(gendelta(delta))
248 248 else:
249 249 cachedelta = None
250 250
251 251 text = "".join(self.addlist)
252 252 if cachedelta and mdiff.patch(self.listcache[0], cachedelta) != text:
253 253 sys.stderr.write("manifest delta failure\n")
254 254 sys.exit(1)
255 255 n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
256 256 self.mapcache = (n, map, flags)
257 257 self.listcache = (text, self.addlist)
258 258 self.addlist = None
259 259
260 260 return n
261 261
262 262 class changelog(revlog):
263 263 def __init__(self, opener):
264 264 revlog.__init__(self, opener, "00changelog.i", "00changelog.d")
265 265
266 266 def extract(self, text):
267 267 if not text:
268 268 return (nullid, "", "0", [], "")
269 269 last = text.index("\n\n")
270 270 desc = text[last + 2:]
271 271 l = text[:last].splitlines()
272 272 manifest = bin(l[0])
273 273 user = l[1]
274 274 date = l[2]
275 275 files = l[3:]
276 276 return (manifest, user, date, files, desc)
277 277
278 278 def read(self, node):
279 279 return self.extract(self.revision(node))
280 280
281 281 def add(self, manifest, list, desc, transaction, p1=None, p2=None,
282 282 user=None, date=None):
283 283 date = date or "%d %d" % (time.time(), time.timezone)
284 284 list.sort()
285 285 l = [hex(manifest), user, date] + list + ["", desc]
286 286 text = "\n".join(l)
287 287 return self.addrevision(text, transaction, self.count(), p1, p2)
288 288
289 289 class dirstate:
290 290 def __init__(self, opener, ui, root):
291 291 self.opener = opener
292 292 self.root = root
293 293 self.dirty = 0
294 294 self.ui = ui
295 295 self.map = None
296 296 self.pl = None
297 297 self.copies = {}
298 298 self.ignorefunc = None
299 299
300 300 def wjoin(self, f):
301 301 return os.path.join(self.root, f)
302 302
303 303 def ignore(self, f):
304 304 if not self.ignorefunc:
305 305 bigpat = []
306 306 try:
307 307 l = file(self.wjoin(".hgignore"))
308 308 for pat in l:
309 309 if pat != "\n":
310 310 p = util.pconvert(pat[:-1])
311 311 try:
312 312 r = re.compile(p)
313 313 except:
314 314 self.ui.warn("ignoring invalid ignore"
315 315 + " regular expression '%s'\n" % p)
316 316 else:
317 317 bigpat.append(util.pconvert(pat[:-1]))
318 318 except IOError: pass
319 319
320 320 if bigpat:
321 321 s = "(?:%s)" % (")|(?:".join(bigpat))
322 322 r = re.compile(s)
323 323 self.ignorefunc = r.search
324 324 else:
325 325 self.ignorefunc = util.never
326 326
327 327 return self.ignorefunc(f)
328 328
329 329 def __del__(self):
330 330 if self.dirty:
331 331 self.write()
332 332
333 333 def __getitem__(self, key):
334 334 try:
335 335 return self.map[key]
336 336 except TypeError:
337 337 self.read()
338 338 return self[key]
339 339
340 340 def __contains__(self, key):
341 341 if not self.map: self.read()
342 342 return key in self.map
343 343
344 344 def parents(self):
345 345 if not self.pl:
346 346 self.read()
347 347 return self.pl
348 348
349 349 def markdirty(self):
350 350 if not self.dirty:
351 351 self.dirty = 1
352 352
353 353 def setparents(self, p1, p2 = nullid):
354 354 self.markdirty()
355 355 self.pl = p1, p2
356 356
357 357 def state(self, key):
358 358 try:
359 359 return self[key][0]
360 360 except KeyError:
361 361 return "?"
362 362
363 363 def read(self):
364 364 if self.map is not None: return self.map
365 365
366 366 self.map = {}
367 367 self.pl = [nullid, nullid]
368 368 try:
369 369 st = self.opener("dirstate").read()
370 370 if not st: return
371 371 except: return
372 372
373 373 self.pl = [st[:20], st[20: 40]]
374 374
375 375 pos = 40
376 376 while pos < len(st):
377 377 e = struct.unpack(">cllll", st[pos:pos+17])
378 378 l = e[4]
379 379 pos += 17
380 380 f = st[pos:pos + l]
381 381 if '\0' in f:
382 382 f, c = f.split('\0')
383 383 self.copies[f] = c
384 384 self.map[f] = e[:4]
385 385 pos += l
386 386
387 387 def copy(self, source, dest):
388 388 self.read()
389 389 self.markdirty()
390 390 self.copies[dest] = source
391 391
392 392 def copied(self, file):
393 393 return self.copies.get(file, None)
394 394
395 395 def update(self, files, state):
396 396 ''' current states:
397 397 n normal
398 398 m needs merging
399 399 r marked for removal
400 400 a marked for addition'''
401 401
402 402 if not files: return
403 403 self.read()
404 404 self.markdirty()
405 405 for f in files:
406 406 if state == "r":
407 407 self.map[f] = ('r', 0, 0, 0)
408 408 else:
409 409 s = os.stat(os.path.join(self.root, f))
410 410 self.map[f] = (state, s.st_mode, s.st_size, s.st_mtime)
411 411
412 412 def forget(self, files):
413 413 if not files: return
414 414 self.read()
415 415 self.markdirty()
416 416 for f in files:
417 417 try:
418 418 del self.map[f]
419 419 except KeyError:
420 420 self.ui.warn("not in dirstate: %s!\n" % f)
421 421 pass
422 422
423 423 def clear(self):
424 424 self.map = {}
425 425 self.markdirty()
426 426
427 427 def write(self):
428 428 st = self.opener("dirstate", "w")
429 429 st.write("".join(self.pl))
430 430 for f, e in self.map.items():
431 431 c = self.copied(f)
432 432 if c:
433 433 f = f + "\0" + c
434 434 e = struct.pack(">cllll", e[0], e[1], e[2], e[3], len(f))
435 435 st.write(e + f)
436 436 self.dirty = 0
437 437
438 438 def walk(self, files = None, match = util.always):
439 439 self.read()
440 440 dc = self.map.copy()
441 441 # walk all files by default
442 442 if not files: files = [self.root]
443 443 def traverse():
444 444 for f in util.unique(files):
445 445 f = os.path.join(self.root, f)
446 446 if os.path.isdir(f):
447 447 for dir, subdirs, fl in os.walk(f):
448 448 d = dir[len(self.root) + 1:]
449 449 if d == '.hg':
450 450 subdirs[:] = []
451 451 continue
452 452 for sd in subdirs:
453 453 ds = os.path.join(d, sd +'/')
454 454 if self.ignore(ds) or not match(ds):
455 455 subdirs.remove(sd)
456 456 for fn in fl:
457 457 fn = util.pconvert(os.path.join(d, fn))
458 458 yield 'f', fn
459 459 else:
460 460 yield 'f', f[len(self.root) + 1:]
461 461
462 462 for k in dc.keys():
463 463 yield 'm', k
464 464
465 465 # yield only files that match: all in dirstate, others only if
466 466 # not in .hgignore
467 467
468 468 for src, fn in util.unique(traverse()):
469 469 if fn in dc:
470 470 del dc[fn]
471 471 elif self.ignore(fn):
472 472 continue
473 473 if match(fn):
474 474 yield src, fn
475 475
476 476 def changes(self, files = None, match = util.always):
477 477 self.read()
478 478 dc = self.map.copy()
479 479 lookup, changed, added, unknown = [], [], [], []
480 480
481 481 for src, fn in self.walk(files, match):
482 482 try: s = os.stat(os.path.join(self.root, fn))
483 483 except: continue
484 484
485 485 if fn in dc:
486 486 c = dc[fn]
487 487 del dc[fn]
488 488
489 489 if c[0] == 'm':
490 490 changed.append(fn)
491 491 elif c[0] == 'a':
492 492 added.append(fn)
493 493 elif c[0] == 'r':
494 494 unknown.append(fn)
495 495 elif c[2] != s.st_size or (c[1] ^ s.st_mode) & 0100:
496 496 changed.append(fn)
497 497 elif c[1] != s.st_mode or c[3] != s.st_mtime:
498 498 lookup.append(fn)
499 499 else:
500 500 if match(fn): unknown.append(fn)
501 501
502 502 return (lookup, changed, added, filter(match, dc.keys()), unknown)
503 503
504 504 # used to avoid circular references so destructors work
505 505 def opener(base):
506 506 p = base
507 507 def o(path, mode="r"):
508 508 if p.startswith("http://"):
509 509 f = os.path.join(p, urllib.quote(path))
510 510 return httprangereader.httprangereader(f)
511 511
512 512 f = os.path.join(p, path)
513 513
514 514 mode += "b" # for that other OS
515 515
516 516 if mode[0] != "r":
517 517 try:
518 518 s = os.stat(f)
519 519 except OSError:
520 520 d = os.path.dirname(f)
521 521 if not os.path.isdir(d):
522 522 os.makedirs(d)
523 523 else:
524 524 if s.st_nlink > 1:
525 525 file(f + ".tmp", "wb").write(file(f, "rb").read())
526 526 util.rename(f+".tmp", f)
527 527
528 528 return file(f, mode)
529 529
530 530 return o
531 531
532 532 class RepoError(Exception): pass
533 533
534 534 class localrepository:
535 535 def __init__(self, ui, path=None, create=0):
536 536 self.remote = 0
537 537 if path and path.startswith("http://"):
538 538 self.remote = 1
539 539 self.path = path
540 540 else:
541 541 if not path:
542 542 p = os.getcwd()
543 543 while not os.path.isdir(os.path.join(p, ".hg")):
544 544 oldp = p
545 545 p = os.path.dirname(p)
546 546 if p == oldp: raise RepoError("no repo found")
547 547 path = p
548 548 self.path = os.path.join(path, ".hg")
549 549
550 550 if not create and not os.path.isdir(self.path):
551 551 raise RepoError("repository %s not found" % self.path)
552 552
553 553 self.root = path
554 554 self.ui = ui
555 555
556 556 if create:
557 557 os.mkdir(self.path)
558 558 os.mkdir(self.join("data"))
559 559
560 560 self.opener = opener(self.path)
561 561 self.wopener = opener(self.root)
562 562 self.manifest = manifest(self.opener)
563 563 self.changelog = changelog(self.opener)
564 564 self.tagscache = None
565 565 self.nodetagscache = None
566 566
567 567 if not self.remote:
568 568 self.dirstate = dirstate(self.opener, ui, self.root)
569 569 try:
570 570 self.ui.readconfig(self.opener("hgrc"))
571 571 except IOError: pass
572 572
573 573 def hook(self, name, **args):
574 574 s = self.ui.config("hooks", name)
575 575 if s:
576 576 self.ui.note("running hook %s: %s\n" % (name, s))
577 577 old = {}
578 578 for k, v in args.items():
579 579 k = k.upper()
580 580 old[k] = os.environ.get(k, None)
581 581 os.environ[k] = v
582 582
583 583 r = os.system(s)
584 584
585 585 for k, v in old.items():
586 586 if v != None:
587 587 os.environ[k] = v
588 588 else:
589 589 del os.environ[k]
590 590
591 591 if r:
592 592 self.ui.warn("abort: %s hook failed with status %d!\n" %
593 593 (name, r))
594 594 return False
595 595 return True
596 596
597 597 def tags(self):
598 598 '''return a mapping of tag to node'''
599 599 if not self.tagscache:
600 600 self.tagscache = {}
601 601 def addtag(self, k, n):
602 602 try:
603 603 bin_n = bin(n)
604 604 except TypeError:
605 605 bin_n = ''
606 606 self.tagscache[k.strip()] = bin_n
607 607
608 608 try:
609 609 # read each head of the tags file, ending with the tip
610 610 # and add each tag found to the map, with "newer" ones
611 611 # taking precedence
612 612 fl = self.file(".hgtags")
613 613 h = fl.heads()
614 614 h.reverse()
615 615 for r in h:
616 616 for l in fl.revision(r).splitlines():
617 617 if l:
618 618 n, k = l.split(" ", 1)
619 619 addtag(self, k, n)
620 620 except KeyError:
621 621 pass
622 622
623 623 try:
624 624 f = self.opener("localtags")
625 625 for l in f:
626 626 n, k = l.split(" ", 1)
627 627 addtag(self, k, n)
628 628 except IOError:
629 629 pass
630 630
631 631 self.tagscache['tip'] = self.changelog.tip()
632 632
633 633 return self.tagscache
634 634
635 635 def tagslist(self):
636 636 '''return a list of tags ordered by revision'''
637 637 l = []
638 638 for t, n in self.tags().items():
639 639 try:
640 640 r = self.changelog.rev(n)
641 641 except:
642 642 r = -2 # sort to the beginning of the list if unknown
643 643 l.append((r,t,n))
644 644 l.sort()
645 645 return [(t,n) for r,t,n in l]
646 646
647 647 def nodetags(self, node):
648 648 '''return the tags associated with a node'''
649 649 if not self.nodetagscache:
650 650 self.nodetagscache = {}
651 651 for t,n in self.tags().items():
652 652 self.nodetagscache.setdefault(n,[]).append(t)
653 653 return self.nodetagscache.get(node, [])
654 654
655 655 def lookup(self, key):
656 656 try:
657 657 return self.tags()[key]
658 658 except KeyError:
659 659 try:
660 660 return self.changelog.lookup(key)
661 661 except:
662 662 raise RepoError("unknown revision '%s'" % key)
663 663
664 664 def dev(self):
665 665 if self.remote: return -1
666 666 return os.stat(self.path).st_dev
667 667
668 668 def join(self, f):
669 669 return os.path.join(self.path, f)
670 670
671 671 def wjoin(self, f):
672 672 return os.path.join(self.root, f)
673 673
674 674 def file(self, f):
675 675 if f[0] == '/': f = f[1:]
676 676 return filelog(self.opener, f)
677 677
678 678 def getcwd(self):
679 679 cwd = os.getcwd()
680 680 if cwd == self.root: return ''
681 681 return cwd[len(self.root) + 1:]
682 682
683 683 def wfile(self, f, mode='r'):
684 684 return self.wopener(f, mode)
685 685
686 686 def transaction(self):
687 687 # save dirstate for undo
688 688 try:
689 689 ds = self.opener("dirstate").read()
690 690 except IOError:
691 691 ds = ""
692 692 self.opener("journal.dirstate", "w").write(ds)
693 693
694 694 def after():
695 695 util.rename(self.join("journal"), self.join("undo"))
696 696 util.rename(self.join("journal.dirstate"),
697 697 self.join("undo.dirstate"))
698 698
699 699 return transaction.transaction(self.ui.warn, self.opener,
700 700 self.join("journal"), after)
701 701
702 702 def recover(self):
703 703 lock = self.lock()
704 704 if os.path.exists(self.join("journal")):
705 705 self.ui.status("rolling back interrupted transaction\n")
706 706 return transaction.rollback(self.opener, self.join("journal"))
707 707 else:
708 708 self.ui.warn("no interrupted transaction available\n")
709 709
710 710 def undo(self):
711 711 lock = self.lock()
712 712 if os.path.exists(self.join("undo")):
713 713 self.ui.status("rolling back last transaction\n")
714 714 transaction.rollback(self.opener, self.join("undo"))
715 715 self.dirstate = None
716 716 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
717 717 self.dirstate = dirstate(self.opener, self.ui, self.root)
718 718 else:
719 719 self.ui.warn("no undo information available\n")
720 720
721 721 def lock(self, wait = 1):
722 722 try:
723 723 return lock.lock(self.join("lock"), 0)
724 724 except lock.LockHeld, inst:
725 725 if wait:
726 726 self.ui.warn("waiting for lock held by %s\n" % inst.args[0])
727 727 return lock.lock(self.join("lock"), wait)
728 728 raise inst
729 729
730 730 def rawcommit(self, files, text, user, date, p1=None, p2=None):
731 731 orig_parent = self.dirstate.parents()[0] or nullid
732 732 p1 = p1 or self.dirstate.parents()[0] or nullid
733 733 p2 = p2 or self.dirstate.parents()[1] or nullid
734 734 c1 = self.changelog.read(p1)
735 735 c2 = self.changelog.read(p2)
736 736 m1 = self.manifest.read(c1[0])
737 737 mf1 = self.manifest.readflags(c1[0])
738 738 m2 = self.manifest.read(c2[0])
739 739
740 740 if orig_parent == p1:
741 741 update_dirstate = 1
742 742 else:
743 743 update_dirstate = 0
744 744
745 745 tr = self.transaction()
746 746 mm = m1.copy()
747 747 mfm = mf1.copy()
748 748 linkrev = self.changelog.count()
749 749 for f in files:
750 750 try:
751 751 t = self.wfile(f).read()
752 752 tm = util.is_exec(self.wjoin(f), mfm.get(f, False))
753 753 r = self.file(f)
754 754 mfm[f] = tm
755 755 mm[f] = r.add(t, {}, tr, linkrev,
756 756 m1.get(f, nullid), m2.get(f, nullid))
757 757 if update_dirstate:
758 758 self.dirstate.update([f], "n")
759 759 except IOError:
760 760 try:
761 761 del mm[f]
762 762 del mfm[f]
763 763 if update_dirstate:
764 764 self.dirstate.forget([f])
765 765 except:
766 766 # deleted from p2?
767 767 pass
768 768
769 769 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
770 770 user = user or self.ui.username()
771 771 n = self.changelog.add(mnode, files, text, tr, p1, p2, user, date)
772 772 tr.close()
773 773 if update_dirstate:
774 774 self.dirstate.setparents(n, nullid)
775 775
776 776 def commit(self, files = None, text = "", user = None, date = None,
777 777 match = util.always):
778 778 commit = []
779 779 remove = []
780 780 if files:
781 781 for f in files:
782 782 s = self.dirstate.state(f)
783 783 if s in 'nmai':
784 784 commit.append(f)
785 785 elif s == 'r':
786 786 remove.append(f)
787 787 else:
788 788 self.ui.warn("%s not tracked!\n" % f)
789 789 else:
790 790 (c, a, d, u) = self.changes(match = match)
791 791 commit = c + a
792 792 remove = d
793 793
794 794 if not commit and not remove:
795 795 self.ui.status("nothing changed\n")
796 796 return
797 797
798 798 if not self.hook("precommit"):
799 799 return 1
800 800
801 801 p1, p2 = self.dirstate.parents()
802 802 c1 = self.changelog.read(p1)
803 803 c2 = self.changelog.read(p2)
804 804 m1 = self.manifest.read(c1[0])
805 805 mf1 = self.manifest.readflags(c1[0])
806 806 m2 = self.manifest.read(c2[0])
807 807 lock = self.lock()
808 808 tr = self.transaction()
809 809
810 810 # check in files
811 811 new = {}
812 812 linkrev = self.changelog.count()
813 813 commit.sort()
814 814 for f in commit:
815 815 self.ui.note(f + "\n")
816 816 try:
817 817 mf1[f] = util.is_exec(self.wjoin(f), mf1.get(f, False))
818 818 t = self.wfile(f).read()
819 819 except IOError:
820 820 self.ui.warn("trouble committing %s!\n" % f)
821 821 raise
822 822
823 823 meta = {}
824 824 cp = self.dirstate.copied(f)
825 825 if cp:
826 826 meta["copy"] = cp
827 827 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
828 828 self.ui.debug(" %s: copy %s:%s\n" % (f, cp, meta["copyrev"]))
829 829
830 830 r = self.file(f)
831 831 fp1 = m1.get(f, nullid)
832 832 fp2 = m2.get(f, nullid)
833 833 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
834 834
835 835 # update manifest
836 836 m1.update(new)
837 837 for f in remove:
838 838 if f in m1:
839 839 del m1[f]
840 840 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
841 841 (new, remove))
842 842
843 843 # add changeset
844 844 new = new.keys()
845 845 new.sort()
846 846
847 847 if not text:
848 848 edittext = "\n" + "HG: manifest hash %s\n" % hex(mn)
849 849 edittext += "".join(["HG: changed %s\n" % f for f in new])
850 850 edittext += "".join(["HG: removed %s\n" % f for f in remove])
851 851 edittext = self.ui.edit(edittext)
852 852 if not edittext.rstrip():
853 853 return 1
854 854 text = edittext
855 855
856 856 user = user or self.ui.username()
857 857 n = self.changelog.add(mn, new, text, tr, p1, p2, user, date)
858 858
859 859 tr.close()
860 860
861 861 self.dirstate.setparents(n)
862 862 self.dirstate.update(new, "n")
863 863 self.dirstate.forget(remove)
864 864
865 865 if not self.hook("commit", node=hex(n)):
866 866 return 1
867 867
868 868 def walk(self, node = None, files = [], match = util.always):
869 869 if node:
870 870 for fn in self.manifest.read(self.changelog.read(node)[0]):
871 871 yield 'm', fn
872 872 else:
873 873 for src, fn in self.dirstate.walk(files, match):
874 874 yield src, fn
875 875
876 876 def changes(self, node1 = None, node2 = None, files = [],
877 877 match = util.always):
878 878 mf2, u = None, []
879 879
880 880 def fcmp(fn, mf):
881 881 t1 = self.wfile(fn).read()
882 882 t2 = self.file(fn).revision(mf[fn])
883 883 return cmp(t1, t2)
884 884
885 885 def mfmatches(node):
886 886 mf = dict(self.manifest.read(node))
887 887 for fn in mf.keys():
888 888 if not match(fn):
889 889 del mf[fn]
890 890 return mf
891 891
892 892 # are we comparing the working directory?
893 893 if not node2:
894 894 l, c, a, d, u = self.dirstate.changes(files, match)
895 895
896 896 # are we comparing working dir against its parent?
897 897 if not node1:
898 898 if l:
899 899 # do a full compare of any files that might have changed
900 900 change = self.changelog.read(self.dirstate.parents()[0])
901 901 mf2 = mfmatches(change[0])
902 902 for f in l:
903 903 if fcmp(f, mf2):
904 904 c.append(f)
905 905
906 906 for l in c, a, d, u:
907 907 l.sort()
908 908
909 909 return (c, a, d, u)
910 910
911 911 # are we comparing working dir against non-tip?
912 912 # generate a pseudo-manifest for the working dir
913 913 if not node2:
914 914 if not mf2:
915 915 change = self.changelog.read(self.dirstate.parents()[0])
916 916 mf2 = mfmatches(change[0])
917 917 for f in a + c + l:
918 918 mf2[f] = ""
919 919 for f in d:
920 920 if f in mf2: del mf2[f]
921 921 else:
922 922 change = self.changelog.read(node2)
923 923 mf2 = mfmatches(change[0])
924 924
925 925 # flush lists from dirstate before comparing manifests
926 926 c, a = [], []
927 927
928 928 change = self.changelog.read(node1)
929 929 mf1 = mfmatches(change[0])
930 930
931 931 for fn in mf2:
932 932 if mf1.has_key(fn):
933 933 if mf1[fn] != mf2[fn]:
934 934 if mf2[fn] != "" or fcmp(fn, mf1):
935 935 c.append(fn)
936 936 del mf1[fn]
937 937 else:
938 938 a.append(fn)
939 939
940 940 d = mf1.keys()
941 941
942 942 for l in c, a, d, u:
943 943 l.sort()
944 944
945 945 return (c, a, d, u)
946 946
947 947 def add(self, list):
948 948 for f in list:
949 949 p = self.wjoin(f)
950 950 if not os.path.exists(p):
951 951 self.ui.warn("%s does not exist!\n" % f)
952 952 elif not os.path.isfile(p):
953 953 self.ui.warn("%s not added: only files supported currently\n" % f)
954 954 elif self.dirstate.state(f) in 'an':
955 955 self.ui.warn("%s already tracked!\n" % f)
956 956 else:
957 957 self.dirstate.update([f], "a")
958 958
959 959 def forget(self, list):
960 960 for f in list:
961 961 if self.dirstate.state(f) not in 'ai':
962 962 self.ui.warn("%s not added!\n" % f)
963 963 else:
964 964 self.dirstate.forget([f])
965 965
966 966 def remove(self, list):
967 967 for f in list:
968 968 p = self.wjoin(f)
969 969 if os.path.exists(p):
970 970 self.ui.warn("%s still exists!\n" % f)
971 971 elif self.dirstate.state(f) == 'a':
972 972 self.ui.warn("%s never committed!\n" % f)
973 973 self.dirstate.forget([f])
974 974 elif f not in self.dirstate:
975 975 self.ui.warn("%s not tracked!\n" % f)
976 976 else:
977 977 self.dirstate.update([f], "r")
978 978
979 979 def copy(self, source, dest):
980 980 p = self.wjoin(dest)
981 981 if not os.path.exists(p):
982 982 self.ui.warn("%s does not exist!\n" % dest)
983 983 elif not os.path.isfile(p):
984 984 self.ui.warn("copy failed: %s is not a file\n" % dest)
985 985 else:
986 986 if self.dirstate.state(dest) == '?':
987 987 self.dirstate.update([dest], "a")
988 988 self.dirstate.copy(source, dest)
989 989
990 990 def heads(self):
991 991 return self.changelog.heads()
992 992
993 993 def branches(self, nodes):
994 994 if not nodes: nodes = [self.changelog.tip()]
995 995 b = []
996 996 for n in nodes:
997 997 t = n
998 998 while n:
999 999 p = self.changelog.parents(n)
1000 1000 if p[1] != nullid or p[0] == nullid:
1001 1001 b.append((t, n, p[0], p[1]))
1002 1002 break
1003 1003 n = p[0]
1004 1004 return b
1005 1005
1006 1006 def between(self, pairs):
1007 1007 r = []
1008 1008
1009 1009 for top, bottom in pairs:
1010 1010 n, l, i = top, [], 0
1011 1011 f = 1
1012 1012
1013 1013 while n != bottom:
1014 1014 p = self.changelog.parents(n)[0]
1015 1015 if i == f:
1016 1016 l.append(n)
1017 1017 f = f * 2
1018 1018 n = p
1019 1019 i += 1
1020 1020
1021 1021 r.append(l)
1022 1022
1023 1023 return r
1024 1024
1025 1025 def newer(self, nodes):
1026 1026 m = {}
1027 1027 nl = []
1028 1028 pm = {}
1029 1029 cl = self.changelog
1030 1030 t = l = cl.count()
1031 1031
1032 1032 # find the lowest numbered node
1033 1033 for n in nodes:
1034 1034 l = min(l, cl.rev(n))
1035 1035 m[n] = 1
1036 1036
1037 1037 for i in xrange(l, t):
1038 1038 n = cl.node(i)
1039 1039 if n in m: # explicitly listed
1040 1040 pm[n] = 1
1041 1041 nl.append(n)
1042 1042 continue
1043 1043 for p in cl.parents(n):
1044 1044 if p in pm: # parent listed
1045 1045 pm[n] = 1
1046 1046 nl.append(n)
1047 1047 break
1048 1048
1049 1049 return nl
1050 1050
1051 def findincoming(self, remote, base={}):
1051 def findincoming(self, remote, base=None, heads=None):
1052 1052 m = self.changelog.nodemap
1053 1053 search = []
1054 1054 fetch = []
1055 1055 seen = {}
1056 1056 seenbranch = {}
1057 if base == None:
1058 base = {}
1057 1059
1058 1060 # assume we're closer to the tip than the root
1059 1061 # and start by examining the heads
1060 1062 self.ui.status("searching for changes\n")
1063
1064 if not heads:
1061 1065 heads = remote.heads()
1066
1062 1067 unknown = []
1063 1068 for h in heads:
1064 1069 if h not in m:
1065 1070 unknown.append(h)
1066 1071 else:
1067 1072 base[h] = 1
1068 1073
1069 1074 if not unknown:
1070 1075 return None
1071 1076
1072 1077 rep = {}
1073 1078 reqcnt = 0
1074 1079
1075 1080 # search through remote branches
1076 1081 # a 'branch' here is a linear segment of history, with four parts:
1077 1082 # head, root, first parent, second parent
1078 1083 # (a branch always has two parents (or none) by definition)
1079 1084 unknown = remote.branches(unknown)
1080 1085 while unknown:
1081 1086 r = []
1082 1087 while unknown:
1083 1088 n = unknown.pop(0)
1084 1089 if n[0] in seen:
1085 1090 continue
1086 1091
1087 1092 self.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
1088 1093 if n[0] == nullid:
1089 1094 break
1090 1095 if n in seenbranch:
1091 1096 self.ui.debug("branch already found\n")
1092 1097 continue
1093 1098 if n[1] and n[1] in m: # do we know the base?
1094 1099 self.ui.debug("found incomplete branch %s:%s\n"
1095 1100 % (short(n[0]), short(n[1])))
1096 1101 search.append(n) # schedule branch range for scanning
1097 1102 seenbranch[n] = 1
1098 1103 else:
1099 1104 if n[1] not in seen and n[1] not in fetch:
1100 1105 if n[2] in m and n[3] in m:
1101 1106 self.ui.debug("found new changeset %s\n" %
1102 1107 short(n[1]))
1103 1108 fetch.append(n[1]) # earliest unknown
1104 1109 base[n[2]] = 1 # latest known
1105 1110 continue
1106 1111
1107 1112 for a in n[2:4]:
1108 1113 if a not in rep:
1109 1114 r.append(a)
1110 1115 rep[a] = 1
1111 1116
1112 1117 seen[n[0]] = 1
1113 1118
1114 1119 if r:
1115 1120 reqcnt += 1
1116 1121 self.ui.debug("request %d: %s\n" %
1117 1122 (reqcnt, " ".join(map(short, r))))
1118 1123 for p in range(0, len(r), 10):
1119 1124 for b in remote.branches(r[p:p+10]):
1120 1125 self.ui.debug("received %s:%s\n" %
1121 1126 (short(b[0]), short(b[1])))
1122 1127 if b[0] not in m and b[0] not in seen:
1123 1128 unknown.append(b)
1124 1129
1125 1130 # do binary search on the branches we found
1126 1131 while search:
1127 1132 n = search.pop(0)
1128 1133 reqcnt += 1
1129 1134 l = remote.between([(n[0], n[1])])[0]
1130 1135 l.append(n[1])
1131 1136 p = n[0]
1132 1137 f = 1
1133 1138 for i in l:
1134 1139 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1135 1140 if i in m:
1136 1141 if f <= 2:
1137 1142 self.ui.debug("found new branch changeset %s\n" %
1138 1143 short(p))
1139 1144 fetch.append(p)
1140 1145 base[i] = 1
1141 1146 else:
1142 1147 self.ui.debug("narrowed branch search to %s:%s\n"
1143 1148 % (short(p), short(i)))
1144 1149 search.append((p, i))
1145 1150 break
1146 1151 p, f = i, f * 2
1147 1152
1148 1153 # sanity check our fetch list
1149 1154 for f in fetch:
1150 1155 if f in m:
1151 1156 raise RepoError("already have changeset " + short(f[:4]))
1152 1157
1153 1158 if base.keys() == [nullid]:
1154 1159 self.ui.warn("warning: pulling from an unrelated repository!\n")
1155 1160
1156 1161 self.ui.note("adding new changesets starting at " +
1157 1162 " ".join([short(f) for f in fetch]) + "\n")
1158 1163
1159 1164 self.ui.debug("%d total queries\n" % reqcnt)
1160 1165
1161 1166 return fetch
1162 1167
1163 def findoutgoing(self, remote):
1168 def findoutgoing(self, remote, base=None, heads=None):
1169 if base == None:
1164 1170 base = {}
1165 self.findincoming(remote, base)
1171 self.findincoming(remote, base, heads)
1172
1166 1173 remain = dict.fromkeys(self.changelog.nodemap)
1167 1174
1168 1175 # prune everything remote has from the tree
1169 1176 del remain[nullid]
1170 1177 remove = base.keys()
1171 1178 while remove:
1172 1179 n = remove.pop(0)
1173 1180 if n in remain:
1174 1181 del remain[n]
1175 1182 for p in self.changelog.parents(n):
1176 1183 remove.append(p)
1177 1184
1178 1185 # find every node whose parents have been pruned
1179 1186 subset = []
1180 1187 for n in remain:
1181 1188 p1, p2 = self.changelog.parents(n)
1182 1189 if p1 not in remain and p2 not in remain:
1183 1190 subset.append(n)
1184 1191
1185 1192 # this is the set of all roots we have to push
1186 1193 return subset
1187 1194
1188 1195 def pull(self, remote):
1189 1196 lock = self.lock()
1190 1197
1191 1198 # if we have an empty repo, fetch everything
1192 1199 if self.changelog.tip() == nullid:
1193 1200 self.ui.status("requesting all changes\n")
1194 1201 fetch = [nullid]
1195 1202 else:
1196 1203 fetch = self.findincoming(remote)
1197 1204
1198 1205 if not fetch:
1199 1206 self.ui.status("no changes found\n")
1200 1207 return 1
1201 1208
1202 1209 cg = remote.changegroup(fetch)
1203 1210 return self.addchangegroup(cg)
1204 1211
1205 def push(self, remote):
1212 def push(self, remote, force=False):
1206 1213 lock = remote.lock()
1207 update = self.findoutgoing(remote)
1214
1215 base = {}
1216 heads = remote.heads()
1217 inc = self.findincoming(remote, base, heads)
1218 if not force and inc:
1219 self.ui.warn("abort: unsynced remote changes!\n")
1220 self.ui.status("(did you forget to sync? use push -f to force)\n")
1221 return 1
1222
1223 update = self.findoutgoing(remote, base)
1208 1224 if not update:
1209 1225 self.ui.status("no changes found\n")
1210 1226 return 1
1227 elif not force:
1228 if len(heads) < len(self.changelog.heads()):
1229 self.ui.warn("abort: push creates new remote branches!\n")
1230 self.ui.status("(did you forget to merge?" +
1231 " use push -f to force)\n")
1232 return 1
1211 1233
1212 1234 cg = self.changegroup(update)
1213 1235 return remote.addchangegroup(cg)
1214 1236
1215 1237 def changegroup(self, basenodes):
1216 1238 class genread:
1217 1239 def __init__(self, generator):
1218 1240 self.g = generator
1219 1241 self.buf = ""
1220 1242 def read(self, l):
1221 1243 while l > len(self.buf):
1222 1244 try:
1223 1245 self.buf += self.g.next()
1224 1246 except StopIteration:
1225 1247 break
1226 1248 d, self.buf = self.buf[:l], self.buf[l:]
1227 1249 return d
1228 1250
1229 1251 def gengroup():
1230 1252 nodes = self.newer(basenodes)
1231 1253
1232 1254 # construct the link map
1233 1255 linkmap = {}
1234 1256 for n in nodes:
1235 1257 linkmap[self.changelog.rev(n)] = n
1236 1258
1237 1259 # construct a list of all changed files
1238 1260 changed = {}
1239 1261 for n in nodes:
1240 1262 c = self.changelog.read(n)
1241 1263 for f in c[3]:
1242 1264 changed[f] = 1
1243 1265 changed = changed.keys()
1244 1266 changed.sort()
1245 1267
1246 1268 # the changegroup is changesets + manifests + all file revs
1247 1269 revs = [ self.changelog.rev(n) for n in nodes ]
1248 1270
1249 1271 for y in self.changelog.group(linkmap): yield y
1250 1272 for y in self.manifest.group(linkmap): yield y
1251 1273 for f in changed:
1252 1274 yield struct.pack(">l", len(f) + 4) + f
1253 1275 g = self.file(f).group(linkmap)
1254 1276 for y in g:
1255 1277 yield y
1256 1278
1257 1279 yield struct.pack(">l", 0)
1258 1280
1259 1281 return genread(gengroup())
1260 1282
1261 1283 def addchangegroup(self, source):
1262 1284
1263 1285 def getchunk():
1264 1286 d = source.read(4)
1265 1287 if not d: return ""
1266 1288 l = struct.unpack(">l", d)[0]
1267 1289 if l <= 4: return ""
1268 1290 return source.read(l - 4)
1269 1291
1270 1292 def getgroup():
1271 1293 while 1:
1272 1294 c = getchunk()
1273 1295 if not c: break
1274 1296 yield c
1275 1297
1276 1298 def csmap(x):
1277 1299 self.ui.debug("add changeset %s\n" % short(x))
1278 1300 return self.changelog.count()
1279 1301
1280 1302 def revmap(x):
1281 1303 return self.changelog.rev(x)
1282 1304
1283 1305 if not source: return
1284 1306 changesets = files = revisions = 0
1285 1307
1286 1308 tr = self.transaction()
1287 1309
1288 1310 # pull off the changeset group
1289 1311 self.ui.status("adding changesets\n")
1290 1312 co = self.changelog.tip()
1291 1313 cn = self.changelog.addgroup(getgroup(), csmap, tr, 1) # unique
1292 1314 changesets = self.changelog.rev(cn) - self.changelog.rev(co)
1293 1315
1294 1316 # pull off the manifest group
1295 1317 self.ui.status("adding manifests\n")
1296 1318 mm = self.manifest.tip()
1297 1319 mo = self.manifest.addgroup(getgroup(), revmap, tr)
1298 1320
1299 1321 # process the files
1300 1322 self.ui.status("adding file changes\n")
1301 1323 while 1:
1302 1324 f = getchunk()
1303 1325 if not f: break
1304 1326 self.ui.debug("adding %s revisions\n" % f)
1305 1327 fl = self.file(f)
1306 1328 o = fl.count()
1307 1329 n = fl.addgroup(getgroup(), revmap, tr)
1308 1330 revisions += fl.count() - o
1309 1331 files += 1
1310 1332
1311 1333 self.ui.status(("added %d changesets" +
1312 1334 " with %d changes to %d files\n")
1313 1335 % (changesets, revisions, files))
1314 1336
1315 1337 tr.close()
1316 1338
1317 1339 if not self.hook("changegroup"):
1318 1340 return 1
1319 1341
1320 1342 return
1321 1343
1322 1344 def update(self, node, allow=False, force=False, choose=None,
1323 1345 moddirstate=True):
1324 1346 pl = self.dirstate.parents()
1325 1347 if not force and pl[1] != nullid:
1326 1348 self.ui.warn("aborting: outstanding uncommitted merges\n")
1327 1349 return 1
1328 1350
1329 1351 p1, p2 = pl[0], node
1330 1352 pa = self.changelog.ancestor(p1, p2)
1331 1353 m1n = self.changelog.read(p1)[0]
1332 1354 m2n = self.changelog.read(p2)[0]
1333 1355 man = self.manifest.ancestor(m1n, m2n)
1334 1356 m1 = self.manifest.read(m1n)
1335 1357 mf1 = self.manifest.readflags(m1n)
1336 1358 m2 = self.manifest.read(m2n)
1337 1359 mf2 = self.manifest.readflags(m2n)
1338 1360 ma = self.manifest.read(man)
1339 1361 mfa = self.manifest.readflags(man)
1340 1362
1341 1363 (c, a, d, u) = self.changes()
1342 1364
1343 1365 # is this a jump, or a merge? i.e. is there a linear path
1344 1366 # from p1 to p2?
1345 1367 linear_path = (pa == p1 or pa == p2)
1346 1368
1347 1369 # resolve the manifest to determine which files
1348 1370 # we care about merging
1349 1371 self.ui.note("resolving manifests\n")
1350 1372 self.ui.debug(" force %s allow %s moddirstate %s linear %s\n" %
1351 1373 (force, allow, moddirstate, linear_path))
1352 1374 self.ui.debug(" ancestor %s local %s remote %s\n" %
1353 1375 (short(man), short(m1n), short(m2n)))
1354 1376
1355 1377 merge = {}
1356 1378 get = {}
1357 1379 remove = []
1358 1380 mark = {}
1359 1381
1360 1382 # construct a working dir manifest
1361 1383 mw = m1.copy()
1362 1384 mfw = mf1.copy()
1363 1385 umap = dict.fromkeys(u)
1364 1386
1365 1387 for f in a + c + u:
1366 1388 mw[f] = ""
1367 1389 mfw[f] = util.is_exec(self.wjoin(f), mfw.get(f, False))
1368 1390
1369 1391 for f in d:
1370 1392 if f in mw: del mw[f]
1371 1393
1372 1394 # If we're jumping between revisions (as opposed to merging),
1373 1395 # and if neither the working directory nor the target rev has
1374 1396 # the file, then we need to remove it from the dirstate, to
1375 1397 # prevent the dirstate from listing the file when it is no
1376 1398 # longer in the manifest.
1377 1399 if moddirstate and linear_path and f not in m2:
1378 1400 self.dirstate.forget((f,))
1379 1401
1380 1402 # Compare manifests
1381 1403 for f, n in mw.iteritems():
1382 1404 if choose and not choose(f): continue
1383 1405 if f in m2:
1384 1406 s = 0
1385 1407
1386 1408 # is the wfile new since m1, and match m2?
1387 1409 if f not in m1:
1388 1410 t1 = self.wfile(f).read()
1389 1411 t2 = self.file(f).revision(m2[f])
1390 1412 if cmp(t1, t2) == 0:
1391 1413 mark[f] = 1
1392 1414 n = m2[f]
1393 1415 del t1, t2
1394 1416
1395 1417 # are files different?
1396 1418 if n != m2[f]:
1397 1419 a = ma.get(f, nullid)
1398 1420 # are both different from the ancestor?
1399 1421 if n != a and m2[f] != a:
1400 1422 self.ui.debug(" %s versions differ, resolve\n" % f)
1401 1423 # merge executable bits
1402 1424 # "if we changed or they changed, change in merge"
1403 1425 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1404 1426 mode = ((a^b) | (a^c)) ^ a
1405 1427 merge[f] = (m1.get(f, nullid), m2[f], mode)
1406 1428 s = 1
1407 1429 # are we clobbering?
1408 1430 # is remote's version newer?
1409 1431 # or are we going back in time?
1410 1432 elif force or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
1411 1433 self.ui.debug(" remote %s is newer, get\n" % f)
1412 1434 get[f] = m2[f]
1413 1435 s = 1
1414 1436 else:
1415 1437 mark[f] = 1
1416 1438 elif f in umap:
1417 1439 # this unknown file is the same as the checkout
1418 1440 get[f] = m2[f]
1419 1441
1420 1442 if not s and mfw[f] != mf2[f]:
1421 1443 if force:
1422 1444 self.ui.debug(" updating permissions for %s\n" % f)
1423 1445 util.set_exec(self.wjoin(f), mf2[f])
1424 1446 else:
1425 1447 a, b, c = mfa.get(f, 0), mfw[f], mf2[f]
1426 1448 mode = ((a^b) | (a^c)) ^ a
1427 1449 if mode != b:
1428 1450 self.ui.debug(" updating permissions for %s\n" % f)
1429 1451 util.set_exec(self.wjoin(f), mode)
1430 1452 mark[f] = 1
1431 1453 del m2[f]
1432 1454 elif f in ma:
1433 1455 if n != ma[f]:
1434 1456 r = "d"
1435 1457 if not force and (linear_path or allow):
1436 1458 r = self.ui.prompt(
1437 1459 (" local changed %s which remote deleted\n" % f) +
1438 1460 "(k)eep or (d)elete?", "[kd]", "k")
1439 1461 if r == "d":
1440 1462 remove.append(f)
1441 1463 else:
1442 1464 self.ui.debug("other deleted %s\n" % f)
1443 1465 remove.append(f) # other deleted it
1444 1466 else:
1445 1467 if n == m1.get(f, nullid): # same as parent
1446 1468 if p2 == pa: # going backwards?
1447 1469 self.ui.debug("remote deleted %s\n" % f)
1448 1470 remove.append(f)
1449 1471 else:
1450 1472 self.ui.debug("local created %s, keeping\n" % f)
1451 1473 else:
1452 1474 self.ui.debug("working dir created %s, keeping\n" % f)
1453 1475
1454 1476 for f, n in m2.iteritems():
1455 1477 if choose and not choose(f): continue
1456 1478 if f[0] == "/": continue
1457 1479 if f in ma and n != ma[f]:
1458 1480 r = "k"
1459 1481 if not force and (linear_path or allow):
1460 1482 r = self.ui.prompt(
1461 1483 ("remote changed %s which local deleted\n" % f) +
1462 1484 "(k)eep or (d)elete?", "[kd]", "k")
1463 1485 if r == "k": get[f] = n
1464 1486 elif f not in ma:
1465 1487 self.ui.debug("remote created %s\n" % f)
1466 1488 get[f] = n
1467 1489 else:
1468 1490 if force or p2 == pa: # going backwards?
1469 1491 self.ui.debug("local deleted %s, recreating\n" % f)
1470 1492 get[f] = n
1471 1493 else:
1472 1494 self.ui.debug("local deleted %s\n" % f)
1473 1495
1474 1496 del mw, m1, m2, ma
1475 1497
1476 1498 if force:
1477 1499 for f in merge:
1478 1500 get[f] = merge[f][1]
1479 1501 merge = {}
1480 1502
1481 1503 if linear_path or force:
1482 1504 # we don't need to do any magic, just jump to the new rev
1483 1505 mode = 'n'
1484 1506 p1, p2 = p2, nullid
1485 1507 else:
1486 1508 if not allow:
1487 1509 self.ui.status("this update spans a branch" +
1488 1510 " affecting the following files:\n")
1489 1511 fl = merge.keys() + get.keys()
1490 1512 fl.sort()
1491 1513 for f in fl:
1492 1514 cf = ""
1493 1515 if f in merge: cf = " (resolve)"
1494 1516 self.ui.status(" %s%s\n" % (f, cf))
1495 1517 self.ui.warn("aborting update spanning branches!\n")
1496 1518 self.ui.status("(use update -m to merge across branches" +
1497 1519 " or -C to lose changes)\n")
1498 1520 return 1
1499 1521 # we have to remember what files we needed to get/change
1500 1522 # because any file that's different from either one of its
1501 1523 # parents must be in the changeset
1502 1524 mode = 'm'
1503 1525 if moddirstate:
1504 1526 self.dirstate.update(mark.keys(), "m")
1505 1527
1506 1528 if moddirstate:
1507 1529 self.dirstate.setparents(p1, p2)
1508 1530
1509 1531 # get the files we don't need to change
1510 1532 files = get.keys()
1511 1533 files.sort()
1512 1534 for f in files:
1513 1535 if f[0] == "/": continue
1514 1536 self.ui.note("getting %s\n" % f)
1515 1537 t = self.file(f).read(get[f])
1516 1538 try:
1517 1539 self.wfile(f, "w").write(t)
1518 1540 except IOError:
1519 1541 os.makedirs(os.path.dirname(self.wjoin(f)))
1520 1542 self.wfile(f, "w").write(t)
1521 1543 util.set_exec(self.wjoin(f), mf2[f])
1522 1544 if moddirstate:
1523 1545 self.dirstate.update([f], mode)
1524 1546
1525 1547 # merge the tricky bits
1526 1548 files = merge.keys()
1527 1549 files.sort()
1528 1550 for f in files:
1529 1551 self.ui.status("merging %s\n" % f)
1530 1552 m, o, flag = merge[f]
1531 1553 self.merge3(f, m, o)
1532 1554 util.set_exec(self.wjoin(f), flag)
1533 1555 if moddirstate and mode == 'm':
1534 1556 # only update dirstate on branch merge, otherwise we
1535 1557 # could mark files with changes as unchanged
1536 1558 self.dirstate.update([f], mode)
1537 1559
1538 1560 remove.sort()
1539 1561 for f in remove:
1540 1562 self.ui.note("removing %s\n" % f)
1541 1563 try:
1542 1564 os.unlink(f)
1543 1565 except OSError, inst:
1544 1566 self.ui.warn("update failed to remove %s: %s!\n" % (f, inst))
1545 1567 # try removing directories that might now be empty
1546 1568 try: os.removedirs(os.path.dirname(f))
1547 1569 except: pass
1548 1570 if moddirstate:
1549 1571 if mode == 'n':
1550 1572 self.dirstate.forget(remove)
1551 1573 else:
1552 1574 self.dirstate.update(remove, 'r')
1553 1575
1554 1576 def merge3(self, fn, my, other):
1555 1577 """perform a 3-way merge in the working directory"""
1556 1578
1557 1579 def temp(prefix, node):
1558 1580 pre = "%s~%s." % (os.path.basename(fn), prefix)
1559 1581 (fd, name) = tempfile.mkstemp("", pre)
1560 1582 f = os.fdopen(fd, "wb")
1561 1583 f.write(fl.revision(node))
1562 1584 f.close()
1563 1585 return name
1564 1586
1565 1587 fl = self.file(fn)
1566 1588 base = fl.ancestor(my, other)
1567 1589 a = self.wjoin(fn)
1568 1590 b = temp("base", base)
1569 1591 c = temp("other", other)
1570 1592
1571 1593 self.ui.note("resolving %s\n" % fn)
1572 1594 self.ui.debug("file %s: other %s ancestor %s\n" %
1573 1595 (fn, short(other), short(base)))
1574 1596
1575 1597 cmd = (os.environ.get("HGMERGE") or self.ui.config("ui", "merge")
1576 1598 or "hgmerge")
1577 1599 r = os.system("%s %s %s %s" % (cmd, a, b, c))
1578 1600 if r:
1579 1601 self.ui.warn("merging %s failed!\n" % fn)
1580 1602
1581 1603 os.unlink(b)
1582 1604 os.unlink(c)
1583 1605
1584 1606 def verify(self):
1585 1607 filelinkrevs = {}
1586 1608 filenodes = {}
1587 1609 changesets = revisions = files = 0
1588 1610 errors = 0
1589 1611
1590 1612 seen = {}
1591 1613 self.ui.status("checking changesets\n")
1592 1614 for i in range(self.changelog.count()):
1593 1615 changesets += 1
1594 1616 n = self.changelog.node(i)
1595 1617 if n in seen:
1596 1618 self.ui.warn("duplicate changeset at revision %d\n" % i)
1597 1619 errors += 1
1598 1620 seen[n] = 1
1599 1621
1600 1622 for p in self.changelog.parents(n):
1601 1623 if p not in self.changelog.nodemap:
1602 1624 self.ui.warn("changeset %s has unknown parent %s\n" %
1603 1625 (short(n), short(p)))
1604 1626 errors += 1
1605 1627 try:
1606 1628 changes = self.changelog.read(n)
1607 1629 except Exception, inst:
1608 1630 self.ui.warn("unpacking changeset %s: %s\n" % (short(n), inst))
1609 1631 errors += 1
1610 1632
1611 1633 for f in changes[3]:
1612 1634 filelinkrevs.setdefault(f, []).append(i)
1613 1635
1614 1636 seen = {}
1615 1637 self.ui.status("checking manifests\n")
1616 1638 for i in range(self.manifest.count()):
1617 1639 n = self.manifest.node(i)
1618 1640 if n in seen:
1619 1641 self.ui.warn("duplicate manifest at revision %d\n" % i)
1620 1642 errors += 1
1621 1643 seen[n] = 1
1622 1644
1623 1645 for p in self.manifest.parents(n):
1624 1646 if p not in self.manifest.nodemap:
1625 1647 self.ui.warn("manifest %s has unknown parent %s\n" %
1626 1648 (short(n), short(p)))
1627 1649 errors += 1
1628 1650
1629 1651 try:
1630 1652 delta = mdiff.patchtext(self.manifest.delta(n))
1631 1653 except KeyboardInterrupt:
1632 1654 self.ui.warn("aborted")
1633 1655 sys.exit(0)
1634 1656 except Exception, inst:
1635 1657 self.ui.warn("unpacking manifest %s: %s\n"
1636 1658 % (short(n), inst))
1637 1659 errors += 1
1638 1660
1639 1661 ff = [ l.split('\0') for l in delta.splitlines() ]
1640 1662 for f, fn in ff:
1641 1663 filenodes.setdefault(f, {})[bin(fn[:40])] = 1
1642 1664
1643 1665 self.ui.status("crosschecking files in changesets and manifests\n")
1644 1666 for f in filenodes:
1645 1667 if f not in filelinkrevs:
1646 1668 self.ui.warn("file %s in manifest but not in changesets\n" % f)
1647 1669 errors += 1
1648 1670
1649 1671 for f in filelinkrevs:
1650 1672 if f not in filenodes:
1651 1673 self.ui.warn("file %s in changeset but not in manifest\n" % f)
1652 1674 errors += 1
1653 1675
1654 1676 self.ui.status("checking files\n")
1655 1677 ff = filenodes.keys()
1656 1678 ff.sort()
1657 1679 for f in ff:
1658 1680 if f == "/dev/null": continue
1659 1681 files += 1
1660 1682 fl = self.file(f)
1661 1683 nodes = { nullid: 1 }
1662 1684 seen = {}
1663 1685 for i in range(fl.count()):
1664 1686 revisions += 1
1665 1687 n = fl.node(i)
1666 1688
1667 1689 if n in seen:
1668 1690 self.ui.warn("%s: duplicate revision %d\n" % (f, i))
1669 1691 errors += 1
1670 1692
1671 1693 if n not in filenodes[f]:
1672 1694 self.ui.warn("%s: %d:%s not in manifests\n"
1673 1695 % (f, i, short(n)))
1674 1696 errors += 1
1675 1697 else:
1676 1698 del filenodes[f][n]
1677 1699
1678 1700 flr = fl.linkrev(n)
1679 1701 if flr not in filelinkrevs[f]:
1680 1702 self.ui.warn("%s:%s points to unexpected changeset %d\n"
1681 1703 % (f, short(n), fl.linkrev(n)))
1682 1704 errors += 1
1683 1705 else:
1684 1706 filelinkrevs[f].remove(flr)
1685 1707
1686 1708 # verify contents
1687 1709 try:
1688 1710 t = fl.read(n)
1689 1711 except Exception, inst:
1690 1712 self.ui.warn("unpacking file %s %s: %s\n"
1691 1713 % (f, short(n), inst))
1692 1714 errors += 1
1693 1715
1694 1716 # verify parents
1695 1717 (p1, p2) = fl.parents(n)
1696 1718 if p1 not in nodes:
1697 1719 self.ui.warn("file %s:%s unknown parent 1 %s" %
1698 1720 (f, short(n), short(p1)))
1699 1721 errors += 1
1700 1722 if p2 not in nodes:
1701 1723 self.ui.warn("file %s:%s unknown parent 2 %s" %
1702 1724 (f, short(n), short(p1)))
1703 1725 errors += 1
1704 1726 nodes[n] = 1
1705 1727
1706 1728 # cross-check
1707 1729 for node in filenodes[f]:
1708 1730 self.ui.warn("node %s in manifests not in %s\n"
1709 1731 % (hex(node), f))
1710 1732 errors += 1
1711 1733
1712 1734 self.ui.status("%d files, %d changesets, %d total revisions\n" %
1713 1735 (files, changesets, revisions))
1714 1736
1715 1737 if errors:
1716 1738 self.ui.warn("%d integrity errors encountered!\n" % errors)
1717 1739 return 1
1718 1740
1719 1741 class httprepository:
1720 1742 def __init__(self, ui, path):
1721 1743 # fix missing / after hostname
1722 1744 s = urlparse.urlsplit(path)
1723 1745 partial = s[2]
1724 1746 if not partial: partial = "/"
1725 1747 self.url = urlparse.urlunsplit((s[0], s[1], partial, '', ''))
1726 1748 self.ui = ui
1727 1749 no_list = [ "localhost", "127.0.0.1" ]
1728 1750 host = ui.config("http_proxy", "host")
1729 1751 if host is None:
1730 1752 host = os.environ.get("http_proxy")
1731 1753 if host and host.startswith('http://'):
1732 1754 host = host[7:]
1733 1755 user = ui.config("http_proxy", "user")
1734 1756 passwd = ui.config("http_proxy", "passwd")
1735 1757 no = ui.config("http_proxy", "no")
1736 1758 if no is None:
1737 1759 no = os.environ.get("no_proxy")
1738 1760 if no:
1739 1761 no_list = no_list + no.split(",")
1740 1762
1741 1763 no_proxy = 0
1742 1764 for h in no_list:
1743 1765 if (path.startswith("http://" + h + "/") or
1744 1766 path.startswith("http://" + h + ":") or
1745 1767 path == "http://" + h):
1746 1768 no_proxy = 1
1747 1769
1748 1770 # Note: urllib2 takes proxy values from the environment and those will
1749 1771 # take precedence
1750 1772 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
1751 1773 if os.environ.has_key(env):
1752 1774 del os.environ[env]
1753 1775
1754 1776 proxy_handler = urllib2.BaseHandler()
1755 1777 if host and not no_proxy:
1756 1778 proxy_handler = urllib2.ProxyHandler({"http" : "http://" + host})
1757 1779
1758 1780 authinfo = None
1759 1781 if user and passwd:
1760 1782 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
1761 1783 passmgr.add_password(None, host, user, passwd)
1762 1784 authinfo = urllib2.ProxyBasicAuthHandler(passmgr)
1763 1785
1764 1786 opener = urllib2.build_opener(proxy_handler, authinfo)
1765 1787 urllib2.install_opener(opener)
1766 1788
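# Minimal sketch of the proxy selection above, in isolation.  It uses only
# standard urllib2 handlers; proxy_host, no_list, target, user and passwd are
# illustrative parameters, not names defined in this module.
import urllib2

def make_opener(proxy_host, no_list, target, user=None, passwd=None):
    # go direct when the target URL points at a host on the no-proxy list
    direct = 0
    for h in no_list:
        if (target.startswith("http://" + h + "/") or
            target.startswith("http://" + h + ":") or
            target == "http://" + h):
            direct = 1
    handlers = []
    if proxy_host and not direct:
        handlers.append(urllib2.ProxyHandler({"http": "http://" + proxy_host}))
        if user and passwd:
            mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            mgr.add_password(None, proxy_host, user, passwd)
            handlers.append(urllib2.ProxyBasicAuthHandler(mgr))
    return urllib2.build_opener(*handlers)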
1767 1789 def dev(self):
1768 1790 return -1
1769 1791
1770 1792 def do_cmd(self, cmd, **args):
1771 1793 self.ui.debug("sending %s command\n" % cmd)
1772 1794 q = {"cmd": cmd}
1773 1795 q.update(args)
1774 1796 qs = urllib.urlencode(q)
1775 1797 cu = "%s?%s" % (self.url, qs)
1776 1798 resp = urllib2.urlopen(cu)
1777 1799 proto = resp.headers['content-type']
1778 1800
1779 1801 # accept old "text/plain" and "application/hg-changegroup" for now
1780 1802 if not proto.startswith('application/mercurial') and \
1781 1803 not proto.startswith('text/plain') and \
1782 1804 not proto.startswith('application/hg-changegroup'):
1783 1805 raise RepoError("'%s' does not appear to be an hg repository"
1784 1806 % self.url)
1785 1807
1786 1808 if proto.startswith('application/mercurial'):
1787 1809 version = proto[22:]
1788 1810 if float(version) > 0.1:
1789 1811 raise RepoError("'%s' uses newer protocol %s" %
1790 1812 (self.url, version))
1791 1813
1792 1814 return resp
1793 1815
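# Sketch of the version check above (illustrative): the server advertises its
# protocol in the content-type as "application/mercurial-X.Y", and the client
# refuses anything newer than 0.1.
def proto_version(content_type):
    prefix = "application/mercurial-"
    if content_type.startswith(prefix):
        return float(content_type[len(prefix):])
    return None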
1794 1816 def heads(self):
1795 1817 d = self.do_cmd("heads").read()
1796 1818 try:
1797 1819 return map(bin, d[:-1].split(" "))
1798 1820 except:
1799 1821 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1800 1822 raise
1801 1823
1802 1824 def branches(self, nodes):
1803 1825 n = " ".join(map(hex, nodes))
1804 1826 d = self.do_cmd("branches", nodes=n).read()
1805 1827 try:
1806 1828 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1807 1829 return br
1808 1830 except:
1809 1831 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1810 1832 raise
1811 1833
1812 1834 def between(self, pairs):
1813 1835 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1814 1836 d = self.do_cmd("between", pairs=n).read()
1815 1837 try:
1816 1838 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1817 1839 return p
1818 1840 except:
1819 1841 self.ui.warn("unexpected response:\n" + d[:400] + "\n...\n")
1820 1842 raise
1821 1843
1822 1844 def changegroup(self, nodes):
1823 1845 n = " ".join(map(hex, nodes))
1824 1846 f = self.do_cmd("changegroup", roots=n)
1825 1847 bytes = 0
1826 1848
1827 1849 class zread:
1828 1850 def __init__(self, f):
1829 1851 self.zd = zlib.decompressobj()
1830 1852 self.f = f
1831 1853 self.buf = ""
1832 1854 def read(self, l):
1833 1855 while l > len(self.buf):
1834 1856 r = self.f.read(4096)
1835 1857 if r:
1836 1858 self.buf += self.zd.decompress(r)
1837 1859 else:
1838 1860 self.buf += self.zd.flush()
1839 1861 break
1840 1862 d, self.buf = self.buf[:l], self.buf[l:]
1841 1863 return d
1842 1864
1843 1865 return zread(f)
1844 1866
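# Standalone sketch of the incremental decompression zread performs: feed
# compressed chunks to a zlib.decompressobj as they arrive instead of holding
# the whole changegroup in memory.  read_chunk is an illustrative callable
# returning the next compressed chunk, or "" at end of stream.
import zlib

def decompress_stream(read_chunk):
    zd = zlib.decompressobj()
    out = []
    while 1:
        chunk = read_chunk()
        if not chunk:
            out.append(zd.flush())
            break
        out.append(zd.decompress(chunk))
    return "".join(out)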
1845 1867 class remotelock:
1846 1868 def __init__(self, repo):
1847 1869 self.repo = repo
1848 1870 def release(self):
1849 1871 self.repo.unlock()
1850 1872 self.repo = None
1851 1873 def __del__(self):
1852 1874 if self.repo:
1853 1875 self.release()
1854 1876
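# Usage sketch for remotelock (illustrative): release the remote lock
# explicitly once the push is done; __del__ acts only as a safety net.
# `remote` stands for an sshrepository, `cg` for a changegroup stream.
def locked_push(remote, cg):
    l = remote.lock()
    try:
        return remote.addchangegroup(cg)
    finally:
        l.release()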
1855 1877 class sshrepository:
1856 1878 def __init__(self, ui, path):
1857 1879 self.url = path
1858 1880 self.ui = ui
1859 1881
1860 1882 m = re.match(r'ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?', path)
1861 1883 if not m:
1862 1884 raise RepoError("couldn't parse destination %s" % path)
1863 1885
1864 1886 self.user = m.group(2)
1865 1887 self.host = m.group(3)
1866 1888 self.port = m.group(5)
1867 1889 self.path = m.group(7)
1868 1890
1869 1891 args = self.user and ("%s@%s" % (self.user, self.host)) or self.host
1870 1892 args = self.port and ("%s -p %s" % (args, self.port)) or args
1871 1893 path = self.path or ""
1872 1894
1873 1895 cmd = "ssh %s 'hg -R %s serve --stdio'"
1874 1896 cmd = cmd % (args, path)
1875 1897
1876 1898 self.pipeo, self.pipei, self.pipee = os.popen3(cmd)
1877 1899
1878 1900 def readerr(self):
1879 1901 while 1:
1880 1902 r,w,x = select.select([self.pipee], [], [], 0)
1881 1903 if not r: break
1882 1904 l = self.pipee.readline()
1883 1905 if not l: break
1884 1906 self.ui.status("remote: ", l)
1885 1907
1886 1908 def __del__(self):
1887 1909 self.pipeo.close()
1888 1910 self.pipei.close()
1889 1911 for l in self.pipee:
1890 1912 self.ui.status("remote: ", l)
1891 1913 self.pipee.close()
1892 1914
1893 1915 def dev(self):
1894 1916 return -1
1895 1917
1896 1918 def do_cmd(self, cmd, **args):
1897 1919 self.ui.debug("sending %s command\n" % cmd)
1898 1920 self.pipeo.write("%s\n" % cmd)
1899 1921 for k, v in args.items():
1900 1922 self.pipeo.write("%s %d\n" % (k, len(v)))
1901 1923 self.pipeo.write(v)
1902 1924 self.pipeo.flush()
1903 1925
1904 1926 return self.pipei
1905 1927
1906 1928 def call(self, cmd, **args):
1907 1929 r = self.do_cmd(cmd, **args)
1908 1930 l = r.readline()
1909 1931 self.readerr()
1910 1932 try:
1911 1933 l = int(l)
1912 1934 except:
1913 1935 raise RepoError("unexpected response '%s'" % l)
1914 1936 return r.read(l)
1915 1937
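# Sketch of the framing call() relies on (illustrative): each reply is a
# decimal byte count on a line of its own, followed by exactly that many
# bytes of payload.
def read_frame(pipe):
    header = pipe.readline()
    try:
        length = int(header)
    except ValueError:
        raise RepoError("unexpected response '%s'" % header)
    return pipe.read(length)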
1916 1938 def lock(self):
1917 1939 self.call("lock")
1918 1940 return remotelock(self)
1919 1941
1920 1942 def unlock(self):
1921 1943 self.call("unlock")
1922 1944
1923 1945 def heads(self):
1924 1946 d = self.call("heads")
1925 1947 try:
1926 1948 return map(bin, d[:-1].split(" "))
1927 1949 except:
1928 1950 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1929 1951
1930 1952 def branches(self, nodes):
1931 1953 n = " ".join(map(hex, nodes))
1932 1954 d = self.call("branches", nodes=n)
1933 1955 try:
1934 1956 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
1935 1957 return br
1936 1958 except:
1937 1959 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1938 1960
1939 1961 def between(self, pairs):
1940 1962 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
1941 1963 d = self.call("between", pairs=n)
1942 1964 try:
1943 1965 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
1944 1966 return p
1945 1967 except:
1946 1968 raise RepoError("unexpected response '%s'" % (d[:400] + "..."))
1947 1969
1948 1970 def changegroup(self, nodes):
1949 1971 n = " ".join(map(hex, nodes))
1950 1972 f = self.do_cmd("changegroup", roots=n)
1951 1973 return self.pipei
1952 1974
1953 1975 def addchangegroup(self, cg):
1954 1976 d = self.call("addchangegroup")
1955 1977 if d:
1956 1978 raise RepoError("push refused: %s" % d)
1957 1979
1958 1980 while 1:
1959 1981 d = cg.read(4096)
1960 1982 if not d: break
1961 1983 self.pipeo.write(d)
1962 1984 self.readerr()
1963 1985
1964 1986 self.pipeo.flush()
1965 1987
1966 1988 self.readerr()
1967 1989 l = int(self.pipei.readline())
1968 1990 return self.pipei.read(l) != ""
1969 1991
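# Minimal sketch of the chunked copy addchangegroup performs: stream the
# changegroup into the pipe in 4096-byte pieces so neither side buffers the
# whole bundle.  src and dst are illustrative file-like objects.
def copy_stream(src, dst, chunk=4096):
    while 1:
        d = src.read(chunk)
        if not d:
            break
        dst.write(d)
    dst.flush()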
1970 1992 def repository(ui, path=None, create=0):
1971 1993 if path:
1972 1994 if path.startswith("http://"):
1973 1995 return httprepository(ui, path)
1974 1996 if path.startswith("hg://"):
1975 1997 return httprepository(ui, path.replace("hg://", "http://"))
1976 1998 if path.startswith("old-http://"):
1977 1999 return localrepository(ui, path.replace("old-http://", "http://"))
1978 2000 if path.startswith("ssh://"):
1979 2001 return sshrepository(ui, path)
1980 2002
1981 2003 return localrepository(ui, path, create)
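# Dispatch sketch (illustrative, assuming `u` is a ui instance): the factory
# picks the repository class from the URL scheme, so callers need not care
# whether the target is local, HTTP or ssh.
#
#     repository(u, "/some/local/path")      # localrepository
#     repository(u, "http://host/repo")      # httprepository
#     repository(u, "hg://host/repo")        # httprepository (scheme rewritten)
#     repository(u, "old-http://host/repo")  # localrepository, URL rewritten to http://
#     repository(u, "ssh://user@host/path")  # sshrepository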