##// END OF EJS Templates
dirstate: break update into separate functions
Matt Mackall -
r4904:6fd953d5 default
parent child Browse files
Show More
@@ -1,97 +1,97 b''
1 1 # hg backend for convert extension
2 2
3 3 import os, time
4 4 from mercurial import hg
5 5
6 6 from common import NoRepo, converter_sink
7 7
8 8 class convert_mercurial(converter_sink):
9 9 def __init__(self, ui, path):
10 10 self.path = path
11 11 self.ui = ui
12 12 try:
13 13 self.repo = hg.repository(self.ui, path)
14 14 except:
15 15 raise NoRepo("could open hg repo %s" % path)
16 16
17 17 def mapfile(self):
18 18 return os.path.join(self.path, ".hg", "shamap")
19 19
20 20 def authorfile(self):
21 21 return os.path.join(self.path, ".hg", "authormap")
22 22
23 23 def getheads(self):
24 24 h = self.repo.changelog.heads()
25 25 return [ hg.hex(x) for x in h ]
26 26
27 27 def putfile(self, f, e, data):
28 28 self.repo.wwrite(f, data, e)
29 29 if self.repo.dirstate.state(f) == '?':
30 self.repo.dirstate.update([f], "a")
30 self.repo.dirstate.add(f)
31 31
32 32 def copyfile(self, source, dest):
33 33 self.repo.copy(source, dest)
34 34
35 35 def delfile(self, f):
36 36 try:
37 37 os.unlink(self.repo.wjoin(f))
38 38 #self.repo.remove([f])
39 39 except:
40 40 pass
41 41
42 42 def putcommit(self, files, parents, commit):
43 43 seen = {}
44 44 pl = []
45 45 for p in parents:
46 46 if p not in seen:
47 47 pl.append(p)
48 48 seen[p] = 1
49 49 parents = pl
50 50
51 51 if len(parents) < 2: parents.append("0" * 40)
52 52 if len(parents) < 2: parents.append("0" * 40)
53 53 p2 = parents.pop(0)
54 54
55 55 text = commit.desc
56 56 extra = {}
57 57 if commit.branch:
58 58 extra['branch'] = commit.branch
59 59 if commit.rev:
60 60 extra['convert_revision'] = commit.rev
61 61
62 62 while parents:
63 63 p1 = p2
64 64 p2 = parents.pop(0)
65 65 a = self.repo.rawcommit(files, text, commit.author, commit.date,
66 66 hg.bin(p1), hg.bin(p2), extra=extra)
67 67 text = "(octopus merge fixup)\n"
68 68 p2 = hg.hex(self.repo.changelog.tip())
69 69
70 70 return p2
71 71
72 72 def puttags(self, tags):
73 73 try:
74 74 old = self.repo.wfile(".hgtags").read()
75 75 oldlines = old.splitlines(1)
76 76 oldlines.sort()
77 77 except:
78 78 oldlines = []
79 79
80 80 k = tags.keys()
81 81 k.sort()
82 82 newlines = []
83 83 for tag in k:
84 84 newlines.append("%s %s\n" % (tags[tag], tag))
85 85
86 86 newlines.sort()
87 87
88 88 if newlines != oldlines:
89 89 self.ui.status("updating tags\n")
90 90 f = self.repo.wfile(".hgtags", "w")
91 91 f.write("".join(newlines))
92 92 f.close()
93 93 if not oldlines: self.repo.add([".hgtags"])
94 94 date = "%s 0" % int(time.mktime(time.gmtime()))
95 95 self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
96 96 date, self.repo.changelog.tip(), hg.nullid)
97 97 return hg.hex(self.repo.changelog.tip())
@@ -1,2229 +1,2235 b''
1 1 # queue.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''patch management and development
9 9
10 10 This extension lets you work with a stack of patches in a Mercurial
11 11 repository. It manages two stacks of patches - all known patches, and
12 12 applied patches (subset of known patches).
13 13
14 14 Known patches are represented as patch files in the .hg/patches
15 15 directory. Applied patches are both patch files and changesets.
16 16
17 17 Common tasks (use "hg help command" for more details):
18 18
19 19 prepare repository to work with patches qinit
20 20 create new patch qnew
21 21 import existing patch qimport
22 22
23 23 print patch series qseries
24 24 print applied patches qapplied
25 25 print name of top applied patch qtop
26 26
27 27 add known patch to applied stack qpush
28 28 remove patch from applied stack qpop
29 29 refresh contents of top applied patch qrefresh
30 30 '''
31 31
32 32 from mercurial.i18n import _
33 33 from mercurial import commands, cmdutil, hg, patch, revlog, util
34 34 from mercurial import repair
35 35 import os, sys, re, errno
36 36
37 37 commands.norepo += " qclone qversion"
38 38
39 39 # Patch names looks like unix-file names.
40 40 # They must be joinable with queue directory and result in the patch path.
41 41 normname = util.normpath
42 42
43 43 class statusentry:
44 44 def __init__(self, rev, name=None):
45 45 if not name:
46 46 fields = rev.split(':', 1)
47 47 if len(fields) == 2:
48 48 self.rev, self.name = fields
49 49 else:
50 50 self.rev, self.name = None, None
51 51 else:
52 52 self.rev, self.name = rev, name
53 53
54 54 def __str__(self):
55 55 return self.rev + ':' + self.name
56 56
57 57 class queue:
58 58 def __init__(self, ui, path, patchdir=None):
59 59 self.basepath = path
60 60 self.path = patchdir or os.path.join(path, "patches")
61 61 self.opener = util.opener(self.path)
62 62 self.ui = ui
63 63 self.applied = []
64 64 self.full_series = []
65 65 self.applied_dirty = 0
66 66 self.series_dirty = 0
67 67 self.series_path = "series"
68 68 self.status_path = "status"
69 69 self.guards_path = "guards"
70 70 self.active_guards = None
71 71 self.guards_dirty = False
72 72 self._diffopts = None
73 73
74 74 if os.path.exists(self.join(self.series_path)):
75 75 self.full_series = self.opener(self.series_path).read().splitlines()
76 76 self.parse_series()
77 77
78 78 if os.path.exists(self.join(self.status_path)):
79 79 lines = self.opener(self.status_path).read().splitlines()
80 80 self.applied = [statusentry(l) for l in lines]
81 81
82 82 def diffopts(self):
83 83 if self._diffopts is None:
84 84 self._diffopts = patch.diffopts(self.ui)
85 85 return self._diffopts
86 86
87 87 def join(self, *p):
88 88 return os.path.join(self.path, *p)
89 89
90 90 def find_series(self, patch):
91 91 pre = re.compile("(\s*)([^#]+)")
92 92 index = 0
93 93 for l in self.full_series:
94 94 m = pre.match(l)
95 95 if m:
96 96 s = m.group(2)
97 97 s = s.rstrip()
98 98 if s == patch:
99 99 return index
100 100 index += 1
101 101 return None
102 102
103 103 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
104 104
105 105 def parse_series(self):
106 106 self.series = []
107 107 self.series_guards = []
108 108 for l in self.full_series:
109 109 h = l.find('#')
110 110 if h == -1:
111 111 patch = l
112 112 comment = ''
113 113 elif h == 0:
114 114 continue
115 115 else:
116 116 patch = l[:h]
117 117 comment = l[h:]
118 118 patch = patch.strip()
119 119 if patch:
120 120 if patch in self.series:
121 121 raise util.Abort(_('%s appears more than once in %s') %
122 122 (patch, self.join(self.series_path)))
123 123 self.series.append(patch)
124 124 self.series_guards.append(self.guard_re.findall(comment))
125 125
126 126 def check_guard(self, guard):
127 127 bad_chars = '# \t\r\n\f'
128 128 first = guard[0]
129 129 for c in '-+':
130 130 if first == c:
131 131 return (_('guard %r starts with invalid character: %r') %
132 132 (guard, c))
133 133 for c in bad_chars:
134 134 if c in guard:
135 135 return _('invalid character in guard %r: %r') % (guard, c)
136 136
137 137 def set_active(self, guards):
138 138 for guard in guards:
139 139 bad = self.check_guard(guard)
140 140 if bad:
141 141 raise util.Abort(bad)
142 142 guards = dict.fromkeys(guards).keys()
143 143 guards.sort()
144 144 self.ui.debug('active guards: %s\n' % ' '.join(guards))
145 145 self.active_guards = guards
146 146 self.guards_dirty = True
147 147
148 148 def active(self):
149 149 if self.active_guards is None:
150 150 self.active_guards = []
151 151 try:
152 152 guards = self.opener(self.guards_path).read().split()
153 153 except IOError, err:
154 154 if err.errno != errno.ENOENT: raise
155 155 guards = []
156 156 for i, guard in enumerate(guards):
157 157 bad = self.check_guard(guard)
158 158 if bad:
159 159 self.ui.warn('%s:%d: %s\n' %
160 160 (self.join(self.guards_path), i + 1, bad))
161 161 else:
162 162 self.active_guards.append(guard)
163 163 return self.active_guards
164 164
165 165 def set_guards(self, idx, guards):
166 166 for g in guards:
167 167 if len(g) < 2:
168 168 raise util.Abort(_('guard %r too short') % g)
169 169 if g[0] not in '-+':
170 170 raise util.Abort(_('guard %r starts with invalid char') % g)
171 171 bad = self.check_guard(g[1:])
172 172 if bad:
173 173 raise util.Abort(bad)
174 174 drop = self.guard_re.sub('', self.full_series[idx])
175 175 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
176 176 self.parse_series()
177 177 self.series_dirty = True
178 178
179 179 def pushable(self, idx):
180 180 if isinstance(idx, str):
181 181 idx = self.series.index(idx)
182 182 patchguards = self.series_guards[idx]
183 183 if not patchguards:
184 184 return True, None
185 185 default = False
186 186 guards = self.active()
187 187 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
188 188 if exactneg:
189 189 return False, exactneg[0]
190 190 pos = [g for g in patchguards if g[0] == '+']
191 191 exactpos = [g for g in pos if g[1:] in guards]
192 192 if pos:
193 193 if exactpos:
194 194 return True, exactpos[0]
195 195 return False, pos
196 196 return True, ''
197 197
198 198 def explain_pushable(self, idx, all_patches=False):
199 199 write = all_patches and self.ui.write or self.ui.warn
200 200 if all_patches or self.ui.verbose:
201 201 if isinstance(idx, str):
202 202 idx = self.series.index(idx)
203 203 pushable, why = self.pushable(idx)
204 204 if all_patches and pushable:
205 205 if why is None:
206 206 write(_('allowing %s - no guards in effect\n') %
207 207 self.series[idx])
208 208 else:
209 209 if not why:
210 210 write(_('allowing %s - no matching negative guards\n') %
211 211 self.series[idx])
212 212 else:
213 213 write(_('allowing %s - guarded by %r\n') %
214 214 (self.series[idx], why))
215 215 if not pushable:
216 216 if why:
217 217 write(_('skipping %s - guarded by %r\n') %
218 218 (self.series[idx], why))
219 219 else:
220 220 write(_('skipping %s - no matching guards\n') %
221 221 self.series[idx])
222 222
223 223 def save_dirty(self):
224 224 def write_list(items, path):
225 225 fp = self.opener(path, 'w')
226 226 for i in items:
227 227 print >> fp, i
228 228 fp.close()
229 229 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
230 230 if self.series_dirty: write_list(self.full_series, self.series_path)
231 231 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
232 232
233 233 def readheaders(self, patch):
234 234 def eatdiff(lines):
235 235 while lines:
236 236 l = lines[-1]
237 237 if (l.startswith("diff -") or
238 238 l.startswith("Index:") or
239 239 l.startswith("===========")):
240 240 del lines[-1]
241 241 else:
242 242 break
243 243 def eatempty(lines):
244 244 while lines:
245 245 l = lines[-1]
246 246 if re.match('\s*$', l):
247 247 del lines[-1]
248 248 else:
249 249 break
250 250
251 251 pf = self.join(patch)
252 252 message = []
253 253 comments = []
254 254 user = None
255 255 date = None
256 256 format = None
257 257 subject = None
258 258 diffstart = 0
259 259
260 260 for line in file(pf):
261 261 line = line.rstrip()
262 262 if line.startswith('diff --git'):
263 263 diffstart = 2
264 264 break
265 265 if diffstart:
266 266 if line.startswith('+++ '):
267 267 diffstart = 2
268 268 break
269 269 if line.startswith("--- "):
270 270 diffstart = 1
271 271 continue
272 272 elif format == "hgpatch":
273 273 # parse values when importing the result of an hg export
274 274 if line.startswith("# User "):
275 275 user = line[7:]
276 276 elif line.startswith("# Date "):
277 277 date = line[7:]
278 278 elif not line.startswith("# ") and line:
279 279 message.append(line)
280 280 format = None
281 281 elif line == '# HG changeset patch':
282 282 format = "hgpatch"
283 283 elif (format != "tagdone" and (line.startswith("Subject: ") or
284 284 line.startswith("subject: "))):
285 285 subject = line[9:]
286 286 format = "tag"
287 287 elif (format != "tagdone" and (line.startswith("From: ") or
288 288 line.startswith("from: "))):
289 289 user = line[6:]
290 290 format = "tag"
291 291 elif format == "tag" and line == "":
292 292 # when looking for tags (subject: from: etc) they
293 293 # end once you find a blank line in the source
294 294 format = "tagdone"
295 295 elif message or line:
296 296 message.append(line)
297 297 comments.append(line)
298 298
299 299 eatdiff(message)
300 300 eatdiff(comments)
301 301 eatempty(message)
302 302 eatempty(comments)
303 303
304 304 # make sure message isn't empty
305 305 if format and format.startswith("tag") and subject:
306 306 message.insert(0, "")
307 307 message.insert(0, subject)
308 308 return (message, comments, user, date, diffstart > 1)
309 309
310 310 def removeundo(self, repo):
311 311 undo = repo.sjoin('undo')
312 312 if not os.path.exists(undo):
313 313 return
314 314 try:
315 315 os.unlink(undo)
316 316 except OSError, inst:
317 317 self.ui.warn('error removing undo: %s\n' % str(inst))
318 318
319 319 def printdiff(self, repo, node1, node2=None, files=None,
320 320 fp=None, changes=None, opts={}):
321 321 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
322 322
323 323 patch.diff(repo, node1, node2, fns, match=matchfn,
324 324 fp=fp, changes=changes, opts=self.diffopts())
325 325
326 326 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
327 327 # first try just applying the patch
328 328 (err, n) = self.apply(repo, [ patch ], update_status=False,
329 329 strict=True, merge=rev, wlock=wlock)
330 330
331 331 if err == 0:
332 332 return (err, n)
333 333
334 334 if n is None:
335 335 raise util.Abort(_("apply failed for patch %s") % patch)
336 336
337 337 self.ui.warn("patch didn't work out, merging %s\n" % patch)
338 338
339 339 # apply failed, strip away that rev and merge.
340 340 hg.clean(repo, head, wlock=wlock)
341 341 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
342 342
343 343 ctx = repo.changectx(rev)
344 344 ret = hg.merge(repo, rev, wlock=wlock)
345 345 if ret:
346 346 raise util.Abort(_("update returned %d") % ret)
347 347 n = repo.commit(None, ctx.description(), ctx.user(),
348 348 force=1, wlock=wlock)
349 349 if n == None:
350 350 raise util.Abort(_("repo commit failed"))
351 351 try:
352 352 message, comments, user, date, patchfound = mergeq.readheaders(patch)
353 353 except:
354 354 raise util.Abort(_("unable to read %s") % patch)
355 355
356 356 patchf = self.opener(patch, "w")
357 357 if comments:
358 358 comments = "\n".join(comments) + '\n\n'
359 359 patchf.write(comments)
360 360 self.printdiff(repo, head, n, fp=patchf)
361 361 patchf.close()
362 362 self.removeundo(repo)
363 363 return (0, n)
364 364
365 365 def qparents(self, repo, rev=None):
366 366 if rev is None:
367 367 (p1, p2) = repo.dirstate.parents()
368 368 if p2 == revlog.nullid:
369 369 return p1
370 370 if len(self.applied) == 0:
371 371 return None
372 372 return revlog.bin(self.applied[-1].rev)
373 373 pp = repo.changelog.parents(rev)
374 374 if pp[1] != revlog.nullid:
375 375 arevs = [ x.rev for x in self.applied ]
376 376 p0 = revlog.hex(pp[0])
377 377 p1 = revlog.hex(pp[1])
378 378 if p0 in arevs:
379 379 return pp[0]
380 380 if p1 in arevs:
381 381 return pp[1]
382 382 return pp[0]
383 383
384 384 def mergepatch(self, repo, mergeq, series, wlock):
385 385 if len(self.applied) == 0:
386 386 # each of the patches merged in will have two parents. This
387 387 # can confuse the qrefresh, qdiff, and strip code because it
388 388 # needs to know which parent is actually in the patch queue.
389 389 # so, we insert a merge marker with only one parent. This way
390 390 # the first patch in the queue is never a merge patch
391 391 #
392 392 pname = ".hg.patches.merge.marker"
393 393 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
394 394 wlock=wlock)
395 395 self.removeundo(repo)
396 396 self.applied.append(statusentry(revlog.hex(n), pname))
397 397 self.applied_dirty = 1
398 398
399 399 head = self.qparents(repo)
400 400
401 401 for patch in series:
402 402 patch = mergeq.lookup(patch, strict=True)
403 403 if not patch:
404 404 self.ui.warn("patch %s does not exist\n" % patch)
405 405 return (1, None)
406 406 pushable, reason = self.pushable(patch)
407 407 if not pushable:
408 408 self.explain_pushable(patch, all_patches=True)
409 409 continue
410 410 info = mergeq.isapplied(patch)
411 411 if not info:
412 412 self.ui.warn("patch %s is not applied\n" % patch)
413 413 return (1, None)
414 414 rev = revlog.bin(info[1])
415 415 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
416 416 if head:
417 417 self.applied.append(statusentry(revlog.hex(head), patch))
418 418 self.applied_dirty = 1
419 419 if err:
420 420 return (err, head)
421 421 self.save_dirty()
422 422 return (0, head)
423 423
424 424 def patch(self, repo, patchfile):
425 425 '''Apply patchfile to the working directory.
426 426 patchfile: file name of patch'''
427 427 files = {}
428 428 try:
429 429 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
430 430 files=files)
431 431 except Exception, inst:
432 432 self.ui.note(str(inst) + '\n')
433 433 if not self.ui.verbose:
434 434 self.ui.warn("patch failed, unable to continue (try -v)\n")
435 435 return (False, files, False)
436 436
437 437 return (True, files, fuzz)
438 438
439 439 def apply(self, repo, series, list=False, update_status=True,
440 440 strict=False, patchdir=None, merge=None, wlock=None,
441 441 all_files={}):
442 442 if not wlock:
443 443 wlock = repo.wlock()
444 444 lock = repo.lock()
445 445 tr = repo.transaction()
446 446 try:
447 447 ret = self._apply(tr, repo, series, list, update_status,
448 448 strict, patchdir, merge, wlock,
449 449 lock=lock, all_files=all_files)
450 450 tr.close()
451 451 self.save_dirty()
452 452 return ret
453 453 except:
454 454 try:
455 455 tr.abort()
456 456 finally:
457 457 repo.invalidate()
458 458 repo.dirstate.invalidate()
459 459 raise
460 460
461 461 def _apply(self, tr, repo, series, list=False, update_status=True,
462 462 strict=False, patchdir=None, merge=None, wlock=None,
463 463 lock=None, all_files={}):
464 464 # TODO unify with commands.py
465 465 if not patchdir:
466 466 patchdir = self.path
467 467 err = 0
468 468 n = None
469 469 for patchname in series:
470 470 pushable, reason = self.pushable(patchname)
471 471 if not pushable:
472 472 self.explain_pushable(patchname, all_patches=True)
473 473 continue
474 474 self.ui.warn("applying %s\n" % patchname)
475 475 pf = os.path.join(patchdir, patchname)
476 476
477 477 try:
478 478 message, comments, user, date, patchfound = self.readheaders(patchname)
479 479 except:
480 480 self.ui.warn("Unable to read %s\n" % patchname)
481 481 err = 1
482 482 break
483 483
484 484 if not message:
485 485 message = "imported patch %s\n" % patchname
486 486 else:
487 487 if list:
488 488 message.append("\nimported patch %s" % patchname)
489 489 message = '\n'.join(message)
490 490
491 491 (patcherr, files, fuzz) = self.patch(repo, pf)
492 492 all_files.update(files)
493 493 patcherr = not patcherr
494 494
495 495 if merge and files:
496 496 # Mark as removed/merged and update dirstate parent info
497 497 removed = []
498 498 merged = []
499 499 for f in files:
500 500 if os.path.exists(repo.dirstate.wjoin(f)):
501 501 merged.append(f)
502 502 else:
503 503 removed.append(f)
504 repo.dirstate.update(repo.dirstate.filterfiles(removed), 'r')
505 repo.dirstate.update(repo.dirstate.filterfiles(merged), 'm')
504 for f in removed:
505 repo.dirstate.remove(f)
506 for f in merged:
507 repo.dirstate.merge(f)
506 508 p1, p2 = repo.dirstate.parents()
507 509 repo.dirstate.setparents(p1, merge)
508 510 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
509 511 n = repo.commit(files, message, user, date, force=1, lock=lock,
510 512 wlock=wlock)
511 513
512 514 if n == None:
513 515 raise util.Abort(_("repo commit failed"))
514 516
515 517 if update_status:
516 518 self.applied.append(statusentry(revlog.hex(n), patchname))
517 519
518 520 if patcherr:
519 521 if not patchfound:
520 522 self.ui.warn("patch %s is empty\n" % patchname)
521 523 err = 0
522 524 else:
523 525 self.ui.warn("patch failed, rejects left in working dir\n")
524 526 err = 1
525 527 break
526 528
527 529 if fuzz and strict:
528 530 self.ui.warn("fuzz found when applying patch, stopping\n")
529 531 err = 1
530 532 break
531 533 self.removeundo(repo)
532 534 return (err, n)
533 535
534 536 def delete(self, repo, patches, opts):
535 537 if not patches and not opts.get('rev'):
536 538 raise util.Abort(_('qdelete requires at least one revision or '
537 539 'patch name'))
538 540
539 541 realpatches = []
540 542 for patch in patches:
541 543 patch = self.lookup(patch, strict=True)
542 544 info = self.isapplied(patch)
543 545 if info:
544 546 raise util.Abort(_("cannot delete applied patch %s") % patch)
545 547 if patch not in self.series:
546 548 raise util.Abort(_("patch %s not in series file") % patch)
547 549 realpatches.append(patch)
548 550
549 551 appliedbase = 0
550 552 if opts.get('rev'):
551 553 if not self.applied:
552 554 raise util.Abort(_('no patches applied'))
553 555 revs = cmdutil.revrange(repo, opts['rev'])
554 556 if len(revs) > 1 and revs[0] > revs[1]:
555 557 revs.reverse()
556 558 for rev in revs:
557 559 if appliedbase >= len(self.applied):
558 560 raise util.Abort(_("revision %d is not managed") % rev)
559 561
560 562 base = revlog.bin(self.applied[appliedbase].rev)
561 563 node = repo.changelog.node(rev)
562 564 if node != base:
563 565 raise util.Abort(_("cannot delete revision %d above "
564 566 "applied patches") % rev)
565 567 realpatches.append(self.applied[appliedbase].name)
566 568 appliedbase += 1
567 569
568 570 if not opts.get('keep'):
569 571 r = self.qrepo()
570 572 if r:
571 573 r.remove(realpatches, True)
572 574 else:
573 575 for p in realpatches:
574 576 os.unlink(self.join(p))
575 577
576 578 if appliedbase:
577 579 del self.applied[:appliedbase]
578 580 self.applied_dirty = 1
579 581 indices = [self.find_series(p) for p in realpatches]
580 582 indices.sort()
581 583 for i in indices[-1::-1]:
582 584 del self.full_series[i]
583 585 self.parse_series()
584 586 self.series_dirty = 1
585 587
586 588 def check_toppatch(self, repo):
587 589 if len(self.applied) > 0:
588 590 top = revlog.bin(self.applied[-1].rev)
589 591 pp = repo.dirstate.parents()
590 592 if top not in pp:
591 593 raise util.Abort(_("queue top not at same revision as working directory"))
592 594 return top
593 595 return None
594 596 def check_localchanges(self, repo, force=False, refresh=True):
595 597 m, a, r, d = repo.status()[:4]
596 598 if m or a or r or d:
597 599 if not force:
598 600 if refresh:
599 601 raise util.Abort(_("local changes found, refresh first"))
600 602 else:
601 603 raise util.Abort(_("local changes found"))
602 604 return m, a, r, d
603 605
604 606 def new(self, repo, patch, *pats, **opts):
605 607 msg = opts.get('msg')
606 608 force = opts.get('force')
607 609 if os.path.exists(self.join(patch)):
608 610 raise util.Abort(_('patch "%s" already exists') % patch)
609 611 if opts.get('include') or opts.get('exclude') or pats:
610 612 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
611 613 m, a, r, d = repo.status(files=fns, match=match)[:4]
612 614 else:
613 615 m, a, r, d = self.check_localchanges(repo, force)
614 616 commitfiles = m + a + r
615 617 self.check_toppatch(repo)
616 618 wlock = repo.wlock()
617 619 insert = self.full_series_end()
618 620 if msg:
619 621 n = repo.commit(commitfiles, msg, force=True, wlock=wlock)
620 622 else:
621 623 n = repo.commit(commitfiles,
622 624 "[mq]: %s" % patch, force=True, wlock=wlock)
623 625 if n == None:
624 626 raise util.Abort(_("repo commit failed"))
625 627 self.full_series[insert:insert] = [patch]
626 628 self.applied.append(statusentry(revlog.hex(n), patch))
627 629 self.parse_series()
628 630 self.series_dirty = 1
629 631 self.applied_dirty = 1
630 632 p = self.opener(patch, "w")
631 633 if msg:
632 634 msg = msg + "\n"
633 635 p.write(msg)
634 636 p.close()
635 637 wlock = None
636 638 r = self.qrepo()
637 639 if r: r.add([patch])
638 640 if commitfiles:
639 641 self.refresh(repo, short=True)
640 642 self.removeundo(repo)
641 643
642 644 def strip(self, repo, rev, update=True, backup="all", wlock=None):
643 645 if not wlock:
644 646 wlock = repo.wlock()
645 647 lock = repo.lock()
646 648
647 649 if update:
648 650 self.check_localchanges(repo, refresh=False)
649 651 urev = self.qparents(repo, rev)
650 652 hg.clean(repo, urev, wlock=wlock)
651 653 repo.dirstate.write()
652 654
653 655 self.removeundo(repo)
654 656 repair.strip(self.ui, repo, rev, backup)
655 657
656 658 def isapplied(self, patch):
657 659 """returns (index, rev, patch)"""
658 660 for i in xrange(len(self.applied)):
659 661 a = self.applied[i]
660 662 if a.name == patch:
661 663 return (i, a.rev, a.name)
662 664 return None
663 665
664 666 # if the exact patch name does not exist, we try a few
665 667 # variations. If strict is passed, we try only #1
666 668 #
667 669 # 1) a number to indicate an offset in the series file
668 670 # 2) a unique substring of the patch name was given
669 671 # 3) patchname[-+]num to indicate an offset in the series file
670 672 def lookup(self, patch, strict=False):
671 673 patch = patch and str(patch)
672 674
673 675 def partial_name(s):
674 676 if s in self.series:
675 677 return s
676 678 matches = [x for x in self.series if s in x]
677 679 if len(matches) > 1:
678 680 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
679 681 for m in matches:
680 682 self.ui.warn(' %s\n' % m)
681 683 return None
682 684 if matches:
683 685 return matches[0]
684 686 if len(self.series) > 0 and len(self.applied) > 0:
685 687 if s == 'qtip':
686 688 return self.series[self.series_end(True)-1]
687 689 if s == 'qbase':
688 690 return self.series[0]
689 691 return None
690 692 if patch == None:
691 693 return None
692 694
693 695 # we don't want to return a partial match until we make
694 696 # sure the file name passed in does not exist (checked below)
695 697 res = partial_name(patch)
696 698 if res and res == patch:
697 699 return res
698 700
699 701 if not os.path.isfile(self.join(patch)):
700 702 try:
701 703 sno = int(patch)
702 704 except(ValueError, OverflowError):
703 705 pass
704 706 else:
705 707 if sno < len(self.series):
706 708 return self.series[sno]
707 709 if not strict:
708 710 # return any partial match made above
709 711 if res:
710 712 return res
711 713 minus = patch.rfind('-')
712 714 if minus >= 0:
713 715 res = partial_name(patch[:minus])
714 716 if res:
715 717 i = self.series.index(res)
716 718 try:
717 719 off = int(patch[minus+1:] or 1)
718 720 except(ValueError, OverflowError):
719 721 pass
720 722 else:
721 723 if i - off >= 0:
722 724 return self.series[i - off]
723 725 plus = patch.rfind('+')
724 726 if plus >= 0:
725 727 res = partial_name(patch[:plus])
726 728 if res:
727 729 i = self.series.index(res)
728 730 try:
729 731 off = int(patch[plus+1:] or 1)
730 732 except(ValueError, OverflowError):
731 733 pass
732 734 else:
733 735 if i + off < len(self.series):
734 736 return self.series[i + off]
735 737 raise util.Abort(_("patch %s not in series") % patch)
736 738
737 739 def push(self, repo, patch=None, force=False, list=False,
738 740 mergeq=None, wlock=None):
739 741 if not wlock:
740 742 wlock = repo.wlock()
741 743 patch = self.lookup(patch)
742 744 # Suppose our series file is: A B C and the current 'top' patch is B.
743 745 # qpush C should be performed (moving forward)
744 746 # qpush B is a NOP (no change)
745 747 # qpush A is an error (can't go backwards with qpush)
746 748 if patch:
747 749 info = self.isapplied(patch)
748 750 if info:
749 751 if info[0] < len(self.applied) - 1:
750 752 raise util.Abort(_("cannot push to a previous patch: %s") %
751 753 patch)
752 754 if info[0] < len(self.series) - 1:
753 755 self.ui.warn(_('qpush: %s is already at the top\n') % patch)
754 756 else:
755 757 self.ui.warn(_('all patches are currently applied\n'))
756 758 return
757 759
758 760 # Following the above example, starting at 'top' of B:
759 761 # qpush should be performed (pushes C), but a subsequent qpush without
760 762 # an argument is an error (nothing to apply). This allows a loop
761 763 # of "...while hg qpush..." to work as it detects an error when done
762 764 if self.series_end() == len(self.series):
763 765 self.ui.warn(_('patch series already fully applied\n'))
764 766 return 1
765 767 if not force:
766 768 self.check_localchanges(repo)
767 769
768 770 self.applied_dirty = 1;
769 771 start = self.series_end()
770 772 if start > 0:
771 773 self.check_toppatch(repo)
772 774 if not patch:
773 775 patch = self.series[start]
774 776 end = start + 1
775 777 else:
776 778 end = self.series.index(patch, start) + 1
777 779 s = self.series[start:end]
778 780 all_files = {}
779 781 try:
780 782 if mergeq:
781 783 ret = self.mergepatch(repo, mergeq, s, wlock)
782 784 else:
783 785 ret = self.apply(repo, s, list, wlock=wlock,
784 786 all_files=all_files)
785 787 except:
786 788 self.ui.warn(_('cleaning up working directory...'))
787 789 node = repo.dirstate.parents()[0]
788 790 hg.revert(repo, node, None, wlock)
789 791 unknown = repo.status(wlock=wlock)[4]
790 792 # only remove unknown files that we know we touched or
791 793 # created while patching
792 794 for f in unknown:
793 795 if f in all_files:
794 796 util.unlink(repo.wjoin(f))
795 797 self.ui.warn(_('done\n'))
796 798 raise
797 799 top = self.applied[-1].name
798 800 if ret[0]:
799 801 self.ui.write("Errors during apply, please fix and refresh %s\n" %
800 802 top)
801 803 else:
802 804 self.ui.write("Now at: %s\n" % top)
803 805 return ret[0]
804 806
805 807 def pop(self, repo, patch=None, force=False, update=True, all=False,
806 808 wlock=None):
807 809 def getfile(f, rev):
808 810 t = repo.file(f).read(rev)
809 811 repo.wfile(f, "w").write(t)
810 812
811 813 if not wlock:
812 814 wlock = repo.wlock()
813 815 if patch:
814 816 # index, rev, patch
815 817 info = self.isapplied(patch)
816 818 if not info:
817 819 patch = self.lookup(patch)
818 820 info = self.isapplied(patch)
819 821 if not info:
820 822 raise util.Abort(_("patch %s is not applied") % patch)
821 823
822 824 if len(self.applied) == 0:
823 825 # Allow qpop -a to work repeatedly,
824 826 # but not qpop without an argument
825 827 self.ui.warn(_("no patches applied\n"))
826 828 return not all
827 829
828 830 if not update:
829 831 parents = repo.dirstate.parents()
830 832 rr = [ revlog.bin(x.rev) for x in self.applied ]
831 833 for p in parents:
832 834 if p in rr:
833 835 self.ui.warn("qpop: forcing dirstate update\n")
834 836 update = True
835 837
836 838 if not force and update:
837 839 self.check_localchanges(repo)
838 840
839 841 self.applied_dirty = 1;
840 842 end = len(self.applied)
841 843 if not patch:
842 844 if all:
843 845 popi = 0
844 846 else:
845 847 popi = len(self.applied) - 1
846 848 else:
847 849 popi = info[0] + 1
848 850 if popi >= end:
849 851 self.ui.warn("qpop: %s is already at the top\n" % patch)
850 852 return
851 853 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
852 854
853 855 start = info[0]
854 856 rev = revlog.bin(info[1])
855 857
856 858 # we know there are no local changes, so we can make a simplified
857 859 # form of hg.update.
858 860 if update:
859 861 top = self.check_toppatch(repo)
860 862 qp = self.qparents(repo, rev)
861 863 changes = repo.changelog.read(qp)
862 864 mmap = repo.manifest.read(changes[0])
863 865 m, a, r, d, u = repo.status(qp, top)[:5]
864 866 if d:
865 867 raise util.Abort("deletions found between repo revs")
866 868 for f in m:
867 869 getfile(f, mmap[f])
868 870 for f in r:
869 871 getfile(f, mmap[f])
870 872 util.set_exec(repo.wjoin(f), mmap.execf(f))
871 repo.dirstate.update(m + r, 'n')
873 for f in m + r:
874 repo.dirstate.normal(f)
872 875 for f in a:
873 876 try:
874 877 os.unlink(repo.wjoin(f))
875 878 except OSError, e:
876 879 if e.errno != errno.ENOENT:
877 880 raise
878 881 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
879 882 except: pass
880 if a:
881 repo.dirstate.forget(a)
883 repo.dirstate.forget(f)
882 884 repo.dirstate.setparents(qp, revlog.nullid)
883 885 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
884 886 del self.applied[start:end]
885 887 if len(self.applied):
886 888 self.ui.write("Now at: %s\n" % self.applied[-1].name)
887 889 else:
888 890 self.ui.write("Patch queue now empty\n")
889 891
890 892 def diff(self, repo, pats, opts):
891 893 top = self.check_toppatch(repo)
892 894 if not top:
893 895 self.ui.write("No patches applied\n")
894 896 return
895 897 qp = self.qparents(repo, top)
896 898 if opts.get('git'):
897 899 self.diffopts().git = True
898 900 self.printdiff(repo, qp, files=pats, opts=opts)
899 901
    def refresh(self, repo, pats=None, **opts):
        """Regenerate the topmost applied patch from the working directory.

        Rewrites the patch file in place (preserving/merging its header),
        then re-commits the patch changeset.  Returns 1 if no patch is
        applied.  pats restricts which modifications are folded into the
        patch; the rest stay as uncommitted working-directory changes.
        """
        if len(self.applied) == 0:
            self.ui.write("No patches applied\n")
            return 1
        wlock = repo.wlock()
        self.check_toppatch(repo)
        (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
        top = revlog.bin(top)
        cparents = repo.changelog.parents(top)
        patchparent = self.qparents(repo, top)
        message, comments, user, date, patchfound = self.readheaders(patchfn)

        patchf = self.opener(patchfn, 'r+')

        # if the patch was a git patch, refresh it as a git patch
        for line in patchf:
            if line.startswith('diff --git'):
                self.diffopts().git = True
                break
        patchf.seek(0)
        patchf.truncate()

        msg = opts.get('msg', '').rstrip()
        if msg:
            if comments:
                # Remove existing message.
                ci = 0
                subj = None
                for mi in xrange(len(message)):
                    if comments[ci].lower().startswith('subject: '):
                        subj = comments[ci][9:]
                    # skip header lines until we find the message line
                    # (either verbatim or as the Subject: payload)
                    while message[mi] != comments[ci] and message[mi] != subj:
                        ci += 1
                    del comments[ci]
            comments.append(msg)
        if comments:
            comments = "\n".join(comments) + '\n\n'
            patchf.write(comments)

        if opts.get('git'):
            self.diffopts().git = True
        fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
        tip = repo.changelog.tip()
        if top == tip:
            # if the top of our patch queue is also the tip, there is an
            # optimization here. We update the dirstate in place and strip
            # off the tip commit. Then just commit the current directory
            # tree. We can also send repo.commit the list of files
            # changed to speed up the diff
            #
            # in short mode, we only diff the files included in the
            # patch already
            #
            # this should really read:
            #   mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
            # but we do it backwards to take advantage of manifest/chlog
            # caching against the next repo.status call
            #
            mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
            changes = repo.changelog.read(tip)
            man = repo.manifest.read(changes[0])
            aaa = aa[:]
            if opts.get('short'):
                filelist = mm + aa + dd
                match = dict.fromkeys(filelist).__contains__
            else:
                filelist = None
                match = util.always
            m, a, r, d, u = repo.status(files=filelist, match=match)[:5]

            # we might end up with files that were added between tip and
            # the dirstate parent, but then changed in the local dirstate.
            # in this case, we want them to only show up in the added section
            for x in m:
                if x not in aa:
                    mm.append(x)
            # we might end up with files added by the local dirstate that
            # were deleted by the patch. In this case, they should only
            # show up in the changed section.
            for x in a:
                if x in dd:
                    del dd[dd.index(x)]
                    mm.append(x)
                else:
                    aa.append(x)
            # make sure any files deleted in the local dirstate
            # are not in the add or change column of the patch
            forget = []
            for x in d + r:
                if x in aa:
                    del aa[aa.index(x)]
                    forget.append(x)
                    continue
                elif x in mm:
                    del mm[mm.index(x)]
                    dd.append(x)

            m = util.unique(mm)
            r = util.unique(dd)
            a = util.unique(aa)
            c = [filter(matchfn, l) for l in (m, a, r, [], u)]
            filelist = util.unique(c[0] + c[1] + c[2])
            patch.diff(repo, patchparent, files=filelist, match=matchfn,
                       fp=patchf, changes=c, opts=self.diffopts())
            patchf.close()

            # rewind the dirstate to the patch's parent so the re-commit
            # below recreates the patch changeset
            repo.dirstate.setparents(*cparents)
            copies = {}
            for dst in a:
                src = repo.dirstate.copied(dst)
                if src is None:
                    continue
                copies.setdefault(src, []).append(dst)
                # NOTE(review): because of the `continue` above, added files
                # without a copy source are never dirstate.add()ed here —
                # confirm this is intended and not a regression from the old
                # repo.dirstate.update(a, 'a') call
                repo.dirstate.add(dst)
            # remember the copies between patchparent and tip
            # this may be slow, so don't do it if we're not tracking copies
            if self.diffopts().git:
                for dst in aaa:
                    f = repo.file(dst)
                    src = f.renamed(man[dst])
                    if src:
                        copies[src[0]] = copies.get(dst, [])
                        if dst in a:
                            copies[src[0]].append(dst)
                    # we can't copy a file created by the patch itself
                    if dst in copies:
                        del copies[dst]
            for src, dsts in copies.iteritems():
                for dst in dsts:
                    repo.dirstate.copy(src, dst)
            for f in r:
                repo.dirstate.remove(f)
            # if the patch excludes a modified file, mark that file with mtime=0
            # so status can see it.
            mm = []
            for i in xrange(len(m)-1, -1, -1):
                if not matchfn(m[i]):
                    mm.append(m[i])
                    del m[i]
            for f in m:
                repo.dirstate.normal(f)
            for f in mm:
                repo.dirstate.normaldirty(f)
            for f in forget:
                repo.dirstate.forget(f)

            if not msg:
                if not message:
                    message = "[mq]: %s\n" % patchfn
                else:
                    message = "\n".join(message)
            else:
                message = msg

            self.strip(repo, top, update=False, backup='strip', wlock=wlock)
            n = repo.commit(filelist, message, changes[1], match=matchfn,
                            force=1, wlock=wlock)
            self.applied[-1] = statusentry(revlog.hex(n), patchfn)
            self.applied_dirty = 1
            self.removeundo(repo)
        else:
            # slow path: the queue top is not the repo tip; write the diff,
            # then pop and re-push the patch to refresh it
            self.printdiff(repo, patchparent, fp=patchf)
            patchf.close()
            added = repo.status()[1]
            for a in added:
                f = repo.wjoin(a)
                try:
                    os.unlink(f)
                except OSError, e:
                    if e.errno != errno.ENOENT:
                        raise
                try: os.removedirs(os.path.dirname(f))
                except: pass
                # forget the file copies in the dirstate
                # push should readd the files later on
                repo.dirstate.forget(a)
            self.pop(repo, force=True, wlock=wlock)
            self.push(repo, force=True, wlock=wlock)
1074 1080
1075 1081 def init(self, repo, create=False):
1076 1082 if not create and os.path.isdir(self.path):
1077 1083 raise util.Abort(_("patch queue directory already exists"))
1078 1084 try:
1079 1085 os.mkdir(self.path)
1080 1086 except OSError, inst:
1081 1087 if inst.errno != errno.EEXIST or not create:
1082 1088 raise
1083 1089 if create:
1084 1090 return self.qrepo(create=True)
1085 1091
1086 1092 def unapplied(self, repo, patch=None):
1087 1093 if patch and patch not in self.series:
1088 1094 raise util.Abort(_("patch %s is not in series file") % patch)
1089 1095 if not patch:
1090 1096 start = self.series_end()
1091 1097 else:
1092 1098 start = self.series.index(patch) + 1
1093 1099 unapplied = []
1094 1100 for i in xrange(start, len(self.series)):
1095 1101 pushable, reason = self.pushable(i)
1096 1102 if pushable:
1097 1103 unapplied.append((i, self.series[i]))
1098 1104 self.explain_pushable(i)
1099 1105 return unapplied
1100 1106
    def qseries(self, repo, missing=None, start=0, length=None, status=None,
                summary=False):
        """Print (a slice of) the patch series.

        With missing, instead list files in the patch directory that are
        not in the series.  status filters by state letter ('A' applied,
        'U' unapplied, 'G' guarded) unless verbose, where the index and
        state are printed for every entry.  summary appends the first
        line of each patch header.
        """
        def displayname(patchname):
            # "name: first header line" in summary mode, bare name otherwise
            if summary:
                msg = self.readheaders(patchname)[0]
                msg = msg and ': ' + msg[0] or ': '
            else:
                msg = ''
            return '%s%s' % (patchname, msg)

        applied = dict.fromkeys([p.name for p in self.applied])
        if length is None:
            length = len(self.series) - start
        if not missing:
            for i in xrange(start, start+length):
                patch = self.series[i]
                if patch in applied:
                    stat = 'A'
                elif self.pushable(i)[0]:
                    stat = 'U'
                else:
                    stat = 'G'
                pfx = ''
                if self.ui.verbose:
                    pfx = '%d %s ' % (i, stat)
                elif status and status != stat:
                    continue
                self.ui.write('%s%s\n' % (pfx, displayname(patch)))
        else:
            # walk the patch directory for files unknown to the series,
            # skipping mq's own control files and dotfiles
            msng_list = []
            for root, dirs, files in os.walk(self.path):
                d = root[len(self.path) + 1:]
                for f in files:
                    fl = os.path.join(d, f)
                    if (fl not in self.series and
                        fl not in (self.status_path, self.series_path,
                                   self.guards_path)
                        and not fl.startswith('.')):
                        msng_list.append(fl)
            msng_list.sort()
            for x in msng_list:
                pfx = self.ui.verbose and ('D ') or ''
                self.ui.write("%s%s\n" % (pfx, displayname(x)))
1144 1150
1145 1151 def issaveline(self, l):
1146 1152 if l.name == '.hg.patches.save.line':
1147 1153 return True
1148 1154
1149 1155 def qrepo(self, create=False):
1150 1156 if create or os.path.isdir(self.join(".hg")):
1151 1157 return hg.repository(self.ui, path=self.path, create=create)
1152 1158
    def restore(self, repo, rev, delete=None, qupdate=None):
        """Reload queue state from a changeset created by save().

        The save changeset's description carries the applied patches, the
        unapplied series and (optionally) the queue repository's parents.
        With delete, strip the save changeset afterwards (only if it is a
        head).  With qupdate, update the queue repository to the recorded
        parent.  Returns 1 on failure.
        """
        c = repo.changelog.read(rev)
        desc = c[4].strip()  # changeset description holds the saved data
        lines = desc.splitlines()
        i = 0
        datastart = None
        series = []
        applied = []
        qpp = None  # queue repository parents, if recorded
        for i in xrange(0, len(lines)):
            if lines[i] == 'Patch Data:':
                datastart = i + 1
            elif lines[i].startswith('Dirstate:'):
                l = lines[i].rstrip()
                l = l[10:].split(' ')
                qpp = [ hg.bin(x) for x in l ]
            elif datastart != None:
                # one status entry per line after the marker: entries with
                # a revision are applied patches, bare names are series
                l = lines[i].rstrip()
                se = statusentry(l)
                file_ = se.name
                if se.rev:
                    applied.append(se)
                else:
                    series.append(file_)
        if datastart == None:
            self.ui.warn("No saved patch data found\n")
            return 1
        self.ui.warn("restoring status: %s\n" % lines[0])
        self.full_series = series
        self.applied = applied
        self.parse_series()
        self.series_dirty = 1
        self.applied_dirty = 1
        heads = repo.changelog.heads()
        if delete:
            if rev not in heads:
                self.ui.warn("save entry has children, leaving it alone\n")
            else:
                self.ui.warn("removing save entry %s\n" % hg.short(rev))
                pp = repo.dirstate.parents()
                # only update the working dir if it sits on the save entry
                if rev in pp:
                    update = True
                else:
                    update = False
                self.strip(repo, rev, update=update, backup='strip')
        if qpp:
            self.ui.warn("saved queue repository parents: %s %s\n" %
                         (hg.short(qpp[0]), hg.short(qpp[1])))
            if qupdate:
                # NOTE(review): writes to stdout directly, bypassing self.ui —
                # confirm this is intentional
                print "queue directory updating"
                r = self.qrepo()
                if not r:
                    self.ui.warn("Unable to load queue repository\n")
                    return 1
                hg.clean(r, qpp[0])
1208 1214
    def save(self, repo, msg=None):
        """Checkpoint the queue state in a '.hg.patches.save.line' commit.

        Encodes applied patches, the unapplied series and the queue repo's
        dirstate parents in a changeset description that restore() can
        parse.  Returns 1 on failure.
        """
        if len(self.applied) == 0:
            self.ui.warn("save: no patches applied, exiting\n")
            return 1
        if self.issaveline(self.applied[-1]):
            self.ui.warn("status is already saved\n")
            return 1

        # unapplied series entries are prefixed with ':' in the saved data
        ar = [ ':' + x for x in self.full_series ]
        if not msg:
            msg = "hg patches saved state"
        else:
            msg = "hg patches: " + msg.rstrip('\r\n')
        r = self.qrepo()
        if r:
            pp = r.dirstate.parents()
            msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
        msg += "\n\nPatch Data:\n"
        text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
                   "\n".join(ar) + '\n' or "")
        n = repo.commit(None, text, user=None, force=1)
        if not n:
            self.ui.warn("repo commit failed\n")
            return 1
        self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
        self.applied_dirty = 1
        self.removeundo(repo)
1236 1242
1237 1243 def full_series_end(self):
1238 1244 if len(self.applied) > 0:
1239 1245 p = self.applied[-1].name
1240 1246 end = self.find_series(p)
1241 1247 if end == None:
1242 1248 return len(self.full_series)
1243 1249 return end + 1
1244 1250 return 0
1245 1251
1246 1252 def series_end(self, all_patches=False):
1247 1253 """If all_patches is False, return the index of the next pushable patch
1248 1254 in the series, or the series length. If all_patches is True, return the
1249 1255 index of the first patch past the last applied one.
1250 1256 """
1251 1257 end = 0
1252 1258 def next(start):
1253 1259 if all_patches:
1254 1260 return start
1255 1261 i = start
1256 1262 while i < len(self.series):
1257 1263 p, reason = self.pushable(i)
1258 1264 if p:
1259 1265 break
1260 1266 self.explain_pushable(i)
1261 1267 i += 1
1262 1268 return i
1263 1269 if len(self.applied) > 0:
1264 1270 p = self.applied[-1].name
1265 1271 try:
1266 1272 end = self.series.index(p)
1267 1273 except ValueError:
1268 1274 return 0
1269 1275 return next(end + 1)
1270 1276 return next(end)
1271 1277
1272 1278 def appliedname(self, index):
1273 1279 pname = self.applied[index].name
1274 1280 if not self.ui.verbose:
1275 1281 p = pname
1276 1282 else:
1277 1283 p = str(self.series.index(pname)) + " " + pname
1278 1284 return p
1279 1285
1280 1286 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1281 1287 force=None, git=False):
1282 1288 def checkseries(patchname):
1283 1289 if patchname in self.series:
1284 1290 raise util.Abort(_('patch %s is already in the series file')
1285 1291 % patchname)
1286 1292 def checkfile(patchname):
1287 1293 if not force and os.path.exists(self.join(patchname)):
1288 1294 raise util.Abort(_('patch "%s" already exists')
1289 1295 % patchname)
1290 1296
1291 1297 if rev:
1292 1298 if files:
1293 1299 raise util.Abort(_('option "-r" not valid when importing '
1294 1300 'files'))
1295 1301 rev = cmdutil.revrange(repo, rev)
1296 1302 rev.sort(lambda x, y: cmp(y, x))
1297 1303 if (len(files) > 1 or len(rev) > 1) and patchname:
1298 1304 raise util.Abort(_('option "-n" not valid when importing multiple '
1299 1305 'patches'))
1300 1306 i = 0
1301 1307 added = []
1302 1308 if rev:
1303 1309 # If mq patches are applied, we can only import revisions
1304 1310 # that form a linear path to qbase.
1305 1311 # Otherwise, they should form a linear path to a head.
1306 1312 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1307 1313 if len(heads) > 1:
1308 1314 raise util.Abort(_('revision %d is the root of more than one '
1309 1315 'branch') % rev[-1])
1310 1316 if self.applied:
1311 1317 base = revlog.hex(repo.changelog.node(rev[0]))
1312 1318 if base in [n.rev for n in self.applied]:
1313 1319 raise util.Abort(_('revision %d is already managed')
1314 1320 % rev[0])
1315 1321 if heads != [revlog.bin(self.applied[-1].rev)]:
1316 1322 raise util.Abort(_('revision %d is not the parent of '
1317 1323 'the queue') % rev[0])
1318 1324 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1319 1325 lastparent = repo.changelog.parentrevs(base)[0]
1320 1326 else:
1321 1327 if heads != [repo.changelog.node(rev[0])]:
1322 1328 raise util.Abort(_('revision %d has unmanaged children')
1323 1329 % rev[0])
1324 1330 lastparent = None
1325 1331
1326 1332 if git:
1327 1333 self.diffopts().git = True
1328 1334
1329 1335 for r in rev:
1330 1336 p1, p2 = repo.changelog.parentrevs(r)
1331 1337 n = repo.changelog.node(r)
1332 1338 if p2 != revlog.nullrev:
1333 1339 raise util.Abort(_('cannot import merge revision %d') % r)
1334 1340 if lastparent and lastparent != r:
1335 1341 raise util.Abort(_('revision %d is not the parent of %d')
1336 1342 % (r, lastparent))
1337 1343 lastparent = p1
1338 1344
1339 1345 if not patchname:
1340 1346 patchname = normname('%d.diff' % r)
1341 1347 checkseries(patchname)
1342 1348 checkfile(patchname)
1343 1349 self.full_series.insert(0, patchname)
1344 1350
1345 1351 patchf = self.opener(patchname, "w")
1346 1352 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1347 1353 patchf.close()
1348 1354
1349 1355 se = statusentry(revlog.hex(n), patchname)
1350 1356 self.applied.insert(0, se)
1351 1357
1352 1358 added.append(patchname)
1353 1359 patchname = None
1354 1360 self.parse_series()
1355 1361 self.applied_dirty = 1
1356 1362
1357 1363 for filename in files:
1358 1364 if existing:
1359 1365 if filename == '-':
1360 1366 raise util.Abort(_('-e is incompatible with import from -'))
1361 1367 if not patchname:
1362 1368 patchname = normname(filename)
1363 1369 if not os.path.isfile(self.join(patchname)):
1364 1370 raise util.Abort(_("patch %s does not exist") % patchname)
1365 1371 else:
1366 1372 try:
1367 1373 if filename == '-':
1368 1374 if not patchname:
1369 1375 raise util.Abort(_('need --name to import a patch from -'))
1370 1376 text = sys.stdin.read()
1371 1377 else:
1372 1378 text = file(filename).read()
1373 1379 except IOError:
1374 1380 raise util.Abort(_("unable to read %s") % patchname)
1375 1381 if not patchname:
1376 1382 patchname = normname(os.path.basename(filename))
1377 1383 checkfile(patchname)
1378 1384 patchf = self.opener(patchname, "w")
1379 1385 patchf.write(text)
1380 1386 checkseries(patchname)
1381 1387 index = self.full_series_end() + i
1382 1388 self.full_series[index:index] = [patchname]
1383 1389 self.parse_series()
1384 1390 self.ui.warn("adding %s to series file\n" % patchname)
1385 1391 i += 1
1386 1392 added.append(patchname)
1387 1393 patchname = None
1388 1394 self.series_dirty = 1
1389 1395 qrepo = self.qrepo()
1390 1396 if qrepo:
1391 1397 qrepo.add(added)
1392 1398
def delete(ui, repo, *patches, **opts):
    """remove patches from queue

    The patches must not be applied, unless they are arguments to
    the --rev parameter. At least one patch or revision is required.

    With --rev, mq will stop managing the named revisions (converting
    them to regular mercurial changesets). The patches must be applied
    and at the base of the stack. This option is useful when the patches
    have been applied upstream.

    With --keep, the patch files are preserved in the patch directory."""
    mq = repo.mq
    # the queue object does the work; persist its state afterwards
    mq.delete(repo, patches, opts)
    mq.save_dirty()
    return 0
1409 1415
def applied(ui, repo, patch=None, **opts):
    """print the patches already applied"""
    q = repo.mq
    if not patch:
        end = q.series_end(True)
    else:
        if patch not in q.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        end = q.series.index(patch) + 1
    return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1420 1426
def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied"""
    q = repo.mq
    if patch:
        if patch not in q.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        start = q.series.index(patch) + 1
    else:
        start = q.series_end(True)
    # propagate qseries' result as the command's exit status, matching the
    # sibling 'applied' command
    return q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1431 1437
def qimport(ui, repo, *filename, **opts):
    """import a patch

    The patch will have the same name as its source file unless you
    give it a new one with --name.

    You can register an existing patch inside the patch directory
    with the --existing flag.

    With --force, an existing patch of the same name will be overwritten.

    An existing changeset may be placed under mq control with --rev
    (e.g. qimport --rev tip -n patch will place tip under mq control).
    With --git, patches imported with --rev will use the git diff
    format.
    """
    mq = repo.mq
    # delegate to the queue object, then persist queue state
    mq.qimport(repo, filename,
               patchname=opts['name'],
               existing=opts['existing'],
               force=opts['force'],
               rev=opts['rev'],
               git=opts['git'])
    mq.save_dirty()
    return 0
1454 1460
def init(ui, repo, **opts):
    """init a new queue repository

    The queue repository is unversioned by default. If -c is
    specified, qinit will create a separate nested repository
    for patches (qinit -c may also be run later to convert
    an unversioned patch repository into a versioned one).
    You can use qcommit to commit changes to this queue repository."""
    q = repo.mq
    r = q.init(repo, create=opts['create_repo'])
    q.save_dirty()
    if not r:
        return 0
    # seed the freshly created patch repo with its control files
    if not os.path.exists(r.wjoin('.hgignore')):
        fp = r.wopener('.hgignore', 'w')
        fp.write('syntax: glob\nstatus\nguards\n')
        fp.close()
    if not os.path.exists(r.wjoin('series')):
        r.wopener('series', 'w').close()
    r.add(['.hgignore', 'series'])
    commands.add(ui, r)
    return 0
1478 1484
def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested mercurial repository, as
    would be created by qinit -c.
    '''
    cmdutil.setremoteconfig(ui, opts)
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.repository(ui, ui.expandpath(source))
    patchdir = opts['patches'] or (sr.url() + '/.hg/patches')
    try:
        # verify the patch repo exists before cloning anything
        pr = hg.repository(ui, patchdir)
    except hg.RepoError:
        raise util.Abort(_('versioned patch repository not found'
                           ' (see qinit -c)'))
    qbase, destrev = None, None
    if sr.local():
        if sr.mq.applied:
            # qbase: first applied patch; clone only up to its parent so
            # the destination ends up with no patches applied
            qbase = revlog.bin(sr.mq.applied[0].rev)
            if not hg.islocal(dest):
                heads = dict.fromkeys(sr.heads())
                for h in sr.heads(qbase):
                    del heads[h]
                destrev = heads.keys()
                destrev.append(sr.changelog.parents(qbase)[0])
    ui.note(_('cloning main repo\n'))
    sr, dr = hg.clone(ui, sr.url(), dest,
                      pull=opts['pull'],
                      rev=destrev,
                      update=False,
                      stream=opts['uncompressed'])
    ui.note(_('cloning patch repo\n'))
    spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
                        dr.url() + '/.hg/patches',
                        pull=opts['pull'],
                        update=not opts['noupdate'],
                        stream=opts['uncompressed'])
    if dr.local():
        if qbase:
            # a local clone may still contain the patch changesets; strip them
            ui.note(_('stripping applied patches from destination repo\n'))
            dr.mq.strip(dr, qbase, update=False, backup=None)
        if not opts['noupdate']:
            ui.note(_('updating destination repo\n'))
            hg.update(dr, dr.changelog.tip())
1533 1539
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository"""
    q = repo.mq
    r = q.qrepo()
    if not r:
        raise util.Abort('no queue repository')
    commands.commit(r.ui, r, *pats, **opts)
1540 1546
def series(ui, repo, **opts):
    """print the entire series file"""
    q = repo.mq
    q.qseries(repo, missing=opts['missing'], summary=opts['summary'])
    return 0
1545 1551
def top(ui, repo, **opts):
    """print the name of the current patch"""
    q = repo.mq
    if q.applied:
        t = q.series_end(True)
    else:
        t = 0
    if not t:
        ui.write("No patches applied\n")
        return 1
    return q.qseries(repo, start=t-1, length=1, status='A',
                     summary=opts.get('summary'))
1556 1562
def next(ui, repo, **opts):
    """print the name of the next patch"""
    q = repo.mq
    pos = q.series_end()
    if pos == len(q.series):
        ui.write("All patches applied\n")
        return 1
    return q.qseries(repo, start=pos, length=1, summary=opts.get('summary'))
1565 1571
def prev(ui, repo, **opts):
    """print the name of the previous patch"""
    q = repo.mq
    count = len(q.applied)
    if not count:
        ui.write("No patches applied\n")
        return 1
    if count == 1:
        ui.write("Only one patch applied\n")
        return 1
    return q.qseries(repo, start=count-2, length=1, status='A',
                     summary=opts.get('summary'))
1578 1584
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch
    (if any). It will refuse to run if there are any outstanding
    changes unless -f is specified, in which case the patch will
    be initialised with them. You may also use -I, -X, and/or a list of
    files after the patch name to add only changes to matching files
    to the new patch, leaving the rest as uncommitted modifications.

    -e, -m or -l set the patch header as well as the commit message.
    If none is specified, the patch header is empty and the
    commit message is '[mq]: PATCH'"""
    q = repo.mq
    msg = cmdutil.logmessage(opts)
    if opts['edit']:
        msg = ui.edit(msg, ui.username())
    # pass the (possibly edited) message along to queue.new via opts
    opts['msg'] = msg
    q.new(repo, patch, *args, **opts)
    q.save_dirty()
    return 0
1600 1606
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will contain only
    the modifications that match those patterns; the remaining modifications
    will remain in the working directory.

    hg add/remove/copy/rename work as usual, though you might want to use
    git-style patches (--git or [diff] git=1) to track copies and renames.
    """
    q = repo.mq
    msg = cmdutil.logmessage(opts)
    if opts['edit']:
        if msg:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
        # seed the editor with the current patch header
        top = q.applied[-1].name
        (msg, comment, user, date, hasdiff) = q.readheaders(top)
        msg = ui.edit('\n'.join(msg), user or ui.username())
    ret = q.refresh(repo, pats, msg=msg, **opts)
    q.save_dirty()
    return ret
1622 1628
def diff(ui, repo, *pats, **opts):
    """diff of the current patch"""
    q = repo.mq
    q.diff(repo, pats, opts)
    return 0
1627 1633
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will
    be deleted. With -k/--keep, the folded patch files will not
    be removed afterwards.

    The header for each folded patch will be concatenated with
    the current patch header, separated by a line of '* * *'."""

    q = repo.mq

    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.check_toppatch(repo):
        raise util.Abort(_('No patches applied'))

    message = cmdutil.logmessage(opts)
    if opts['edit']:
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            # NOTE(review): this warns about a duplicate but does not skip
            # it — p is still appended below; confirm a `continue` is not
            # missing here
            ui.warn(_('Skipping already folded patch %s') % p)
        if q.isapplied(p):
            raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
        patches.append(p)

    for p in patches:
        if not message:
            # collect headers so they can be concatenated below
            messages.append(q.readheaders(p)[0])
        pf = q.join(p)
        # note: rebinds `files` to the files touched by the patch
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('Error folding patch %s') % p)
        patch.updatedir(ui, repo, files)

    if not message:
        # `comments` is unpacked but unused; `user` feeds the editor below
        message, comments, user = q.readheaders(parent)[0:3]
        for msg in messages:
            message.append('* * *')
            message.extend(msg)
        message = '\n'.join(message)

    if opts['edit']:
        # `user` is defined here: with -e, an explicit message aborts above,
        # so the `if not message:` branch always ran
        message = ui.edit(message, user or ui.username())

    q.refresh(repo, msg=message)
    q.delete(repo, patches, opts)
    q.save_dirty()
1686 1692
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack'''
    q = repo.mq
    name = q.lookup(patch)
    # pop if the target is already applied, push otherwise
    if q.isapplied(name):
        ret = q.pop(repo, name, force=opts['force'])
    else:
        ret = q.push(repo, name, force=opts['force'])
    q.save_dirty()
    return ret
1697 1703
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the qselect command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the qselect command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    To set a negative guard "-foo" on topmost patch ("--" is needed so
    hg will not interpret "-foo" as an option):
    hg qguard -- -foo

    To set guards on another patch:
    hg qguard other.patch +2.6.17 -stable
    '''
    def status(idx):
        # print "<patch>: <guards>" for the series entry at idx
        guards = q.series_guards[idx] or ['unguarded']
        ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
    q = repo.mq
    patch = None
    args = list(args)
    if opts['list']:
        if args or opts['none']:
            raise util.Abort(_('cannot mix -l/--list with options or arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    # no patch name given, or first arg looks like a guard (+foo/-foo):
    # operate on the topmost applied patch
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts['none']:
        # remaining args are the new guard list; --none clears guards
        idx = q.find_series(patch)
        if idx is None:
            raise util.Abort(_('no patch named %s') % patch)
        q.set_guards(idx, args)
        q.save_dirty()
    else:
        status(q.series.index(q.lookup(patch)))
1745 1751
def header(ui, repo, patch=None):
    """Print the header of the topmost or specified patch"""
    q = repo.mq

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write('No patches applied\n')
            return 1
        patch = q.lookup('qtip')
    # consistency fix: use the local 'q' instead of re-fetching repo.mq
    message = q.readheaders(patch)[0]

    ui.write('\n'.join(message) + '\n')
1760 1766
def lastsavename(path):
    """Find the most recent saved queue backup for *path*.

    Saved queues are named "<path>.N" for integer N.  Returns a tuple
    (fullpath, N) for the highest N found, or (None, None) when no
    backup exists.
    """
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    # bug fix: escape the base name and anchor the numeric suffix; the
    # old pattern "%s.([0-9]+)" treated '.' (and any regex chars in
    # base) as wildcards and accepted trailing junk after the number
    namere = re.compile(r"%s\.([0-9]+)$" % re.escape(base))
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)
1777 1783
def savename(path):
    """Return the next unused backup name for path, i.e. "path.N+1"."""
    previous, index = lastsavename(path)
    if previous is None:
        index = 0
    return "%s.%d" % (path, index + 1)
1784 1790
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack"""
    q = repo.mq
    mergeq = None

    if opts['all']:
        # -a/--all: push every remaining patch in the series
        if not q.series:
            ui.warn(_('no patches in series\n'))
            return 0
        patch = q.series[-1]

    if opts['merge']:
        # merge against a saved queue, either named or the newest backup
        if opts['name']:
            newpath = opts['name']
        else:
            newpath, i = lastsavename(q.path)
        if not newpath:
            ui.warn("no saved queues found, please use -n\n")
            return 1
        mergeq = queue(ui, repo.join(""), newpath)
        ui.warn("merging with queue at: %s\n" % mergeq.path)

    return q.push(repo, patch, force=opts['force'], list=opts['list'],
                  mergeq=mergeq)
1808 1814
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack"""
    if opts['name']:
        # popping from a named (saved) queue: leave the working dir alone
        q = queue(ui, repo.join(""), repo.join(opts['name']))
        ui.warn('using patch queue: %s\n' % q.path)
        localupdate = False
    else:
        q = repo.mq
        localupdate = True
    ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
                all=opts['all'])
    q.save_dirty()
    return ret
1822 1828
def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2."""

    q = repo.mq

    # one argument: it is the new name, the source is the current patch
    if not name:
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('No patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    if os.path.isdir(absdest):
        # destination is a directory: keep the original basename inside it
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    if os.path.exists(absdest):
        raise util.Abort(_('%s already exists') % absdest)

    if name in q.series:
        raise util.Abort(_('A patch named %s already exists in the series file') % name)

    if ui.verbose:
        ui.write('Renaming %s to %s\n' % (patch, name))
    # rewrite the series entry, preserving any "#guard" annotations
    i = q.find_series(patch)
    guards = q.guard_re.findall(q.full_series[i])
    q.full_series[i] = name + ''.join([' #' + g for g in guards])
    q.parse_series()
    q.series_dirty = 1

    # if the patch is applied, update the status entry as well
    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
        q.applied_dirty = 1

    util.rename(q.join(patch), absdest)
    # mirror the rename in the versioned patch repository, if any
    r = q.qrepo()
    if r:
        wlock = r.wlock()
        if r.dirstate.state(name) == 'r':
            r.undelete([name], wlock)
        r.copy(patch, name, wlock)
        r.remove([patch], False, wlock)

    q.save_dirty()
1875 1881
def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a rev"""
    node = repo.lookup(rev)
    q = repo.mq
    q.restore(repo, node, delete=opts['delete'],
              qupdate=opts['update'])
    q.save_dirty()
    return 0
1884 1890
def save(ui, repo, **opts):
    """save current queue state"""
    q = repo.mq
    message = cmdutil.logmessage(opts)
    ret = q.save(repo, msg=message)
    if ret:
        return ret
    q.save_dirty()
    if opts['copy']:
        # -c/--copy: duplicate the whole patch directory
        path = q.path
        if opts['name']:
            newpath = os.path.join(q.basepath, opts['name'])
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise util.Abort(_('destination %s exists and is not '
                                       'a directory') % newpath)
                if not opts['force']:
                    raise util.Abort(_('destination %s exists, '
                                       'use -f to force') % newpath)
        else:
            newpath = savename(path)
        ui.warn("copy %s to %s\n" % (path, newpath))
        util.copyfiles(path, newpath)
    if opts['empty']:
        try:
            os.unlink(q.join(q.status_path))
        except OSError:
            # bug fix: catch only OSError instead of a bare except, which
            # also swallowed KeyboardInterrupt/SystemExit; removal of the
            # status file remains best-effort (it may already be gone)
            pass
    return 0
1914 1920
def strip(ui, repo, rev, **opts):
    """strip a revision and all later revs on the same branch"""
    node = repo.lookup(rev)
    # decide how much history to bundle up as a backup first
    if opts['backup']:
        backup = 'strip'
    elif opts['nobackup']:
        backup = 'none'
    else:
        backup = 'all'
    # skip the working-dir update when nothing is checked out
    update = repo.dirstate.parents()[0] != revlog.nullid
    repo.mq.strip(repo, node, backup=backup, update=update)
    return 0
1926 1932
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the qguard command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if it
    has no guards or any positive guards match the currently selected guard,
    but will not be pushed if any negative guards match the current guard.
    For example:

        qguard foo.patch -stable    (negative guard)
        qguard bar.patch +stable    (positive guard)
        qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it
    has a positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are skipped
    and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last applied
    patch that is not guarded. Use --reapply (which implies --pop) to push
    back to the current patch afterwards, but skip guarded patches.

    Use -s/--series to print a list of all guards in the series file (no
    other arguments needed). Use -v for more information.'''

    q = repo.mq
    guards = q.active()
    if args or opts['none']:
        # change the set of active guards and report how many patches
        # became (un)pushable as a result
        old_unapplied = q.unapplied(repo)
        old_guarded = [i for i in xrange(len(q.applied)) if
                       not q.pushable(i)[0]]
        q.set_active(args)
        q.save_dirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts['pop'] and not opts['reapply']:
            unapplied = q.unapplied(repo)
            guarded = [i for i in xrange(len(q.applied))
                       if not q.pushable(i)[0]]
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts['series']:
        # -s/--series: tally how often each guard appears in the series file
        guards = {}
        noguards = 0
        for gs in q.series_guards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = guards.items()
        # sort by guard name, ignoring the leading '+'/'-' sign
        guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        # no arguments: just report the currently active guards
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    # remember the topmost patch so --reapply can push back to it
    reapply = opts['reapply'] and q.applied and q.appliedname(-1)
    popped = False
    if opts['pop'] or opts['reapply']:
        # pop down to just below the first applied patch that is now guarded
        for i in xrange(len(q.applied)):
            pushable, reason = q.pushable(i)
            if not pushable:
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, i-1)
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            q.save_dirty()
2028 2034
def reposetup(ui, repo):
    """Extension hook: wrap the repo class with mq-aware behavior.

    Replaces the repository's class with a subclass that refuses
    operations unsafe over applied mq patches and exposes applied
    patches as tags.
    """
    class mqrepo(repo.__class__):
        def abort_if_wdir_patched(self, errmsg, force=False):
            # raise if the working dir parent is an applied mq patch
            if self.mq.applied and not force:
                parent = revlog.hex(self.dirstate.parents()[0])
                if parent in [s.rev for s in self.mq.applied]:
                    raise util.Abort(errmsg)

        def commit(self, *args, **opts):
            # 'force' may arrive positionally (arg 6) or as a keyword
            if len(args) >= 6:
                force = args[5]
            else:
                force = opts.get('force')
            self.abort_if_wdir_patched(
                _('cannot commit over an applied mq patch'),
                force)

            return super(mqrepo, self).commit(*args, **opts)

        def push(self, remote, force=False, revs=None):
            # refuse to publish mq patches unless forced or revs are explicit
            if self.mq.applied and not force and not revs:
                raise util.Abort(_('source has mq patches applied'))
            return super(mqrepo, self).push(remote, force, revs)

        def tags(self):
            if self.tagscache:
                return self.tagscache

            tagscache = super(mqrepo, self).tags()

            q = self.mq
            if not q.applied:
                return tagscache

            # expose each applied patch as a tag, plus the qtip/qbase/qparent
            # convenience tags; real tags win over patch names
            mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
            mqtags.append((mqtags[-1][0], 'qtip'))
            mqtags.append((mqtags[0][0], 'qbase'))
            mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
            for patch in mqtags:
                if patch[1] in tagscache:
                    self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
                else:
                    tagscache[patch[1]] = patch[0]

            return tagscache

        def _branchtags(self):
            q = self.mq
            if not q.applied:
                return super(mqrepo, self)._branchtags()

            self.branchcache = {} # avoid recursion in changectx
            cl = self.changelog
            partial, last, lrev = self._readbranchcache()

            qbase = cl.rev(revlog.bin(q.applied[0].rev))
            start = lrev + 1
            if start < qbase:
                # update the cache (excluding the patches) and save it
                self._updatebranchcache(partial, lrev+1, qbase)
                self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
                start = qbase
            # if start = qbase, the cache is as updated as it should be.
            # if start > qbase, the cache includes (part of) the patches.
            # we might as well use it, but we won't save it.

            # update the cache up to the tip
            self._updatebranchcache(partial, start, cl.count())

            return partial

    if repo.local():
        repo.__class__ = mqrepo
        repo.mq = queue(ui, repo.join(""))
2103 2109
# -s/--summary flag shared by every command that lists patch names
seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2105 2111
# command dispatch table: maps "name|alias" (a leading '^' marks the
# command as shown in short help) to (function, option list, synopsis)
cmdtable = {
    "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
    "qclone":
        (clone,
         [('', 'pull', None, _('use pull protocol to copy metadata')),
          ('U', 'noupdate', None, _('do not update the new working directories')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('p', 'patches', '', _('location of source patch repo')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg qclone [OPTION]... SOURCE [DEST]')),
    "qcommit|qci":
        (commit,
         commands.table["^commit|ci"][1],
         _('hg qcommit [OPTION]... [FILE]...')),
    "^qdiff":
        (diff,
         [('g', 'git', None, _('use git extended diff format')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg qdiff [-I] [-X] [-g] [FILE]...')),
    "qdelete|qremove|qrm":
        (delete,
         [('k', 'keep', None, _('keep patch file')),
          ('r', 'rev', [], _('stop managing a revision'))],
         _('hg qdelete [-k] [-r REV]... [PATCH]...')),
    'qfold':
        (fold,
         [('e', 'edit', None, _('edit patch header')),
          ('k', 'keep', None, _('keep folded patch files')),
         ] + commands.commitopts,
         _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
    'qgoto':
        (goto,
         [('f', 'force', None, _('overwrite any local changes'))],
         _('hg qgoto [OPTION]... PATCH')),
    'qguard':
        (guard,
         [('l', 'list', None, _('list all patches and guards')),
          ('n', 'none', None, _('drop all guards'))],
         _('hg qguard [-l] [-n] [PATCH] [+GUARD]... [-GUARD]...')),
    'qheader': (header, [], _('hg qheader [PATCH]')),
    "^qimport":
        (qimport,
         [('e', 'existing', None, 'import file in patch dir'),
          ('n', 'name', '', 'patch file name'),
          ('f', 'force', None, 'overwrite existing files'),
          ('r', 'rev', [], 'place existing revisions under mq control'),
          ('g', 'git', None, _('use git extended diff format'))],
         _('hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...')),
    "^qinit":
        (init,
         [('c', 'create-repo', None, 'create queue repository')],
         _('hg qinit [-c]')),
    "qnew":
        (new,
         [('e', 'edit', None, _('edit commit message')),
          ('f', 'force', None, _('import uncommitted changes into patch')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
         ] + commands.commitopts,
         _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
    "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
    "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
    "^qpop":
        (pop,
         [('a', 'all', None, _('pop all patches')),
          ('n', 'name', '', _('queue name to pop')),
          ('f', 'force', None, _('forget any local changes'))],
         _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
    "^qpush":
        (push,
         [('f', 'force', None, _('apply if the patch has rejects')),
          ('l', 'list', None, _('list patch name in commit text')),
          ('a', 'all', None, _('apply all patches')),
          ('m', 'merge', None, _('merge from another queue')),
          ('n', 'name', '', _('merge queue name'))],
         _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
    "^qrefresh":
        (refresh,
         [('e', 'edit', None, _('edit commit message')),
          ('g', 'git', None, _('use git extended diff format')),
          ('s', 'short', None, _('refresh only files already in the patch')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
         ] + commands.commitopts,
         _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
    'qrename|qmv':
        (rename, [], _('hg qrename PATCH1 [PATCH2]')),
    "qrestore":
        (restore,
         [('d', 'delete', None, _('delete save entry')),
          ('u', 'update', None, _('update queue working dir'))],
         _('hg qrestore [-d] [-u] REV')),
    "qsave":
        (save,
         [('c', 'copy', None, _('copy patch directory')),
          ('n', 'name', '', _('copy directory name')),
          ('e', 'empty', None, _('clear queue status file')),
          ('f', 'force', None, _('force copy'))] + commands.commitopts,
         _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
    "qselect":
        (select,
         [('n', 'none', None, _('disable all guards')),
          ('s', 'series', None, _('list all guards in series file')),
          ('', 'pop', None, _('pop to before first guarded applied patch')),
          ('', 'reapply', None, _('pop, then reapply patches'))],
         _('hg qselect [OPTION]... [GUARD]...')),
    "qseries":
        (series,
         [('m', 'missing', None, _('print patches not in series')),
         ] + seriesopts,
         _('hg qseries [-ms]')),
    "^strip":
        (strip,
         [('f', 'force', None, _('force multi-head removal')),
          ('b', 'backup', None, _('bundle unrelated changesets')),
          ('n', 'nobackup', None, _('no backups'))],
         _('hg strip [-f] [-b] [-n] REV')),
    "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
    "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
}
@@ -1,3159 +1,3163 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 import demandimport; demandimport.enable()
9 9 from node import *
10 10 from i18n import _
11 11 import bisect, os, re, sys, urllib, shlex, stat
12 12 import ui, hg, util, revlog, bundlerepo, extensions
13 13 import difflib, patch, time, help, mdiff, tempfile
14 14 import errno, version, socket
15 15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16 16
17 17 # Commands start here, listed alphabetically
18 18
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """

    names = []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        if exact:
            # explicitly named on the command line: only chatter when verbose
            if ui.verbose:
                ui.status(_('adding %s\n') % rel)
        elif repo.dirstate.state(abs) == '?':
            # matched by a pattern: only pick up files not yet tracked
            ui.status(_('adding %s\n') % rel)
        else:
            continue
        names.append(abs)
    if not opts.get('dry_run'):
        repo.add(names)
41 41
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    similarity = float(opts.get('similarity') or 0)
    if similarity < 0 or similarity > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))
    # cmdutil.addremove expects the ratio in [0, 1]
    return cmdutil.addremove(repo, pats, opts, similarity=similarity / 100.)
60 60
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    # memoize date formatting: many lines share the same changeset
    getdate = util.cachefunc(lambda x: util.datestr(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # (option name, formatter) pairs; formatters take (filectx, lineno)
    opmap = [('user', lambda x: ui.shortuser(x[0].user())),
             ('number', lambda x: str(x[0].rev())),
             ('changeset', lambda x: short(x[0].node())),
             ('date', getdate),
             ('follow', lambda x: x[0].path()),
            ]

    # default to showing revision numbers when no column was requested
    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        opts['number'] = 1

    linenumber = opts.get('line_number') is not None
    if (linenumber and (not opts['changeset']) and (not opts['number'])):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [func for op, func in opmap if opts.get(op)]
    if linenumber:
        # append ":lineno" to the last requested column
        lastfunc = funcmap[-1]
        funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])

    ctx = repo.changectx(opts['rev'])

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             node=ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'),
                              linenumber=linenumber)
        pieces = []

        # right-align each column to the width of its widest entry
        for f in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                m = max(map(len, l))
                pieces.append(["%*s" % (m, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
120 120
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    ctx = repo.changectx(opts['rev'])
    if not ctx:
        raise util.Abort(_('repository has no revisions'))
    node = ctx.node()
    # expand %-escapes (e.g. %h, %r) in the destination name
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        # stream the archive to stdout; a plain file tree cannot be streamed
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)
163 163
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head is the parent of the working directory. If
    you back out an old changeset, your working directory will appear
    old after the backout. You should merge the backout changeset
    with another head.

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.'''
    # 'node' and 'rev' are alternate spellings of the same argument
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    cmdutil.bail_if_changed(repo)
    op1, op2 = repo.dirstate.parents()
    if op2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    node = repo.lookup(rev)
    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        # backing out a merge: the user must pick which parent to revert to
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1
    # check out the target, revert everything to its parent, then commit
    # the result as the backout changeset
    hg.clean(repo, node, show_stats=False)
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (short(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        # the backout created a new head; merge back if requested
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(op1))
            hg.merge(repo, hex(op1))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))
236 236
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch does not exist in
    the repository until the next commit).

    Unless --force is specified, branch will not let you set a
    branch name that shadows an existing branch.
    """

    if not label:
        # no argument: just report the current branch
        ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
        return

    if not opts.get('force') and label in repo.branchtags():
        # allow re-setting to a parent's branch, refuse shadowing others
        parentbranches = [p.branch() for p in repo.workingctx().parents()]
        if label not in parentbranches:
            raise util.Abort(_('a branch of the same name already exists'
                               ' (use --force to override)'))
    repo.dirstate.setbranch(util.fromlocal(label))
    ui.status(_('marked working directory as branch %s\n') % label)
257 257
def branches(ui, repo, active=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If active is specified, only show active branches.

    A branch is considered active if it contains unmerged heads.
    """
    branchtags = repo.branchtags()
    headnodes = dict.fromkeys(repo.heads(), 1)
    entries = [((node in headnodes), repo.changelog.rev(node), node, tag)
               for tag, node in branchtags.items()]
    entries.sort()
    entries.reverse()
    hexfunc = ui.debugflag and hex or short
    for ishead, rev, node, tag in entries:
        if active and not ishead:
            # entries with heads sort first, so the first inactive
            # entry ends an active-only listing
            break
        if ui.quiet:
            ui.write("%s\n" % tag)
        else:
            spaces = " " * (30 - util.locallen(tag))
            isinactive = ((not ishead) and " (inactive)") or ''
            ui.write("%s%s %s:%s%s\n" % (tag, spaces, rev,
                                         hexfunc(node), isinactive))
286 286
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is assumed
    to have all the nodes specified by one or more --base parameters.

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    base = opts.get('base')
    if base:
        if dest:
            raise util.Abort(_("--base is incompatible with specifiying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        # walk ancestors from the requested heads, collecting roots whose
        # parents are all reachable from the --base nodes
        o = []
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        # no --base: ask the destination repo what it is missing
        cmdutil.setremoteconfig(ui, opts)
        dest, revs = cmdutil.parseurl(
            ui.expandpath(dest or 'default-push', dest or 'default'), revs)
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')
    changegroup.writebundle(cg, fname, "HG10BZ")
346 346
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s   basename of file being printed
    %d   dirname of file being printed, or '.' if in repo root
    %p   root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    # return 0 only if at least one file matched
    found = False
    for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
                                             node):
        fp = cmdutil.make_file(repo, opts['output'], node, pathname=abs)
        fp.write(ctx.filectx(abs).data())
        found = True
    return not found and 1 or 0
370 370
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    You can safely clone repositories and checked out files using full
    hardlinks with

      $ cp -al REPO REPOCLONE

    which is the fastest way to clone. However, the operation is not
    atomic (making sure REPO is not modified during the operation is
    up to you) and you have to make sure your editor breaks hardlinks
    (Emacs and most Linux Kernel tools do so).

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # push default-path/ssh settings from options into ui config
    cmdutil.setremoteconfig(ui, opts)
    wantupdate = not opts['noupdate']
    hg.clone(ui, source, dest, pull=opts['pull'],
             stream=opts['uncompressed'], rev=opts['rev'],
             update=wantupdate)
416 416
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If no commit message is specified, the editor configured in your hgrc
    or in the EDITOR environment variable is started to enter a message.
    """
    message = cmdutil.logmessage(opts)

    if opts['addremove']:
        cmdutil.addremove(repo, pats, opts)
    fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
    if pats:
        # explicit file arguments: commit only the named changes, but
        # first verify that every named file actually has something to
        # commit, aborting with a specific reason otherwise
        status = repo.status(files=fns, match=match)
        modified, added, removed, deleted, unknown = status[:5]
        files = modified + added + removed
        # slist: lazily-built sorted copy of the changed files, used to
        # check whether a named directory contains any change
        slist = None
        for f in fns:
            if f == '.':
                continue
            if f not in files:
                rf = repo.wjoin(f)
                try:
                    mode = os.lstat(rf)[stat.ST_MODE]
                except OSError:
                    raise util.Abort(_("file %s not found!") % rf)
                if stat.S_ISDIR(mode):
                    # bisect finds where "dir/" would sort among the
                    # changed paths; a change under the directory must
                    # sit right at that insertion point
                    name = f + '/'
                    if slist is None:
                        slist = list(files)
                        slist.sort()
                    i = bisect.bisect(slist, name)
                    if i >= len(slist) or not slist[i].startswith(name):
                        raise util.Abort(_("no match under directory %s!")
                                         % rf)
                elif not (stat.S_ISREG(mode) or stat.S_ISLNK(mode)):
                    raise util.Abort(_("can't commit %s: "
                                       "unsupported file type!") % rf)
                elif repo.dirstate.state(f) == '?':
                    raise util.Abort(_("file %s not tracked!") % rf)
    else:
        # no patterns: commit everything repo.commit finds changed
        files = []
    try:
        repo.commit(files, message, opts['user'], opts['date'], match,
                    force_editor=opts.get('force_editor'))
    except ValueError, inst:
        raise util.Abort(str(inst))
468 468
469 469 def docopy(ui, repo, pats, opts, wlock):
470 470 # called with the repo lock held
471 471 #
472 472 # hgsep => pathname that uses "/" to separate directories
473 473 # ossep => pathname that uses os.sep to separate directories
474 474 cwd = repo.getcwd()
475 475 errors = 0
476 476 copied = []
477 477 targets = {}
478 478
479 479 # abs: hgsep
480 480 # rel: ossep
481 481 # return: hgsep
482 482 def okaytocopy(abs, rel, exact):
483 483 reasons = {'?': _('is not managed'),
484 484 'r': _('has been marked for remove')}
485 485 state = repo.dirstate.state(abs)
486 486 reason = reasons.get(state)
487 487 if reason:
488 488 if exact:
489 489 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
490 490 else:
491 491 if state == 'a':
492 492 origsrc = repo.dirstate.copied(abs)
493 493 if origsrc is not None:
494 494 return origsrc
495 495 return abs
496 496
497 497 # origsrc: hgsep
498 498 # abssrc: hgsep
499 499 # relsrc: ossep
500 500 # otarget: ossep
501 501 def copy(origsrc, abssrc, relsrc, otarget, exact):
502 502 abstarget = util.canonpath(repo.root, cwd, otarget)
503 503 reltarget = repo.pathto(abstarget, cwd)
504 504 prevsrc = targets.get(abstarget)
505 505 src = repo.wjoin(abssrc)
506 506 target = repo.wjoin(abstarget)
507 507 if prevsrc is not None:
508 508 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
509 509 (reltarget, repo.pathto(abssrc, cwd),
510 510 repo.pathto(prevsrc, cwd)))
511 511 return
512 512 if (not opts['after'] and os.path.exists(target) or
513 513 opts['after'] and repo.dirstate.state(abstarget) not in '?ar'):
514 514 if not opts['force']:
515 515 ui.warn(_('%s: not overwriting - file exists\n') %
516 516 reltarget)
517 517 return
518 518 if not opts['after'] and not opts.get('dry_run'):
519 519 os.unlink(target)
520 520 if opts['after']:
521 521 if not os.path.exists(target):
522 522 return
523 523 else:
524 524 targetdir = os.path.dirname(target) or '.'
525 525 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
526 526 os.makedirs(targetdir)
527 527 try:
528 528 restore = repo.dirstate.state(abstarget) == 'r'
529 529 if restore and not opts.get('dry_run'):
530 530 repo.undelete([abstarget], wlock)
531 531 try:
532 532 if not opts.get('dry_run'):
533 533 util.copyfile(src, target)
534 534 restore = False
535 535 finally:
536 536 if restore:
537 537 repo.remove([abstarget], wlock=wlock)
538 538 except IOError, inst:
539 539 if inst.errno == errno.ENOENT:
540 540 ui.warn(_('%s: deleted in working copy\n') % relsrc)
541 541 else:
542 542 ui.warn(_('%s: cannot copy - %s\n') %
543 543 (relsrc, inst.strerror))
544 544 errors += 1
545 545 return
546 546 if ui.verbose or not exact:
547 547 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
548 548 targets[abstarget] = abssrc
549 549 if abstarget != origsrc:
550 550 if repo.dirstate.state(origsrc) == 'a':
551 551 if not ui.quiet:
552 552 ui.warn(_("%s has not been committed yet, so no copy "
553 553 "data will be stored for %s.\n")
554 554 % (repo.pathto(origsrc, cwd), reltarget))
555 555 if abstarget not in repo.dirstate and not opts.get('dry_run'):
556 556 repo.add([abstarget], wlock)
557 557 elif not opts.get('dry_run'):
558 558 repo.copy(origsrc, abstarget, wlock)
559 559 copied.append((abssrc, relsrc, exact))
560 560
561 561 # pat: ossep
562 562 # dest ossep
563 563 # srcs: list of (hgsep, hgsep, ossep, bool)
564 564 # return: function that takes hgsep and returns ossep
565 565 def targetpathfn(pat, dest, srcs):
566 566 if os.path.isdir(pat):
567 567 abspfx = util.canonpath(repo.root, cwd, pat)
568 568 abspfx = util.localpath(abspfx)
569 569 if destdirexists:
570 570 striplen = len(os.path.split(abspfx)[0])
571 571 else:
572 572 striplen = len(abspfx)
573 573 if striplen:
574 574 striplen += len(os.sep)
575 575 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
576 576 elif destdirexists:
577 577 res = lambda p: os.path.join(dest,
578 578 os.path.basename(util.localpath(p)))
579 579 else:
580 580 res = lambda p: dest
581 581 return res
582 582
583 583 # pat: ossep
584 584 # dest ossep
585 585 # srcs: list of (hgsep, hgsep, ossep, bool)
586 586 # return: function that takes hgsep and returns ossep
587 587 def targetpathafterfn(pat, dest, srcs):
588 588 if util.patkind(pat, None)[0]:
589 589 # a mercurial pattern
590 590 res = lambda p: os.path.join(dest,
591 591 os.path.basename(util.localpath(p)))
592 592 else:
593 593 abspfx = util.canonpath(repo.root, cwd, pat)
594 594 if len(abspfx) < len(srcs[0][0]):
595 595 # A directory. Either the target path contains the last
596 596 # component of the source path or it does not.
597 597 def evalpath(striplen):
598 598 score = 0
599 599 for s in srcs:
600 600 t = os.path.join(dest, util.localpath(s[0])[striplen:])
601 601 if os.path.exists(t):
602 602 score += 1
603 603 return score
604 604
605 605 abspfx = util.localpath(abspfx)
606 606 striplen = len(abspfx)
607 607 if striplen:
608 608 striplen += len(os.sep)
609 609 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
610 610 score = evalpath(striplen)
611 611 striplen1 = len(os.path.split(abspfx)[0])
612 612 if striplen1:
613 613 striplen1 += len(os.sep)
614 614 if evalpath(striplen1) > score:
615 615 striplen = striplen1
616 616 res = lambda p: os.path.join(dest,
617 617 util.localpath(p)[striplen:])
618 618 else:
619 619 # a file
620 620 if destdirexists:
621 621 res = lambda p: os.path.join(dest,
622 622 os.path.basename(util.localpath(p)))
623 623 else:
624 624 res = lambda p: dest
625 625 return res
626 626
627 627
628 628 pats = util.expand_glob(pats)
629 629 if not pats:
630 630 raise util.Abort(_('no source or destination specified'))
631 631 if len(pats) == 1:
632 632 raise util.Abort(_('no destination specified'))
633 633 dest = pats.pop()
634 634 destdirexists = os.path.isdir(dest)
635 635 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
636 636 raise util.Abort(_('with multiple sources, destination must be an '
637 637 'existing directory'))
638 638 if opts['after']:
639 639 tfn = targetpathafterfn
640 640 else:
641 641 tfn = targetpathfn
642 642 copylist = []
643 643 for pat in pats:
644 644 srcs = []
645 645 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
646 646 globbed=True):
647 647 origsrc = okaytocopy(abssrc, relsrc, exact)
648 648 if origsrc:
649 649 srcs.append((origsrc, abssrc, relsrc, exact))
650 650 if not srcs:
651 651 continue
652 652 copylist.append((tfn(pat, dest, srcs), srcs))
653 653 if not copylist:
654 654 raise util.Abort(_('no files to copy'))
655 655
656 656 for targetpath, srcs in copylist:
657 657 for origsrc, abssrc, relsrc, exact in srcs:
658 658 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
659 659
660 660 if errors:
661 661 ui.warn(_('(consider using --after)\n'))
662 662 return errors, copied
663 663
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    # hold the working-dir lock for the whole operation
    wlock = repo.wlock(0)
    errcount, _copied = docopy(ui, repo, pats, opts, wlock)
    return errcount
681 681
def debugancestor(ui, index, rev1, rev2):
    """find the ancestor revision of two revisions in a given index"""
    opener = util.opener(os.getcwd(), audit=False)
    log = revlog.revlog(opener, index)
    anc = log.ancestor(log.lookup(rev1), log.lookup(rev2))
    ui.write("%d:%s\n" % (log.rev(anc), hex(anc)))
687 687
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts['options']:
        # complete option names instead of command names
        flags = []
        tables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(ui, cmd)
            tables.append(entry[1])
        for table in tables:
            for opt in table:
                if opt[0]:
                    flags.append('-%s' % opt[0])
                flags.append('--%s' % opt[1])
        ui.write("%s\n" % "\n".join(flags))
        return

    names = cmdutil.findpossible(ui, cmd).keys()
    names.sort()
    ui.write("%s\n" % "\n".join(names))
708 708
def debugrebuildstate(ui, repo, rev=""):
    """rebuild the dirstate as it would look like for the given revision"""
    if rev == "":
        rev = repo.changelog.tip()
    files = repo.changectx(rev).manifest()
    # keep the lock object referenced while rebuilding
    wlock = repo.wlock()
    repo.dirstate.rebuild(rev, files)
717 717
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    dirstate = repo.dirstate
    m1 = repo.changectx(parent1).manifest()
    m2 = repo.changectx(parent2).manifest()
    errors = 0
    # every dirstate entry must be consistent with the parent manifests
    for f in dirstate:
        state = repo.dirstate.state(f)
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # and every first-parent manifest entry must appear in the dirstate
    for f in m1:
        state = repo.dirstate.state(f)
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        raise util.Abort(
            _(".hg/dirstate inconsistent with current parent's manifest"))
745 745
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    if values and len([v for v in values if '.' in v]) > 1:
        raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if not values:
            ui.write('%s=%s\n' % (sectname, value))
            continue
        for v in values:
            if v == section:
                # section name match: show name and value
                ui.write('%s=%s\n' % (sectname, value))
            elif v == sectname:
                # exact item match: show bare value only
                ui.write(value, '\n')
771 771
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """
    # missing second parent means the null revision
    p2 = rev2 or hex(nullid)

    wlock = repo.wlock()
    try:
        repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(p2))
    finally:
        wlock.release()
787 787
788 788 def debugstate(ui, repo):
789 789 """show the contents of the current dirstate"""
790 790 dc = repo.dirstate
791 791 for file_ in dc:
792 792 if dc[file_][3] == -1:
793 793 # Pad or slice to locale representation
794 794 locale_len = len(time.strftime("%x %X", time.localtime(0)))
795 795 timestr = 'unset'
796 796 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
797 797 else:
798 798 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
799 799 ui.write("%c %3o %10d %s %s\n"
800 800 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
801 801 timestr, file_))
802 802 for f in repo.dirstate.copies():
803 803 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
804 804
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # derive the index file name from the data file name (.d -> .i)
    indexfile = file_[:-2] + ".i"
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), indexfile)
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
812 812
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        # also report whether the parsed date falls in the given range
        matchfn = util.matchdate(range)
        ui.write("match: %s\n" % matchfn(parsed[0]))
824 824
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write(" rev offset length base linkrev" +
             " nodeid p1 p2\n")
    for i in xrange(r.count()):
        node = r.node(i)
        p1, p2 = r.parents(node)
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
            i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
            short(node), short(p1), short(p2)))
836 836
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for i in xrange(r.count()):
        p1, p2 = r.parents(r.node(i))
        ui.write("\t%d -> %d\n" % (r.rev(p1), i))
        # only emit the second edge for real (non-null) second parents
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(p2), i))
    ui.write("}\n")
848 848
def debuginstall(ui):
    '''test Mercurial installation

    Probes the local environment piece by piece (encoding, compiled
    extensions, templates, patch tool, merge helper, editor, username)
    and returns the number of problems found.
    '''

    # write contents to a fresh temp file and return its path
    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    problems = 0

    # encoding
    ui.status(_("Checking encoding (%s)...\n") % util._encoding)
    try:
        util.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    ui.status(_("Checking extensions...\n"))
    try:
        import bdiff, mpatch, base85
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    ui.status(_("Checking templates...\n"))
    try:
        import templater
        t = templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # patch
    ui.status(_("Checking patch...\n"))
    # prefer the user's configured patch tool, then gpatch, then patch
    patcher = ui.config('ui', 'patch')
    patcher = ((patcher and util.find_exe(patcher)) or
               util.find_exe('gpatch') or
               util.find_exe('patch'))
    if not patcher:
        ui.write(_(" Can't find patch or gpatch in PATH\n"))
        ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
        problems += 1
    else:
        # actually attempt a patch here: diff two known texts, apply the
        # diff, and verify the result round-trips
        a = "1\n2\n3\n4\n"
        b = "1\n2\n3\ninsert\n4\n"
        fa = writetemp(a)
        d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
        fd = writetemp(d)

        files = {}
        try:
            patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
        except util.Abort, e:
            ui.write(_(" patch call failed:\n"))
            ui.write(" " + str(e) + "\n")
            problems += 1
        else:
            if list(files) != [os.path.basename(fa)]:
                ui.write(_(" unexpected patch output!"))
                ui.write(_(" (you may have an incompatible version of patch)\n"))
                problems += 1
            a = file(fa).read()
            if a != b:
                ui.write(_(" patch test failed!"))
                ui.write(_(" (you may have an incompatible version of patch)\n"))
                problems += 1

        os.unlink(fa)
        os.unlink(fd)

    # merge helper
    ui.status(_("Checking merge helper...\n"))
    cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
           or "hgmerge")
    cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
    if not cmdpath:
        if cmd == 'hgmerge':
            ui.write(_(" No merge helper set and can't find default"
                       " hgmerge script in PATH\n"))
            ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
            ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
        problems += 1
    else:
        # actually attempt a merge here: local/ancestor/remote with a
        # known expected result checks both success and argument order
        fa = writetemp("1\n2\n3\n4\n")
        fl = writetemp("1\n2\n3\ninsert\n4\n")
        fr = writetemp("begin\n1\n2\n3\n4\n")
        r = util.system('%s "%s" "%s" "%s"' % (cmd, fl, fa, fr))
        if r:
            ui.write(_(" Got unexpected merge error %d!\n") % r)
            problems += 1
        m = file(fl).read()
        if m != "begin\n1\n2\n3\ninsert\n4\n":
            ui.write(_(" Got unexpected merge results!\n"))
            ui.write(_(" (your merge helper may have the"
                       " wrong argument order)\n"))
            ui.write(_(" Result: %r\n") % m)
            problems += 1
        os.unlink(fa)
        os.unlink(fl)
        os.unlink(fr)

    # editor
    ui.status(_("Checking commit editor...\n"))
    editor = (os.environ.get("HGEDITOR") or
              ui.config("ui", "editor") or
              os.environ.get("EDITOR", "vi"))
    cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        problems += 1

    # check username
    ui.status(_("Checking username...\n"))
    user = os.environ.get("HGUSER")
    if user is None:
        user = ui.config("ui", "username")
    if user is None:
        user = os.environ.get("EMAIL")
    if not user:
        # ui.username() itself reports the problem
        ui.warn(" ")
        ui.username()
        ui.write(_(" (specify a username in your .hgrc file)\n"))

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems
997 997
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo.changectx(opts.get('rev', 'tip'))
    walk = cmdutil.walk(repo, (file1,) + pats, opts, ctx.node())
    for src, abs, rel, exact in walk:
        renamed = ctx.filectx(abs).renamed()
        if renamed:
            ui.write(_("%s renamed from %s:%s\n") %
                     (rel, renamed[0], hex(renamed[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)
1009 1009
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    items = list(cmdutil.walk(repo, pats, opts))
    if not items:
        return
    # column widths sized to the longest abs/rel path in the result
    abswidth = max([len(abs) for (src, abs, rel, exact) in items])
    relwidth = max([len(rel) for (src, abs, rel, exact) in items])
    fmt = '%%s %%-%ds %%-%ds %%s' % (abswidth, relwidth)
    for src, abs, rel, exact in items:
        line = fmt % (src, abs, rel, exact and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
1021 1021
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    node1, node2 = cmdutil.revpair(repo, opts['rev'])
    fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    diffopts = patch.diffopts(ui, opts)
    patch.diff(repo, node1, node2, fns, match=matchfn, opts=diffopts)
1049 1049
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %%   literal "%" character
    %H   changeset hash (40 bytes of hexadecimal)
    %N   number of patches being generated
    %R   changeset revision number
    %b   basename of the exporting repository
    %h   short-form changeset hash (12 bytes of hexadecimal)
    %n   zero-padded sequence number, starting at 1
    %r   zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    msg = (len(revs) > 1 and _('exporting patches:\n')
           or _('exporting patch:\n'))
    ui.note(msg)
    diffopts = patch.diffopts(ui, opts)
    patch.export(repo, revs, template=opts['output'],
                 switch_parent=opts['switch_parent'], opts=diffopts)
1090 1090
1091 1091 def grep(ui, repo, pattern, *pats, **opts):
1092 1092 """search for a pattern in specified files and revisions
1093 1093
1094 1094 Search revisions of files for a regular expression.
1095 1095
1096 1096 This command behaves differently than Unix grep. It only accepts
1097 1097 Python/Perl regexps. It searches repository history, not the
1098 1098 working directory. It always prints the revision number in which
1099 1099 a match appears.
1100 1100
1101 1101 By default, grep only prints output for the first revision of a
1102 1102 file in which it finds a match. To get it to print every revision
1103 1103 that contains a change in match status ("-" for a match that
1104 1104 becomes a non-match, or "+" for a non-match that becomes a match),
1105 1105 use the --all flag.
1106 1106 """
1107 1107 reflags = 0
1108 1108 if opts['ignore_case']:
1109 1109 reflags |= re.I
1110 1110 try:
1111 1111 regexp = re.compile(pattern, reflags)
1112 1112 except Exception, inst:
1113 1113 ui.warn(_("grep: invalid match pattern: %s!\n") % inst)
1114 1114 return None
1115 1115 sep, eol = ':', '\n'
1116 1116 if opts['print0']:
1117 1117 sep = eol = '\0'
1118 1118
1119 1119 fcache = {}
1120 1120 def getfile(fn):
1121 1121 if fn not in fcache:
1122 1122 fcache[fn] = repo.file(fn)
1123 1123 return fcache[fn]
1124 1124
1125 1125 def matchlines(body):
1126 1126 begin = 0
1127 1127 linenum = 0
1128 1128 while True:
1129 1129 match = regexp.search(body, begin)
1130 1130 if not match:
1131 1131 break
1132 1132 mstart, mend = match.span()
1133 1133 linenum += body.count('\n', begin, mstart) + 1
1134 1134 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1135 1135 lend = body.find('\n', mend)
1136 1136 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1137 1137 begin = lend + 1
1138 1138
1139 1139 class linestate(object):
1140 1140 def __init__(self, line, linenum, colstart, colend):
1141 1141 self.line = line
1142 1142 self.linenum = linenum
1143 1143 self.colstart = colstart
1144 1144 self.colend = colend
1145 1145
1146 1146 def __eq__(self, other):
1147 1147 return self.line == other.line
1148 1148
1149 1149 matches = {}
1150 1150 copies = {}
1151 1151 def grepbody(fn, rev, body):
1152 1152 matches[rev].setdefault(fn, [])
1153 1153 m = matches[rev][fn]
1154 1154 for lnum, cstart, cend, line in matchlines(body):
1155 1155 s = linestate(line, lnum, cstart, cend)
1156 1156 m.append(s)
1157 1157
1158 1158 def difflinestates(a, b):
1159 1159 sm = difflib.SequenceMatcher(None, a, b)
1160 1160 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1161 1161 if tag == 'insert':
1162 1162 for i in xrange(blo, bhi):
1163 1163 yield ('+', b[i])
1164 1164 elif tag == 'delete':
1165 1165 for i in xrange(alo, ahi):
1166 1166 yield ('-', a[i])
1167 1167 elif tag == 'replace':
1168 1168 for i in xrange(alo, ahi):
1169 1169 yield ('-', a[i])
1170 1170 for i in xrange(blo, bhi):
1171 1171 yield ('+', b[i])
1172 1172
1173 1173 prev = {}
1174 1174 def display(fn, rev, states, prevstates):
1175 1175 found = False
1176 1176 filerevmatches = {}
1177 1177 r = prev.get(fn, -1)
1178 1178 if opts['all']:
1179 1179 iter = difflinestates(states, prevstates)
1180 1180 else:
1181 1181 iter = [('', l) for l in prevstates]
1182 1182 for change, l in iter:
1183 1183 cols = [fn, str(r)]
1184 1184 if opts['line_number']:
1185 1185 cols.append(str(l.linenum))
1186 1186 if opts['all']:
1187 1187 cols.append(change)
1188 1188 if opts['user']:
1189 1189 cols.append(ui.shortuser(get(r)[1]))
1190 1190 if opts['files_with_matches']:
1191 1191 c = (fn, r)
1192 1192 if c in filerevmatches:
1193 1193 continue
1194 1194 filerevmatches[c] = 1
1195 1195 else:
1196 1196 cols.append(l.line)
1197 1197 ui.write(sep.join(cols), eol)
1198 1198 found = True
1199 1199 return found
1200 1200
1201 1201 fstate = {}
1202 1202 skip = {}
1203 1203 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1204 1204 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1205 1205 found = False
1206 1206 follow = opts.get('follow')
1207 1207 for st, rev, fns in changeiter:
1208 1208 if st == 'window':
1209 1209 matches.clear()
1210 1210 elif st == 'add':
1211 1211 mf = repo.changectx(rev).manifest()
1212 1212 matches[rev] = {}
1213 1213 for fn in fns:
1214 1214 if fn in skip:
1215 1215 continue
1216 1216 fstate.setdefault(fn, {})
1217 1217 try:
1218 1218 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1219 1219 if follow:
1220 1220 copied = getfile(fn).renamed(mf[fn])
1221 1221 if copied:
1222 1222 copies.setdefault(rev, {})[fn] = copied[0]
1223 1223 except KeyError:
1224 1224 pass
1225 1225 elif st == 'iter':
1226 1226 states = matches[rev].items()
1227 1227 states.sort()
1228 1228 for fn, m in states:
1229 1229 copy = copies.get(rev, {}).get(fn)
1230 1230 if fn in skip:
1231 1231 if copy:
1232 1232 skip[copy] = True
1233 1233 continue
1234 1234 if fn in prev or fstate[fn]:
1235 1235 r = display(fn, rev, m, fstate[fn])
1236 1236 found = found or r
1237 1237 if r and not opts['all']:
1238 1238 skip[fn] = True
1239 1239 if copy:
1240 1240 skip[copy] = True
1241 1241 fstate[fn] = m
1242 1242 if copy:
1243 1243 fstate[copy] = m
1244 1244 prev[fn] = rev
1245 1245
1246 1246 fstate = fstate.items()
1247 1247 fstate.sort()
1248 1248 for fn, state in fstate:
1249 1249 if fn in skip:
1250 1250 continue
1251 1251 if fn not in copies.get(prev[fn], {}):
1252 1252 found = display(fn, rev, {}, state) or found
1253 1253 return (not found and 1) or 0
1254 1254
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revisions names are given this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    # With --rev, only report heads reachable from that revision.
    if opts['rev']:
        start = repo.lookup(opts['rev'])
    else:
        start = None
    if not branchrevs:
        # Assume we're looking repo-wide heads if no revs were specified.
        heads = repo.heads(start)
    else:
        heads = []
        # Each argument names a branch, either directly or via a revision
        # on it; dedupe so each branch is only processed once.
        visitedset = util.set()
        for branchrev in branchrevs:
            branch = repo.changectx(branchrev).branch()
            if branch in visitedset:
                continue
            visitedset.add(branch)
            bheads = repo.branchheads(branch, start)
            if not bheads:
                # Tailor the warning depending on whether the argument was
                # the branch name itself or some other revision on it.
                if branch != branchrev:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts['rev']))
                else:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts['rev']))
            heads.extend(bheads)
    if not heads:
        # nothing to show: exit non-zero
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(changenode=n)
1302 1302
def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    # (title, options) pairs collected by the helpers below and rendered
    # in one pass at the end of this function.
    option_lists = []

    def addglobalopts(aliases):
        # Append the global-options section (verbose) or a one-line hint
        # telling the user how to get more help (non-verbose).
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    def helpcmd(name):
        # Print help for a single command; raises cmdutil.UnknownCommand
        # (from findcmd) if `name` is not a command.
        if with_version:
            version_(ui)
            ui.write('\n')
        aliases, i = cmdutil.findcmd(ui, name)
        # synopsis
        ui.write("%s\n\n" % i[2])

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            # quiet mode: first line of the docstring only
            doc = doc.splitlines(0)[0]
        ui.write("%s\n" % doc.rstrip())

        if not ui.quiet:
            # aliases
            if len(aliases) > 1:
                ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

            # options
            if i[1]:
                option_lists.append((_("options:\n"), i[1]))

            addglobalopts(False)

    def helplist(select=None):
        # Print a one-line summary for every command in `table` accepted
        # by the optional `select` predicate.
        h = {}
        cmds = {}
        for c, e in table.items():
            # table keys look like "^cmd|alias1|alias2"; the primary name
            # comes first and "^" marks commands shown in the short list
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        fns = h.keys()
        fns.sort()
        # column width for the aligned (non-verbose) listing
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        # Print help for a named help topic; raises UnknownCommand when
        # `name` matches no entry in help.helptable.
        v = None
        for i in help.helptable:
            l = i.split('|')
            if name in l:
                v = i
                header = l[-1]
        if not v:
            raise cmdutil.UnknownCommand(name)

        # description
        doc = help.helptable[v]
        if not doc:
            doc = _("(No help text available)")
        if callable(doc):
            # some topics generate their text lazily
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        # Print help for an extension and list the commands it defines;
        # raises UnknownCommand when no such extension is loaded.
        try:
            mod = extensions.find(name)
        except KeyError:
            raise cmdutil.UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ct = None
        if not ct:
            ui.status(_('no commands defined\n'))
            return

        ui.status(_('list of commands:\n\n'))
        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
        helplist(modcmds.has_key)

    if name and name != 'shortlist':
        # Try command, then topic, then extension; remember the last
        # UnknownCommand so it can be re-raised if all three fail.
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except cmdutil.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            ui.status(_('basic commands:\n\n'))
        else:
            ui.status(_('list of commands:\n\n'))

        helplist()

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                              "%s%s" % (desc,
                                        default
                                        and _(" (default: %s)") % default
                                        or "")))

    if opt_output:
        # align descriptions on the longest option column
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
1482 1482
def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """

    # full 40-char hashes under --debug, short ones otherwise
    hexfunc = ui.debugflag and hex or short
    # no explicit output flag given: show the default summary
    default = not (num or id or branch or tags)
    output = []

    if source:
        # remote lookup: only the changeset id can be queried
        source, revs = cmdutil.parseurl(ui.expandpath(source), [])
        srepo = hg.repository(ui, source)
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(srepo.lookup(rev))]
    elif not rev:
        # working directory: may have one or two parents, and a trailing
        # "+" when there are uncommitted changes
        ctx = repo.workingctx()
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        ctx = repo.changectx(rev)
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if not source and default and not ui.quiet:
        # only non-default branch names are shown in the summary
        b = util.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(util.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
1548 1548
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch. This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    """
    patches = (patch1,) + patches

    # --exact always requires a clean working dir; otherwise only
    # refuse to run when -f/--force was not given
    if opts.get('exact') or not opts['force']:
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]

    # hold both locks for the whole run; each patch becomes one commit
    wlock = repo.wlock()
    lock = repo.lock()

    for p in patches:
        pf = os.path.join(d, p)

        if pf == '-':
            ui.status(_("applying patch from stdin\n"))
            tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, sys.stdin)
        else:
            ui.status(_("applying %s\n") % p)
            tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, file(pf, 'rb'))

        if tmpname is None:
            raise util.Abort(_('no diffs found'))

        try:
            # commit message priority: -m/-l, then the patch's own
            # message, then (None) let commit launch the editor
            cmdline_message = cmdutil.logmessage(opts)
            if cmdline_message:
                # pickup the cmdline msg
                message = cmdline_message
            elif message:
                # pickup the patch msg
                message = message.strip()
            else:
                # launch the editor
                message = None
            ui.debug(_('message:\n%s\n') % message)

            wp = repo.workingctx().parents()
            if opts.get('exact'):
                # --exact: check out the patch's recorded first parent
                # before applying, so the resulting hash can be verified
                if not nodeid or not p1:
                    raise util.Abort(_('not a mercurial patch'))
                p1 = repo.lookup(p1)
                p2 = repo.lookup(p2 or hex(nullid))

                if p1 != wp[0].node():
                    hg.clean(repo, p1, wlock=wlock)
                repo.dirstate.setparents(p1, p2)
            elif p2:
                # patch records a merge: adopt both parents when the
                # working dir already sits on the recorded first parent
                try:
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2)
                    if p1 == wp[0].node():
                        repo.dirstate.setparents(p1, p2)
                except hg.RepoError:
                    pass
            if opts.get('exact') or opts.get('import_branch'):
                repo.dirstate.setbranch(branch or 'default')

            files = {}
            try:
                fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                   files=files)
            finally:
                # record adds/removes even when the patch partly failed
                files = patch.updatedir(ui, repo, files, wlock=wlock)
            n = repo.commit(files, message, user, date, wlock=wlock, lock=lock)
            if opts.get('exact'):
                # hash mismatch means the patch did not round-trip; undo
                if hex(n) != nodeid:
                    repo.rollback(wlock=wlock, lock=lock)
                    raise util.Abort(_('patch is damaged or loses information'))
        finally:
            # patch.extract wrote the diff to a temp file; always clean up
            os.unlink(tmpname)
1647 1647
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % source)
    if revs:
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in revs]
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        # remove a stale --bundle file from a previous run, best-effort
        try:
            os.unlink(opts["bundle"])
        except:
            pass
        ui.status(_("no changes found\n"))
        return 1

    # `cleanup` holds the path of a temporary bundle to delete on exit
    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                if 'changegroupsubset' not in other.capabilities:
                    raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        for n in o:
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            displayer.show(changenode=n)
    finally:
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1714 1714
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # honour --ssh/--remotecmd before touching the destination
    cmdutil.setremoteconfig(ui, opts)
    # instantiating the repository with create set performs the init
    hg.repository(ui, dest, create=True)
1729 1729
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # with --print0, separate names with NUL instead of newline
    if opts['print0']:
        end = '\0'
    else:
        end = '\n'
    rev = opts['rev']
    if rev:
        node = repo.lookup(rev)
    else:
        node = None

    # exit status 0 only when at least one file was printed
    ret = 1
    walker = cmdutil.walk(repo, pats, opts, node=node,
                          badmatch=util.always,
                          default='relglob')
    for src, abs, rel, exact in walker:
        if src == 'b':
            continue
        # without a revision, skip files unknown to the dirstate
        if not node and repo.dirstate.state(abs) == '?':
            continue
        if opts['fullpath']:
            shown = os.path.join(repo.root, abs)
        elif pats and rel:
            shown = rel
        else:
            shown = abs
        ui.write(shown, end)
        ret = 0

    return ret
1770 1770
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # memoized changeset-data accessor, shared with the helpers below
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    if opts['limit']:
        try:
            limit = int(opts['limit'])
        except ValueError:
            raise util.Abort(_('limit must be a positive integer'))
        if limit <= 0: raise util.Abort(_('limit must be positive'))
    else:
        limit = sys.maxint
    count = 0

    # --copies only needs rename data up to the highest requested rev
    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    # rcache: filename -> {changerev: rename info}
    # ncache: filename -> {filenode: rename info}
    # dcache: [manifest node, its readdelta] for the last manifest seen
    rcache = {}
    ncache = {}
    dcache = []
    def getrenamed(fn, rev, man):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # first request for fn: scan its filelog once, up to endrev
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]
        # linkrev != rev: fall back to looking the filenode up in the
        # manifest, reading a full manifest only when there is no delta
        mr = repo.manifest.rev(man)
        if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
            return ncache[fn].get(repo.manifest.find(man, fn)[0])
        if not dcache or dcache[0] != man:
            dcache[:] = [man, repo.manifest.readdelta(man)]
        if fn in dcache[1]:
            return ncache[fn].get(dcache[1][fn])
        return None

    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            # two real parents means a merge changeset
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if df:
                # --date: changes[2] is the (timestamp, tzoffset) pair
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                # -k: match against user, description and file list
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                mf = get(rev)[0]
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev, mf)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            # --limit counts changesets actually displayed
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1892 1892
def manifest(ui, repo, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions. With --debug flag, print
    file revision hashes.
    """

    mf = repo.changectx(rev).manifest()
    names = mf.keys()
    names.sort()

    for name in names:
        # --debug: prefix with the file revision hash
        if ui.debugflag:
            ui.write("%40s " % hex(mf[name]))
        # -v: show a Unix-style permission column
        if ui.verbose:
            ui.write("%3s " % (mf.execf(name) and "755" or "644"))
        ui.write("%s\n" % name)
1917 1917
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the repository contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    # --rev is an alias for the positional argument
    node = node or rev

    if not node:
        # no revision given: pick the single other head, if unambiguous
        allheads = repo.heads()
        if len(allheads) > 2:
            raise util.Abort(_('repo has %d heads - '
                               'please merge with an explicit rev') %
                             len(allheads))
        if len(allheads) == 1:
            raise util.Abort(_('there is nothing to merge - '
                               'use "hg update" instead'))
        parent = repo.dirstate.parents()[0]
        if parent not in allheads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        if parent == allheads[0]:
            node = allheads[-1]
        else:
            node = allheads[0]
    return hg.merge(repo, node, force=force)
1953 1953
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    # resolve the destination, preferring default-push over default
    dest, revs = cmdutil.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % dest)
    missing = repo.findoutgoing(other, force=opts['force'])
    if not missing:
        ui.status(_("no changes found\n"))
        return 1

    # restrict to ancestors of the requested revisions, if any
    nodes = repo.changelog.nodesbetween(missing, revs)[0]
    if opts['newest_first']:
        nodes.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in nodes:
        realparents = [p for p in repo.changelog.parents(node)
                       if p != nullid]
        if opts['no_merges'] and len(realparents) == 2:
            continue
        displayer.show(changenode=node)
1984 1984
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    if file_:
        # a file argument selects the revision that last touched it
        files, match, anypats = cmdutil.matchpats(repo, (file_,), opts)
        if anypats or len(files) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        ctx = repo.filectx(files[0], changeid=rev)
    else:
        if rev:
            ctx = repo.changectx(rev)
        else:
            ctx = repo.workingctx()

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for pctx in ctx.parents():
        node = pctx.node()
        # the null parent is not a real changeset; skip it
        if node != nullid:
            displayer.show(changenode=node)
2010 2010
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    if not search:
        # no argument: dump every configured alias
        for alias, target in ui.configitems("paths"):
            ui.write("%s = %s\n" % (alias, target))
        return

    for alias, target in ui.configitems("paths"):
        if alias == search:
            ui.write("%s\n" % target)
            return
    ui.warn(_("not found!\n"))
    return 1
2030 2030
def postincoming(ui, repo, modheads, optupdate, wasempty):
    # After changesets arrived: optionally update the working dir, and
    # tell the user what to do next.
    if not modheads:
        return
    if optupdate:
        if wasempty:
            # repository was empty before: check out the default branch
            return hg.update(repo, repo.lookup('default'))
        if modheads == 1:
            return hg.update(repo, repo.changelog.tip()) # update
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
2045 2045
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    An optional identifier after # indicates a particular branch, tag,
    or changeset to pull.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % (source))
    if revs:
        # Resolving symbolic revisions needs the remote 'lookup'
        # capability (old servers only accept raw hashes).
        if 'lookup' in other.capabilities:
            revs = [other.lookup(rev) for rev in revs]
        else:
            error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
            raise util.Abort(error)

    # Remember emptiness before the pull so postincoming can decide
    # whether to check out 'default' on a fresh clone-like pull.
    wasempty = repo.changelog.count() == 0
    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'], wasempty)
2102 2102
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to sync and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    An optional identifier after # indicates a particular branch, tag,
    or changeset to push.

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    dest, revs = cmdutil.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    ui.status('pushing to %s\n' % (dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    # repo.push returns the number of heads pushed; treat 0 as failure
    # so the shell exit code reflects whether anything was sent.
    r = repo.push(other, opts['force'], revs=revs)
    return r == 0
2143 2143
2144 2144 def rawcommit(ui, repo, *pats, **opts):
2145 2145 """raw commit interface (DEPRECATED)
2146 2146
2147 2147 (DEPRECATED)
2148 2148 Lowlevel commit, for use in helper scripts.
2149 2149
2150 2150 This command is not intended to be used by normal users, as it is
2151 2151 primarily useful for importing from other SCMs.
2152 2152
2153 2153 This command is now deprecated and will be removed in a future
2154 2154 release, please use debugsetparents and commit instead.
2155 2155 """
2156 2156
2157 2157 ui.warn(_("(the rawcommit command is deprecated)\n"))
2158 2158
2159 2159 message = cmdutil.logmessage(opts)
2160 2160
2161 2161 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2162 2162 if opts['files']:
2163 2163 files += open(opts['files']).read().splitlines()
2164 2164
2165 2165 parents = [repo.lookup(p) for p in opts['parent']]
2166 2166
2167 2167 try:
2168 2168 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2169 2169 except ValueError, inst:
2170 2170 raise util.Abort(str(inst))
2171 2171
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # Only verify when the journal was actually replayed; otherwise
    # report failure with exit status 1.
    if not repo.recover():
        return 1
    return hg.verify(repo)
2183 2183
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the
    entire project history. If the files still exist in the working
    directory, they will be deleted from it. If invoked with --after,
    files are marked as removed, but not actually unlinked unless --force
    is also given. Without exact file names, --after will only mark
    files as removed if they are no longer in the working directory.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.

    Modified files and added files are not removed by default. To
    remove them, use the -f/--force option.
    """
    if not opts['after'] and not pats:
        raise util.Abort(_('no files specified'))
    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    # Classify the candidates so we can refuse/skip as appropriate.
    mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
    modified, added, removed, deleted, unknown = mardu
    remove, forget = [], []
    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
        reason = None
        if abs in modified and not opts['force']:
            reason = _('is modified (use -f to force removal)')
        elif abs in added:
            if opts['force']:
                # forced removal of an added file just unschedules the add
                forget.append(abs)
                continue
            reason = _('has been marked for add (use -f to force removal)')
        elif repo.dirstate.state(abs) == '?':
            reason = _('is not managed')
        elif opts['after'] and not exact and abs not in deleted:
            continue
        elif abs in removed:
            continue
        if reason:
            # only complain about files the user named explicitly
            if exact:
                ui.warn(_('not removing %s: file %s\n') % (rel, reason))
        else:
            if ui.verbose or not exact:
                ui.status(_('removing %s\n') % rel)
            remove.append(abs)
    repo.forget(forget)
    repo.remove(remove, unlink=opts['force'] or not opts['after'])
2234 2234
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    wlock = repo.wlock(0)
    errs, copied = docopy(ui, repo, pats, opts, wlock)
    # Every successfully copied source gets scheduled for removal.
    removals = []
    for abs, rel, exact in copied:
        if ui.verbose or not exact:
            ui.status(_('removing %s\n') % rel)
        removals.append(abs)
    if not opts.get('dry_run'):
        repo.remove(removals, True, wlock=wlock)
    return errs
2259 2259
def revert(ui, repo, *pats, **opts):
    """revert files or dirs to their states as of some revision

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of a change that should not have been committed.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is restored. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.

    If no arguments are given, no files are reverted.
    """

    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    mf = ctx.manifest()
    if node == parent:
        pmf = mf
    else:
        pmf = None

    wlock = repo.wlock()

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}
    target_only = {}

    # walk dirstate.

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                             badmatch=mf.has_key):
        names[abs] = (rel, exact)
        if src == 'b':
            target_only[abs] = True

    # walk target manifest.

    def badmatch(path):
        if path in names:
            return True
        path_ = path + '/'
        for f in names:
            if f.startswith(path_):
                return True
        return False

    for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
                                             badmatch=badmatch):
        if abs in names or src == 'b':
            continue
        names[abs] = (rel, exact)
        target_only[abs] = True

    changes = repo.status(match=names.has_key, wlock=wlock)[:5]
    modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)

    revert = ([], _('reverting %s\n'))
    add = ([], _('adding %s\n'))
    remove = ([], _('removing %s\n'))
    forget = ([], _('forgetting %s\n'))
    undelete = ([], _('undeleting %s\n'))
    update = {}

    disptable = (
        # dispatch table:
        #   file state
        #   action if in target manifest
        #   action if not in target manifest
        #   make backup if in target manifest
        #   make backup if not in target manifest
        (modified, revert, remove, True, True),
        (added, revert, forget, True, False),
        (removed, undelete, None, False, False),
        (deleted, revert, remove, False, False),
        (unknown, add, None, True, False),
        (target_only, add, None, False, False),
        )

    entries = names.items()
    entries.sort()

    for abs, (rel, exact) in entries:
        mfentry = mf.get(abs)
        target = repo.wjoin(abs)
        def handle(xlist, dobackup):
            xlist[0].append(abs)
            update[abs] = 1
            if dobackup and not opts['no_backup'] and util.lexists(target):
                bakname = "%s.orig" % rel
                ui.note(_('saving current version of %s as %s\n') %
                        (rel, bakname))
                if not opts.get('dry_run'):
                    util.copyfile(target, bakname)
            if ui.verbose or not exact:
                ui.status(xlist[1] % rel)
        for table, hitlist, misslist, backuphit, backupmiss in disptable:
            if abs not in table: continue
            # file has changed in dirstate
            if mfentry:
                handle(hitlist, backuphit)
            elif misslist is not None:
                handle(misslist, backupmiss)
            else:
                if exact: ui.warn(_('file not managed: %s\n') % rel)
            break
        else:
            # file has not changed in dirstate
            if node == parent:
                if exact: ui.warn(_('no changes needed to %s\n') % rel)
                continue
            if pmf is None:
                # only need parent manifest in this unlikely case,
                # so do not read by default
                pmf = repo.changectx(parent).manifest()
            if abs in pmf:
                if mfentry:
                    # if version of file is same in parent and target
                    # manifests, do nothing
                    if pmf[abs] != mfentry:
                        handle(revert, False)
                else:
                    handle(remove, False)

    # With --dry-run nothing is reverted, so default the return code to
    # success instead of referencing an unbound name below.
    r = 0
    if not opts.get('dry_run'):
        for f in forget[0]:
            repo.dirstate.forget(f)
        r = hg.revert(repo, node, update.has_key, wlock)
        for f in add[0]:
            repo.dirstate.add(f)
        for f in undelete[0]:
            repo.dirstate.normal(f)
        for f in remove[0]:
            repo.dirstate.remove(f)
    return r
2424 2428
def rollback(ui, repo):
    """roll back the last transaction in this repository

    Roll back the last transaction in this repository, restoring the
    project to its state prior to the transaction.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, which
    may lose subsequent dirstate changes.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # all the heavy lifting lives in localrepository.rollback()
    repo.rollback()
2455 2459
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write("%s\n" % repo.root)
2462 2466
2463 2467 def serve(ui, repo, **opts):
2464 2468 """export the repository via HTTP
2465 2469
2466 2470 Start a local HTTP repository browser and pull server.
2467 2471
2468 2472 By default, the server logs accesses to stdout and errors to
2469 2473 stderr. Use the "-A" and "-E" options to log to files.
2470 2474 """
2471 2475
2472 2476 if opts["stdio"]:
2473 2477 if repo is None:
2474 2478 raise hg.RepoError(_("There is no Mercurial repository here"
2475 2479 " (.hg not found)"))
2476 2480 s = sshserver.sshserver(ui, repo)
2477 2481 s.serve_forever()
2478 2482
2479 2483 parentui = ui.parentui or ui
2480 2484 optlist = ("name templates style address port ipv6"
2481 2485 " accesslog errorlog webdir_conf certificate")
2482 2486 for o in optlist.split():
2483 2487 if opts[o]:
2484 2488 parentui.setconfig("web", o, str(opts[o]))
2485 2489 if repo.ui != parentui:
2486 2490 repo.ui.setconfig("web", o, str(opts[o]))
2487 2491
2488 2492 if repo is None and not ui.config("web", "webdir_conf"):
2489 2493 raise hg.RepoError(_("There is no Mercurial repository here"
2490 2494 " (.hg not found)"))
2491 2495
2492 2496 class service:
2493 2497 def init(self):
2494 2498 util.set_signal_handler()
2495 2499 try:
2496 2500 self.httpd = hgweb.server.create_server(parentui, repo)
2497 2501 except socket.error, inst:
2498 2502 raise util.Abort(_('cannot start server: ') + inst.args[1])
2499 2503
2500 2504 if not ui.verbose: return
2501 2505
2502 2506 if self.httpd.port != 80:
2503 2507 ui.status(_('listening at http://%s:%d/\n') %
2504 2508 (self.httpd.addr, self.httpd.port))
2505 2509 else:
2506 2510 ui.status(_('listening at http://%s/\n') % self.httpd.addr)
2507 2511
2508 2512 def run(self):
2509 2513 self.httpd.serve_forever()
2510 2514
2511 2515 service = service()
2512 2516
2513 2517 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2514 2518
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored, are
    not listed unless -c (clean), -i (ignored) or -A is given.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored (not shown by default)
      = the previous added file was copied from here
    """

    all = opts['all']
    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))

    files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
    cwd = (pats and repo.getcwd()) or ''
    modified, added, removed, deleted, unknown, ignored, clean = repo.status(
        node1=node1, node2=node2, files=files, match=matchfn,
        list_ignored=all or opts['ignored'],
        list_clean=all or opts['clean'])

    changetypes = (('modified', 'M', modified),
                   ('added', 'A', added),
                   ('removed', 'R', removed),
                   ('deleted', '!', deleted),
                   ('unknown', '?', unknown),
                   ('ignored', 'I', ignored))

    explicit_changetypes = changetypes + (('clean', 'C', clean),)

    end = opts['print0'] and '\0' or '\n'

    # show only the explicitly requested categories, or all the
    # default ones when none were requested
    selected = [ct for ct in explicit_changetypes if all or opts[ct[0]]]
    for opt, char, changes in (selected or changetypes):
        if opts['no_status']:
            fmt = "%%s%s" % end
        else:
            fmt = "%s %%s%s" % (char, end)

        for f in changes:
            ui.write(fmt % repo.pathto(f, cwd))
            if (all or opts.get('copies')) and not opts.get('no_status'):
                copied = repo.dirstate.copied(f)
                if copied:
                    ui.write(' %s%s' % (repo.pathto(copied, cwd), end))
2577 2581
def tag(ui, repo, name, rev_=None, **opts):
    """add a tag for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).
    """
    if name in ['tip', '.', 'null']:
        raise util.Abort(_("the name '%s' is reserved") % name)
    if rev_ is not None:
        ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
                  "please use 'hg tag [-r REV] NAME' instead\n"))
        if opts['rev']:
            raise util.Abort(_("use only one form to specify the revision"))
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']
    message = opts['message']
    if opts['remove']:
        # a tag is removed by re-tagging the null revision
        if not name in repo.tags():
            raise util.Abort(_('tag %s does not exist') % name)
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % name
    elif name in repo.tags() and not opts['force']:
        raise util.Abort(_('a tag named %s already exists (use -f to force)')
                         % name)
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo.changectx(rev_).node()

    if not message:
        message = _('Added tag %s for changeset %s') % (name, short(r))

    repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2626 2630
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags.
    """

    taglist = repo.tagslist()
    taglist.reverse()
    hexfunc = ui.debugflag and hex or short
    for t, n in taglist:
        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except revlog.LookupError:
            # tag points at a node we do not have: show a placeholder rev
            r = "    ?:%s" % hn
        if ui.quiet:
            ui.write("%s\n" % t)
        else:
            spaces = " " * (30 - util.locallen(t))
            ui.write("%s%s %s\n" % (t, spaces, r))
2649 2653
def tip(ui, repo, **opts):
    """show the tip revision

    Show the tip revision.
    """
    # tip is the highest revision number: count() - 1 (nullrev == -1)
    displayer = cmdutil.show_changeset(ui, repo, opts)
    displayer.show(nullrev + repo.changelog.count())
2656 2660
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the
    bundle command.
    """
    fnames = (fname1,) + fnames
    # record emptiness before applying so postincoming can decide how
    # to update the working directory
    wasempty = repo.changelog.count() == 0
    for fname in fnames:
        # local files are opened directly, anything else via urllib
        if os.path.exists(fname):
            f = open(fname, "rb")
        else:
            f = urllib.urlopen(fname)
        gen = changegroup.readbundle(f, fname)
        modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)

    return postincoming(ui, repo, modheads, opts['update'], wasempty)
2675 2679
def update(ui, repo, node=None, rev=None, clean=False, date=None):
    """update working directory

    Update the working directory to the specified revision, or the
    tip of the current branch if none is specified.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    discarding local changes.
    """
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    # node is a positional alias for rev
    if not rev:
        rev = node

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        rev = cmdutil.finddate(ui, repo, date)

    if clean:
        return hg.clean(repo, rev)
    return hg.update(repo, rev)
2707 2711
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    # the actual work is done by hg.verify; propagate its exit code
    return hg.verify(repo)
2719 2723
def version_(ui):
    """output version and copyright information"""
    ui.write(_("Mercurial Distributed SCM (version %s)\n")
             % version.get_version())
    # copyright notice only shown at normal/verbose levels
    ui.status(_(
        "\nCopyright (C) 2005-2007 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    ))
2730 2734
2731 2735 # Command options and aliases are listed here, alphabetically
2732 2736
# Options accepted by every command.
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', util._encoding, _('set the charset encoding')),
    ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]
2753 2757
# Shared -n/--dry-run option.
dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]
2756 2760
# Shared options for commands that talk to a remote repository.
remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]
2761 2765
# Shared -I/-X pattern options for commands that walk the working dir.
walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]
2766 2770
# Shared -m/-l options for commands that create a commit message.
commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]
2771 2775
2772 2776 table = {
2773 2777 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2774 2778 "addremove":
2775 2779 (addremove,
2776 2780 [('s', 'similarity', '',
2777 2781 _('guess renamed files by similarity (0<=s<=100)')),
2778 2782 ] + walkopts + dryrunopts,
2779 2783 _('hg addremove [OPTION]... [FILE]...')),
2780 2784 "^annotate":
2781 2785 (annotate,
2782 2786 [('r', 'rev', '', _('annotate the specified revision')),
2783 2787 ('f', 'follow', None, _('follow file copies and renames')),
2784 2788 ('a', 'text', None, _('treat all files as text')),
2785 2789 ('u', 'user', None, _('list the author')),
2786 2790 ('d', 'date', None, _('list the date')),
2787 2791 ('n', 'number', None, _('list the revision number (default)')),
2788 2792 ('c', 'changeset', None, _('list the changeset')),
2789 2793 ('l', 'line-number', None,
2790 2794 _('show line number at the first appearance'))
2791 2795 ] + walkopts,
2792 2796 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2793 2797 "archive":
2794 2798 (archive,
2795 2799 [('', 'no-decode', None, _('do not pass files through decoders')),
2796 2800 ('p', 'prefix', '', _('directory prefix for files in archive')),
2797 2801 ('r', 'rev', '', _('revision to distribute')),
2798 2802 ('t', 'type', '', _('type of distribution to create')),
2799 2803 ] + walkopts,
2800 2804 _('hg archive [OPTION]... DEST')),
2801 2805 "backout":
2802 2806 (backout,
2803 2807 [('', 'merge', None,
2804 2808 _('merge with old dirstate parent after backout')),
2805 2809 ('d', 'date', '', _('record datecode as commit date')),
2806 2810 ('', 'parent', '', _('parent to choose when backing out merge')),
2807 2811 ('u', 'user', '', _('record user as committer')),
2808 2812 ('r', 'rev', '', _('revision to backout')),
2809 2813 ] + walkopts + commitopts,
2810 2814 _('hg backout [OPTION]... [-r] REV')),
2811 2815 "branch":
2812 2816 (branch,
2813 2817 [('f', 'force', None,
2814 2818 _('set branch name even if it shadows an existing branch'))],
2815 2819 _('hg branch [NAME]')),
2816 2820 "branches":
2817 2821 (branches,
2818 2822 [('a', 'active', False,
2819 2823 _('show only branches that have unmerged heads'))],
2820 2824 _('hg branches [-a]')),
2821 2825 "bundle":
2822 2826 (bundle,
2823 2827 [('f', 'force', None,
2824 2828 _('run even when remote repository is unrelated')),
2825 2829 ('r', 'rev', [],
2826 2830 _('a changeset you would like to bundle')),
2827 2831 ('', 'base', [],
2828 2832 _('a base changeset to specify instead of a destination')),
2829 2833 ] + remoteopts,
2830 2834 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2831 2835 "cat":
2832 2836 (cat,
2833 2837 [('o', 'output', '', _('print output to file with formatted name')),
2834 2838 ('r', 'rev', '', _('print the given revision')),
2835 2839 ] + walkopts,
2836 2840 _('hg cat [OPTION]... FILE...')),
2837 2841 "^clone":
2838 2842 (clone,
2839 2843 [('U', 'noupdate', None, _('do not update the new working directory')),
2840 2844 ('r', 'rev', [],
2841 2845 _('a changeset you would like to have after cloning')),
2842 2846 ('', 'pull', None, _('use pull protocol to copy metadata')),
2843 2847 ('', 'uncompressed', None,
2844 2848 _('use uncompressed transfer (fast over LAN)')),
2845 2849 ] + remoteopts,
2846 2850 _('hg clone [OPTION]... SOURCE [DEST]')),
2847 2851 "^commit|ci":
2848 2852 (commit,
2849 2853 [('A', 'addremove', None,
2850 2854 _('mark new/missing files as added/removed before committing')),
2851 2855 ('d', 'date', '', _('record datecode as commit date')),
2852 2856 ('u', 'user', '', _('record user as commiter')),
2853 2857 ] + walkopts + commitopts,
2854 2858 _('hg commit [OPTION]... [FILE]...')),
2855 2859 "copy|cp":
2856 2860 (copy,
2857 2861 [('A', 'after', None, _('record a copy that has already occurred')),
2858 2862 ('f', 'force', None,
2859 2863 _('forcibly copy over an existing managed file')),
2860 2864 ] + walkopts + dryrunopts,
2861 2865 _('hg copy [OPTION]... [SOURCE]... DEST')),
2862 2866 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2863 2867 "debugcomplete":
2864 2868 (debugcomplete,
2865 2869 [('o', 'options', None, _('show the command options'))],
2866 2870 _('debugcomplete [-o] CMD')),
2867 2871 "debuginstall": (debuginstall, [], _('debuginstall')),
2868 2872 "debugrebuildstate":
2869 2873 (debugrebuildstate,
2870 2874 [('r', 'rev', '', _('revision to rebuild to'))],
2871 2875 _('debugrebuildstate [-r REV] [REV]')),
2872 2876 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2873 2877 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2874 2878 "debugstate": (debugstate, [], _('debugstate')),
2875 2879 "debugdate":
2876 2880 (debugdate,
2877 2881 [('e', 'extended', None, _('try extended date formats'))],
2878 2882 _('debugdate [-e] DATE [RANGE]')),
2879 2883 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2880 2884 "debugindex": (debugindex, [], _('debugindex FILE')),
2881 2885 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2882 2886 "debugrename":
2883 2887 (debugrename,
2884 2888 [('r', 'rev', '', _('revision to debug'))],
2885 2889 _('debugrename [-r REV] FILE')),
2886 2890 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2887 2891 "^diff":
2888 2892 (diff,
2889 2893 [('r', 'rev', [], _('revision')),
2890 2894 ('a', 'text', None, _('treat all files as text')),
2891 2895 ('p', 'show-function', None,
2892 2896 _('show which function each change is in')),
2893 2897 ('g', 'git', None, _('use git extended diff format')),
2894 2898 ('', 'nodates', None, _("don't include dates in diff headers")),
2895 2899 ('w', 'ignore-all-space', None,
2896 2900 _('ignore white space when comparing lines')),
2897 2901 ('b', 'ignore-space-change', None,
2898 2902 _('ignore changes in the amount of white space')),
2899 2903 ('B', 'ignore-blank-lines', None,
2900 2904 _('ignore changes whose lines are all blank')),
2901 2905 ] + walkopts,
2902 2906 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2903 2907 "^export":
2904 2908 (export,
2905 2909 [('o', 'output', '', _('print output to file with formatted name')),
2906 2910 ('a', 'text', None, _('treat all files as text')),
2907 2911 ('g', 'git', None, _('use git extended diff format')),
2908 2912 ('', 'nodates', None, _("don't include dates in diff headers")),
2909 2913 ('', 'switch-parent', None, _('diff against the second parent'))],
2910 2914 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2911 2915 "grep":
2912 2916 (grep,
2913 2917 [('0', 'print0', None, _('end fields with NUL')),
2914 2918 ('', 'all', None, _('print all revisions that match')),
2915 2919 ('f', 'follow', None,
2916 2920 _('follow changeset history, or file history across copies and renames')),
2917 2921 ('i', 'ignore-case', None, _('ignore case when matching')),
2918 2922 ('l', 'files-with-matches', None,
2919 2923 _('print only filenames and revs that match')),
2920 2924 ('n', 'line-number', None, _('print matching line numbers')),
2921 2925 ('r', 'rev', [], _('search in given revision range')),
2922 2926 ('u', 'user', None, _('print user who committed change')),
2923 2927 ] + walkopts,
2924 2928 _('hg grep [OPTION]... PATTERN [FILE]...')),
2925 2929 "heads":
2926 2930 (heads,
2927 2931 [('', 'style', '', _('display using template map file')),
2928 2932 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2929 2933 ('', 'template', '', _('display with template'))],
2930 2934 _('hg heads [-r REV] [REV]...')),
2931 2935 "help": (help_, [], _('hg help [COMMAND]')),
2932 2936 "identify|id":
2933 2937 (identify,
2934 2938 [('r', 'rev', '', _('identify the specified rev')),
2935 2939 ('n', 'num', None, _('show local revision number')),
2936 2940 ('i', 'id', None, _('show global revision id')),
2937 2941 ('b', 'branch', None, _('show branch')),
2938 2942 ('t', 'tags', None, _('show tags'))],
2939 2943 _('hg identify [-nibt] [-r REV] [SOURCE]')),
2940 2944 "import|patch":
2941 2945 (import_,
2942 2946 [('p', 'strip', 1,
2943 2947 _('directory strip option for patch. This has the same\n'
2944 2948 'meaning as the corresponding patch option')),
2945 2949 ('b', 'base', '', _('base path')),
2946 2950 ('f', 'force', None,
2947 2951 _('skip check for outstanding uncommitted changes')),
2948 2952 ('', 'exact', None,
2949 2953 _('apply patch to the nodes from which it was generated')),
2950 2954 ('', 'import-branch', None,
2951 2955 _('Use any branch information in patch (implied by --exact)'))] + commitopts,
2952 2956 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2953 2957 "incoming|in": (incoming,
2954 2958 [('M', 'no-merges', None, _('do not show merges')),
2955 2959 ('f', 'force', None,
2956 2960 _('run even when remote repository is unrelated')),
2957 2961 ('', 'style', '', _('display using template map file')),
2958 2962 ('n', 'newest-first', None, _('show newest record first')),
2959 2963 ('', 'bundle', '', _('file to store the bundles into')),
2960 2964 ('p', 'patch', None, _('show patch')),
2961 2965 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2962 2966 ('', 'template', '', _('display with template')),
2963 2967 ] + remoteopts,
2964 2968 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2965 2969 ' [--bundle FILENAME] [SOURCE]')),
2966 2970 "^init":
2967 2971 (init,
2968 2972 remoteopts,
2969 2973 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2970 2974 "locate":
2971 2975 (locate,
2972 2976 [('r', 'rev', '', _('search the repository as it stood at rev')),
2973 2977 ('0', 'print0', None,
2974 2978 _('end filenames with NUL, for use with xargs')),
2975 2979 ('f', 'fullpath', None,
2976 2980 _('print complete paths from the filesystem root')),
2977 2981 ] + walkopts,
2978 2982 _('hg locate [OPTION]... [PATTERN]...')),
2979 2983 "^log|history":
2980 2984 (log,
2981 2985 [('f', 'follow', None,
2982 2986 _('follow changeset history, or file history across copies and renames')),
2983 2987 ('', 'follow-first', None,
2984 2988 _('only follow the first parent of merge changesets')),
2985 2989 ('d', 'date', '', _('show revs matching date spec')),
2986 2990 ('C', 'copies', None, _('show copied files')),
2987 2991 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2988 2992 ('l', 'limit', '', _('limit number of changes displayed')),
2989 2993 ('r', 'rev', [], _('show the specified revision or range')),
2990 2994 ('', 'removed', None, _('include revs where files were removed')),
2991 2995 ('M', 'no-merges', None, _('do not show merges')),
2992 2996 ('', 'style', '', _('display using template map file')),
2993 2997 ('m', 'only-merges', None, _('show only merges')),
2994 2998 ('p', 'patch', None, _('show patch')),
2995 2999 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2996 3000 ('', 'template', '', _('display with template')),
2997 3001 ] + walkopts,
2998 3002 _('hg log [OPTION]... [FILE]')),
2999 3003 "manifest": (manifest, [], _('hg manifest [REV]')),
3000 3004 "^merge":
3001 3005 (merge,
3002 3006 [('f', 'force', None, _('force a merge with outstanding changes')),
3003 3007 ('r', 'rev', '', _('revision to merge')),
3004 3008 ],
3005 3009 _('hg merge [-f] [[-r] REV]')),
3006 3010 "outgoing|out": (outgoing,
3007 3011 [('M', 'no-merges', None, _('do not show merges')),
3008 3012 ('f', 'force', None,
3009 3013 _('run even when remote repository is unrelated')),
3010 3014 ('p', 'patch', None, _('show patch')),
3011 3015 ('', 'style', '', _('display using template map file')),
3012 3016 ('r', 'rev', [], _('a specific revision you would like to push')),
3013 3017 ('n', 'newest-first', None, _('show newest record first')),
3014 3018 ('', 'template', '', _('display with template')),
3015 3019 ] + remoteopts,
3016 3020 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3017 3021 "^parents":
3018 3022 (parents,
3019 3023 [('r', 'rev', '', _('show parents from the specified rev')),
3020 3024 ('', 'style', '', _('display using template map file')),
3021 3025 ('', 'template', '', _('display with template'))],
3022 3026 _('hg parents [-r REV] [FILE]')),
3023 3027 "paths": (paths, [], _('hg paths [NAME]')),
3024 3028 "^pull":
3025 3029 (pull,
3026 3030 [('u', 'update', None,
3027 3031 _('update to new tip if changesets were pulled')),
3028 3032 ('f', 'force', None,
3029 3033 _('run even when remote repository is unrelated')),
3030 3034 ('r', 'rev', [],
3031 3035 _('a specific revision up to which you would like to pull')),
3032 3036 ] + remoteopts,
3033 3037 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3034 3038 "^push":
3035 3039 (push,
3036 3040 [('f', 'force', None, _('force push')),
3037 3041 ('r', 'rev', [], _('a specific revision you would like to push')),
3038 3042 ] + remoteopts,
3039 3043 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3040 3044 "debugrawcommit|rawcommit":
3041 3045 (rawcommit,
3042 3046 [('p', 'parent', [], _('parent')),
3043 3047 ('d', 'date', '', _('date code')),
3044 3048 ('u', 'user', '', _('user')),
3045 3049 ('F', 'files', '', _('file list'))
3046 3050 ] + commitopts,
3047 3051 _('hg debugrawcommit [OPTION]... [FILE]...')),
3048 3052 "recover": (recover, [], _('hg recover')),
3049 3053 "^remove|rm":
3050 3054 (remove,
3051 3055 [('A', 'after', None, _('record remove that has already occurred')),
3052 3056 ('f', 'force', None, _('remove file even if modified')),
3053 3057 ] + walkopts,
3054 3058 _('hg remove [OPTION]... FILE...')),
3055 3059 "rename|mv":
3056 3060 (rename,
3057 3061 [('A', 'after', None, _('record a rename that has already occurred')),
3058 3062 ('f', 'force', None,
3059 3063 _('forcibly copy over an existing managed file')),
3060 3064 ] + walkopts + dryrunopts,
3061 3065 _('hg rename [OPTION]... SOURCE... DEST')),
3062 3066 "^revert":
3063 3067 (revert,
3064 3068 [('a', 'all', None, _('revert all changes when no arguments given')),
3065 3069 ('d', 'date', '', _('tipmost revision matching date')),
3066 3070 ('r', 'rev', '', _('revision to revert to')),
3067 3071 ('', 'no-backup', None, _('do not save backup copies of files')),
3068 3072 ] + walkopts + dryrunopts,
3069 3073 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3070 3074 "rollback": (rollback, [], _('hg rollback')),
3071 3075 "root": (root, [], _('hg root')),
3072 3076 "showconfig|debugconfig":
3073 3077 (showconfig,
3074 3078 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3075 3079 _('showconfig [-u] [NAME]...')),
3076 3080 "^serve":
3077 3081 (serve,
3078 3082 [('A', 'accesslog', '', _('name of access log file to write to')),
3079 3083 ('d', 'daemon', None, _('run server in background')),
3080 3084 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3081 3085 ('E', 'errorlog', '', _('name of error log file to write to')),
3082 3086 ('p', 'port', 0, _('port to use (default: 8000)')),
3083 3087 ('a', 'address', '', _('address to use')),
3084 3088 ('n', 'name', '',
3085 3089 _('name to show in web pages (default: working dir)')),
3086 3090 ('', 'webdir-conf', '', _('name of the webdir config file'
3087 3091 ' (serve more than one repo)')),
3088 3092 ('', 'pid-file', '', _('name of file to write process ID to')),
3089 3093 ('', 'stdio', None, _('for remote clients')),
3090 3094 ('t', 'templates', '', _('web templates to use')),
3091 3095 ('', 'style', '', _('template style to use')),
3092 3096 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3093 3097 ('', 'certificate', '', _('SSL certificate file'))],
3094 3098 _('hg serve [OPTION]...')),
3095 3099 "^status|st":
3096 3100 (status,
3097 3101 [('A', 'all', None, _('show status of all files')),
3098 3102 ('m', 'modified', None, _('show only modified files')),
3099 3103 ('a', 'added', None, _('show only added files')),
3100 3104 ('r', 'removed', None, _('show only removed files')),
3101 3105 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3102 3106 ('c', 'clean', None, _('show only files without changes')),
3103 3107 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3104 3108 ('i', 'ignored', None, _('show only ignored files')),
3105 3109 ('n', 'no-status', None, _('hide status prefix')),
3106 3110 ('C', 'copies', None, _('show source of copied files')),
3107 3111 ('0', 'print0', None,
3108 3112 _('end filenames with NUL, for use with xargs')),
3109 3113 ('', 'rev', [], _('show difference from revision')),
3110 3114 ] + walkopts,
3111 3115 _('hg status [OPTION]... [FILE]...')),
3112 3116 "tag":
3113 3117 (tag,
3114 3118 [('f', 'force', None, _('replace existing tag')),
3115 3119 ('l', 'local', None, _('make the tag local')),
3116 3120 ('m', 'message', '', _('message for tag commit log entry')),
3117 3121 ('d', 'date', '', _('record datecode as commit date')),
3118 3122 ('u', 'user', '', _('record user as commiter')),
3119 3123 ('r', 'rev', '', _('revision to tag')),
3120 3124 ('', 'remove', None, _('remove a tag'))],
3121 3125 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3122 3126 "tags": (tags, [], _('hg tags')),
3123 3127 "tip":
3124 3128 (tip,
3125 3129 [('', 'style', '', _('display using template map file')),
3126 3130 ('p', 'patch', None, _('show patch')),
3127 3131 ('', 'template', '', _('display with template'))],
3128 3132 _('hg tip [-p]')),
3129 3133 "unbundle":
3130 3134 (unbundle,
3131 3135 [('u', 'update', None,
3132 3136 _('update to new tip if changesets were unbundled'))],
3133 3137 _('hg unbundle [-u] FILE...')),
3134 3138 "^update|up|checkout|co":
3135 3139 (update,
3136 3140 [('C', 'clean', None, _('overwrite locally modified files')),
3137 3141 ('d', 'date', '', _('tipmost revision matching date')),
3138 3142 ('r', 'rev', '', _('revision'))],
3139 3143 _('hg update [-C] [-d DATE] [[-r] REV]')),
3140 3144 "verify": (verify, [], _('hg verify')),
3141 3145 "version": (version_, [], _('hg version')),
3142 3146 }
3143 3147
3144 3148 extensions.commandtable = table
3145 3149
3146 3150 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3147 3151 " debugindex debugindexdot debugdate debuginstall")
3148 3152 optionalrepo = ("paths serve showconfig")
3149 3153
3150 3154 def dispatch(args, argv0=None):
3151 3155 try:
3152 3156 u = ui.ui(traceback='--traceback' in args)
3153 3157 except util.Abort, inst:
3154 3158 sys.stderr.write(_("abort: %s\n") % inst)
3155 3159 return -1
3156 3160 return cmdutil.runcatch(u, args, argv0=argv0)
3157 3161
3158 3162 def run():
3159 3163 sys.exit(dispatch(sys.argv[1:], argv0=sys.argv[0]))
@@ -1,488 +1,507 b''
1 1 """
2 2 dirstate.py - working directory tracking for mercurial
3 3
4 4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5
6 6 This software may be used and distributed according to the terms
7 7 of the GNU General Public License, incorporated herein by reference.
8 8 """
9 9
10 10 from node import *
11 11 from i18n import _
12 12 import struct, os, time, bisect, stat, strutil, util, re, errno, ignore
13 13 import cStringIO
14 14
15 15 _unknown = ('?', 0, 0, 0)
16 16 _format = ">cllll"
17 17
18 18 class dirstate(object):
19 19
20 20 def __init__(self, opener, ui, root):
21 21 self._opener = opener
22 22 self._root = root
23 23 self._dirty = False
24 24 self._ui = ui
25 25
26 26 def __getattr__(self, name):
27 27 if name == '_map':
28 28 self._read()
29 29 return self._map
30 30 elif name == '_copymap':
31 31 self._read()
32 32 return self._copymap
33 33 elif name == '_branch':
34 34 try:
35 35 self._branch = (self._opener("branch").read().strip()
36 36 or "default")
37 37 except IOError:
38 38 self._branch = "default"
39 39 return self._branch
40 40 elif name == '_pl':
41 41 self._pl = [nullid, nullid]
42 42 try:
43 43 st = self._opener("dirstate").read(40)
44 44 if len(st) == 40:
45 45 self._pl = st[:20], st[20:40]
46 46 except IOError, err:
47 47 if err.errno != errno.ENOENT: raise
48 48 return self._pl
49 49 elif name == '_dirs':
50 50 self._dirs = {}
51 51 for f in self._map:
52 52 self._incpath(f)
53 53 return self._dirs
54 54 elif name == '_ignore':
55 55 files = [self.wjoin('.hgignore')]
56 56 for name, path in self._ui.configitems("ui"):
57 57 if name == 'ignore' or name.startswith('ignore.'):
58 58 files.append(os.path.expanduser(path))
59 59 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
60 60 return self._ignore
61 61 elif name == '_slash':
62 62 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
63 63 return self._slash
64 64 else:
65 65 raise AttributeError, name
66 66
67 67 def wjoin(self, f):
68 68 return os.path.join(self._root, f)
69 69
70 70 def getcwd(self):
71 71 cwd = os.getcwd()
72 72 if cwd == self._root: return ''
73 73 # self._root ends with a path separator if self._root is '/' or 'C:\'
74 74 rootsep = self._root
75 75 if not rootsep.endswith(os.sep):
76 76 rootsep += os.sep
77 77 if cwd.startswith(rootsep):
78 78 return cwd[len(rootsep):]
79 79 else:
80 80 # we're outside the repo. return an absolute path.
81 81 return cwd
82 82
83 83 def pathto(self, f, cwd=None):
84 84 if cwd is None:
85 85 cwd = self.getcwd()
86 86 path = util.pathto(self._root, cwd, f)
87 87 if self._slash:
88 88 return path.replace(os.sep, '/')
89 89 return path
90 90
91 91 def __del__(self):
92 92 self.write()
93 93
94 94 def __getitem__(self, key):
95 95 return self._map[key]
96 96
97 97 def __contains__(self, key):
98 98 return key in self._map
99 99
100 100 def __iter__(self):
101 101 a = self._map.keys()
102 102 a.sort()
103 103 for x in a:
104 104 yield x
105 105
106 106 def parents(self):
107 107 return self._pl
108 108
109 109 def branch(self):
110 110 return self._branch
111 111
112 112 def setparents(self, p1, p2=nullid):
113 113 self._dirty = True
114 114 self._pl = p1, p2
115 115
116 116 def setbranch(self, branch):
117 117 self._branch = branch
118 118 self._opener("branch", "w").write(branch + '\n')
119 119
120 120 def state(self, key):
121 ''' current states:
122 n normal
123 m needs merging
124 r marked for removal
125 a marked for addition'''
121 126 return self._map.get(key, ("?",))[0]
122 127
123 128 def _read(self):
124 129 self._map = {}
125 130 self._copymap = {}
126 131 self._pl = [nullid, nullid]
127 132 try:
128 133 st = self._opener("dirstate").read()
129 134 except IOError, err:
130 135 if err.errno != errno.ENOENT: raise
131 136 return
132 137 if not st:
133 138 return
134 139
135 140 self._pl = [st[:20], st[20: 40]]
136 141
137 142 # deref fields so they will be local in loop
138 143 dmap = self._map
139 144 copymap = self._copymap
140 145 unpack = struct.unpack
141 146
142 147 pos = 40
143 148 e_size = struct.calcsize(_format)
144 149
145 150 while pos < len(st):
146 151 newpos = pos + e_size
147 152 e = unpack(_format, st[pos:newpos])
148 153 l = e[4]
149 154 pos = newpos
150 155 newpos = pos + l
151 156 f = st[pos:newpos]
152 157 if '\0' in f:
153 158 f, c = f.split('\0')
154 159 copymap[f] = c
155 160 dmap[f] = e[:4]
156 161 pos = newpos
157 162
158 163 def invalidate(self):
159 164 for a in "_map _copymap _branch _pl _dirs _ignore".split():
160 165 if hasattr(self, a):
161 166 self.__delattr__(a)
162 167 self._dirty = False
163 168
164 169 def copy(self, source, dest):
165 170 self._dirty = True
166 171 self._copymap[dest] = source
167 172
168 173 def copied(self, file):
169 174 return self._copymap.get(file, None)
170 175
171 176 def copies(self):
172 177 return self._copymap
173 178
174 179 def _incpath(self, path):
175 180 for c in strutil.findall(path, '/'):
176 181 pc = path[:c]
177 182 self._dirs.setdefault(pc, 0)
178 183 self._dirs[pc] += 1
179 184
180 185 def _decpath(self, path):
181 186 for c in strutil.findall(path, '/'):
182 187 pc = path[:c]
183 188 self._dirs.setdefault(pc, 0)
184 189 self._dirs[pc] -= 1
185 190
186 191 def _incpathcheck(self, f):
187 192 if '\r' in f or '\n' in f:
188 193 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames"))
189 194 # shadows
190 195 if f in self._dirs:
191 196 raise util.Abort(_('directory named %r already in dirstate') % f)
192 197 for c in strutil.rfindall(f, '/'):
193 198 d = f[:c]
194 199 if d in self._dirs:
195 200 break
196 201 if d in self._map:
197 202 raise util.Abort(_('file named %r already in dirstate') % d)
198 203 self._incpath(f)
199 204
200 def update(self, files, state, **kw):
201 ''' current states:
202 n normal
203 m needs merging
204 r marked for removal
205 a marked for addition'''
205 def normal(self, f):
206 'mark a file normal'
207 self._dirty = True
208 s = os.lstat(self.wjoin(f))
209 self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime)
210 if self._copymap.has_key(f):
211 del self._copymap[f]
206 212
207 if not files: return
213 def normaldirty(self, f):
214 'mark a file normal, but possibly dirty'
208 215 self._dirty = True
209 for f in files:
210 if self._copymap.has_key(f):
211 del self._copymap[f]
216 s = os.lstat(self.wjoin(f))
217 self._map[f] = ('n', s.st_mode, -1, -1)
218 if f in self._copymap:
219 del self._copymap[f]
220
221 def add(self, f):
222 'mark a file added'
223 self._dirty = True
224 self._incpathcheck(f)
225 s = os.lstat(self.wjoin(f))
226 self._map[f] = ('a', s.st_mode, s.st_size, s.st_mtime)
227 if f in self._copymap:
228 del self._copymap[f]
212 229
213 if state == "r":
214 self._map[f] = ('r', 0, 0, 0)
215 self._decpath(f)
216 continue
217 else:
218 if state == "a":
219 self._incpathcheck(f)
220 s = os.lstat(self.wjoin(f))
221 st_size = kw.get('st_size', s.st_size)
222 st_mtime = kw.get('st_mtime', s.st_mtime)
223 self._map[f] = (state, s.st_mode, st_size, st_mtime)
230 def remove(self, f):
231 'mark a file removed'
232 self._dirty = True
233 self._map[f] = ('r', 0, 0, 0)
234 self._decpath(f)
235 if f in self._copymap:
236 del self._copymap[f]
224 237
225 def forget(self, files):
226 if not files: return
238 def merge(self, f):
239 'mark a file merged'
227 240 self._dirty = True
228 for f in files:
229 try:
230 del self._map[f]
231 self._decpath(f)
232 except KeyError:
233 self._ui.warn(_("not in dirstate: %s!\n") % f)
234 pass
241 s = os.lstat(self.wjoin(f))
242 self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime)
243 if f in self._copymap:
244 del self._copymap[f]
245
246 def forget(self, f):
247 'forget a file'
248 self._dirty = True
249 try:
250 del self._map[f]
251 self._decpath(f)
252 except KeyError:
253 self._ui.warn(_("not in dirstate: %s!\n") % f)
235 254
236 255 def rebuild(self, parent, files):
237 256 self.invalidate()
238 257 for f in files:
239 258 if files.execf(f):
240 259 self._map[f] = ('n', 0777, -1, 0)
241 260 else:
242 261 self._map[f] = ('n', 0666, -1, 0)
243 262 self._pl = (parent, nullid)
244 263 self._dirty = True
245 264
246 265 def write(self):
247 266 if not self._dirty:
248 267 return
249 268 cs = cStringIO.StringIO()
250 269 cs.write("".join(self._pl))
251 270 for f, e in self._map.iteritems():
252 271 c = self.copied(f)
253 272 if c:
254 273 f = f + "\0" + c
255 274 e = struct.pack(_format, e[0], e[1], e[2], e[3], len(f))
256 275 cs.write(e)
257 276 cs.write(f)
258 277 st = self._opener("dirstate", "w", atomictemp=True)
259 278 st.write(cs.getvalue())
260 279 st.rename()
261 280 self._dirty = False
262 281
263 282 def filterfiles(self, files):
264 283 ret = {}
265 284 unknown = []
266 285
267 286 for x in files:
268 287 if x == '.':
269 288 return self._map.copy()
270 289 if x not in self._map:
271 290 unknown.append(x)
272 291 else:
273 292 ret[x] = self._map[x]
274 293
275 294 if not unknown:
276 295 return ret
277 296
278 297 b = self._map.keys()
279 298 b.sort()
280 299 blen = len(b)
281 300
282 301 for x in unknown:
283 302 bs = bisect.bisect(b, "%s%s" % (x, '/'))
284 303 while bs < blen:
285 304 s = b[bs]
286 305 if len(s) > len(x) and s.startswith(x):
287 306 ret[s] = self._map[s]
288 307 else:
289 308 break
290 309 bs += 1
291 310 return ret
292 311
293 312 def _supported(self, f, st, verbose=False):
294 313 if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
295 314 return True
296 315 if verbose:
297 316 kind = 'unknown'
298 317 if stat.S_ISCHR(st.st_mode): kind = _('character device')
299 318 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
300 319 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
301 320 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
302 321 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
303 322 self._ui.warn(_('%s: unsupported file type (type is %s)\n')
304 323 % (self.pathto(f), kind))
305 324 return False
306 325
307 326 def walk(self, files=None, match=util.always, badmatch=None):
308 327 # filter out the stat
309 328 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
310 329 yield src, f
311 330
312 331 def statwalk(self, files=None, match=util.always, ignored=False,
313 332 badmatch=None, directories=False):
314 333 '''
315 334 walk recursively through the directory tree, finding all files
316 335 matched by the match function
317 336
318 337 results are yielded in a tuple (src, filename, st), where src
319 338 is one of:
320 339 'f' the file was found in the directory tree
321 340 'd' the file is a directory of the tree
322 341 'm' the file was only in the dirstate and not in the tree
323 342 'b' file was not found and matched badmatch
324 343
325 344 and st is the stat result if the file was found in the directory.
326 345 '''
327 346
328 347 # walk all files by default
329 348 if not files:
330 349 files = ['.']
331 350 dc = self._map.copy()
332 351 else:
333 352 files = util.unique(files)
334 353 dc = self.filterfiles(files)
335 354
336 355 def imatch(file_):
337 356 if file_ not in dc and self._ignore(file_):
338 357 return False
339 358 return match(file_)
340 359
341 360 ignore = self._ignore
342 361 if ignored:
343 362 imatch = match
344 363 ignore = util.never
345 364
346 365 # self._root may end with a path separator when self._root == '/'
347 366 common_prefix_len = len(self._root)
348 367 if not self._root.endswith(os.sep):
349 368 common_prefix_len += 1
350 369 # recursion free walker, faster than os.walk.
351 370 def findfiles(s):
352 371 work = [s]
353 372 if directories:
354 373 yield 'd', util.normpath(s[common_prefix_len:]), os.lstat(s)
355 374 while work:
356 375 top = work.pop()
357 376 names = os.listdir(top)
358 377 names.sort()
359 378 # nd is the top of the repository dir tree
360 379 nd = util.normpath(top[common_prefix_len:])
361 380 if nd == '.':
362 381 nd = ''
363 382 else:
364 383 # do not recurse into a repo contained in this
365 384 # one. use bisect to find .hg directory so speed
366 385 # is good on big directory.
367 386 hg = bisect.bisect_left(names, '.hg')
368 387 if hg < len(names) and names[hg] == '.hg':
369 388 if os.path.isdir(os.path.join(top, '.hg')):
370 389 continue
371 390 for f in names:
372 391 np = util.pconvert(os.path.join(nd, f))
373 392 if seen(np):
374 393 continue
375 394 p = os.path.join(top, f)
376 395 # don't trip over symlinks
377 396 st = os.lstat(p)
378 397 if stat.S_ISDIR(st.st_mode):
379 398 if not ignore(np):
380 399 work.append(p)
381 400 if directories:
382 401 yield 'd', np, st
383 402 if imatch(np) and np in dc:
384 403 yield 'm', np, st
385 404 elif imatch(np):
386 405 if self._supported(np, st):
387 406 yield 'f', np, st
388 407 elif np in dc:
389 408 yield 'm', np, st
390 409
391 410 known = {'.hg': 1}
392 411 def seen(fn):
393 412 if fn in known: return True
394 413 known[fn] = 1
395 414
396 415 # step one, find all files that match our criteria
397 416 files.sort()
398 417 for ff in files:
399 418 nf = util.normpath(ff)
400 419 f = self.wjoin(ff)
401 420 try:
402 421 st = os.lstat(f)
403 422 except OSError, inst:
404 423 found = False
405 424 for fn in dc:
406 425 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
407 426 found = True
408 427 break
409 428 if not found:
410 429 if inst.errno != errno.ENOENT or not badmatch:
411 430 self._ui.warn('%s: %s\n' %
412 431 (self.pathto(ff), inst.strerror))
413 432 elif badmatch and badmatch(ff) and imatch(nf):
414 433 yield 'b', ff, None
415 434 continue
416 435 if stat.S_ISDIR(st.st_mode):
417 436 cmp1 = (lambda x, y: cmp(x[1], y[1]))
418 437 sorted_ = [ x for x in findfiles(f) ]
419 438 sorted_.sort(cmp1)
420 439 for e in sorted_:
421 440 yield e
422 441 else:
423 442 if not seen(nf) and match(nf):
424 443 if self._supported(ff, st, verbose=True):
425 444 yield 'f', nf, st
426 445 elif ff in dc:
427 446 yield 'm', nf, st
428 447
429 448 # step two run through anything left in the dc hash and yield
430 449 # if we haven't already seen it
431 450 ks = dc.keys()
432 451 ks.sort()
433 452 for k in ks:
434 453 if not seen(k) and imatch(k):
435 454 yield 'm', k, None
436 455
437 456 def status(self, files=None, match=util.always, list_ignored=False,
438 457 list_clean=False):
439 458 lookup, modified, added, unknown, ignored = [], [], [], [], []
440 459 removed, deleted, clean = [], [], []
441 460
442 461 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
443 462 try:
444 463 type_, mode, size, time = self[fn]
445 464 except KeyError:
446 465 if list_ignored and self._ignore(fn):
447 466 ignored.append(fn)
448 467 else:
449 468 unknown.append(fn)
450 469 continue
451 470 if src == 'm':
452 471 nonexistent = True
453 472 if not st:
454 473 try:
455 474 st = os.lstat(self.wjoin(fn))
456 475 except OSError, inst:
457 476 if inst.errno != errno.ENOENT:
458 477 raise
459 478 st = None
460 479 # We need to re-check that it is a valid file
461 480 if st and self._supported(fn, st):
462 481 nonexistent = False
463 482 # XXX: what to do with file no longer present in the fs
464 483 # who are not removed in the dirstate ?
465 484 if nonexistent and type_ in "nm":
466 485 deleted.append(fn)
467 486 continue
468 487 # check the common case first
469 488 if type_ == 'n':
470 489 if not st:
471 490 st = os.lstat(self.wjoin(fn))
472 491 if (size >= 0 and (size != st.st_size
473 492 or (mode ^ st.st_mode) & 0100)
474 493 or fn in self._copymap):
475 494 modified.append(fn)
476 495 elif time != int(st.st_mtime):
477 496 lookup.append(fn)
478 497 elif list_clean:
479 498 clean.append(fn)
480 499 elif type_ == 'm':
481 500 modified.append(fn)
482 501 elif type_ == 'a':
483 502 added.append(fn)
484 503 elif type_ == 'r':
485 504 removed.append(fn)
486 505
487 506 return (lookup, modified, added, removed, deleted, unknown, ignored,
488 507 clean)
@@ -1,1947 +1,1949 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context
12 12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 13 import os, revlog, time, util, extensions, hook
14 14
15 15 class localrepository(repo.repository):
16 16 capabilities = ('lookup', 'changegroupsubset')
17 17 supported = ('revlogv1', 'store')
18 18
19 19 def __del__(self):
20 20 self.transhandle = None
21 21 def __init__(self, parentui, path=None, create=0):
22 22 repo.repository.__init__(self)
23 23 self.path = path
24 24 self.root = os.path.realpath(path)
25 25 self.path = os.path.join(self.root, ".hg")
26 26 self.origroot = path
27 27 self.opener = util.opener(self.path)
28 28 self.wopener = util.opener(self.root)
29 29
30 30 if not os.path.isdir(self.path):
31 31 if create:
32 32 if not os.path.exists(path):
33 33 os.mkdir(path)
34 34 os.mkdir(self.path)
35 35 requirements = ["revlogv1"]
36 36 if parentui.configbool('format', 'usestore', True):
37 37 os.mkdir(os.path.join(self.path, "store"))
38 38 requirements.append("store")
39 39 # create an invalid changelog
40 40 self.opener("00changelog.i", "a").write(
41 41 '\0\0\0\2' # represents revlogv2
42 42 ' dummy changelog to prevent using the old repo layout'
43 43 )
44 44 reqfile = self.opener("requires", "w")
45 45 for r in requirements:
46 46 reqfile.write("%s\n" % r)
47 47 reqfile.close()
48 48 else:
49 49 raise repo.RepoError(_("repository %s not found") % path)
50 50 elif create:
51 51 raise repo.RepoError(_("repository %s already exists") % path)
52 52 else:
53 53 # find requirements
54 54 try:
55 55 requirements = self.opener("requires").read().splitlines()
56 56 except IOError, inst:
57 57 if inst.errno != errno.ENOENT:
58 58 raise
59 59 requirements = []
60 60 # check them
61 61 for r in requirements:
62 62 if r not in self.supported:
63 63 raise repo.RepoError(_("requirement '%s' not supported") % r)
64 64
65 65 # setup store
66 66 if "store" in requirements:
67 67 self.encodefn = util.encodefilename
68 68 self.decodefn = util.decodefilename
69 69 self.spath = os.path.join(self.path, "store")
70 70 else:
71 71 self.encodefn = lambda x: x
72 72 self.decodefn = lambda x: x
73 73 self.spath = self.path
74 74 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
75 75
76 76 self.ui = ui.ui(parentui=parentui)
77 77 try:
78 78 self.ui.readconfig(self.join("hgrc"), self.root)
79 79 extensions.loadall(self.ui)
80 80 except IOError:
81 81 pass
82 82
83 83 self.tagscache = None
84 84 self.branchcache = None
85 85 self.nodetagscache = None
86 86 self.filterpats = {}
87 87 self.transhandle = None
88 88
89 89 def __getattr__(self, name):
90 90 if name == 'changelog':
91 91 self.changelog = changelog.changelog(self.sopener)
92 92 self.sopener.defversion = self.changelog.version
93 93 return self.changelog
94 94 if name == 'manifest':
95 95 self.changelog
96 96 self.manifest = manifest.manifest(self.sopener)
97 97 return self.manifest
98 98 if name == 'dirstate':
99 99 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
100 100 return self.dirstate
101 101 else:
102 102 raise AttributeError, name
103 103
104 104 def url(self):
105 105 return 'file:' + self.root
106 106
107 107 def hook(self, name, throw=False, **args):
108 108 return hook.hook(self.ui, self, name, throw, **args)
109 109
110 110 tag_disallowed = ':\r\n'
111 111
112 112 def _tag(self, name, node, message, local, user, date, parent=None,
113 113 extra={}):
114 114 use_dirstate = parent is None
115 115
116 116 for c in self.tag_disallowed:
117 117 if c in name:
118 118 raise util.Abort(_('%r cannot be used in a tag name') % c)
119 119
120 120 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
121 121
122 122 def writetag(fp, name, munge, prevtags):
123 123 if prevtags and prevtags[-1] != '\n':
124 124 fp.write('\n')
125 125 fp.write('%s %s\n' % (hex(node), munge and munge(name) or name))
126 126 fp.close()
127 127 self.hook('tag', node=hex(node), tag=name, local=local)
128 128
129 129 prevtags = ''
130 130 if local:
131 131 try:
132 132 fp = self.opener('localtags', 'r+')
133 133 except IOError, err:
134 134 fp = self.opener('localtags', 'a')
135 135 else:
136 136 prevtags = fp.read()
137 137
138 138 # local tags are stored in the current charset
139 139 writetag(fp, name, None, prevtags)
140 140 return
141 141
142 142 if use_dirstate:
143 143 try:
144 144 fp = self.wfile('.hgtags', 'rb+')
145 145 except IOError, err:
146 146 fp = self.wfile('.hgtags', 'ab')
147 147 else:
148 148 prevtags = fp.read()
149 149 else:
150 150 try:
151 151 prevtags = self.filectx('.hgtags', parent).data()
152 152 except revlog.LookupError:
153 153 pass
154 154 fp = self.wfile('.hgtags', 'wb')
155 155
156 156 # committed tags are stored in UTF-8
157 157 writetag(fp, name, util.fromlocal, prevtags)
158 158
159 159 if use_dirstate and self.dirstate.state('.hgtags') == '?':
160 160 self.add(['.hgtags'])
161 161
162 162 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
163 163 extra=extra)
164 164
165 165 self.hook('tag', node=hex(node), tag=name, local=local)
166 166
167 167 return tagnode
168 168
169 169 def tag(self, name, node, message, local, user, date):
170 170 '''tag a revision with a symbolic name.
171 171
172 172 if local is True, the tag is stored in a per-repository file.
173 173 otherwise, it is stored in the .hgtags file, and a new
174 174 changeset is committed with the change.
175 175
176 176 keyword arguments:
177 177
178 178 local: whether to store tag in non-version-controlled file
179 179 (default False)
180 180
181 181 message: commit message to use if committing
182 182
183 183 user: name of user to use if committing
184 184
185 185 date: date tuple to use if committing'''
186 186
187 187 for x in self.status()[:5]:
188 188 if '.hgtags' in x:
189 189 raise util.Abort(_('working copy of .hgtags is changed '
190 190 '(please commit .hgtags manually)'))
191 191
192 192
193 193 self._tag(name, node, message, local, user, date)
194 194
195 195 def tags(self):
196 196 '''return a mapping of tag to node'''
197 197 if self.tagscache:
198 198 return self.tagscache
199 199
200 200 globaltags = {}
201 201
202 202 def readtags(lines, fn):
203 203 filetags = {}
204 204 count = 0
205 205
206 206 def warn(msg):
207 207 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
208 208
209 209 for l in lines:
210 210 count += 1
211 211 if not l:
212 212 continue
213 213 s = l.split(" ", 1)
214 214 if len(s) != 2:
215 215 warn(_("cannot parse entry"))
216 216 continue
217 217 node, key = s
218 218 key = util.tolocal(key.strip()) # stored in UTF-8
219 219 try:
220 220 bin_n = bin(node)
221 221 except TypeError:
222 222 warn(_("node '%s' is not well formed") % node)
223 223 continue
224 224 if bin_n not in self.changelog.nodemap:
225 225 warn(_("tag '%s' refers to unknown node") % key)
226 226 continue
227 227
228 228 h = []
229 229 if key in filetags:
230 230 n, h = filetags[key]
231 231 h.append(n)
232 232 filetags[key] = (bin_n, h)
233 233
234 234 for k, nh in filetags.items():
235 235 if k not in globaltags:
236 236 globaltags[k] = nh
237 237 continue
238 238 # we prefer the global tag if:
239 239 # it supercedes us OR
240 240 # mutual supercedes and it has a higher rank
241 241 # otherwise we win because we're tip-most
242 242 an, ah = nh
243 243 bn, bh = globaltags[k]
244 244 if (bn != an and an in bh and
245 245 (bn not in ah or len(bh) > len(ah))):
246 246 an = bn
247 247 ah.extend([n for n in bh if n not in ah])
248 248 globaltags[k] = an, ah
249 249
250 250 # read the tags file from each head, ending with the tip
251 251 f = None
252 252 for rev, node, fnode in self._hgtagsnodes():
253 253 f = (f and f.filectx(fnode) or
254 254 self.filectx('.hgtags', fileid=fnode))
255 255 readtags(f.data().splitlines(), f)
256 256
257 257 try:
258 258 data = util.fromlocal(self.opener("localtags").read())
259 259 # localtags are stored in the local character set
260 260 # while the internal tag table is stored in UTF-8
261 261 readtags(data.splitlines(), "localtags")
262 262 except IOError:
263 263 pass
264 264
265 265 self.tagscache = {}
266 266 for k,nh in globaltags.items():
267 267 n = nh[0]
268 268 if n != nullid:
269 269 self.tagscache[k] = n
270 270 self.tagscache['tip'] = self.changelog.tip()
271 271
272 272 return self.tagscache
273 273
274 274 def _hgtagsnodes(self):
275 275 heads = self.heads()
276 276 heads.reverse()
277 277 last = {}
278 278 ret = []
279 279 for node in heads:
280 280 c = self.changectx(node)
281 281 rev = c.rev()
282 282 try:
283 283 fnode = c.filenode('.hgtags')
284 284 except revlog.LookupError:
285 285 continue
286 286 ret.append((rev, node, fnode))
287 287 if fnode in last:
288 288 ret[last[fnode]] = None
289 289 last[fnode] = len(ret) - 1
290 290 return [item for item in ret if item]
291 291
292 292 def tagslist(self):
293 293 '''return a list of tags ordered by revision'''
294 294 l = []
295 295 for t, n in self.tags().items():
296 296 try:
297 297 r = self.changelog.rev(n)
298 298 except:
299 299 r = -2 # sort to the beginning of the list if unknown
300 300 l.append((r, t, n))
301 301 l.sort()
302 302 return [(t, n) for r, t, n in l]
303 303
304 304 def nodetags(self, node):
305 305 '''return the tags associated with a node'''
306 306 if not self.nodetagscache:
307 307 self.nodetagscache = {}
308 308 for t, n in self.tags().items():
309 309 self.nodetagscache.setdefault(n, []).append(t)
310 310 return self.nodetagscache.get(node, [])
311 311
312 312 def _branchtags(self):
313 313 partial, last, lrev = self._readbranchcache()
314 314
315 315 tiprev = self.changelog.count() - 1
316 316 if lrev != tiprev:
317 317 self._updatebranchcache(partial, lrev+1, tiprev+1)
318 318 self._writebranchcache(partial, self.changelog.tip(), tiprev)
319 319
320 320 return partial
321 321
322 322 def branchtags(self):
323 323 if self.branchcache is not None:
324 324 return self.branchcache
325 325
326 326 self.branchcache = {} # avoid recursion in changectx
327 327 partial = self._branchtags()
328 328
329 329 # the branch cache is stored on disk as UTF-8, but in the local
330 330 # charset internally
331 331 for k, v in partial.items():
332 332 self.branchcache[util.tolocal(k)] = v
333 333 return self.branchcache
334 334
335 335 def _readbranchcache(self):
336 336 partial = {}
337 337 try:
338 338 f = self.opener("branch.cache")
339 339 lines = f.read().split('\n')
340 340 f.close()
341 341 except (IOError, OSError):
342 342 return {}, nullid, nullrev
343 343
344 344 try:
345 345 last, lrev = lines.pop(0).split(" ", 1)
346 346 last, lrev = bin(last), int(lrev)
347 347 if not (lrev < self.changelog.count() and
348 348 self.changelog.node(lrev) == last): # sanity check
349 349 # invalidate the cache
350 350 raise ValueError('Invalid branch cache: unknown tip')
351 351 for l in lines:
352 352 if not l: continue
353 353 node, label = l.split(" ", 1)
354 354 partial[label.strip()] = bin(node)
355 355 except (KeyboardInterrupt, util.SignalInterrupt):
356 356 raise
357 357 except Exception, inst:
358 358 if self.ui.debugflag:
359 359 self.ui.warn(str(inst), '\n')
360 360 partial, last, lrev = {}, nullid, nullrev
361 361 return partial, last, lrev
362 362
363 363 def _writebranchcache(self, branches, tip, tiprev):
364 364 try:
365 365 f = self.opener("branch.cache", "w", atomictemp=True)
366 366 f.write("%s %s\n" % (hex(tip), tiprev))
367 367 for label, node in branches.iteritems():
368 368 f.write("%s %s\n" % (hex(node), label))
369 369 f.rename()
370 370 except (IOError, OSError):
371 371 pass
372 372
373 373 def _updatebranchcache(self, partial, start, end):
374 374 for r in xrange(start, end):
375 375 c = self.changectx(r)
376 376 b = c.branch()
377 377 partial[b] = c.node()
378 378
379 379 def lookup(self, key):
380 380 if key == '.':
381 381 key, second = self.dirstate.parents()
382 382 if key == nullid:
383 383 raise repo.RepoError(_("no revision checked out"))
384 384 if second != nullid:
385 385 self.ui.warn(_("warning: working directory has two parents, "
386 386 "tag '.' uses the first\n"))
387 387 elif key == 'null':
388 388 return nullid
389 389 n = self.changelog._match(key)
390 390 if n:
391 391 return n
392 392 if key in self.tags():
393 393 return self.tags()[key]
394 394 if key in self.branchtags():
395 395 return self.branchtags()[key]
396 396 n = self.changelog._partialmatch(key)
397 397 if n:
398 398 return n
399 399 raise repo.RepoError(_("unknown revision '%s'") % key)
400 400
401 401 def dev(self):
402 402 return os.lstat(self.path).st_dev
403 403
404 404 def local(self):
405 405 return True
406 406
407 407 def join(self, f):
408 408 return os.path.join(self.path, f)
409 409
410 410 def sjoin(self, f):
411 411 f = self.encodefn(f)
412 412 return os.path.join(self.spath, f)
413 413
414 414 def wjoin(self, f):
415 415 return os.path.join(self.root, f)
416 416
417 417 def file(self, f):
418 418 if f[0] == '/':
419 419 f = f[1:]
420 420 return filelog.filelog(self.sopener, f)
421 421
422 422 def changectx(self, changeid=None):
423 423 return context.changectx(self, changeid)
424 424
425 425 def workingctx(self):
426 426 return context.workingctx(self)
427 427
428 428 def parents(self, changeid=None):
429 429 '''
430 430 get list of changectxs for parents of changeid or working directory
431 431 '''
432 432 if changeid is None:
433 433 pl = self.dirstate.parents()
434 434 else:
435 435 n = self.changelog.lookup(changeid)
436 436 pl = self.changelog.parents(n)
437 437 if pl[1] == nullid:
438 438 return [self.changectx(pl[0])]
439 439 return [self.changectx(pl[0]), self.changectx(pl[1])]
440 440
441 441 def filectx(self, path, changeid=None, fileid=None):
442 442 """changeid can be a changeset revision, node, or tag.
443 443 fileid can be a file revision or node."""
444 444 return context.filectx(self, path, changeid, fileid)
445 445
446 446 def getcwd(self):
447 447 return self.dirstate.getcwd()
448 448
449 449 def pathto(self, f, cwd=None):
450 450 return self.dirstate.pathto(f, cwd)
451 451
452 452 def wfile(self, f, mode='r'):
453 453 return self.wopener(f, mode)
454 454
455 455 def _link(self, f):
456 456 return os.path.islink(self.wjoin(f))
457 457
458 458 def _filter(self, filter, filename, data):
459 459 if filter not in self.filterpats:
460 460 l = []
461 461 for pat, cmd in self.ui.configitems(filter):
462 462 mf = util.matcher(self.root, "", [pat], [], [])[1]
463 463 l.append((mf, cmd))
464 464 self.filterpats[filter] = l
465 465
466 466 for mf, cmd in self.filterpats[filter]:
467 467 if mf(filename):
468 468 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
469 469 data = util.filter(data, cmd)
470 470 break
471 471
472 472 return data
473 473
474 474 def wread(self, filename):
475 475 if self._link(filename):
476 476 data = os.readlink(self.wjoin(filename))
477 477 else:
478 478 data = self.wopener(filename, 'r').read()
479 479 return self._filter("encode", filename, data)
480 480
481 481 def wwrite(self, filename, data, flags):
482 482 data = self._filter("decode", filename, data)
483 483 if "l" in flags:
484 484 self.wopener.symlink(data, filename)
485 485 else:
486 486 try:
487 487 if self._link(filename):
488 488 os.unlink(self.wjoin(filename))
489 489 except OSError:
490 490 pass
491 491 self.wopener(filename, 'w').write(data)
492 492 util.set_exec(self.wjoin(filename), "x" in flags)
493 493
494 494 def wwritedata(self, filename, data):
495 495 return self._filter("decode", filename, data)
496 496
497 497 def transaction(self):
498 498 tr = self.transhandle
499 499 if tr != None and tr.running():
500 500 return tr.nest()
501 501
502 502 # save dirstate for rollback
503 503 try:
504 504 ds = self.opener("dirstate").read()
505 505 except IOError:
506 506 ds = ""
507 507 self.opener("journal.dirstate", "w").write(ds)
508 508
509 509 renames = [(self.sjoin("journal"), self.sjoin("undo")),
510 510 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
511 511 tr = transaction.transaction(self.ui.warn, self.sopener,
512 512 self.sjoin("journal"),
513 513 aftertrans(renames))
514 514 self.transhandle = tr
515 515 return tr
516 516
517 517 def recover(self):
518 518 l = self.lock()
519 519 if os.path.exists(self.sjoin("journal")):
520 520 self.ui.status(_("rolling back interrupted transaction\n"))
521 521 transaction.rollback(self.sopener, self.sjoin("journal"))
522 522 self.invalidate()
523 523 return True
524 524 else:
525 525 self.ui.warn(_("no interrupted transaction available\n"))
526 526 return False
527 527
528 528 def rollback(self, wlock=None, lock=None):
529 529 if not wlock:
530 530 wlock = self.wlock()
531 531 if not lock:
532 532 lock = self.lock()
533 533 if os.path.exists(self.sjoin("undo")):
534 534 self.ui.status(_("rolling back last transaction\n"))
535 535 transaction.rollback(self.sopener, self.sjoin("undo"))
536 536 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
537 537 self.invalidate()
538 538 self.dirstate.invalidate()
539 539 else:
540 540 self.ui.warn(_("no rollback information available\n"))
541 541
542 542 def invalidate(self):
543 543 for a in "changelog manifest".split():
544 544 if hasattr(self, a):
545 545 self.__delattr__(a)
546 546 self.tagscache = None
547 547 self.nodetagscache = None
548 548
549 549 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
550 550 desc=None):
551 551 try:
552 552 l = lock.lock(lockname, 0, releasefn, desc=desc)
553 553 except lock.LockHeld, inst:
554 554 if not wait:
555 555 raise
556 556 self.ui.warn(_("waiting for lock on %s held by %r\n") %
557 557 (desc, inst.locker))
558 558 # default to 600 seconds timeout
559 559 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
560 560 releasefn, desc=desc)
561 561 if acquirefn:
562 562 acquirefn()
563 563 return l
564 564
565 565 def lock(self, wait=1):
566 566 return self.do_lock(self.sjoin("lock"), wait,
567 567 acquirefn=self.invalidate,
568 568 desc=_('repository %s') % self.origroot)
569 569
570 570 def wlock(self, wait=1):
571 571 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
572 572 self.dirstate.invalidate,
573 573 desc=_('working directory of %s') % self.origroot)
574 574
575 575 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
576 576 """
577 577 commit an individual file as part of a larger transaction
578 578 """
579 579
580 580 t = self.wread(fn)
581 581 fl = self.file(fn)
582 582 fp1 = manifest1.get(fn, nullid)
583 583 fp2 = manifest2.get(fn, nullid)
584 584
585 585 meta = {}
586 586 cp = self.dirstate.copied(fn)
587 587 if cp:
588 588 # Mark the new revision of this file as a copy of another
589 589 # file. This copy data will effectively act as a parent
590 590 # of this new revision. If this is a merge, the first
591 591 # parent will be the nullid (meaning "look up the copy data")
592 592 # and the second one will be the other parent. For example:
593 593 #
594 594 # 0 --- 1 --- 3 rev1 changes file foo
595 595 # \ / rev2 renames foo to bar and changes it
596 596 # \- 2 -/ rev3 should have bar with all changes and
597 597 # should record that bar descends from
598 598 # bar in rev2 and foo in rev1
599 599 #
600 600 # this allows this merge to succeed:
601 601 #
602 602 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
603 603 # \ / merging rev3 and rev4 should use bar@rev2
604 604 # \- 2 --- 4 as the merge base
605 605 #
606 606 meta["copy"] = cp
607 607 if not manifest2: # not a branch merge
608 608 meta["copyrev"] = hex(manifest1.get(cp, nullid))
609 609 fp2 = nullid
610 610 elif fp2 != nullid: # copied on remote side
611 611 meta["copyrev"] = hex(manifest1.get(cp, nullid))
612 612 elif fp1 != nullid: # copied on local side, reversed
613 613 meta["copyrev"] = hex(manifest2.get(cp))
614 614 fp2 = fp1
615 615 else: # directory rename
616 616 meta["copyrev"] = hex(manifest1.get(cp, nullid))
617 617 self.ui.debug(_(" %s: copy %s:%s\n") %
618 618 (fn, cp, meta["copyrev"]))
619 619 fp1 = nullid
620 620 elif fp2 != nullid:
621 621 # is one parent an ancestor of the other?
622 622 fpa = fl.ancestor(fp1, fp2)
623 623 if fpa == fp1:
624 624 fp1, fp2 = fp2, nullid
625 625 elif fpa == fp2:
626 626 fp2 = nullid
627 627
628 628 # is the file unmodified from the parent? report existing entry
629 629 if fp2 == nullid and not fl.cmp(fp1, t):
630 630 return fp1
631 631
632 632 changelist.append(fn)
633 633 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
634 634
635 635 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
636 636 if p1 is None:
637 637 p1, p2 = self.dirstate.parents()
638 638 return self.commit(files=files, text=text, user=user, date=date,
639 639 p1=p1, p2=p2, wlock=wlock, extra=extra)
640 640
641 641 def commit(self, files=None, text="", user=None, date=None,
642 642 match=util.always, force=False, lock=None, wlock=None,
643 643 force_editor=False, p1=None, p2=None, extra={}):
644 644
645 645 commit = []
646 646 remove = []
647 647 changed = []
648 648 use_dirstate = (p1 is None) # not rawcommit
649 649 extra = extra.copy()
650 650
651 651 if use_dirstate:
652 652 if files:
653 653 for f in files:
654 654 s = self.dirstate.state(f)
655 655 if s in 'nmai':
656 656 commit.append(f)
657 657 elif s == 'r':
658 658 remove.append(f)
659 659 else:
660 660 self.ui.warn(_("%s not tracked!\n") % f)
661 661 else:
662 662 changes = self.status(match=match)[:5]
663 663 modified, added, removed, deleted, unknown = changes
664 664 commit = modified + added
665 665 remove = removed
666 666 else:
667 667 commit = files
668 668
669 669 if use_dirstate:
670 670 p1, p2 = self.dirstate.parents()
671 671 update_dirstate = True
672 672 else:
673 673 p1, p2 = p1, p2 or nullid
674 674 update_dirstate = (self.dirstate.parents()[0] == p1)
675 675
676 676 c1 = self.changelog.read(p1)
677 677 c2 = self.changelog.read(p2)
678 678 m1 = self.manifest.read(c1[0]).copy()
679 679 m2 = self.manifest.read(c2[0])
680 680
681 681 if use_dirstate:
682 682 branchname = self.workingctx().branch()
683 683 try:
684 684 branchname = branchname.decode('UTF-8').encode('UTF-8')
685 685 except UnicodeDecodeError:
686 686 raise util.Abort(_('branch name not in UTF-8!'))
687 687 else:
688 688 branchname = ""
689 689
690 690 if use_dirstate:
691 691 oldname = c1[5].get("branch") # stored in UTF-8
692 692 if (not commit and not remove and not force and p2 == nullid
693 693 and branchname == oldname):
694 694 self.ui.status(_("nothing changed\n"))
695 695 return None
696 696
697 697 xp1 = hex(p1)
698 698 if p2 == nullid: xp2 = ''
699 699 else: xp2 = hex(p2)
700 700
701 701 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
702 702
703 703 if not wlock:
704 704 wlock = self.wlock()
705 705 if not lock:
706 706 lock = self.lock()
707 707 tr = self.transaction()
708 708
709 709 # check in files
710 710 new = {}
711 711 linkrev = self.changelog.count()
712 712 commit.sort()
713 713 is_exec = util.execfunc(self.root, m1.execf)
714 714 is_link = util.linkfunc(self.root, m1.linkf)
715 715 for f in commit:
716 716 self.ui.note(f + "\n")
717 717 try:
718 718 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
719 719 new_exec = is_exec(f)
720 720 new_link = is_link(f)
721 721 if not changed or changed[-1] != f:
722 722 # mention the file in the changelog if some flag changed,
723 723 # even if there was no content change.
724 724 old_exec = m1.execf(f)
725 725 old_link = m1.linkf(f)
726 726 if old_exec != new_exec or old_link != new_link:
727 727 changed.append(f)
728 728 m1.set(f, new_exec, new_link)
729 729 except (OSError, IOError):
730 730 if use_dirstate:
731 731 self.ui.warn(_("trouble committing %s!\n") % f)
732 732 raise
733 733 else:
734 734 remove.append(f)
735 735
736 736 # update manifest
737 737 m1.update(new)
738 738 remove.sort()
739 739 removed = []
740 740
741 741 for f in remove:
742 742 if f in m1:
743 743 del m1[f]
744 744 removed.append(f)
745 745 elif f in m2:
746 746 removed.append(f)
747 747 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))
748 748
749 749 # add changeset
750 750 new = new.keys()
751 751 new.sort()
752 752
753 753 user = user or self.ui.username()
754 754 if not text or force_editor:
755 755 edittext = []
756 756 if text:
757 757 edittext.append(text)
758 758 edittext.append("")
759 759 edittext.append("HG: user: %s" % user)
760 760 if p2 != nullid:
761 761 edittext.append("HG: branch merge")
762 762 if branchname:
763 763 edittext.append("HG: branch %s" % util.tolocal(branchname))
764 764 edittext.extend(["HG: changed %s" % f for f in changed])
765 765 edittext.extend(["HG: removed %s" % f for f in removed])
766 766 if not changed and not remove:
767 767 edittext.append("HG: no files changed")
768 768 edittext.append("")
769 769 # run editor in the repository root
770 770 olddir = os.getcwd()
771 771 os.chdir(self.root)
772 772 text = self.ui.edit("\n".join(edittext), user)
773 773 os.chdir(olddir)
774 774
775 775 lines = [line.rstrip() for line in text.rstrip().splitlines()]
776 776 while lines and not lines[0]:
777 777 del lines[0]
778 778 if not lines:
779 779 return None
780 780 text = '\n'.join(lines)
781 781 if branchname:
782 782 extra["branch"] = branchname
783 783 n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
784 784 user, date, extra)
785 785 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
786 786 parent2=xp2)
787 787 tr.close()
788 788
789 789 if self.branchcache and "branch" in extra:
790 790 self.branchcache[util.tolocal(extra["branch"])] = n
791 791
792 792 if use_dirstate or update_dirstate:
793 793 self.dirstate.setparents(n)
794 794 if use_dirstate:
795 self.dirstate.update(new, "n")
796 self.dirstate.forget(removed)
795 for f in new:
796 self.dirstate.normal(f)
797 for f in removed:
798 self.dirstate.forget(f)
797 799
798 800 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
799 801 return n
800 802
801 803 def walk(self, node=None, files=[], match=util.always, badmatch=None):
802 804 '''
803 805 walk recursively through the directory tree or a given
804 806 changeset, finding all files matched by the match
805 807 function
806 808
807 809 results are yielded in a tuple (src, filename), where src
808 810 is one of:
809 811 'f' the file was found in the directory tree
810 812 'm' the file was only in the dirstate and not in the tree
811 813 'b' file was not found and matched badmatch
812 814 '''
813 815
814 816 if node:
815 817 fdict = dict.fromkeys(files)
816 818 # for dirstate.walk, files=['.'] means "walk the whole tree".
817 819 # follow that here, too
818 820 fdict.pop('.', None)
819 821 mdict = self.manifest.read(self.changelog.read(node)[0])
820 822 mfiles = mdict.keys()
821 823 mfiles.sort()
822 824 for fn in mfiles:
823 825 for ffn in fdict:
824 826 # match if the file is the exact name or a directory
825 827 if ffn == fn or fn.startswith("%s/" % ffn):
826 828 del fdict[ffn]
827 829 break
828 830 if match(fn):
829 831 yield 'm', fn
830 832 ffiles = fdict.keys()
831 833 ffiles.sort()
832 834 for fn in ffiles:
833 835 if badmatch and badmatch(fn):
834 836 if match(fn):
835 837 yield 'b', fn
836 838 else:
837 839 self.ui.warn(_('%s: No such file in rev %s\n')
838 840 % (self.pathto(fn), short(node)))
839 841 else:
840 842 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
841 843 yield src, fn
842 844
843 845 def status(self, node1=None, node2=None, files=[], match=util.always,
844 846 wlock=None, list_ignored=False, list_clean=False):
845 847 """return status of files between two nodes or node and working directory
846 848
847 849 If node1 is None, use the first dirstate parent instead.
848 850 If node2 is None, compare node1 with working directory.
849 851 """
850 852
851 853 def fcmp(fn, getnode):
852 854 t1 = self.wread(fn)
853 855 return self.file(fn).cmp(getnode(fn), t1)
854 856
855 857 def mfmatches(node):
856 858 change = self.changelog.read(node)
857 859 mf = self.manifest.read(change[0]).copy()
858 860 for fn in mf.keys():
859 861 if not match(fn):
860 862 del mf[fn]
861 863 return mf
862 864
863 865 modified, added, removed, deleted, unknown = [], [], [], [], []
864 866 ignored, clean = [], []
865 867
866 868 compareworking = False
867 869 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
868 870 compareworking = True
869 871
870 872 if not compareworking:
871 873 # read the manifest from node1 before the manifest from node2,
872 874 # so that we'll hit the manifest cache if we're going through
873 875 # all the revisions in parent->child order.
874 876 mf1 = mfmatches(node1)
875 877
876 878 mywlock = False
877 879
878 880 # are we comparing the working directory?
879 881 if not node2:
880 882 (lookup, modified, added, removed, deleted, unknown,
881 883 ignored, clean) = self.dirstate.status(files, match,
882 884 list_ignored, list_clean)
883 885
884 886 # are we comparing working dir against its parent?
885 887 if compareworking:
886 888 if lookup:
887 889 # do a full compare of any files that might have changed
888 890 mnode = self.changelog.read(self.dirstate.parents()[0])[0]
889 891 getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
890 892 nullid)
891 893 for f in lookup:
892 894 if fcmp(f, getnode):
893 895 modified.append(f)
894 896 else:
895 897 if list_clean:
896 898 clean.append(f)
897 899 if not wlock and not mywlock:
898 900 mywlock = True
899 901 try:
900 902 wlock = self.wlock(wait=0)
901 903 except lock.LockException:
902 904 pass
903 905 if wlock:
904 self.dirstate.update([f], "n")
906 self.dirstate.normal(f)
905 907 else:
906 908 # we are comparing working dir against non-parent
907 909 # generate a pseudo-manifest for the working dir
908 910 # XXX: create it in dirstate.py ?
909 911 mf2 = mfmatches(self.dirstate.parents()[0])
910 912 is_exec = util.execfunc(self.root, mf2.execf)
911 913 is_link = util.linkfunc(self.root, mf2.linkf)
912 914 for f in lookup + modified + added:
913 915 mf2[f] = ""
914 916 mf2.set(f, is_exec(f), is_link(f))
915 917 for f in removed:
916 918 if f in mf2:
917 919 del mf2[f]
918 920
919 921 if mywlock and wlock:
920 922 wlock.release()
921 923 else:
922 924 # we are comparing two revisions
923 925 mf2 = mfmatches(node2)
924 926
925 927 if not compareworking:
926 928 # flush lists from dirstate before comparing manifests
927 929 modified, added, clean = [], [], []
928 930
929 931 # make sure to sort the files so we talk to the disk in a
930 932 # reasonable order
931 933 mf2keys = mf2.keys()
932 934 mf2keys.sort()
933 935 getnode = lambda fn: mf1.get(fn, nullid)
934 936 for fn in mf2keys:
935 937 if mf1.has_key(fn):
936 938 if (mf1.flags(fn) != mf2.flags(fn) or
937 939 (mf1[fn] != mf2[fn] and
938 940 (mf2[fn] != "" or fcmp(fn, getnode)))):
939 941 modified.append(fn)
940 942 elif list_clean:
941 943 clean.append(fn)
942 944 del mf1[fn]
943 945 else:
944 946 added.append(fn)
945 947
946 948 removed = mf1.keys()
947 949
948 950 # sort and return results:
949 951 for l in modified, added, removed, deleted, unknown, ignored, clean:
950 952 l.sort()
951 953 return (modified, added, removed, deleted, unknown, ignored, clean)
952 954
953 955 def add(self, list, wlock=None):
954 956 if not wlock:
955 957 wlock = self.wlock()
956 958 for f in list:
957 959 p = self.wjoin(f)
958 960 try:
959 961 st = os.lstat(p)
960 962 except:
961 963 self.ui.warn(_("%s does not exist!\n") % f)
962 964 continue
963 965 if st.st_size > 10000000:
964 966 self.ui.warn(_("%s: files over 10MB may cause memory and"
965 967 " performance problems\n"
966 968 "(use 'hg revert %s' to unadd the file)\n")
967 969 % (f, f))
968 970 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
969 971 self.ui.warn(_("%s not added: only files and symlinks "
970 972 "supported currently\n") % f)
971 973 elif self.dirstate.state(f) in 'an':
972 974 self.ui.warn(_("%s already tracked!\n") % f)
973 975 else:
974 self.dirstate.update([f], "a")
976 self.dirstate.add(f)
975 977
976 978 def forget(self, list, wlock=None):
977 979 if not wlock:
978 980 wlock = self.wlock()
979 981 for f in list:
980 982 if self.dirstate.state(f) not in 'ai':
981 983 self.ui.warn(_("%s not added!\n") % f)
982 984 else:
983 self.dirstate.forget([f])
985 self.dirstate.forget(f)
984 986
985 987 def remove(self, list, unlink=False, wlock=None):
986 988 if unlink:
987 989 for f in list:
988 990 try:
989 991 util.unlink(self.wjoin(f))
990 992 except OSError, inst:
991 993 if inst.errno != errno.ENOENT:
992 994 raise
993 995 if not wlock:
994 996 wlock = self.wlock()
995 997 for f in list:
996 998 if unlink and os.path.exists(self.wjoin(f)):
997 999 self.ui.warn(_("%s still exists!\n") % f)
998 1000 elif self.dirstate.state(f) == 'a':
999 self.dirstate.forget([f])
1001 self.dirstate.forget(f)
1000 1002 elif f not in self.dirstate:
1001 1003 self.ui.warn(_("%s not tracked!\n") % f)
1002 1004 else:
1003 self.dirstate.update([f], "r")
1005 self.dirstate.remove(f)
1004 1006
1005 1007 def undelete(self, list, wlock=None):
1006 1008 p = self.dirstate.parents()[0]
1007 1009 mn = self.changelog.read(p)[0]
1008 1010 m = self.manifest.read(mn)
1009 1011 if not wlock:
1010 1012 wlock = self.wlock()
1011 1013 for f in list:
1012 1014 if self.dirstate.state(f) not in "r":
1013 1015 self.ui.warn("%s not removed!\n" % f)
1014 1016 else:
1015 1017 t = self.file(f).read(m[f])
1016 1018 self.wwrite(f, t, m.flags(f))
1017 self.dirstate.update([f], "n")
1019 self.dirstate.normal(f)
1018 1020
1019 1021 def copy(self, source, dest, wlock=None):
1020 1022 p = self.wjoin(dest)
1021 1023 if not (os.path.exists(p) or os.path.islink(p)):
1022 1024 self.ui.warn(_("%s does not exist!\n") % dest)
1023 1025 elif not (os.path.isfile(p) or os.path.islink(p)):
1024 1026 self.ui.warn(_("copy failed: %s is not a file or a "
1025 1027 "symbolic link\n") % dest)
1026 1028 else:
1027 1029 if not wlock:
1028 1030 wlock = self.wlock()
1029 1031 if self.dirstate.state(dest) == '?':
1030 self.dirstate.update([dest], "a")
1032 self.dirstate.add(dest)
1031 1033 self.dirstate.copy(source, dest)
1032 1034
1033 1035 def heads(self, start=None):
1034 1036 heads = self.changelog.heads(start)
1035 1037 # sort the output in rev descending order
1036 1038 heads = [(-self.changelog.rev(h), h) for h in heads]
1037 1039 heads.sort()
1038 1040 return [n for (r, n) in heads]
1039 1041
    def branchheads(self, branch, start=None):
        """Return the head nodes of the named branch.

        If start is given, only heads reachable from start are kept.
        Returns [] for an unknown branch name.
        """
        branches = self.branchtags()
        if branch not in branches:
            return []
        # The basic algorithm is this:
        #
        # Start from the branch tip since there are no later revisions that can
        # possibly be in this branch, and the tip is a guaranteed head.
        #
        # Remember the tip's parents as the first ancestors, since these by
        # definition are not heads.
        #
        # Step backwards from the branch tip through all the revisions. We are
        # guaranteed by the rules of Mercurial that we will now be visiting the
        # nodes in reverse topological order (children before parents).
        #
        # If a revision is one of the ancestors of a head then we can toss it
        # out of the ancestors set (we've already found it and won't be
        # visiting it again) and put its parents in the ancestors set.
        #
        # Otherwise, if a revision is in the branch it's another head, since it
        # wasn't in the ancestor list of an existing head. So add it to the
        # head list, and add its parents to the ancestor list.
        #
        # If it is not in the branch ignore it.
        #
        # Once we have a list of heads, use nodesbetween to filter out all the
        # heads that cannot be reached from startrev. There may be a more
        # efficient way to do this as part of the previous algorithm.

        set = util.set
        heads = [self.changelog.rev(branches[branch])]
        # Don't care if ancestors contains nullrev or not.
        ancestors = set(self.changelog.parentrevs(heads[0]))
        for rev in xrange(heads[0] - 1, nullrev, -1):
            if rev in ancestors:
                ancestors.update(self.changelog.parentrevs(rev))
                ancestors.remove(rev)
            elif self.changectx(rev).branch() == branch:
                heads.append(rev)
                ancestors.update(self.changelog.parentrevs(rev))
        # map revision numbers back to nodes
        heads = [self.changelog.node(rev) for rev in heads]
        if start is not None:
            heads = self.changelog.nodesbetween([start], heads)[2]
        return heads
1085 1087
1086 1088 def branches(self, nodes):
1087 1089 if not nodes:
1088 1090 nodes = [self.changelog.tip()]
1089 1091 b = []
1090 1092 for n in nodes:
1091 1093 t = n
1092 1094 while 1:
1093 1095 p = self.changelog.parents(n)
1094 1096 if p[1] != nullid or p[0] == nullid:
1095 1097 b.append((t, n, p[0], p[1]))
1096 1098 break
1097 1099 n = p[0]
1098 1100 return b
1099 1101
1100 1102 def between(self, pairs):
1101 1103 r = []
1102 1104
1103 1105 for top, bottom in pairs:
1104 1106 n, l, i = top, [], 0
1105 1107 f = 1
1106 1108
1107 1109 while n != bottom:
1108 1110 p = self.changelog.parents(n)[0]
1109 1111 if i == f:
1110 1112 l.append(n)
1111 1113 f = f * 2
1112 1114 n = p
1113 1115 i += 1
1114 1116
1115 1117 r.append(l)
1116 1118
1117 1119 return r
1118 1120
    def findincoming(self, remote, base=None, heads=None, force=False):
        """Return list of roots of the subsets of missing nodes from remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore base will be updated to include the nodes that exists
        in self and remote but no children exists in self and remote.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        All the descendants of the list returned are missing in self.
        (and so we know that the rest of the nodes are missing in remote, see
        outgoing)
        """
        m = self.changelog.nodemap
        search = []
        fetch = {}
        seen = {}
        seenbranch = {}
        if base == None:
            base = {}

        if not heads:
            heads = remote.heads()

        if self.changelog.tip() == nullid:
            # local repo is empty: everything the remote has is missing
            base[nullid] = 1
            if heads != [nullid]:
                return [nullid]
            return []

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            return []

        req = dict.fromkeys(unknown)
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid: # found the end of the branch
                    pass
                elif n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                elif n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                            for p in n[2:4]:
                                if p in m:
                                    base[p] = 1 # latest known

                    # queue unknown parents for the next request round
                    for p in n[2:4]:
                        if p not in req and p not in m:
                            r.append(p)
                            req[p] = 1
                    seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                              (reqcnt, " ".join(map(short, r))))
                # batch branch queries ten at a time
                for p in xrange(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        self.ui.debug(_("found new branch changeset %s\n") %
                                      short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        self.ui.debug(_("narrowed branch search to %s:%s\n")
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                raise repo.RepoError(_("already have changeset ") + short(f[:4]))

        if base.keys() == [nullid]:
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.debug(_("found new changesets starting at ") +
                      " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return fetch.keys()
1259 1261
    def findoutgoing(self, remote, base=None, heads=None, force=False):
        """Return list of nodes that are roots of subsets not in remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads, and return a second element which
        contains all remote heads which get new children.
        """
        if base == None:
            base = {}
            # no precomputed base: discover the common set first
            self.findincoming(remote, base, heads, force=force)

        self.ui.debug(_("common changesets up to ")
                      + " ".join(map(short, base.keys())) + "\n")

        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        # find every remote head that will get new children
        updated_heads = {}
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)
            if heads:
                if p1 in heads:
                    updated_heads[p1] = True
                if p2 in heads:
                    updated_heads[p2] = True

        # this is the set of all roots we have to push
        if heads:
            return subset, updated_heads.keys()
        else:
            return subset
1307 1309
    def pull(self, remote, heads=None, force=False, lock=None):
        """Pull changesets from remote into this repository.

        If heads is given, pull only ancestors of those heads (the
        remote must support changegroupsubset).  Returns the result of
        addchangegroup, or 0 when there is nothing to pull.
        """
        mylock = False
        if not lock:
            lock = self.lock()
            mylock = True

        try:
            fetch = self.findincoming(remote, force=force)
            if fetch == [nullid]:
                self.ui.status(_("requesting all changes\n"))

            if not fetch:
                self.ui.status(_("no changes found\n"))
                return 0

            if heads is None:
                cg = remote.changegroup(fetch, 'pull')
            else:
                if 'changegroupsubset' not in remote.capabilities:
                    raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
                cg = remote.changegroupsubset(fetch, heads, 'pull')
            return self.addchangegroup(cg, 'pull', remote.url())
        finally:
            # only release a lock we acquired ourselves
            if mylock:
                lock.release()
1333 1335
1334 1336 def push(self, remote, force=False, revs=None):
1335 1337 # there are two ways to push to remote repo:
1336 1338 #
1337 1339 # addchangegroup assumes local user can lock remote
1338 1340 # repo (local filesystem, old ssh servers).
1339 1341 #
1340 1342 # unbundle assumes local user cannot lock remote repo (new ssh
1341 1343 # servers, http servers).
1342 1344
1343 1345 if remote.capable('unbundle'):
1344 1346 return self.push_unbundle(remote, force, revs)
1345 1347 return self.push_addchangegroup(remote, force, revs)
1346 1348
    def prepush(self, remote, force, revs):
        """Compute the changegroup to push to remote.

        Returns (changegroup, remote_heads) on success, or (None, 1)
        when there is nothing to push or the push would create new
        remote heads and force is not set.
        """
        base = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                # empty remote repo: any push is fine
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            newheads.append(r)
                    else:
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 1
        elif inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))


        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads
1402 1404
1403 1405 def push_addchangegroup(self, remote, force, revs):
1404 1406 lock = remote.lock()
1405 1407
1406 1408 ret = self.prepush(remote, force, revs)
1407 1409 if ret[0] is not None:
1408 1410 cg, remote_heads = ret
1409 1411 return remote.addchangegroup(cg, 'push', self.url())
1410 1412 return ret[1]
1411 1413
1412 1414 def push_unbundle(self, remote, force, revs):
1413 1415 # local repo finds heads on server, finds out what revs it
1414 1416 # must push. once revs transferred, if server finds it has
1415 1417 # different heads (someone else won commit/push race), server
1416 1418 # aborts.
1417 1419
1418 1420 ret = self.prepush(remote, force, revs)
1419 1421 if ret[0] is not None:
1420 1422 cg, remote_heads = ret
1421 1423 if force: remote_heads = ['force']
1422 1424 return remote.unbundle(cg, remote_heads, 'push')
1423 1425 return ret[1]
1424 1426
    def changegroupinfo(self, nodes):
        """Report how many changesets will be transferred; with debug
        output enabled, also list each changeset hash."""
        self.ui.note(_("%d changesets found\n") % len(nodes))
        if self.ui.debugflag:
            self.ui.debug(_("List of changesets:\n"))
            for node in nodes:
                self.ui.debug("%s\n" % hex(node))
1431 1433
    def changegroupsubset(self, bases, heads, source):
        """This function generates a changegroup consisting of all the nodes
        that are descendents of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to.

        Returns a chunkbuffer producing the changegroup stream; fires the
        'preoutgoing' hook before generation and 'outgoing' once the group
        is being consumed.
        """

        self.hook('preoutgoing', throw=True, source=source)

        # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        self.changegroupinfo(msng_cl_lst)
        # Some bases may turn out to be superfluous, and some heads may be
        # too.  nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known.  The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function.  Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history.  Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents.  This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup.  The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # the inner function.
        def filenode_collector(changedfiles):
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to.  It
            # does this by assuming the a filenode belongs to the changenode
            # the first manifest that references it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file in we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, lets remove
        # all those we now the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up the a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all theses utility functions to help out and
        # logically divide up the task, generate the group.
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if msng_filenode_set.has_key(fname):
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.genchunk(fname)
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if msng_filenode_set.has_key(fname):
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

            if msng_cl_lst:
                self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())
1703 1705
    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them.

        Returns a chunkbuffer producing the changegroup stream; fires the
        'preoutgoing' hook before generation and 'outgoing' once the group
        is being consumed.
        """

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        nodes = cl.nodesbetween(basenodes, None)[0]
        # ersatz set of the revision numbers being sent
        revset = dict.fromkeys([cl.rev(n) for n in nodes])
        self.changegroupinfo(nodes)

        def identity(x):
            return x

        # yield the nodes of a revlog whose linked changeset is outgoing
        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        # collect, per outgoing changeset, the set of files it touched
        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        # map a revlog node back to the changeset node that introduced it
        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.genchunk(fname)
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

            if nodes:
                self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())
1770 1772
1771 1773 def addchangegroup(self, source, srctype, url):
1772 1774 """add changegroup to repo.
1773 1775
1774 1776 return values:
1775 1777 - nothing changed or no source: 0
1776 1778 - more heads than before: 1+added heads (2..n)
1777 1779 - less heads than before: -1-removed heads (-2..-n)
1778 1780 - number of heads stays the same: 1
1779 1781 """
# csmap: log each incoming changeset and return cl.count(), the revision
# index the changeset is about to receive — presumably used as its link
# revision by addgroup (TODO confirm against revlog.addgroup).
1780 1782 def csmap(x):
1781 1783 self.ui.debug(_("add changeset %s\n") % short(x))
1782 1784 return cl.count()
1783 1785
# revmap: changelog node -> revision number; manifest and filelog
# entries link back to the changelog through this.
1784 1786 def revmap(x):
1785 1787 return cl.rev(x)
1786 1788
1787 1789 if not source:
1788 1790 return 0
1789 1791
1790 1792 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1791 1793
1792 1794 changesets = files = revisions = 0
1793 1795
# everything below is added inside a single transaction
1794 1796 tr = self.transaction()
1795 1797
1796 1798 # write changelog data to temp files so concurrent readers will not see
1797 1799 # inconsistent view
1798 1800 cl = self.changelog
1799 1801 cl.delayupdate()
1800 1802 oldheads = len(cl.heads())
1801 1803
1802 1804 # pull off the changeset group
1803 1805 self.ui.status(_("adding changesets\n"))
# cor/cnr: highest changelog revision before/after the group is applied
1804 1806 cor = cl.count() - 1
1805 1807 chunkiter = changegroup.chunkiter(source)
1806 1808 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1807 1809 raise util.Abort(_("received changelog group is empty"))
1808 1810 cnr = cl.count() - 1
1809 1811 changesets = cnr - cor
1810 1812
1811 1813 # pull off the manifest group
1812 1814 self.ui.status(_("adding manifests\n"))
1813 1815 chunkiter = changegroup.chunkiter(source)
1814 1816 # no need to check for empty manifest group here:
1815 1817 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1816 1818 # no new manifest will be created and the manifest group will
1817 1819 # be empty during the pull
1818 1820 self.manifest.addgroup(chunkiter, revmap, tr)
1819 1821
1820 1822 # process the files
1821 1823 self.ui.status(_("adding file changes\n"))
1822 1824 while 1:
1823 1825 f = changegroup.getchunk(source)
1824 1826 if not f:
1825 1827 break
1826 1828 self.ui.debug(_("adding %s revisions\n") % f)
1827 1829 fl = self.file(f)
1828 1830 o = fl.count()
1829 1831 chunkiter = changegroup.chunkiter(source)
1830 1832 if fl.addgroup(chunkiter, revmap, tr) is None:
1831 1833 raise util.Abort(_("received file revlog group is empty"))
1832 1834 revisions += fl.count() - o
1833 1835 files += 1
1834 1836
1835 1837 # make changelog see real files again
1836 1838 cl.finalize(tr)
1837 1839
1838 1840 newheads = len(self.changelog.heads())
1839 1841 heads = ""
1840 1842 if oldheads and newheads != oldheads:
1841 1843 heads = _(" (%+d heads)") % (newheads - oldheads)
1842 1844
1843 1845 self.ui.status(_("added %d changesets"
1844 1846 " with %d changes to %d files%s\n")
1845 1847 % (changesets, revisions, files, heads))
1846 1848
# pretxnchangegroup may still veto (raise) before the transaction commits
1847 1849 if changesets > 0:
1848 1850 self.hook('pretxnchangegroup', throw=True,
1849 1851 node=hex(self.changelog.node(cor+1)), source=srctype,
1850 1852 url=url)
1851 1853
# commit the transaction; the new revisions become visible to readers here
1852 1854 tr.close()
1853 1855
1854 1856 if changesets > 0:
1855 1857 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1856 1858 source=srctype, url=url)
1857 1859
1858 1860 for i in xrange(cor + 1, cnr + 1):
1859 1861 self.hook("incoming", node=hex(self.changelog.node(i)),
1860 1862 source=srctype, url=url)
1861 1863
1862 1864 # never return 0 here:
1863 1865 if newheads < oldheads:
1864 1866 return newheads - oldheads - 1
1865 1867 else:
1866 1868 return newheads - oldheads + 1
1867 1869
1868 1870
def stream_in(self, remote):
    """Clone by copying raw revlog data streamed from `remote`.

    Wire format: a status line (integer code), then a line
    "<total files> <total bytes>", then per file a "<name>\\0<size>"
    header followed by `size` bytes of revlog data.

    Returns len(self.heads()) + 1 (never 0, matching pull()'s
    convention).  Raises util.Abort when the server refuses, and
    util.UnexpectedOutput on a malformed response.
    """
    fp = remote.stream_out()
    l = fp.readline()
    try:
        resp = int(l)
    except ValueError:
        raise util.UnexpectedOutput(
            _('Unexpected response from remote server:'), l)
    if resp == 1:
        raise util.Abort(_('operation forbidden by server'))
    elif resp == 2:
        raise util.Abort(_('locking the remote repository failed'))
    elif resp != 0:
        raise util.Abort(_('the server sent an unknown error code'))
    self.ui.status(_('streaming all changes\n'))
    l = fp.readline()
    try:
        total_files, total_bytes = map(int, l.split(' ', 1))
    # BUG FIX: the original "except ValueError, TypeError:" caught only
    # ValueError and bound it to the *name* TypeError, so an actual
    # TypeError escaped; a parenthesized tuple is required to catch both.
    except (ValueError, TypeError):
        raise util.UnexpectedOutput(
            _('Unexpected response from remote server:'), l)
    self.ui.status(_('%d files to transfer, %s of data\n') %
                   (total_files, util.bytecount(total_bytes)))
    start = time.time()
    for i in xrange(total_files):
        # XXX doesn't support '\n' or '\r' in filenames
        l = fp.readline()
        try:
            name, size = l.split('\0', 1)
            size = int(size)
        except (ValueError, TypeError):  # same fix as above
            raise util.UnexpectedOutput(
                _('Unexpected response from remote server:'), l)
        self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
        ofp = self.sopener(name, 'w')
        try:
            for chunk in util.filechunkiter(fp, limit=size):
                ofp.write(chunk)
        finally:
            # don't leak the store file handle if a read/write fails
            ofp.close()
    elapsed = time.time() - start
    if elapsed <= 0:
        # guard against clock adjustments producing a zero/negative span
        elapsed = 0.001
    self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                   (util.bytecount(total_bytes), elapsed,
                    util.bytecount(total_bytes / elapsed)))
    # drop cached changelog/manifest so the freshly written data is seen
    self.invalidate()
    return len(self.heads()) + 1
1915 1917
def clone(self, remote, heads=[], stream=False):
    """Clone the remote repository into this one.

    heads:  revisions to clone; a non-empty list forces the pull path
    stream: attempt an uncompressed streaming clone when the server
            advertises the 'stream' capability
    """
    # All clients that can ask for an uncompressed clone can also read
    # every repo format such servers can serve.  Should the revlog
    # format ever change, the client must inspect version/format flags
    # on the "stream" capability and stream only when compatible.
    streaming_ok = stream and not heads and remote.capable('stream')
    if streaming_ok:
        return self.stream_in(remote)
    return self.pull(remote, heads)
1934 1936
1935 1937 # used to avoid circular references so destructors work
def aftertrans(files):
    """Return a callback that renames every (src, dest) pair in `files`.

    The pairs are copied into a fresh list so the closure holds no
    reference to the caller's object — this avoids circular references
    and lets destructors run promptly.
    """
    pairs = [tuple(entry) for entry in files]
    def renameall():
        for source, target in pairs:
            util.rename(source, target)
    return renameall
1942 1944
def instance(ui, path, create):
    """Open (or create, if `create` is true) the local repository at
    `path`, accepting either a plain path or a file:// URL."""
    localpath = util.drop_scheme('file', path)
    return localrepository(ui, localpath, create)
1945 1947
def islocal(path):
    """Repositories handled by this module are always local."""
    return True
@@ -1,570 +1,570 b''
1 1 # merge.py - directory-level update/merge handling for Mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import _
10 10 import errno, util, os, tempfile, context
11 11
def filemerge(repo, fw, fo, wctx, mctx):
    """perform a 3-way merge in the working directory

    fw = filename in the working directory
    fo = filename in other parent
    wctx, mctx = working and merge changecontexts

    Returns None when the two versions are identical (nothing to do),
    otherwise the exit status of the external merge tool (0 = success,
    non-zero = unresolved).
    """

    def temp(prefix, ctx):
        # Dump ctx's data into a uniquely named temp file and return its
        # path; the caller is responsible for unlinking it.
        pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
        (fd, name) = tempfile.mkstemp(prefix=pre)
        data = repo.wwritedata(ctx.path(), ctx.data())
        f = os.fdopen(fd, "wb")
        try:
            f.write(data)
        finally:
            # BUG FIX: close the fd even if the write fails
            f.close()
        return name

    fcm = wctx.filectx(fw)
    fco = mctx.filectx(fo)

    if not fco.cmp(fcm.data()): # files identical?
        return None

    fca = fcm.ancestor(fco)
    if not fca:
        # unrelated files: merge against the null revision
        fca = repo.filectx(fw, fileid=nullrev)
    a = repo.wjoin(fw)
    b = temp("base", fca)
    c = temp("other", fco)

    if fw != fo:
        repo.ui.status(_("merging %s and %s\n") % (fw, fo))
    else:
        repo.ui.status(_("merging %s\n") % fw)

    repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcm, fco, fca))

    cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
           or "hgmerge")
    try:
        r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
                        environ={'HG_FILE': fw,
                                 'HG_MY_NODE': str(wctx.parents()[0]),
                                 'HG_OTHER_NODE': str(mctx)})
        if r:
            repo.ui.warn(_("merging %s failed!\n") % fw)
    finally:
        # BUG FIX: always remove the temporary base/other files, even if
        # the external merge tool raises — previously they leaked.
        os.unlink(b)
        os.unlink(c)
    return r
61 61
def checkunknown(wctx, mctx):
    "check for collisions between unknown files and files in mctx"
    manifest = mctx.manifest()
    for fname in wctx.unknown():
        if fname not in manifest:
            continue
        # an untracked local file shadows a tracked remote one; abort
        # only when their contents actually differ
        if mctx.filectx(fname).cmp(wctx.filectx(fname).data()):
            raise util.Abort(_("untracked local file '%s' differs"
                               " from remote version") % fname)
70 70
def checkcollision(mctx):
    "check for case folding collisions in the destination context"
    seen = {}
    for name in mctx.manifest():
        key = name.lower()
        if key in seen:
            raise util.Abort(_("case-folding collision between %s and %s")
                             % (name, seen[key]))
        seen[key] = name
80 80
def forgetremoved(wctx, mctx):
    """
    Forget removed files

    When jumping between revisions (rather than merging), any file that
    is gone from both the working directory and the target revision must
    be dropped from the dirstate, otherwise the dirstate would keep
    listing a file that no longer appears in the manifest.

    Returns a list of (filename, "f") forget actions.
    """
    manifest = mctx.manifest()
    return [(name, "f")
            for name in wctx.deleted() + wctx.removed()
            if name not in manifest]
99 99
100 100 def findcopies(repo, m1, m2, ma, limit):
101 101 """
102 102 Find moves and copies between m1 and m2 back to limit linkrev
103 103 """
# Returns (copy, diverge): copy maps destination -> source for files
# copied/renamed on exactly one side; diverge maps a source file to the
# list of destinations it was renamed to on different sides.
104 104
105 105 def nonoverlap(d1, d2, d3):
106 106 "Return list of elements in d1 not in d2 or d3"
107 107 l = [d for d in d1 if d not in d3 and d not in d2]
108 108 l.sort()
109 109 return l
110 110
111 111 def dirname(f):
112 112 s = f.rfind("/")
113 113 if s == -1:
114 114 return ""
115 115 return f[:s]
116 116
# dirs: set (dict) of every ancestor directory of the given files
117 117 def dirs(files):
118 118 d = {}
119 119 for f in files:
120 120 f = dirname(f)
121 121 while f not in d:
122 122 d[f] = True
123 123 f = dirname(f)
124 124 return d
125 125
126 126 wctx = repo.workingctx()
127 127
# a 20-byte node refers to a stored revision; anything else means the
# working-directory version of the file
128 128 def makectx(f, n):
129 129 if len(n) == 20:
130 130 return repo.filectx(f, fileid=n)
131 131 return wctx.filectx(f)
132 132 ctx = util.cachefunc(makectx)
133 133
134 134 def findold(fctx):
135 135 "find files that path was copied from, back to linkrev limit"
136 136 old = {}
137 137 seen = {}
138 138 orig = fctx.path()
139 139 visit = [fctx]
140 140 while visit:
141 141 fc = visit.pop()
142 142 s = str(fc)
143 143 if s in seen:
144 144 continue
145 145 seen[s] = 1
146 146 if fc.path() != orig and fc.path() not in old:
147 147 old[fc.path()] = 1
148 148 if fc.rev() < limit:
149 149 continue
150 150 visit += fc.parents()
151 151
152 152 old = old.keys()
153 153 old.sort()
154 154 return old
155 155
156 156 copy = {}
157 157 fullcopy = {}
158 158 diverge = {}
159 159
160 160 def checkcopies(c, man, aman):
161 161 '''check possible copies for filectx c'''
162 162 for of in findold(c):
163 163 fullcopy[c.path()] = of # remember for dir rename detection
164 164 if of not in man: # original file not in other manifest?
165 165 if of in ma:
166 166 diverge.setdefault(of, []).append(c.path())
167 167 continue
168 168 # if the original file is unchanged on the other branch,
169 169 # no merge needed
170 170 if man[of] == aman.get(of):
171 171 continue
172 172 c2 = ctx(of, man[of])
173 173 ca = c.ancestor(c2)
174 174 if not ca: # unrelated?
175 175 continue
176 176 # named changed on only one side?
177 177 if ca.path() == c.path() or ca.path() == c2.path():
178 178 if c == ca or c2 == ca: # no merge needed, ignore copy
179 179 continue
180 180 copy[c.path()] = of
181 181
182 182 if not repo.ui.configbool("merge", "followcopies", True):
183 183 return {}, {}
184 184
185 185 # avoid silly behavior for update from empty dir
186 186 if not m1 or not m2 or not ma:
187 187 return {}, {}
188 188
# u1/u2: files present only on one side (candidates for copy sources)
189 189 u1 = nonoverlap(m1, m2, ma)
190 190 u2 = nonoverlap(m2, m1, ma)
191 191
192 192 for f in u1:
193 193 checkcopies(ctx(f, m1[f]), m2, ma)
194 194
195 195 for f in u2:
196 196 checkcopies(ctx(f, m2[f]), m1, ma)
197 197
# NOTE(review): d2 below is built from diverge but never read — it is
# rebound to dirs(m2) a few lines further down.  This looks like dead
# code left over from a refactor; confirm against upstream history.
198 198 d2 = {}
199 199 for of, fl in diverge.items():
200 200 for f in fl:
201 201 fo = list(fl)
202 202 fo.remove(f)
203 203 d2[f] = (of, fo)
204 204
205 205 if not fullcopy or not repo.ui.configbool("merge", "followdirs", True):
206 206 return copy, diverge
207 207
208 208 # generate a directory move map
209 209 d1, d2 = dirs(m1), dirs(m2)
210 210 invalid = {}
211 211 dirmove = {}
212 212
213 213 # examine each file copy for a potential directory move, which is
214 214 # when all the files in a directory are moved to a new directory
215 215 for dst, src in fullcopy.items():
216 216 dsrc, ddst = dirname(src), dirname(dst)
217 217 if dsrc in invalid:
218 218 # already seen to be uninteresting
219 219 continue
220 220 elif dsrc in d1 and ddst in d1:
221 221 # directory wasn't entirely moved locally
222 222 invalid[dsrc] = True
223 223 elif dsrc in d2 and ddst in d2:
224 224 # directory wasn't entirely moved remotely
225 225 invalid[dsrc] = True
226 226 elif dsrc in dirmove and dirmove[dsrc] != ddst:
227 227 # files from the same directory moved to two different places
228 228 invalid[dsrc] = True
229 229 else:
230 230 # looks good so far
231 231 dirmove[dsrc + "/"] = ddst + "/"
232 232
# NOTE(review): invalid keys lack the trailing "/" that dirmove keys
# carry (dirmove[dsrc + "/"] above), so "i in dirmove" appears never to
# be true and this deletion may never fire — verify.
233 233 for i in invalid:
234 234 if i in dirmove:
235 235 del dirmove[i]
236 236
237 237 del d1, d2, invalid
238 238
239 239 if not dirmove:
240 240 return copy, diverge
241 241
242 242 # check unaccounted nonoverlapping files against directory moves
243 243 for f in u1 + u2:
244 244 if f not in fullcopy:
245 245 for d in dirmove:
246 246 if f.startswith(d):
247 247 # new file added in a directory that was moved, move it
248 248 copy[f] = dirmove[d] + f[len(d):]
249 249 break
250 250
251 251 return copy, diverge
252 252
253 253 def manifestmerge(repo, p1, p2, pa, overwrite, partial):
254 254 """
255 255 Merge p1 and p2 with ancestor ma and generate merge action list
256 256
257 257 overwrite = whether we clobber working files
258 258 partial = function to filter file lists
259 259 """
# Action codes emitted below (consumed by applyupdates/recordupdates):
# "m" = 3-way merge, "g" = get from remote, "r" = remove,
# "e" = update exec flag, "d" = directory rename, "dr" = divergent renames.
260 260
261 261 repo.ui.note(_("resolving manifests\n"))
262 262 repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
263 263 repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
264 264
265 265 m1 = p1.manifest()
266 266 m2 = p2.manifest()
267 267 ma = pa.manifest()
# pa == p2 means the target is an ancestor of the working parent,
# i.e. we are moving backwards in history
268 268 backwards = (pa == p2)
269 269 action = []
270 270 copy = {}
271 271 diverge = {}
272 272
273 273 def fmerge(f, f2=None, fa=None):
274 274 """merge flags"""
# 3-way merge of each flag bit: ((a^b) | (a^c)) ^ a yields the changed
# side's value when exactly one side differs from the ancestor, and the
# common value when both sides agree.
275 275 if not f2:
276 276 f2 = f
277 277 fa = f
278 278 a, b, c = ma.execf(fa), m1.execf(f), m2.execf(f2)
279 279 if ((a^b) | (a^c)) ^ a:
280 280 return 'x'
281 281 a, b, c = ma.linkf(fa), m1.linkf(f), m2.linkf(f2)
282 282 if ((a^b) | (a^c)) ^ a:
283 283 return 'l'
284 284 return ''
285 285
# act: log and queue one (file, action-code, *extra) tuple
286 286 def act(msg, m, f, *args):
287 287 repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
288 288 action.append((f, m) + args)
289 289
290 290 if not (backwards or overwrite):
291 291 copy, diverge = findcopies(repo, m1, m2, ma, pa.rev())
292 292
293 293 for of, fl in diverge.items():
294 294 act("divergent renames", "dr", of, fl)
295 295
296 296 copied = dict.fromkeys(copy.values())
297 297
298 298 # Compare manifests
299 299 for f, n in m1.iteritems():
300 300 if partial and not partial(f):
301 301 continue
302 302 if f in m2:
303 303 # are files different?
304 304 if n != m2[f]:
305 305 a = ma.get(f, nullid)
306 306 # are both different from the ancestor?
307 307 if not overwrite and n != a and m2[f] != a:
308 308 act("versions differ", "m", f, f, f, fmerge(f), False)
309 309 # are we clobbering?
310 310 # is remote's version newer?
311 311 # or are we going back in time and clean?
312 312 elif overwrite or m2[f] != a or (backwards and not n[20:]):
313 313 act("remote is newer", "g", f, m2.flags(f))
314 314 # local is newer, not overwrite, check mode bits
315 315 elif fmerge(f) != m1.flags(f):
316 316 act("update permissions", "e", f, m2.flags(f))
317 317 # contents same, check mode bits
318 318 elif m1.flags(f) != m2.flags(f):
319 319 if overwrite or fmerge(f) != m1.flags(f):
320 320 act("update permissions", "e", f, m2.flags(f))
321 321 elif f in copied:
322 322 continue
323 323 elif f in copy:
324 324 f2 = copy[f]
325 325 if f2 not in m2: # directory rename
326 326 act("remote renamed directory to " + f2, "d",
327 327 f, None, f2, m1.flags(f))
328 328 elif f2 in m1: # case 2 A,B/B/B
329 329 act("local copied to " + f2, "m",
330 330 f, f2, f, fmerge(f, f2, f2), False)
331 331 else: # case 4,21 A/B/B
332 332 act("local moved to " + f2, "m",
333 333 f, f2, f, fmerge(f, f2, f2), False)
334 334 elif f in ma:
335 335 if n != ma[f] and not overwrite:
336 336 if repo.ui.prompt(
337 337 (_(" local changed %s which remote deleted\n") % f) +
338 338 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("d"):
339 339 act("prompt delete", "r", f)
340 340 else:
341 341 act("other deleted", "r", f)
342 342 else:
343 343 # file is created on branch or in working directory
344 344 if (overwrite and n[20:] != "u") or (backwards and not n[20:]):
345 345 act("remote deleted", "r", f)
346 346
# second pass: files present remotely but not locally
347 347 for f, n in m2.iteritems():
348 348 if partial and not partial(f):
349 349 continue
350 350 if f in m1:
351 351 continue
352 352 if f in copied:
353 353 continue
354 354 if f in copy:
355 355 f2 = copy[f]
356 356 if f2 not in m1: # directory rename
357 357 act("local renamed directory to " + f2, "d",
358 358 None, f, f2, m2.flags(f))
359 359 elif f2 in m2: # rename case 1, A/A,B/A
360 360 act("remote copied to " + f, "m",
361 361 f2, f, f, fmerge(f2, f, f2), False)
362 362 else: # case 3,20 A/B/A
363 363 act("remote moved to " + f, "m",
364 364 f2, f, f, fmerge(f2, f, f2), True)
365 365 elif f in ma:
366 366 if overwrite or backwards:
367 367 act("recreating", "g", f, m2.flags(f))
368 368 elif n != ma[f]:
369 369 if repo.ui.prompt(
370 370 (_("remote changed %s which local deleted\n") % f) +
371 371 _("(k)eep or (d)elete?"), _("[kd]"), _("k")) == _("k"):
372 372 act("prompt recreating", "g", f, m2.flags(f))
373 373 else:
374 374 act("remote created", "g", f, m2.flags(f))
375 375
376 376 return action
377 377
378 378 def applyupdates(repo, action, wctx, mctx):
379 379 "apply the merge action list to the working directory"
# Returns the tuple (updated, merged, removed, unresolved).  Action
# codes: "r" remove, "m" 3-way merge via filemerge, "g" fetch file data
# from mctx, "d" directory rename, "dr" warn about divergent renames,
# "e" update the exec flag.
380 380
381 381 updated, merged, removed, unresolved = 0, 0, 0, 0
# sort so actions are applied in stable filename order
382 382 action.sort()
383 383 for a in action:
384 384 f, m = a[:2]
# entries whose name starts with "/" are informational, not real files
385 385 if f and f[0] == "/":
386 386 continue
387 387 if m == "r": # remove
388 388 repo.ui.note(_("removing %s\n") % f)
389 389 util.audit_path(f)
390 390 try:
391 391 util.unlink(repo.wjoin(f))
392 392 except OSError, inst:
# a file already missing from the working dir is fine
393 393 if inst.errno != errno.ENOENT:
394 394 repo.ui.warn(_("update failed to remove %s: %s!\n") %
395 395 (f, inst.strerror))
396 396 removed += 1
397 397 elif m == "m": # merge
398 398 f2, fd, flags, move = a[2:]
# filemerge returns None (identical), 0 (merged) or >0 (failed)
399 399 r = filemerge(repo, f, f2, wctx, mctx)
400 400 if r > 0:
401 401 unresolved += 1
402 402 else:
403 403 if r is None:
404 404 updated += 1
405 405 else:
406 406 merged += 1
407 407 if f != fd:
408 408 repo.ui.debug(_("copying %s to %s\n") % (f, fd))
409 409 repo.wwrite(fd, repo.wread(f), flags)
410 410 if move:
411 411 repo.ui.debug(_("removing %s\n") % f)
412 412 os.unlink(repo.wjoin(f))
413 413 util.set_exec(repo.wjoin(fd), "x" in flags)
414 414 elif m == "g": # get
415 415 flags = a[2]
416 416 repo.ui.note(_("getting %s\n") % f)
417 417 t = mctx.filectx(f).data()
418 418 repo.wwrite(f, t, flags)
419 419 updated += 1
420 420 elif m == "d": # directory rename
421 421 f2, fd, flags = a[2:]
422 422 if f:
423 423 repo.ui.note(_("moving %s to %s\n") % (f, fd))
424 424 t = wctx.filectx(f).data()
425 425 repo.wwrite(fd, t, flags)
426 426 util.unlink(repo.wjoin(f))
427 427 if f2:
428 428 repo.ui.note(_("getting %s to %s\n") % (f2, fd))
429 429 t = mctx.filectx(f2).data()
430 430 repo.wwrite(fd, t, flags)
431 431 updated += 1
432 432 elif m == "dr": # divergent renames
433 433 fl = a[2]
434 434 repo.ui.warn("warning: detected divergent renames of %s to:\n" % f)
435 435 for nf in fl:
436 436 repo.ui.warn(" %s\n" % nf)
437 437 elif m == "e": # exec
438 438 flags = a[2]
439 439 util.set_exec(repo.wjoin(f), flags)
440 440
441 441 return updated, merged, removed, unresolved
442 442
443 443 def recordupdates(repo, action, branchmerge):
444 444 "record merge actions to the dirstate"
# NOTE(review): this block is rendered as a unified-diff hunk — each
# single-numbered line pair shows the old dirstate call (update/forget
# with a list) followed by its replacement in the new per-file API
# (remove/forget/normal/normaldirty/merge/add); only the second line of
# each pair is current code.
445 445
446 446 for a in action:
447 447 f, m = a[:2]
448 448 if m == "r": # remove
449 449 if branchmerge:
450 repo.dirstate.update([f], 'r')
450 repo.dirstate.remove(f)
451 451 else:
452 repo.dirstate.forget([f])
452 repo.dirstate.forget(f)
453 453 elif m == "f": # forget
454 repo.dirstate.forget([f])
454 repo.dirstate.forget(f)
455 455 elif m == "g": # get
456 456 if branchmerge:
457 repo.dirstate.update([f], 'n', st_mtime=-1)
457 repo.dirstate.normaldirty(f)
458 458 else:
459 repo.dirstate.update([f], 'n')
459 repo.dirstate.normal(f)
460 460 elif m == "m": # merge
461 461 f2, fd, flag, move = a[2:]
462 462 if branchmerge:
463 463 # We've done a branch merge, mark this file as merged
464 464 # so that we properly record the merger later
465 repo.dirstate.update([fd], 'm')
465 repo.dirstate.merge(fd)
466 466 if f != f2: # copy/rename
467 467 if move:
468 repo.dirstate.update([f], 'r')
468 repo.dirstate.remove(f)
469 469 if f != fd:
470 470 repo.dirstate.copy(f, fd)
471 471 else:
472 472 repo.dirstate.copy(f2, fd)
473 473 else:
474 474 # We've update-merged a locally modified file, so
475 475 # we set the dirstate to emulate a normal checkout
476 476 # of that file some time in the past. Thus our
477 477 # merge will appear as a normal local file
478 478 # modification.
479 repo.dirstate.update([fd], 'n', st_size=-1, st_mtime=-1)
479 repo.dirstate.normaldirty(fd)
480 480 if move:
481 repo.dirstate.forget([f])
481 repo.dirstate.forget(f)
482 482 elif m == "d": # directory rename
483 483 f2, fd, flag = a[2:]
484 484 if not f2 and f not in repo.dirstate:
485 485 # untracked file moved
486 486 continue
487 487 if branchmerge:
488 repo.dirstate.update([fd], 'a')
488 repo.dirstate.add(fd)
489 489 if f:
490 repo.dirstate.update([f], 'r')
490 repo.dirstate.remove(f)
491 491 repo.dirstate.copy(f, fd)
492 492 if f2:
493 493 repo.dirstate.copy(f2, fd)
494 494 else:
495 repo.dirstate.update([fd], 'n')
495 repo.dirstate.normal(fd)
496 496 if f:
497 repo.dirstate.forget([f])
497 repo.dirstate.forget(f)
498 498
499 499 def update(repo, node, branchmerge, force, partial, wlock):
500 500 """
501 501 Perform a merge between the working directory and the given node
502 502
503 503 branchmerge = whether to merge between branches
504 504 force = whether to force branch merging or file overwriting
505 505 partial = a function to filter file lists (dirstate not updated)
506 506 wlock = working dir lock, if already held
507 507 """
# Returns the (updated, merged, removed, unresolved) tuple produced by
# applyupdates.
508 508
# NOTE(review): a lock acquired here is never explicitly released in
# this function — presumably dropped by the lock object's destructor or
# the caller; confirm.
509 509 if not wlock:
510 510 wlock = repo.wlock()
511 511
512 512 wc = repo.workingctx()
513 513 if node is None:
514 514 # tip of current branch
515 515 try:
516 516 node = repo.branchtags()[wc.branch()]
517 517 except KeyError:
518 518 raise util.Abort(_("branch %s not found") % wc.branch())
519 519 overwrite = force and not branchmerge
520 520 forcemerge = force and branchmerge
521 521 pl = wc.parents()
522 522 p1, p2 = pl[0], repo.changectx(node)
523 523 pa = p1.ancestor(p2)
524 524 fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
# fastforward: merging across branches where one side is an ancestor of
# the other (set below); suppresses the dirstate branch switch at the end
525 525 fastforward = False
526 526
527 527 ### check phase
528 528 if not overwrite and len(pl) > 1:
529 529 raise util.Abort(_("outstanding uncommitted merges"))
530 530 if pa == p1 or pa == p2: # is there a linear path from p1 to p2?
531 531 if branchmerge:
532 532 if p1.branch() != p2.branch() and pa != p2:
533 533 fastforward = True
534 534 else:
535 535 raise util.Abort(_("there is nothing to merge, just use "
536 536 "'hg update' or look at 'hg heads'"))
537 537 elif not (overwrite or branchmerge):
538 538 raise util.Abort(_("update spans branches, use 'hg merge' "
539 539 "or 'hg update -C' to lose changes"))
540 540 if branchmerge and not forcemerge:
541 541 if wc.files():
542 542 raise util.Abort(_("outstanding uncommitted changes"))
543 543
544 544 ### calculate phase
545 545 action = []
546 546 if not force:
547 547 checkunknown(wc, p2)
# only check case-folding collisions on case-insensitive filesystems
548 548 if not util.checkfolding(repo.path):
549 549 checkcollision(p2)
550 550 if not branchmerge:
551 551 action += forgetremoved(wc, p2)
552 552 action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
553 553
554 554 ### apply phase
555 555 if not branchmerge: # just jump to the new rev
556 556 fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
557 557 if not partial:
558 558 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
559 559
560 560 stats = applyupdates(repo, action, wc, p2)
561 561
562 562 if not partial:
563 563 recordupdates(repo, action, branchmerge)
564 564 repo.dirstate.setparents(fp1, fp2)
565 565 if not branchmerge and not fastforward:
566 566 repo.dirstate.setbranch(p2.branch())
# stats[3] is the unresolved-file count from applyupdates
567 567 repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
568 568
569 569 return stats
570 570
General Comments 0
You need to be logged in to leave comments. Login now