redo merge with mpm...
Vadim Gelfer
r2921:addb58e3 merge default
@@ -0,0 +1,68 b''
1 # mail.py - mail sending bits for mercurial
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
7
8 from i18n import gettext as _
9 from demandload import *
10 demandload(globals(), "os re smtplib templater util")
11
12 def _smtp(ui):
13 '''send mail using smtp.'''
14
15 local_hostname = ui.config('smtp', 'local_hostname')
16 s = smtplib.SMTP(local_hostname=local_hostname)
17 mailhost = ui.config('smtp', 'host')
18 if not mailhost:
19 raise util.Abort(_('no [smtp]host in hgrc - cannot send mail'))
20 mailport = int(ui.config('smtp', 'port', 25))
21 ui.note(_('sending mail: smtp host %s, port %s\n') %
22 (mailhost, mailport))
23 s.connect(host=mailhost, port=mailport)
24 if ui.configbool('smtp', 'tls'):
25 ui.note(_('(using tls)\n'))
26 s.ehlo()
27 s.starttls()
28 s.ehlo()
29 username = ui.config('smtp', 'username')
30 password = ui.config('smtp', 'password')
31 if username and password:
32 ui.note(_('(authenticating to mail server as %s)\n') %
33 (username))
34 s.login(username, password)
35 return s
36
37 class _sendmail(object):
38 '''send mail using sendmail.'''
39
40 def __init__(self, ui, program):
41 self.ui = ui
42 self.program = program
43
44 def sendmail(self, sender, recipients, msg):
45 cmdline = '%s -f %s %s' % (
46 self.program, templater.email(sender),
47 ' '.join(map(templater.email, recipients)))
48 self.ui.note(_('sending mail: %s\n') % cmdline)
49 fp = os.popen(cmdline, 'w')
50 fp.write(msg)
51 ret = fp.close()
52 if ret:
53 raise util.Abort('%s %s' % (
54 os.path.basename(self.program.split(None, 1)[0]),
55 util.explain_exit(ret)[0]))
56
57 def connect(ui):
58 '''make a mail connection. object returned has one method, sendmail.
59 call as sendmail(sender, list-of-recipients, msg).'''
60
61 method = ui.config('email', 'method', 'smtp')
62 if method == 'smtp':
63 return _smtp(ui)
64
65 return _sendmail(ui, method)
66
67 def sendmail(ui, sender, recipients, msg):
68 return connect(ui).sendmail(sender, recipients, msg)
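A minimal sketch of the hgrc configuration this module reads; the [email] and [smtp] section and key names are taken from the code above, while the host and credentials are placeholders:

    [email]
    method = smtp
    [smtp]
    host = smtp.example.com
    port = 25
    tls = True
    username = hguser
    password = secret

If email.method is anything other than smtp, connect() treats the value as a sendmail-compatible program and pipes the message to it through _sendmail.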
@@ -1,1980 +1,1980 b''
1 1 # queue.py - patch queues for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 '''patch management and development
9 9
10 10 This extension lets you work with a stack of patches in a Mercurial
11 11 repository. It manages two stacks of patches - all known patches, and
12 12 applied patches (subset of known patches).
13 13
14 14 Known patches are represented as patch files in the .hg/patches
15 15 directory. Applied patches are both patch files and changesets.
16 16
17 17 Common tasks (use "hg help command" for more details):
18 18
19 19 prepare repository to work with patches qinit
20 20 create new patch qnew
21 21 import existing patch qimport
22 22
23 23 print patch series qseries
24 24 print applied patches qapplied
25 25 print name of top applied patch qtop
26 26
27 27 add known patch to applied stack qpush
28 28 remove patch from applied stack qpop
29 29 refresh contents of top applied patch qrefresh
30 30 '''
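# An illustrative session (not part of the original source), using only the
# commands listed in the help text above. The extension is commonly enabled in
# hgrc with an "[extensions]" entry such as "hgext.mq =".
#
#   hg qinit                  # prepare repository to work with patches
#   hg qnew fix-crash.patch   # create new patch on top of the applied stack
#   (edit files)
#   hg qrefresh               # capture working dir changes into the top patch
#   hg qseries; hg qtop       # print the series / the top applied patch
#   hg qpop; hg qpush         # remove from / add to the applied stack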
31 31
32 32 from mercurial.demandload import *
33 33 from mercurial.i18n import gettext as _
34 34 demandload(globals(), "os sys re struct traceback errno bz2")
35 35 demandload(globals(), "mercurial:cmdutil,commands,hg,patch,revlog,ui,util")
36 36
37 37 commands.norepo += " qclone qversion"
38 38
39 39 class statusentry:
40 40 def __init__(self, rev, name=None):
41 41 if not name:
42 42 fields = rev.split(':')
43 43 if len(fields) == 2:
44 44 self.rev, self.name = fields
45 45 else:
46 46 self.rev, self.name = None, None
47 47 else:
48 48 self.rev, self.name = rev, name
49 49
50 50 def __str__(self):
51 51 return self.rev + ':' + self.name
52 52
53 53 class queue:
54 54 def __init__(self, ui, path, patchdir=None):
55 55 self.basepath = path
56 56 self.path = patchdir or os.path.join(path, "patches")
57 57 self.opener = util.opener(self.path)
58 58 self.ui = ui
59 59 self.applied = []
60 60 self.full_series = []
61 61 self.applied_dirty = 0
62 62 self.series_dirty = 0
63 63 self.series_path = "series"
64 64 self.status_path = "status"
65 65 self.guards_path = "guards"
66 66 self.active_guards = None
67 67 self.guards_dirty = False
68 68 self._diffopts = None
69 69
70 70 if os.path.exists(self.join(self.series_path)):
71 71 self.full_series = self.opener(self.series_path).read().splitlines()
72 72 self.parse_series()
73 73
74 74 if os.path.exists(self.join(self.status_path)):
75 75 lines = self.opener(self.status_path).read().splitlines()
76 76 self.applied = [statusentry(l) for l in lines]
77 77
78 78 def diffopts(self):
79 79 if self._diffopts is None:
80 self._diffopts = self.ui.diffopts()
80 self._diffopts = patch.diffopts(self.ui)
81 81 return self._diffopts
82 82
83 83 def join(self, *p):
84 84 return os.path.join(self.path, *p)
85 85
86 86 def find_series(self, patch):
87 87 pre = re.compile("(\s*)([^#]+)")
88 88 index = 0
89 89 for l in self.full_series:
90 90 m = pre.match(l)
91 91 if m:
92 92 s = m.group(2)
93 93 s = s.rstrip()
94 94 if s == patch:
95 95 return index
96 96 index += 1
97 97 return None
98 98
99 99 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
100 100
101 101 def parse_series(self):
102 102 self.series = []
103 103 self.series_guards = []
104 104 for l in self.full_series:
105 105 h = l.find('#')
106 106 if h == -1:
107 107 patch = l
108 108 comment = ''
109 109 elif h == 0:
110 110 continue
111 111 else:
112 112 patch = l[:h]
113 113 comment = l[h:]
114 114 patch = patch.strip()
115 115 if patch:
116 116 self.series.append(patch)
117 117 self.series_guards.append(self.guard_re.findall(comment))
118 118
119 119 def check_guard(self, guard):
120 120 bad_chars = '# \t\r\n\f'
121 121 first = guard[0]
122 122 for c in '-+':
123 123 if first == c:
124 124 return (_('guard %r starts with invalid character: %r') %
125 125 (guard, c))
126 126 for c in bad_chars:
127 127 if c in guard:
128 128 return _('invalid character in guard %r: %r') % (guard, c)
129 129
130 130 def set_active(self, guards):
131 131 for guard in guards:
132 132 bad = self.check_guard(guard)
133 133 if bad:
134 134 raise util.Abort(bad)
135 135 guards = dict.fromkeys(guards).keys()
136 136 guards.sort()
137 137 self.ui.debug('active guards: %s\n' % ' '.join(guards))
138 138 self.active_guards = guards
139 139 self.guards_dirty = True
140 140
141 141 def active(self):
142 142 if self.active_guards is None:
143 143 self.active_guards = []
144 144 try:
145 145 guards = self.opener(self.guards_path).read().split()
146 146 except IOError, err:
147 147 if err.errno != errno.ENOENT: raise
148 148 guards = []
149 149 for i, guard in enumerate(guards):
150 150 bad = self.check_guard(guard)
151 151 if bad:
152 152 self.ui.warn('%s:%d: %s\n' %
153 153 (self.join(self.guards_path), i + 1, bad))
154 154 else:
155 155 self.active_guards.append(guard)
156 156 return self.active_guards
157 157
158 158 def set_guards(self, idx, guards):
159 159 for g in guards:
160 160 if len(g) < 2:
161 161 raise util.Abort(_('guard %r too short') % g)
162 162 if g[0] not in '-+':
163 163 raise util.Abort(_('guard %r starts with invalid char') % g)
164 164 bad = self.check_guard(g[1:])
165 165 if bad:
166 166 raise util.Abort(bad)
167 167 drop = self.guard_re.sub('', self.full_series[idx])
168 168 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
169 169 self.parse_series()
170 170 self.series_dirty = True
171 171
172 172 def pushable(self, idx):
173 173 if isinstance(idx, str):
174 174 idx = self.series.index(idx)
175 175 patchguards = self.series_guards[idx]
176 176 if not patchguards:
177 177 return True, None
178 178 default = False
179 179 guards = self.active()
180 180 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
181 181 if exactneg:
182 182 return False, exactneg[0]
183 183 pos = [g for g in patchguards if g[0] == '+']
184 184 exactpos = [g for g in pos if g[1:] in guards]
185 185 if pos:
186 186 if exactpos:
187 187 return True, exactpos[0]
188 188 return False, pos
189 189 return True, ''
190 190
191 191 def explain_pushable(self, idx, all_patches=False):
192 192 write = all_patches and self.ui.write or self.ui.warn
193 193 if all_patches or self.ui.verbose:
194 194 if isinstance(idx, str):
195 195 idx = self.series.index(idx)
196 196 pushable, why = self.pushable(idx)
197 197 if all_patches and pushable:
198 198 if why is None:
199 199 write(_('allowing %s - no guards in effect\n') %
200 200 self.series[idx])
201 201 else:
202 202 if not why:
203 203 write(_('allowing %s - no matching negative guards\n') %
204 204 self.series[idx])
205 205 else:
206 206 write(_('allowing %s - guarded by %r\n') %
207 207 (self.series[idx], why))
208 208 if not pushable:
209 209 if why:
210 210 write(_('skipping %s - guarded by %r\n') %
211 211 (self.series[idx], ' '.join(why)))
212 212 else:
213 213 write(_('skipping %s - no matching guards\n') %
214 214 self.series[idx])
215 215
216 216 def save_dirty(self):
217 217 def write_list(items, path):
218 218 fp = self.opener(path, 'w')
219 219 for i in items:
220 220 print >> fp, i
221 221 fp.close()
222 222 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
223 223 if self.series_dirty: write_list(self.full_series, self.series_path)
224 224 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
225 225
226 226 def readheaders(self, patch):
227 227 def eatdiff(lines):
228 228 while lines:
229 229 l = lines[-1]
230 230 if (l.startswith("diff -") or
231 231 l.startswith("Index:") or
232 232 l.startswith("===========")):
233 233 del lines[-1]
234 234 else:
235 235 break
236 236 def eatempty(lines):
237 237 while lines:
238 238 l = lines[-1]
239 239 if re.match('\s*$', l):
240 240 del lines[-1]
241 241 else:
242 242 break
243 243
244 244 pf = self.join(patch)
245 245 message = []
246 246 comments = []
247 247 user = None
248 248 date = None
249 249 format = None
250 250 subject = None
251 251 diffstart = 0
252 252
253 253 for line in file(pf):
254 254 line = line.rstrip()
255 255 if diffstart:
256 256 if line.startswith('+++ '):
257 257 diffstart = 2
258 258 break
259 259 if line.startswith("--- "):
260 260 diffstart = 1
261 261 continue
262 262 elif format == "hgpatch":
263 263 # parse values when importing the result of an hg export
264 264 if line.startswith("# User "):
265 265 user = line[7:]
266 266 elif line.startswith("# Date "):
267 267 date = line[7:]
268 268 elif not line.startswith("# ") and line:
269 269 message.append(line)
270 270 format = None
271 271 elif line == '# HG changeset patch':
272 272 format = "hgpatch"
273 273 elif (format != "tagdone" and (line.startswith("Subject: ") or
274 274 line.startswith("subject: "))):
275 275 subject = line[9:]
276 276 format = "tag"
277 277 elif (format != "tagdone" and (line.startswith("From: ") or
278 278 line.startswith("from: "))):
279 279 user = line[6:]
280 280 format = "tag"
281 281 elif format == "tag" and line == "":
282 282 # when looking for tags (subject: from: etc) they
283 283 # end once you find a blank line in the source
284 284 format = "tagdone"
285 285 elif message or line:
286 286 message.append(line)
287 287 comments.append(line)
288 288
289 289 eatdiff(message)
290 290 eatdiff(comments)
291 291 eatempty(message)
292 292 eatempty(comments)
293 293
294 294 # make sure message isn't empty
295 295 if format and format.startswith("tag") and subject:
296 296 message.insert(0, "")
297 297 message.insert(0, subject)
298 298 return (message, comments, user, date, diffstart > 1)
299 299
300 300 def printdiff(self, repo, node1, node2=None, files=None,
301 301 fp=None, changes=None, opts=None):
302 302 patch.diff(repo, node1, node2, files,
303 303 fp=fp, changes=changes, opts=self.diffopts())
304 304
305 305 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
306 306 # first try just applying the patch
307 307 (err, n) = self.apply(repo, [ patch ], update_status=False,
308 308 strict=True, merge=rev, wlock=wlock)
309 309
310 310 if err == 0:
311 311 return (err, n)
312 312
313 313 if n is None:
314 314 raise util.Abort(_("apply failed for patch %s") % patch)
315 315
316 316 self.ui.warn("patch didn't work out, merging %s\n" % patch)
317 317
318 318 # apply failed, strip away that rev and merge.
319 319 hg.clean(repo, head, wlock=wlock)
320 320 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
321 321
322 322 c = repo.changelog.read(rev)
323 323 ret = hg.merge(repo, rev, wlock=wlock)
324 324 if ret:
325 325 raise util.Abort(_("update returned %d") % ret)
326 326 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
327 327 if n == None:
328 328 raise util.Abort(_("repo commit failed"))
329 329 try:
330 330 message, comments, user, date, patchfound = mergeq.readheaders(patch)
331 331 except:
332 332 raise util.Abort(_("unable to read %s") % patch)
333 333
334 334 patchf = self.opener(patch, "w")
335 335 if comments:
336 336 comments = "\n".join(comments) + '\n\n'
337 337 patchf.write(comments)
338 338 self.printdiff(repo, head, n, fp=patchf)
339 339 patchf.close()
340 340 return (0, n)
341 341
342 342 def qparents(self, repo, rev=None):
343 343 if rev is None:
344 344 (p1, p2) = repo.dirstate.parents()
345 345 if p2 == revlog.nullid:
346 346 return p1
347 347 if len(self.applied) == 0:
348 348 return None
349 349 return revlog.bin(self.applied[-1].rev)
350 350 pp = repo.changelog.parents(rev)
351 351 if pp[1] != revlog.nullid:
352 352 arevs = [ x.rev for x in self.applied ]
353 353 p0 = revlog.hex(pp[0])
354 354 p1 = revlog.hex(pp[1])
355 355 if p0 in arevs:
356 356 return pp[0]
357 357 if p1 in arevs:
358 358 return pp[1]
359 359 return pp[0]
360 360
361 361 def mergepatch(self, repo, mergeq, series, wlock):
362 362 if len(self.applied) == 0:
363 363 # each of the patches merged in will have two parents. This
364 364 # can confuse the qrefresh, qdiff, and strip code because it
365 365 # needs to know which parent is actually in the patch queue.
366 366 # so, we insert a merge marker with only one parent. This way
367 367 # the first patch in the queue is never a merge patch
368 368 #
369 369 pname = ".hg.patches.merge.marker"
370 370 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
371 371 wlock=wlock)
372 372 self.applied.append(statusentry(revlog.hex(n), pname))
373 373 self.applied_dirty = 1
374 374
375 375 head = self.qparents(repo)
376 376
377 377 for patch in series:
378 378 patch = mergeq.lookup(patch, strict=True)
379 379 if not patch:
380 380 self.ui.warn("patch %s does not exist\n" % patch)
381 381 return (1, None)
382 382 pushable, reason = self.pushable(patch)
383 383 if not pushable:
384 384 self.explain_pushable(patch, all_patches=True)
385 385 continue
386 386 info = mergeq.isapplied(patch)
387 387 if not info:
388 388 self.ui.warn("patch %s is not applied\n" % patch)
389 389 return (1, None)
390 390 rev = revlog.bin(info[1])
391 391 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
392 392 if head:
393 393 self.applied.append(statusentry(revlog.hex(head), patch))
394 394 self.applied_dirty = 1
395 395 if err:
396 396 return (err, head)
397 397 return (0, head)
398 398
399 399 def patch(self, repo, patchfile):
400 400 '''Apply patchfile to the working directory.
401 401 patchfile: file name of patch'''
402 402 try:
403 403 (files, fuzz) = patch.patch(patchfile, self.ui, strip=1,
404 404 cwd=repo.root)
405 405 except Exception, inst:
406 406 self.ui.note(str(inst) + '\n')
407 407 if not self.ui.verbose:
408 408 self.ui.warn("patch failed, unable to continue (try -v)\n")
409 409 return (False, [], False)
410 410
411 411 return (True, files.keys(), fuzz)
412 412
413 413 def apply(self, repo, series, list=False, update_status=True,
414 414 strict=False, patchdir=None, merge=None, wlock=None):
415 415 # TODO unify with commands.py
416 416 if not patchdir:
417 417 patchdir = self.path
418 418 err = 0
419 419 if not wlock:
420 420 wlock = repo.wlock()
421 421 lock = repo.lock()
422 422 tr = repo.transaction()
423 423 n = None
424 424 for patch in series:
425 425 pushable, reason = self.pushable(patch)
426 426 if not pushable:
427 427 self.explain_pushable(patch, all_patches=True)
428 428 continue
429 429 self.ui.warn("applying %s\n" % patch)
430 430 pf = os.path.join(patchdir, patch)
431 431
432 432 try:
433 433 message, comments, user, date, patchfound = self.readheaders(patch)
434 434 except:
435 435 self.ui.warn("Unable to read %s\n" % pf)
436 436 err = 1
437 437 break
438 438
439 439 if not message:
440 440 message = "imported patch %s\n" % patch
441 441 else:
442 442 if list:
443 443 message.append("\nimported patch %s" % patch)
444 444 message = '\n'.join(message)
445 445
446 446 (patcherr, files, fuzz) = self.patch(repo, pf)
447 447 patcherr = not patcherr
448 448
449 449 if merge and len(files) > 0:
450 450 # Mark as merged and update dirstate parent info
451 451 repo.dirstate.update(repo.dirstate.filterfiles(files), 'm')
452 452 p1, p2 = repo.dirstate.parents()
453 453 repo.dirstate.setparents(p1, merge)
454 454 if len(files) > 0:
455 455 cwd = repo.getcwd()
456 456 cfiles = files
457 457 if cwd:
458 458 cfiles = [util.pathto(cwd, f) for f in files]
459 459 cmdutil.addremove(repo, cfiles, wlock=wlock)
460 460 n = repo.commit(files, message, user, date, force=1, lock=lock,
461 461 wlock=wlock)
462 462
463 463 if n == None:
464 464 raise util.Abort(_("repo commit failed"))
465 465
466 466 if update_status:
467 467 self.applied.append(statusentry(revlog.hex(n), patch))
468 468
469 469 if patcherr:
470 470 if not patchfound:
471 471 self.ui.warn("patch %s is empty\n" % patch)
472 472 err = 0
473 473 else:
474 474 self.ui.warn("patch failed, rejects left in working dir\n")
475 475 err = 1
476 476 break
477 477
478 478 if fuzz and strict:
479 479 self.ui.warn("fuzz found when applying patch, stopping\n")
480 480 err = 1
481 481 break
482 482 tr.close()
483 483 return (err, n)
484 484
485 485 def delete(self, repo, patches, keep=False):
486 486 realpatches = []
487 487 for patch in patches:
488 488 patch = self.lookup(patch, strict=True)
489 489 info = self.isapplied(patch)
490 490 if info:
491 491 raise util.Abort(_("cannot delete applied patch %s") % patch)
492 492 if patch not in self.series:
493 493 raise util.Abort(_("patch %s not in series file") % patch)
494 494 realpatches.append(patch)
495 495
496 496 if not keep:
497 497 r = self.qrepo()
498 498 if r:
499 499 r.remove(realpatches, True)
500 500 else:
501 501 os.unlink(self.join(patch))
502 502
503 503 indices = [self.find_series(p) for p in realpatches]
504 504 indices.sort()
505 505 for i in indices[-1::-1]:
506 506 del self.full_series[i]
507 507 self.parse_series()
508 508 self.series_dirty = 1
509 509
510 510 def check_toppatch(self, repo):
511 511 if len(self.applied) > 0:
512 512 top = revlog.bin(self.applied[-1].rev)
513 513 pp = repo.dirstate.parents()
514 514 if top not in pp:
515 515 raise util.Abort(_("queue top not at same revision as working directory"))
516 516 return top
517 517 return None
518 518 def check_localchanges(self, repo, force=False, refresh=True):
519 519 m, a, r, d = repo.status()[:4]
520 520 if m or a or r or d:
521 521 if not force:
522 522 if refresh:
523 523 raise util.Abort(_("local changes found, refresh first"))
524 524 else:
525 525 raise util.Abort(_("local changes found"))
526 526 return m, a, r, d
527 527 def new(self, repo, patch, msg=None, force=None):
528 528 if os.path.exists(self.join(patch)):
529 529 raise util.Abort(_('patch "%s" already exists') % patch)
530 530 m, a, r, d = self.check_localchanges(repo, force)
531 531 commitfiles = m + a + r
532 532 self.check_toppatch(repo)
533 533 wlock = repo.wlock()
534 534 insert = self.full_series_end()
535 535 if msg:
536 536 n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
537 537 wlock=wlock)
538 538 else:
539 539 n = repo.commit(commitfiles,
540 540 "New patch: %s" % patch, force=True, wlock=wlock)
541 541 if n == None:
542 542 raise util.Abort(_("repo commit failed"))
543 543 self.full_series[insert:insert] = [patch]
544 544 self.applied.append(statusentry(revlog.hex(n), patch))
545 545 self.parse_series()
546 546 self.series_dirty = 1
547 547 self.applied_dirty = 1
548 548 p = self.opener(patch, "w")
549 549 if msg:
550 550 msg = msg + "\n"
551 551 p.write(msg)
552 552 p.close()
553 553 wlock = None
554 554 r = self.qrepo()
555 555 if r: r.add([patch])
556 556 if commitfiles:
557 557 self.refresh(repo, short=True)
558 558
559 559 def strip(self, repo, rev, update=True, backup="all", wlock=None):
560 560 def limitheads(chlog, stop):
561 561 """return the list of all nodes that have no children"""
562 562 p = {}
563 563 h = []
564 564 stoprev = 0
565 565 if stop in chlog.nodemap:
566 566 stoprev = chlog.rev(stop)
567 567
568 568 for r in range(chlog.count() - 1, -1, -1):
569 569 n = chlog.node(r)
570 570 if n not in p:
571 571 h.append(n)
572 572 if n == stop:
573 573 break
574 574 if r < stoprev:
575 575 break
576 576 for pn in chlog.parents(n):
577 577 p[pn] = 1
578 578 return h
579 579
580 580 def bundle(cg):
581 581 backupdir = repo.join("strip-backup")
582 582 if not os.path.isdir(backupdir):
583 583 os.mkdir(backupdir)
584 584 name = os.path.join(backupdir, "%s" % revlog.short(rev))
585 585 name = savename(name)
586 586 self.ui.warn("saving bundle to %s\n" % name)
587 587 # TODO, exclusive open
588 588 f = open(name, "wb")
589 589 try:
590 590 f.write("HG10")
591 591 z = bz2.BZ2Compressor(9)
592 592 while 1:
593 593 chunk = cg.read(4096)
594 594 if not chunk:
595 595 break
596 596 f.write(z.compress(chunk))
597 597 f.write(z.flush())
598 598 except:
599 599 os.unlink(name)
600 600 raise
601 601 f.close()
602 602 return name
603 603
604 604 def stripall(rev, revnum):
605 605 cl = repo.changelog
606 606 c = cl.read(rev)
607 607 mm = repo.manifest.read(c[0])
608 608 seen = {}
609 609
610 610 for x in xrange(revnum, cl.count()):
611 611 c = cl.read(cl.node(x))
612 612 for f in c[3]:
613 613 if f in seen:
614 614 continue
615 615 seen[f] = 1
616 616 if f in mm:
617 617 filerev = mm[f]
618 618 else:
619 619 filerev = 0
620 620 seen[f] = filerev
621 621 # we go in two steps here so the strip loop happens in a
622 622 # sensible order. When stripping many files, this helps keep
623 623 # our disk access patterns under control.
624 624 seen_list = seen.keys()
625 625 seen_list.sort()
626 626 for f in seen_list:
627 627 ff = repo.file(f)
628 628 filerev = seen[f]
629 629 if filerev != 0:
630 630 if filerev in ff.nodemap:
631 631 filerev = ff.rev(filerev)
632 632 else:
633 633 filerev = 0
634 634 ff.strip(filerev, revnum)
635 635
636 636 if not wlock:
637 637 wlock = repo.wlock()
638 638 lock = repo.lock()
639 639 chlog = repo.changelog
640 640 # TODO delete the undo files, and handle undo of merge sets
641 641 pp = chlog.parents(rev)
642 642 revnum = chlog.rev(rev)
643 643
644 644 if update:
645 645 self.check_localchanges(repo, refresh=False)
646 646 urev = self.qparents(repo, rev)
647 647 hg.clean(repo, urev, wlock=wlock)
648 648 repo.dirstate.write()
649 649
650 650 # save is a list of all the branches we are truncating away
651 651 # that we actually want to keep. changegroup will be used
652 652 # to preserve them and add them back after the truncate
653 653 saveheads = []
654 654 savebases = {}
655 655
656 656 heads = limitheads(chlog, rev)
657 657 seen = {}
658 658
659 659 # search through all the heads, finding those where the revision
660 660 # we want to strip away is an ancestor. Also look for merges
661 661 # that might be turned into new heads by the strip.
662 662 while heads:
663 663 h = heads.pop()
664 664 n = h
665 665 while True:
666 666 seen[n] = 1
667 667 pp = chlog.parents(n)
668 668 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
669 669 if pp[1] not in seen:
670 670 heads.append(pp[1])
671 671 if pp[0] == revlog.nullid:
672 672 break
673 673 if chlog.rev(pp[0]) < revnum:
674 674 break
675 675 n = pp[0]
676 676 if n == rev:
677 677 break
678 678 r = chlog.reachable(h, rev)
679 679 if rev not in r:
680 680 saveheads.append(h)
681 681 for x in r:
682 682 if chlog.rev(x) > revnum:
683 683 savebases[x] = 1
684 684
685 685 # create a changegroup for all the branches we need to keep
686 686 if backup == "all":
687 687 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
688 688 bundle(backupch)
689 689 if saveheads:
690 690 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
691 691 chgrpfile = bundle(backupch)
692 692
693 693 stripall(rev, revnum)
694 694
695 695 change = chlog.read(rev)
696 696 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
697 697 chlog.strip(revnum, revnum)
698 698 if saveheads:
699 699 self.ui.status("adding branch\n")
700 700 commands.unbundle(self.ui, repo, chgrpfile, update=False)
701 701 if backup != "strip":
702 702 os.unlink(chgrpfile)
703 703
704 704 def isapplied(self, patch):
705 705 """returns (index, rev, patch)"""
706 706 for i in xrange(len(self.applied)):
707 707 a = self.applied[i]
708 708 if a.name == patch:
709 709 return (i, a.rev, a.name)
710 710 return None
711 711
712 712 # if the exact patch name does not exist, we try a few
713 713 # variations. If strict is passed, we try only #1
714 714 #
715 715 # 1) a number to indicate an offset in the series file
716 716 # 2) a unique substring of the patch name was given
717 717 # 3) patchname[-+]num to indicate an offset in the series file
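# Illustrative examples of the rules above (patch names are made up): with a
# series file containing a.patch, b.patch, c.patch:
#   lookup('1')         -> 'b.patch'   (offset into the series file)
#   lookup('b')         -> 'b.patch'   (unique substring of a patch name)
#   lookup('c.patch-1') -> 'b.patch'   (patch name minus an offset)
#   lookup('a.patch+2') -> 'c.patch'   (patch name plus an offset)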
718 718 def lookup(self, patch, strict=False):
719 719 patch = patch and str(patch)
720 720
721 721 def partial_name(s):
722 722 if s in self.series:
723 723 return s
724 724 matches = [x for x in self.series if s in x]
725 725 if len(matches) > 1:
726 726 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
727 727 for m in matches:
728 728 self.ui.warn(' %s\n' % m)
729 729 return None
730 730 if matches:
731 731 return matches[0]
732 732 if len(self.series) > 0 and len(self.applied) > 0:
733 733 if s == 'qtip':
734 734 return self.series[self.series_end()-1]
735 735 if s == 'qbase':
736 736 return self.series[0]
737 737 return None
738 738 if patch == None:
739 739 return None
740 740
741 741 # we don't want to return a partial match until we make
742 742 # sure the file name passed in does not exist (checked below)
743 743 res = partial_name(patch)
744 744 if res and res == patch:
745 745 return res
746 746
747 747 if not os.path.isfile(self.join(patch)):
748 748 try:
749 749 sno = int(patch)
750 750 except(ValueError, OverflowError):
751 751 pass
752 752 else:
753 753 if sno < len(self.series):
754 754 return self.series[sno]
755 755 if not strict:
756 756 # return any partial match made above
757 757 if res:
758 758 return res
759 759 minus = patch.rsplit('-', 1)
760 760 if len(minus) > 1:
761 761 res = partial_name(minus[0])
762 762 if res:
763 763 i = self.series.index(res)
764 764 try:
765 765 off = int(minus[1] or 1)
766 766 except(ValueError, OverflowError):
767 767 pass
768 768 else:
769 769 if i - off >= 0:
770 770 return self.series[i - off]
771 771 plus = patch.rsplit('+', 1)
772 772 if len(plus) > 1:
773 773 res = partial_name(plus[0])
774 774 if res:
775 775 i = self.series.index(res)
776 776 try:
777 777 off = int(plus[1] or 1)
778 778 except(ValueError, OverflowError):
779 779 pass
780 780 else:
781 781 if i + off < len(self.series):
782 782 return self.series[i + off]
783 783 raise util.Abort(_("patch %s not in series") % patch)
784 784
785 785 def push(self, repo, patch=None, force=False, list=False,
786 786 mergeq=None, wlock=None):
787 787 if not wlock:
788 788 wlock = repo.wlock()
789 789 patch = self.lookup(patch)
790 790 if patch and self.isapplied(patch):
791 791 self.ui.warn(_("patch %s is already applied\n") % patch)
792 792 sys.exit(1)
793 793 if self.series_end() == len(self.series):
794 794 self.ui.warn(_("patch series fully applied\n"))
795 795 sys.exit(1)
796 796 if not force:
797 797 self.check_localchanges(repo)
798 798
799 799 self.applied_dirty = 1;
800 800 start = self.series_end()
801 801 if start > 0:
802 802 self.check_toppatch(repo)
803 803 if not patch:
804 804 patch = self.series[start]
805 805 end = start + 1
806 806 else:
807 807 end = self.series.index(patch, start) + 1
808 808 s = self.series[start:end]
809 809 if mergeq:
810 810 ret = self.mergepatch(repo, mergeq, s, wlock)
811 811 else:
812 812 ret = self.apply(repo, s, list, wlock=wlock)
813 813 top = self.applied[-1].name
814 814 if ret[0]:
815 815 self.ui.write("Errors during apply, please fix and refresh %s\n" %
816 816 top)
817 817 else:
818 818 self.ui.write("Now at: %s\n" % top)
819 819 return ret[0]
820 820
821 821 def pop(self, repo, patch=None, force=False, update=True, all=False,
822 822 wlock=None):
823 823 def getfile(f, rev):
824 824 t = repo.file(f).read(rev)
825 825 try:
826 826 repo.wfile(f, "w").write(t)
827 827 except IOError:
828 828 try:
829 829 os.makedirs(os.path.dirname(repo.wjoin(f)))
830 830 except OSError, err:
831 831 if err.errno != errno.EEXIST: raise
832 832 repo.wfile(f, "w").write(t)
833 833
834 834 if not wlock:
835 835 wlock = repo.wlock()
836 836 if patch:
837 837 # index, rev, patch
838 838 info = self.isapplied(patch)
839 839 if not info:
840 840 patch = self.lookup(patch)
841 841 info = self.isapplied(patch)
842 842 if not info:
843 843 raise util.Abort(_("patch %s is not applied") % patch)
844 844 if len(self.applied) == 0:
845 845 self.ui.warn(_("no patches applied\n"))
846 846 sys.exit(1)
847 847
848 848 if not update:
849 849 parents = repo.dirstate.parents()
850 850 rr = [ revlog.bin(x.rev) for x in self.applied ]
851 851 for p in parents:
852 852 if p in rr:
853 853 self.ui.warn("qpop: forcing dirstate update\n")
854 854 update = True
855 855
856 856 if not force and update:
857 857 self.check_localchanges(repo)
858 858
859 859 self.applied_dirty = 1;
860 860 end = len(self.applied)
861 861 if not patch:
862 862 if all:
863 863 popi = 0
864 864 else:
865 865 popi = len(self.applied) - 1
866 866 else:
867 867 popi = info[0] + 1
868 868 if popi >= end:
869 869 self.ui.warn("qpop: %s is already at the top\n" % patch)
870 870 return
871 871 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
872 872
873 873 start = info[0]
874 874 rev = revlog.bin(info[1])
875 875
876 876 # we know there are no local changes, so we can make a simplified
877 877 # form of hg.update.
878 878 if update:
879 879 top = self.check_toppatch(repo)
880 880 qp = self.qparents(repo, rev)
881 881 changes = repo.changelog.read(qp)
882 882 mmap = repo.manifest.read(changes[0])
883 883 m, a, r, d, u = repo.status(qp, top)[:5]
884 884 if d:
885 885 raise util.Abort("deletions found between repo revs")
886 886 for f in m:
887 887 getfile(f, mmap[f])
888 888 for f in r:
889 889 getfile(f, mmap[f])
890 890 util.set_exec(repo.wjoin(f), mmap.execf(f))
891 891 repo.dirstate.update(m + r, 'n')
892 892 for f in a:
893 893 try: os.unlink(repo.wjoin(f))
894 894 except: raise
895 895 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
896 896 except: pass
897 897 if a:
898 898 repo.dirstate.forget(a)
899 899 repo.dirstate.setparents(qp, revlog.nullid)
900 900 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
901 901 del self.applied[start:end]
902 902 if len(self.applied):
903 903 self.ui.write("Now at: %s\n" % self.applied[-1].name)
904 904 else:
905 905 self.ui.write("Patch queue now empty\n")
906 906
907 907 def diff(self, repo, files):
908 908 top = self.check_toppatch(repo)
909 909 if not top:
910 910 self.ui.write("No patches applied\n")
911 911 return
912 912 qp = self.qparents(repo, top)
913 913 self.printdiff(repo, qp, files=files)
914 914
915 915 def refresh(self, repo, msg='', short=False):
916 916 if len(self.applied) == 0:
917 917 self.ui.write("No patches applied\n")
918 918 return
919 919 wlock = repo.wlock()
920 920 self.check_toppatch(repo)
921 921 (top, patch) = (self.applied[-1].rev, self.applied[-1].name)
922 922 top = revlog.bin(top)
923 923 cparents = repo.changelog.parents(top)
924 924 patchparent = self.qparents(repo, top)
925 925 message, comments, user, date, patchfound = self.readheaders(patch)
926 926
927 927 patchf = self.opener(patch, "w")
928 928 msg = msg.rstrip()
929 929 if msg:
930 930 if comments:
931 931 # Remove existing message.
932 932 ci = 0
933 933 for mi in range(len(message)):
934 934 while message[mi] != comments[ci]:
935 935 ci += 1
936 936 del comments[ci]
937 937 comments.append(msg)
938 938 if comments:
939 939 comments = "\n".join(comments) + '\n\n'
940 940 patchf.write(comments)
941 941
942 942 tip = repo.changelog.tip()
943 943 if top == tip:
944 944 # if the top of our patch queue is also the tip, there is an
945 945 # optimization here. We update the dirstate in place and strip
946 946 # off the tip commit. Then just commit the current directory
947 947 # tree. We can also send repo.commit the list of files
948 948 # changed to speed up the diff
949 949 #
950 950 # in short mode, we only diff the files included in the
951 951 # patch already
952 952 #
953 953 # this should really read:
954 954 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
955 955 # but we do it backwards to take advantage of manifest/chlog
956 956 # caching against the next repo.status call
957 957 #
958 958 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
959 959 if short:
960 960 filelist = mm + aa + dd
961 961 else:
962 962 filelist = None
963 963 m, a, r, d, u = repo.status(files=filelist)[:5]
964 964
965 965 # we might end up with files that were added between tip and
966 966 # the dirstate parent, but then changed in the local dirstate.
967 967 # in this case, we want them to only show up in the added section
968 968 for x in m:
969 969 if x not in aa:
970 970 mm.append(x)
971 971 # we might end up with files added by the local dirstate that
972 972 # were deleted by the patch. In this case, they should only
973 973 # show up in the changed section.
974 974 for x in a:
975 975 if x in dd:
976 976 del dd[dd.index(x)]
977 977 mm.append(x)
978 978 else:
979 979 aa.append(x)
980 980 # make sure any files deleted in the local dirstate
981 981 # are not in the add or change column of the patch
982 982 forget = []
983 983 for x in d + r:
984 984 if x in aa:
985 985 del aa[aa.index(x)]
986 986 forget.append(x)
987 987 continue
988 988 elif x in mm:
989 989 del mm[mm.index(x)]
990 990 dd.append(x)
991 991
992 992 m = list(util.unique(mm))
993 993 r = list(util.unique(dd))
994 994 a = list(util.unique(aa))
995 995 filelist = list(util.unique(m + r + a))
996 996 self.printdiff(repo, patchparent, files=filelist,
997 997 changes=(m, a, r, [], u), fp=patchf)
998 998 patchf.close()
999 999
1000 1000 changes = repo.changelog.read(tip)
1001 1001 repo.dirstate.setparents(*cparents)
1002 1002 repo.dirstate.update(a, 'a')
1003 1003 repo.dirstate.update(r, 'r')
1004 1004 repo.dirstate.update(m, 'n')
1005 1005 repo.dirstate.forget(forget)
1006 1006
1007 1007 if not msg:
1008 1008 if not message:
1009 1009 message = "patch queue: %s\n" % patch
1010 1010 else:
1011 1011 message = "\n".join(message)
1012 1012 else:
1013 1013 message = msg
1014 1014
1015 1015 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1016 1016 n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
1017 1017 self.applied[-1] = statusentry(revlog.hex(n), patch)
1018 1018 self.applied_dirty = 1
1019 1019 else:
1020 1020 self.printdiff(repo, patchparent, fp=patchf)
1021 1021 patchf.close()
1022 1022 self.pop(repo, force=True, wlock=wlock)
1023 1023 self.push(repo, force=True, wlock=wlock)
1024 1024
1025 1025 def init(self, repo, create=False):
1026 1026 if os.path.isdir(self.path):
1027 1027 raise util.Abort(_("patch queue directory already exists"))
1028 1028 os.mkdir(self.path)
1029 1029 if create:
1030 1030 return self.qrepo(create=True)
1031 1031
1032 1032 def unapplied(self, repo, patch=None):
1033 1033 if patch and patch not in self.series:
1034 1034 raise util.Abort(_("patch %s is not in series file") % patch)
1035 1035 if not patch:
1036 1036 start = self.series_end()
1037 1037 else:
1038 1038 start = self.series.index(patch) + 1
1039 1039 unapplied = []
1040 1040 for i in xrange(start, len(self.series)):
1041 1041 pushable, reason = self.pushable(i)
1042 1042 if pushable:
1043 1043 unapplied.append((i, self.series[i]))
1044 1044 self.explain_pushable(i)
1045 1045 return unapplied
1046 1046
1047 1047 def qseries(self, repo, missing=None, summary=False):
1048 1048 start = self.series_end(all_patches=True)
1049 1049 if not missing:
1050 1050 for i in range(len(self.series)):
1051 1051 patch = self.series[i]
1052 1052 if self.ui.verbose:
1053 1053 if i < start:
1054 1054 status = 'A'
1055 1055 elif self.pushable(i)[0]:
1056 1056 status = 'U'
1057 1057 else:
1058 1058 status = 'G'
1059 1059 self.ui.write('%d %s ' % (i, status))
1060 1060 if summary:
1061 1061 msg = self.readheaders(patch)[0]
1062 1062 msg = msg and ': ' + msg[0] or ': '
1063 1063 else:
1064 1064 msg = ''
1065 1065 self.ui.write('%s%s\n' % (patch, msg))
1066 1066 else:
1067 1067 msng_list = []
1068 1068 for root, dirs, files in os.walk(self.path):
1069 1069 d = root[len(self.path) + 1:]
1070 1070 for f in files:
1071 1071 fl = os.path.join(d, f)
1072 1072 if (fl not in self.series and
1073 1073 fl not in (self.status_path, self.series_path)
1074 1074 and not fl.startswith('.')):
1075 1075 msng_list.append(fl)
1076 1076 msng_list.sort()
1077 1077 for x in msng_list:
1078 1078 if self.ui.verbose:
1079 1079 self.ui.write("D ")
1080 1080 self.ui.write("%s\n" % x)
1081 1081
1082 1082 def issaveline(self, l):
1083 1083 if l.name == '.hg.patches.save.line':
1084 1084 return True
1085 1085
1086 1086 def qrepo(self, create=False):
1087 1087 if create or os.path.isdir(self.join(".hg")):
1088 1088 return hg.repository(self.ui, path=self.path, create=create)
1089 1089
1090 1090 def restore(self, repo, rev, delete=None, qupdate=None):
1091 1091 c = repo.changelog.read(rev)
1092 1092 desc = c[4].strip()
1093 1093 lines = desc.splitlines()
1094 1094 i = 0
1095 1095 datastart = None
1096 1096 series = []
1097 1097 applied = []
1098 1098 qpp = None
1099 1099 for i in xrange(0, len(lines)):
1100 1100 if lines[i] == 'Patch Data:':
1101 1101 datastart = i + 1
1102 1102 elif lines[i].startswith('Dirstate:'):
1103 1103 l = lines[i].rstrip()
1104 1104 l = l[10:].split(' ')
1105 1105 qpp = [ hg.bin(x) for x in l ]
1106 1106 elif datastart != None:
1107 1107 l = lines[i].rstrip()
1108 1108 se = statusentry(l)
1109 1109 file_ = se.name
1110 1110 if se.rev:
1111 1111 applied.append(se)
1112 1112 series.append(file_)
1113 1113 if datastart == None:
1114 1114 self.ui.warn("No saved patch data found\n")
1115 1115 return 1
1116 1116 self.ui.warn("restoring status: %s\n" % lines[0])
1117 1117 self.full_series = series
1118 1118 self.applied = applied
1119 1119 self.parse_series()
1120 1120 self.series_dirty = 1
1121 1121 self.applied_dirty = 1
1122 1122 heads = repo.changelog.heads()
1123 1123 if delete:
1124 1124 if rev not in heads:
1125 1125 self.ui.warn("save entry has children, leaving it alone\n")
1126 1126 else:
1127 1127 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1128 1128 pp = repo.dirstate.parents()
1129 1129 if rev in pp:
1130 1130 update = True
1131 1131 else:
1132 1132 update = False
1133 1133 self.strip(repo, rev, update=update, backup='strip')
1134 1134 if qpp:
1135 1135 self.ui.warn("saved queue repository parents: %s %s\n" %
1136 1136 (hg.short(qpp[0]), hg.short(qpp[1])))
1137 1137 if qupdate:
1138 1138 print "queue directory updating"
1139 1139 r = self.qrepo()
1140 1140 if not r:
1141 1141 self.ui.warn("Unable to load queue repository\n")
1142 1142 return 1
1143 1143 hg.clean(r, qpp[0])
1144 1144
1145 1145 def save(self, repo, msg=None):
1146 1146 if len(self.applied) == 0:
1147 1147 self.ui.warn("save: no patches applied, exiting\n")
1148 1148 return 1
1149 1149 if self.issaveline(self.applied[-1]):
1150 1150 self.ui.warn("status is already saved\n")
1151 1151 return 1
1152 1152
1153 1153 ar = [ ':' + x for x in self.full_series ]
1154 1154 if not msg:
1155 1155 msg = "hg patches saved state"
1156 1156 else:
1157 1157 msg = "hg patches: " + msg.rstrip('\r\n')
1158 1158 r = self.qrepo()
1159 1159 if r:
1160 1160 pp = r.dirstate.parents()
1161 1161 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1162 1162 msg += "\n\nPatch Data:\n"
1163 1163 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1164 1164 "\n".join(ar) + '\n' or "")
1165 1165 n = repo.commit(None, text, user=None, force=1)
1166 1166 if not n:
1167 1167 self.ui.warn("repo commit failed\n")
1168 1168 return 1
1169 1169 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1170 1170 self.applied_dirty = 1
1171 1171
1172 1172 def full_series_end(self):
1173 1173 if len(self.applied) > 0:
1174 1174 p = self.applied[-1].name
1175 1175 end = self.find_series(p)
1176 1176 if end == None:
1177 1177 return len(self.full_series)
1178 1178 return end + 1
1179 1179 return 0
1180 1180
1181 1181 def series_end(self, all_patches=False):
1182 1182 end = 0
1183 1183 def next(start):
1184 1184 if all_patches:
1185 1185 return start
1186 1186 i = start
1187 1187 while i < len(self.series):
1188 1188 p, reason = self.pushable(i)
1189 1189 if p:
1190 1190 break
1191 1191 self.explain_pushable(i)
1192 1192 i += 1
1193 1193 return i
1194 1194 if len(self.applied) > 0:
1195 1195 p = self.applied[-1].name
1196 1196 try:
1197 1197 end = self.series.index(p)
1198 1198 except ValueError:
1199 1199 return 0
1200 1200 return next(end + 1)
1201 1201 return next(end)
1202 1202
1203 1203 def qapplied(self, repo, patch=None):
1204 1204 if patch and patch not in self.series:
1205 1205 raise util.Abort(_("patch %s is not in series file") % patch)
1206 1206 if not patch:
1207 1207 end = len(self.applied)
1208 1208 else:
1209 1209 end = self.series.index(patch) + 1
1210 1210 for x in xrange(end):
1211 1211 p = self.appliedname(x)
1212 1212 self.ui.write("%s\n" % p)
1213 1213
1214 1214 def appliedname(self, index):
1215 1215 pname = self.applied[index].name
1216 1216 if not self.ui.verbose:
1217 1217 p = pname
1218 1218 else:
1219 1219 p = str(self.series.index(pname)) + " " + pname
1220 1220 return p
1221 1221
1222 1222 def top(self, repo):
1223 1223 if len(self.applied):
1224 1224 p = self.appliedname(-1)
1225 1225 self.ui.write(p + '\n')
1226 1226 else:
1227 1227 self.ui.write("No patches applied\n")
1228 1228
1229 1229 def next(self, repo):
1230 1230 end = self.series_end()
1231 1231 if end == len(self.series):
1232 1232 self.ui.write("All patches applied\n")
1233 1233 else:
1234 1234 p = self.series[end]
1235 1235 if self.ui.verbose:
1236 1236 self.ui.write("%d " % self.series.index(p))
1237 1237 self.ui.write(p + '\n')
1238 1238
1239 1239 def prev(self, repo):
1240 1240 if len(self.applied) > 1:
1241 1241 p = self.appliedname(-2)
1242 1242 self.ui.write(p + '\n')
1243 1243 elif len(self.applied) == 1:
1244 1244 self.ui.write("Only one patch applied\n")
1245 1245 else:
1246 1246 self.ui.write("No patches applied\n")
1247 1247
1248 1248 def qimport(self, repo, files, patch=None, existing=None, force=None):
1249 1249 if len(files) > 1 and patch:
1250 1250 raise util.Abort(_('option "-n" not valid when importing multiple '
1251 1251 'files'))
1252 1252 i = 0
1253 1253 added = []
1254 1254 for filename in files:
1255 1255 if existing:
1256 1256 if not patch:
1257 1257 patch = filename
1258 1258 if not os.path.isfile(self.join(patch)):
1259 1259 raise util.Abort(_("patch %s does not exist") % patch)
1260 1260 else:
1261 1261 try:
1262 1262 text = file(filename).read()
1263 1263 except IOError:
1264 1264 raise util.Abort(_("unable to read %s") % patch)
1265 1265 if not patch:
1266 1266 patch = os.path.split(filename)[1]
1267 1267 if not force and os.path.exists(self.join(patch)):
1268 1268 raise util.Abort(_('patch "%s" already exists') % patch)
1269 1269 patchf = self.opener(patch, "w")
1270 1270 patchf.write(text)
1271 1271 if patch in self.series:
1272 1272 raise util.Abort(_('patch %s is already in the series file')
1273 1273 % patch)
1274 1274 index = self.full_series_end() + i
1275 1275 self.full_series[index:index] = [patch]
1276 1276 self.parse_series()
1277 1277 self.ui.warn("adding %s to series file\n" % patch)
1278 1278 i += 1
1279 1279 added.append(patch)
1280 1280 patch = None
1281 1281 self.series_dirty = 1
1282 1282 qrepo = self.qrepo()
1283 1283 if qrepo:
1284 1284 qrepo.add(added)
1285 1285
1286 1286 def delete(ui, repo, patch, *patches, **opts):
1287 1287 """remove patches from queue
1288 1288
1289 1289 The patches must not be applied.
1290 1290 With -k, the patch files are preserved in the patch directory."""
1291 1291 q = repo.mq
1292 1292 q.delete(repo, (patch,) + patches, keep=opts.get('keep'))
1293 1293 q.save_dirty()
1294 1294 return 0
1295 1295
1296 1296 def applied(ui, repo, patch=None, **opts):
1297 1297 """print the patches already applied"""
1298 1298 repo.mq.qapplied(repo, patch)
1299 1299 return 0
1300 1300
1301 1301 def unapplied(ui, repo, patch=None, **opts):
1302 1302 """print the patches not yet applied"""
1303 1303 for i, p in repo.mq.unapplied(repo, patch):
1304 1304 if ui.verbose:
1305 1305 ui.write("%d " % i)
1306 1306 ui.write("%s\n" % p)
1307 1307
1308 1308 def qimport(ui, repo, *filename, **opts):
1309 1309 """import a patch"""
1310 1310 q = repo.mq
1311 1311 q.qimport(repo, filename, patch=opts['name'],
1312 1312 existing=opts['existing'], force=opts['force'])
1313 1313 q.save_dirty()
1314 1314 return 0
1315 1315
1316 1316 def init(ui, repo, **opts):
1317 1317 """init a new queue repository
1318 1318
1319 1319 The queue repository is unversioned by default. If -c is
1320 1320 specified, qinit will create a separate nested repository
1321 1321 for patches. Use qcommit to commit changes to this queue
1322 1322 repository."""
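# Illustrative: "hg qinit" creates the unversioned .hg/patches directory,
# while "hg qinit -c" additionally makes it a nested repository so that
# patches can be committed with qcommit.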
1323 1323 q = repo.mq
1324 1324 r = q.init(repo, create=opts['create_repo'])
1325 1325 q.save_dirty()
1326 1326 if r:
1327 1327 fp = r.wopener('.hgignore', 'w')
1328 1328 print >> fp, 'syntax: glob'
1329 1329 print >> fp, 'status'
1330 1330 fp.close()
1331 1331 r.wopener('series', 'w').close()
1332 1332 r.add(['.hgignore', 'series'])
1333 1333 return 0
1334 1334
1335 1335 def clone(ui, source, dest=None, **opts):
1336 1336 '''clone main and patch repository at same time
1337 1337
1338 1338 If the source is local, the destination will have no patches applied. If
1339 1339 the source is remote, this command cannot check whether patches are
1340 1340 applied in the source, so it cannot guarantee that patches are not
1341 1341 applied in the destination. If you clone a remote repository, make sure
1342 1342 it has no patches applied before doing so.
1343 1343
1344 1344 Source patch repository is looked for in <src>/.hg/patches by
1345 1345 default. Use -p <url> to change.
1346 1346 '''
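# Illustrative (URLs made up): "hg qclone http://example.com/hg/proj proj"
# clones the main repository and its patch queue together; pass
# "-p <url>" when the patch repository is not at <src>/.hg/patches.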
1347 1347 commands.setremoteconfig(ui, opts)
1348 1348 if dest is None:
1349 1349 dest = hg.defaultdest(source)
1350 1350 sr = hg.repository(ui, ui.expandpath(source))
1351 1351 qbase, destrev = None, None
1352 1352 if sr.local():
1353 1353 reposetup(ui, sr)
1354 1354 if sr.mq.applied:
1355 1355 qbase = revlog.bin(sr.mq.applied[0].rev)
1356 1356 if not hg.islocal(dest):
1357 1357 destrev = sr.parents(qbase)[0]
1358 1358 ui.note(_('cloning main repo\n'))
1359 1359 sr, dr = hg.clone(ui, sr, dest,
1360 1360 pull=opts['pull'],
1361 1361 rev=destrev,
1362 1362 update=False,
1363 1363 stream=opts['uncompressed'])
1364 1364 ui.note(_('cloning patch repo\n'))
1365 1365 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1366 1366 dr.url() + '/.hg/patches',
1367 1367 pull=opts['pull'],
1368 1368 update=not opts['noupdate'],
1369 1369 stream=opts['uncompressed'])
1370 1370 if dr.local():
1371 1371 if qbase:
1372 1372 ui.note(_('stripping applied patches from destination repo\n'))
1373 1373 reposetup(ui, dr)
1374 1374 dr.mq.strip(dr, qbase, update=False, backup=None)
1375 1375 if not opts['noupdate']:
1376 1376 ui.note(_('updating destination repo\n'))
1377 1377 hg.update(dr, dr.changelog.tip())
1378 1378
1379 1379 def commit(ui, repo, *pats, **opts):
1380 1380 """commit changes in the queue repository"""
1381 1381 q = repo.mq
1382 1382 r = q.qrepo()
1383 1383 if not r: raise util.Abort('no queue repository')
1384 1384 commands.commit(r.ui, r, *pats, **opts)
1385 1385
1386 1386 def series(ui, repo, **opts):
1387 1387 """print the entire series file"""
1388 1388 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1389 1389 return 0
1390 1390
1391 1391 def top(ui, repo, **opts):
1392 1392 """print the name of the current patch"""
1393 1393 repo.mq.top(repo)
1394 1394 return 0
1395 1395
1396 1396 def next(ui, repo, **opts):
1397 1397 """print the name of the next patch"""
1398 1398 repo.mq.next(repo)
1399 1399 return 0
1400 1400
1401 1401 def prev(ui, repo, **opts):
1402 1402 """print the name of the previous patch"""
1403 1403 repo.mq.prev(repo)
1404 1404 return 0
1405 1405
1406 1406 def new(ui, repo, patch, **opts):
1407 1407 """create a new patch
1408 1408
1409 1409 qnew creates a new patch on top of the currently-applied patch
1410 1410 (if any). It will refuse to run if there are any outstanding
1411 1411 changes unless -f is specified, in which case the patch will
1412 1412 be initialised with them.
1413 1413
1414 1414 -m or -l set the patch header as well as the commit message.
1415 1415 If neither is specified, the patch header is empty and the
1416 1416 commit message is 'New patch: PATCH'"""
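# Illustrative: "hg qnew -m 'fix overflow in frobnicate' fix-frobnicate.patch";
# if the working directory has outstanding changes, add -f to start the new
# patch from them.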
1417 1417 q = repo.mq
1418 1418 message = commands.logmessage(opts)
1419 1419 q.new(repo, patch, msg=message, force=opts['force'])
1420 1420 q.save_dirty()
1421 1421 return 0
1422 1422
1423 1423 def refresh(ui, repo, **opts):
1424 1424 """update the current patch"""
1425 1425 q = repo.mq
1426 1426 message = commands.logmessage(opts)
1427 1427 if opts['edit']:
1428 1428 if message:
1429 1429 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1430 1430 patch = q.applied[-1].name
1431 1431 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1432 1432 message = ui.edit('\n'.join(message), user or ui.username())
1433 1433 q.refresh(repo, msg=message, short=opts['short'])
1434 1434 q.save_dirty()
1435 1435 return 0
1436 1436
1437 1437 def diff(ui, repo, *files, **opts):
1438 1438 """diff of the current patch"""
1439 1439 # deep in the dirstate code, the walkhelper method wants a list, not a tuple
1440 1440 repo.mq.diff(repo, list(files))
1441 1441 return 0
1442 1442
1443 1443 def fold(ui, repo, *files, **opts):
1444 1444 """fold the named patches into the current patch
1445 1445
1446 1446 Patches must not yet be applied. Each patch will be successively
1447 1447 applied to the current patch in the order given. If all the
1448 1448 patches apply successfully, the current patch will be refreshed
1449 1449 with the new cumulative patch, and the folded patches will
1450 1450 be deleted. With -k/--keep, the folded patch files will not
1451 1451 be removed afterwards.
1452 1452
1453 1453 The header for each folded patch will be concatenated with
1454 1454 the current patch header, separated by a line of '* * *'."""
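# Illustrative: with feature.patch as the current applied patch,
# "hg qfold feature-fixup.patch" applies that unapplied patch on top,
# refreshes feature.patch with the combined result, and deletes the folded
# patch file; add -k/--keep to preserve it.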
1455 1455
1456 1456 q = repo.mq
1457 1457
1458 1458 if not files:
1459 1459 raise util.Abort(_('qfold requires at least one patch name'))
1460 1460 if not q.check_toppatch(repo):
1461 1461 raise util.Abort(_('No patches applied\n'))
1462 1462
1463 1463 message = commands.logmessage(opts)
1464 1464 if opts['edit']:
1465 1465 if message:
1466 1466 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1467 1467
1468 1468 parent = q.lookup('qtip')
1469 1469 patches = []
1470 1470 messages = []
1471 1471 for f in files:
1472 1472 patch = q.lookup(f)
1473 1473 if patch in patches or patch == parent:
1474 1474 ui.warn(_('Skipping already folded patch %s\n') % patch)
1475 1475 if q.isapplied(patch):
1476 1476 raise util.Abort(_('qfold cannot fold already applied patch %s') % patch)
1477 1477 patches.append(patch)
1478 1478
1479 1479 for patch in patches:
1480 1480 if not message:
1481 1481 messages.append(q.readheaders(patch)[0])
1482 1482 pf = q.join(patch)
1483 1483 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1484 1484 if not patchsuccess:
1485 1485 raise util.Abort(_('Error folding patch %s') % patch)
1486 1486
1487 1487 if not message:
1488 1488 message, comments, user = q.readheaders(parent)[0:3]
1489 1489 for msg in messages:
1490 1490 message.append('* * *')
1491 1491 message.extend(msg)
1492 1492 message = '\n'.join(message)
1493 1493
1494 1494 if opts['edit']:
1495 1495 message = ui.edit(message, user or ui.username())
1496 1496
1497 1497 q.refresh(repo, msg=message)
1498 1498
1499 1499 for patch in patches:
1500 1500 q.delete(repo, patch, keep=opts['keep'])
1501 1501
1502 1502 q.save_dirty()
1503 1503
1504 1504 def guard(ui, repo, *args, **opts):
1505 1505 '''set or print guards for a patch
1506 1506
1507 1507 guards control whether a patch can be pushed. a patch with no
1508 1508 guards is always pushed. a patch with positive guard ("+foo") is
1509 1509 pushed only if qselect command enables guard "foo". a patch with
1510 1510 negative guard ("-foo") is never pushed if qselect command enables
1511 1511 guard "foo".
1512 1512
1513 1513 with no arguments, default is to print current active guards.
1514 1514 with arguments, set active guards for patch.
1515 1515
1516 1516 to set negative guard "-foo" on topmost patch ("--" is needed so
1517 1517 hg will not interpret "-foo" as argument):
1518 1518 hg qguard -- -foo
1519 1519
1520 1520 to set guards on other patch:
1521 1521 hg qguard other.patch +2.6.17 -stable
1522 1522 '''
1523 1523 def status(idx):
1524 1524 guards = q.series_guards[idx] or ['unguarded']
1525 1525 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1526 1526 q = repo.mq
1527 1527 patch = None
1528 1528 args = list(args)
1529 1529 if opts['list']:
1530 1530 if args or opts['none']:
1531 1531 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1532 1532 for i in xrange(len(q.series)):
1533 1533 status(i)
1534 1534 return
1535 1535 if not args or args[0][0:1] in '-+':
1536 1536 if not q.applied:
1537 1537 raise util.Abort(_('no patches applied'))
1538 1538 patch = q.applied[-1].name
1539 1539 if patch is None and args[0][0:1] not in '-+':
1540 1540 patch = args.pop(0)
1541 1541 if patch is None:
1542 1542 raise util.Abort(_('no patch to work with'))
1543 1543 if args or opts['none']:
1544 1544 q.set_guards(q.find_series(patch), args)
1545 1545 q.save_dirty()
1546 1546 else:
1547 1547 status(q.series.index(q.lookup(patch)))
1548 1548
1549 1549 def header(ui, repo, patch=None):
1550 1550 """Print the header of the topmost or specified patch"""
1551 1551 q = repo.mq
1552 1552
1553 1553 if patch:
1554 1554 patch = q.lookup(patch)
1555 1555 else:
1556 1556 if not q.applied:
1557 1557 ui.write('No patches applied\n')
1558 1558 return
1559 1559 patch = q.lookup('qtip')
1560 1560 message = repo.mq.readheaders(patch)[0]
1561 1561
1562 1562 ui.write('\n'.join(message) + '\n')
1563 1563
1564 1564 def lastsavename(path):
1565 1565 (directory, base) = os.path.split(path)
1566 1566 names = os.listdir(directory)
1567 1567 namere = re.compile("%s.([0-9]+)" % base)
1568 1568 maxindex = None
1569 1569 maxname = None
1570 1570 for f in names:
1571 1571 m = namere.match(f)
1572 1572 if m:
1573 1573 index = int(m.group(1))
1574 1574 if maxindex == None or index > maxindex:
1575 1575 maxindex = index
1576 1576 maxname = f
1577 1577 if maxname:
1578 1578 return (os.path.join(directory, maxname), maxindex)
1579 1579 return (None, None)
1580 1580
1581 1581 def savename(path):
1582 1582 (last, index) = lastsavename(path)
1583 1583 if last is None:
1584 1584 index = 0
1585 1585 newpath = path + ".%d" % (index + 1)
1586 1586 return newpath
1587 1587
1588 1588 def push(ui, repo, patch=None, **opts):
1589 1589 """push the next patch onto the stack"""
1590 1590 q = repo.mq
1591 1591 mergeq = None
1592 1592
1593 1593 if opts['all']:
1594 1594 patch = q.series[-1]
1595 1595 if opts['merge']:
1596 1596 if opts['name']:
1597 1597 newpath = opts['name']
1598 1598 else:
1599 1599 newpath, i = lastsavename(q.path)
1600 1600 if not newpath:
1601 1601 ui.warn("no saved queues found, please use -n\n")
1602 1602 return 1
1603 1603 mergeq = queue(ui, repo.join(""), newpath)
1604 1604 ui.warn("merging with queue at: %s\n" % mergeq.path)
1605 1605 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1606 1606 mergeq=mergeq)
1607 1607 q.save_dirty()
1608 1608 return ret
1609 1609
1610 1610 def pop(ui, repo, patch=None, **opts):
1611 1611 """pop the current patch off the stack"""
1612 1612 localupdate = True
1613 1613 if opts['name']:
1614 1614 q = queue(ui, repo.join(""), repo.join(opts['name']))
1615 1615 ui.warn('using patch queue: %s\n' % q.path)
1616 1616 localupdate = False
1617 1617 else:
1618 1618 q = repo.mq
1619 1619 q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
1620 1620 q.save_dirty()
1621 1621 return 0
1622 1622
1623 1623 def rename(ui, repo, patch, name=None, **opts):
1624 1624 """rename a patch
1625 1625
1626 1626 With one argument, renames the current patch to PATCH1.
1627 1627 With two arguments, renames PATCH1 to PATCH2."""
1628 1628
1629 1629 q = repo.mq
1630 1630
1631 1631 if not name:
1632 1632 name = patch
1633 1633 patch = None
1634 1634
1635 1635 if name in q.series:
1636 1636 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1637 1637
1638 1638 absdest = q.join(name)
1639 1639 if os.path.exists(absdest):
1640 1640 raise util.Abort(_('%s already exists') % absdest)
1641 1641
1642 1642 if patch:
1643 1643 patch = q.lookup(patch)
1644 1644 else:
1645 1645 if not q.applied:
1646 1646 ui.write(_('No patches applied\n'))
1647 1647 return
1648 1648 patch = q.lookup('qtip')
1649 1649
1650 1650 if ui.verbose:
1651 1651 ui.write('Renaming %s to %s\n' % (patch, name))
1652 1652 i = q.find_series(patch)
1653 1653 q.full_series[i] = name
1654 1654 q.parse_series()
1655 1655 q.series_dirty = 1
1656 1656
1657 1657 info = q.isapplied(patch)
1658 1658 if info:
1659 1659 q.applied[info[0]] = statusentry(info[1], name)
1660 1660 q.applied_dirty = 1
1661 1661
1662 1662 util.rename(q.join(patch), absdest)
1663 1663 r = q.qrepo()
1664 1664 if r:
1665 1665 wlock = r.wlock()
1666 1666 if r.dirstate.state(name) == 'r':
1667 1667 r.undelete([name], wlock)
1668 1668 r.copy(patch, name, wlock)
1669 1669 r.remove([patch], False, wlock)
1670 1670
1671 1671 q.save_dirty()
1672 1672
1673 1673 def restore(ui, repo, rev, **opts):
1674 1674 """restore the queue state saved by a rev"""
1675 1675 rev = repo.lookup(rev)
1676 1676 q = repo.mq
1677 1677 q.restore(repo, rev, delete=opts['delete'],
1678 1678 qupdate=opts['update'])
1679 1679 q.save_dirty()
1680 1680 return 0
1681 1681
1682 1682 def save(ui, repo, **opts):
1683 1683 """save current queue state"""
1684 1684 q = repo.mq
1685 1685 message = commands.logmessage(opts)
1686 1686 ret = q.save(repo, msg=message)
1687 1687 if ret:
1688 1688 return ret
1689 1689 q.save_dirty()
1690 1690 if opts['copy']:
1691 1691 path = q.path
1692 1692 if opts['name']:
1693 1693 newpath = os.path.join(q.basepath, opts['name'])
1694 1694 if os.path.exists(newpath):
1695 1695 if not os.path.isdir(newpath):
1696 1696 raise util.Abort(_('destination %s exists and is not '
1697 1697 'a directory') % newpath)
1698 1698 if not opts['force']:
1699 1699 raise util.Abort(_('destination %s exists, '
1700 1700 'use -f to force') % newpath)
1701 1701 else:
1702 1702 newpath = savename(path)
1703 1703 ui.warn("copy %s to %s\n" % (path, newpath))
1704 1704 util.copyfiles(path, newpath)
1705 1705 if opts['empty']:
1706 1706 try:
1707 1707 os.unlink(q.join(q.status_path))
1708 1708 except:
1709 1709 pass
1710 1710 return 0
1711 1711
1712 1712 def strip(ui, repo, rev, **opts):
1713 1713 """strip a revision and all later revs on the same branch"""
1714 1714 rev = repo.lookup(rev)
1715 1715 backup = 'all'
1716 1716 if opts['backup']:
1717 1717 backup = 'strip'
1718 1718 elif opts['nobackup']:
1719 1719 backup = 'none'
1720 1720 repo.mq.strip(repo, rev, backup=backup)
1721 1721 return 0
1722 1722
1723 1723 def select(ui, repo, *args, **opts):
1724 1724 '''set or print guarded patches to push
1725 1725
1726 1726 use qguard command to set or print guards on patch. then use
1727 1727 qselect to tell mq which guards to use. example:
1728 1728
1729 1729 qguard foo.patch -stable (negative guard)
1730 1730 qguard bar.patch +stable (positive guard)
1731 1731 qselect stable
1732 1732
1733 1733 this sets "stable" guard. mq will skip foo.patch (because it has
1734 1734 negative match) but push bar.patch (because it has positive
1735 1735 match). patch is pushed if any positive guards match and no
1736 1736 negative guards match.
1737 1737
1738 1738 with no arguments, default is to print current active guards.
1739 1739 with arguments, set active guards as given.
1740 1740
1741 1741 use -n/--none to deactivate guards (no other arguments needed).
1742 1742 when no guards active, patches with positive guards are skipped,
1743 1743 patches with negative guards are pushed.
1744 1744
1745 1745 qselect can change guards of applied patches. it does not pop
1746 1746 guarded patches by default. use --pop to pop back to last applied
1747 1747 patch that is not guarded. use --reapply (implies --pop) to push
1748 1748 back to current patch afterwards, but skip guarded patches.
1749 1749
1750 1750 use -s/--series to print list of all guards in series file (no
1751 1751 other arguments needed). use -v for more information.'''
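# illustrative qselect session, following on from the docstring example
# above (patch names and output will vary):
#   hg qselect stable          # activate the "stable" guard
#   hg qselect                 # print the currently active guards
#   hg qselect -n --reapply    # deactivate guards, pop and re-push unguarded patches
#   hg qselect -s -v           # list every guard used in the series file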
1752 1752
1753 1753 q = repo.mq
1754 1754 guards = q.active()
1755 1755 if args or opts['none']:
1756 1756 old_unapplied = q.unapplied(repo)
1757 1757 old_guarded = [i for i in xrange(len(q.applied)) if
1758 1758 not q.pushable(i)[0]]
1759 1759 q.set_active(args)
1760 1760 q.save_dirty()
1761 1761 if not args:
1762 1762 ui.status(_('guards deactivated\n'))
1763 1763 if not opts['pop'] and not opts['reapply']:
1764 1764 unapplied = q.unapplied(repo)
1765 1765 guarded = [i for i in xrange(len(q.applied))
1766 1766 if not q.pushable(i)[0]]
1767 1767 if len(unapplied) != len(old_unapplied):
1768 1768 ui.status(_('number of unguarded, unapplied patches has '
1769 1769 'changed from %d to %d\n') %
1770 1770 (len(old_unapplied), len(unapplied)))
1771 1771 if len(guarded) != len(old_guarded):
1772 1772 ui.status(_('number of guarded, applied patches has changed '
1773 1773 'from %d to %d\n') %
1774 1774 (len(old_guarded), len(guarded)))
1775 1775 elif opts['series']:
1776 1776 guards = {}
1777 1777 noguards = 0
1778 1778 for gs in q.series_guards:
1779 1779 if not gs:
1780 1780 noguards += 1
1781 1781 for g in gs:
1782 1782 guards.setdefault(g, 0)
1783 1783 guards[g] += 1
1784 1784 if ui.verbose:
1785 1785 guards['NONE'] = noguards
1786 1786 guards = guards.items()
1787 1787 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1788 1788 if guards:
1789 1789 ui.note(_('guards in series file:\n'))
1790 1790 for guard, count in guards:
1791 1791 ui.note('%2d ' % count)
1792 1792 ui.write(guard, '\n')
1793 1793 else:
1794 1794 ui.note(_('no guards in series file\n'))
1795 1795 else:
1796 1796 if guards:
1797 1797 ui.note(_('active guards:\n'))
1798 1798 for g in guards:
1799 1799 ui.write(g, '\n')
1800 1800 else:
1801 1801 ui.write(_('no active guards\n'))
1802 1802 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
1803 1803 popped = False
1804 1804 if opts['pop'] or opts['reapply']:
1805 1805 for i in xrange(len(q.applied)):
1806 1806 pushable, reason = q.pushable(i)
1807 1807 if not pushable:
1808 1808 ui.status(_('popping guarded patches\n'))
1809 1809 popped = True
1810 1810 if i == 0:
1811 1811 q.pop(repo, all=True)
1812 1812 else:
1813 1813 q.pop(repo, i-1)
1814 1814 break
1815 1815 if popped:
1816 1816 try:
1817 1817 if reapply:
1818 1818 ui.status(_('reapplying unguarded patches\n'))
1819 1819 q.push(repo, reapply)
1820 1820 finally:
1821 1821 q.save_dirty()
1822 1822
1823 1823 def reposetup(ui, repo):
1824 1824 class mqrepo(repo.__class__):
1825 1825 def abort_if_wdir_patched(self, errmsg, force=False):
1826 1826 if self.mq.applied and not force:
1827 1827 parent = revlog.hex(self.dirstate.parents()[0])
1828 1828 if parent in [s.rev for s in self.mq.applied]:
1829 1829 raise util.Abort(errmsg)
1830 1830
1831 1831 def commit(self, *args, **opts):
1832 1832 if len(args) >= 6:
1833 1833 force = args[5]
1834 1834 else:
1835 1835 force = opts.get('force')
1836 1836 self.abort_if_wdir_patched(
1837 1837 _('cannot commit over an applied mq patch'),
1838 1838 force)
1839 1839
1840 1840 return super(mqrepo, self).commit(*args, **opts)
1841 1841
1842 1842 def push(self, remote, force=False, revs=None):
1843 1843 if self.mq.applied and not force:
1844 1844 raise util.Abort(_('source has mq patches applied'))
1845 1845 return super(mqrepo, self).push(remote, force, revs)
1846 1846
1847 1847 def tags(self):
1848 1848 if self.tagscache:
1849 1849 return self.tagscache
1850 1850
1851 1851 tagscache = super(mqrepo, self).tags()
1852 1852
1853 1853 q = self.mq
1854 1854 if not q.applied:
1855 1855 return tagscache
1856 1856
1857 1857 mqtags = [(patch.rev, patch.name) for patch in q.applied]
1858 1858 mqtags.append((mqtags[-1][0], 'qtip'))
1859 1859 mqtags.append((mqtags[0][0], 'qbase'))
1860 1860 for patch in mqtags:
1861 1861 if patch[1] in tagscache:
1862 1862 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
1863 1863 else:
1864 1864 tagscache[patch[1]] = revlog.bin(patch[0])
1865 1865
1866 1866 return tagscache
1867 1867
1868 1868 if repo.local():
1869 1869 repo.__class__ = mqrepo
1870 1870 repo.mq = queue(ui, repo.join(""))
1871 1871
1872 1872 cmdtable = {
1873 1873 "qapplied": (applied, [], 'hg qapplied [PATCH]'),
1874 1874 "qclone": (clone,
1875 1875 [('', 'pull', None, _('use pull protocol to copy metadata')),
1876 1876 ('U', 'noupdate', None, _('do not update the new working directories')),
1877 1877 ('', 'uncompressed', None,
1878 1878 _('use uncompressed transfer (fast over LAN)')),
1879 1879 ('e', 'ssh', '', _('specify ssh command to use')),
1880 1880 ('p', 'patches', '', _('location of source patch repo')),
1881 1881 ('', 'remotecmd', '',
1882 1882 _('specify hg command to run on the remote side'))],
1883 1883 'hg qclone [OPTION]... SOURCE [DEST]'),
1884 1884 "qcommit|qci":
1885 1885 (commit,
1886 1886 commands.table["^commit|ci"][1],
1887 1887 'hg qcommit [OPTION]... [FILE]...'),
1888 1888 "^qdiff": (diff, [], 'hg qdiff [FILE]...'),
1889 1889 "qdelete|qremove|qrm":
1890 1890 (delete,
1891 1891 [('k', 'keep', None, _('keep patch file'))],
1892 1892 'hg qdelete [-k] PATCH'),
1893 1893 'qfold':
1894 1894 (fold,
1895 1895 [('e', 'edit', None, _('edit patch header')),
1896 1896 ('k', 'keep', None, _('keep folded patch files')),
1897 1897 ('m', 'message', '', _('set patch header to <text>')),
1898 1898 ('l', 'logfile', '', _('set patch header to contents of <file>'))],
1899 1899 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
1900 1900 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
1901 1901 ('n', 'none', None, _('drop all guards'))],
1902 1902 'hg qguard [PATCH] [+GUARD...] [-GUARD...]'),
1903 1903 'qheader': (header, [],
1904 1904 _('hg qheader [PATCH]')),
1905 1905 "^qimport":
1906 1906 (qimport,
1907 1907 [('e', 'existing', None, 'import file in patch dir'),
1908 1908 ('n', 'name', '', 'patch file name'),
1909 1909 ('f', 'force', None, 'overwrite existing files')],
1910 1910 'hg qimport [-e] [-n NAME] [-f] FILE...'),
1911 1911 "^qinit":
1912 1912 (init,
1913 1913 [('c', 'create-repo', None, 'create queue repository')],
1914 1914 'hg qinit [-c]'),
1915 1915 "qnew":
1916 1916 (new,
1917 1917 [('m', 'message', '', _('use <text> as commit message')),
1918 1918 ('l', 'logfile', '', _('read the commit message from <file>')),
1919 1919 ('f', 'force', None, _('import uncommitted changes into patch'))],
1920 1920 'hg qnew [-m TEXT] [-l FILE] [-f] PATCH'),
1921 1921 "qnext": (next, [], 'hg qnext'),
1922 1922 "qprev": (prev, [], 'hg qprev'),
1923 1923 "^qpop":
1924 1924 (pop,
1925 1925 [('a', 'all', None, 'pop all patches'),
1926 1926 ('n', 'name', '', 'queue name to pop'),
1927 1927 ('f', 'force', None, 'forget any local changes')],
1928 1928 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
1929 1929 "^qpush":
1930 1930 (push,
1931 1931 [('f', 'force', None, 'apply if the patch has rejects'),
1932 1932 ('l', 'list', None, 'list patch name in commit text'),
1933 1933 ('a', 'all', None, 'apply all patches'),
1934 1934 ('m', 'merge', None, 'merge from another queue'),
1935 1935 ('n', 'name', '', 'merge queue name')],
1936 1936 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
1937 1937 "^qrefresh":
1938 1938 (refresh,
1939 1939 [('e', 'edit', None, _('edit commit message')),
1940 1940 ('m', 'message', '', _('change commit message with <text>')),
1941 1941 ('l', 'logfile', '', _('change commit message with <file> content')),
1942 1942 ('s', 'short', None, 'short refresh')],
1943 1943 'hg qrefresh [-e] [-m TEXT] [-l FILE] [-s]'),
1944 1944 'qrename|qmv':
1945 1945 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
1946 1946 "qrestore":
1947 1947 (restore,
1948 1948 [('d', 'delete', None, 'delete save entry'),
1949 1949 ('u', 'update', None, 'update queue working dir')],
1950 1950 'hg qrestore [-d] [-u] REV'),
1951 1951 "qsave":
1952 1952 (save,
1953 1953 [('m', 'message', '', _('use <text> as commit message')),
1954 1954 ('l', 'logfile', '', _('read the commit message from <file>')),
1955 1955 ('c', 'copy', None, 'copy patch directory'),
1956 1956 ('n', 'name', '', 'copy directory name'),
1957 1957 ('e', 'empty', None, 'clear queue status file'),
1958 1958 ('f', 'force', None, 'force copy')],
1959 1959 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
1960 1960 "qselect": (select,
1961 1961 [('n', 'none', None, _('disable all guards')),
1962 1962 ('s', 'series', None, _('list all guards in series file')),
1963 1963 ('', 'pop', None,
1964 1964 _('pop to before first guarded applied patch')),
1965 1965 ('', 'reapply', None, _('pop, then reapply patches'))],
1966 1966 'hg qselect [OPTION...] [GUARD...]'),
1967 1967 "qseries":
1968 1968 (series,
1969 1969 [('m', 'missing', None, 'print patches not in series'),
1970 1970 ('s', 'summary', None, _('print first line of patch header'))],
1971 1971 'hg qseries [-m] [-s]'),
1972 1972 "^strip":
1973 1973 (strip,
1974 1974 [('f', 'force', None, 'force multi-head removal'),
1975 1975 ('b', 'backup', None, 'bundle unrelated changesets'),
1976 1976 ('n', 'nobackup', None, 'no backups')],
1977 1977 'hg strip [-f] [-b] [-n] REV'),
1978 1978 "qtop": (top, [], 'hg qtop'),
1979 1979 "qunapplied": (unapplied, [], 'hg qunapplied [PATCH]'),
1980 1980 }
@@ -1,276 +1,276 b''
1 1 # notify.py - email notifications for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # hook extension to email notifications to people when changesets are
9 9 # committed to a repo they subscribe to.
10 10 #
11 11 # default mode is to print messages to stdout, for testing and
12 12 # configuring.
13 13 #
14 14 # to use, configure notify extension and enable in hgrc like this:
15 15 #
16 16 # [extensions]
17 17 # hgext.notify =
18 18 #
19 19 # [hooks]
20 20 # # one email for each incoming changeset
21 21 # incoming.notify = python:hgext.notify.hook
22 22 # # batch emails when many changesets incoming at one time
23 23 # changegroup.notify = python:hgext.notify.hook
24 24 #
25 25 # [notify]
26 26 # # config items go in here
27 27 #
28 28 # config items:
29 29 #
30 30 # REQUIRED:
31 31 # config = /path/to/file # file containing subscriptions
32 32 #
33 33 # OPTIONAL:
34 34 # test = True # print messages to stdout for testing
35 35 # strip = 3 # number of slashes to strip for url paths
36 36 # domain = example.com # domain to use if committer missing domain
37 37 # style = ... # style file to use when formatting email
38 38 # template = ... # template to use when formatting email
39 39 # incoming = ... # template to use when run as incoming hook
40 40 # changegroup = ... # template when run as changegroup hook
41 41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
42 42 # maxsubject = 67 # truncate subject line longer than this
43 43 # sources = serve # notify if source of incoming changes in this list
44 44 # # (serve == ssh or http, push, pull, bundle)
45 45 # [email]
46 46 # from = user@host.com # email address to send as if none given
47 47 # [web]
48 48 # baseurl = http://hgserver/... # root of hg web site for browsing commits
49 49 #
50 50 # notify config file has same format as regular hgrc. it has two
51 51 # sections so you can express subscriptions in whatever way is handier
52 52 # for you.
53 53 #
54 54 # [usersubs]
55 55 # # key is subscriber email, value is ","-separated list of glob patterns
56 56 # user@host = pattern
57 57 #
58 58 # [reposubs]
59 59 # # key is glob pattern, value is ","-separated list of subscriber emails
60 60 # pattern = user@host
61 61 #
62 62 # glob patterns are matched against path to repo root.
63 63 #
64 64 # if you like, you can put notify config file in repo that users can
65 65 # push changes to, so they can manage their own subscriptions.
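#
# a concrete subscriptions file might look like this (paths and addresses
# are illustrative only):
#
# [usersubs]
# alice@example.com = */widgets
#
# [reposubs]
# * = qa-team@example.com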
66 66
67 67 from mercurial.demandload import *
68 68 from mercurial.i18n import gettext as _
69 69 from mercurial.node import *
70 demandload(globals(), 'email.Parser mercurial:commands,patch,templater,util')
71 demandload(globals(), 'fnmatch socket time')
70 demandload(globals(), 'mercurial:commands,patch,templater,util,mail')
71 demandload(globals(), 'email.Parser fnmatch socket time')
72 72
73 73 # template for single changeset can include email headers.
74 74 single_template = '''
75 75 Subject: changeset in {webroot}: {desc|firstline|strip}
76 76 From: {author}
77 77
78 78 changeset {node|short} in {root}
79 79 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
80 80 description:
81 81 \t{desc|tabindent|strip}
82 82 '''.lstrip()
83 83
84 84 # template for multiple changesets should not contain email headers,
85 85 # because only first set of headers will be used and result will look
86 86 # strange.
87 87 multiple_template = '''
88 88 changeset {node|short} in {root}
89 89 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
90 90 summary: {desc|firstline}
91 91 '''
92 92
93 93 deftemplates = {
94 94 'changegroup': multiple_template,
95 95 }
96 96
97 97 class notifier(object):
98 98 '''email notification class.'''
99 99
100 100 def __init__(self, ui, repo, hooktype):
101 101 self.ui = ui
102 102 cfg = self.ui.config('notify', 'config')
103 103 if cfg:
104 104 self.ui.readconfig(cfg)
105 105 self.repo = repo
106 106 self.stripcount = int(self.ui.config('notify', 'strip', 0))
107 107 self.root = self.strip(self.repo.root)
108 108 self.domain = self.ui.config('notify', 'domain')
109 109 self.sio = templater.stringio()
110 110 self.subs = self.subscribers()
111 111
112 112 mapfile = self.ui.config('notify', 'style')
113 113 template = (self.ui.config('notify', hooktype) or
114 114 self.ui.config('notify', 'template'))
115 115 self.t = templater.changeset_templater(self.ui, self.repo, mapfile,
116 116 self.sio)
117 117 if not mapfile and not template:
118 118 template = deftemplates.get(hooktype) or single_template
119 119 if template:
120 120 template = templater.parsestring(template, quoted=False)
121 121 self.t.use_template(template)
122 122
123 123 def strip(self, path):
124 124 '''strip leading slashes from local path, turn into web-safe path.'''
125 125
126 126 path = util.pconvert(path)
127 127 count = self.stripcount
128 128 while count > 0:
129 129 c = path.find('/')
130 130 if c == -1:
131 131 break
132 132 path = path[c+1:]
133 133 count -= 1
134 134 return path
135 135
136 136 def fixmail(self, addr):
137 137 '''try to clean up email addresses.'''
138 138
139 139 addr = templater.email(addr.strip())
140 140 a = addr.find('@localhost')
141 141 if a != -1:
142 142 addr = addr[:a]
143 143 if '@' not in addr:
144 144 return addr + '@' + self.domain
145 145 return addr
146 146
147 147 def subscribers(self):
148 148 '''return list of email addresses of subscribers to this repo.'''
149 149
150 150 subs = {}
151 151 for user, pats in self.ui.configitems('usersubs'):
152 152 for pat in pats.split(','):
153 153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
154 154 subs[self.fixmail(user)] = 1
155 155 for pat, users in self.ui.configitems('reposubs'):
156 156 if fnmatch.fnmatch(self.repo.root, pat):
157 157 for user in users.split(','):
158 158 subs[self.fixmail(user)] = 1
159 159 subs = subs.keys()
160 160 subs.sort()
161 161 return subs
162 162
163 163 def url(self, path=None):
164 164 return self.ui.config('web', 'baseurl') + (path or self.root)
165 165
166 166 def node(self, node):
167 167 '''format one changeset.'''
168 168
169 169 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
170 170 baseurl=self.ui.config('web', 'baseurl'),
171 171 root=self.repo.root,
172 172 webroot=self.root)
173 173
174 174 def skipsource(self, source):
175 175 '''true if incoming changes from this source should be skipped.'''
176 176 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
177 177 return source not in ok_sources
178 178
179 179 def send(self, node, count):
180 180 '''send message.'''
181 181
182 182 p = email.Parser.Parser()
183 183 self.sio.seek(0)
184 184 msg = p.parse(self.sio)
185 185
186 186 def fix_subject():
187 187 '''try to make subject line exist and be useful.'''
188 188
189 189 subject = msg['Subject']
190 190 if not subject:
191 191 if count > 1:
192 192 subject = _('%s: %d new changesets') % (self.root, count)
193 193 else:
194 194 changes = self.repo.changelog.read(node)
195 195 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
196 196 subject = '%s: %s' % (self.root, s)
197 197 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
198 198 if maxsubject and len(subject) > maxsubject:
199 199 subject = subject[:maxsubject-3] + '...'
200 200 del msg['Subject']
201 201 msg['Subject'] = subject
202 202
203 203 def fix_sender():
204 204 '''try to make message have proper sender.'''
205 205
206 206 sender = msg['From']
207 207 if not sender:
208 208 sender = self.ui.config('email', 'from') or self.ui.username()
209 209 if '@' not in sender or '@localhost' in sender:
210 210 sender = self.fixmail(sender)
211 211 del msg['From']
212 212 msg['From'] = sender
213 213
214 214 fix_subject()
215 215 fix_sender()
216 216
217 217 msg['X-Hg-Notification'] = 'changeset ' + short(node)
218 218 if not msg['Message-Id']:
219 219 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
220 220 (short(node), int(time.time()),
221 221 hash(self.repo.root), socket.getfqdn()))
222 222 msg['To'] = ', '.join(self.subs)
223 223
224 224 msgtext = msg.as_string(0)
225 225 if self.ui.configbool('notify', 'test', True):
226 226 self.ui.write(msgtext)
227 227 if not msgtext.endswith('\n'):
228 228 self.ui.write('\n')
229 229 else:
230 230 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
231 231 (len(self.subs), count))
232 mail = self.ui.sendmail()
233 mail.sendmail(templater.email(msg['From']), self.subs, msgtext)
232 mail.sendmail(self.ui, templater.email(msg['From']),
233 self.subs, msgtext)
234 234
235 235 def diff(self, node, ref):
236 236 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
237 237 if maxdiff == 0:
238 238 return
239 239 fp = templater.stringio()
240 240 prev = self.repo.changelog.parents(node)[0]
241 241 patch.diff(self.repo, fp, prev, ref)
242 242 difflines = fp.getvalue().splitlines(1)
243 243 if maxdiff > 0 and len(difflines) > maxdiff:
244 244 self.sio.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
245 245 (len(difflines), maxdiff))
246 246 difflines = difflines[:maxdiff]
247 247 elif difflines:
248 248 self.sio.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
249 249 self.sio.write(*difflines)
250 250
251 251 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
252 252 '''send email notifications to interested subscribers.
253 253
254 254 if used as changegroup hook, send one email for all changesets in
255 255 changegroup. else send one email per changeset.'''
256 256 n = notifier(ui, repo, hooktype)
257 257 if not n.subs:
258 258 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
259 259 return
260 260 if n.skipsource(source):
261 261 ui.debug(_('notify: changes have source "%s" - skipping\n') %
262 262 source)
263 263 return
264 264 node = bin(node)
265 265 if hooktype == 'changegroup':
266 266 start = repo.changelog.rev(node)
267 267 end = repo.changelog.count()
268 268 count = end - start
269 269 for rev in xrange(start, end):
270 270 n.node(repo.changelog.node(rev))
271 271 n.diff(node, repo.changelog.tip())
272 272 else:
273 273 count = 1
274 274 n.node(node)
275 275 n.diff(node, node)
276 276 n.send(node, count)
@@ -1,309 +1,309 b''
1 1 # Command for sending a collection of Mercurial changesets as a series
2 2 # of patch emails.
3 3 #
4 4 # The series is started off with a "[PATCH 0 of N]" introduction,
5 5 # which describes the series as a whole.
6 6 #
7 7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
8 8 # the first line of the changeset description as the subject text.
9 9 # The message contains two or three body parts:
10 10 #
11 11 # The remainder of the changeset description.
12 12 #
13 13 # [Optional] If the diffstat program is installed, the result of
14 14 # running diffstat on the patch.
15 15 #
16 16 # The patch itself, as generated by "hg export".
17 17 #
18 18 # Each message refers to all of its predecessors using the In-Reply-To
19 19 # and References headers, so they will show up as a sequence in
20 20 # threaded mail and news readers, and in mail archives.
21 21 #
22 22 # For each changeset, you will be prompted with a diffstat summary and
23 23 # the changeset summary, so you can be sure you are sending the right
24 24 # changes.
25 25 #
26 26 # It is best to run this script with the "-n" (test only) flag before
27 27 # firing it up "for real", in which case it will use your pager to
28 28 # display each of the messages that it would send.
29 29 #
30 30 # The "-m" (mbox) option will create an mbox file instead of sending
31 31 # the messages directly. This can be reviewed e.g. with "mutt -R -f mbox",
32 32 # and finally sent with "formail -s sendmail -bm -t < mbox".
33 33 #
34 34 # To configure other defaults, add a section like this to your hgrc
35 35 # file:
36 36 #
37 37 # [email]
38 38 # from = My Name <my@email>
39 39 # to = recipient1, recipient2, ...
40 40 # cc = cc1, cc2, ...
41 41 # bcc = bcc1, bcc2, ...
42 42
43 43 from mercurial.demandload import *
44 44 demandload(globals(), '''email.MIMEMultipart email.MIMEText email.Utils
45 45 mercurial:commands,hg,ui
46 46 os errno popen2 socket sys tempfile time''')
47 47 from mercurial.i18n import gettext as _
48 48 from mercurial.node import *
49 49
50 50 try:
51 51 # readline gives raw_input editing capabilities, but is not
52 52 # present on windows
53 53 import readline
54 54 except ImportError: pass
55 55
56 56 def diffstat(patch):
57 57 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
58 58 try:
59 59 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
60 60 try:
61 61 for line in patch: print >> p.tochild, line
62 62 p.tochild.close()
63 63 if p.wait(): return
64 64 fp = os.fdopen(fd, 'r')
65 65 stat = []
66 66 for line in fp: stat.append(line.lstrip())
67 67 last = stat.pop()
68 68 stat.insert(0, last)
69 69 stat = ''.join(stat)
70 70 if stat.startswith('0 files'): raise ValueError
71 71 return stat
72 72 except: raise
73 73 finally:
74 74 try: os.unlink(name)
75 75 except: pass
76 76
77 77 def patchbomb(ui, repo, *revs, **opts):
78 78 '''send changesets as a series of patch emails
79 79
80 80 The series starts with a "[PATCH 0 of N]" introduction, which
81 81 describes the series as a whole.
82 82
83 83 Each patch email has a Subject line of "[PATCH M of N] ...", using
84 84 the first line of the changeset description as the subject text.
85 85 The message contains two or three body parts. First, the rest of
86 86 the changeset description. Next, (optionally) if the diffstat
87 87 program is installed, the result of running diffstat on the patch.
88 88 Finally, the patch itself, as generated by "hg export".'''
89 89 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
90 90 if default: prompt += ' [%s]' % default
91 91 prompt += rest
92 92 while True:
93 93 r = raw_input(prompt)
94 94 if r: return r
95 95 if default is not None: return default
96 96 if empty_ok: return r
97 97 ui.warn(_('Please enter a valid value.\n'))
98 98
99 99 def confirm(s):
100 100 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
101 101 raise ValueError
102 102
103 103 def cdiffstat(summary, patch):
104 104 s = diffstat(patch)
105 105 if s:
106 106 if summary:
107 107 ui.write(summary, '\n')
108 108 ui.write(s, '\n')
109 109 confirm(_('Does the diffstat above look okay'))
110 110 return s
111 111
112 112 def makepatch(patch, idx, total):
113 113 desc = []
114 114 node = None
115 115 body = ''
116 116 for line in patch:
117 117 if line.startswith('#'):
118 118 if line.startswith('# Node ID'): node = line.split()[-1]
119 119 continue
120 120 if line.startswith('diff -r'): break
121 121 desc.append(line)
122 122 if not node: raise ValueError
123 123
124 124 #body = ('\n'.join(desc[1:]).strip() or
125 125 # 'Patch subject is complete summary.')
126 126 #body += '\n\n\n'
127 127
128 128 if opts['plain']:
129 129 while patch and patch[0].startswith('# '): patch.pop(0)
130 130 if patch: patch.pop(0)
131 131 while patch and not patch[0].strip(): patch.pop(0)
132 132 if opts['diffstat']:
133 133 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
134 134 if opts['attach']:
135 135 msg = email.MIMEMultipart.MIMEMultipart()
136 136 if body: msg.attach(email.MIMEText.MIMEText(body, 'plain'))
137 137 p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
138 138 binnode = bin(node)
139 139 # if node is mq patch, it will have patch file name as tag
140 140 patchname = [t for t in repo.nodetags(binnode)
141 141 if t.endswith('.patch') or t.endswith('.diff')]
142 142 if patchname:
143 143 patchname = patchname[0]
144 144 elif total > 1:
145 145 patchname = commands.make_filename(repo, '%b-%n.patch',
146 146 binnode, idx, total)
147 147 else:
148 148 patchname = commands.make_filename(repo, '%b.patch', binnode)
149 149 p['Content-Disposition'] = 'inline; filename=' + patchname
150 150 msg.attach(p)
151 151 else:
152 152 body += '\n'.join(patch)
153 153 msg = email.MIMEText.MIMEText(body)
154 154 if total == 1:
155 155 subj = '[PATCH] ' + desc[0].strip()
156 156 else:
157 157 subj = '[PATCH %d of %d] %s' % (idx, total, desc[0].strip())
158 158 if subj.endswith('.'): subj = subj[:-1]
159 159 msg['Subject'] = subj
160 160 msg['X-Mercurial-Node'] = node
161 161 return msg
162 162
163 163 start_time = int(time.time())
164 164
165 165 def genmsgid(id):
166 166 return '<%s.%s@%s>' % (id[:20], start_time, socket.getfqdn())
167 167
168 168 patches = []
169 169
170 170 class exportee:
171 171 def __init__(self, container):
172 172 self.lines = []
173 173 self.container = container
174 174 self.name = 'email'
175 175
176 176 def write(self, data):
177 177 self.lines.append(data)
178 178
179 179 def close(self):
180 180 self.container.append(''.join(self.lines).split('\n'))
181 181 self.lines = []
182 182
183 183 commands.export(ui, repo, *revs, **{'output': exportee(patches),
184 184 'switch_parent': False,
185 185 'text': None})
186 186
187 187 jumbo = []
188 188 msgs = []
189 189
190 190 ui.write(_('This patch series consists of %d patches.\n\n') % len(patches))
191 191
192 192 for p, i in zip(patches, range(len(patches))):
193 193 jumbo.extend(p)
194 194 msgs.append(makepatch(p, i + 1, len(patches)))
195 195
196 196 sender = (opts['from'] or ui.config('email', 'from') or
197 197 ui.config('patchbomb', 'from') or
198 198 prompt('From', ui.username()))
199 199
200 200 def getaddrs(opt, prpt, default = None):
201 201 addrs = opts[opt] or (ui.config('email', opt) or
202 202 ui.config('patchbomb', opt) or
203 203 prompt(prpt, default = default)).split(',')
204 204 return [a.strip() for a in addrs if a.strip()]
205 205 to = getaddrs('to', 'To')
206 206 cc = getaddrs('cc', 'Cc', '')
207 207
208 208 bcc = opts['bcc'] or (ui.config('email', 'bcc') or
209 209 ui.config('patchbomb', 'bcc') or '').split(',')
210 210 bcc = [a.strip() for a in bcc if a.strip()]
211 211
212 212 if len(patches) > 1:
213 213 ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
214 214
215 215 subj = '[PATCH 0 of %d] %s' % (
216 216 len(patches),
217 217 opts['subject'] or
218 218 prompt('Subject:', rest = ' [PATCH 0 of %d] ' % len(patches)))
219 219
220 220 ui.write(_('Finish with ^D or a dot on a line by itself.\n\n'))
221 221
222 222 body = []
223 223
224 224 while True:
225 225 try: l = raw_input()
226 226 except EOFError: break
227 227 if l == '.': break
228 228 body.append(l)
229 229
230 230 if opts['diffstat']:
231 231 d = cdiffstat(_('Final summary:\n'), jumbo)
232 232 if d: body.append('\n' + d)
233 233
234 234 body = '\n'.join(body) + '\n'
235 235
236 236 msg = email.MIMEText.MIMEText(body)
237 237 msg['Subject'] = subj
238 238
239 239 msgs.insert(0, msg)
240 240
241 241 ui.write('\n')
242 242
243 243 if not opts['test'] and not opts['mbox']:
244 mail = ui.sendmail()
244 mailer = mail.connect(ui)
245 245 parent = None
246 246
247 247 # Calculate UTC offset
248 248 if time.daylight: offset = time.altzone
249 249 else: offset = time.timezone
250 250 if offset <= 0: sign, offset = '+', -offset
251 251 else: sign = '-'
252 252 offset = '%s%02d%02d' % (sign, offset / 3600, (offset % 3600) / 60)
253 253
254 254 sender_addr = email.Utils.parseaddr(sender)[1]
255 255 for m in msgs:
256 256 try:
257 257 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
258 258 except TypeError:
259 259 m['Message-Id'] = genmsgid('patchbomb')
260 260 if parent:
261 261 m['In-Reply-To'] = parent
262 262 else:
263 263 parent = m['Message-Id']
264 264 m['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(start_time)) + ' ' + offset
265 265
266 266 start_time += 1
267 267 m['From'] = sender
268 268 m['To'] = ', '.join(to)
269 269 if cc: m['Cc'] = ', '.join(cc)
270 270 if bcc: m['Bcc'] = ', '.join(bcc)
271 271 if opts['test']:
272 272 ui.status('Displaying ', m['Subject'], ' ...\n')
273 273 fp = os.popen(os.getenv('PAGER', 'more'), 'w')
274 274 try:
275 275 fp.write(m.as_string(0))
276 276 fp.write('\n')
277 277 except IOError, inst:
278 278 if inst.errno != errno.EPIPE:
279 279 raise
280 280 fp.close()
281 281 elif opts['mbox']:
282 282 ui.status('Writing ', m['Subject'], ' ...\n')
283 283 fp = open(opts['mbox'], m.has_key('In-Reply-To') and 'ab+' or 'wb+')
284 284 date = time.asctime(time.localtime(start_time))
285 285 fp.write('From %s %s\n' % (sender_addr, date))
286 286 fp.write(m.as_string(0))
287 287 fp.write('\n\n')
288 288 fp.close()
289 289 else:
290 290 ui.status('Sending ', m['Subject'], ' ...\n')
291 291 # Exim does not remove the Bcc field
292 292 del m['Bcc']
293 mail.sendmail(sender, to + bcc + cc, m.as_string(0))
293 mailer.sendmail(sender, to + bcc + cc, m.as_string(0))
294 294
295 295 cmdtable = {
296 296 'email':
297 297 (patchbomb,
298 298 [('a', 'attach', None, 'send patches as inline attachments'),
299 299 ('', 'bcc', [], 'email addresses of blind copy recipients'),
300 300 ('c', 'cc', [], 'email addresses of copy recipients'),
301 301 ('d', 'diffstat', None, 'add diffstat output to messages'),
302 302 ('f', 'from', '', 'email address of sender'),
303 303 ('', 'plain', None, 'omit hg patch header'),
304 304 ('n', 'test', None, 'print messages that would be sent'),
305 305 ('m', 'mbox', '', 'write messages to mbox file instead of sending them'),
306 306 ('s', 'subject', '', 'subject of introductory message'),
307 307 ('t', 'to', [], 'email addresses of recipients')],
308 308 "hg email [OPTION]... [REV]...")
309 309 }
@@ -1,3521 +1,3522 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 13 demandload(globals(), "fnmatch difflib patch random signal tempfile time")
14 14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 15 demandload(globals(), "archival cStringIO changegroup")
16 16 demandload(globals(), "cmdutil hgweb.server sshserver")
17 17
18 18 class UnknownCommand(Exception):
19 19 """Exception raised if command is not in the command table."""
20 20 class AmbiguousCommand(Exception):
21 21 """Exception raised if command shortcut matches more than one command."""
22 22
23 23 def bail_if_changed(repo):
24 24 modified, added, removed, deleted = repo.status()[:4]
25 25 if modified or added or removed or deleted:
26 26 raise util.Abort(_("outstanding uncommitted changes"))
27 27
28 28 def relpath(repo, args):
29 29 cwd = repo.getcwd()
30 30 if cwd:
31 31 return [util.normpath(os.path.join(cwd, x)) for x in args]
32 32 return args
33 33
34 34 def logmessage(opts):
35 35 """ get the log message according to -m and -l option """
36 36 message = opts['message']
37 37 logfile = opts['logfile']
38 38
39 39 if message and logfile:
40 40 raise util.Abort(_('options --message and --logfile are mutually '
41 41 'exclusive'))
42 42 if not message and logfile:
43 43 try:
44 44 if logfile == '-':
45 45 message = sys.stdin.read()
46 46 else:
47 47 message = open(logfile).read()
48 48 except IOError, inst:
49 49 raise util.Abort(_("can't read commit message '%s': %s") %
50 50 (logfile, inst.strerror))
51 51 return message
52 52
53 53 def walkchangerevs(ui, repo, pats, opts):
54 54 '''Iterate over files and the revs they changed in.
55 55
56 56 Callers most commonly need to iterate backwards over the history
57 57 they are interested in. Doing so has awful (quadratic-looking)
58 58 performance, so we use iterators in a "windowed" way.
59 59
60 60 We walk a window of revisions in the desired order. Within the
61 61 window, we first walk forwards to gather data, then in the desired
62 62 order (usually backwards) to display it.
63 63
64 64 This function returns an (iterator, getchange, matchfn) tuple. The
65 65 getchange function returns the changelog entry for a numeric
66 66 revision. The iterator yields 3-tuples. They will be of one of
67 67 the following forms:
68 68
69 69 "window", incrementing, lastrev: stepping through a window,
70 70 positive if walking forwards through revs, last rev in the
71 71 sequence iterated over - use to reset state for the current window
72 72
73 73 "add", rev, fns: out-of-order traversal of the given file names
74 74 fns, which changed during revision rev - use to gather data for
75 75 possible display
76 76
77 77 "iter", rev, None: in-order traversal of the revs earlier iterated
78 78 over with "add" - use to display data'''
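# illustrative consumer sketch of the protocol documented above (not
# executed here; variable names are made up):
#   changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
#   for st, rev, data in changeiter:
#       if st == 'window':
#           ...reset any per-window state...
#       elif st == 'add':
#           ...gather data for rev; data lists the files that changed...
#       elif st == 'iter':
#           ...display the data gathered for rev...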
79 79
80 80 def increasing_windows(start, end, windowsize=8, sizelimit=512):
81 81 if start < end:
82 82 while start < end:
83 83 yield start, min(windowsize, end-start)
84 84 start += windowsize
85 85 if windowsize < sizelimit:
86 86 windowsize *= 2
87 87 else:
88 88 while start > end:
89 89 yield start, min(windowsize, start-end-1)
90 90 start -= windowsize
91 91 if windowsize < sizelimit:
92 92 windowsize *= 2
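# worked example, traced from the generator above:
#   increasing_windows(0, 20)  yields (0, 8) then (8, 12)
#   increasing_windows(20, -1) yields (20, 8) then (12, 12)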
93 93
94 94
95 95 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
96 96 follow = opts.get('follow') or opts.get('follow_first')
97 97
98 98 if repo.changelog.count() == 0:
99 99 return [], False, matchfn
100 100
101 101 if follow:
102 102 p = repo.dirstate.parents()[0]
103 103 if p == nullid:
104 104 ui.warn(_('No working directory revision; defaulting to tip\n'))
105 105 start = 'tip'
106 106 else:
107 107 start = repo.changelog.rev(p)
108 108 defrange = '%s:0' % start
109 109 else:
110 110 defrange = 'tip:0'
111 111 revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
112 112 wanted = {}
113 113 slowpath = anypats
114 114 fncache = {}
115 115
116 116 chcache = {}
117 117 def getchange(rev):
118 118 ch = chcache.get(rev)
119 119 if ch is None:
120 120 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
121 121 return ch
122 122
123 123 if not slowpath and not files:
124 124 # No files, no patterns. Display all revs.
125 125 wanted = dict(zip(revs, revs))
126 126 copies = []
127 127 if not slowpath:
128 128 # Only files, no patterns. Check the history of each file.
129 129 def filerevgen(filelog, node):
130 130 cl_count = repo.changelog.count()
131 131 if node is None:
132 132 last = filelog.count() - 1
133 133 else:
134 134 last = filelog.rev(node)
135 135 for i, window in increasing_windows(last, -1):
136 136 revs = []
137 137 for j in xrange(i - window, i + 1):
138 138 n = filelog.node(j)
139 139 revs.append((filelog.linkrev(n),
140 140 follow and filelog.renamed(n)))
141 141 revs.reverse()
142 142 for rev in revs:
143 143 # only yield rev for which we have the changelog, it can
144 144 # happen while doing "hg log" during a pull or commit
145 145 if rev[0] < cl_count:
146 146 yield rev
147 147 def iterfiles():
148 148 for filename in files:
149 149 yield filename, None
150 150 for filename_node in copies:
151 151 yield filename_node
152 152 minrev, maxrev = min(revs), max(revs)
153 153 for file_, node in iterfiles():
154 154 filelog = repo.file(file_)
155 155 # A zero count may be a directory or deleted file, so
156 156 # try to find matching entries on the slow path.
157 157 if filelog.count() == 0:
158 158 slowpath = True
159 159 break
160 160 for rev, copied in filerevgen(filelog, node):
161 161 if rev <= maxrev:
162 162 if rev < minrev:
163 163 break
164 164 fncache.setdefault(rev, [])
165 165 fncache[rev].append(file_)
166 166 wanted[rev] = 1
167 167 if follow and copied:
168 168 copies.append(copied)
169 169 if slowpath:
170 170 if follow:
171 171 raise util.Abort(_('can only follow copies/renames for explicit '
172 172 'file names'))
173 173
174 174 # The slow path checks files modified in every changeset.
175 175 def changerevgen():
176 176 for i, window in increasing_windows(repo.changelog.count()-1, -1):
177 177 for j in xrange(i - window, i + 1):
178 178 yield j, getchange(j)[3]
179 179
180 180 for rev, changefiles in changerevgen():
181 181 matches = filter(matchfn, changefiles)
182 182 if matches:
183 183 fncache[rev] = matches
184 184 wanted[rev] = 1
185 185
186 186 class followfilter:
187 187 def __init__(self, onlyfirst=False):
188 188 self.startrev = -1
189 189 self.roots = []
190 190 self.onlyfirst = onlyfirst
191 191
192 192 def match(self, rev):
193 193 def realparents(rev):
194 194 if self.onlyfirst:
195 195 return repo.changelog.parentrevs(rev)[0:1]
196 196 else:
197 197 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
198 198
199 199 if self.startrev == -1:
200 200 self.startrev = rev
201 201 return True
202 202
203 203 if rev > self.startrev:
204 204 # forward: all descendants
205 205 if not self.roots:
206 206 self.roots.append(self.startrev)
207 207 for parent in realparents(rev):
208 208 if parent in self.roots:
209 209 self.roots.append(rev)
210 210 return True
211 211 else:
212 212 # backwards: all parents
213 213 if not self.roots:
214 214 self.roots.extend(realparents(self.startrev))
215 215 if rev in self.roots:
216 216 self.roots.remove(rev)
217 217 self.roots.extend(realparents(rev))
218 218 return True
219 219
220 220 return False
221 221
222 222 # it might be worthwhile to do this in the iterator if the rev range
223 223 # is descending and the prune args are all within that range
224 224 for rev in opts.get('prune', ()):
225 225 rev = repo.changelog.rev(repo.lookup(rev))
226 226 ff = followfilter()
227 227 stop = min(revs[0], revs[-1])
228 228 for x in range(rev, stop-1, -1):
229 229 if ff.match(x) and wanted.has_key(x):
230 230 del wanted[x]
231 231
232 232 def iterate():
233 233 if follow and not files:
234 234 ff = followfilter(onlyfirst=opts.get('follow_first'))
235 235 def want(rev):
236 236 if ff.match(rev) and rev in wanted:
237 237 return True
238 238 return False
239 239 else:
240 240 def want(rev):
241 241 return rev in wanted
242 242
243 243 for i, window in increasing_windows(0, len(revs)):
244 244 yield 'window', revs[0] < revs[-1], revs[-1]
245 245 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
246 246 srevs = list(nrevs)
247 247 srevs.sort()
248 248 for rev in srevs:
249 249 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
250 250 yield 'add', rev, fns
251 251 for rev in nrevs:
252 252 yield 'iter', rev, None
253 253 return iterate(), getchange, matchfn
254 254
255 255 revrangesep = ':'
256 256
257 257 def revfix(repo, val, defval):
258 258 '''turn user-level id of changeset into rev number.
259 259 user-level id can be tag, changeset, rev number, or negative rev
260 260 number relative to number of revs (-1 is tip, etc).'''
261 261 if not val:
262 262 return defval
263 263 try:
264 264 num = int(val)
265 265 if str(num) != val:
266 266 raise ValueError
267 267 if num < 0:
268 268 num += repo.changelog.count()
269 269 if num < 0:
270 270 num = 0
271 271 elif num >= repo.changelog.count():
272 272 raise ValueError
273 273 except ValueError:
274 274 try:
275 275 num = repo.changelog.rev(repo.lookup(val))
276 276 except KeyError:
277 277 raise util.Abort(_('invalid revision identifier %s'), val)
278 278 return num
279 279
280 280 def revpair(ui, repo, revs):
281 281 '''return pair of nodes, given list of revisions. second item can
282 282 be None, meaning use working dir.'''
283 283 if not revs:
284 284 return repo.dirstate.parents()[0], None
285 285 end = None
286 286 if len(revs) == 1:
287 287 start = revs[0]
288 288 if revrangesep in start:
289 289 start, end = start.split(revrangesep, 1)
290 290 start = revfix(repo, start, 0)
291 291 end = revfix(repo, end, repo.changelog.count() - 1)
292 292 else:
293 293 start = revfix(repo, start, None)
294 294 elif len(revs) == 2:
295 295 if revrangesep in revs[0] or revrangesep in revs[1]:
296 296 raise util.Abort(_('too many revisions specified'))
297 297 start = revfix(repo, revs[0], None)
298 298 end = revfix(repo, revs[1], None)
299 299 else:
300 300 raise util.Abort(_('too many revisions specified'))
301 301 if end is not None: end = repo.lookup(str(end))
302 302 return repo.lookup(str(start)), end
303 303
304 304 def revrange(ui, repo, revs):
305 305 """Yield revision as strings from a list of revision specifications."""
306 306 seen = {}
307 307 for spec in revs:
308 308 if revrangesep in spec:
309 309 start, end = spec.split(revrangesep, 1)
310 310 start = revfix(repo, start, 0)
311 311 end = revfix(repo, end, repo.changelog.count() - 1)
312 312 step = start > end and -1 or 1
313 313 for rev in xrange(start, end+step, step):
314 314 if rev in seen:
315 315 continue
316 316 seen[rev] = 1
317 317 yield str(rev)
318 318 else:
319 319 rev = revfix(repo, spec, None)
320 320 if rev in seen:
321 321 continue
322 322 seen[rev] = 1
323 323 yield str(rev)
324 324
325 325 def write_bundle(cg, filename=None, compress=True):
326 326 """Write a bundle file and return its filename.
327 327
328 328 Existing files will not be overwritten.
329 329 If no filename is specified, a temporary file is created.
330 330 bz2 compression can be turned off.
331 331 The bundle file will be deleted in case of errors.
332 332 """
333 333 class nocompress(object):
334 334 def compress(self, x):
335 335 return x
336 336 def flush(self):
337 337 return ""
338 338
339 339 fh = None
340 340 cleanup = None
341 341 try:
342 342 if filename:
343 343 if os.path.exists(filename):
344 344 raise util.Abort(_("file '%s' already exists"), filename)
345 345 fh = open(filename, "wb")
346 346 else:
347 347 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
348 348 fh = os.fdopen(fd, "wb")
349 349 cleanup = filename
350 350
351 351 if compress:
352 352 fh.write("HG10")
353 353 z = bz2.BZ2Compressor(9)
354 354 else:
355 355 fh.write("HG10UN")
356 356 z = nocompress()
357 357 # parse the changegroup data, otherwise we will block
358 358 # in case of sshrepo because we don't know the end of the stream
359 359
360 360 # an empty chunkiter is the end of the changegroup
361 361 empty = False
362 362 while not empty:
363 363 empty = True
364 364 for chunk in changegroup.chunkiter(cg):
365 365 empty = False
366 366 fh.write(z.compress(changegroup.genchunk(chunk)))
367 367 fh.write(z.compress(changegroup.closechunk()))
368 368 fh.write(z.flush())
369 369 cleanup = None
370 370 return filename
371 371 finally:
372 372 if fh is not None:
373 373 fh.close()
374 374 if cleanup is not None:
375 375 os.unlink(cleanup)
376 376
377 377 def trimuser(ui, name, rev, revcache):
378 378 """trim the name of the user who committed a change"""
379 379 user = revcache.get(rev)
380 380 if user is None:
381 381 user = revcache[rev] = ui.shortuser(name)
382 382 return user
383 383
384 384 class changeset_printer(object):
385 385 '''show changeset information when templating not requested.'''
386 386
387 387 def __init__(self, ui, repo):
388 388 self.ui = ui
389 389 self.repo = repo
390 390
391 391 def show(self, rev=0, changenode=None, brinfo=None):
392 392 '''show a single changeset or file revision'''
393 393 log = self.repo.changelog
394 394 if changenode is None:
395 395 changenode = log.node(rev)
396 396 elif not rev:
397 397 rev = log.rev(changenode)
398 398
399 399 if self.ui.quiet:
400 400 self.ui.write("%d:%s\n" % (rev, short(changenode)))
401 401 return
402 402
403 403 changes = log.read(changenode)
404 404 date = util.datestr(changes[2])
405 405
406 406 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
407 407 for p in log.parents(changenode)
408 408 if self.ui.debugflag or p != nullid]
409 409 if (not self.ui.debugflag and len(parents) == 1 and
410 410 parents[0][0] == rev-1):
411 411 parents = []
412 412
413 413 if self.ui.verbose:
414 414 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
415 415 else:
416 416 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
417 417
418 418 for tag in self.repo.nodetags(changenode):
419 419 self.ui.status(_("tag: %s\n") % tag)
420 420 for parent in parents:
421 421 self.ui.write(_("parent: %d:%s\n") % parent)
422 422
423 423 if brinfo and changenode in brinfo:
424 424 br = brinfo[changenode]
425 425 self.ui.write(_("branch: %s\n") % " ".join(br))
426 426
427 427 self.ui.debug(_("manifest: %d:%s\n") %
428 428 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
429 429 self.ui.status(_("user: %s\n") % changes[1])
430 430 self.ui.status(_("date: %s\n") % date)
431 431
432 432 if self.ui.debugflag:
433 433 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
434 434 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
435 435 files):
436 436 if value:
437 437 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
438 438 else:
439 439 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
440 440
441 441 description = changes[4].strip()
442 442 if description:
443 443 if self.ui.verbose:
444 444 self.ui.status(_("description:\n"))
445 445 self.ui.status(description)
446 446 self.ui.status("\n\n")
447 447 else:
448 448 self.ui.status(_("summary: %s\n") %
449 449 description.splitlines()[0])
450 450 self.ui.status("\n")
451 451
452 452 def show_changeset(ui, repo, opts):
453 453 '''show one changeset. uses template or regular display. caller
454 454 can pass in 'style' and 'template' options in opts.'''
455 455
456 456 tmpl = opts.get('template')
457 457 if tmpl:
458 458 tmpl = templater.parsestring(tmpl, quoted=False)
459 459 else:
460 460 tmpl = ui.config('ui', 'logtemplate')
461 461 if tmpl: tmpl = templater.parsestring(tmpl)
462 462 mapfile = opts.get('style') or ui.config('ui', 'style')
463 463 if tmpl or mapfile:
464 464 if mapfile:
465 465 if not os.path.isfile(mapfile):
466 466 mapname = templater.templatepath('map-cmdline.' + mapfile)
467 467 if not mapname: mapname = templater.templatepath(mapfile)
468 468 if mapname: mapfile = mapname
469 469 try:
470 470 t = templater.changeset_templater(ui, repo, mapfile)
471 471 except SyntaxError, inst:
472 472 raise util.Abort(inst.args[0])
473 473 if tmpl: t.use_template(tmpl)
474 474 return t
475 475 return changeset_printer(ui, repo)
476 476
477 477 def setremoteconfig(ui, opts):
478 478 "copy remote options to ui tree"
479 479 if opts.get('ssh'):
480 480 ui.setconfig("ui", "ssh", opts['ssh'])
481 481 if opts.get('remotecmd'):
482 482 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
483 483
484 484 def show_version(ui):
485 485 """output version and copyright information"""
486 486 ui.write(_("Mercurial Distributed SCM (version %s)\n")
487 487 % version.get_version())
488 488 ui.status(_(
489 489 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
490 490 "This is free software; see the source for copying conditions. "
491 491 "There is NO\nwarranty; "
492 492 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
493 493 ))
494 494
495 495 def help_(ui, name=None, with_version=False):
496 496 """show help for a command, extension, or list of commands
497 497
498 498 With no arguments, print a list of commands and short help.
499 499
500 500 Given a command name, print help for that command.
501 501
502 502 Given an extension name, print help for that extension, and the
503 503 commands it provides."""
504 504 option_lists = []
505 505
506 506 def helpcmd(name):
507 507 if with_version:
508 508 show_version(ui)
509 509 ui.write('\n')
510 510 aliases, i = findcmd(name)
511 511 # synopsis
512 512 ui.write("%s\n\n" % i[2])
513 513
514 514 # description
515 515 doc = i[0].__doc__
516 516 if not doc:
517 517 doc = _("(No help text available)")
518 518 if ui.quiet:
519 519 doc = doc.splitlines(0)[0]
520 520 ui.write("%s\n" % doc.rstrip())
521 521
522 522 if not ui.quiet:
523 523 # aliases
524 524 if len(aliases) > 1:
525 525 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
526 526
527 527 # options
528 528 if i[1]:
529 529 option_lists.append(("options", i[1]))
530 530
531 531 def helplist(select=None):
532 532 h = {}
533 533 cmds = {}
534 534 for c, e in table.items():
535 535 f = c.split("|", 1)[0]
536 536 if select and not select(f):
537 537 continue
538 538 if name == "shortlist" and not f.startswith("^"):
539 539 continue
540 540 f = f.lstrip("^")
541 541 if not ui.debugflag and f.startswith("debug"):
542 542 continue
543 543 doc = e[0].__doc__
544 544 if not doc:
545 545 doc = _("(No help text available)")
546 546 h[f] = doc.splitlines(0)[0].rstrip()
547 547 cmds[f] = c.lstrip("^")
548 548
549 549 fns = h.keys()
550 550 fns.sort()
551 551 m = max(map(len, fns))
552 552 for f in fns:
553 553 if ui.verbose:
554 554 commands = cmds[f].replace("|",", ")
555 555 ui.write(" %s:\n %s\n"%(commands, h[f]))
556 556 else:
557 557 ui.write(' %-*s %s\n' % (m, f, h[f]))
558 558
559 559 def helpext(name):
560 560 try:
561 561 mod = findext(name)
562 562 except KeyError:
563 563 raise UnknownCommand(name)
564 564
565 565 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
566 566 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
567 567 for d in doc[1:]:
568 568 ui.write(d, '\n')
569 569
570 570 ui.status('\n')
571 571 if ui.verbose:
572 572 ui.status(_('list of commands:\n\n'))
573 573 else:
574 574 ui.status(_('list of commands (use "hg help -v %s" '
575 575 'to show aliases and global options):\n\n') % name)
576 576
577 577 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
578 578 helplist(modcmds.has_key)
579 579
580 580 if name and name != 'shortlist':
581 581 try:
582 582 helpcmd(name)
583 583 except UnknownCommand:
584 584 helpext(name)
585 585
586 586 else:
587 587 # program name
588 588 if ui.verbose or with_version:
589 589 show_version(ui)
590 590 else:
591 591 ui.status(_("Mercurial Distributed SCM\n"))
592 592 ui.status('\n')
593 593
594 594 # list of commands
595 595 if name == "shortlist":
596 596 ui.status(_('basic commands (use "hg help" '
597 597 'for the full list or option "-v" for details):\n\n'))
598 598 elif ui.verbose:
599 599 ui.status(_('list of commands:\n\n'))
600 600 else:
601 601 ui.status(_('list of commands (use "hg help -v" '
602 602 'to show aliases and global options):\n\n'))
603 603
604 604 helplist()
605 605
606 606 # global options
607 607 if ui.verbose:
608 608 option_lists.append(("global options", globalopts))
609 609
610 610 # list all option lists
611 611 opt_output = []
612 612 for title, options in option_lists:
613 613 opt_output.append(("\n%s:\n" % title, None))
614 614 for shortopt, longopt, default, desc in options:
615 615 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
616 616 longopt and " --%s" % longopt),
617 617 "%s%s" % (desc,
618 618 default
619 619 and _(" (default: %s)") % default
620 620 or "")))
621 621
622 622 if opt_output:
623 623 opts_len = max([len(line[0]) for line in opt_output if line[1]])
624 624 for first, second in opt_output:
625 625 if second:
626 626 ui.write(" %-*s %s\n" % (opts_len, first, second))
627 627 else:
628 628 ui.write("%s\n" % first)
629 629
630 630 # Commands start here, listed alphabetically
631 631
632 632 def add(ui, repo, *pats, **opts):
633 633 """add the specified files on the next commit
634 634
635 635 Schedule files to be version controlled and added to the repository.
636 636
637 637 The files will be added to the repository at the next commit.
638 638
639 639 If no names are given, add all files in the repository.
640 640 """
641 641
642 642 names = []
643 643 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
644 644 if exact:
645 645 if ui.verbose:
646 646 ui.status(_('adding %s\n') % rel)
647 647 names.append(abs)
648 648 elif repo.dirstate.state(abs) == '?':
649 649 ui.status(_('adding %s\n') % rel)
650 650 names.append(abs)
651 651 if not opts.get('dry_run'):
652 652 repo.add(names)
653 653
654 654 def addremove(ui, repo, *pats, **opts):
655 655 """add all new files, delete all missing files (DEPRECATED)
656 656
657 657 (DEPRECATED)
658 658 Add all new files and remove all missing files from the repository.
659 659
660 660 New files are ignored if they match any of the patterns in .hgignore. As
661 661 with add, these changes take effect at the next commit.
662 662
663 663 This command is now deprecated and will be removed in a future
664 664 release. Please use add and remove --after instead.
665 665 """
666 666 ui.warn(_('(the addremove command is deprecated; use add and remove '
667 667 '--after instead)\n'))
668 668 return cmdutil.addremove(repo, pats, opts)
669 669
670 670 def annotate(ui, repo, *pats, **opts):
671 671 """show changeset information per file line
672 672
673 673 List changes in files, showing the revision id responsible for each line
674 674
675 675 This command is useful to discover who did a change or when a change took
676 676 place.
677 677
678 678 Without the -a option, annotate will avoid processing files it
679 679 detects as binary. With -a, annotate will generate an annotation
680 680 anyway, probably with undesirable results.
681 681 """
682 682 def getnode(rev):
683 683 return short(repo.changelog.node(rev))
684 684
685 685 ucache = {}
686 686 def getname(rev):
687 687 try:
688 688 return ucache[rev]
689 689 except KeyError:
690 690 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
691 691 ucache[rev] = u
692 692 return u
693 693
694 694 dcache = {}
695 695 def getdate(rev):
696 696 datestr = dcache.get(rev)
697 697 if datestr is None:
698 698 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
699 699 return datestr
700 700
701 701 if not pats:
702 702 raise util.Abort(_('at least one file name or pattern required'))
703 703
704 704 opmap = [['user', getname], ['number', str], ['changeset', getnode],
705 705 ['date', getdate]]
706 706 if not opts['user'] and not opts['changeset'] and not opts['date']:
707 707 opts['number'] = 1
708 708
709 709 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
710 710
711 711 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
712 712 node=ctx.node()):
713 713 fctx = ctx.filectx(abs)
714 714 if not opts['text'] and util.binary(fctx.data()):
715 715 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
716 716 continue
717 717
718 718 lines = fctx.annotate()
719 719 pieces = []
720 720
721 721 for o, f in opmap:
722 722 if opts[o]:
723 723 l = [f(n) for n, dummy in lines]
724 724 if l:
725 725 m = max(map(len, l))
726 726 pieces.append(["%*s" % (m, x) for x in l])
727 727
728 728 if pieces:
729 729 for p, l in zip(zip(*pieces), lines):
730 730 ui.write("%s: %s" % (" ".join(p), l[1]))
731 731
732 732 def archive(ui, repo, dest, **opts):
733 733 '''create unversioned archive of a repository revision
734 734
735 735 By default, the revision used is the parent of the working
736 736 directory; use "-r" to specify a different revision.
737 737
738 738 To specify the type of archive to create, use "-t". Valid
739 739 types are:
740 740
741 741 "files" (default): a directory full of files
742 742 "tar": tar archive, uncompressed
743 743 "tbz2": tar archive, compressed using bzip2
744 744 "tgz": tar archive, compressed using gzip
745 745 "uzip": zip archive, uncompressed
746 746 "zip": zip archive, compressed using deflate
747 747
748 748 The exact name of the destination archive or directory is given
749 749 using a format string; see "hg help export" for details.
750 750
751 751 Each member added to an archive file has a directory prefix
752 752 prepended. Use "-p" to specify a format string for the prefix.
753 753 The default is the basename of the archive, with suffixes removed.
754 754 '''
755 755
756 756 if opts['rev']:
757 757 node = repo.lookup(opts['rev'])
758 758 else:
759 759 node, p2 = repo.dirstate.parents()
760 760 if p2 != nullid:
761 761 raise util.Abort(_('uncommitted merge - please provide a '
762 762 'specific revision'))
763 763
764 764 dest = cmdutil.make_filename(repo, dest, node)
765 765 if os.path.realpath(dest) == repo.root:
766 766 raise util.Abort(_('repository root cannot be destination'))
767 767 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
768 768 kind = opts.get('type') or 'files'
769 769 prefix = opts['prefix']
770 770 if dest == '-':
771 771 if kind == 'files':
772 772 raise util.Abort(_('cannot archive plain files to stdout'))
773 773 dest = sys.stdout
774 774 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
775 775 prefix = cmdutil.make_filename(repo, prefix, node)
776 776 archival.archive(repo, dest, node, kind, not opts['no_decode'],
777 777 matchfn, prefix)
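# usage sketch (illustrative, untested): combining the "-t", "-r" and "-p"
# options described in the docstring above, something like
#   hg archive -t tgz -r 1.0 -p myproj-%h myproj-%h.tar.gz
# should, assuming a "1.0" tag exists, write a gzipped tarball whose members
# carry a prefix built from the short changeset hash.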
778 778
779 779 def backout(ui, repo, rev, **opts):
780 780 '''reverse effect of earlier changeset
781 781
782 782 Commit the backed out changes as a new changeset. The new
783 783 changeset is a child of the backed out changeset.
784 784
785 785 If you back out a changeset other than the tip, a new head is
786 786 created. This head is the parent of the working directory. If
787 787 you back out an old changeset, your working directory will appear
788 788 old after the backout. You should merge the backout changeset
789 789 with another head.
790 790
791 791 The --merge option remembers the parent of the working directory
792 792 before starting the backout, then merges the new head with that
793 793 changeset afterwards. This saves you from doing the merge by
794 794 hand. The result of this merge is not committed, as for a normal
795 795 merge.'''
796 796
797 797 bail_if_changed(repo)
798 798 op1, op2 = repo.dirstate.parents()
799 799 if op2 != nullid:
800 800 raise util.Abort(_('outstanding uncommitted merge'))
801 801 node = repo.lookup(rev)
802 802 p1, p2 = repo.changelog.parents(node)
803 803 if p1 == nullid:
804 804 raise util.Abort(_('cannot back out a change with no parents'))
805 805 if p2 != nullid:
806 806 if not opts['parent']:
807 807 raise util.Abort(_('cannot back out a merge changeset without '
808 808 '--parent'))
809 809 p = repo.lookup(opts['parent'])
810 810 if p not in (p1, p2):
811 811 raise util.Abort(_('%s is not a parent of %s') %
812 812 (short(p), short(node)))
813 813 parent = p
814 814 else:
815 815 if opts['parent']:
816 816 raise util.Abort(_('cannot use --parent on non-merge changeset'))
817 817 parent = p1
818 818 hg.clean(repo, node, show_stats=False)
819 819 revert_opts = opts.copy()
820 820 revert_opts['rev'] = hex(parent)
821 821 revert(ui, repo, **revert_opts)
822 822 commit_opts = opts.copy()
823 823 commit_opts['addremove'] = False
824 824 if not commit_opts['message'] and not commit_opts['logfile']:
825 825 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
826 826 commit_opts['force_editor'] = True
827 827 commit(ui, repo, **commit_opts)
828 828 def nice(node):
829 829 return '%d:%s' % (repo.changelog.rev(node), short(node))
830 830 ui.status(_('changeset %s backs out changeset %s\n') %
831 831 (nice(repo.changelog.tip()), nice(node)))
832 832 if op1 != node:
833 833 if opts['merge']:
834 834 ui.status(_('merging with changeset %s\n') % nice(op1))
835 835 n = _lookup(repo, hex(op1))
836 836 hg.merge(repo, n)
837 837 else:
838 838 ui.status(_('the backout changeset is a new head - '
839 839 'do not forget to merge\n'))
840 840 ui.status(_('(use "backout --merge" '
841 841 'if you want to auto-merge)\n'))
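# usage sketch (illustrative revision number): backing out an old changeset
# and folding the result back into the current line of development:
#   hg backout --merge 830
#   hg commit -m "merge backout of 830"
# (the extra commit is needed because, as noted above, the merge performed
# by --merge is left uncommitted)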
842 842
843 843 def bundle(ui, repo, fname, dest=None, **opts):
844 844 """create a changegroup file
845 845
846 846 Generate a compressed changegroup file collecting all changesets
847 847 not found in the other repository.
848 848
849 849 This file can then be transferred using conventional means and
850 850 applied to another repository with the unbundle command. This is
851 851 useful when native push and pull are not available or when
852 852 exporting an entire repository is undesirable. The standard file
853 853 extension is ".hg".
854 854
855 855 Unlike import/export, this exactly preserves all changeset
856 856 contents including permissions, rename data, and revision history.
857 857 """
858 858 dest = ui.expandpath(dest or 'default-push', dest or 'default')
859 859 other = hg.repository(ui, dest)
860 860 o = repo.findoutgoing(other, force=opts['force'])
861 861 cg = repo.changegroup(o, 'bundle')
862 862 write_bundle(cg, fname)
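# usage sketch (names illustrative): moving changes to a repository that
# cannot be reached with push:
#   hg bundle changes.hg ../upstream-clone
#   ... transfer changes.hg by mail or other means ...
#   hg unbundle changes.hg        # run inside the receiving repository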
863 863
864 864 def cat(ui, repo, file1, *pats, **opts):
865 865 """output the latest or given revisions of files
866 866
867 867 Print the specified files as they were at the given revision.
868 868 If no revision is given then the tip is used.
869 869
870 870 Output may be to a file, in which case the name of the file is
871 871 given using a format string. The formatting rules are the same as
872 872 for the export command, with the following additions:
873 873
874 874 %s basename of file being printed
875 875 %d dirname of file being printed, or '.' if in repo root
876 876 %p root-relative path name of file being printed
877 877 """
878 878 ctx = repo.changectx(opts['rev'] or "-1")
879 879 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
880 880 ctx.node()):
881 881 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
882 882 fp.write(ctx.filectx(abs).data())
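# usage sketch (revision and paths illustrative): the extra format characters
# documented above make it possible to dump several files of one revision
# into separate outputs, e.g.
#   hg cat -r 880 -o saved-%s README hgext/mq.py
# which, assuming both paths exist at that revision, writes "saved-README"
# and "saved-mq.py".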
883 883
884 884 def clone(ui, source, dest=None, **opts):
885 885 """make a copy of an existing repository
886 886
887 887 Create a copy of an existing repository in a new directory.
888 888
889 889 If no destination directory name is specified, it defaults to the
890 890 basename of the source.
891 891
892 892 The location of the source is added to the new repository's
893 893 .hg/hgrc file, as the default to be used for future pulls.
894 894
895 895 For efficiency, hardlinks are used for cloning whenever the source
896 896 and destination are on the same filesystem (note this applies only
897 897 to the repository data, not to the checked out files). Some
898 898 filesystems, such as AFS, implement hardlinking incorrectly, but
899 899 do not report errors. In these cases, use the --pull option to
900 900 avoid hardlinking.
901 901
902 902 You can safely clone repositories and checked out files using full
903 903 hardlinks with
904 904
905 905 $ cp -al REPO REPOCLONE
906 906
907 907 which is the fastest way to clone. However, the operation is not
908 908 atomic (making sure REPO is not modified during the operation is
909 909 up to you) and you have to make sure your editor breaks hardlinks
910 910 (Emacs and most Linux Kernel tools do so).
911 911
912 912 If you use the -r option to clone up to a specific revision, no
913 913 subsequent revisions will be present in the cloned repository.
914 914 This option implies --pull, even on local repositories.
915 915
916 916 See pull for valid source format details.
917 917
918 918 It is possible to specify an ssh:// URL as the destination, but no
919 919 .hg/hgrc will be created on the remote side. Look at the help text
920 920 for the pull command for important details about ssh:// URLs.
921 921 """
922 922 setremoteconfig(ui, opts)
923 923 hg.clone(ui, ui.expandpath(source), dest,
924 924 pull=opts['pull'],
925 925 stream=opts['uncompressed'],
926 926 rev=opts['rev'],
927 927 update=not opts['noupdate'])
928 928
929 929 def commit(ui, repo, *pats, **opts):
930 930 """commit the specified files or all outstanding changes
931 931
932 932 Commit changes to the given files into the repository.
933 933
934 934 If a list of files is omitted, all changes reported by "hg status"
935 935 will be committed.
936 936
937 937 If no commit message is specified, the editor configured in your hgrc
938 938 or in the EDITOR environment variable is started to enter a message.
939 939 """
940 940 message = logmessage(opts)
941 941
942 942 if opts['addremove']:
943 943 cmdutil.addremove(repo, pats, opts)
944 944 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
945 945 if pats:
946 946 modified, added, removed = repo.status(files=fns, match=match)[:3]
947 947 files = modified + added + removed
948 948 else:
949 949 files = []
950 950 try:
951 951 repo.commit(files, message, opts['user'], opts['date'], match,
952 952 force_editor=opts.get('force_editor'))
953 953 except ValueError, inst:
954 954 raise util.Abort(str(inst))
955 955
956 956 def docopy(ui, repo, pats, opts, wlock):
957 957 # called with the repo lock held
958 958 cwd = repo.getcwd()
959 959 errors = 0
960 960 copied = []
961 961 targets = {}
962 962
963 963 def okaytocopy(abs, rel, exact):
964 964 reasons = {'?': _('is not managed'),
965 965 'a': _('has been marked for add'),
966 966 'r': _('has been marked for remove')}
967 967 state = repo.dirstate.state(abs)
968 968 reason = reasons.get(state)
969 969 if reason:
970 970 if state == 'a':
971 971 origsrc = repo.dirstate.copied(abs)
972 972 if origsrc is not None:
973 973 return origsrc
974 974 if exact:
975 975 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
976 976 else:
977 977 return abs
978 978
979 979 def copy(origsrc, abssrc, relsrc, target, exact):
980 980 abstarget = util.canonpath(repo.root, cwd, target)
981 981 reltarget = util.pathto(cwd, abstarget)
982 982 prevsrc = targets.get(abstarget)
983 983 if prevsrc is not None:
984 984 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
985 985 (reltarget, abssrc, prevsrc))
986 986 return
987 987 if (not opts['after'] and os.path.exists(reltarget) or
988 988 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
989 989 if not opts['force']:
990 990 ui.warn(_('%s: not overwriting - file exists\n') %
991 991 reltarget)
992 992 return
993 993 if not opts['after'] and not opts.get('dry_run'):
994 994 os.unlink(reltarget)
995 995 if opts['after']:
996 996 if not os.path.exists(reltarget):
997 997 return
998 998 else:
999 999 targetdir = os.path.dirname(reltarget) or '.'
1000 1000 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1001 1001 os.makedirs(targetdir)
1002 1002 try:
1003 1003 restore = repo.dirstate.state(abstarget) == 'r'
1004 1004 if restore and not opts.get('dry_run'):
1005 1005 repo.undelete([abstarget], wlock)
1006 1006 try:
1007 1007 if not opts.get('dry_run'):
1008 1008 shutil.copyfile(relsrc, reltarget)
1009 1009 shutil.copymode(relsrc, reltarget)
1010 1010 restore = False
1011 1011 finally:
1012 1012 if restore:
1013 1013 repo.remove([abstarget], wlock)
1014 1014 except shutil.Error, inst:
1015 1015 raise util.Abort(str(inst))
1016 1016 except IOError, inst:
1017 1017 if inst.errno == errno.ENOENT:
1018 1018 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1019 1019 else:
1020 1020 ui.warn(_('%s: cannot copy - %s\n') %
1021 1021 (relsrc, inst.strerror))
1022 1022 errors += 1
1023 1023 return
1024 1024 if ui.verbose or not exact:
1025 1025 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1026 1026 targets[abstarget] = abssrc
1027 1027 if abstarget != origsrc and not opts.get('dry_run'):
1028 1028 repo.copy(origsrc, abstarget, wlock)
1029 1029 copied.append((abssrc, relsrc, exact))
1030 1030
1031 1031 def targetpathfn(pat, dest, srcs):
1032 1032 if os.path.isdir(pat):
1033 1033 abspfx = util.canonpath(repo.root, cwd, pat)
1034 1034 if destdirexists:
1035 1035 striplen = len(os.path.split(abspfx)[0])
1036 1036 else:
1037 1037 striplen = len(abspfx)
1038 1038 if striplen:
1039 1039 striplen += len(os.sep)
1040 1040 res = lambda p: os.path.join(dest, p[striplen:])
1041 1041 elif destdirexists:
1042 1042 res = lambda p: os.path.join(dest, os.path.basename(p))
1043 1043 else:
1044 1044 res = lambda p: dest
1045 1045 return res
1046 1046
1047 1047 def targetpathafterfn(pat, dest, srcs):
1048 1048 if util.patkind(pat, None)[0]:
1049 1049 # a mercurial pattern
1050 1050 res = lambda p: os.path.join(dest, os.path.basename(p))
1051 1051 else:
1052 1052 abspfx = util.canonpath(repo.root, cwd, pat)
1053 1053 if len(abspfx) < len(srcs[0][0]):
1054 1054 # A directory. Either the target path contains the last
1055 1055 # component of the source path or it does not.
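# (descriptive note on the heuristic below: evalpath counts how many copy
# sources already have an existing target for a given prefix length, and
# the prefix length that matches more existing paths wins)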
1056 1056 def evalpath(striplen):
1057 1057 score = 0
1058 1058 for s in srcs:
1059 1059 t = os.path.join(dest, s[0][striplen:])
1060 1060 if os.path.exists(t):
1061 1061 score += 1
1062 1062 return score
1063 1063
1064 1064 striplen = len(abspfx)
1065 1065 if striplen:
1066 1066 striplen += len(os.sep)
1067 1067 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1068 1068 score = evalpath(striplen)
1069 1069 striplen1 = len(os.path.split(abspfx)[0])
1070 1070 if striplen1:
1071 1071 striplen1 += len(os.sep)
1072 1072 if evalpath(striplen1) > score:
1073 1073 striplen = striplen1
1074 1074 res = lambda p: os.path.join(dest, p[striplen:])
1075 1075 else:
1076 1076 # a file
1077 1077 if destdirexists:
1078 1078 res = lambda p: os.path.join(dest, os.path.basename(p))
1079 1079 else:
1080 1080 res = lambda p: dest
1081 1081 return res
1082 1082
1083 1083
1084 1084 pats = list(pats)
1085 1085 if not pats:
1086 1086 raise util.Abort(_('no source or destination specified'))
1087 1087 if len(pats) == 1:
1088 1088 raise util.Abort(_('no destination specified'))
1089 1089 dest = pats.pop()
1090 1090 destdirexists = os.path.isdir(dest)
1091 1091 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1092 1092 raise util.Abort(_('with multiple sources, destination must be an '
1093 1093 'existing directory'))
1094 1094 if opts['after']:
1095 1095 tfn = targetpathafterfn
1096 1096 else:
1097 1097 tfn = targetpathfn
1098 1098 copylist = []
1099 1099 for pat in pats:
1100 1100 srcs = []
1101 1101 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1102 1102 origsrc = okaytocopy(abssrc, relsrc, exact)
1103 1103 if origsrc:
1104 1104 srcs.append((origsrc, abssrc, relsrc, exact))
1105 1105 if not srcs:
1106 1106 continue
1107 1107 copylist.append((tfn(pat, dest, srcs), srcs))
1108 1108 if not copylist:
1109 1109 raise util.Abort(_('no files to copy'))
1110 1110
1111 1111 for targetpath, srcs in copylist:
1112 1112 for origsrc, abssrc, relsrc, exact in srcs:
1113 1113 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1114 1114
1115 1115 if errors:
1116 1116 ui.warn(_('(consider using --after)\n'))
1117 1117 return errors, copied
1118 1118
1119 1119 def copy(ui, repo, *pats, **opts):
1120 1120 """mark files as copied for the next commit
1121 1121
1122 1122 Mark dest as having copies of source files. If dest is a
1123 1123 directory, copies are put in that directory. If dest is a file,
1124 1124 there can only be one source.
1125 1125
1126 1126 By default, this command copies the contents of files as they
1127 1127 stand in the working directory. If invoked with --after, the
1128 1128 operation is recorded, but no copying is performed.
1129 1129
1130 1130 This command takes effect in the next commit.
1131 1131
1132 1132 NOTE: This command should be treated as experimental. While it
1133 1133 should properly record copied files, this information is not yet
1134 1134 fully used by merge, nor fully reported by log.
1135 1135 """
1136 1136 wlock = repo.wlock(0)
1137 1137 errs, copied = docopy(ui, repo, pats, opts, wlock)
1138 1138 return errs
1139 1139
1140 1140 def debugancestor(ui, index, rev1, rev2):
1141 1141 """find the ancestor revision of two revisions in a given index"""
1142 1142 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1143 1143 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1144 1144 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1145 1145
1146 1146 def debugcomplete(ui, cmd='', **opts):
1147 1147 """returns the completion list associated with the given command"""
1148 1148
1149 1149 if opts['options']:
1150 1150 options = []
1151 1151 otables = [globalopts]
1152 1152 if cmd:
1153 1153 aliases, entry = findcmd(cmd)
1154 1154 otables.append(entry[1])
1155 1155 for t in otables:
1156 1156 for o in t:
1157 1157 if o[0]:
1158 1158 options.append('-%s' % o[0])
1159 1159 options.append('--%s' % o[1])
1160 1160 ui.write("%s\n" % "\n".join(options))
1161 1161 return
1162 1162
1163 1163 clist = findpossible(cmd).keys()
1164 1164 clist.sort()
1165 1165 ui.write("%s\n" % "\n".join(clist))
1166 1166
1167 1167 def debugrebuildstate(ui, repo, rev=None):
1168 1168 """rebuild the dirstate as it would look like for the given revision"""
1169 1169 if not rev:
1170 1170 rev = repo.changelog.tip()
1171 1171 else:
1172 1172 rev = repo.lookup(rev)
1173 1173 change = repo.changelog.read(rev)
1174 1174 n = change[0]
1175 1175 files = repo.manifest.read(n)
1176 1176 wlock = repo.wlock()
1177 1177 repo.dirstate.rebuild(rev, files)
1178 1178
1179 1179 def debugcheckstate(ui, repo):
1180 1180 """validate the correctness of the current dirstate"""
1181 1181 parent1, parent2 = repo.dirstate.parents()
1182 1182 repo.dirstate.read()
1183 1183 dc = repo.dirstate.map
1184 1184 keys = dc.keys()
1185 1185 keys.sort()
1186 1186 m1n = repo.changelog.read(parent1)[0]
1187 1187 m2n = repo.changelog.read(parent2)[0]
1188 1188 m1 = repo.manifest.read(m1n)
1189 1189 m2 = repo.manifest.read(m2n)
1190 1190 errors = 0
1191 1191 for f in dc:
1192 1192 state = repo.dirstate.state(f)
1193 1193 if state in "nr" and f not in m1:
1194 1194 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1195 1195 errors += 1
1196 1196 if state in "a" and f in m1:
1197 1197 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1198 1198 errors += 1
1199 1199 if state in "m" and f not in m1 and f not in m2:
1200 1200 ui.warn(_("%s in state %s, but not in either manifest\n") %
1201 1201 (f, state))
1202 1202 errors += 1
1203 1203 for f in m1:
1204 1204 state = repo.dirstate.state(f)
1205 1205 if state not in "nrm":
1206 1206 ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
1207 1207 errors += 1
1208 1208 if errors:
1209 1209 error = _(".hg/dirstate inconsistent with current parent's manifest")
1210 1210 raise util.Abort(error)
1211 1211
1212 1212 def debugconfig(ui, repo, *values):
1213 1213 """show combined config settings from all hgrc files
1214 1214
1215 1215 With no args, print names and values of all config items.
1216 1216
1217 1217 With one arg of the form section.name, print just the value of
1218 1218 that config item.
1219 1219
1220 1220 With multiple args, print names and values of all config items
1221 1221 with matching section names."""
1222 1222
1223 1223 if values:
1224 1224 if len([v for v in values if '.' in v]) > 1:
1225 1225 raise util.Abort(_('only one config item permitted'))
1226 1226 for section, name, value in ui.walkconfig():
1227 1227 sectname = section + '.' + name
1228 1228 if values:
1229 1229 for v in values:
1230 1230 if v == section:
1231 1231 ui.write('%s=%s\n' % (sectname, value))
1232 1232 elif v == sectname:
1233 1233 ui.write(value, '\n')
1234 1234 else:
1235 1235 ui.write('%s=%s\n' % (sectname, value))
1236 1236
1237 1237 def debugsetparents(ui, repo, rev1, rev2=None):
1238 1238 """manually set the parents of the current working directory
1239 1239
1240 1240 This is useful for writing repository conversion tools, but should
1241 1241 be used with care.
1242 1242 """
1243 1243
1244 1244 if not rev2:
1245 1245 rev2 = hex(nullid)
1246 1246
1247 1247 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1248 1248
1249 1249 def debugstate(ui, repo):
1250 1250 """show the contents of the current dirstate"""
1251 1251 repo.dirstate.read()
1252 1252 dc = repo.dirstate.map
1253 1253 keys = dc.keys()
1254 1254 keys.sort()
1255 1255 for file_ in keys:
1256 1256 ui.write("%c %3o %10d %s %s\n"
1257 1257 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1258 1258 time.strftime("%x %X",
1259 1259 time.localtime(dc[file_][3])), file_))
1260 1260 for f in repo.dirstate.copies:
1261 1261 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1262 1262
1263 1263 def debugdata(ui, file_, rev):
1264 1264 """dump the contents of an data file revision"""
1265 1265 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1266 1266 file_[:-2] + ".i", file_, 0)
1267 1267 try:
1268 1268 ui.write(r.revision(r.lookup(rev)))
1269 1269 except KeyError:
1270 1270 raise util.Abort(_('invalid revision identifier %s') % rev)
1271 1271
1272 1272 def debugindex(ui, file_):
1273 1273 """dump the contents of an index file"""
1274 1274 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1275 1275 ui.write(" rev offset length base linkrev" +
1276 1276 " nodeid p1 p2\n")
1277 1277 for i in range(r.count()):
1278 1278 node = r.node(i)
1279 1279 pp = r.parents(node)
1280 1280 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1281 1281 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1282 1282 short(node), short(pp[0]), short(pp[1])))
1283 1283
1284 1284 def debugindexdot(ui, file_):
1285 1285 """dump an index DAG as a .dot file"""
1286 1286 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1287 1287 ui.write("digraph G {\n")
1288 1288 for i in range(r.count()):
1289 1289 node = r.node(i)
1290 1290 pp = r.parents(node)
1291 1291 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1292 1292 if pp[1] != nullid:
1293 1293 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1294 1294 ui.write("}\n")
1295 1295
1296 1296 def debugrename(ui, repo, file, rev=None):
1297 1297 """dump rename information"""
1298 1298 r = repo.file(relpath(repo, [file])[0])
1299 1299 if rev:
1300 1300 try:
1301 1301 # assume all revision numbers are for changesets
1302 1302 n = repo.lookup(rev)
1303 1303 change = repo.changelog.read(n)
1304 1304 m = repo.manifest.read(change[0])
1305 1305 n = m[relpath(repo, [file])[0]]
1306 1306 except (hg.RepoError, KeyError):
1307 1307 n = r.lookup(rev)
1308 1308 else:
1309 1309 n = r.tip()
1310 1310 m = r.renamed(n)
1311 1311 if m:
1312 1312 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1313 1313 else:
1314 1314 ui.write(_("not renamed\n"))
1315 1315
1316 1316 def debugwalk(ui, repo, *pats, **opts):
1317 1317 """show how files match on given patterns"""
1318 1318 items = list(cmdutil.walk(repo, pats, opts))
1319 1319 if not items:
1320 1320 return
1321 1321 fmt = '%%s %%-%ds %%-%ds %%s' % (
1322 1322 max([len(abs) for (src, abs, rel, exact) in items]),
1323 1323 max([len(rel) for (src, abs, rel, exact) in items]))
1324 1324 for src, abs, rel, exact in items:
1325 1325 line = fmt % (src, abs, rel, exact and 'exact' or '')
1326 1326 ui.write("%s\n" % line.rstrip())
1327 1327
1328 1328 def diff(ui, repo, *pats, **opts):
1329 1329 """diff repository (or selected files)
1330 1330
1331 1331 Show differences between revisions for the specified files.
1332 1332
1333 1333 Differences between files are shown using the unified diff format.
1334 1334
1335 1335 When two revision arguments are given, then changes are shown
1336 1336 between those revisions. If only one revision is specified then
1337 1337 that revision is compared to the working directory, and, when no
1338 1338 revisions are specified, the working directory files are compared
1339 1339 to its parent.
1340 1340
1341 1341 Without the -a option, diff will avoid generating diffs of files
1342 1342 it detects as binary. With -a, diff will generate a diff anyway,
1343 1343 probably with undesirable results.
1344 1344 """
1345 1345 node1, node2 = revpair(ui, repo, opts['rev'])
1346 1346
1347 1347 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1348 1348
1349 1349 patch.diff(repo, node1, node2, fns, match=matchfn,
1350 opts=ui.diffopts(opts))
1350 opts=patch.diffopts(ui, opts))
1351 1351
1352 1352 def export(ui, repo, *changesets, **opts):
1353 1353 """dump the header and diffs for one or more changesets
1354 1354
1355 1355 Print the changeset header and diffs for one or more revisions.
1356 1356
1357 1357 The information shown in the changeset header is: author,
1358 1358 changeset hash, parent and commit comment.
1359 1359
1360 1360 Output may be to a file, in which case the name of the file is
1361 1361 given using a format string. The formatting rules are as follows:
1362 1362
1363 1363 %% literal "%" character
1364 1364 %H changeset hash (40 bytes of hexadecimal)
1365 1365 %N number of patches being generated
1366 1366 %R changeset revision number
1367 1367 %b basename of the exporting repository
1368 1368 %h short-form changeset hash (12 bytes of hexadecimal)
1369 1369 %n zero-padded sequence number, starting at 1
1370 1370 %r zero-padded changeset revision number
1371 1371
1372 1372 Without the -a option, export will avoid generating diffs of files
1373 1373 it detects as binary. With -a, export will generate a diff anyway,
1374 1374 probably with undesirable results.
1375 1375
1376 1376 With the --switch-parent option, the diff will be against the second
1377 1377 parent. This can be useful for reviewing a merge.
1378 1378 """
1379 1379 if not changesets:
1380 1380 raise util.Abort(_("export requires at least one changeset"))
1381 1381 revs = list(revrange(ui, repo, changesets))
1382 1382 if len(revs) > 1:
1383 1383 ui.note(_('exporting patches:\n'))
1384 1384 else:
1385 1385 ui.note(_('exporting patch:\n'))
1386 1386 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1387 switch_parent=opts['switch_parent'], opts=ui.diffopts(opts))
1387 switch_parent=opts['switch_parent'],
1388 opts=patch.diffopts(ui, opts))
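# usage sketch (revision numbers illustrative): using the format characters
# listed above to write one patch file per exported revision:
#   hg export -o %R-%h.patch 1380 1381
# should produce files such as "1380-0123456789ab.patch".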
1388 1389
1389 1390 def forget(ui, repo, *pats, **opts):
1390 1391 """don't add the specified files on the next commit (DEPRECATED)
1391 1392
1392 1393 (DEPRECATED)
1393 1394 Undo an 'hg add' scheduled for the next commit.
1394 1395
1395 1396 This command is now deprecated and will be removed in a future
1396 1397 release. Please use revert instead.
1397 1398 """
1398 1399 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1399 1400 forget = []
1400 1401 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
1401 1402 if repo.dirstate.state(abs) == 'a':
1402 1403 forget.append(abs)
1403 1404 if ui.verbose or not exact:
1404 1405 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1405 1406 repo.forget(forget)
1406 1407
1407 1408 def grep(ui, repo, pattern, *pats, **opts):
1408 1409 """search for a pattern in specified files and revisions
1409 1410
1410 1411 Search revisions of files for a regular expression.
1411 1412
1412 1413 This command behaves differently than Unix grep. It only accepts
1413 1414 Python/Perl regexps. It searches repository history, not the
1414 1415 working directory. It always prints the revision number in which
1415 1416 a match appears.
1416 1417
1417 1418 By default, grep only prints output for the first revision of a
1418 1419 file in which it finds a match. To get it to print every revision
1419 1420 that contains a change in match status ("-" for a match that
1420 1421 becomes a non-match, or "+" for a non-match that becomes a match),
1421 1422 use the --all flag.
1422 1423 """
1423 1424 reflags = 0
1424 1425 if opts['ignore_case']:
1425 1426 reflags |= re.I
1426 1427 regexp = re.compile(pattern, reflags)
1427 1428 sep, eol = ':', '\n'
1428 1429 if opts['print0']:
1429 1430 sep = eol = '\0'
1430 1431
1431 1432 fcache = {}
1432 1433 def getfile(fn):
1433 1434 if fn not in fcache:
1434 1435 fcache[fn] = repo.file(fn)
1435 1436 return fcache[fn]
1436 1437
1437 1438 def matchlines(body):
1438 1439 begin = 0
1439 1440 linenum = 0
1440 1441 while True:
1441 1442 match = regexp.search(body, begin)
1442 1443 if not match:
1443 1444 break
1444 1445 mstart, mend = match.span()
1445 1446 linenum += body.count('\n', begin, mstart) + 1
1446 1447 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1447 1448 lend = body.find('\n', mend)
1448 1449 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1449 1450 begin = lend + 1
1450 1451
1451 1452 class linestate(object):
1452 1453 def __init__(self, line, linenum, colstart, colend):
1453 1454 self.line = line
1454 1455 self.linenum = linenum
1455 1456 self.colstart = colstart
1456 1457 self.colend = colend
1457 1458
1458 1459 def __eq__(self, other):
1459 1460 return self.line == other.line
1460 1461
1461 1462 matches = {}
1462 1463 copies = {}
1463 1464 def grepbody(fn, rev, body):
1464 1465 matches[rev].setdefault(fn, [])
1465 1466 m = matches[rev][fn]
1466 1467 for lnum, cstart, cend, line in matchlines(body):
1467 1468 s = linestate(line, lnum, cstart, cend)
1468 1469 m.append(s)
1469 1470
1470 1471 def difflinestates(a, b):
1471 1472 sm = difflib.SequenceMatcher(None, a, b)
1472 1473 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1473 1474 if tag == 'insert':
1474 1475 for i in range(blo, bhi):
1475 1476 yield ('+', b[i])
1476 1477 elif tag == 'delete':
1477 1478 for i in range(alo, ahi):
1478 1479 yield ('-', a[i])
1479 1480 elif tag == 'replace':
1480 1481 for i in range(alo, ahi):
1481 1482 yield ('-', a[i])
1482 1483 for i in range(blo, bhi):
1483 1484 yield ('+', b[i])
1484 1485
1485 1486 prev = {}
1486 1487 ucache = {}
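# (descriptive note: display() prints the lines whose match state changed
# between two revisions of a file; which of states/prevstates is treated as
# "old" depends on whether walkchangerevs is currently handing out revisions
# in increasing order and on whether --all was given)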
1487 1488 def display(fn, rev, states, prevstates):
1488 1489 counts = {'-': 0, '+': 0}
1489 1490 filerevmatches = {}
1490 1491 if incrementing or not opts['all']:
1491 1492 a, b = prevstates, states
1492 1493 else:
1493 1494 a, b = states, prevstates
1494 1495 for change, l in difflinestates(a, b):
1495 1496 if incrementing or not opts['all']:
1496 1497 r = rev
1497 1498 else:
1498 1499 r = prev[fn]
1499 1500 cols = [fn, str(r)]
1500 1501 if opts['line_number']:
1501 1502 cols.append(str(l.linenum))
1502 1503 if opts['all']:
1503 1504 cols.append(change)
1504 1505 if opts['user']:
1505 1506 cols.append(trimuser(ui, getchange(r)[1], rev,
1506 1507 ucache))
1507 1508 if opts['files_with_matches']:
1508 1509 c = (fn, rev)
1509 1510 if c in filerevmatches:
1510 1511 continue
1511 1512 filerevmatches[c] = 1
1512 1513 else:
1513 1514 cols.append(l.line)
1514 1515 ui.write(sep.join(cols), eol)
1515 1516 counts[change] += 1
1516 1517 return counts['+'], counts['-']
1517 1518
1518 1519 fstate = {}
1519 1520 skip = {}
1520 1521 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1521 1522 count = 0
1522 1523 incrementing = False
1523 1524 follow = opts.get('follow')
1524 1525 for st, rev, fns in changeiter:
1525 1526 if st == 'window':
1526 1527 incrementing = rev
1527 1528 matches.clear()
1528 1529 copies.clear()
1529 1530 elif st == 'add':
1530 1531 change = repo.changelog.read(repo.lookup(str(rev)))
1531 1532 mf = repo.manifest.read(change[0])
1532 1533 matches[rev] = {}
1533 1534 for fn in fns:
1534 1535 if fn in skip:
1535 1536 continue
1536 1537 fstate.setdefault(fn, {})
1537 1538 copies.setdefault(rev, {})
1538 1539 try:
1539 1540 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1540 1541 if follow:
1541 1542 copied = getfile(fn).renamed(mf[fn])
1542 1543 if copied:
1543 1544 copies[rev][fn] = copied[0]
1544 1545 except KeyError:
1545 1546 pass
1546 1547 elif st == 'iter':
1547 1548 states = matches[rev].items()
1548 1549 states.sort()
1549 1550 for fn, m in states:
1550 1551 copy = copies[rev].get(fn)
1551 1552 if fn in skip:
1552 1553 if copy:
1553 1554 skip[copy] = True
1554 1555 continue
1555 1556 if incrementing or not opts['all'] or fstate[fn]:
1556 1557 pos, neg = display(fn, rev, m, fstate[fn])
1557 1558 count += pos + neg
1558 1559 if pos and not opts['all']:
1559 1560 skip[fn] = True
1560 1561 if copy:
1561 1562 skip[copy] = True
1562 1563 fstate[fn] = m
1563 1564 if copy:
1564 1565 fstate[copy] = m
1565 1566 prev[fn] = rev
1566 1567
1567 1568 if not incrementing:
1568 1569 fstate = fstate.items()
1569 1570 fstate.sort()
1570 1571 for fn, state in fstate:
1571 1572 if fn in skip:
1572 1573 continue
1573 1574 if fn not in copies[prev[fn]]:
1574 1575 display(fn, rev, {}, state)
1575 1576 return (count == 0 and 1) or 0
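# usage sketch (pattern and file illustrative): with --all, every revision
# in which a match appears or disappears is reported, each line flagged "+"
# or "-" as described in the docstring:
#   hg grep --all -n demandload mercurial/commands.py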
1576 1577
1577 1578 def heads(ui, repo, **opts):
1578 1579 """show current repository heads
1579 1580
1580 1581 Show all repository head changesets.
1581 1582
1582 1583 Repository "heads" are changesets that don't have children
1583 1584 changesets. They are where development generally takes place and
1584 1585 are the usual targets for update and merge operations.
1585 1586 """
1586 1587 if opts['rev']:
1587 1588 heads = repo.heads(repo.lookup(opts['rev']))
1588 1589 else:
1589 1590 heads = repo.heads()
1590 1591 br = None
1591 1592 if opts['branches']:
1592 1593 br = repo.branchlookup(heads)
1593 1594 displayer = show_changeset(ui, repo, opts)
1594 1595 for n in heads:
1595 1596 displayer.show(changenode=n, brinfo=br)
1596 1597
1597 1598 def identify(ui, repo):
1598 1599 """print information about the working copy
1599 1600
1600 1601 Print a short summary of the current state of the repo.
1601 1602
1602 1603 This summary identifies the repository state using one or two parent
1603 1604 hash identifiers, followed by a "+" if there are uncommitted changes
1604 1605 in the working directory, followed by a list of tags for this revision.
1605 1606 """
1606 1607 parents = [p for p in repo.dirstate.parents() if p != nullid]
1607 1608 if not parents:
1608 1609 ui.write(_("unknown\n"))
1609 1610 return
1610 1611
1611 1612 hexfunc = ui.verbose and hex or short
1612 1613 modified, added, removed, deleted = repo.status()[:4]
1613 1614 output = ["%s%s" %
1614 1615 ('+'.join([hexfunc(parent) for parent in parents]),
1615 1616 (modified or added or removed or deleted) and "+" or "")]
1616 1617
1617 1618 if not ui.quiet:
1618 1619 # multiple tags for a single parent separated by '/'
1619 1620 parenttags = ['/'.join(tags)
1620 1621 for tags in map(repo.nodetags, parents) if tags]
1621 1622 # tags for multiple parents separated by ' + '
1622 1623 if parenttags:
1623 1624 output.append(' + '.join(parenttags))
1624 1625
1625 1626 ui.write("%s\n" % ' '.join(output))
1626 1627
1627 1628 def import_(ui, repo, patch1, *patches, **opts):
1628 1629 """import an ordered set of patches
1629 1630
1630 1631 Import a list of patches and commit them individually.
1631 1632
1632 1633 If there are outstanding changes in the working directory, import
1633 1634 will abort unless given the -f flag.
1634 1635
1635 1636 You can import a patch straight from a mail message. Even patches
1636 1637 as attachments work (body part must be type text/plain or
1637 1638 text/x-patch to be used). From and Subject headers of email
1638 1639 message are used as default committer and commit message. All
1639 1640 text/plain body parts before the first diff are added to the commit
1640 1641 message.
1641 1642
1642 1643 If the imported patch was generated by hg export, user and description
1643 1644 from patch override values from message headers and body. Values
1644 1645 given on command line with -m and -u override these.
1645 1646
1646 1647 To read a patch from standard input, use patch name "-".
1647 1648 """
1648 1649 patches = (patch1,) + patches
1649 1650
1650 1651 if not opts['force']:
1651 1652 bail_if_changed(repo)
1652 1653
1653 1654 d = opts["base"]
1654 1655 strip = opts["strip"]
1655 1656
1656 1657 wlock = repo.wlock()
1657 1658 lock = repo.lock()
1658 1659
1659 1660 for p in patches:
1660 1661 pf = os.path.join(d, p)
1661 1662
1662 1663 if pf == '-':
1663 1664 ui.status(_("applying patch from stdin\n"))
1664 1665 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1665 1666 else:
1666 1667 ui.status(_("applying %s\n") % p)
1667 1668 tmpname, message, user, date = patch.extract(ui, file(pf))
1668 1669
1669 1670 if tmpname is None:
1670 1671 raise util.Abort(_('no diffs found'))
1671 1672
1672 1673 try:
1673 1674 if opts['message']:
1674 1675 # pickup the cmdline msg
1675 1676 message = opts['message']
1676 1677 elif message:
1677 1678 # pickup the patch msg
1678 1679 message = message.strip()
1679 1680 else:
1680 1681 # launch the editor
1681 1682 message = None
1682 1683 ui.debug(_('message:\n%s\n') % message)
1683 1684
1684 1685 files, fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root)
1685 1686 removes = []
1686 1687 if len(files) > 0:
1687 1688 cfiles = files.keys()
1688 1689 copies = []
1689 1690 copts = {'after': False, 'force': False}
1690 1691 cwd = repo.getcwd()
1691 1692 if cwd:
1692 1693 cfiles = [util.pathto(cwd, f) for f in files.keys()]
1693 1694 for f in files:
1694 1695 ctype, gp = files[f]
1695 1696 if ctype == 'RENAME':
1696 1697 copies.append((gp.oldpath, gp.path, gp.copymod))
1697 1698 removes.append(gp.oldpath)
1698 1699 elif ctype == 'COPY':
1699 1700 copies.append((gp.oldpath, gp.path, gp.copymod))
1700 1701 elif ctype == 'DELETE':
1701 1702 removes.append(gp.path)
1702 1703 for src, dst, after in copies:
1703 1704 absdst = os.path.join(repo.root, dst)
1704 1705 if not after and os.path.exists(absdst):
1705 1706 raise util.Abort(_('patch creates existing file %s') % dst)
1706 1707 if cwd:
1707 1708 src, dst = [util.pathto(cwd, f) for f in (src, dst)]
1708 1709 copts['after'] = after
1709 1710 errs, copied = docopy(ui, repo, (src, dst), copts, wlock=wlock)
1710 1711 if errs:
1711 1712 raise util.Abort(errs)
1712 1713 if removes:
1713 1714 repo.remove(removes, True, wlock=wlock)
1714 1715 for f in files:
1715 1716 ctype, gp = files[f]
1716 1717 if gp and gp.mode:
1717 1718 x = gp.mode & 0100 != 0
1718 1719 dst = os.path.join(repo.root, gp.path)
1719 1720 util.set_exec(dst, x)
1720 1721 cmdutil.addremove(repo, cfiles, wlock=wlock)
1721 1722 files = files.keys()
1722 1723 files.extend([r for r in removes if r not in files])
1723 1724 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1724 1725 finally:
1725 1726 os.unlink(tmpname)
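# usage sketch (paths illustrative): applying a patch from a file or from
# standard input, as described in the docstring:
#   hg import ../patches/fix-dirstate.patch
#   gunzip -c fix.patch.gz | hg import -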
1726 1727
1727 1728 def incoming(ui, repo, source="default", **opts):
1728 1729 """show new changesets found in source
1729 1730
1730 1731 Show new changesets found in the specified path/URL or the default
1731 1732 pull location. These are the changesets that would be pulled if a pull
1732 1733 was requested.
1733 1734
1734 1735 For a remote repository, using --bundle avoids downloading the changesets
1735 1736 twice if the incoming command is followed by a pull.
1736 1737
1737 1738 See pull for valid source format details.
1738 1739 """
1739 1740 source = ui.expandpath(source)
1740 1741 setremoteconfig(ui, opts)
1741 1742
1742 1743 other = hg.repository(ui, source)
1743 1744 incoming = repo.findincoming(other, force=opts["force"])
1744 1745 if not incoming:
1745 1746 ui.status(_("no changes found\n"))
1746 1747 return
1747 1748
1748 1749 cleanup = None
1749 1750 try:
1750 1751 fname = opts["bundle"]
1751 1752 if fname or not other.local():
1752 1753 # create a bundle (uncompressed if other repo is not local)
1753 1754 cg = other.changegroup(incoming, "incoming")
1754 1755 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1755 1756 # keep written bundle?
1756 1757 if opts["bundle"]:
1757 1758 cleanup = None
1758 1759 if not other.local():
1759 1760 # use the created uncompressed bundlerepo
1760 1761 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1761 1762
1762 1763 revs = None
1763 1764 if opts['rev']:
1764 1765 revs = [other.lookup(rev) for rev in opts['rev']]
1765 1766 o = other.changelog.nodesbetween(incoming, revs)[0]
1766 1767 if opts['newest_first']:
1767 1768 o.reverse()
1768 1769 displayer = show_changeset(ui, other, opts)
1769 1770 for n in o:
1770 1771 parents = [p for p in other.changelog.parents(n) if p != nullid]
1771 1772 if opts['no_merges'] and len(parents) == 2:
1772 1773 continue
1773 1774 displayer.show(changenode=n)
1774 1775 if opts['patch']:
1775 1776 prev = (parents and parents[0]) or nullid
1776 1777 patch.diff(repo, other, prev, n)
1777 1778 ui.write("\n")
1778 1779 finally:
1779 1780 if hasattr(other, 'close'):
1780 1781 other.close()
1781 1782 if cleanup:
1782 1783 os.unlink(cleanup)
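# usage sketch (URL and path illustrative): previewing remote changes and
# then applying them without downloading twice, via the bundle written by
# --bundle:
#   hg incoming --bundle /tmp/in.hg http://example.com/hg
#   hg unbundle /tmp/in.hg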
1783 1784
1784 1785 def init(ui, dest=".", **opts):
1785 1786 """create a new repository in the given directory
1786 1787
1787 1788 Initialize a new repository in the given directory. If the given
1788 1789 directory does not exist, it is created.
1789 1790
1790 1791 If no directory is given, the current directory is used.
1791 1792
1792 1793 It is possible to specify an ssh:// URL as the destination.
1793 1794 Look at the help text for the pull command for important details
1794 1795 about ssh:// URLs.
1795 1796 """
1796 1797 setremoteconfig(ui, opts)
1797 1798 hg.repository(ui, dest, create=1)
1798 1799
1799 1800 def locate(ui, repo, *pats, **opts):
1800 1801 """locate files matching specific patterns
1801 1802
1802 1803 Print all files under Mercurial control whose names match the
1803 1804 given patterns.
1804 1805
1805 1806 This command searches the current directory and its
1806 1807 subdirectories. To search an entire repository, move to the root
1807 1808 of the repository.
1808 1809
1809 1810 If no patterns are given to match, this command prints all file
1810 1811 names.
1811 1812
1812 1813 If you want to feed the output of this command into the "xargs"
1813 1814 command, use the "-0" option to both this command and "xargs".
1814 1815 This will avoid the problem of "xargs" treating single filenames
1815 1816 that contain white space as multiple filenames.
1816 1817 """
1817 1818 end = opts['print0'] and '\0' or '\n'
1818 1819 rev = opts['rev']
1819 1820 if rev:
1820 1821 node = repo.lookup(rev)
1821 1822 else:
1822 1823 node = None
1823 1824
1824 1825 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1825 1826 head='(?:.*/|)'):
1826 1827 if not node and repo.dirstate.state(abs) == '?':
1827 1828 continue
1828 1829 if opts['fullpath']:
1829 1830 ui.write(os.path.join(repo.root, abs), end)
1830 1831 else:
1831 1832 ui.write(((pats and rel) or abs), end)
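# usage sketch (pattern illustrative): feeding locate output to xargs
# safely, as recommended in the docstring above:
#   hg locate -0 '*.py' | xargs -0 grep -l demandload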
1832 1833
1833 1834 def log(ui, repo, *pats, **opts):
1834 1835 """show revision history of entire repository or files
1835 1836
1836 1837 Print the revision history of the specified files or the entire
1837 1838 project.
1838 1839
1839 1840 File history is shown without following rename or copy history of
1840 1841 files. Use -f/--follow with a file name to follow history across
1841 1842 renames and copies. --follow without a file name will only show
1842 1843 ancestors or descendants of the starting revision. --follow-first
1843 1844 only follows the first parent of merge revisions.
1844 1845
1845 1846 If no revision range is specified, the default is tip:0 unless
1846 1847 --follow is set, in which case the working directory parent is
1847 1848 used as the starting revision.
1848 1849
1849 1850 By default this command outputs: changeset id and hash, tags,
1850 1851 non-trivial parents, user, date and time, and a summary for each
1851 1852 commit. When the -v/--verbose switch is used, the list of changed
1852 1853 files and full commit message is shown.
1853 1854 """
1854 1855 class dui(object):
1855 1856 # Implement and delegate some ui protocol. Save hunks of
1856 1857 # output for later display in the desired order.
1857 1858 def __init__(self, ui):
1858 1859 self.ui = ui
1859 1860 self.hunk = {}
1860 1861 self.header = {}
1861 1862 def bump(self, rev):
1862 1863 self.rev = rev
1863 1864 self.hunk[rev] = []
1864 1865 self.header[rev] = []
1865 1866 def note(self, *args):
1866 1867 if self.verbose:
1867 1868 self.write(*args)
1868 1869 def status(self, *args):
1869 1870 if not self.quiet:
1870 1871 self.write(*args)
1871 1872 def write(self, *args):
1872 1873 self.hunk[self.rev].append(args)
1873 1874 def write_header(self, *args):
1874 1875 self.header[self.rev].append(args)
1875 1876 def debug(self, *args):
1876 1877 if self.debugflag:
1877 1878 self.write(*args)
1878 1879 def __getattr__(self, key):
1879 1880 return getattr(self.ui, key)
1880 1881
1881 1882 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1882 1883
1883 1884 if opts['limit']:
1884 1885 try:
1885 1886 limit = int(opts['limit'])
1886 1887 except ValueError:
1887 1888 raise util.Abort(_('limit must be a positive integer'))
1888 1889 if limit <= 0: raise util.Abort(_('limit must be positive'))
1889 1890 else:
1890 1891 limit = sys.maxint
1891 1892 count = 0
1892 1893
1893 1894 displayer = show_changeset(ui, repo, opts)
1894 1895 for st, rev, fns in changeiter:
1895 1896 if st == 'window':
1896 1897 du = dui(ui)
1897 1898 displayer.ui = du
1898 1899 elif st == 'add':
1899 1900 du.bump(rev)
1900 1901 changenode = repo.changelog.node(rev)
1901 1902 parents = [p for p in repo.changelog.parents(changenode)
1902 1903 if p != nullid]
1903 1904 if opts['no_merges'] and len(parents) == 2:
1904 1905 continue
1905 1906 if opts['only_merges'] and len(parents) != 2:
1906 1907 continue
1907 1908
1908 1909 if opts['keyword']:
1909 1910 changes = getchange(rev)
1910 1911 miss = 0
1911 1912 for k in [kw.lower() for kw in opts['keyword']]:
1912 1913 if not (k in changes[1].lower() or
1913 1914 k in changes[4].lower() or
1914 1915 k in " ".join(changes[3][:20]).lower()):
1915 1916 miss = 1
1916 1917 break
1917 1918 if miss:
1918 1919 continue
1919 1920
1920 1921 br = None
1921 1922 if opts['branches']:
1922 1923 br = repo.branchlookup([repo.changelog.node(rev)])
1923 1924
1924 1925 displayer.show(rev, brinfo=br)
1925 1926 if opts['patch']:
1926 1927 prev = (parents and parents[0]) or nullid
1927 1928 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1928 1929 du.write("\n\n")
1929 1930 elif st == 'iter':
1930 1931 if count == limit: break
1931 1932 if du.header[rev]:
1932 1933 for args in du.header[rev]:
1933 1934 ui.write_header(*args)
1934 1935 if du.hunk[rev]:
1935 1936 count += 1
1936 1937 for args in du.hunk[rev]:
1937 1938 ui.write(*args)
1938 1939
1939 1940 def manifest(ui, repo, rev=None):
1940 1941 """output the latest or given revision of the project manifest
1941 1942
1942 1943 Print a list of version controlled files for the given revision.
1943 1944
1944 1945 The manifest is the list of files being version controlled. If no revision
1945 1946 is given then the tip is used.
1946 1947 """
1947 1948 if rev:
1948 1949 try:
1949 1950 # assume all revision numbers are for changesets
1950 1951 n = repo.lookup(rev)
1951 1952 change = repo.changelog.read(n)
1952 1953 n = change[0]
1953 1954 except hg.RepoError:
1954 1955 n = repo.manifest.lookup(rev)
1955 1956 else:
1956 1957 n = repo.manifest.tip()
1957 1958 m = repo.manifest.read(n)
1958 1959 files = m.keys()
1959 1960 files.sort()
1960 1961
1961 1962 for f in files:
1962 1963 ui.write("%40s %3s %s\n" % (hex(m[f]),
1963 1964 m.execf(f) and "755" or "644", f))
1964 1965
1965 1966 def merge(ui, repo, node=None, force=None, branch=None):
1966 1967 """Merge working directory with another revision
1967 1968
1968 1969 Merge the contents of the current working directory and the
1969 1970 requested revision. Files that changed between either parent are
1970 1971 marked as changed for the next commit and a commit must be
1971 1972 performed before any further updates are allowed.
1972 1973
1973 1974 If no revision is specified, the working directory's parent is a
1974 1975 head revision, and the repository contains exactly one other head,
1975 1976 the other head is merged with by default. Otherwise, an explicit
1976 1977 revision to merge with must be provided.
1977 1978 """
1978 1979
1979 1980 if node:
1980 1981 node = _lookup(repo, node, branch)
1981 1982 else:
1982 1983 heads = repo.heads()
1983 1984 if len(heads) > 2:
1984 1985 raise util.Abort(_('repo has %d heads - '
1985 1986 'please merge with an explicit rev') %
1986 1987 len(heads))
1987 1988 if len(heads) == 1:
1988 1989 raise util.Abort(_('there is nothing to merge - '
1989 1990 'use "hg update" instead'))
1990 1991 parent = repo.dirstate.parents()[0]
1991 1992 if parent not in heads:
1992 1993 raise util.Abort(_('working dir not at a head rev - '
1993 1994 'use "hg update" or merge with an explicit rev'))
1994 1995 node = parent == heads[0] and heads[-1] or heads[0]
1995 1996 return hg.merge(repo, node, force=force)
1996 1997
1997 1998 def outgoing(ui, repo, dest=None, **opts):
1998 1999 """show changesets not found in destination
1999 2000
2000 2001 Show changesets not found in the specified destination repository or
2001 2002 the default push location. These are the changesets that would be pushed
2002 2003 if a push was requested.
2003 2004
2004 2005 See pull for valid destination format details.
2005 2006 """
2006 2007 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2007 2008 setremoteconfig(ui, opts)
2008 2009 revs = None
2009 2010 if opts['rev']:
2010 2011 revs = [repo.lookup(rev) for rev in opts['rev']]
2011 2012
2012 2013 other = hg.repository(ui, dest)
2013 2014 o = repo.findoutgoing(other, force=opts['force'])
2014 2015 if not o:
2015 2016 ui.status(_("no changes found\n"))
2016 2017 return
2017 2018 o = repo.changelog.nodesbetween(o, revs)[0]
2018 2019 if opts['newest_first']:
2019 2020 o.reverse()
2020 2021 displayer = show_changeset(ui, repo, opts)
2021 2022 for n in o:
2022 2023 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2023 2024 if opts['no_merges'] and len(parents) == 2:
2024 2025 continue
2025 2026 displayer.show(changenode=n)
2026 2027 if opts['patch']:
2027 2028 prev = (parents and parents[0]) or nullid
2028 2029 patch.diff(repo, prev, n)
2029 2030 ui.write("\n")
2030 2031
2031 2032 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2032 2033 """show the parents of the working dir or revision
2033 2034
2034 2035 Print the working directory's parent revisions.
2035 2036 """
2036 2037 # legacy
2037 2038 if file_ and not rev:
2038 2039 try:
2039 2040 rev = repo.lookup(file_)
2040 2041 file_ = None
2041 2042 except hg.RepoError:
2042 2043 pass
2043 2044 else:
2044 2045 ui.warn(_("'hg parent REV' is deprecated, "
2045 2046 "please use 'hg parents -r REV instead\n"))
2046 2047
2047 2048 if rev:
2048 2049 if file_:
2049 2050 ctx = repo.filectx(file_, changeid=rev)
2050 2051 else:
2051 2052 ctx = repo.changectx(rev)
2052 2053 p = [cp.node() for cp in ctx.parents()]
2053 2054 else:
2054 2055 p = repo.dirstate.parents()
2055 2056
2056 2057 br = None
2057 2058 if branches is not None:
2058 2059 br = repo.branchlookup(p)
2059 2060 displayer = show_changeset(ui, repo, opts)
2060 2061 for n in p:
2061 2062 if n != nullid:
2062 2063 displayer.show(changenode=n, brinfo=br)
2063 2064
2064 2065 def paths(ui, repo, search=None):
2065 2066 """show definition of symbolic path names
2066 2067
2067 2068 Show definition of symbolic path name NAME. If no name is given, show
2068 2069 definition of available names.
2069 2070
2070 2071 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2071 2072 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2072 2073 """
2073 2074 if search:
2074 2075 for name, path in ui.configitems("paths"):
2075 2076 if name == search:
2076 2077 ui.write("%s\n" % path)
2077 2078 return
2078 2079 ui.warn(_("not found!\n"))
2079 2080 return 1
2080 2081 else:
2081 2082 for name, path in ui.configitems("paths"):
2082 2083 ui.write("%s = %s\n" % (name, path))
2083 2084
2084 2085 def postincoming(ui, repo, modheads, optupdate):
2085 2086 if modheads == 0:
2086 2087 return
2087 2088 if optupdate:
2088 2089 if modheads == 1:
2089 2090 return hg.update(repo, repo.changelog.tip()) # update
2090 2091 else:
2091 2092 ui.status(_("not updating, since new heads added\n"))
2092 2093 if modheads > 1:
2093 2094 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2094 2095 else:
2095 2096 ui.status(_("(run 'hg update' to get a working copy)\n"))
2096 2097
2097 2098 def pull(ui, repo, source="default", **opts):
2098 2099 """pull changes from the specified source
2099 2100
2100 2101 Pull changes from a remote repository to a local one.
2101 2102
2102 2103 This finds all changes from the repository at the specified path
2103 2104 or URL and adds them to the local repository. By default, this
2104 2105 does not update the copy of the project in the working directory.
2105 2106
2106 2107 Valid URLs are of the form:
2107 2108
2108 2109 local/filesystem/path
2109 2110 http://[user@]host[:port]/[path]
2110 2111 https://[user@]host[:port]/[path]
2111 2112 ssh://[user@]host[:port]/[path]
2112 2113
2113 2114 Some notes about using SSH with Mercurial:
2114 2115 - SSH requires an accessible shell account on the destination machine
2115 2116 and a copy of hg in the remote path, or one specified with the --remotecmd option.
2116 2117 - path is relative to the remote user's home directory by default.
2117 2118 Use an extra slash at the start of a path to specify an absolute path:
2118 2119 ssh://example.com//tmp/repository
2119 2120 - Mercurial doesn't use its own compression via SSH; the right thing
2120 2121 to do is to configure it in your ~/.ssh/config, e.g.:
2121 2122 Host *.mylocalnetwork.example.com
2122 2123 Compression off
2123 2124 Host *
2124 2125 Compression on
2125 2126 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2126 2127 with the --ssh command line option.
2127 2128 """
2128 2129 source = ui.expandpath(source)
2129 2130 setremoteconfig(ui, opts)
2130 2131
2131 2132 other = hg.repository(ui, source)
2132 2133 ui.status(_('pulling from %s\n') % (source))
2133 2134 revs = None
2134 2135 if opts['rev'] and not other.local():
2135 2136 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2136 2137 elif opts['rev']:
2137 2138 revs = [other.lookup(rev) for rev in opts['rev']]
2138 2139 modheads = repo.pull(other, heads=revs, force=opts['force'])
2139 2140 return postincoming(ui, repo, modheads, opts['update'])
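
# Illustrative sketch, not part of this changeset: the same pull can be driven
# from Python, mirroring the command above -- resolve each wanted revision
# against the source repository, then hand the nodes to repo.pull().  'u',
# 'repo' and 'src' below are assumed to be an existing ui object, the local
# repository and a source path.
#
#   other = hg.repository(u, src)
#   heads = [other.lookup(r) for r in ('tip',)]        # like 'hg pull -r tip'
#   modheads = repo.pull(other, heads=heads, force=False)
#   postincoming(u, repo, modheads, False)             # no working dir update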
2140 2141
2141 2142 def push(ui, repo, dest=None, **opts):
2142 2143 """push changes to the specified destination
2143 2144
2144 2145 Push changes from the local repository to the given destination.
2145 2146
2146 2147 This is the symmetrical operation for pull. It helps to move
2147 2148 changes from the current repository to a different one. If the
2148 2149 destination is local, this is identical to a pull in that directory
2149 2150 from the current one.
2150 2151
2151 2152 By default, push will refuse to run if it detects the result would
2152 2153 increase the number of remote heads. This generally indicates the
2153 2154 client has forgotten to sync and merge before pushing.
2154 2155
2155 2156 Valid URLs are of the form:
2156 2157
2157 2158 local/filesystem/path
2158 2159 ssh://[user@]host[:port]/[path]
2159 2160
2160 2161 Look at the help text for the pull command for important details
2161 2162 about ssh:// URLs.
2162 2163
2163 2164 Pushing to http:// and https:// URLs is possible, too, if this
2164 2165 feature is enabled on the remote Mercurial server.
2165 2166 """
2166 2167 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2167 2168 setremoteconfig(ui, opts)
2168 2169
2169 2170 other = hg.repository(ui, dest)
2170 2171 ui.status('pushing to %s\n' % (dest))
2171 2172 revs = None
2172 2173 if opts['rev']:
2173 2174 revs = [repo.lookup(rev) for rev in opts['rev']]
2174 2175 r = repo.push(other, opts['force'], revs=revs)
2175 2176 return r == 0
2176 2177
2177 2178 def rawcommit(ui, repo, *flist, **rc):
2178 2179 """raw commit interface (DEPRECATED)
2179 2180
2180 2181 (DEPRECATED)
2181 2182 Low-level commit, for use in helper scripts.
2182 2183
2183 2184 This command is not intended to be used by normal users, as it is
2184 2185 primarily useful for importing from other SCMs.
2185 2186
2186 2187 This command is now deprecated and will be removed in a future
2187 2188 release. Please use debugsetparents and commit instead.
2188 2189 """
2189 2190
2190 2191 ui.warn(_("(the rawcommit command is deprecated)\n"))
2191 2192
2192 2193 message = rc['message']
2193 2194 if not message and rc['logfile']:
2194 2195 try:
2195 2196 message = open(rc['logfile']).read()
2196 2197 except IOError:
2197 2198 pass
2198 2199 if not message and not rc['logfile']:
2199 2200 raise util.Abort(_("missing commit message"))
2200 2201
2201 2202 files = relpath(repo, list(flist))
2202 2203 if rc['files']:
2203 2204 files += open(rc['files']).read().splitlines()
2204 2205
2205 2206 rc['parent'] = map(repo.lookup, rc['parent'])
2206 2207
2207 2208 try:
2208 2209 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2209 2210 except ValueError, inst:
2210 2211 raise util.Abort(str(inst))
2211 2212
2212 2213 def recover(ui, repo):
2213 2214 """roll back an interrupted transaction
2214 2215
2215 2216 Recover from an interrupted commit or pull.
2216 2217
2217 2218 This command tries to fix the repository status after an interrupted
2218 2219 operation. It should only be necessary when Mercurial suggests it.
2219 2220 """
2220 2221 if repo.recover():
2221 2222 return hg.verify(repo)
2222 2223 return 1
2223 2224
2224 2225 def remove(ui, repo, *pats, **opts):
2225 2226 """remove the specified files on the next commit
2226 2227
2227 2228 Schedule the indicated files for removal from the repository.
2228 2229
2229 2230 This command schedules the files to be removed at the next commit.
2230 2231 This only removes files from the current branch, not from the
2231 2232 entire project history. If the files still exist in the working
2232 2233 directory, they will be deleted from it. If invoked with --after,
2233 2234 files that have been manually deleted are marked as removed.
2234 2235
2235 2236 Modified files and added files are not removed by default. To
2236 2237 remove them, use the -f/--force option.
2237 2238 """
2238 2239 names = []
2239 2240 if not opts['after'] and not pats:
2240 2241 raise util.Abort(_('no files specified'))
2241 2242 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2242 2243 exact = dict.fromkeys(files)
2243 2244 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2244 2245 modified, added, removed, deleted, unknown = mardu
2245 2246 remove, forget = [], []
2246 2247 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2247 2248 reason = None
2248 2249 if abs not in deleted and opts['after']:
2249 2250 reason = _('is still present')
2250 2251 elif abs in modified and not opts['force']:
2251 2252 reason = _('is modified (use -f to force removal)')
2252 2253 elif abs in added:
2253 2254 if opts['force']:
2254 2255 forget.append(abs)
2255 2256 continue
2256 2257 reason = _('has been marked for add (use -f to force removal)')
2257 2258 elif abs in unknown:
2258 2259 reason = _('is not managed')
2259 2260 elif abs in removed:
2260 2261 continue
2261 2262 if reason:
2262 2263 if exact:
2263 2264 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2264 2265 else:
2265 2266 if ui.verbose or not exact:
2266 2267 ui.status(_('removing %s\n') % rel)
2267 2268 remove.append(abs)
2268 2269 repo.forget(forget)
2269 2270 repo.remove(remove, unlink=not opts['after'])
2270 2271
2271 2272 def rename(ui, repo, *pats, **opts):
2272 2273 """rename files; equivalent of copy + remove
2273 2274
2274 2275 Mark dest as copies of sources; mark sources for deletion. If
2275 2276 dest is a directory, copies are put in that directory. If dest is
2276 2277 a file, there can only be one source.
2277 2278
2278 2279 By default, this command copies the contents of files as they
2279 2280 stand in the working directory. If invoked with --after, the
2280 2281 operation is recorded, but no copying is performed.
2281 2282
2282 2283 This command takes effect in the next commit.
2283 2284
2284 2285 NOTE: This command should be treated as experimental. While it
2285 2286 should properly record renamed files, this information is not yet
2286 2287 fully used by merge, nor fully reported by log.
2287 2288 """
2288 2289 wlock = repo.wlock(0)
2289 2290 errs, copied = docopy(ui, repo, pats, opts, wlock)
2290 2291 names = []
2291 2292 for abs, rel, exact in copied:
2292 2293 if ui.verbose or not exact:
2293 2294 ui.status(_('removing %s\n') % rel)
2294 2295 names.append(abs)
2295 2296 if not opts.get('dry_run'):
2296 2297 repo.remove(names, True, wlock)
2297 2298 return errs
2298 2299
2299 2300 def revert(ui, repo, *pats, **opts):
2300 2301 """revert files or dirs to their states as of some revision
2301 2302
2302 2303 With no revision specified, revert the named files or directories
2303 2304 to the contents they had in the parent of the working directory.
2304 2305 This restores the contents of the affected files to an unmodified
2305 2306 state. If the working directory has two parents, you must
2306 2307 explicitly specify the revision to revert to.
2307 2308
2308 2309 Modified files are saved with a .orig suffix before reverting.
2309 2310 To disable these backups, use --no-backup.
2310 2311
2311 2312 Using the -r option, revert the given files or directories to
2312 2313 their contents as of a specific revision. This can be helpful to "roll
2313 2314 back" some or all of a change that should not have been committed.
2314 2315
2315 2316 Revert modifies the working directory. It does not commit any
2316 2317 changes, or change the parent of the working directory. If you
2317 2318 revert to a revision other than the parent of the working
2318 2319 directory, the reverted files will thus appear modified
2319 2320 afterwards.
2320 2321
2321 2322 If a file has been deleted, it is recreated. If the executable
2322 2323 mode of a file was changed, it is reset.
2323 2324
2324 2325 If names are given, all files matching the names are reverted.
2325 2326
2326 2327 If no arguments are given, all files in the repository are reverted.
2327 2328 """
2328 2329 parent, p2 = repo.dirstate.parents()
2329 2330 if opts['rev']:
2330 2331 node = repo.lookup(opts['rev'])
2331 2332 elif p2 != nullid:
2332 2333 raise util.Abort(_('working dir has two parents; '
2333 2334 'you must specify the revision to revert to'))
2334 2335 else:
2335 2336 node = parent
2336 2337 mf = repo.manifest.read(repo.changelog.read(node)[0])
2337 2338 if node == parent:
2338 2339 pmf = mf
2339 2340 else:
2340 2341 pmf = None
2341 2342
2342 2343 wlock = repo.wlock()
2343 2344
2344 2345 # need all matching names in dirstate and manifest of target rev,
2345 2346 # so have to walk both. do not print errors if files exist in one
2346 2347 # but not other.
2347 2348
2348 2349 names = {}
2349 2350 target_only = {}
2350 2351
2351 2352 # walk dirstate.
2352 2353
2353 2354 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2354 2355 badmatch=mf.has_key):
2355 2356 names[abs] = (rel, exact)
2356 2357 if src == 'b':
2357 2358 target_only[abs] = True
2358 2359
2359 2360 # walk target manifest.
2360 2361
2361 2362 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2362 2363 badmatch=names.has_key):
2363 2364 if abs in names: continue
2364 2365 names[abs] = (rel, exact)
2365 2366 target_only[abs] = True
2366 2367
2367 2368 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2368 2369 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2369 2370
2370 2371 revert = ([], _('reverting %s\n'))
2371 2372 add = ([], _('adding %s\n'))
2372 2373 remove = ([], _('removing %s\n'))
2373 2374 forget = ([], _('forgetting %s\n'))
2374 2375 undelete = ([], _('undeleting %s\n'))
2375 2376 update = {}
2376 2377
2377 2378 disptable = (
2378 2379 # dispatch table:
2379 2380 # file state
2380 2381 # action if in target manifest
2381 2382 # action if not in target manifest
2382 2383 # make backup if in target manifest
2383 2384 # make backup if not in target manifest
2384 2385 (modified, revert, remove, True, True),
2385 2386 (added, revert, forget, True, False),
2386 2387 (removed, undelete, None, False, False),
2387 2388 (deleted, revert, remove, False, False),
2388 2389 (unknown, add, None, True, False),
2389 2390 (target_only, add, None, False, False),
2390 2391 )
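# Reading one row as an example: (modified, revert, remove, True, True) means
# a locally modified file is reverted (with a .orig backup) if it still exists
# in the target manifest, and removed (also backed up) if it does not.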
2391 2392
2392 2393 entries = names.items()
2393 2394 entries.sort()
2394 2395
2395 2396 for abs, (rel, exact) in entries:
2396 2397 mfentry = mf.get(abs)
2397 2398 def handle(xlist, dobackup):
2398 2399 xlist[0].append(abs)
2399 2400 update[abs] = 1
2400 2401 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2401 2402 bakname = "%s.orig" % rel
2402 2403 ui.note(_('saving current version of %s as %s\n') %
2403 2404 (rel, bakname))
2404 2405 if not opts.get('dry_run'):
2405 2406 shutil.copyfile(rel, bakname)
2406 2407 shutil.copymode(rel, bakname)
2407 2408 if ui.verbose or not exact:
2408 2409 ui.status(xlist[1] % rel)
2409 2410 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2410 2411 if abs not in table: continue
2411 2412 # file has changed in dirstate
2412 2413 if mfentry:
2413 2414 handle(hitlist, backuphit)
2414 2415 elif misslist is not None:
2415 2416 handle(misslist, backupmiss)
2416 2417 else:
2417 2418 if exact: ui.warn(_('file not managed: %s\n') % rel)
2418 2419 break
2419 2420 else:
2420 2421 # file has not changed in dirstate
2421 2422 if node == parent:
2422 2423 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2423 2424 continue
2424 2425 if pmf is None:
2425 2426 # only need parent manifest in this unlikely case,
2426 2427 # so do not read by default
2427 2428 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2428 2429 if abs in pmf:
2429 2430 if mfentry:
2430 2431 # if version of file is same in parent and target
2431 2432 # manifests, do nothing
2432 2433 if pmf[abs] != mfentry:
2433 2434 handle(revert, False)
2434 2435 else:
2435 2436 handle(remove, False)
2436 2437
2437 2438 if not opts.get('dry_run'):
2438 2439 repo.dirstate.forget(forget[0])
2439 2440 r = hg.revert(repo, node, update.has_key, wlock)
2440 2441 repo.dirstate.update(add[0], 'a')
2441 2442 repo.dirstate.update(undelete[0], 'n')
2442 2443 repo.dirstate.update(remove[0], 'r')
2443 2444 return r
2444 2445
2445 2446 def rollback(ui, repo):
2446 2447 """roll back the last transaction in this repository
2447 2448
2448 2449 Roll back the last transaction in this repository, restoring the
2449 2450 project to its state prior to the transaction.
2450 2451
2451 2452 Transactions are used to encapsulate the effects of all commands
2452 2453 that create new changesets or propagate existing changesets into a
2453 2454 repository. For example, the following commands are transactional,
2454 2455 and their effects can be rolled back:
2455 2456
2456 2457 commit
2457 2458 import
2458 2459 pull
2459 2460 push (with this repository as destination)
2460 2461 unbundle
2461 2462
2462 2463 This command should be used with care. There is only one level of
2463 2464 rollback, and there is no way to undo a rollback.
2464 2465
2465 2466 This command is not intended for use on public repositories. Once
2466 2467 changes are visible for pull by other users, rolling a transaction
2467 2468 back locally is ineffective (someone else may already have pulled
2468 2469 the changes). Furthermore, a race is possible with readers of the
2469 2470 repository; for example an in-progress pull from the repository
2470 2471 may fail if a rollback is performed.
2471 2472 """
2472 2473 repo.rollback()
2473 2474
2474 2475 def root(ui, repo):
2475 2476 """print the root (top) of the current working dir
2476 2477
2477 2478 Print the root directory of the current repository.
2478 2479 """
2479 2480 ui.write(repo.root + "\n")
2480 2481
2481 2482 def serve(ui, repo, **opts):
2482 2483 """export the repository via HTTP
2483 2484
2484 2485 Start a local HTTP repository browser and pull server.
2485 2486
2486 2487 By default, the server logs accesses to stdout and errors to
2487 2488 stderr. Use the "-A" and "-E" options to log to files.
2488 2489 """
2489 2490
2490 2491 if opts["stdio"]:
2491 2492 if repo is None:
2492 2493 raise hg.RepoError(_('no repo found'))
2493 2494 s = sshserver.sshserver(ui, repo)
2494 2495 s.serve_forever()
2495 2496
2496 2497 optlist = ("name templates style address port ipv6"
2497 2498 " accesslog errorlog webdir_conf")
2498 2499 for o in optlist.split():
2499 2500 if opts[o]:
2500 2501 ui.setconfig("web", o, opts[o])
2501 2502
2502 2503 if repo is None and not ui.config("web", "webdir_conf"):
2503 2504 raise hg.RepoError(_('no repo found'))
2504 2505
2505 2506 if opts['daemon'] and not opts['daemon_pipefds']:
2506 2507 rfd, wfd = os.pipe()
2507 2508 args = sys.argv[:]
2508 2509 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2509 2510 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2510 2511 args[0], args)
2511 2512 os.close(wfd)
2512 2513 os.read(rfd, 1)
2513 2514 os._exit(0)
2514 2515
2515 2516 try:
2516 2517 httpd = hgweb.server.create_server(ui, repo)
2517 2518 except socket.error, inst:
2518 2519 raise util.Abort(_('cannot start server: ') + inst.args[1])
2519 2520
2520 2521 if ui.verbose:
2521 2522 addr, port = httpd.socket.getsockname()
2522 2523 if addr == '0.0.0.0':
2523 2524 addr = socket.gethostname()
2524 2525 else:
2525 2526 try:
2526 2527 addr = socket.gethostbyaddr(addr)[0]
2527 2528 except socket.error:
2528 2529 pass
2529 2530 if port != 80:
2530 2531 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2531 2532 else:
2532 2533 ui.status(_('listening at http://%s/\n') % addr)
2533 2534
2534 2535 if opts['pid_file']:
2535 2536 fp = open(opts['pid_file'], 'w')
2536 2537 fp.write(str(os.getpid()) + '\n')
2537 2538 fp.close()
2538 2539
2539 2540 if opts['daemon_pipefds']:
2540 2541 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2541 2542 os.close(rfd)
2542 2543 os.write(wfd, 'y')
2543 2544 os.close(wfd)
2544 2545 sys.stdout.flush()
2545 2546 sys.stderr.flush()
2546 2547 fd = os.open(util.nulldev, os.O_RDWR)
2547 2548 if fd != 0: os.dup2(fd, 0)
2548 2549 if fd != 1: os.dup2(fd, 1)
2549 2550 if fd != 2: os.dup2(fd, 2)
2550 2551 if fd not in (0, 1, 2): os.close(fd)
2551 2552
2552 2553 httpd.serve_forever()
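
# Note on the --daemon handshake above (descriptive only, not part of this
# changeset): the parent process creates a pipe, re-runs the same command with
# --daemon-pipefds=rfd,wfd and blocks on os.read(rfd, 1); the child builds the
# HTTP server, writes the pid file, sends a single 'y' byte down wfd to signal
# readiness, redirects stdio to the null device and only then serves forever,
# at which point the parent's read completes and it exits.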
2553 2554
2554 2555 def status(ui, repo, *pats, **opts):
2555 2556 """show changed files in the working directory
2556 2557
2557 2558 Show status of files in the repository. If names are given, only
2558 2559 files that match are shown. Files that are clean or ignored, are
2559 2560 not listed unless -c (clean), -i (ignored) or -A is given.
2560 2561
2561 2562 The codes used to show the status of files are:
2562 2563 M = modified
2563 2564 A = added
2564 2565 R = removed
2565 2566 C = clean
2566 2567 ! = deleted, but still tracked
2567 2568 ? = not tracked
2568 2569 I = ignored (not shown by default)
2569 2570 = the previously added file was copied from here
2570 2571 """
2571 2572
2572 2573 all = opts['all']
2573 2574
2574 2575 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2575 2576 cwd = (pats and repo.getcwd()) or ''
2576 2577 modified, added, removed, deleted, unknown, ignored, clean = [
2577 2578 [util.pathto(cwd, x) for x in n]
2578 2579 for n in repo.status(files=files, match=matchfn,
2579 2580 list_ignored=all or opts['ignored'],
2580 2581 list_clean=all or opts['clean'])]
2581 2582
2582 2583 changetypes = (('modified', 'M', modified),
2583 2584 ('added', 'A', added),
2584 2585 ('removed', 'R', removed),
2585 2586 ('deleted', '!', deleted),
2586 2587 ('unknown', '?', unknown),
2587 2588 ('ignored', 'I', ignored))
2588 2589
2589 2590 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2590 2591
2591 2592 end = opts['print0'] and '\0' or '\n'
2592 2593
2593 2594 for opt, char, changes in ([ct for ct in explicit_changetypes
2594 2595 if all or opts[ct[0]]]
2595 2596 or changetypes):
2596 2597 if opts['no_status']:
2597 2598 format = "%%s%s" % end
2598 2599 else:
2599 2600 format = "%s %%s%s" % (char, end)
2600 2601
2601 2602 for f in changes:
2602 2603 ui.write(format % f)
2603 2604 if ((all or opts.get('copies')) and not opts.get('no_status')
2604 2605 and opt == 'added' and repo.dirstate.copies.has_key(f)):
2605 2606 ui.write(' %s%s' % (repo.dirstate.copies[f], end))
2606 2607
2607 2608 def tag(ui, repo, name, rev_=None, **opts):
2608 2609 """add a tag for the current tip or a given revision
2609 2610
2610 2611 Name a particular revision using <name>.
2611 2612
2612 2613 Tags are used to name particular revisions of the repository and are
2613 2614 very useful to compare different revisions, to go back to significant
2614 2615 earlier versions or to mark branch points as releases, etc.
2615 2616
2616 2617 If no revision is given, the parent of the working directory is used.
2617 2618
2618 2619 To facilitate version control, distribution, and merging of tags,
2619 2620 they are stored as a file named ".hgtags" which is managed
2620 2621 similarly to other project files and can be hand-edited if
2621 2622 necessary. The file '.hg/localtags' is used for local tags (not
2622 2623 shared among repositories).
2623 2624 """
2624 2625 if name in ['tip', '.']:
2625 2626 raise util.Abort(_("the name '%s' is reserved") % name)
2626 2627 if rev_ is not None:
2627 2628 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2628 2629 "please use 'hg tag [-r REV] NAME' instead\n"))
2629 2630 if opts['rev']:
2630 2631 raise util.Abort(_("use only one form to specify the revision"))
2631 2632 if opts['rev']:
2632 2633 rev_ = opts['rev']
2633 2634 if rev_:
2634 2635 r = hex(repo.lookup(rev_))
2635 2636 else:
2636 2637 p1, p2 = repo.dirstate.parents()
2637 2638 if p1 == nullid:
2638 2639 raise util.Abort(_('no revision to tag'))
2639 2640 if p2 != nullid:
2640 2641 raise util.Abort(_('outstanding uncommitted merges'))
2641 2642 r = hex(p1)
2642 2643
2643 2644 repo.tag(name, r, opts['local'], opts['message'], opts['user'],
2644 2645 opts['date'])
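
# Illustrative usage, not part of this changeset; typical invocations of the
# command above would be:
#
#   hg tag -r 2100 v0.9      # tag a specific revision
#   hg tag -l snapshot       # local tag, stored only in .hg/localtags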
2645 2646
2646 2647 def tags(ui, repo):
2647 2648 """list repository tags
2648 2649
2649 2650 List the repository tags.
2650 2651
2651 2652 This lists both regular and local tags.
2652 2653 """
2653 2654
2654 2655 l = repo.tagslist()
2655 2656 l.reverse()
2656 2657 for t, n in l:
2657 2658 try:
2658 2659 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2659 2660 except KeyError:
2660 2661 r = " ?:?"
2661 2662 if ui.quiet:
2662 2663 ui.write("%s\n" % t)
2663 2664 else:
2664 2665 ui.write("%-30s %s\n" % (t, r))
2665 2666
2666 2667 def tip(ui, repo, **opts):
2667 2668 """show the tip revision
2668 2669
2669 2670 Show the tip revision.
2670 2671 """
2671 2672 n = repo.changelog.tip()
2672 2673 br = None
2673 2674 if opts['branches']:
2674 2675 br = repo.branchlookup([n])
2675 2676 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2676 2677 if opts['patch']:
2677 2678 patch.diff(repo, repo.changelog.parents(n)[0], n)
2678 2679
2679 2680 def unbundle(ui, repo, fname, **opts):
2680 2681 """apply a changegroup file
2681 2682
2682 2683 Apply a compressed changegroup file generated by the bundle
2683 2684 command.
2684 2685 """
2685 2686 f = urllib.urlopen(fname)
2686 2687
2687 2688 header = f.read(6)
2688 2689 if not header.startswith("HG"):
2689 2690 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2690 2691 elif not header.startswith("HG10"):
2691 2692 raise util.Abort(_("%s: unknown bundle version") % fname)
2692 2693 elif header == "HG10BZ":
2693 2694 def generator(f):
2694 2695 zd = bz2.BZ2Decompressor()
2695 2696 zd.decompress("BZ") # "BZ" read with the header belongs to the bzip2 stream
2696 2697 for chunk in f:
2697 2698 yield zd.decompress(chunk)
2698 2699 elif header == "HG10UN":
2699 2700 def generator(f):
2700 2701 for chunk in f:
2701 2702 yield chunk
2702 2703 else:
2703 2704 raise util.Abort(_("%s: unknown bundle compression type")
2704 2705 % fname)
2705 2706 gen = generator(util.filechunkiter(f, 4096))
2706 2707 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2707 2708 'bundle:' + fname)
2708 2709 return postincoming(ui, repo, modheads, opts['update'])
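
# Illustrative check, not part of this changeset: a bundle written by
# 'hg bundle changes.hg ../other' starts with one of the 6-byte headers
# handled above:
#
#   header = open('changes.hg', 'rb').read(6)
#   assert header in ('HG10BZ', 'HG10UN')    # bzip2-compressed vs. uncompressed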
2709 2710
2710 2711 def undo(ui, repo):
2711 2712 """undo the last commit or pull (DEPRECATED)
2712 2713
2713 2714 (DEPRECATED)
2714 2715 This command is now deprecated and will be removed in a future
2715 2716 release. Please use the rollback command instead. For usage
2716 2717 instructions, see the rollback command.
2717 2718 """
2718 2719 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2719 2720 repo.rollback()
2720 2721
2721 2722 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2722 2723 branch=None):
2723 2724 """update or merge working directory
2724 2725
2725 2726 Update the working directory to the specified revision.
2726 2727
2727 2728 If there are no outstanding changes in the working directory and
2728 2729 there is a linear relationship between the current version and the
2729 2730 requested version, the result is the requested version.
2730 2731
2731 2732 To merge the working directory with another revision, use the
2732 2733 merge command.
2733 2734
2734 2735 By default, update will refuse to run if doing so would require
2735 2736 merging or discarding local changes.
2736 2737 """
2737 2738 node = _lookup(repo, node, branch)
2738 2739 if merge:
2739 2740 ui.warn(_('(the -m/--merge option is deprecated; '
2740 2741 'use the merge command instead)\n'))
2741 2742 return hg.merge(repo, node, force=force)
2742 2743 elif clean:
2743 2744 return hg.clean(repo, node)
2744 2745 else:
2745 2746 return hg.update(repo, node)
2746 2747
2747 2748 def _lookup(repo, node, branch=None):
2748 2749 if branch:
2749 2750 br = repo.branchlookup(branch=branch)
2750 2751 found = []
2751 2752 for x in br:
2752 2753 if branch in br[x]:
2753 2754 found.append(x)
2754 2755 if len(found) > 1:
2755 2756 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2756 2757 for x in found:
2757 2758 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2758 2759 raise util.Abort("")
2759 2760 if len(found) == 1:
2760 2761 node = found[0]
2761 2762 repo.ui.warn(_("Using head %s for branch %s\n")
2762 2763 % (short(node), branch))
2763 2764 else:
2764 2765 raise util.Abort(_("branch %s not found\n") % (branch))
2765 2766 else:
2766 2767 node = node and repo.lookup(node) or repo.changelog.tip()
2767 2768 return node
2768 2769
2769 2770 def verify(ui, repo):
2770 2771 """verify the integrity of the repository
2771 2772
2772 2773 Verify the integrity of the current repository.
2773 2774
2774 2775 This will perform an extensive check of the repository's
2775 2776 integrity, validating the hashes and checksums of each entry in
2776 2777 the changelog, manifest, and tracked files, as well as the
2777 2778 integrity of their crosslinks and indices.
2778 2779 """
2779 2780 return hg.verify(repo)
2780 2781
2781 2782 # Command options and aliases are listed here, alphabetically
2782 2783
2783 2784 table = {
2784 2785 "^add":
2785 2786 (add,
2786 2787 [('I', 'include', [], _('include names matching the given patterns')),
2787 2788 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2788 2789 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2789 2790 _('hg add [OPTION]... [FILE]...')),
2790 2791 "debugaddremove|addremove":
2791 2792 (addremove,
2792 2793 [('I', 'include', [], _('include names matching the given patterns')),
2793 2794 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2794 2795 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2795 2796 _('hg addremove [OPTION]... [FILE]...')),
2796 2797 "^annotate":
2797 2798 (annotate,
2798 2799 [('r', 'rev', '', _('annotate the specified revision')),
2799 2800 ('a', 'text', None, _('treat all files as text')),
2800 2801 ('u', 'user', None, _('list the author')),
2801 2802 ('d', 'date', None, _('list the date')),
2802 2803 ('n', 'number', None, _('list the revision number (default)')),
2803 2804 ('c', 'changeset', None, _('list the changeset')),
2804 2805 ('I', 'include', [], _('include names matching the given patterns')),
2805 2806 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2806 2807 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2807 2808 "archive":
2808 2809 (archive,
2809 2810 [('', 'no-decode', None, _('do not pass files through decoders')),
2810 2811 ('p', 'prefix', '', _('directory prefix for files in archive')),
2811 2812 ('r', 'rev', '', _('revision to distribute')),
2812 2813 ('t', 'type', '', _('type of distribution to create')),
2813 2814 ('I', 'include', [], _('include names matching the given patterns')),
2814 2815 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2815 2816 _('hg archive [OPTION]... DEST')),
2816 2817 "backout":
2817 2818 (backout,
2818 2819 [('', 'merge', None,
2819 2820 _('merge with old dirstate parent after backout')),
2820 2821 ('m', 'message', '', _('use <text> as commit message')),
2821 2822 ('l', 'logfile', '', _('read commit message from <file>')),
2822 2823 ('d', 'date', '', _('record datecode as commit date')),
2823 2824 ('', 'parent', '', _('parent to choose when backing out merge')),
2824 2825 ('u', 'user', '', _('record user as committer')),
2825 2826 ('I', 'include', [], _('include names matching the given patterns')),
2826 2827 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2827 2828 _('hg backout [OPTION]... REV')),
2828 2829 "bundle":
2829 2830 (bundle,
2830 2831 [('f', 'force', None,
2831 2832 _('run even when remote repository is unrelated'))],
2832 2833 _('hg bundle FILE DEST')),
2833 2834 "cat":
2834 2835 (cat,
2835 2836 [('o', 'output', '', _('print output to file with formatted name')),
2836 2837 ('r', 'rev', '', _('print the given revision')),
2837 2838 ('I', 'include', [], _('include names matching the given patterns')),
2838 2839 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2839 2840 _('hg cat [OPTION]... FILE...')),
2840 2841 "^clone":
2841 2842 (clone,
2842 2843 [('U', 'noupdate', None, _('do not update the new working directory')),
2843 2844 ('r', 'rev', [],
2844 2845 _('a changeset you would like to have after cloning')),
2845 2846 ('', 'pull', None, _('use pull protocol to copy metadata')),
2846 2847 ('', 'uncompressed', None,
2847 2848 _('use uncompressed transfer (fast over LAN)')),
2848 2849 ('e', 'ssh', '', _('specify ssh command to use')),
2849 2850 ('', 'remotecmd', '',
2850 2851 _('specify hg command to run on the remote side'))],
2851 2852 _('hg clone [OPTION]... SOURCE [DEST]')),
2852 2853 "^commit|ci":
2853 2854 (commit,
2854 2855 [('A', 'addremove', None,
2855 2856 _('mark new/missing files as added/removed before committing')),
2856 2857 ('m', 'message', '', _('use <text> as commit message')),
2857 2858 ('l', 'logfile', '', _('read the commit message from <file>')),
2858 2859 ('d', 'date', '', _('record datecode as commit date')),
2859 2860 ('u', 'user', '', _('record user as committer')),
2860 2861 ('I', 'include', [], _('include names matching the given patterns')),
2861 2862 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2862 2863 _('hg commit [OPTION]... [FILE]...')),
2863 2864 "copy|cp":
2864 2865 (copy,
2865 2866 [('A', 'after', None, _('record a copy that has already occurred')),
2866 2867 ('f', 'force', None,
2867 2868 _('forcibly copy over an existing managed file')),
2868 2869 ('I', 'include', [], _('include names matching the given patterns')),
2869 2870 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2870 2871 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2871 2872 _('hg copy [OPTION]... [SOURCE]... DEST')),
2872 2873 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2873 2874 "debugcomplete":
2874 2875 (debugcomplete,
2875 2876 [('o', 'options', None, _('show the command options'))],
2876 2877 _('debugcomplete [-o] CMD')),
2877 2878 "debugrebuildstate":
2878 2879 (debugrebuildstate,
2879 2880 [('r', 'rev', '', _('revision to rebuild to'))],
2880 2881 _('debugrebuildstate [-r REV] [REV]')),
2881 2882 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2882 2883 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
2883 2884 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2884 2885 "debugstate": (debugstate, [], _('debugstate')),
2885 2886 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2886 2887 "debugindex": (debugindex, [], _('debugindex FILE')),
2887 2888 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2888 2889 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2889 2890 "debugwalk":
2890 2891 (debugwalk,
2891 2892 [('I', 'include', [], _('include names matching the given patterns')),
2892 2893 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2893 2894 _('debugwalk [OPTION]... [FILE]...')),
2894 2895 "^diff":
2895 2896 (diff,
2896 2897 [('r', 'rev', [], _('revision')),
2897 2898 ('a', 'text', None, _('treat all files as text')),
2898 2899 ('p', 'show-function', None,
2899 2900 _('show which function each change is in')),
2900 2901 ('g', 'git', None, _('use git extended diff format')),
2901 2902 ('w', 'ignore-all-space', None,
2902 2903 _('ignore white space when comparing lines')),
2903 2904 ('b', 'ignore-space-change', None,
2904 2905 _('ignore changes in the amount of white space')),
2905 2906 ('B', 'ignore-blank-lines', None,
2906 2907 _('ignore changes whose lines are all blank')),
2907 2908 ('I', 'include', [], _('include names matching the given patterns')),
2908 2909 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2909 2910 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2910 2911 "^export":
2911 2912 (export,
2912 2913 [('o', 'output', '', _('print output to file with formatted name')),
2913 2914 ('a', 'text', None, _('treat all files as text')),
2914 2915 ('', 'switch-parent', None, _('diff against the second parent'))],
2915 2916 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2916 2917 "debugforget|forget":
2917 2918 (forget,
2918 2919 [('I', 'include', [], _('include names matching the given patterns')),
2919 2920 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2920 2921 _('hg forget [OPTION]... FILE...')),
2921 2922 "grep":
2922 2923 (grep,
2923 2924 [('0', 'print0', None, _('end fields with NUL')),
2924 2925 ('', 'all', None, _('print all revisions that match')),
2925 2926 ('f', 'follow', None,
2926 2927 _('follow changeset history, or file history across copies and renames')),
2927 2928 ('i', 'ignore-case', None, _('ignore case when matching')),
2928 2929 ('l', 'files-with-matches', None,
2929 2930 _('print only filenames and revs that match')),
2930 2931 ('n', 'line-number', None, _('print matching line numbers')),
2931 2932 ('r', 'rev', [], _('search in given revision range')),
2932 2933 ('u', 'user', None, _('print user who committed change')),
2933 2934 ('I', 'include', [], _('include names matching the given patterns')),
2934 2935 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2935 2936 _('hg grep [OPTION]... PATTERN [FILE]...')),
2936 2937 "heads":
2937 2938 (heads,
2938 2939 [('b', 'branches', None, _('show branches')),
2939 2940 ('', 'style', '', _('display using template map file')),
2940 2941 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2941 2942 ('', 'template', '', _('display with template'))],
2942 2943 _('hg heads [-b] [-r <rev>]')),
2943 2944 "help": (help_, [], _('hg help [COMMAND]')),
2944 2945 "identify|id": (identify, [], _('hg identify')),
2945 2946 "import|patch":
2946 2947 (import_,
2947 2948 [('p', 'strip', 1,
2948 2949 _('directory strip option for patch. This has the same\n'
2949 2950 'meaning as the corresponding patch option')),
2950 2951 ('m', 'message', '', _('use <text> as commit message')),
2951 2952 ('b', 'base', '', _('base path')),
2952 2953 ('f', 'force', None,
2953 2954 _('skip check for outstanding uncommitted changes'))],
2954 2955 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
2955 2956 "incoming|in": (incoming,
2956 2957 [('M', 'no-merges', None, _('do not show merges')),
2957 2958 ('f', 'force', None,
2958 2959 _('run even when remote repository is unrelated')),
2959 2960 ('', 'style', '', _('display using template map file')),
2960 2961 ('n', 'newest-first', None, _('show newest record first')),
2961 2962 ('', 'bundle', '', _('file to store the bundles into')),
2962 2963 ('p', 'patch', None, _('show patch')),
2963 2964 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2964 2965 ('', 'template', '', _('display with template')),
2965 2966 ('e', 'ssh', '', _('specify ssh command to use')),
2966 2967 ('', 'remotecmd', '',
2967 2968 _('specify hg command to run on the remote side'))],
2968 2969 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2969 2970 ' [--bundle FILENAME] [SOURCE]')),
2970 2971 "^init":
2971 2972 (init,
2972 2973 [('e', 'ssh', '', _('specify ssh command to use')),
2973 2974 ('', 'remotecmd', '',
2974 2975 _('specify hg command to run on the remote side'))],
2975 2976 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2976 2977 "locate":
2977 2978 (locate,
2978 2979 [('r', 'rev', '', _('search the repository as it stood at rev')),
2979 2980 ('0', 'print0', None,
2980 2981 _('end filenames with NUL, for use with xargs')),
2981 2982 ('f', 'fullpath', None,
2982 2983 _('print complete paths from the filesystem root')),
2983 2984 ('I', 'include', [], _('include names matching the given patterns')),
2984 2985 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2985 2986 _('hg locate [OPTION]... [PATTERN]...')),
2986 2987 "^log|history":
2987 2988 (log,
2988 2989 [('b', 'branches', None, _('show branches')),
2989 2990 ('f', 'follow', None,
2990 2991 _('follow changeset history, or file history across copies and renames')),
2991 2992 ('', 'follow-first', None,
2992 2993 _('only follow the first parent of merge changesets')),
2993 2994 ('k', 'keyword', [], _('search for a keyword')),
2994 2995 ('l', 'limit', '', _('limit number of changes displayed')),
2995 2996 ('r', 'rev', [], _('show the specified revision or range')),
2996 2997 ('M', 'no-merges', None, _('do not show merges')),
2997 2998 ('', 'style', '', _('display using template map file')),
2998 2999 ('m', 'only-merges', None, _('show only merges')),
2999 3000 ('p', 'patch', None, _('show patch')),
3000 3001 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3001 3002 ('', 'template', '', _('display with template')),
3002 3003 ('I', 'include', [], _('include names matching the given patterns')),
3003 3004 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3004 3005 _('hg log [OPTION]... [FILE]')),
3005 3006 "manifest": (manifest, [], _('hg manifest [REV]')),
3006 3007 "merge":
3007 3008 (merge,
3008 3009 [('b', 'branch', '', _('merge with head of a specific branch')),
3009 3010 ('f', 'force', None, _('force a merge with outstanding changes'))],
3010 3011 _('hg merge [-b TAG] [-f] [REV]')),
3011 3012 "outgoing|out": (outgoing,
3012 3013 [('M', 'no-merges', None, _('do not show merges')),
3013 3014 ('f', 'force', None,
3014 3015 _('run even when remote repository is unrelated')),
3015 3016 ('p', 'patch', None, _('show patch')),
3016 3017 ('', 'style', '', _('display using template map file')),
3017 3018 ('r', 'rev', [], _('a specific revision you would like to push')),
3018 3019 ('n', 'newest-first', None, _('show newest record first')),
3019 3020 ('', 'template', '', _('display with template')),
3020 3021 ('e', 'ssh', '', _('specify ssh command to use')),
3021 3022 ('', 'remotecmd', '',
3022 3023 _('specify hg command to run on the remote side'))],
3023 3024 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3024 3025 "^parents":
3025 3026 (parents,
3026 3027 [('b', 'branches', None, _('show branches')),
3027 3028 ('r', 'rev', '', _('show parents from the specified rev')),
3028 3029 ('', 'style', '', _('display using template map file')),
3029 3030 ('', 'template', '', _('display with template'))],
3030 3031 _('hg parents [-b] [-r REV] [FILE]')),
3031 3032 "paths": (paths, [], _('hg paths [NAME]')),
3032 3033 "^pull":
3033 3034 (pull,
3034 3035 [('u', 'update', None,
3035 3036 _('update the working directory to tip after pull')),
3036 3037 ('e', 'ssh', '', _('specify ssh command to use')),
3037 3038 ('f', 'force', None,
3038 3039 _('run even when remote repository is unrelated')),
3039 3040 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3040 3041 ('', 'remotecmd', '',
3041 3042 _('specify hg command to run on the remote side'))],
3042 3043 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3043 3044 "^push":
3044 3045 (push,
3045 3046 [('f', 'force', None, _('force push')),
3046 3047 ('e', 'ssh', '', _('specify ssh command to use')),
3047 3048 ('r', 'rev', [], _('a specific revision you would like to push')),
3048 3049 ('', 'remotecmd', '',
3049 3050 _('specify hg command to run on the remote side'))],
3050 3051 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3051 3052 "debugrawcommit|rawcommit":
3052 3053 (rawcommit,
3053 3054 [('p', 'parent', [], _('parent')),
3054 3055 ('d', 'date', '', _('date code')),
3055 3056 ('u', 'user', '', _('user')),
3056 3057 ('F', 'files', '', _('file list')),
3057 3058 ('m', 'message', '', _('commit message')),
3058 3059 ('l', 'logfile', '', _('commit message file'))],
3059 3060 _('hg debugrawcommit [OPTION]... [FILE]...')),
3060 3061 "recover": (recover, [], _('hg recover')),
3061 3062 "^remove|rm":
3062 3063 (remove,
3063 3064 [('A', 'after', None, _('record remove that has already occurred')),
3064 3065 ('f', 'force', None, _('remove file even if modified')),
3065 3066 ('I', 'include', [], _('include names matching the given patterns')),
3066 3067 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3067 3068 _('hg remove [OPTION]... FILE...')),
3068 3069 "rename|mv":
3069 3070 (rename,
3070 3071 [('A', 'after', None, _('record a rename that has already occurred')),
3071 3072 ('f', 'force', None,
3072 3073 _('forcibly copy over an existing managed file')),
3073 3074 ('I', 'include', [], _('include names matching the given patterns')),
3074 3075 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3075 3076 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3076 3077 _('hg rename [OPTION]... SOURCE... DEST')),
3077 3078 "^revert":
3078 3079 (revert,
3079 3080 [('r', 'rev', '', _('revision to revert to')),
3080 3081 ('', 'no-backup', None, _('do not save backup copies of files')),
3081 3082 ('I', 'include', [], _('include names matching given patterns')),
3082 3083 ('X', 'exclude', [], _('exclude names matching given patterns')),
3083 3084 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3084 3085 _('hg revert [-r REV] [NAME]...')),
3085 3086 "rollback": (rollback, [], _('hg rollback')),
3086 3087 "root": (root, [], _('hg root')),
3087 3088 "^serve":
3088 3089 (serve,
3089 3090 [('A', 'accesslog', '', _('name of access log file to write to')),
3090 3091 ('d', 'daemon', None, _('run server in background')),
3091 3092 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3092 3093 ('E', 'errorlog', '', _('name of error log file to write to')),
3093 3094 ('p', 'port', 0, _('port to use (default: 8000)')),
3094 3095 ('a', 'address', '', _('address to use')),
3095 3096 ('n', 'name', '',
3096 3097 _('name to show in web pages (default: working dir)')),
3097 3098 ('', 'webdir-conf', '', _('name of the webdir config file'
3098 3099 ' (serve more than one repo)')),
3099 3100 ('', 'pid-file', '', _('name of file to write process ID to')),
3100 3101 ('', 'stdio', None, _('for remote clients')),
3101 3102 ('t', 'templates', '', _('web templates to use')),
3102 3103 ('', 'style', '', _('template style to use')),
3103 3104 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3104 3105 _('hg serve [OPTION]...')),
3105 3106 "^status|st":
3106 3107 (status,
3107 3108 [('A', 'all', None, _('show status of all files')),
3108 3109 ('m', 'modified', None, _('show only modified files')),
3109 3110 ('a', 'added', None, _('show only added files')),
3110 3111 ('r', 'removed', None, _('show only removed files')),
3111 3112 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3112 3113 ('c', 'clean', None, _('show only files without changes')),
3113 3114 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3114 3115 ('i', 'ignored', None, _('show ignored files')),
3115 3116 ('n', 'no-status', None, _('hide status prefix')),
3116 3117 ('C', 'copies', None, _('show source of copied files')),
3117 3118 ('0', 'print0', None,
3118 3119 _('end filenames with NUL, for use with xargs')),
3119 3120 ('I', 'include', [], _('include names matching the given patterns')),
3120 3121 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3121 3122 _('hg status [OPTION]... [FILE]...')),
3122 3123 "tag":
3123 3124 (tag,
3124 3125 [('l', 'local', None, _('make the tag local')),
3125 3126 ('m', 'message', '', _('message for tag commit log entry')),
3126 3127 ('d', 'date', '', _('record datecode as commit date')),
3127 3128 ('u', 'user', '', _('record user as committer')),
3128 3129 ('r', 'rev', '', _('revision to tag'))],
3129 3130 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3130 3131 "tags": (tags, [], _('hg tags')),
3131 3132 "tip":
3132 3133 (tip,
3133 3134 [('b', 'branches', None, _('show branches')),
3134 3135 ('', 'style', '', _('display using template map file')),
3135 3136 ('p', 'patch', None, _('show patch')),
3136 3137 ('', 'template', '', _('display with template'))],
3137 3138 _('hg tip [-b] [-p]')),
3138 3139 "unbundle":
3139 3140 (unbundle,
3140 3141 [('u', 'update', None,
3141 3142 _('update the working directory to tip after unbundle'))],
3142 3143 _('hg unbundle [-u] FILE')),
3143 3144 "debugundo|undo": (undo, [], _('hg undo')),
3144 3145 "^update|up|checkout|co":
3145 3146 (update,
3146 3147 [('b', 'branch', '', _('checkout the head of a specific branch')),
3147 3148 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3148 3149 ('C', 'clean', None, _('overwrite locally modified files')),
3149 3150 ('f', 'force', None, _('force a merge with outstanding changes'))],
3150 3151 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3151 3152 "verify": (verify, [], _('hg verify')),
3152 3153 "version": (show_version, [], _('hg version')),
3153 3154 }
3154 3155
3155 3156 globalopts = [
3156 3157 ('R', 'repository', '',
3157 3158 _('repository root directory or symbolic path name')),
3158 3159 ('', 'cwd', '', _('change working directory')),
3159 3160 ('y', 'noninteractive', None,
3160 3161 _('do not prompt, assume \'yes\' for any required answers')),
3161 3162 ('q', 'quiet', None, _('suppress output')),
3162 3163 ('v', 'verbose', None, _('enable additional output')),
3163 3164 ('', 'config', [], _('set/override config option')),
3164 3165 ('', 'debug', None, _('enable debugging output')),
3165 3166 ('', 'debugger', None, _('start debugger')),
3166 3167 ('', 'lsprof', None, _('print improved command execution profile')),
3167 3168 ('', 'traceback', None, _('print traceback on exception')),
3168 3169 ('', 'time', None, _('time how long the command takes')),
3169 3170 ('', 'profile', None, _('print command execution profile')),
3170 3171 ('', 'version', None, _('output version information and exit')),
3171 3172 ('h', 'help', None, _('display help and exit')),
3172 3173 ]
3173 3174
3174 3175 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3175 3176 " debugindex debugindexdot")
3176 3177 optionalrepo = ("paths serve debugconfig")
3177 3178
3178 3179 def findpossible(cmd):
3179 3180 """
3180 3181 Return cmd -> (aliases, command table entry)
3181 3182 for each matching command.
3182 3183 Return debug commands (or their aliases) only if no normal command matches.
3183 3184 """
3184 3185 choice = {}
3185 3186 debugchoice = {}
3186 3187 for e in table.keys():
3187 3188 aliases = e.lstrip("^").split("|")
3188 3189 found = None
3189 3190 if cmd in aliases:
3190 3191 found = cmd
3191 3192 else:
3192 3193 for a in aliases:
3193 3194 if a.startswith(cmd):
3194 3195 found = a
3195 3196 break
3196 3197 if found is not None:
3197 3198 if aliases[0].startswith("debug"):
3198 3199 debugchoice[found] = (aliases, table[e])
3199 3200 else:
3200 3201 choice[found] = (aliases, table[e])
3201 3202
3202 3203 if not choice and debugchoice:
3203 3204 choice = debugchoice
3204 3205
3205 3206 return choice
3206 3207
3207 3208 def findcmd(cmd):
3208 3209 """Return (aliases, command table entry) for command string."""
3209 3210 choice = findpossible(cmd)
3210 3211
3211 3212 if choice.has_key(cmd):
3212 3213 return choice[cmd]
3213 3214
3214 3215 if len(choice) > 1:
3215 3216 clist = choice.keys()
3216 3217 clist.sort()
3217 3218 raise AmbiguousCommand(cmd, clist)
3218 3219
3219 3220 if choice:
3220 3221 return choice.values()[0]
3221 3222
3222 3223 raise UnknownCommand(cmd)
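
# Illustrative sketch, not part of this changeset, of how the prefix matching
# above resolves abbreviated command names (calls are hypothetical, run against
# the table defined earlier in this module):
#
#   aliases, entry = findcmd('stat')   # unique prefix -> aliases[0] == 'status'
#   findcmd('re')                      # raises AmbiguousCommand: recover,
#                                      # remove, rename and revert all match
#   findcmd('debugd')                  # debug names only win when no regular
#                                      # command shares the prefix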
3223 3224
3224 3225 def catchterm(*args):
3225 3226 raise util.SignalInterrupt
3226 3227
3227 3228 def run():
3228 3229 sys.exit(dispatch(sys.argv[1:]))
3229 3230
3230 3231 class ParseError(Exception):
3231 3232 """Exception raised on errors in parsing the command line."""
3232 3233
3233 3234 def parse(ui, args):
3234 3235 options = {}
3235 3236 cmdoptions = {}
3236 3237
3237 3238 try:
3238 3239 args = fancyopts.fancyopts(args, globalopts, options)
3239 3240 except fancyopts.getopt.GetoptError, inst:
3240 3241 raise ParseError(None, inst)
3241 3242
3242 3243 if args:
3243 3244 cmd, args = args[0], args[1:]
3244 3245 aliases, i = findcmd(cmd)
3245 3246 cmd = aliases[0]
3246 3247 defaults = ui.config("defaults", cmd)
3247 3248 if defaults:
3248 3249 args = defaults.split() + args
3249 3250 c = list(i[1])
3250 3251 else:
3251 3252 cmd = None
3252 3253 c = []
3253 3254
3254 3255 # combine global options into local
3255 3256 for o in globalopts:
3256 3257 c.append((o[0], o[1], options[o[1]], o[3]))
3257 3258
3258 3259 try:
3259 3260 args = fancyopts.fancyopts(args, c, cmdoptions)
3260 3261 except fancyopts.getopt.GetoptError, inst:
3261 3262 raise ParseError(cmd, inst)
3262 3263
3263 3264 # separate global options back out
3264 3265 for o in globalopts:
3265 3266 n = o[1]
3266 3267 options[n] = cmdoptions[n]
3267 3268 del cmdoptions[n]
3268 3269
3269 3270 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
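
# Illustrative sketch, not part of this changeset: parse() runs fancyopts
# twice -- first with only the global options to find the command word, then
# with the command's own options plus the globals folded in, which are
# separated back out afterwards.  For a hypothetical ui instance u:
#
#   cmd, func, args, opts, cmdopts = parse(u, ['-v', 'log', '-r', 'tip'])
#   # cmd == 'log', opts['verbose'] is set, cmdopts['rev'] == ['tip']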
3270 3271
3271 3272 external = {}
3272 3273
3273 3274 def findext(name):
3274 3275 '''return module with given extension name'''
3275 3276 try:
3276 3277 return sys.modules[external[name]]
3277 3278 except KeyError:
3278 3279 for k, v in external.iteritems():
3279 3280 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3280 3281 return sys.modules[v]
3281 3282 raise KeyError(name)
3282 3283
3283 3284 def dispatch(args):
3284 3285 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3285 3286 num = getattr(signal, name, None)
3286 3287 if num: signal.signal(num, catchterm)
3287 3288
3288 3289 try:
3289 3290 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3290 3291 except util.Abort, inst:
3291 3292 sys.stderr.write(_("abort: %s\n") % inst)
3292 3293 return -1
3293 3294
3294 3295 for ext_name, load_from_name in u.extensions():
3295 3296 try:
3296 3297 if load_from_name:
3297 3298 # the module will be loaded in sys.modules
3298 3299 # choose an unique name so that it doesn't
3299 3300 # conflicts with other modules
3300 3301 module_name = "hgext_%s" % ext_name.replace('.', '_')
3301 3302 mod = imp.load_source(module_name, load_from_name)
3302 3303 else:
3303 3304 def importh(name):
3304 3305 mod = __import__(name)
3305 3306 components = name.split('.')
3306 3307 for comp in components[1:]:
3307 3308 mod = getattr(mod, comp)
3308 3309 return mod
3309 3310 try:
3310 3311 mod = importh("hgext.%s" % ext_name)
3311 3312 except ImportError:
3312 3313 mod = importh(ext_name)
3313 3314 external[ext_name] = mod.__name__
3314 3315 except (util.SignalInterrupt, KeyboardInterrupt):
3315 3316 raise
3316 3317 except Exception, inst:
3317 3318 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3318 3319 if u.print_exc():
3319 3320 return 1
3320 3321
3321 3322 for name in external.itervalues():
3322 3323 mod = sys.modules[name]
3323 3324 uisetup = getattr(mod, 'uisetup', None)
3324 3325 if uisetup:
3325 3326 uisetup(u)
3326 3327 cmdtable = getattr(mod, 'cmdtable', {})
3327 3328 for t in cmdtable:
3328 3329 if t in table:
3329 3330 u.warn(_("module %s overrides %s\n") % (name, t))
3330 3331 table.update(cmdtable)
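
# Illustrative sketch, not part of this changeset: the loop above means a
# loadable extension only needs to define the hooks it uses.  A hypothetical
# minimal module could look like:
#
#   def uisetup(ui):
#       ui.note("hello extension loaded\n")
#
#   def hello(ui, repo, **opts):
#       ui.write("hello from %s\n" % repo.root)
#
#   cmdtable = {'hello': (hello, [], 'hg hello')}
#
# and may also provide reposetup(ui, repo), which is called below once a
# repository object exists.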
3331 3332
3332 3333 try:
3333 3334 cmd, func, args, options, cmdoptions = parse(u, args)
3334 3335 if options["time"]:
3335 3336 def get_times():
3336 3337 t = os.times()
3337 3338 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3338 3339 t = (t[0], t[1], t[2], t[3], time.clock())
3339 3340 return t
3340 3341 s = get_times()
3341 3342 def print_time():
3342 3343 t = get_times()
3343 3344 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3344 3345 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3345 3346 atexit.register(print_time)
3346 3347
3347 3348 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3348 3349 not options["noninteractive"], options["traceback"],
3349 3350 options["config"])
3350 3351
3351 3352 # enter the debugger before command execution
3352 3353 if options['debugger']:
3353 3354 pdb.set_trace()
3354 3355
3355 3356 try:
3356 3357 if options['cwd']:
3357 3358 try:
3358 3359 os.chdir(options['cwd'])
3359 3360 except OSError, inst:
3360 3361 raise util.Abort('%s: %s' %
3361 3362 (options['cwd'], inst.strerror))
3362 3363
3363 3364 path = u.expandpath(options["repository"]) or ""
3364 3365 repo = path and hg.repository(u, path=path) or None
3365 3366
3366 3367 if options['help']:
3367 3368 return help_(u, cmd, options['version'])
3368 3369 elif options['version']:
3369 3370 return show_version(u)
3370 3371 elif not cmd:
3371 3372 return help_(u, 'shortlist')
3372 3373
3373 3374 if cmd not in norepo.split():
3374 3375 try:
3375 3376 if not repo:
3376 3377 repo = hg.repository(u, path=path)
3377 3378 u = repo.ui
3378 3379 for name in external.itervalues():
3379 3380 mod = sys.modules[name]
3380 3381 if hasattr(mod, 'reposetup'):
3381 3382 mod.reposetup(u, repo)
3382 3383 hg.repo_setup_hooks.append(mod.reposetup)
3383 3384 except hg.RepoError:
3384 3385 if cmd not in optionalrepo.split():
3385 3386 raise
3386 3387 d = lambda: func(u, repo, *args, **cmdoptions)
3387 3388 else:
3388 3389 d = lambda: func(u, *args, **cmdoptions)
3389 3390
3390 3391 # reupdate the options, repo/.hg/hgrc may have changed them
3391 3392 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3392 3393 not options["noninteractive"], options["traceback"],
3393 3394 options["config"])
3394 3395
3395 3396 try:
3396 3397 if options['profile']:
3397 3398 import hotshot, hotshot.stats
3398 3399 prof = hotshot.Profile("hg.prof")
3399 3400 try:
3400 3401 try:
3401 3402 return prof.runcall(d)
3402 3403 except:
3403 3404 try:
3404 3405 u.warn(_('exception raised - generating '
3405 3406 'profile anyway\n'))
3406 3407 except:
3407 3408 pass
3408 3409 raise
3409 3410 finally:
3410 3411 prof.close()
3411 3412 stats = hotshot.stats.load("hg.prof")
3412 3413 stats.strip_dirs()
3413 3414 stats.sort_stats('time', 'calls')
3414 3415 stats.print_stats(40)
3415 3416 elif options['lsprof']:
3416 3417 try:
3417 3418 from mercurial import lsprof
3418 3419 except ImportError:
3419 3420 raise util.Abort(_(
3420 3421 'lsprof not available - install from '
3421 3422 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3422 3423 p = lsprof.Profiler()
3423 3424 p.enable(subcalls=True)
3424 3425 try:
3425 3426 return d()
3426 3427 finally:
3427 3428 p.disable()
3428 3429 stats = lsprof.Stats(p.getstats())
3429 3430 stats.sort()
3430 3431 stats.pprint(top=10, file=sys.stderr, climit=5)
3431 3432 else:
3432 3433 return d()
3433 3434 finally:
3434 3435 u.flush()
3435 3436 except:
3436 3437 # enter the debugger when we hit an exception
3437 3438 if options['debugger']:
3438 3439 pdb.post_mortem(sys.exc_info()[2])
3439 3440 u.print_exc()
3440 3441 raise
3441 3442 except ParseError, inst:
3442 3443 if inst.args[0]:
3443 3444 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3444 3445 help_(u, inst.args[0])
3445 3446 else:
3446 3447 u.warn(_("hg: %s\n") % inst.args[1])
3447 3448 help_(u, 'shortlist')
3448 3449 except AmbiguousCommand, inst:
3449 3450 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3450 3451 (inst.args[0], " ".join(inst.args[1])))
3451 3452 except UnknownCommand, inst:
3452 3453 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3453 3454 help_(u, 'shortlist')
3454 3455 except hg.RepoError, inst:
3455 3456 u.warn(_("abort: %s!\n") % inst)
3456 3457 except lock.LockHeld, inst:
3457 3458 if inst.errno == errno.ETIMEDOUT:
3458 3459 reason = _('timed out waiting for lock held by %s') % inst.locker
3459 3460 else:
3460 3461 reason = _('lock held by %s') % inst.locker
3461 3462 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3462 3463 except lock.LockUnavailable, inst:
3463 3464 u.warn(_("abort: could not lock %s: %s\n") %
3464 3465 (inst.desc or inst.filename, inst.strerror))
3465 3466 except revlog.RevlogError, inst:
3466 3467 u.warn(_("abort: "), inst, "!\n")
3467 3468 except util.SignalInterrupt:
3468 3469 u.warn(_("killed!\n"))
3469 3470 except KeyboardInterrupt:
3470 3471 try:
3471 3472 u.warn(_("interrupted!\n"))
3472 3473 except IOError, inst:
3473 3474 if inst.errno == errno.EPIPE:
3474 3475 if u.debugflag:
3475 3476 u.warn(_("\nbroken pipe\n"))
3476 3477 else:
3477 3478 raise
3478 3479 except IOError, inst:
3479 3480 if hasattr(inst, "code"):
3480 3481 u.warn(_("abort: %s\n") % inst)
3481 3482 elif hasattr(inst, "reason"):
3482 3483 u.warn(_("abort: error: %s\n") % inst.reason[1])
3483 3484 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3484 3485 if u.debugflag:
3485 3486 u.warn(_("broken pipe\n"))
3486 3487 elif getattr(inst, "strerror", None):
3487 3488 if getattr(inst, "filename", None):
3488 3489 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3489 3490 else:
3490 3491 u.warn(_("abort: %s\n") % inst.strerror)
3491 3492 else:
3492 3493 raise
3493 3494 except OSError, inst:
3494 3495 if hasattr(inst, "filename"):
3495 3496 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3496 3497 else:
3497 3498 u.warn(_("abort: %s\n") % inst.strerror)
3498 3499 except util.Abort, inst:
3499 3500 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3500 3501 except TypeError, inst:
3501 3502 # was this an argument error?
3502 3503 tb = traceback.extract_tb(sys.exc_info()[2])
3503 3504 if len(tb) > 2: # no
3504 3505 raise
3505 3506 u.debug(inst, "\n")
3506 3507 u.warn(_("%s: invalid arguments\n") % cmd)
3507 3508 help_(u, cmd)
3508 3509 except SystemExit, inst:
3509 3510 # Commands shouldn't sys.exit directly, but give a return code.
3510 3511 # Just in case, catch this and pass the exit code to the caller.
3511 3512 return inst.code
3512 3513 except:
3513 3514 u.warn(_("** unknown exception encountered, details follow\n"))
3514 3515 u.warn(_("** report bug details to "
3515 3516 "http://www.selenic.com/mercurial/bts\n"))
3516 3517 u.warn(_("** or mercurial@selenic.com\n"))
3517 3518 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3518 3519 % version.get_version())
3519 3520 raise
3520 3521
3521 3522 return -1
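
A minimal standalone sketch (not part of this changeset; the helper name "profiled" and the default output path are illustrative) of the hotshot profiling wrapper used in the dispatch code above: the command closure is run under the profiler and the hottest calls are printed afterwards, mirroring the options['profile'] branch.

    import hotshot, hotshot.stats

    def profiled(d, path="hg.prof"):
        # run the zero-argument command closure under hotshot, then dump
        # the top entries sorted by time and call count
        prof = hotshot.Profile(path)
        try:
            return prof.runcall(d)
        finally:
            prof.close()
            stats = hotshot.stats.load(path)
            stats.strip_dirs()
            stats.sort_stats('time', 'calls')
            stats.print_stats(40)
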
@@ -1,128 +1,127 b''
1 1 # filelog.py - file history class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from revlog import *
9 9 from demandload import *
10 10 demandload(globals(), "bdiff os")
11 11
12 12 class filelog(revlog):
13 13 def __init__(self, opener, path, defversion=REVLOG_DEFAULT_VERSION):
14 14 revlog.__init__(self, opener,
15 15 os.path.join("data", self.encodedir(path + ".i")),
16 16 os.path.join("data", self.encodedir(path + ".d")),
17 17 defversion)
18 18
19 19 # This avoids a collision between a file named foo and a dir named
20 20 # foo.i or foo.d
21 21 def encodedir(self, path):
22 22 return (path
23 23 .replace(".hg/", ".hg.hg/")
24 24 .replace(".i/", ".i.hg/")
25 25 .replace(".d/", ".d.hg/"))
26 26
27 27 def decodedir(self, path):
28 28 return (path
29 29 .replace(".d.hg/", ".d/")
30 30 .replace(".i.hg/", ".i/")
31 31 .replace(".hg.hg/", ".hg/"))
32 32
33 33 def read(self, node):
34 34 t = self.revision(node)
35 35 if not t.startswith('\1\n'):
36 36 return t
37 37 s = t.index('\1\n', 2)
38 38 return t[s+2:]
39 39
40 40 def readmeta(self, node):
41 41 t = self.revision(node)
42 42 if not t.startswith('\1\n'):
43 43 return {}
44 44 s = t.index('\1\n', 2)
45 45 mt = t[2:s]
46 46 m = {}
47 47 for l in mt.splitlines():
48 48 k, v = l.split(": ", 1)
49 49 m[k] = v
50 50 return m
51 51
52 52 def add(self, text, meta, transaction, link, p1=None, p2=None):
53 53 if meta or text.startswith('\1\n'):
54 54 mt = ""
55 55 if meta:
56 56 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
57 57 text = "\1\n%s\1\n%s" % ("".join(mt), text)
58 58 return self.addrevision(text, transaction, link, p1, p2)
59 59
60 60 def renamed(self, node):
61 61 if self.parents(node)[0] != nullid:
62 62 return False
63 63 m = self.readmeta(node)
64 64 if m and m.has_key("copy"):
65 65 return (m["copy"], bin(m["copyrev"]))
66 66 return False
67 67
68 def size(self, rev):
69 """return the size of a given revision"""
70
71 # for revisions with renames, we have to go the slow way
72 node = self.node(rev)
73 if self.renamed(node):
74 return len(self.read(node))
75
76 return revlog.size(self, rev)
77
68 78 def cmp(self, node, text):
69 79 """compare text with a given file revision"""
70 80
71 81 # for renames, we have to go the slow way
72 82 if self.renamed(node):
73 83 t2 = self.read(node)
74 return t2 == text
75
76 p1, p2 = self.parents(node)
77 h = hash(text, p1, p2)
78
79 return h != node
84 return t2 != text
80 85
81 def makenode(self, node, text):
82 """calculate a file nodeid for text, descended or possibly
83 unchanged from node"""
84
85 if self.cmp(node, text):
86 return hash(text, node, nullid)
87 return node
86 return revlog.cmp(self, node, text)
88 87
89 88 def annotate(self, node):
90 89
91 90 def decorate(text, rev):
92 91 return ([rev] * len(text.splitlines()), text)
93 92
94 93 def pair(parent, child):
95 94 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
96 95 child[0][b1:b2] = parent[0][a1:a2]
97 96 return child
98 97
99 98 # find all ancestors
100 99 needed = {node:1}
101 100 visit = [node]
102 101 while visit:
103 102 n = visit.pop(0)
104 103 for p in self.parents(n):
105 104 if p not in needed:
106 105 needed[p] = 1
107 106 visit.append(p)
108 107 else:
109 108 # count how many times we'll use this
110 109 needed[p] += 1
111 110
112 111 # sort by revision which is a topological order
113 112 visit = [ (self.rev(n), n) for n in needed.keys() ]
114 113 visit.sort()
115 114 hist = {}
116 115
117 116 for r,n in visit:
118 117 curr = decorate(self.read(n), self.linkrev(n))
119 118 for p in self.parents(n):
120 119 if p != nullid:
121 120 curr = pair(hist[p], curr)
122 121 # trim the history of unneeded revs
123 122 needed[p] -= 1
124 123 if not needed[p]:
125 124 del hist[p]
126 125 hist[n] = curr
127 126
128 127 return zip(hist[n][0], hist[n][1].splitlines(1))
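
For reference, a small sketch (helper names packmeta/unpackmeta are assumed, not from the changeset) of the metadata framing that read(), readmeta() and add() above rely on: a revision carrying copy metadata is stored as "\1\n" + "key: value" lines + "\1\n" + file text.

    def packmeta(meta, text):
        # frame metadata in front of the file text, as filelog.add() does
        if not meta and not text.startswith('\1\n'):
            return text
        mt = "".join(["%s: %s\n" % (k, v) for k, v in meta.items()])
        return "\1\n%s\1\n%s" % (mt, text)

    def unpackmeta(t):
        # split a framed revision back into (metadata dict, file text),
        # combining what readmeta() and read() do separately
        if not t.startswith('\1\n'):
            return {}, t
        s = t.index('\1\n', 2)
        meta = dict([l.split(": ", 1) for l in t[2:s].splitlines()])
        return meta, t[s + 2:]
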
@@ -1,981 +1,981 b''
1 1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os
10 10 import os.path
11 11 import mimetypes
12 12 from mercurial.demandload import demandload
13 13 demandload(globals(), "re zlib ConfigParser mimetools cStringIO sys tempfile")
14 demandload(globals(), "mercurial:mdiff,ui,hg,util,archival,streamclone")
14 demandload(globals(), "mercurial:mdiff,ui,hg,util,archival,streamclone,patch")
15 15 demandload(globals(), "mercurial:templater")
16 16 demandload(globals(), "mercurial.hgweb.common:get_mtime,staticfile")
17 17 from mercurial.node import *
18 18 from mercurial.i18n import gettext as _
19 19
20 20 def _up(p):
21 21 if p[0] != "/":
22 22 p = "/" + p
23 23 if p[-1] == "/":
24 24 p = p[:-1]
25 25 up = os.path.dirname(p)
26 26 if up == "/":
27 27 return "/"
28 28 return up + "/"
29 29
30 30 class hgweb(object):
31 31 def __init__(self, repo, name=None):
32 32 if type(repo) == type(""):
33 33 self.repo = hg.repository(ui.ui(), repo)
34 34 else:
35 35 self.repo = repo
36 36
37 37 self.mtime = -1
38 38 self.reponame = name
39 39 self.archives = 'zip', 'gz', 'bz2'
40 40 self.stripecount = 1
41 41 self.templatepath = self.repo.ui.config("web", "templates",
42 42 templater.templatepath())
43 43
44 44 def refresh(self):
45 45 mtime = get_mtime(self.repo.root)
46 46 if mtime != self.mtime:
47 47 self.mtime = mtime
48 48 self.repo = hg.repository(self.repo.ui, self.repo.root)
49 49 self.maxchanges = int(self.repo.ui.config("web", "maxchanges", 10))
50 50 self.stripecount = int(self.repo.ui.config("web", "stripes", 1))
51 51 self.maxshortchanges = int(self.repo.ui.config("web", "maxshortchanges", 60))
52 52 self.maxfiles = int(self.repo.ui.config("web", "maxfiles", 10))
53 53 self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
54 54
55 55 def archivelist(self, nodeid):
56 56 allowed = self.repo.ui.configlist("web", "allow_archive")
57 57 for i in self.archives:
58 58 if i in allowed or self.repo.ui.configbool("web", "allow" + i):
59 59 yield {"type" : i, "node" : nodeid, "url": ""}
60 60
61 61 def listfiles(self, files, mf):
62 62 for f in files[:self.maxfiles]:
63 63 yield self.t("filenodelink", node=hex(mf[f]), file=f)
64 64 if len(files) > self.maxfiles:
65 65 yield self.t("fileellipses")
66 66
67 67 def listfilediffs(self, files, changeset):
68 68 for f in files[:self.maxfiles]:
69 69 yield self.t("filedifflink", node=hex(changeset), file=f)
70 70 if len(files) > self.maxfiles:
71 71 yield self.t("fileellipses")
72 72
73 73 def siblings(self, siblings=[], rev=None, hiderev=None, **args):
74 74 if not rev:
75 75 rev = lambda x: ""
76 76 siblings = [s for s in siblings if s != nullid]
77 77 if len(siblings) == 1 and rev(siblings[0]) == hiderev:
78 78 return
79 79 for s in siblings:
80 80 yield dict(node=hex(s), rev=rev(s), **args)
81 81
82 82 def renamelink(self, fl, node):
83 83 r = fl.renamed(node)
84 84 if r:
85 85 return [dict(file=r[0], node=hex(r[1]))]
86 86 return []
87 87
88 88 def showtag(self, t1, node=nullid, **args):
89 89 for t in self.repo.nodetags(node):
90 90 yield self.t(t1, tag=t, **args)
91 91
92 92 def diff(self, node1, node2, files):
93 93 def filterfiles(filters, files):
94 94 l = [x for x in files if x in filters]
95 95
96 96 for t in filters:
97 97 if t and t[-1] != os.sep:
98 98 t += os.sep
99 99 l += [x for x in files if x.startswith(t)]
100 100 return l
101 101
102 102 parity = [0]
103 103 def diffblock(diff, f, fn):
104 104 yield self.t("diffblock",
105 105 lines=prettyprintlines(diff),
106 106 parity=parity[0],
107 107 file=f,
108 108 filenode=hex(fn or nullid))
109 109 parity[0] = 1 - parity[0]
110 110
111 111 def prettyprintlines(diff):
112 112 for l in diff.splitlines(1):
113 113 if l.startswith('+'):
114 114 yield self.t("difflineplus", line=l)
115 115 elif l.startswith('-'):
116 116 yield self.t("difflineminus", line=l)
117 117 elif l.startswith('@'):
118 118 yield self.t("difflineat", line=l)
119 119 else:
120 120 yield self.t("diffline", line=l)
121 121
122 122 r = self.repo
123 123 cl = r.changelog
124 124 mf = r.manifest
125 125 change1 = cl.read(node1)
126 126 change2 = cl.read(node2)
127 127 mmap1 = mf.read(change1[0])
128 128 mmap2 = mf.read(change2[0])
129 129 date1 = util.datestr(change1[2])
130 130 date2 = util.datestr(change2[2])
131 131
132 132 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
133 133 if files:
134 134 modified, added, removed = map(lambda x: filterfiles(files, x),
135 135 (modified, added, removed))
136 136
137 diffopts = self.repo.ui.diffopts()
137 diffopts = patch.diffopts(ui)
138 138 for f in modified:
139 139 to = r.file(f).read(mmap1[f])
140 140 tn = r.file(f).read(mmap2[f])
141 141 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
142 142 opts=diffopts), f, tn)
143 143 for f in added:
144 144 to = None
145 145 tn = r.file(f).read(mmap2[f])
146 146 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
147 147 opts=diffopts), f, tn)
148 148 for f in removed:
149 149 to = r.file(f).read(mmap1[f])
150 150 tn = None
151 151 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
152 152 opts=diffopts), f, tn)
153 153
154 154 def changelog(self, pos, shortlog=False):
155 155 def changenav(**map):
156 156 def seq(factor, maxchanges=None):
157 157 if maxchanges:
158 158 yield maxchanges
159 159 if maxchanges >= 20 and maxchanges <= 40:
160 160 yield 50
161 161 else:
162 162 yield 1 * factor
163 163 yield 3 * factor
164 164 for f in seq(factor * 10):
165 165 yield f
166 166
167 167 l = []
168 168 last = 0
169 169 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
170 170 for f in seq(1, maxchanges):
171 171 if f < maxchanges or f <= last:
172 172 continue
173 173 if f > count:
174 174 break
175 175 last = f
176 176 r = "%d" % f
177 177 if pos + f < count:
178 178 l.append(("+" + r, pos + f))
179 179 if pos - f >= 0:
180 180 l.insert(0, ("-" + r, pos - f))
181 181
182 182 yield {"rev": 0, "label": "(0)"}
183 183
184 184 for label, rev in l:
185 185 yield {"label": label, "rev": rev}
186 186
187 187 yield {"label": "tip", "rev": "tip"}
188 188
189 189 def changelist(**map):
190 190 parity = (start - end) & 1
191 191 cl = self.repo.changelog
192 192 l = [] # build a list in forward order for efficiency
193 193 for i in range(start, end):
194 194 n = cl.node(i)
195 195 changes = cl.read(n)
196 196 hn = hex(n)
197 197
198 198 l.insert(0, {"parity": parity,
199 199 "author": changes[1],
200 200 "parent": self.siblings(cl.parents(n), cl.rev,
201 201 cl.rev(n) - 1),
202 202 "child": self.siblings(cl.children(n), cl.rev,
203 203 cl.rev(n) + 1),
204 204 "changelogtag": self.showtag("changelogtag",n),
205 205 "manifest": hex(changes[0]),
206 206 "desc": changes[4],
207 207 "date": changes[2],
208 208 "files": self.listfilediffs(changes[3], n),
209 209 "rev": i,
210 210 "node": hn})
211 211 parity = 1 - parity
212 212
213 213 for e in l:
214 214 yield e
215 215
216 216 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
217 217 cl = self.repo.changelog
218 218 mf = cl.read(cl.tip())[0]
219 219 count = cl.count()
220 220 start = max(0, pos - maxchanges + 1)
221 221 end = min(count, start + maxchanges)
222 222 pos = end - 1
223 223
224 224 yield self.t(shortlog and 'shortlog' or 'changelog',
225 225 changenav=changenav,
226 226 manifest=hex(mf),
227 227 rev=pos, changesets=count, entries=changelist,
228 228 archives=self.archivelist("tip"))
229 229
230 230 def search(self, query):
231 231
232 232 def changelist(**map):
233 233 cl = self.repo.changelog
234 234 count = 0
235 235 qw = query.lower().split()
236 236
237 237 def revgen():
238 238 for i in range(cl.count() - 1, 0, -100):
239 239 l = []
240 240 for j in range(max(0, i - 100), i):
241 241 n = cl.node(j)
242 242 changes = cl.read(n)
243 243 l.append((n, j, changes))
244 244 l.reverse()
245 245 for e in l:
246 246 yield e
247 247
248 248 for n, i, changes in revgen():
249 249 miss = 0
250 250 for q in qw:
251 251 if not (q in changes[1].lower() or
252 252 q in changes[4].lower() or
253 253 q in " ".join(changes[3][:20]).lower()):
254 254 miss = 1
255 255 break
256 256 if miss:
257 257 continue
258 258
259 259 count += 1
260 260 hn = hex(n)
261 261
262 262 yield self.t('searchentry',
263 263 parity=self.stripes(count),
264 264 author=changes[1],
265 265 parent=self.siblings(cl.parents(n), cl.rev),
266 266 child=self.siblings(cl.children(n), cl.rev),
267 267 changelogtag=self.showtag("changelogtag",n),
268 268 manifest=hex(changes[0]),
269 269 desc=changes[4],
270 270 date=changes[2],
271 271 files=self.listfilediffs(changes[3], n),
272 272 rev=i,
273 273 node=hn)
274 274
275 275 if count >= self.maxchanges:
276 276 break
277 277
278 278 cl = self.repo.changelog
279 279 mf = cl.read(cl.tip())[0]
280 280
281 281 yield self.t('search',
282 282 query=query,
283 283 manifest=hex(mf),
284 284 entries=changelist)
285 285
286 286 def changeset(self, nodeid):
287 287 cl = self.repo.changelog
288 288 n = self.repo.lookup(nodeid)
289 289 nodeid = hex(n)
290 290 changes = cl.read(n)
291 291 p1 = cl.parents(n)[0]
292 292
293 293 files = []
294 294 mf = self.repo.manifest.read(changes[0])
295 295 for f in changes[3]:
296 296 files.append(self.t("filenodelink",
297 297 filenode=hex(mf.get(f, nullid)), file=f))
298 298
299 299 def diff(**map):
300 300 yield self.diff(p1, n, None)
301 301
302 302 yield self.t('changeset',
303 303 diff=diff,
304 304 rev=cl.rev(n),
305 305 node=nodeid,
306 306 parent=self.siblings(cl.parents(n), cl.rev),
307 307 child=self.siblings(cl.children(n), cl.rev),
308 308 changesettag=self.showtag("changesettag",n),
309 309 manifest=hex(changes[0]),
310 310 author=changes[1],
311 311 desc=changes[4],
312 312 date=changes[2],
313 313 files=files,
314 314 archives=self.archivelist(nodeid))
315 315
316 316 def filelog(self, f, filenode):
317 317 cl = self.repo.changelog
318 318 fl = self.repo.file(f)
319 319 filenode = hex(fl.lookup(filenode))
320 320 count = fl.count()
321 321
322 322 def entries(**map):
323 323 l = []
324 324 parity = (count - 1) & 1
325 325
326 326 for i in range(count):
327 327 n = fl.node(i)
328 328 lr = fl.linkrev(n)
329 329 cn = cl.node(lr)
330 330 cs = cl.read(cl.node(lr))
331 331
332 332 l.insert(0, {"parity": parity,
333 333 "filenode": hex(n),
334 334 "filerev": i,
335 335 "file": f,
336 336 "node": hex(cn),
337 337 "author": cs[1],
338 338 "date": cs[2],
339 339 "rename": self.renamelink(fl, n),
340 340 "parent": self.siblings(fl.parents(n),
341 341 fl.rev, file=f),
342 342 "child": self.siblings(fl.children(n),
343 343 fl.rev, file=f),
344 344 "desc": cs[4]})
345 345 parity = 1 - parity
346 346
347 347 for e in l:
348 348 yield e
349 349
350 350 yield self.t("filelog", file=f, filenode=filenode, entries=entries)
351 351
352 352 def filerevision(self, f, node):
353 353 fl = self.repo.file(f)
354 354 n = fl.lookup(node)
355 355 node = hex(n)
356 356 text = fl.read(n)
357 357 changerev = fl.linkrev(n)
358 358 cl = self.repo.changelog
359 359 cn = cl.node(changerev)
360 360 cs = cl.read(cn)
361 361 mfn = cs[0]
362 362
363 363 mt = mimetypes.guess_type(f)[0]
364 364 rawtext = text
365 365 if util.binary(text):
366 366 mt = mt or 'application/octet-stream'
367 367 text = "(binary:%s)" % mt
368 368 mt = mt or 'text/plain'
369 369
370 370 def lines():
371 371 for l, t in enumerate(text.splitlines(1)):
372 372 yield {"line": t,
373 373 "linenumber": "% 6d" % (l + 1),
374 374 "parity": self.stripes(l)}
375 375
376 376 yield self.t("filerevision",
377 377 file=f,
378 378 filenode=node,
379 379 path=_up(f),
380 380 text=lines(),
381 381 raw=rawtext,
382 382 mimetype=mt,
383 383 rev=changerev,
384 384 node=hex(cn),
385 385 manifest=hex(mfn),
386 386 author=cs[1],
387 387 date=cs[2],
388 388 parent=self.siblings(fl.parents(n), fl.rev, file=f),
389 389 child=self.siblings(fl.children(n), fl.rev, file=f),
390 390 rename=self.renamelink(fl, n),
391 391 permissions=self.repo.manifest.read(mfn).execf(f))
392 392
393 393 def fileannotate(self, f, node):
394 394 bcache = {}
395 395 ncache = {}
396 396 fl = self.repo.file(f)
397 397 n = fl.lookup(node)
398 398 node = hex(n)
399 399 changerev = fl.linkrev(n)
400 400
401 401 cl = self.repo.changelog
402 402 cn = cl.node(changerev)
403 403 cs = cl.read(cn)
404 404 mfn = cs[0]
405 405
406 406 def annotate(**map):
407 407 parity = 0
408 408 last = None
409 409 for r, l in fl.annotate(n):
410 410 try:
411 411 cnode = ncache[r]
412 412 except KeyError:
413 413 cnode = ncache[r] = self.repo.changelog.node(r)
414 414
415 415 try:
416 416 name = bcache[r]
417 417 except KeyError:
418 418 cl = self.repo.changelog.read(cnode)
419 419 bcache[r] = name = self.repo.ui.shortuser(cl[1])
420 420
421 421 if last != cnode:
422 422 parity = 1 - parity
423 423 last = cnode
424 424
425 425 yield {"parity": parity,
426 426 "node": hex(cnode),
427 427 "rev": r,
428 428 "author": name,
429 429 "file": f,
430 430 "line": l}
431 431
432 432 yield self.t("fileannotate",
433 433 file=f,
434 434 filenode=node,
435 435 annotate=annotate,
436 436 path=_up(f),
437 437 rev=changerev,
438 438 node=hex(cn),
439 439 manifest=hex(mfn),
440 440 author=cs[1],
441 441 date=cs[2],
442 442 rename=self.renamelink(fl, n),
443 443 parent=self.siblings(fl.parents(n), fl.rev, file=f),
444 444 child=self.siblings(fl.children(n), fl.rev, file=f),
445 445 permissions=self.repo.manifest.read(mfn).execf(f))
446 446
447 447 def manifest(self, mnode, path):
448 448 man = self.repo.manifest
449 449 mn = man.lookup(mnode)
450 450 mnode = hex(mn)
451 451 mf = man.read(mn)
452 452 rev = man.rev(mn)
453 453 changerev = man.linkrev(mn)
454 454 node = self.repo.changelog.node(changerev)
455 455
456 456 files = {}
457 457
458 458 p = path[1:]
459 459 if p and p[-1] != "/":
460 460 p += "/"
461 461 l = len(p)
462 462
463 463 for f,n in mf.items():
464 464 if f[:l] != p:
465 465 continue
466 466 remain = f[l:]
467 467 if "/" in remain:
468 468 short = remain[:remain.index("/") + 1] # bleah
469 469 files[short] = (f, None)
470 470 else:
471 471 short = os.path.basename(remain)
472 472 files[short] = (f, n)
473 473
474 474 def filelist(**map):
475 475 parity = 0
476 476 fl = files.keys()
477 477 fl.sort()
478 478 for f in fl:
479 479 full, fnode = files[f]
480 480 if not fnode:
481 481 continue
482 482
483 483 yield {"file": full,
484 484 "manifest": mnode,
485 485 "filenode": hex(fnode),
486 486 "parity": self.stripes(parity),
487 487 "basename": f,
488 488 "permissions": mf.execf(full)}
489 489 parity += 1
490 490
491 491 def dirlist(**map):
492 492 parity = 0
493 493 fl = files.keys()
494 494 fl.sort()
495 495 for f in fl:
496 496 full, fnode = files[f]
497 497 if fnode:
498 498 continue
499 499
500 500 yield {"parity": self.stripes(parity),
501 501 "path": os.path.join(path, f),
502 502 "manifest": mnode,
503 503 "basename": f[:-1]}
504 504 parity += 1
505 505
506 506 yield self.t("manifest",
507 507 manifest=mnode,
508 508 rev=rev,
509 509 node=hex(node),
510 510 path=path,
511 511 up=_up(path),
512 512 fentries=filelist,
513 513 dentries=dirlist,
514 514 archives=self.archivelist(hex(node)))
515 515
516 516 def tags(self):
517 517 cl = self.repo.changelog
518 518 mf = cl.read(cl.tip())[0]
519 519
520 520 i = self.repo.tagslist()
521 521 i.reverse()
522 522
523 523 def entries(notip=False, **map):
524 524 parity = 0
525 525 for k,n in i:
526 526 if notip and k == "tip": continue
527 527 yield {"parity": self.stripes(parity),
528 528 "tag": k,
529 529 "tagmanifest": hex(cl.read(n)[0]),
530 530 "date": cl.read(n)[2],
531 531 "node": hex(n)}
532 532 parity += 1
533 533
534 534 yield self.t("tags",
535 535 manifest=hex(mf),
536 536 entries=lambda **x: entries(False, **x),
537 537 entriesnotip=lambda **x: entries(True, **x))
538 538
539 539 def summary(self):
540 540 cl = self.repo.changelog
541 541 mf = cl.read(cl.tip())[0]
542 542
543 543 i = self.repo.tagslist()
544 544 i.reverse()
545 545
546 546 def tagentries(**map):
547 547 parity = 0
548 548 count = 0
549 549 for k,n in i:
550 550 if k == "tip": # skip tip
551 551 continue;
552 552
553 553 count += 1
554 554 if count > 10: # limit to 10 tags
555 555 break;
556 556
557 557 c = cl.read(n)
558 558 m = c[0]
559 559 t = c[2]
560 560
561 561 yield self.t("tagentry",
562 562 parity = self.stripes(parity),
563 563 tag = k,
564 564 node = hex(n),
565 565 date = t,
566 566 tagmanifest = hex(m))
567 567 parity += 1
568 568
569 569 def changelist(**map):
570 570 parity = 0
571 571 cl = self.repo.changelog
572 572 l = [] # build a list in forward order for efficiency
573 573 for i in range(start, end):
574 574 n = cl.node(i)
575 575 changes = cl.read(n)
576 576 hn = hex(n)
577 577 t = changes[2]
578 578
579 579 l.insert(0, self.t(
580 580 'shortlogentry',
581 581 parity = parity,
582 582 author = changes[1],
583 583 manifest = hex(changes[0]),
584 584 desc = changes[4],
585 585 date = t,
586 586 rev = i,
587 587 node = hn))
588 588 parity = 1 - parity
589 589
590 590 yield l
591 591
592 592 cl = self.repo.changelog
593 593 mf = cl.read(cl.tip())[0]
594 594 count = cl.count()
595 595 start = max(0, count - self.maxchanges)
596 596 end = min(count, start + self.maxchanges)
597 597
598 598 yield self.t("summary",
599 599 desc = self.repo.ui.config("web", "description", "unknown"),
600 600 owner = (self.repo.ui.config("ui", "username") or # preferred
601 601 self.repo.ui.config("web", "contact") or # deprecated
602 602 self.repo.ui.config("web", "author", "unknown")), # also
603 603 lastchange = (0, 0), # FIXME
604 604 manifest = hex(mf),
605 605 tags = tagentries,
606 606 shortlog = changelist,
607 607 archives=self.archivelist("tip"))
608 608
609 609 def filediff(self, file, changeset):
610 610 cl = self.repo.changelog
611 611 n = self.repo.lookup(changeset)
612 612 changeset = hex(n)
613 613 p1 = cl.parents(n)[0]
614 614 cs = cl.read(n)
615 615 mf = self.repo.manifest.read(cs[0])
616 616
617 617 def diff(**map):
618 618 yield self.diff(p1, n, [file])
619 619
620 620 yield self.t("filediff",
621 621 file=file,
622 622 filenode=hex(mf.get(file, nullid)),
623 623 node=changeset,
624 624 rev=self.repo.changelog.rev(n),
625 625 parent=self.siblings(cl.parents(n), cl.rev),
626 626 child=self.siblings(cl.children(n), cl.rev),
627 627 diff=diff)
628 628
629 629 archive_specs = {
630 630 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
631 631 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
632 632 'zip': ('application/zip', 'zip', '.zip', None),
633 633 }
634 634
635 635 def archive(self, req, cnode, type_):
636 636 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
637 637 name = "%s-%s" % (reponame, short(cnode))
638 638 mimetype, artype, extension, encoding = self.archive_specs[type_]
639 639 headers = [('Content-type', mimetype),
640 640 ('Content-disposition', 'attachment; filename=%s%s' %
641 641 (name, extension))]
642 642 if encoding:
643 643 headers.append(('Content-encoding', encoding))
644 644 req.header(headers)
645 645 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
646 646
647 647 # add tags to things
648 648 # tags -> list of changesets corresponding to tags
649 649 # find tag, changeset, file
650 650
651 651 def cleanpath(self, path):
652 652 p = util.normpath(path)
653 653 if p[:2] == "..":
654 654 raise Exception("suspicious path")
655 655 return p
656 656
657 657 def run(self):
658 658 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
659 659 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
660 660 import mercurial.hgweb.wsgicgi as wsgicgi
661 661 from request import wsgiapplication
662 662 def make_web_app():
663 663 return self
664 664 wsgicgi.launch(wsgiapplication(make_web_app))
665 665
666 666 def run_wsgi(self, req):
667 667 def header(**map):
668 668 header_file = cStringIO.StringIO(''.join(self.t("header", **map)))
669 669 msg = mimetools.Message(header_file, 0)
670 670 req.header(msg.items())
671 671 yield header_file.read()
672 672
673 673 def rawfileheader(**map):
674 674 req.header([('Content-type', map['mimetype']),
675 675 ('Content-disposition', 'filename=%s' % map['file']),
676 676 ('Content-length', str(len(map['raw'])))])
677 677 yield ''
678 678
679 679 def footer(**map):
680 680 yield self.t("footer",
681 681 motd=self.repo.ui.config("web", "motd", ""),
682 682 **map)
683 683
684 684 def expand_form(form):
685 685 shortcuts = {
686 686 'cl': [('cmd', ['changelog']), ('rev', None)],
687 687 'sl': [('cmd', ['shortlog']), ('rev', None)],
688 688 'cs': [('cmd', ['changeset']), ('node', None)],
689 689 'f': [('cmd', ['file']), ('filenode', None)],
690 690 'fl': [('cmd', ['filelog']), ('filenode', None)],
691 691 'fd': [('cmd', ['filediff']), ('node', None)],
692 692 'fa': [('cmd', ['annotate']), ('filenode', None)],
693 693 'mf': [('cmd', ['manifest']), ('manifest', None)],
694 694 'ca': [('cmd', ['archive']), ('node', None)],
695 695 'tags': [('cmd', ['tags'])],
696 696 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
697 697 'static': [('cmd', ['static']), ('file', None)]
698 698 }
699 699
700 700 for k in shortcuts.iterkeys():
701 701 if form.has_key(k):
702 702 for name, value in shortcuts[k]:
703 703 if value is None:
704 704 value = form[k]
705 705 form[name] = value
706 706 del form[k]
707 707
708 708 self.refresh()
709 709
710 710 expand_form(req.form)
711 711
712 712 m = os.path.join(self.templatepath, "map")
713 713 style = self.repo.ui.config("web", "style", "")
714 714 if req.form.has_key('style'):
715 715 style = req.form['style'][0]
716 716 if style:
717 717 b = os.path.basename("map-" + style)
718 718 p = os.path.join(self.templatepath, b)
719 719 if os.path.isfile(p):
720 720 m = p
721 721
722 722 port = req.env["SERVER_PORT"]
723 723 port = port != "80" and (":" + port) or ""
724 724 uri = req.env["REQUEST_URI"]
725 725 if "?" in uri:
726 726 uri = uri.split("?")[0]
727 727 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
728 728 if not self.reponame:
729 729 self.reponame = (self.repo.ui.config("web", "name")
730 730 or uri.strip('/') or self.repo.root)
731 731
732 732 self.t = templater.templater(m, templater.common_filters,
733 733 defaults={"url": url,
734 734 "repo": self.reponame,
735 735 "header": header,
736 736 "footer": footer,
737 737 "rawfileheader": rawfileheader,
738 738 })
739 739
740 740 if not req.form.has_key('cmd'):
741 741 req.form['cmd'] = [self.t.cache['default'],]
742 742
743 743 cmd = req.form['cmd'][0]
744 744
745 745 method = getattr(self, 'do_' + cmd, None)
746 746 if method:
747 747 method(req)
748 748 else:
749 749 req.write(self.t("error"))
750 750
751 751 def stripes(self, parity):
752 752 "make horizontal stripes for easier reading"
753 753 if self.stripecount:
754 754 return (1 + parity / self.stripecount) & 1
755 755 else:
756 756 return 0
757 757
758 758 def do_changelog(self, req):
759 759 hi = self.repo.changelog.count() - 1
760 760 if req.form.has_key('rev'):
761 761 hi = req.form['rev'][0]
762 762 try:
763 763 hi = self.repo.changelog.rev(self.repo.lookup(hi))
764 764 except hg.RepoError:
765 765 req.write(self.search(hi)) # XXX redirect to 404 page?
766 766 return
767 767
768 768 req.write(self.changelog(hi))
769 769
770 770 def do_shortlog(self, req):
771 771 hi = self.repo.changelog.count() - 1
772 772 if req.form.has_key('rev'):
773 773 hi = req.form['rev'][0]
774 774 try:
775 775 hi = self.repo.changelog.rev(self.repo.lookup(hi))
776 776 except hg.RepoError:
777 777 req.write(self.search(hi)) # XXX redirect to 404 page?
778 778 return
779 779
780 780 req.write(self.changelog(hi, shortlog = True))
781 781
782 782 def do_changeset(self, req):
783 783 req.write(self.changeset(req.form['node'][0]))
784 784
785 785 def do_manifest(self, req):
786 786 req.write(self.manifest(req.form['manifest'][0],
787 787 self.cleanpath(req.form['path'][0])))
788 788
789 789 def do_tags(self, req):
790 790 req.write(self.tags())
791 791
792 792 def do_summary(self, req):
793 793 req.write(self.summary())
794 794
795 795 def do_filediff(self, req):
796 796 req.write(self.filediff(self.cleanpath(req.form['file'][0]),
797 797 req.form['node'][0]))
798 798
799 799 def do_file(self, req):
800 800 req.write(self.filerevision(self.cleanpath(req.form['file'][0]),
801 801 req.form['filenode'][0]))
802 802
803 803 def do_annotate(self, req):
804 804 req.write(self.fileannotate(self.cleanpath(req.form['file'][0]),
805 805 req.form['filenode'][0]))
806 806
807 807 def do_filelog(self, req):
808 808 req.write(self.filelog(self.cleanpath(req.form['file'][0]),
809 809 req.form['filenode'][0]))
810 810
811 811 def do_heads(self, req):
812 812 resp = " ".join(map(hex, self.repo.heads())) + "\n"
813 813 req.httphdr("application/mercurial-0.1", length=len(resp))
814 814 req.write(resp)
815 815
816 816 def do_branches(self, req):
817 817 nodes = []
818 818 if req.form.has_key('nodes'):
819 819 nodes = map(bin, req.form['nodes'][0].split(" "))
820 820 resp = cStringIO.StringIO()
821 821 for b in self.repo.branches(nodes):
822 822 resp.write(" ".join(map(hex, b)) + "\n")
823 823 resp = resp.getvalue()
824 824 req.httphdr("application/mercurial-0.1", length=len(resp))
825 825 req.write(resp)
826 826
827 827 def do_between(self, req):
828 828 nodes = []
829 829 if req.form.has_key('pairs'):
830 830 pairs = [map(bin, p.split("-"))
831 831 for p in req.form['pairs'][0].split(" ")]
832 832 resp = cStringIO.StringIO()
833 833 for b in self.repo.between(pairs):
834 834 resp.write(" ".join(map(hex, b)) + "\n")
835 835 resp = resp.getvalue()
836 836 req.httphdr("application/mercurial-0.1", length=len(resp))
837 837 req.write(resp)
838 838
839 839 def do_changegroup(self, req):
840 840 req.httphdr("application/mercurial-0.1")
841 841 nodes = []
842 842 if not self.allowpull:
843 843 return
844 844
845 845 if req.form.has_key('roots'):
846 846 nodes = map(bin, req.form['roots'][0].split(" "))
847 847
848 848 z = zlib.compressobj()
849 849 f = self.repo.changegroup(nodes, 'serve')
850 850 while 1:
851 851 chunk = f.read(4096)
852 852 if not chunk:
853 853 break
854 854 req.write(z.compress(chunk))
855 855
856 856 req.write(z.flush())
857 857
858 858 def do_archive(self, req):
859 859 changeset = self.repo.lookup(req.form['node'][0])
860 860 type_ = req.form['type'][0]
861 861 allowed = self.repo.ui.configlist("web", "allow_archive")
862 862 if (type_ in self.archives and (type_ in allowed or
863 863 self.repo.ui.configbool("web", "allow" + type_, False))):
864 864 self.archive(req, changeset, type_)
865 865 return
866 866
867 867 req.write(self.t("error"))
868 868
869 869 def do_static(self, req):
870 870 fname = req.form['file'][0]
871 871 static = self.repo.ui.config("web", "static",
872 872 os.path.join(self.templatepath,
873 873 "static"))
874 874 req.write(staticfile(static, fname, req)
875 875 or self.t("error", error="%r not found" % fname))
876 876
877 877 def do_capabilities(self, req):
878 878 caps = ['unbundle']
879 879 if self.repo.ui.configbool('server', 'uncompressed'):
880 880 caps.append('stream=%d' % self.repo.revlogversion)
881 881 resp = ' '.join(caps)
882 882 req.httphdr("application/mercurial-0.1", length=len(resp))
883 883 req.write(resp)
884 884
885 885 def check_perm(self, req, op, default):
886 886 '''check permission for operation based on user auth.
887 887 return true if op allowed, else false.
888 888 default is policy to use if no config given.'''
889 889
890 890 user = req.env.get('REMOTE_USER')
891 891
892 892 deny = self.repo.ui.configlist('web', 'deny_' + op)
893 893 if deny and (not user or deny == ['*'] or user in deny):
894 894 return False
895 895
896 896 allow = self.repo.ui.configlist('web', 'allow_' + op)
897 897 return (allow and (allow == ['*'] or user in allow)) or default
898 898
899 899 def do_unbundle(self, req):
900 900 def bail(response, headers={}):
901 901 length = int(req.env['CONTENT_LENGTH'])
902 902 for s in util.filechunkiter(req, limit=length):
903 903 # drain incoming bundle, else client will not see
904 904 # response when run outside cgi script
905 905 pass
906 906 req.httphdr("application/mercurial-0.1", headers=headers)
907 907 req.write('0\n')
908 908 req.write(response)
909 909
910 910 # require ssl by default, so that auth info cannot be sniffed
911 911 # and replayed
912 912 ssl_req = self.repo.ui.configbool('web', 'push_ssl', True)
913 913 if ssl_req:
914 914 if not req.env.get('HTTPS'):
915 915 bail(_('ssl required\n'))
916 916 return
917 917 proto = 'https'
918 918 else:
919 919 proto = 'http'
920 920
921 921 # do not allow push unless explicitly allowed
922 922 if not self.check_perm(req, 'push', False):
923 923 bail(_('push not authorized\n'),
924 924 headers={'status': '401 Unauthorized'})
925 925 return
926 926
927 927 req.httphdr("application/mercurial-0.1")
928 928
929 929 their_heads = req.form['heads'][0].split(' ')
930 930
931 931 def check_heads():
932 932 heads = map(hex, self.repo.heads())
933 933 return their_heads == [hex('force')] or their_heads == heads
934 934
935 935 # fail early if possible
936 936 if not check_heads():
937 937 bail(_('unsynced changes\n'))
938 938 return
939 939
940 940 # do not lock repo until all changegroup data is
941 941 # streamed. save to temporary file.
942 942
943 943 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
944 944 fp = os.fdopen(fd, 'wb+')
945 945 try:
946 946 length = int(req.env['CONTENT_LENGTH'])
947 947 for s in util.filechunkiter(req, limit=length):
948 948 fp.write(s)
949 949
950 950 lock = self.repo.lock()
951 951 try:
952 952 if not check_heads():
953 953 req.write('0\n')
954 954 req.write(_('unsynced changes\n'))
955 955 return
956 956
957 957 fp.seek(0)
958 958
959 959 # send addchangegroup output to client
960 960
961 961 old_stdout = sys.stdout
962 962 sys.stdout = cStringIO.StringIO()
963 963
964 964 try:
965 965 url = 'remote:%s:%s' % (proto,
966 966 req.env.get('REMOTE_HOST', ''))
967 967 ret = self.repo.addchangegroup(fp, 'serve', url)
968 968 finally:
969 969 val = sys.stdout.getvalue()
970 970 sys.stdout = old_stdout
971 971 req.write('%d\n' % ret)
972 972 req.write(val)
973 973 finally:
974 974 lock.release()
975 975 finally:
976 976 fp.close()
977 977 os.unlink(tempname)
978 978
979 979 def do_stream_out(self, req):
980 980 req.httphdr("application/mercurial-0.1")
981 981 streamclone.stream_out(self.repo, req)
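
As a quick illustration (the fixed stripecount of 3 is assumed for the example) of the stripes() helper defined above: consecutive rows are banded into groups of stripecount, alternating parity between groups.

    def stripes(parity, stripecount=3):
        # same arithmetic as hgweb.stripes(), with stripecount fixed here
        if stripecount:
            return (1 + parity / stripecount) & 1
        return 0

    print [stripes(i) for i in range(9)]    # -> [1, 1, 1, 0, 0, 0, 1, 1, 1]
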
@@ -1,334 +1,339 b''
1 1 # merge.py - directory-level update/merge handling for Mercurial
2 2 #
3 3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 demandload(globals(), "util os tempfile")
12 12
13 def fmerge(f, local, other, ancestor):
14 """merge executable flags"""
15 a, b, c = ancestor.execf(f), local.execf(f), other.execf(f)
16 return ((a^b) | (a^c)) ^ a
17
13 18 def merge3(repo, fn, my, other, p1, p2):
14 19 """perform a 3-way merge in the working directory"""
15 20
16 21 def temp(prefix, node):
17 22 pre = "%s~%s." % (os.path.basename(fn), prefix)
18 23 (fd, name) = tempfile.mkstemp(prefix=pre)
19 24 f = os.fdopen(fd, "wb")
20 25 repo.wwrite(fn, fl.read(node), f)
21 26 f.close()
22 27 return name
23 28
24 29 fl = repo.file(fn)
25 30 base = fl.ancestor(my, other)
26 31 a = repo.wjoin(fn)
27 32 b = temp("base", base)
28 33 c = temp("other", other)
29 34
30 35 repo.ui.note(_("resolving %s\n") % fn)
31 36 repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
32 37 (fn, short(my), short(other), short(base)))
33 38
34 39 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
35 40 or "hgmerge")
36 41 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
37 42 environ={'HG_FILE': fn,
38 43 'HG_MY_NODE': p1,
39 44 'HG_OTHER_NODE': p2,
40 45 'HG_FILE_MY_NODE': hex(my),
41 46 'HG_FILE_OTHER_NODE': hex(other),
42 47 'HG_FILE_BASE_NODE': hex(base)})
43 48 if r:
44 49 repo.ui.warn(_("merging %s failed!\n") % fn)
45 50
46 51 os.unlink(b)
47 52 os.unlink(c)
48 53 return r
49 54
50 55 def update(repo, node, branchmerge=False, force=False, partial=None,
51 56 wlock=None, show_stats=True, remind=True):
52 57
53 58 overwrite = force and not branchmerge
54 59 forcemerge = force and branchmerge
55 60
56 61 if not wlock:
57 62 wlock = repo.wlock()
58 63
59 64 ### check phase
60 65
61 66 pl = repo.dirstate.parents()
62 67 if not overwrite and pl[1] != nullid:
63 68 raise util.Abort(_("outstanding uncommitted merges"))
64 69
65 70 p1, p2 = pl[0], node
66 71 pa = repo.changelog.ancestor(p1, p2)
67 72
68 73 # is there a linear path from p1 to p2?
69 74 linear_path = (pa == p1 or pa == p2)
70 75 if branchmerge and linear_path:
71 76 raise util.Abort(_("there is nothing to merge, just use "
72 77 "'hg update' or look at 'hg heads'"))
73 78
74 79 if not overwrite and not linear_path and not branchmerge:
75 80 raise util.Abort(_("update spans branches, use 'hg merge' "
76 81 "or 'hg update -C' to lose changes"))
77 82
78 83 modified, added, removed, deleted, unknown = repo.status()[:5]
79 84 if branchmerge and not forcemerge:
80 85 if modified or added or removed:
81 86 raise util.Abort(_("outstanding uncommitted changes"))
82 87
83 88 m1n = repo.changelog.read(p1)[0]
84 89 m2n = repo.changelog.read(p2)[0]
85 90 man = repo.manifest.ancestor(m1n, m2n)
86 91 m1 = repo.manifest.read(m1n)
87 92 m2 = repo.manifest.read(m2n).copy()
88 93 ma = repo.manifest.read(man)
89 94
90 95 if not force:
91 96 for f in unknown:
92 97 if f in m2:
93 t1 = repo.wread(f)
94 t2 = repo.file(f).read(m2[f])
95 if cmp(t1, t2) != 0:
98 if repo.file(f).cmp(m2[f], repo.wread(f)):
96 99 raise util.Abort(_("'%s' already exists in the working"
97 100 " dir and differs from remote") % f)
98 101
99 102 # resolve the manifest to determine which files
100 103 # we care about merging
101 104 repo.ui.note(_("resolving manifests\n"))
102 105 repo.ui.debug(_(" overwrite %s branchmerge %s partial %s linear %s\n") %
103 (overwrite, branchmerge, partial and True or False, linear_path))
106 (overwrite, branchmerge, bool(partial), linear_path))
104 107 repo.ui.debug(_(" ancestor %s local %s remote %s\n") %
105 108 (short(man), short(m1n), short(m2n)))
106 109
107 110 merge = {}
108 111 get = {}
109 112 remove = []
113 forget = []
110 114
111 115 # construct a working dir manifest
112 116 mw = m1.copy()
113 117 umap = dict.fromkeys(unknown)
114 118
115 119 for f in added + modified + unknown:
116 120 mw[f] = ""
121 # is the wfile new and matches m2?
122 if (f not in m1 and f in m2 and
123 not repo.file(f).cmp(m2[f], repo.wread(f))):
124 mw[f] = m2[f]
125
117 126 mw.set(f, util.is_exec(repo.wjoin(f), mw.execf(f)))
118 127
119 128 for f in deleted + removed:
120 129 if f in mw:
121 130 del mw[f]
122 131
123 132 # If we're jumping between revisions (as opposed to merging),
124 133 # and if neither the working directory nor the target rev has
125 134 # the file, then we need to remove it from the dirstate, to
126 135 # prevent the dirstate from listing the file when it is no
127 136 # longer in the manifest.
128 if not partial and linear_path and f not in m2:
129 repo.dirstate.forget((f,))
137 if linear_path and f not in m2:
138 forget.append(f)
130 139
131 140 # Compare manifests
132 141 for f, n in mw.iteritems():
133 142 if partial and not partial(f):
134 143 continue
135 144 if f in m2:
136 145 s = 0
137 146
138 # is the wfile new since m1, and match m2?
139 if f not in m1:
140 t1 = repo.wread(f)
141 t2 = repo.file(f).read(m2[f])
142 if cmp(t1, t2) == 0:
143 n = m2[f]
144 del t1, t2
145
146 147 # are files different?
147 148 if n != m2[f]:
148 149 a = ma.get(f, nullid)
149 150 # are both different from the ancestor?
150 151 if n != a and m2[f] != a:
151 152 repo.ui.debug(_(" %s versions differ, resolve\n") % f)
152 # merge executable bits
153 # "if we changed or they changed, change in merge"
154 a, b, c = ma.execf(f), mw.execf(f), m2.execf(f)
155 mode = ((a^b) | (a^c)) ^ a
156 merge[f] = (mode, m1.get(f, nullid), m2[f])
153 merge[f] = (fmerge(f, mw, m2, ma), m1.get(f, nullid), m2[f])
157 154 s = 1
158 155 # are we clobbering?
159 156 # is remote's version newer?
160 157 # or are we going back in time?
161 158 elif overwrite or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
162 159 repo.ui.debug(_(" remote %s is newer, get\n") % f)
163 160 get[f] = (m2.execf(f), m2[f])
164 161 s = 1
165 162 elif f in umap or f in added:
166 163 # this unknown file is the same as the checkout
167 164 # we need to reset the dirstate if the file was added
168 165 get[f] = (m2.execf(f), m2[f])
169 166
170 167 if not s and mw.execf(f) != m2.execf(f):
171 168 if overwrite:
172 169 repo.ui.debug(_(" updating permissions for %s\n") % f)
173 170 util.set_exec(repo.wjoin(f), m2.execf(f))
174 171 else:
175 a, b, c = ma.execf(f), mw.execf(f), m2.execf(f)
176 mode = ((a^b) | (a^c)) ^ a
177 if mode != b:
172 if fmerge(f, mw, m2, ma) != mw.execf(f):
178 173 repo.ui.debug(_(" updating permissions for %s\n")
179 174 % f)
180 175 util.set_exec(repo.wjoin(f), mode)
181 176 del m2[f]
182 177 elif f in ma:
183 178 if n != ma[f]:
184 179 r = _("d")
185 180 if not overwrite and (linear_path or branchmerge):
186 181 r = repo.ui.prompt(
187 182 (_(" local changed %s which remote deleted\n") % f) +
188 183 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
189 184 if r == _("d"):
190 185 remove.append(f)
191 186 else:
192 187 repo.ui.debug(_("other deleted %s\n") % f)
193 188 remove.append(f) # other deleted it
194 189 else:
195 190 # file is created on branch or in working directory
196 191 if overwrite and f not in umap:
197 192 repo.ui.debug(_("remote deleted %s, clobbering\n") % f)
198 193 remove.append(f)
199 194 elif n == m1.get(f, nullid): # same as parent
200 195 if p2 == pa: # going backwards?
201 196 repo.ui.debug(_("remote deleted %s\n") % f)
202 197 remove.append(f)
203 198 else:
204 199 repo.ui.debug(_("local modified %s, keeping\n") % f)
205 200 else:
206 201 repo.ui.debug(_("working dir created %s, keeping\n") % f)
207 202
208 203 for f, n in m2.iteritems():
209 204 if partial and not partial(f):
210 205 continue
211 206 if f[0] == "/":
212 207 continue
213 208 if f in ma and n != ma[f]:
214 209 r = _("k")
215 210 if not overwrite and (linear_path or branchmerge):
216 211 r = repo.ui.prompt(
217 212 (_("remote changed %s which local deleted\n") % f) +
218 213 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
219 214 if r == _("k"):
220 215 get[f] = (m2.execf(f), n)
221 216 elif f not in ma:
222 217 repo.ui.debug(_("remote created %s\n") % f)
223 218 get[f] = (m2.execf(f), n)
224 219 else:
225 220 if overwrite or p2 == pa: # going backwards?
226 221 repo.ui.debug(_("local deleted %s, recreating\n") % f)
227 222 get[f] = (m2.execf(f), n)
228 223 else:
229 224 repo.ui.debug(_("local deleted %s\n") % f)
230 225
231 226 del mw, m1, m2, ma
232 227
228 ### apply phase
229
233 230 if overwrite:
234 231 for f in merge:
235 232 get[f] = merge[f][:2]
236 233 merge = {}
237 234
238 235 if linear_path or overwrite:
239 236 # we don't need to do any magic, just jump to the new rev
240 237 p1, p2 = p2, nullid
241 238
242 239 xp1 = hex(p1)
243 240 xp2 = hex(p2)
244 241 if p2 == nullid: xxp2 = ''
245 242 else: xxp2 = xp2
246 243
247 244 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
248 245
249 246 # get the files we don't need to change
250 247 files = get.keys()
251 248 files.sort()
252 249 for f in files:
253 250 flag, node = get[f]
254 251 if f[0] == "/":
255 252 continue
256 253 repo.ui.note(_("getting %s\n") % f)
257 254 t = repo.file(f).read(node)
258 255 repo.wwrite(f, t)
259 256 util.set_exec(repo.wjoin(f), flag)
260 if not partial:
261 if branchmerge:
262 repo.dirstate.update([f], 'n', st_mtime=-1)
263 else:
264 repo.dirstate.update([f], 'n')
265 257
266 258 # merge the tricky bits
267 259 unresolved = []
268 260 files = merge.keys()
269 261 files.sort()
270 262 for f in files:
271 263 repo.ui.status(_("merging %s\n") % f)
272 264 flag, my, other = merge[f]
273 265 ret = merge3(repo, f, my, other, xp1, xp2)
274 266 if ret:
275 267 unresolved.append(f)
276 268 util.set_exec(repo.wjoin(f), flag)
277 if not partial:
278 if branchmerge:
279 # We've done a branch merge, mark this file as merged
280 # so that we properly record the merger later
281 repo.dirstate.update([f], 'm')
282 else:
283 # We've update-merged a locally modified file, so
284 # we set the dirstate to emulate a normal checkout
285 # of that file some time in the past. Thus our
286 # merge will appear as a normal local file
287 # modification.
288 f_len = len(repo.file(f).read(other))
289 repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
290 269
291 270 remove.sort()
292 271 for f in remove:
293 272 repo.ui.note(_("removing %s\n") % f)
294 273 util.audit_path(f)
295 274 try:
296 275 util.unlink(repo.wjoin(f))
297 276 except OSError, inst:
298 277 if inst.errno != errno.ENOENT:
299 278 repo.ui.warn(_("update failed to remove %s: %s!\n") %
300 279 (f, inst.strerror))
280
281 # update dirstate
301 282 if not partial:
283 repo.dirstate.setparents(p1, p2)
284 repo.dirstate.forget(forget)
302 285 if branchmerge:
303 286 repo.dirstate.update(remove, 'r')
304 287 else:
305 288 repo.dirstate.forget(remove)
306 289
307 if not partial:
308 repo.dirstate.setparents(p1, p2)
290 files = get.keys()
291 files.sort()
292 for f in files:
293 if branchmerge:
294 repo.dirstate.update([f], 'n', st_mtime=-1)
295 else:
296 repo.dirstate.update([f], 'n')
297
298 files = merge.keys()
299 files.sort()
300 for f in files:
301 if branchmerge:
302 # We've done a branch merge, mark this file as merged
303 # so that we properly record the merger later
304 repo.dirstate.update([f], 'm')
305 else:
306 # We've update-merged a locally modified file, so
307 # we set the dirstate to emulate a normal checkout
308 # of that file some time in the past. Thus our
309 # merge will appear as a normal local file
310 # modification.
311 fl = repo.file(f)
312 f_len = fl.size(fl.rev(other))
313 repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
309 314
310 315 if show_stats:
311 316 stats = ((len(get), _("updated")),
312 317 (len(merge) - len(unresolved), _("merged")),
313 318 (len(remove), _("removed")),
314 319 (len(unresolved), _("unresolved")))
315 320 note = ", ".join([_("%d files %s") % s for s in stats])
316 321 repo.ui.status("%s\n" % note)
317 322 if not partial:
318 323 if branchmerge:
319 324 if unresolved:
320 325 repo.ui.status(_("There are unresolved merges,"
321 326 " you can redo the full merge using:\n"
322 327 " hg update -C %s\n"
323 328 " hg merge %s\n"
324 329 % (repo.changelog.rev(p1),
325 330 repo.changelog.rev(p2))))
326 331 elif remind:
327 332 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
328 333 elif unresolved:
329 334 repo.ui.status(_("There are unresolved merges with"
330 335 " locally modified files.\n"))
331 336
332 337 repo.hook('update', parent1=xp1, parent2=xxp2, error=len(unresolved))
333 338 return len(unresolved)
334 339
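
A small sketch (not part of the diff; fmerge_bits is an illustrative name) of the executable-flag merge rule that the new fmerge() encodes: a flag change on either side relative to the ancestor wins, and agreement keeps the ancestor's value.

    def fmerge_bits(a, b, c):
        # a = ancestor flag, b = local flag, c = other flag
        return ((a ^ b) | (a ^ c)) ^ a

    for a in (0, 1):
        for b in (0, 1):
            for c in (0, 1):
                print a, b, c, '->', fmerge_bits(a, b, c)
    # e.g. 0 1 0 -> 1 (local set the flag), 1 0 1 -> 0 (local cleared it)
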
@@ -1,435 +1,449 b''
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from i18n import gettext as _
10 10 from node import *
11 11 demandload(globals(), "cmdutil mdiff util")
12 12 demandload(globals(), "cStringIO email.Parser os re shutil sys tempfile")
13 13
14 14 def extract(ui, fileobj):
15 15 '''extract patch from data read from fileobj.
16 16
17 17 patch can be normal patch or contained in email message.
18 18
19 19 return tuple (filename, message, user, date). any item in returned
20 20 tuple can be None. if filename is None, fileobj did not contain
21 21 patch. caller must unlink filename when done.'''
22 22
23 23 # attempt to detect the start of a patch
24 24 # (this heuristic is borrowed from quilt)
25 25 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
26 26 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
27 27 '(---|\*\*\*)[ \t])', re.MULTILINE)
28 28
29 29 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
30 30 tmpfp = os.fdopen(fd, 'w')
31 31 try:
32 32 hgpatch = False
33 33
34 34 msg = email.Parser.Parser().parse(fileobj)
35 35
36 36 message = msg['Subject']
37 37 user = msg['From']
38 38 # should try to parse msg['Date']
39 39 date = None
40 40
41 41 if message:
42 42 message = message.replace('\n\t', ' ')
43 43 ui.debug('Subject: %s\n' % message)
44 44 if user:
45 45 ui.debug('From: %s\n' % user)
46 46 diffs_seen = 0
47 47 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
48 48
49 49 for part in msg.walk():
50 50 content_type = part.get_content_type()
51 51 ui.debug('Content-Type: %s\n' % content_type)
52 52 if content_type not in ok_types:
53 53 continue
54 54 payload = part.get_payload(decode=True)
55 55 m = diffre.search(payload)
56 56 if m:
57 57 ui.debug(_('found patch at byte %d\n') % m.start(0))
58 58 diffs_seen += 1
59 59 cfp = cStringIO.StringIO()
60 60 if message:
61 61 cfp.write(message)
62 62 cfp.write('\n')
63 63 for line in payload[:m.start(0)].splitlines():
64 64 if line.startswith('# HG changeset patch'):
65 65 ui.debug(_('patch generated by hg export\n'))
66 66 hgpatch = True
67 67 # drop earlier commit message content
68 68 cfp.seek(0)
69 69 cfp.truncate()
70 70 elif hgpatch:
71 71 if line.startswith('# User '):
72 72 user = line[7:]
73 73 ui.debug('From: %s\n' % user)
74 74 elif line.startswith("# Date "):
75 75 date = line[7:]
76 76 if not line.startswith('# '):
77 77 cfp.write(line)
78 78 cfp.write('\n')
79 79 message = cfp.getvalue()
80 80 if tmpfp:
81 81 tmpfp.write(payload)
82 82 if not payload.endswith('\n'):
83 83 tmpfp.write('\n')
84 84 elif not diffs_seen and message and content_type == 'text/plain':
85 85 message += '\n' + payload
86 86 except:
87 87 tmpfp.close()
88 88 os.unlink(tmpname)
89 89 raise
90 90
91 91 tmpfp.close()
92 92 if not diffs_seen:
93 93 os.unlink(tmpname)
94 94 return None, message, user, date
95 95 return tmpname, message, user, date
96 96
97 97 def readgitpatch(patchname):
98 98 """extract git-style metadata about patches from <patchname>"""
99 99 class gitpatch:
100 100 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
101 101 def __init__(self, path):
102 102 self.path = path
103 103 self.oldpath = None
104 104 self.mode = None
105 105 self.op = 'MODIFY'
106 106 self.copymod = False
107 107 self.lineno = 0
108 108
109 109 # Filter patch for git information
110 110 gitre = re.compile('diff --git a/(.*) b/(.*)')
111 111 pf = file(patchname)
112 112 gp = None
113 113 gitpatches = []
114 114 # Can have a git patch with only metadata, causing patch to complain
115 115 dopatch = False
116 116
117 117 lineno = 0
118 118 for line in pf:
119 119 lineno += 1
120 120 if line.startswith('diff --git'):
121 121 m = gitre.match(line)
122 122 if m:
123 123 if gp:
124 124 gitpatches.append(gp)
125 125 src, dst = m.group(1,2)
126 126 gp = gitpatch(dst)
127 127 gp.lineno = lineno
128 128 elif gp:
129 129 if line.startswith('--- '):
130 130 if gp.op in ('COPY', 'RENAME'):
131 131 gp.copymod = True
132 132 dopatch = 'filter'
133 133 gitpatches.append(gp)
134 134 gp = None
135 135 if not dopatch:
136 136 dopatch = True
137 137 continue
138 138 if line.startswith('rename from '):
139 139 gp.op = 'RENAME'
140 140 gp.oldpath = line[12:].rstrip()
141 141 elif line.startswith('rename to '):
142 142 gp.path = line[10:].rstrip()
143 143 elif line.startswith('copy from '):
144 144 gp.op = 'COPY'
145 145 gp.oldpath = line[10:].rstrip()
146 146 elif line.startswith('copy to '):
147 147 gp.path = line[8:].rstrip()
148 148 elif line.startswith('deleted file'):
149 149 gp.op = 'DELETE'
150 150 elif line.startswith('new file mode '):
151 151 gp.op = 'ADD'
152 152 gp.mode = int(line.rstrip()[-3:], 8)
153 153 elif line.startswith('new mode '):
154 154 gp.mode = int(line.rstrip()[-3:], 8)
155 155 if gp:
156 156 gitpatches.append(gp)
157 157
158 158 if not gitpatches:
159 159 dopatch = True
160 160
161 161 return (dopatch, gitpatches)
162 162
163 163 def dogitpatch(patchname, gitpatches):
164 164 """Preprocess git patch so that vanilla patch can handle it"""
165 165 pf = file(patchname)
166 166 pfline = 1
167 167
168 168 fd, patchname = tempfile.mkstemp(prefix='hg-patch-')
169 169 tmpfp = os.fdopen(fd, 'w')
170 170
171 171 try:
172 172 for i in range(len(gitpatches)):
173 173 p = gitpatches[i]
174 174 if not p.copymod:
175 175 continue
176 176
177 177 if os.path.exists(p.path):
178 178 raise util.Abort(_("cannot create %s: destination already exists") %
179 179 p.path)
180 180
181 181 (src, dst) = [os.path.join(os.getcwd(), n)
182 182 for n in (p.oldpath, p.path)]
183 183
184 184 targetdir = os.path.dirname(dst)
185 185 if not os.path.isdir(targetdir):
186 186 os.makedirs(targetdir)
187 187 try:
188 188 shutil.copyfile(src, dst)
189 189 shutil.copymode(src, dst)
190 190 except shutil.Error, inst:
191 191 raise util.Abort(str(inst))
192 192
193 193 # rewrite patch hunk
194 194 while pfline < p.lineno:
195 195 tmpfp.write(pf.readline())
196 196 pfline += 1
197 197 tmpfp.write('diff --git a/%s b/%s\n' % (p.path, p.path))
198 198 line = pf.readline()
199 199 pfline += 1
200 200 while not line.startswith('--- a/'):
201 201 tmpfp.write(line)
202 202 line = pf.readline()
203 203 pfline += 1
204 204 tmpfp.write('--- a/%s\n' % p.path)
205 205
206 206 line = pf.readline()
207 207 while line:
208 208 tmpfp.write(line)
209 209 line = pf.readline()
210 210 except:
211 211 tmpfp.close()
212 212 os.unlink(patchname)
213 213 raise
214 214
215 215 tmpfp.close()
216 216 return patchname
217 217
218 218 def patch(patchname, ui, strip=1, cwd=None):
219 219 """apply the patch <patchname> to the working directory.
220 220 a list of patched files is returned"""
221 221
222 222 (dopatch, gitpatches) = readgitpatch(patchname)
223 223
224 224 files = {}
225 225 fuzz = False
226 226 if dopatch:
227 227 if dopatch == 'filter':
228 228 patchname = dogitpatch(patchname, gitpatches)
229 229 patcher = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
230 230 args = []
231 231 if cwd:
232 232 args.append('-d %s' % util.shellquote(cwd))
233 233 fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
234 234 util.shellquote(patchname)))
235 235
236 236 if dopatch == 'filter':
237 237 False and os.unlink(patchname)
238 238
239 239 for line in fp:
240 240 line = line.rstrip()
241 241 ui.note(line + '\n')
242 242 if line.startswith('patching file '):
243 243 pf = util.parse_patch_output(line)
244 244 printed_file = False
245 245 files.setdefault(pf, (None, None))
246 246 elif line.find('with fuzz') >= 0:
247 247 fuzz = True
248 248 if not printed_file:
249 249 ui.warn(pf + '\n')
250 250 printed_file = True
251 251 ui.warn(line + '\n')
252 252 elif line.find('saving rejects to file') >= 0:
253 253 ui.warn(line + '\n')
254 254 elif line.find('FAILED') >= 0:
255 255 if not printed_file:
256 256 ui.warn(pf + '\n')
257 257 printed_file = True
258 258 ui.warn(line + '\n')
259 259
260 260 code = fp.close()
261 261 if code:
262 262 raise util.Abort(_("patch command failed: %s") %
263 263 util.explain_exit(code)[0])
264 264
265 265 for gp in gitpatches:
266 266 files[gp.path] = (gp.op, gp)
267 267
268 268 return (files, fuzz)
269 269
270 def diffopts(ui, opts={}):
271 return mdiff.diffopts(
272 text=opts.get('text'),
273 git=(opts.get('git') or
274 ui.configbool('diff', 'git', None)),
275 showfunc=(opts.get('show_function') or
276 ui.configbool('diff', 'showfunc', None)),
277 ignorews=(opts.get('ignore_all_space') or
278 ui.configbool('diff', 'ignorews', None)),
279 ignorewsamount=(opts.get('ignore_space_change') or
280 ui.configbool('diff', 'ignorewsamount', None)),
281 ignoreblanklines=(opts.get('ignore_blank_lines') or
282 ui.configbool('diff', 'ignoreblanklines', None)))
283
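A hypothetical usage sketch (not part of this change), assuming mdiff.diffopts returns an object exposing the options as attributes, as diff() below expects: explicit command-line options take precedence, and anything left unset falls back to the [diff] section of the configuration.

from mercurial import ui as uimod

u = uimod.ui()
opts = diffopts(u, {'git': True})   # as if --git had been passed on the command line
assert opts.git
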
270 284 def diff(repo, node1=None, node2=None, files=None, match=util.always,
271 285 fp=None, changes=None, opts=None):
272 286 '''print diff of changes to files between two nodes, or node and
273 287 working directory.
274 288
275 289 if node1 is None, use first dirstate parent instead.
276 290 if node2 is None, compare node1 with working directory.'''
277 291
278 292 if opts is None:
279 293 opts = mdiff.defaultopts
280 294 if fp is None:
281 295 fp = repo.ui
282 296
283 297 if not node1:
284 298 node1 = repo.dirstate.parents()[0]
285 299 # reading the data for node1 early allows it to play nicely
286 300 # with repo.status and the revlog cache.
287 301 change = repo.changelog.read(node1)
288 302 mmap = repo.manifest.read(change[0])
289 303 date1 = util.datestr(change[2])
290 304
291 305 if not changes:
292 306 changes = repo.status(node1, node2, files, match=match)[:5]
293 307 modified, added, removed, deleted, unknown = changes
294 308 if files:
295 309 def filterfiles(filters):
296 310 l = [x for x in filters if x in files]
297 311
298 312 for t in files:
299 313 if not t.endswith("/"):
300 314 t += "/"
301 315 l += [x for x in filters if x.startswith(t)]
302 316 return l
303 317
304 318 modified, added, removed = map(filterfiles, (modified, added, removed))
305 319
306 320 if not modified and not added and not removed:
307 321 return
308 322
309 323 if node2:
310 324 change = repo.changelog.read(node2)
311 325 mmap2 = repo.manifest.read(change[0])
312 326 _date2 = util.datestr(change[2])
313 327 def date2(f):
314 328 return _date2
315 329 def read(f):
316 330 return repo.file(f).read(mmap2[f])
317 331 def renamed(f):
318 332 src = repo.file(f).renamed(mmap2[f])
319 333 return src and src[0] or None
320 334 else:
321 335 tz = util.makedate()[1]
322 336 _date2 = util.datestr()
323 337 def date2(f):
324 338 try:
325 339 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
326 340 except OSError, err:
327 341 if err.errno != errno.ENOENT: raise
328 342 return _date2
329 343 def read(f):
330 344 return repo.wread(f)
331 345 def renamed(f):
332 346 return repo.dirstate.copies.get(f)
333 347
334 348 if repo.ui.quiet:
335 349 r = None
336 350 else:
337 351 hexfunc = repo.ui.verbose and hex or short
338 352 r = [hexfunc(node) for node in [node1, node2] if node]
339 353
340 354 if opts.git:
341 355 copied = {}
342 356 for f in added:
343 357 src = renamed(f)
344 358 if src:
345 359 copied[f] = src
346 360 srcs = [x[1] for x in copied.items()]
347 361
348 362 all = modified + added + removed
349 363 all.sort()
350 364 for f in all:
351 365 to = None
352 366 tn = None
353 367 dodiff = True
354 368 if f in mmap:
355 369 to = repo.file(f).read(mmap[f])
356 370 if f not in removed:
357 371 tn = read(f)
358 372 if opts.git:
359 373 def gitmode(x):
360 374 return x and '100755' or '100644'
361 375 def addmodehdr(header, omode, nmode):
362 376 if omode != nmode:
363 377 header.append('old mode %s\n' % omode)
364 378 header.append('new mode %s\n' % nmode)
365 379
366 380 a, b = f, f
367 381 header = []
368 382 if f in added:
369 383 if node2:
370 384 mode = gitmode(mmap2.execf(f))
371 385 else:
372 386 mode = gitmode(util.is_exec(repo.wjoin(f), None))
373 387 if f in copied:
374 388 a = copied[f]
375 389 omode = gitmode(mmap.execf(a))
376 390 addmodehdr(header, omode, mode)
377 391 op = a in removed and 'rename' or 'copy'
378 392 header.append('%s from %s\n' % (op, a))
379 393 header.append('%s to %s\n' % (op, f))
380 394 to = repo.file(a).read(mmap[a])
381 395 else:
382 396 header.append('new file mode %s\n' % mode)
383 397 elif f in removed:
384 398 if f in srcs:
385 399 dodiff = False
386 400 else:
387 401 mode = gitmode(mmap.execf(f))
388 402 header.append('deleted file mode %s\n' % mode)
389 403 else:
390 404 omode = gitmode(mmap.execf(f))
391 405 nmode = gitmode(util.is_exec(repo.wjoin(f), mmap.execf(f)))
392 406 addmodehdr(header, omode, nmode)
393 407 r = None
394 408 if dodiff:
395 409 header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
396 410 fp.write(''.join(header))
397 411 if dodiff:
398 412 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, opts=opts))
399 413
400 414 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
401 415 opts=None):
402 416 '''export changesets as hg patches.'''
403 417
404 418 total = len(revs)
405 419 revwidth = max(map(len, revs))
406 420
407 421 def single(node, seqno, fp):
408 422 parents = [p for p in repo.changelog.parents(node) if p != nullid]
409 423 if switch_parent:
410 424 parents.reverse()
411 425 prev = (parents and parents[0]) or nullid
412 426 change = repo.changelog.read(node)
413 427
414 428 if not fp:
415 429 fp = cmdutil.make_file(repo, template, node, total=total,
416 430 seqno=seqno, revwidth=revwidth)
417 431 if fp not in (sys.stdout, repo.ui):
418 432 repo.ui.note("%s\n" % fp.name)
419 433
420 434 fp.write("# HG changeset patch\n")
421 435 fp.write("# User %s\n" % change[1])
422 436 fp.write("# Date %d %d\n" % change[2])
423 437 fp.write("# Node ID %s\n" % hex(node))
424 438 fp.write("# Parent %s\n" % hex(prev))
425 439 if len(parents) > 1:
426 440 fp.write("# Parent %s\n" % hex(parents[1]))
427 441 fp.write(change[4].rstrip())
428 442 fp.write("\n\n")
429 443
430 444 diff(repo, prev, node, fp=fp, opts=opts)
431 445 if fp not in (sys.stdout, repo.ui):
432 446 fp.close()
433 447
434 448 for seqno, cset in enumerate(revs):
435 449 single(cset, seqno, fp)
@@ -1,1285 +1,1298 b''
1 1 """
2 2 revlog.py - storage back-end for mercurial
3 3
4 4 This provides efficient delta storage with O(1) retrieve and append
5 5 and O(changes) merge between branches
6 6
7 7 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
8 8
9 9 This software may be used and distributed according to the terms
10 10 of the GNU General Public License, incorporated herein by reference.
11 11 """
12 12
13 13 from node import *
14 14 from i18n import gettext as _
15 15 from demandload import demandload
16 16 demandload(globals(), "binascii changegroup errno heapq mdiff os")
17 17 demandload(globals(), "sha struct util zlib")
18 18
19 19 # revlog version strings
20 20 REVLOGV0 = 0
21 21 REVLOGNG = 1
22 22
23 23 # revlog flags
24 24 REVLOGNGINLINEDATA = (1 << 16)
25 25 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
26 26
27 27 REVLOG_DEFAULT_FORMAT = REVLOGNG
28 28 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
29 29
30 30 def flagstr(flag):
31 31 if flag == "inline":
32 32 return REVLOGNGINLINEDATA
33 33 raise RevlogError(_("unknown revlog flag %s" % flag))
34 34
35 35 def hash(text, p1, p2):
36 36 """generate a hash from the given text and its parent hashes
37 37
38 38 This hash combines both the current file contents and its history
39 39 in a manner that makes it easy to distinguish nodes with the same
40 40 content in the revision graph.
41 41 """
42 42 l = [p1, p2]
43 43 l.sort()
44 44 s = sha.new(l[0])
45 45 s.update(l[1])
46 46 s.update(text)
47 47 return s.digest()
48 48
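A small illustration (not part of the original file) of the property the docstring describes: because the parents are sorted before hashing, their order does not affect the resulting nodeid.

n1 = hash("contents", "\1" * 20, "\2" * 20)
n2 = hash("contents", "\2" * 20, "\1" * 20)   # parents swapped
assert n1 == n2 and len(n1) == 20             # sha-1 digest, 20 bytes
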
49 49 def compress(text):
50 50 """ generate a possibly-compressed representation of text """
51 51 if not text: return ("", text)
52 52 if len(text) < 44:
53 53 if text[0] == '\0': return ("", text)
54 54 return ('u', text)
55 55 bin = zlib.compress(text)
56 56 if len(bin) > len(text):
57 57 if text[0] == '\0': return ("", text)
58 58 return ('u', text)
59 59 return ("", bin)
60 60
61 61 def decompress(bin):
62 62 """ decompress the given input """
63 63 if not bin: return bin
64 64 t = bin[0]
65 65 if t == '\0': return bin
66 66 if t == 'x': return zlib.decompress(bin)
67 67 if t == 'u': return bin[1:]
68 68 raise RevlogError(_("unknown compression type %r") % t)
69 69
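A minimal round-trip sketch (not part of the original file): compress() keeps short or incompressible text verbatim behind a 'u' marker, and decompress() reverses either representation.

hunk = "some revision data\n" * 4
assert decompress("".join(compress(hunk))) == hunk
assert decompress("".join(compress("tiny"))) == "tiny"   # under 44 bytes: stored as-is
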
70 70 indexformatv0 = ">4l20s20s20s"
71 71 v0shaoffset = 56
72 72 # index ng:
73 73 #  6 bytes: offset
74 74 #  2 bytes: flags
75 75 #  4 bytes: compressed length
76 76 #  4 bytes: uncompressed length
77 77 #  4 bytes: base rev
78 78 #  4 bytes: link rev
79 79 #  4 bytes: parent 1 rev
80 80 #  4 bytes: parent 2 rev
81 81 # 32 bytes: nodeid
82 82 indexformatng = ">Qiiiiii20s12x"
83 83 ngshaoffset = 32
84 84 versionformat = ">i"
85 85
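A hedged sketch (not part of the original file) of what one RevlogNG index record looks like on disk, using the format string defined above; the field values are invented.

import struct

entry = (0,                # offset and flags, packed as in offset_type()
         11, 11,           # compressed and uncompressed lengths
         0, 0,             # base rev, link rev
         -1, -1,           # parent revs (-1 means no parent)
         "\0" * 20)        # 20-byte nodeid (nullid here)
record = struct.pack(indexformatng, *entry)
assert len(record) == struct.calcsize(indexformatng) == 64
assert struct.unpack(indexformatng, record) == entry
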
86 86 class lazyparser(object):
87 87 """
88 88 this class avoids the need to parse the entirety of large indices
89 89 """
90 90
91 91 # lazyparser is not safe to use on windows if win32 extensions not
92 92 # available. it keeps file handle open, which make it not possible
93 93 # to break hardlinks on local cloned repos.
94 94 safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
95 95 hasattr(util, 'win32api'))
96 96
97 97 def __init__(self, dataf, size, indexformat, shaoffset):
98 98 self.dataf = dataf
99 99 self.format = indexformat
100 100 self.s = struct.calcsize(indexformat)
101 101 self.indexformat = indexformat
102 102 self.datasize = size
103 103 self.l = size/self.s
104 104 self.index = [None] * self.l
105 105 self.map = {nullid: -1}
106 106 self.allmap = 0
107 107 self.all = 0
108 108 self.mapfind_count = 0
109 109 self.shaoffset = shaoffset
110 110
111 111 def loadmap(self):
112 112 """
113 113 during a commit, we need to make sure the rev being added is
114 114 not a duplicate. This requires loading the entire index,
115 115 which is fairly slow. loadmap can load up just the node map,
116 116 which takes much less time.
117 117 """
118 118 if self.allmap: return
119 119 start = 0
120 120 end = self.datasize
121 121 self.allmap = 1
122 122 cur = 0
123 123 count = 0
124 124 blocksize = self.s * 256
125 125 self.dataf.seek(0)
126 126 while cur < end:
127 127 data = self.dataf.read(blocksize)
128 128 off = 0
129 129 for x in xrange(256):
130 130 n = data[off + self.shaoffset:off + self.shaoffset + 20]
131 131 self.map[n] = count
132 132 count += 1
133 133 if count >= self.l:
134 134 break
135 135 off += self.s
136 136 cur += blocksize
137 137
138 138 def loadblock(self, blockstart, blocksize, data=None):
139 139 if self.all: return
140 140 if data is None:
141 141 self.dataf.seek(blockstart)
142 142 data = self.dataf.read(blocksize)
143 143 lend = len(data) / self.s
144 144 i = blockstart / self.s
145 145 off = 0
146 146 for x in xrange(lend):
147 147 if self.index[i + x] == None:
148 148 b = data[off : off + self.s]
149 149 self.index[i + x] = b
150 150 n = b[self.shaoffset:self.shaoffset + 20]
151 151 self.map[n] = i + x
152 152 off += self.s
153 153
154 154 def findnode(self, node):
155 155 """search backwards through the index file for a specific node"""
156 156 if self.allmap: return None
157 157
158 158 # hg log will cause many many searches for the manifest
159 159 # nodes. After we get called a few times, just load the whole
160 160 # thing.
161 161 if self.mapfind_count > 8:
162 162 self.loadmap()
163 163 if node in self.map:
164 164 return node
165 165 return None
166 166 self.mapfind_count += 1
167 167 last = self.l - 1
168 168 while self.index[last] != None:
169 169 if last == 0:
170 170 self.all = 1
171 171 self.allmap = 1
172 172 return None
173 173 last -= 1
174 174 end = (last + 1) * self.s
175 175 blocksize = self.s * 256
176 176 while end >= 0:
177 177 start = max(end - blocksize, 0)
178 178 self.dataf.seek(start)
179 179 data = self.dataf.read(end - start)
180 180 findend = end - start
181 181 while True:
182 182 # we're searching backwards, so we have to make sure
183 183 # we don't find a changeset where this node is a parent
184 184 off = data.rfind(node, 0, findend)
185 185 findend = off
186 186 if off >= 0:
187 187 i = off / self.s
188 188 off = i * self.s
189 189 n = data[off + self.shaoffset:off + self.shaoffset + 20]
190 190 if n == node:
191 191 self.map[n] = i + start / self.s
192 192 return node
193 193 else:
194 194 break
195 195 end -= blocksize
196 196 return None
197 197
198 198 def loadindex(self, i=None, end=None):
199 199 if self.all: return
200 200 all = False
201 201 if i == None:
202 202 blockstart = 0
203 203 blocksize = (512 / self.s) * self.s
204 204 end = self.datasize
205 205 all = True
206 206 else:
207 207 if end:
208 208 blockstart = i * self.s
209 209 end = end * self.s
210 210 blocksize = end - blockstart
211 211 else:
212 212 blockstart = (i & ~(32)) * self.s
213 213 blocksize = self.s * 64
214 214 end = blockstart + blocksize
215 215 while blockstart < end:
216 216 self.loadblock(blockstart, blocksize)
217 217 blockstart += blocksize
218 218 if all: self.all = True
219 219
220 220 class lazyindex(object):
221 221 """a lazy version of the index array"""
222 222 def __init__(self, parser):
223 223 self.p = parser
224 224 def __len__(self):
225 225 return len(self.p.index)
226 226 def load(self, pos):
227 227 if pos < 0:
228 228 pos += len(self.p.index)
229 229 self.p.loadindex(pos)
230 230 return self.p.index[pos]
231 231 def __getitem__(self, pos):
232 232 ret = self.p.index[pos] or self.load(pos)
233 233 if isinstance(ret, str):
234 234 ret = struct.unpack(self.p.indexformat, ret)
235 235 return ret
236 236 def __setitem__(self, pos, item):
237 237 self.p.index[pos] = item
238 238 def __delitem__(self, pos):
239 239 del self.p.index[pos]
240 240 def append(self, e):
241 241 self.p.index.append(e)
242 242
243 243 class lazymap(object):
244 244 """a lazy version of the node map"""
245 245 def __init__(self, parser):
246 246 self.p = parser
247 247 def load(self, key):
248 248 n = self.p.findnode(key)
249 249 if n == None:
250 250 raise KeyError(key)
251 251 def __contains__(self, key):
252 252 if key in self.p.map:
253 253 return True
254 254 self.p.loadmap()
255 255 return key in self.p.map
256 256 def __iter__(self):
257 257 yield nullid
258 258 for i in xrange(self.p.l):
259 259 ret = self.p.index[i]
260 260 if not ret:
261 261 self.p.loadindex(i)
262 262 ret = self.p.index[i]
263 263 if isinstance(ret, str):
264 264 ret = struct.unpack(self.p.indexformat, ret)
265 265 yield ret[-1]
266 266 def __getitem__(self, key):
267 267 try:
268 268 return self.p.map[key]
269 269 except KeyError:
270 270 try:
271 271 self.load(key)
272 272 return self.p.map[key]
273 273 except KeyError:
274 274 raise KeyError("node " + hex(key))
275 275 def __setitem__(self, key, val):
276 276 self.p.map[key] = val
277 277 def __delitem__(self, key):
278 278 del self.p.map[key]
279 279
280 280 class RevlogError(Exception): pass
281 281
282 282 class revlog(object):
283 283 """
284 284 the underlying revision storage object
285 285
286 286 A revlog consists of two parts, an index and the revision data.
287 287
288 288 The index is a file with a fixed record size containing
289 289 information on each revision, including its nodeid (hash), the
290 290 nodeids of its parents, the position and offset of its data within
291 291 the data file, and the revision it's based on. Finally, each entry
292 292 contains a linkrev entry that can serve as a pointer to external
293 293 data.
294 294
295 295 The revision data itself is a linear collection of data chunks.
296 296 Each chunk represents a revision and is usually represented as a
297 297 delta against the previous chunk. To bound lookup time, runs of
298 298 deltas are limited to about 2 times the length of the original
299 299 version data. This makes retrieval of a version proportional to
300 300 its size, or O(1) relative to the number of revisions.
301 301
302 302 Both pieces of the revlog are written to in an append-only
303 303 fashion, which means we never need to rewrite a file to insert or
304 304 remove data, and can use some simple techniques to avoid the need
305 305 for locking while reading.
306 306 """
307 307 def __init__(self, opener, indexfile, datafile,
308 308 defversion=REVLOG_DEFAULT_VERSION):
309 309 """
310 310 create a revlog object
311 311
312 312 opener is a function that abstracts the file opening operation
313 313 and can be used to implement COW semantics or the like.
314 314 """
315 315 self.indexfile = indexfile
316 316 self.datafile = datafile
317 317 self.opener = opener
318 318
319 319 self.indexstat = None
320 320 self.cache = None
321 321 self.chunkcache = None
322 322 self.defversion = defversion
323 323 self.load()
324 324
325 325 def load(self):
326 326 v = self.defversion
327 327 try:
328 328 f = self.opener(self.indexfile)
329 329 i = f.read(4)
330 330 f.seek(0)
331 331 except IOError, inst:
332 332 if inst.errno != errno.ENOENT:
333 333 raise
334 334 i = ""
335 335 else:
336 336 try:
337 337 st = util.fstat(f)
338 338 except AttributeError, inst:
339 339 st = None
340 340 else:
341 341 oldst = self.indexstat
342 342 if (oldst and st.st_dev == oldst.st_dev
343 343 and st.st_ino == oldst.st_ino
344 344 and st.st_mtime == oldst.st_mtime
345 345 and st.st_ctime == oldst.st_ctime):
346 346 return
347 347 self.indexstat = st
348 348 if len(i) > 0:
349 349 v = struct.unpack(versionformat, i)[0]
350 350 flags = v & ~0xFFFF
351 351 fmt = v & 0xFFFF
352 352 if fmt == REVLOGV0:
353 353 if flags:
354 354 raise RevlogError(_("index %s invalid flags %x for format v0" %
355 355 (self.indexfile, flags)))
356 356 elif fmt == REVLOGNG:
357 357 if flags & ~REVLOGNGINLINEDATA:
358 358 raise RevlogError(_("index %s invalid flags %x for revlogng" %
359 359 (self.indexfile, flags)))
360 360 else:
361 361 raise RevlogError(_("index %s invalid format %d" %
362 362 (self.indexfile, fmt)))
363 363 self.version = v
364 364 if v == REVLOGV0:
365 365 self.indexformat = indexformatv0
366 366 shaoffset = v0shaoffset
367 367 else:
368 368 self.indexformat = indexformatng
369 369 shaoffset = ngshaoffset
370 370
371 371 if i:
372 372 if (lazyparser.safe_to_use and not self.inlinedata() and
373 373 st and st.st_size > 10000):
374 374 # big index, let's parse it on demand
375 375 parser = lazyparser(f, st.st_size, self.indexformat, shaoffset)
376 376 self.index = lazyindex(parser)
377 377 self.nodemap = lazymap(parser)
378 378 else:
379 379 self.parseindex(f, st)
380 380 if self.version != REVLOGV0:
381 381 e = list(self.index[0])
382 382 type = self.ngtype(e[0])
383 383 e[0] = self.offset_type(0, type)
384 384 self.index[0] = e
385 385 else:
386 386 self.nodemap = { nullid: -1}
387 387 self.index = []
388 388
389 389
390 390 def parseindex(self, fp, st):
391 391 s = struct.calcsize(self.indexformat)
392 392 self.index = []
393 393 self.nodemap = {nullid: -1}
394 394 inline = self.inlinedata()
395 395 n = 0
396 396 leftover = None
397 397 while True:
398 398 if st:
399 399 data = fp.read(65536)
400 400 else:
401 401 # hack for httprangereader, which doesn't do partial reads well
402 402 data = fp.read()
403 403 if not data:
404 404 break
405 405 if n == 0 and self.inlinedata():
406 406 # cache the first chunk
407 407 self.chunkcache = (0, data)
408 408 if leftover:
409 409 data = leftover + data
410 410 leftover = None
411 411 off = 0
412 412 l = len(data)
413 413 while off < l:
414 414 if l - off < s:
415 415 leftover = data[off:]
416 416 break
417 417 cur = data[off:off + s]
418 418 off += s
419 419 e = struct.unpack(self.indexformat, cur)
420 420 self.index.append(e)
421 421 self.nodemap[e[-1]] = n
422 422 n += 1
423 423 if inline:
424 424 off += e[1]
425 425 if off > l:
426 426 # some things don't seek well, just read it
427 427 fp.read(off - l)
428 428 if not st:
429 429 break
430 430
431 431
432 432 def ngoffset(self, q):
433 433 if q & 0xFFFF:
434 434 raise RevlogError(_('%s: incompatible revision flag %x') %
435 435 (self.indexfile, q))
436 436 return long(q >> 16)
437 437
438 438 def ngtype(self, q):
439 439 return int(q & 0xFFFF)
440 440
441 441 def offset_type(self, offset, type):
442 442 return long(long(offset) << 16 | type)
443 443
444 444 def loadindex(self, start, end):
445 445 """load a block of indexes all at once from the lazy parser"""
446 446 if isinstance(self.index, lazyindex):
447 447 self.index.p.loadindex(start, end)
448 448
449 449 def loadindexmap(self):
450 450 """loads both the map and the index from the lazy parser"""
451 451 if isinstance(self.index, lazyindex):
452 452 p = self.index.p
453 453 p.loadindex()
454 454 self.nodemap = p.map
455 455
456 456 def loadmap(self):
457 457 """loads the map from the lazy parser"""
458 458 if isinstance(self.nodemap, lazymap):
459 459 self.nodemap.p.loadmap()
460 460 self.nodemap = self.nodemap.p.map
461 461
462 462 def inlinedata(self): return self.version & REVLOGNGINLINEDATA
463 463 def tip(self): return self.node(len(self.index) - 1)
464 464 def count(self): return len(self.index)
465 465 def node(self, rev):
466 466 return (rev < 0) and nullid or self.index[rev][-1]
467 467 def rev(self, node):
468 468 try:
469 469 return self.nodemap[node]
470 470 except KeyError:
471 471 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
472 472 def linkrev(self, node):
473 473 return (node == nullid) and -1 or self.index[self.rev(node)][-4]
474 474 def parents(self, node):
475 475 if node == nullid: return (nullid, nullid)
476 476 r = self.rev(node)
477 477 d = self.index[r][-3:-1]
478 478 if self.version == REVLOGV0:
479 479 return d
480 480 return [ self.node(x) for x in d ]
481 481 def parentrevs(self, rev):
482 482 if rev == -1:
483 483 return (-1, -1)
484 484 d = self.index[rev][-3:-1]
485 485 if self.version == REVLOGV0:
486 486 return [ self.rev(x) for x in d ]
487 487 return d
488 488 def start(self, rev):
489 489 if rev < 0:
490 490 return -1
491 491 if self.version != REVLOGV0:
492 492 return self.ngoffset(self.index[rev][0])
493 493 return self.index[rev][0]
494 494
495 495 def end(self, rev): return self.start(rev) + self.length(rev)
496 496
497 497 def size(self, rev):
498 498 """return the length of the uncompressed text for a given revision"""
499 499 l = -1
500 500 if self.version != REVLOGV0:
501 501 l = self.index[rev][2]
502 502 if l >= 0:
503 503 return l
504 504
505 505 t = self.revision(self.node(rev))
506 506 return len(t)
507 507
508 508 # alternate implementation. The advantage of this code is that it
509 509 # will be faster for a single revision. But, the results are not
510 510 # cached, so finding the size of every revision will be slower.
511 511 """
512 512 if self.cache and self.cache[1] == rev:
513 513 return len(self.cache[2])
514 514
515 515 base = self.base(rev)
516 516 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
517 517 base = self.cache[1]
518 518 text = self.cache[2]
519 519 else:
520 520 text = self.revision(self.node(base))
521 521
522 522 l = len(text)
523 523 for x in xrange(base + 1, rev + 1):
524 524 l = mdiff.patchedsize(l, self.chunk(x))
525 525 return l
526 526 """
527 527
528 528 def length(self, rev):
529 529 if rev < 0:
530 530 return 0
531 531 else:
532 532 return self.index[rev][1]
533 533 def base(self, rev): return (rev < 0) and rev or self.index[rev][-5]
534 534
535 535 def reachable(self, rev, stop=None):
536 536 reachable = {}
537 537 visit = [rev]
538 538 reachable[rev] = 1
539 539 if stop:
540 540 stopn = self.rev(stop)
541 541 else:
542 542 stopn = 0
543 543 while visit:
544 544 n = visit.pop(0)
545 545 if n == stop:
546 546 continue
547 547 if n == nullid:
548 548 continue
549 549 for p in self.parents(n):
550 550 if self.rev(p) < stopn:
551 551 continue
552 552 if p not in reachable:
553 553 reachable[p] = 1
554 554 visit.append(p)
555 555 return reachable
556 556
557 557 def nodesbetween(self, roots=None, heads=None):
558 558 """Return a tuple containing three elements. Elements 1 and 2 contain
559 559 a final list of bases and heads after all the unreachable ones have been
560 560 pruned. Element 0 contains a topologically sorted list of all
561 561
562 562 nodes that satisfy these constraints:
563 563 1. All nodes must be descended from a node in roots (the nodes on
564 564 roots are considered descended from themselves).
565 565 2. All nodes must also be ancestors of a node in heads (the nodes in
566 566 heads are considered to be their own ancestors).
567 567
568 568 If roots is unspecified, nullid is assumed as the only root.
569 569 If heads is unspecified, it is taken to be the output of the
570 570 heads method (i.e. a list of all nodes in the repository that
571 571 have no children)."""
572 572 nonodes = ([], [], [])
573 573 if roots is not None:
574 574 roots = list(roots)
575 575 if not roots:
576 576 return nonodes
577 577 lowestrev = min([self.rev(n) for n in roots])
578 578 else:
579 579 roots = [nullid] # Everybody's a descendent of nullid
580 580 lowestrev = -1
581 581 if (lowestrev == -1) and (heads is None):
582 582 # We want _all_ the nodes!
583 583 return ([self.node(r) for r in xrange(0, self.count())],
584 584 [nullid], list(self.heads()))
585 585 if heads is None:
586 586 # All nodes are ancestors, so the latest ancestor is the last
587 587 # node.
588 588 highestrev = self.count() - 1
589 589 # Set ancestors to None to signal that every node is an ancestor.
590 590 ancestors = None
591 591 # Set heads to an empty dictionary for later discovery of heads
592 592 heads = {}
593 593 else:
594 594 heads = list(heads)
595 595 if not heads:
596 596 return nonodes
597 597 ancestors = {}
598 598 # Start at the top and keep marking parents until we're done.
599 599 nodestotag = heads[:]
600 600 # Turn heads into a dictionary so we can remove 'fake' heads.
601 601 # Also, later we will be using it to filter out the heads we can't
602 602 # find from roots.
603 603 heads = dict.fromkeys(heads, 0)
604 604 # Remember where the top was so we can use it as a limit later.
605 605 highestrev = max([self.rev(n) for n in nodestotag])
606 606 while nodestotag:
607 607 # grab a node to tag
608 608 n = nodestotag.pop()
609 609 # Never tag nullid
610 610 if n == nullid:
611 611 continue
612 612 # A node's revision number represents its place in a
613 613 # topologically sorted list of nodes.
614 614 r = self.rev(n)
615 615 if r >= lowestrev:
616 616 if n not in ancestors:
617 617 # If we are possibly a descendent of one of the roots
618 618 # and we haven't already been marked as an ancestor
619 619 ancestors[n] = 1 # Mark as ancestor
620 620 # Add non-nullid parents to list of nodes to tag.
621 621 nodestotag.extend([p for p in self.parents(n) if
622 622 p != nullid])
623 623 elif n in heads: # We've seen it before, is it a fake head?
624 624 # So it is, real heads should not be the ancestors of
625 625 # any other heads.
626 626 heads.pop(n)
627 627 if not ancestors:
628 628 return nonodes
629 629 # Now that we have our set of ancestors, we want to remove any
630 630 # roots that are not ancestors.
631 631
632 632 # If one of the roots was nullid, everything is included anyway.
633 633 if lowestrev > -1:
634 634 # But, since we weren't, let's recompute the lowest rev to not
635 635 # include roots that aren't ancestors.
636 636
637 637 # Filter out roots that aren't ancestors of heads
638 638 roots = [n for n in roots if n in ancestors]
639 639 # Recompute the lowest revision
640 640 if roots:
641 641 lowestrev = min([self.rev(n) for n in roots])
642 642 else:
643 643 # No more roots? Return empty list
644 644 return nonodes
645 645 else:
646 646 # We are descending from nullid, and don't need to care about
647 647 # any other roots.
648 648 lowestrev = -1
649 649 roots = [nullid]
650 650 # Transform our roots list into a 'set' (i.e. a dictionary where the
651 651 # values don't matter.
652 652 descendents = dict.fromkeys(roots, 1)
653 653 # Also, keep the original roots so we can filter out roots that aren't
654 654 # 'real' roots (i.e. are descended from other roots).
655 655 roots = descendents.copy()
656 656 # Our topologically sorted list of output nodes.
657 657 orderedout = []
658 658 # Don't start at nullid since we don't want nullid in our output list,
659 659 # and if nullid shows up in descedents, empty parents will look like
660 660 # they're descendents.
661 661 for r in xrange(max(lowestrev, 0), highestrev + 1):
662 662 n = self.node(r)
663 663 isdescendent = False
664 664 if lowestrev == -1: # Everybody is a descendent of nullid
665 665 isdescendent = True
666 666 elif n in descendents:
667 667 # n is already a descendent
668 668 isdescendent = True
669 669 # This check only needs to be done here because all the roots
670 670 # will start being marked as descendents before the loop.
671 671 if n in roots:
672 672 # If n was a root, check if it's a 'real' root.
673 673 p = tuple(self.parents(n))
674 674 # If any of its parents are descendents, it's not a root.
675 675 if (p[0] in descendents) or (p[1] in descendents):
676 676 roots.pop(n)
677 677 else:
678 678 p = tuple(self.parents(n))
679 679 # A node is a descendent if either of its parents are
680 680 # descendents. (We seeded the descendents list with the roots
681 681 # up there, remember?)
682 682 if (p[0] in descendents) or (p[1] in descendents):
683 683 descendents[n] = 1
684 684 isdescendent = True
685 685 if isdescendent and ((ancestors is None) or (n in ancestors)):
686 686 # Only include nodes that are both descendents and ancestors.
687 687 orderedout.append(n)
688 688 if (ancestors is not None) and (n in heads):
689 689 # We're trying to figure out which heads are reachable
690 690 # from roots.
691 691 # Mark this head as having been reached
692 692 heads[n] = 1
693 693 elif ancestors is None:
694 694 # Otherwise, we're trying to discover the heads.
695 695 # Assume this is a head because if it isn't, the next step
696 696 # will eventually remove it.
697 697 heads[n] = 1
698 698 # But, obviously its parents aren't.
699 699 for p in self.parents(n):
700 700 heads.pop(p, None)
701 701 heads = [n for n in heads.iterkeys() if heads[n] != 0]
702 702 roots = roots.keys()
703 703 assert orderedout
704 704 assert roots
705 705 assert heads
706 706 return (orderedout, roots, heads)
707 707
708 708 def heads(self, start=None):
709 709 """return the list of all nodes that have no children
710 710
711 711 if start is specified, only heads that are descendants of
712 712 start will be returned
713 713
714 714 """
715 715 if start is None:
716 716 start = nullid
717 717 startrev = self.rev(start)
718 718 reachable = {startrev: 1}
719 719 heads = {startrev: 1}
720 720
721 721 parentrevs = self.parentrevs
722 722 for r in xrange(startrev + 1, self.count()):
723 723 for p in parentrevs(r):
724 724 if p in reachable:
725 725 reachable[r] = 1
726 726 heads[r] = 1
727 727 if p in heads:
728 728 del heads[p]
729 729 return [self.node(r) for r in heads]
730 730
731 731 def children(self, node):
732 732 """find the children of a given node"""
733 733 c = []
734 734 p = self.rev(node)
735 735 for r in range(p + 1, self.count()):
736 736 n = self.node(r)
737 737 for pn in self.parents(n):
738 738 if pn == node:
739 739 c.append(n)
740 740 continue
741 741 elif pn == nullid:
742 742 continue
743 743 return c
744 744
745 745 def lookup(self, id):
746 746 """locate a node based on revision number or subset of hex nodeid"""
747 747 if type(id) == type(0):
748 748 return self.node(id)
749 749 try:
750 750 rev = int(id)
751 751 if str(rev) != id: raise ValueError
752 752 if rev < 0: rev = self.count() + rev
753 753 if rev < 0 or rev >= self.count(): raise ValueError
754 754 return self.node(rev)
755 755 except (ValueError, OverflowError):
756 756 c = []
757 757 for n in self.nodemap:
758 758 if hex(n).startswith(id):
759 759 c.append(n)
760 760 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
761 761 if len(c) == 1: return c[0]
762 762
763 763 # might need fixing if we change hash lengths
764 764 if len(id) == 20 and id in self.nodemap:
765 765 return id
766 766
767 767 raise RevlogError(_("No match found"))
768 768
769 def cmp(self, node, text):
770 """compare text with a given file revision"""
771 p1, p2 = self.parents(node)
772 return hash(text, p1, p2) != node
773
774 def makenode(self, node, text):
775 """calculate a file nodeid for text, descended or possibly
776 unchanged from node"""
777
778 if self.cmp(node, text):
779 return hash(text, node, nullid)
780 return node
781
769 782 def diff(self, a, b):
770 783 """return a delta between two revisions"""
771 784 return mdiff.textdiff(a, b)
772 785
773 786 def patches(self, t, pl):
774 787 """apply a list of patches to a string"""
775 788 return mdiff.patches(t, pl)
776 789
777 790 def chunk(self, rev, df=None, cachelen=4096):
778 791 start, length = self.start(rev), self.length(rev)
779 792 inline = self.inlinedata()
780 793 if inline:
781 794 start += (rev + 1) * struct.calcsize(self.indexformat)
782 795 end = start + length
783 796 def loadcache(df):
784 797 cache_length = max(cachelen, length) # 4k
785 798 if not df:
786 799 if inline:
787 800 df = self.opener(self.indexfile)
788 801 else:
789 802 df = self.opener(self.datafile)
790 803 df.seek(start)
791 804 self.chunkcache = (start, df.read(cache_length))
792 805
793 806 if not self.chunkcache:
794 807 loadcache(df)
795 808
796 809 cache_start = self.chunkcache[0]
797 810 cache_end = cache_start + len(self.chunkcache[1])
798 811 if start >= cache_start and end <= cache_end:
799 812 # it is cached
800 813 offset = start - cache_start
801 814 else:
802 815 loadcache(df)
803 816 offset = 0
804 817
805 818 #def checkchunk():
806 819 # df = self.opener(self.datafile)
807 820 # df.seek(start)
808 821 # return df.read(length)
809 822 #assert s == checkchunk()
810 823 return decompress(self.chunkcache[1][offset:offset + length])
811 824
812 825 def delta(self, node):
813 826 """return or calculate a delta between a node and its predecessor"""
814 827 r = self.rev(node)
815 828 return self.revdiff(r - 1, r)
816 829
817 830 def revdiff(self, rev1, rev2):
818 831 """return or calculate a delta between two revisions"""
819 832 b1 = self.base(rev1)
820 833 b2 = self.base(rev2)
821 834 if b1 == b2 and rev1 + 1 == rev2:
822 835 return self.chunk(rev2)
823 836 else:
824 837 return self.diff(self.revision(self.node(rev1)),
825 838 self.revision(self.node(rev2)))
826 839
827 840 def revision(self, node):
828 841 """return an uncompressed revision of a given node"""
829 842 if node == nullid: return ""
830 843 if self.cache and self.cache[0] == node: return self.cache[2]
831 844
832 845 # look up what we need to read
833 846 text = None
834 847 rev = self.rev(node)
835 848 base = self.base(rev)
836 849
837 850 if self.inlinedata():
838 851 # we probably have the whole chunk cached
839 852 df = None
840 853 else:
841 854 df = self.opener(self.datafile)
842 855
843 856 # do we have useful data cached?
844 857 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
845 858 base = self.cache[1]
846 859 text = self.cache[2]
847 860 self.loadindex(base, rev + 1)
848 861 else:
849 862 self.loadindex(base, rev + 1)
850 863 text = self.chunk(base, df=df)
851 864
852 865 bins = []
853 866 for r in xrange(base + 1, rev + 1):
854 867 bins.append(self.chunk(r, df=df))
855 868
856 869 text = self.patches(text, bins)
857 870
858 871 p1, p2 = self.parents(node)
859 872 if node != hash(text, p1, p2):
860 873 raise RevlogError(_("integrity check failed on %s:%d")
861 874 % (self.datafile, rev))
862 875
863 876 self.cache = (node, rev, text)
864 877 return text
865 878
866 879 def checkinlinesize(self, tr, fp=None):
867 880 if not self.inlinedata():
868 881 return
869 882 if not fp:
870 883 fp = self.opener(self.indexfile, 'r')
871 884 fp.seek(0, 2)
872 885 size = fp.tell()
873 886 if size < 131072:
874 887 return
875 888 trinfo = tr.find(self.indexfile)
876 889 if trinfo == None:
877 890 raise RevlogError(_("%s not found in the transaction" %
878 891 self.indexfile))
879 892
880 893 trindex = trinfo[2]
881 894 dataoff = self.start(trindex)
882 895
883 896 tr.add(self.datafile, dataoff)
884 897 df = self.opener(self.datafile, 'w')
885 898 calc = struct.calcsize(self.indexformat)
886 899 for r in xrange(self.count()):
887 900 start = self.start(r) + (r + 1) * calc
888 901 length = self.length(r)
889 902 fp.seek(start)
890 903 d = fp.read(length)
891 904 df.write(d)
892 905 fp.close()
893 906 df.close()
894 907 fp = self.opener(self.indexfile, 'w', atomictemp=True)
895 908 self.version &= ~(REVLOGNGINLINEDATA)
896 909 if self.count():
897 910 x = self.index[0]
898 911 e = struct.pack(self.indexformat, *x)[4:]
899 912 l = struct.pack(versionformat, self.version)
900 913 fp.write(l)
901 914 fp.write(e)
902 915
903 916 for i in xrange(1, self.count()):
904 917 x = self.index[i]
905 918 e = struct.pack(self.indexformat, *x)
906 919 fp.write(e)
907 920
908 921 # if we don't call rename, the temp file will never replace the
909 922 # real index
910 923 fp.rename()
911 924
912 925 tr.replace(self.indexfile, trindex * calc)
913 926 self.chunkcache = None
914 927
915 928 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
916 929 """add a revision to the log
917 930
918 931 text - the revision data to add
919 932 transaction - the transaction object used for rollback
920 933 link - the linkrev data to add
921 934 p1, p2 - the parent nodeids of the revision
922 935 d - an optional precomputed delta
923 936 """
924 937 if text is None: text = ""
925 938 if p1 is None: p1 = self.tip()
926 939 if p2 is None: p2 = nullid
927 940
928 941 node = hash(text, p1, p2)
929 942
930 943 if node in self.nodemap:
931 944 return node
932 945
933 946 n = self.count()
934 947 t = n - 1
935 948
936 949 if n:
937 950 base = self.base(t)
938 951 start = self.start(base)
939 952 end = self.end(t)
940 953 if not d:
941 954 prev = self.revision(self.tip())
942 955 d = self.diff(prev, str(text))
943 956 data = compress(d)
944 957 l = len(data[1]) + len(data[0])
945 958 dist = end - start + l
946 959
947 960 # full versions are inserted when the needed deltas
948 961 # become comparable to the uncompressed text
949 962 if not n or dist > len(text) * 2:
950 963 data = compress(text)
951 964 l = len(data[1]) + len(data[0])
952 965 base = n
953 966 else:
954 967 base = self.base(t)
955 968
956 969 offset = 0
957 970 if t >= 0:
958 971 offset = self.end(t)
959 972
960 973 if self.version == REVLOGV0:
961 974 e = (offset, l, base, link, p1, p2, node)
962 975 else:
963 976 e = (self.offset_type(offset, 0), l, len(text),
964 977 base, link, self.rev(p1), self.rev(p2), node)
965 978
966 979 self.index.append(e)
967 980 self.nodemap[node] = n
968 981 entry = struct.pack(self.indexformat, *e)
969 982
970 983 if not self.inlinedata():
971 984 transaction.add(self.datafile, offset)
972 985 transaction.add(self.indexfile, n * len(entry))
973 986 f = self.opener(self.datafile, "a")
974 987 if data[0]:
975 988 f.write(data[0])
976 989 f.write(data[1])
977 990 f.close()
978 991 f = self.opener(self.indexfile, "a")
979 992 else:
980 993 f = self.opener(self.indexfile, "a+")
981 994 f.seek(0, 2)
982 995 transaction.add(self.indexfile, f.tell(), self.count() - 1)
983 996
984 997 if len(self.index) == 1 and self.version != REVLOGV0:
985 998 l = struct.pack(versionformat, self.version)
986 999 f.write(l)
987 1000 entry = entry[4:]
988 1001
989 1002 f.write(entry)
990 1003
991 1004 if self.inlinedata():
992 1005 f.write(data[0])
993 1006 f.write(data[1])
994 1007 self.checkinlinesize(transaction, f)
995 1008
996 1009 self.cache = (node, n, text)
997 1010 return node
998 1011
999 1012 def ancestor(self, a, b):
1000 1013 """calculate the least common ancestor of nodes a and b"""
1001 1014
1002 1015 # start with some short cuts for the linear cases
1003 1016 if a == b:
1004 1017 return a
1005 1018 ra = self.rev(a)
1006 1019 rb = self.rev(b)
1007 1020 if ra < rb:
1008 1021 last = b
1009 1022 first = a
1010 1023 else:
1011 1024 last = a
1012 1025 first = b
1013 1026
1014 1027 # reachable won't include stop in the list, so we have to use a parent
1015 1028 reachable = self.reachable(last, stop=self.parents(first)[0])
1016 1029 if first in reachable:
1017 1030 return first
1018 1031
1019 1032 # calculate the distance of every node from root
1020 1033 dist = {nullid: 0}
1021 1034 for i in xrange(self.count()):
1022 1035 n = self.node(i)
1023 1036 p1, p2 = self.parents(n)
1024 1037 dist[n] = max(dist[p1], dist[p2]) + 1
1025 1038
1026 1039 # traverse ancestors in order of decreasing distance from root
1027 1040 def ancestors(node):
1028 1041 # we store negative distances because heap returns smallest member
1029 1042 h = [(-dist[node], node)]
1030 1043 seen = {}
1031 1044 while h:
1032 1045 d, n = heapq.heappop(h)
1033 1046 if n not in seen:
1034 1047 seen[n] = 1
1035 1048 yield (-d, n)
1036 1049 for p in self.parents(n):
1037 1050 heapq.heappush(h, (-dist[p], p))
1038 1051
1039 1052 def generations(node):
1040 1053 sg, s = None, {}
1041 1054 for g,n in ancestors(node):
1042 1055 if g != sg:
1043 1056 if sg:
1044 1057 yield sg, s
1045 1058 sg, s = g, {n:1}
1046 1059 else:
1047 1060 s[n] = 1
1048 1061 yield sg, s
1049 1062
1050 1063 x = generations(a)
1051 1064 y = generations(b)
1052 1065 gx = x.next()
1053 1066 gy = y.next()
1054 1067
1055 1068 # increment each ancestor list until it is closer to root than
1056 1069 # the other, or they match
1057 1070 while 1:
1058 1071 #print "ancestor gen %s %s" % (gx[0], gy[0])
1059 1072 if gx[0] == gy[0]:
1060 1073 # find the intersection
1061 1074 i = [ n for n in gx[1] if n in gy[1] ]
1062 1075 if i:
1063 1076 return i[0]
1064 1077 else:
1065 1078 #print "next"
1066 1079 gy = y.next()
1067 1080 gx = x.next()
1068 1081 elif gx[0] < gy[0]:
1069 1082 #print "next y"
1070 1083 gy = y.next()
1071 1084 else:
1072 1085 #print "next x"
1073 1086 gx = x.next()
1074 1087
1075 1088 def group(self, nodelist, lookup, infocollect=None):
1076 1089 """calculate a delta group
1077 1090
1078 1091 Given a list of changeset revs, return a set of deltas and
1079 1092 metadata corresponding to nodes. the first delta is
1080 1093 parent(nodes[0]) -> nodes[0] the receiver is guaranteed to
1081 1094 have this parent as it has all history before these
1082 1095 changesets. parent is parent[0]
1083 1096 """
1084 1097 revs = [self.rev(n) for n in nodelist]
1085 1098
1086 1099 # if we don't have any revisions touched by these changesets, bail
1087 1100 if not revs:
1088 1101 yield changegroup.closechunk()
1089 1102 return
1090 1103
1091 1104 # add the parent of the first rev
1092 1105 p = self.parents(self.node(revs[0]))[0]
1093 1106 revs.insert(0, self.rev(p))
1094 1107
1095 1108 # build deltas
1096 1109 for d in xrange(0, len(revs) - 1):
1097 1110 a, b = revs[d], revs[d + 1]
1098 1111 nb = self.node(b)
1099 1112
1100 1113 if infocollect is not None:
1101 1114 infocollect(nb)
1102 1115
1103 1116 d = self.revdiff(a, b)
1104 1117 p = self.parents(nb)
1105 1118 meta = nb + p[0] + p[1] + lookup(nb)
1106 1119 yield changegroup.genchunk("%s%s" % (meta, d))
1107 1120
1108 1121 yield changegroup.closechunk()
1109 1122
1110 1123 def addgroup(self, revs, linkmapper, transaction, unique=0):
1111 1124 """
1112 1125 add a delta group
1113 1126
1114 1127 given a set of deltas, add them to the revision log. the
1115 1128 first delta is against its parent, which should be in our
1116 1129 log, the rest are against the previous delta.
1117 1130 """
1118 1131
1119 1132 #track the base of the current delta log
1120 1133 r = self.count()
1121 1134 t = r - 1
1122 1135 node = None
1123 1136
1124 1137 base = prev = -1
1125 1138 start = end = textlen = 0
1126 1139 if r:
1127 1140 end = self.end(t)
1128 1141
1129 1142 ifh = self.opener(self.indexfile, "a+")
1130 1143 ifh.seek(0, 2)
1131 1144 transaction.add(self.indexfile, ifh.tell(), self.count())
1132 1145 if self.inlinedata():
1133 1146 dfh = None
1134 1147 else:
1135 1148 transaction.add(self.datafile, end)
1136 1149 dfh = self.opener(self.datafile, "a")
1137 1150
1138 1151 # loop through our set of deltas
1139 1152 chain = None
1140 1153 for chunk in revs:
1141 1154 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1142 1155 link = linkmapper(cs)
1143 1156 if node in self.nodemap:
1144 1157 # this can happen if two branches make the same change
1145 1158 # if unique:
1146 1159 # raise RevlogError(_("already have %s") % hex(node[:4]))
1147 1160 chain = node
1148 1161 continue
1149 1162 delta = chunk[80:]
1150 1163
1151 1164 for p in (p1, p2):
1152 1165 if not p in self.nodemap:
1153 1166 raise RevlogError(_("unknown parent %s") % short(p))
1154 1167
1155 1168 if not chain:
1156 1169 # retrieve the parent revision of the delta chain
1157 1170 chain = p1
1158 1171 if not chain in self.nodemap:
1159 1172 raise RevlogError(_("unknown base %s") % short(chain[:4]))
1160 1173
1161 1174 # full versions are inserted when the needed deltas become
1162 1175 # comparable to the uncompressed text or when the previous
1163 1176 # version is not the one we have a delta against. We use
1164 1177 # the size of the previous full rev as a proxy for the
1165 1178 # current size.
1166 1179
1167 1180 if chain == prev:
1168 1181 tempd = compress(delta)
1169 1182 cdelta = tempd[0] + tempd[1]
1170 1183 textlen = mdiff.patchedsize(textlen, delta)
1171 1184
1172 1185 if chain != prev or (end - start + len(cdelta)) > textlen * 2:
1173 1186 # flush our writes here so we can read it in revision
1174 1187 if dfh:
1175 1188 dfh.flush()
1176 1189 ifh.flush()
1177 1190 text = self.revision(chain)
1178 1191 text = self.patches(text, [delta])
1179 1192 chk = self.addrevision(text, transaction, link, p1, p2)
1180 1193 if chk != node:
1181 1194 raise RevlogError(_("consistency error adding group"))
1182 1195 textlen = len(text)
1183 1196 else:
1184 1197 if self.version == REVLOGV0:
1185 1198 e = (end, len(cdelta), base, link, p1, p2, node)
1186 1199 else:
1187 1200 e = (self.offset_type(end, 0), len(cdelta), textlen, base,
1188 1201 link, self.rev(p1), self.rev(p2), node)
1189 1202 self.index.append(e)
1190 1203 self.nodemap[node] = r
1191 1204 if self.inlinedata():
1192 1205 ifh.write(struct.pack(self.indexformat, *e))
1193 1206 ifh.write(cdelta)
1194 1207 self.checkinlinesize(transaction, ifh)
1195 1208 if not self.inlinedata():
1196 1209 dfh = self.opener(self.datafile, "a")
1197 1210 ifh = self.opener(self.indexfile, "a")
1198 1211 else:
1199 1212 if not dfh:
1200 1213 # addrevision switched from inline to conventional
1201 1214 # reopen the index
1202 1215 dfh = self.opener(self.datafile, "a")
1203 1216 ifh = self.opener(self.indexfile, "a")
1204 1217 dfh.write(cdelta)
1205 1218 ifh.write(struct.pack(self.indexformat, *e))
1206 1219
1207 1220 t, r, chain, prev = r, r + 1, node, node
1208 1221 base = self.base(t)
1209 1222 start = self.start(base)
1210 1223 end = self.end(t)
1211 1224
1212 1225 return node
1213 1226
1214 1227 def strip(self, rev, minlink):
1215 1228 if self.count() == 0 or rev >= self.count():
1216 1229 return
1217 1230
1218 1231 if isinstance(self.index, lazyindex):
1219 1232 self.loadindexmap()
1220 1233
1221 1234 # When stripping away a revision, we need to make sure it
1222 1235 # does not actually belong to an older changeset.
1223 1236 # The minlink parameter defines the oldest revision
1224 1237 # we're allowed to strip away.
1225 1238 while minlink > self.index[rev][-4]:
1226 1239 rev += 1
1227 1240 if rev >= self.count():
1228 1241 return
1229 1242
1230 1243 # first truncate the files on disk
1231 1244 end = self.start(rev)
1232 1245 if not self.inlinedata():
1233 1246 df = self.opener(self.datafile, "a")
1234 1247 df.truncate(end)
1235 1248 end = rev * struct.calcsize(self.indexformat)
1236 1249 else:
1237 1250 end += rev * struct.calcsize(self.indexformat)
1238 1251
1239 1252 indexf = self.opener(self.indexfile, "a")
1240 1253 indexf.truncate(end)
1241 1254
1242 1255 # then reset internal state in memory to forget those revisions
1243 1256 self.cache = None
1244 1257 self.chunkcache = None
1245 1258 for x in xrange(rev, self.count()):
1246 1259 del self.nodemap[self.node(x)]
1247 1260
1248 1261 del self.index[rev:]
1249 1262
1250 1263 def checksize(self):
1251 1264 expected = 0
1252 1265 if self.count():
1253 1266 expected = self.end(self.count() - 1)
1254 1267
1255 1268 try:
1256 1269 f = self.opener(self.datafile)
1257 1270 f.seek(0, 2)
1258 1271 actual = f.tell()
1259 1272 dd = actual - expected
1260 1273 except IOError, inst:
1261 1274 if inst.errno != errno.ENOENT:
1262 1275 raise
1263 1276 dd = 0
1264 1277
1265 1278 try:
1266 1279 f = self.opener(self.indexfile)
1267 1280 f.seek(0, 2)
1268 1281 actual = f.tell()
1269 1282 s = struct.calcsize(self.indexformat)
1270 1283 i = actual / s
1271 1284 di = actual - (i * s)
1272 1285 if self.inlinedata():
1273 1286 databytes = 0
1274 1287 for r in xrange(self.count()):
1275 1288 databytes += self.length(r)
1276 1289 dd = 0
1277 1290 di = actual - self.count() * s - databytes
1278 1291 except IOError, inst:
1279 1292 if inst.errno != errno.ENOENT:
1280 1293 raise
1281 1294 di = 0
1282 1295
1283 1296 return (dd, di)
1284 1297
1285 1298
@@ -1,360 +1,290 b''
1 1 # ui.py - user interface bits for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from i18n import gettext as _
9 9 from demandload import *
10 demandload(globals(), "errno getpass os re smtplib socket sys tempfile")
10 demandload(globals(), "errno getpass os re socket sys tempfile")
11 11 demandload(globals(), "ConfigParser mdiff templater traceback util")
12 12
13 13 class ui(object):
14 14 def __init__(self, verbose=False, debug=False, quiet=False,
15 15 interactive=True, traceback=False, parentui=None):
16 16 self.overlay = {}
17 17 if parentui is None:
18 18 # this is the parent of all ui children
19 19 self.parentui = None
20 20 self.cdata = ConfigParser.SafeConfigParser()
21 21 self.readconfig(util.rcpath())
22 22
23 23 self.quiet = self.configbool("ui", "quiet")
24 24 self.verbose = self.configbool("ui", "verbose")
25 25 self.debugflag = self.configbool("ui", "debug")
26 26 self.interactive = self.configbool("ui", "interactive", True)
27 27 self.traceback = traceback
28 28
29 29 self.updateopts(verbose, debug, quiet, interactive)
30 30 self.diffcache = None
31 31 self.header = []
32 32 self.prev_header = []
33 33 self.revlogopts = self.configrevlog()
34 34 else:
35 35 # parentui may point to a ui object which is already a child
36 36 self.parentui = parentui.parentui or parentui
37 37 parent_cdata = self.parentui.cdata
38 38 self.cdata = ConfigParser.SafeConfigParser(parent_cdata.defaults())
39 39 # make interpolation work
40 40 for section in parent_cdata.sections():
41 41 self.cdata.add_section(section)
42 42 for name, value in parent_cdata.items(section, raw=True):
43 43 self.cdata.set(section, name, value)
44 44
45 45 def __getattr__(self, key):
46 46 return getattr(self.parentui, key)
47 47
48 48 def updateopts(self, verbose=False, debug=False, quiet=False,
49 49 interactive=True, traceback=False, config=[]):
50 50 self.quiet = (self.quiet or quiet) and not verbose and not debug
51 51 self.verbose = (self.verbose or verbose) or debug
52 52 self.debugflag = (self.debugflag or debug)
53 53 self.interactive = (self.interactive and interactive)
54 54 self.traceback = self.traceback or traceback
55 55 for cfg in config:
56 56 try:
57 57 name, value = cfg.split('=', 1)
58 58 section, name = name.split('.', 1)
59 59 if not self.cdata.has_section(section):
60 60 self.cdata.add_section(section)
61 61 if not section or not name:
62 62 raise IndexError
63 63 self.cdata.set(section, name, value)
64 64 except (IndexError, ValueError):
65 65 raise util.Abort(_('malformed --config option: %s') % cfg)
66 66
67 67 def readconfig(self, fn, root=None):
68 68 if isinstance(fn, basestring):
69 69 fn = [fn]
70 70 for f in fn:
71 71 try:
72 72 self.cdata.read(f)
73 73 except ConfigParser.ParsingError, inst:
74 74 raise util.Abort(_("Failed to parse %s\n%s") % (f, inst))
75 75 # translate paths relative to root (or home) into absolute paths
76 76 if root is None:
77 77 root = os.path.expanduser('~')
78 78 for name, path in self.configitems("paths"):
79 79 if path and "://" not in path and not os.path.isabs(path):
80 80 self.cdata.set("paths", name, os.path.join(root, path))
81 81
82 82 def setconfig(self, section, name, val):
83 83 self.overlay[(section, name)] = val
84 84
85 85 def config(self, section, name, default=None):
86 86 if self.overlay.has_key((section, name)):
87 87 return self.overlay[(section, name)]
88 88 if self.cdata.has_option(section, name):
89 89 try:
90 90 return self.cdata.get(section, name)
91 91 except ConfigParser.InterpolationError, inst:
92 92 raise util.Abort(_("Error in configuration:\n%s") % inst)
93 93 if self.parentui is None:
94 94 return default
95 95 else:
96 96 return self.parentui.config(section, name, default)
97 97
98 98 def configlist(self, section, name, default=None):
99 99 """Return a list of comma/space separated strings"""
100 100 result = self.config(section, name)
101 101 if result is None:
102 102 result = default or []
103 103 if isinstance(result, basestring):
104 104 result = result.replace(",", " ").split()
105 105 return result
106 106
107 107 def configbool(self, section, name, default=False):
108 108 if self.overlay.has_key((section, name)):
109 109 return self.overlay[(section, name)]
110 110 if self.cdata.has_option(section, name):
111 111 try:
112 112 return self.cdata.getboolean(section, name)
113 113 except ConfigParser.InterpolationError, inst:
114 114 raise util.Abort(_("Error in configuration:\n%s") % inst)
115 115 if self.parentui is None:
116 116 return default
117 117 else:
118 118 return self.parentui.configbool(section, name, default)
119 119
120 120 def has_config(self, section):
121 121 '''tell whether section exists in config.'''
122 122 return self.cdata.has_section(section)
123 123
124 124 def configitems(self, section):
125 125 items = {}
126 126 if self.parentui is not None:
127 127 items = dict(self.parentui.configitems(section))
128 128 if self.cdata.has_section(section):
129 129 try:
130 130 items.update(dict(self.cdata.items(section)))
131 131 except ConfigParser.InterpolationError, inst:
132 132 raise util.Abort(_("Error in configuration:\n%s") % inst)
133 133 x = items.items()
134 134 x.sort()
135 135 return x
136 136
137 137 def walkconfig(self, seen=None):
138 138 if seen is None:
139 139 seen = {}
140 140 for (section, name), value in self.overlay.iteritems():
141 141 yield section, name, value
142 142 seen[section, name] = 1
143 143 for section in self.cdata.sections():
144 144 for name, value in self.cdata.items(section):
145 145 if (section, name) in seen: continue
146 146 yield section, name, value.replace('\n', '\\n')
147 147 seen[section, name] = 1
148 148 if self.parentui is not None:
149 149 for parent in self.parentui.walkconfig(seen):
150 150 yield parent
151 151
152 152 def extensions(self):
153 153 result = self.configitems("extensions")
154 154 for i, (key, value) in enumerate(result):
155 155 if value:
156 156 result[i] = (key, os.path.expanduser(value))
157 157 return result
158 158
159 159 def hgignorefiles(self):
160 160 result = []
161 161 for key, value in self.configitems("ui"):
162 162 if key == 'ignore' or key.startswith('ignore.'):
163 163 result.append(os.path.expanduser(value))
164 164 return result
165 165
166 166 def configrevlog(self):
167 167 result = {}
168 168 for key, value in self.configitems("revlog"):
169 169 result[key.lower()] = value
170 170 return result
171 171
172 def diffopts(self, opts={}):
173 return mdiff.diffopts(
174 text=opts.get('text'),
175 showfunc=(opts.get('show_function') or
176 self.configbool('diff', 'showfunc', None)),
177 git=(opts.get('git') or
178 self.configbool('diff', 'git', None)),
179 ignorews=(opts.get('ignore_all_space') or
180 self.configbool('diff', 'ignorews', None)),
181 ignorewsamount=(opts.get('ignore_space_change') or
182 self.configbool('diff', 'ignorewsamount', None)),
183 ignoreblanklines=(opts.get('ignore_blank_lines') or
184 self.configbool('diff', 'ignoreblanklines', None)))
185
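# Illustrative note, not part of this changeset: each mdiff.diffopts field
# above is taken from the command-line option when given and otherwise from
# the [diff] section of the hgrc, e.g.
#   [diff]
#   showfunc = True
#   git = True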
186 172 def username(self):
187 173 """Return default username to be used in commits.
188 174
189 175         Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL;
190 176         searching stops at the first one that is set.
191 177         Abort if the username found is an empty string, to force the
192 178         commit user to be specified elsewhere, e.g. with a command-line
193 179         option or in the repository hgrc.
194 180         If none is found, use ($LOGNAME or $USER or $LNAME or
195 181         $USERNAME) + "@full.hostname".
196 182 user = os.environ.get("HGUSER")
197 183 if user is None:
198 184 user = self.config("ui", "username")
199 185 if user is None:
200 186 user = os.environ.get("EMAIL")
201 187 if user is None:
202 188 try:
203 189 user = '%s@%s' % (util.getuser(), socket.getfqdn())
204 190 except KeyError:
205 191 raise util.Abort(_("Please specify a username."))
206 192 return user
207 193
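# Illustrative sketch, not part of this changeset: the lookup order
# documented above, with hypothetical values.
#   HGUSER=jane                        -> 'jane'
#   [ui] username = Jane <j@e.com>     -> 'Jane <j@e.com>' (if HGUSER unset)
#   EMAIL=jane@example.com             -> 'jane@example.com' (if both unset)
#   nothing set                        -> util.getuser() + '@' + socket.getfqdn()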
208 194 def shortuser(self, user):
209 195 """Return a short representation of a user name or email address."""
210 196 if not self.verbose: user = util.shortuser(user)
211 197 return user
212 198
213 199 def expandpath(self, loc, default=None):
214 200 """Return repository location relative to cwd or from [paths]"""
215 201 if "://" in loc or os.path.isdir(loc):
216 202 return loc
217 203
218 204 path = self.config("paths", loc)
219 205 if not path and default is not None:
220 206 path = self.config("paths", default)
221 207 return path or loc
222 208
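# Illustrative sketch, not part of this changeset: with a hypothetical hgrc
# containing
#   [paths]
#   upstream = http://example.com/hg/repo
# expandpath('upstream') returns the URL, while an argument that is already
# a URL or an existing directory is returned unchanged.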
223 209 def write(self, *args):
224 210 if self.header:
225 211 if self.header != self.prev_header:
226 212 self.prev_header = self.header
227 213 self.write(*self.header)
228 214 self.header = []
229 215 for a in args:
230 216 sys.stdout.write(str(a))
231 217
232 218 def write_header(self, *args):
233 219 for a in args:
234 220 self.header.append(str(a))
235 221
236 222 def write_err(self, *args):
237 223 try:
238 224 if not sys.stdout.closed: sys.stdout.flush()
239 225 for a in args:
240 226 sys.stderr.write(str(a))
241 227 except IOError, inst:
242 228 if inst.errno != errno.EPIPE:
243 229 raise
244 230
245 231 def flush(self):
246 232 try: sys.stdout.flush()
247 233 except: pass
248 234 try: sys.stderr.flush()
249 235 except: pass
250 236
251 237 def readline(self):
252 238 return sys.stdin.readline()[:-1]
253 239 def prompt(self, msg, pat=None, default="y"):
254 240 if not self.interactive: return default
255 241 while 1:
256 242 self.write(msg, " ")
257 243 r = self.readline()
258 244 if not pat or re.match(pat, r):
259 245 return r
260 246 else:
261 247 self.write(_("unrecognized response\n"))
262 248 def getpass(self, prompt=None, default=None):
263 249 if not self.interactive: return default
264 250 return getpass.getpass(prompt or _('password: '))
265 251 def status(self, *msg):
266 252 if not self.quiet: self.write(*msg)
267 253 def warn(self, *msg):
268 254 self.write_err(*msg)
269 255 def note(self, *msg):
270 256 if self.verbose: self.write(*msg)
271 257 def debug(self, *msg):
272 258 if self.debugflag: self.write(*msg)
273 259 def edit(self, text, user):
274 260 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
275 261 text=True)
276 262 try:
277 263 f = os.fdopen(fd, "w")
278 264 f.write(text)
279 265 f.close()
280 266
281 267 editor = (os.environ.get("HGEDITOR") or
282 268 self.config("ui", "editor") or
283 269 os.environ.get("EDITOR", "vi"))
284 270
285 271 util.system("%s \"%s\"" % (editor, name),
286 272 environ={'HGUSER': user},
287 273 onerr=util.Abort, errprefix=_("edit failed"))
288 274
289 275 f = open(name)
290 276 t = f.read()
291 277 f.close()
292 278 t = re.sub("(?m)^HG:.*\n", "", t)
293 279 finally:
294 280 os.unlink(name)
295 281
296 282 return t
297 283
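# Illustrative sketch, not part of this changeset: edit() strips template
# lines beginning with 'HG:' from the text the editor returns, e.g.
#   'fix bug\nHG: changed foo.c\n'  ->  'fix bug\n'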
298 def sendmail(self):
299         '''return a mail connection object. the object returned has one
300         method, sendmail. call as sendmail(sender, list-of-recipients, msg).'''
301
302 def smtp():
303 '''send mail using smtp.'''
304
305 local_hostname = self.config('smtp', 'local_hostname')
306 s = smtplib.SMTP(local_hostname=local_hostname)
307 mailhost = self.config('smtp', 'host')
308 if not mailhost:
309 raise util.Abort(_('no [smtp]host in hgrc - cannot send mail'))
310 mailport = int(self.config('smtp', 'port', 25))
311 self.note(_('sending mail: smtp host %s, port %s\n') %
312 (mailhost, mailport))
313 s.connect(host=mailhost, port=mailport)
314 if self.configbool('smtp', 'tls'):
315 self.note(_('(using tls)\n'))
316 s.ehlo()
317 s.starttls()
318 s.ehlo()
319 username = self.config('smtp', 'username')
320 password = self.config('smtp', 'password')
321 if username and password:
322 self.note(_('(authenticating to mail server as %s)\n') %
323 (username))
324 s.login(username, password)
325 return s
326
327 class sendmail(object):
328 '''send mail using sendmail.'''
329
330 def __init__(self, ui, program):
331 self.ui = ui
332 self.program = program
333
334 def sendmail(self, sender, recipients, msg):
335 cmdline = '%s -f %s %s' % (
336 self.program, templater.email(sender),
337 ' '.join(map(templater.email, recipients)))
338 self.ui.note(_('sending mail: %s\n') % cmdline)
339 fp = os.popen(cmdline, 'w')
340 fp.write(msg)
341 ret = fp.close()
342 if ret:
343 raise util.Abort('%s %s' % (
344 os.path.basename(self.program.split(None, 1)[0]),
345 util.explain_exit(ret)[0]))
346
347 method = self.config('email', 'method', 'smtp')
348 if method == 'smtp':
349 mail = smtp()
350 else:
351 mail = sendmail(self, method)
352 return mail
353
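# Illustrative sketch, not part of this changeset: the interface documented
# above is used as (with a hypothetical ui instance 'u' and a complete
# message string 'msg')
#   mailer = u.sendmail()
#   mailer.sendmail('me@example.com', ['you@example.com'], msg)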
354 284 def print_exc(self):
355 285 '''print exception traceback if traceback printing enabled.
356 286 only to call in exception handler. returns true if traceback
357 287 printed.'''
358 288 if self.traceback:
359 289 traceback.print_exc()
360 290 return self.traceback