##// END OF EJS Templates
Consistently import foo as foomod when a local variable named foo exists, to avoid shadowing the module...
Martin Geisler -
r12085:6f833fc3 default
parent child Browse files
Show More
@@ -1,1280 +1,1280
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import os, sys, errno, re, glob, tempfile
11 11 import util, templater, patch, error, encoding, templatekw
12 import match as _match
12 import match as matchmod
13 13 import similar, revset
14 14
15 15 revrangesep = ':'
16 16
def parsealiases(cmd):
    """Split a command-table key into its list of command names.

    Keys look like "^status|st": an optional leading "^" (marking a
    frequently-used command) followed by |-separated aliases.
    """
    names = cmd.lstrip("^")
    return names.split("|")
19 19
def findpossible(cmd, table, strict=False):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    normal = {}
    debug = {}
    for key in table.keys():
        names = key.lstrip("^").split("|")
        hit = None
        if cmd in names:
            hit = cmd
        elif not strict:
            # allow unambiguous prefixes of any alias
            for name in names:
                if name.startswith(cmd):
                    hit = name
                    break
        if hit is None:
            continue
        # debug commands are kept apart so they never shadow normal ones
        if names[0].startswith("debug") or hit.startswith("debug"):
            debug[hit] = (names, table[key])
        else:
            normal[hit] = (names, table[key])

    if not normal and debug:
        normal = debug

    return normal
48 48
def findcmd(cmd, table, strict=True):
    """Return (aliases, command table entry) for command string."""
    choice = findpossible(cmd, table, strict)

    # an exact match wins regardless of how many prefix matches exist
    if cmd in choice:
        return choice[cmd]

    if len(choice) > 1:
        raise error.AmbiguousCommand(cmd, sorted(choice.keys()))

    if choice:
        return choice.values()[0]

    raise error.UnknownCommand(cmd)
65 65
def findrepo(p):
    """Walk upward from p looking for a .hg directory.

    Return the repository root containing p, or None when no ancestor
    directory (up to the filesystem root) holds a repository.
    """
    while not os.path.isdir(os.path.join(p, ".hg")):
        parent = os.path.dirname(p)
        if parent == p:
            # reached the filesystem root without finding a repo
            return None
        p = parent
    return p
73 73
def bail_if_changed(repo):
    """Abort when the working directory has an uncommitted merge or any
    local modifications (modified, added, removed or deleted files)."""
    if repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    # first four status fields: modified, added, removed, deleted
    if any(repo.status()[:4]):
        raise util.Abort(_("outstanding uncommitted changes"))
80 80
def logmessage(opts):
    """ get the log message according to -m and -l option """
    message = opts.get('message')
    logfile = opts.get('logfile')

    # the message can come from only one place
    if message and logfile:
        raise util.Abort(_('options --message and --logfile are mutually '
                           'exclusive'))
    if not message and logfile:
        try:
            # '-' means read the commit message from standard input
            if logfile == '-':
                message = sys.stdin.read()
            else:
                message = open(logfile).read()
        except IOError, inst:
            raise util.Abort(_("can't read commit message '%s': %s") %
                             (logfile, inst.strerror))
    # may be None when neither option was given
    return message
99 99
def loglimit(opts):
    """get the log limit according to option -l/--limit"""
    raw = opts.get('limit')
    # absent/empty/zero limit means "no limit"
    if not raw:
        return None
    try:
        limit = int(raw)
    except ValueError:
        raise util.Abort(_('limit must be a positive integer'))
    if limit <= 0:
        raise util.Abort(_('limit must be positive'))
    return limit
113 113
def revpair(repo, revs):
    '''return pair of nodes, given list of revisions. second item can
    be None, meaning use working dir.'''

    def revfix(repo, val, defval):
        # substitute the default when the spec side is empty (but keep 0)
        if not val and val != 0 and defval is not None:
            val = defval
        return repo.lookup(val)

    if not revs:
        # no revision given: first parent of the working directory
        return repo.dirstate.parents()[0], None
    end = None
    if len(revs) == 1:
        if revrangesep in revs[0]:
            # "A:B" form - empty sides default to rev 0 and the last rev
            start, end = revs[0].split(revrangesep, 1)
            start = revfix(repo, start, 0)
            end = revfix(repo, end, len(repo) - 1)
        else:
            start = revfix(repo, revs[0], None)
    elif len(revs) == 2:
        # two explicit revisions; neither may itself be a range
        if revrangesep in revs[0] or revrangesep in revs[1]:
            raise util.Abort(_('too many revisions specified'))
        start = revfix(repo, revs[0], None)
        end = revfix(repo, revs[1], None)
    else:
        raise util.Abort(_('too many revisions specified'))
    return start, end
141 141
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""

    def revfix(repo, val, defval):
        # empty spec side (but not "0") falls back to the default rev
        if not val and val != 0 and defval is not None:
            return defval
        return repo.changelog.rev(repo.lookup(val))

    seen, l = set(), []
    for spec in revs:
        # attempt to parse old-style ranges first to deal with
        # things like old-tag which contain query metacharacters
        try:
            if revrangesep in spec:
                start, end = spec.split(revrangesep, 1)
                start = revfix(repo, start, 0)
                end = revfix(repo, end, len(repo) - 1)
                # ranges may run backwards, e.g. "5:2"
                step = start > end and -1 or 1
                for rev in xrange(start, end + step, step):
                    if rev in seen:
                        continue
                    seen.add(rev)
                    l.append(rev)
                continue
            elif spec and spec in repo: # single unquoted rev
                rev = revfix(repo, spec, None)
                if rev in seen:
                    continue
                seen.add(rev)
                l.append(rev)
                continue
        except error.RepoLookupError:
            pass

        # fall through to new-style queries if old-style fails
        m = revset.match(spec)
        for r in m(repo, range(len(repo))):
            if r not in seen:
                l.append(r)
        seen.update(l)

    return l
184 184
def make_filename(repo, pat, node,
                  total=None, seqno=None, revwidth=None, pathname=None):
    """Expand %-escapes in an output filename pattern.

    Escapes available depend on the arguments supplied:
    %% literal '%'; %b basename of the repo root; %H/%h/%R full hash,
    short hash and rev number of node; %r rev number zero-padded to
    revwidth; %N total; %n seqno (zero-padded to the width of total when
    both are given); %s/%d/%p basename, dirname and full path of pathname.
    Raises util.Abort for an escape that is not available.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
    }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
    }

    try:
        if node:
            expander.update(node_expander)
        if node:
            # NOTE(review): duplicate "if node:" guard; could be folded
            # into the block above
            expander['r'] = (lambda:
                    str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # when both are known, pad the sequence number for alignment
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                # consume the character following '%' and expand it
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError, inst:
        raise util.Abort(_("invalid format spec '%%%s' in output filename") %
                         inst.args[0])
229 229
def make_file(repo, pat, node=None,
              total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
    """Return an open file-like object for the target described by pat.

    An empty pattern or '-' maps to stdout/stdin (depending on mode);
    a file-like object compatible with mode is passed straight through;
    anything else is treated as a filename pattern, expanded via
    make_filename() and opened with the given mode.
    """
    writable = 'w' in mode or 'a' in mode

    if not pat or pat == '-':
        if writable:
            return sys.stdout
        return sys.stdin
    if writable and hasattr(pat, 'write'):
        return pat
    if 'r' in mode and hasattr(pat, 'read'):
        return pat
    fname = make_filename(repo, pat, node, total, seqno, revwidth, pathname)
    return open(fname, mode)
244 244
def expandpats(pats):
    """Expand bare glob patterns into matching filenames.

    On platforms where the shell does not expand globs (util.expandglobs
    is true), each pattern without an explicit kind prefix is run through
    glob.glob(); patterns with a kind, or globs matching nothing, are
    kept as-is. Always returns a new list.
    """
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for pat in pats:
        kind, name = matchmod._patsplit(pat, None)
        if kind is None:
            try:
                matches = glob.glob(name)
            except re.error:
                # invalid glob syntax: fall back to the literal name
                matches = [name]
            if matches:
                expanded.extend(matches)
                continue
        expanded.append(pat)
    return expanded
261 261
def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
    """Build a match object for the given patterns and -I/-X options.

    NOTE(review): the mutable default arguments are never mutated here
    (pats is rebound, opts is only read), so they are safe as written.
    """
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])
    m = matchmod.match(repo.root, repo.getcwd(), pats,
                       opts.get('include'), opts.get('exclude'), default)
    # report bad filenames relative to the cwd rather than the repo root
    def badfn(f, msg):
        repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
    m.bad = badfn
    return m
271 271
def matchall(repo):
    """Return a matcher that selects every file in the repository."""
    return matchmod.always(repo.root, repo.getcwd())
274 274
def matchfiles(repo, files):
    """Return a matcher that selects exactly the given list of files."""
    return matchmod.exact(repo.root, repo.getcwd(), files)
277 277
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    """Schedule unknown files for addition and missing files for removal.

    With similarity > 0, removed+deleted files are compared against
    added+unknown ones and sufficiently similar pairs are recorded as
    renames (copies).  All dirstate changes are skipped under dry_run.
    """
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    # we'd use status here, except handling of symlinks and ignore is tricky
    added, unknown, deleted, removed = [], [], [], []
    audit_path = util.path_auditor(repo.root)
    m = match(repo, pats, opts)
    for abs in repo.walk(m):
        target = repo.wjoin(abs)
        good = True
        try:
            audit_path(abs)
        except:
            # NOTE(review): bare except - presumably meant to catch the
            # auditor's rejection of unsafe paths; confirm before narrowing
            good = False
        rel = m.rel(abs)
        exact = m.exact(abs)
        if good and abs not in repo.dirstate:
            # tracked by neither dirstate nor filesystem audit failure
            unknown.append(abs)
            if repo.ui.verbose or not exact:
                repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
        elif repo.dirstate[abs] != 'r' and (not good or not os.path.lexists(target)
            or (os.path.isdir(target) and not os.path.islink(target))):
            # tracked but gone from disk (or replaced by a real directory)
            deleted.append(abs)
            if repo.ui.verbose or not exact:
                repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
        # for finding renames
        elif repo.dirstate[abs] == 'r':
            removed.append(abs)
        elif repo.dirstate[abs] == 'a':
            added.append(abs)
    copies = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo,
                added + unknown, removed + deleted, similarity):
            if repo.ui.verbose or not m.exact(old) or not m.exact(new):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (m.rel(old), m.rel(new), score * 100))
            copies[new] = old

    if not dry_run:
        wctx = repo[None]
        wlock = repo.wlock()
        try:
            wctx.remove(deleted)
            wctx.add(unknown)
            for new, old in copies.iteritems():
                wctx.copy(old, new)
        finally:
            wlock.release()
330 330
def copy(ui, repo, pats, opts, rename=False):
    """Copy (or, when rename=True, move) the files matching pats.

    The last element of pats is the destination.  Returns True when at
    least one copy failed, False otherwise.
    """
    # called with the repo lock held
    #
    # hgsep => pathname that uses "/" to separate directories
    # ossep => pathname that uses os.sep to separate directories
    cwd = repo.getcwd()
    targets = {}        # abstarget -> abssrc, for collision detection
    after = opts.get("after")
    dryrun = opts.get("dry_run")
    wctx = repo[None]

    def walkpat(pat):
        # expand one source pattern into (abs, rel, exact) triples,
        # warning about exactly-named files that cannot be copied
        srcs = []
        badstates = after and '?' or '?r'
        m = match(repo, [pat], opts, globbed=True)
        for abs in repo.walk(m):
            state = repo.dirstate[abs]
            rel = m.rel(abs)
            exact = m.exact(abs)
            if state in badstates:
                if exact and state == '?':
                    ui.warn(_('%s: not copying - file is not managed\n') % rel)
                if exact and state == 'r':
                    ui.warn(_('%s: not copying - file has been marked for'
                              ' remove\n') % rel)
                continue
            # abs: hgsep
            # rel: ossep
            srcs.append((abs, rel, exact))
        return srcs

    # abssrc: hgsep
    # relsrc: ossep
    # otarget: ossep
    def copyfile(abssrc, relsrc, otarget, exact):
        # perform one copy; returns True (a failure) only on an I/O error
        abstarget = util.canonpath(repo.root, cwd, otarget)
        reltarget = repo.pathto(abstarget, cwd)
        target = repo.wjoin(abstarget)
        src = repo.wjoin(abssrc)
        state = repo.dirstate[abstarget]

        # check for collisions
        prevsrc = targets.get(abstarget)
        if prevsrc is not None:
            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
                    (reltarget, repo.pathto(abssrc, cwd),
                     repo.pathto(prevsrc, cwd)))
            return

        # check for overwrites
        exists = os.path.exists(target)
        if not after and exists or after and state in 'mn':
            if not opts['force']:
                ui.warn(_('%s: not overwriting - file exists\n') %
                        reltarget)
                return

        if after:
            # --after only records copies that already happened on disk
            if not exists:
                if rename:
                    ui.warn(_('%s: not recording move - %s does not exist\n') %
                            (relsrc, reltarget))
                else:
                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
                            (relsrc, reltarget))
                return
        elif not dryrun:
            try:
                if exists:
                    os.unlink(target)
                targetdir = os.path.dirname(target) or '.'
                if not os.path.isdir(targetdir):
                    os.makedirs(targetdir)
                util.copyfile(src, target)
            except IOError, inst:
                if inst.errno == errno.ENOENT:
                    ui.warn(_('%s: deleted in working copy\n') % relsrc)
                else:
                    ui.warn(_('%s: cannot copy - %s\n') %
                            (relsrc, inst.strerror))
                    return True # report a failure

        if ui.verbose or not exact:
            if rename:
                ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
            else:
                ui.status(_('copying %s to %s\n') % (relsrc, reltarget))

        targets[abstarget] = abssrc

        # fix up dirstate
        origsrc = repo.dirstate.copied(abssrc) or abssrc
        if abstarget == origsrc: # copying back a copy?
            if state not in 'mn' and not dryrun:
                repo.dirstate.normallookup(abstarget)
        else:
            if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
                if not ui.quiet:
                    ui.warn(_("%s has not been committed yet, so no copy "
                              "data will be stored for %s.\n")
                            % (repo.pathto(origsrc, cwd), reltarget))
            if repo.dirstate[abstarget] in '?r' and not dryrun:
                wctx.add([abstarget])
            elif not dryrun:
                wctx.copy(origsrc, abstarget)

        if rename and not dryrun:
            wctx.remove([abssrc], not after)

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathfn(pat, dest, srcs):
        if os.path.isdir(pat):
            abspfx = util.canonpath(repo.root, cwd, pat)
            abspfx = util.localpath(abspfx)
            if destdirexists:
                striplen = len(os.path.split(abspfx)[0])
            else:
                striplen = len(abspfx)
            if striplen:
                striplen += len(os.sep)
            res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
        elif destdirexists:
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            res = lambda p: dest
        return res

    # pat: ossep
    # dest ossep
    # srcs: list of (hgsep, hgsep, ossep, bool)
    # return: function that takes hgsep and returns ossep
    def targetpathafterfn(pat, dest, srcs):
        if matchmod.patkind(pat):
            # a mercurial pattern
            res = lambda p: os.path.join(dest,
                                         os.path.basename(util.localpath(p)))
        else:
            abspfx = util.canonpath(repo.root, cwd, pat)
            if len(abspfx) < len(srcs[0][0]):
                # A directory. Either the target path contains the last
                # component of the source path or it does not.
                def evalpath(striplen):
                    # count how many sources already exist under dest when
                    # stripping striplen chars off their paths
                    score = 0
                    for s in srcs:
                        t = os.path.join(dest, util.localpath(s[0])[striplen:])
                        if os.path.exists(t):
                            score += 1
                    return score

                abspfx = util.localpath(abspfx)
                striplen = len(abspfx)
                if striplen:
                    striplen += len(os.sep)
                if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
                    score = evalpath(striplen)
                    striplen1 = len(os.path.split(abspfx)[0])
                    if striplen1:
                        striplen1 += len(os.sep)
                    if evalpath(striplen1) > score:
                        striplen = striplen1
                res = lambda p: os.path.join(dest,
                                             util.localpath(p)[striplen:])
            else:
                # a file
                if destdirexists:
                    res = lambda p: os.path.join(dest,
                                        os.path.basename(util.localpath(p)))
                else:
                    res = lambda p: dest
        return res


    pats = expandpats(pats)
    if not pats:
        raise util.Abort(_('no source or destination specified'))
    if len(pats) == 1:
        raise util.Abort(_('no destination specified'))
    dest = pats.pop()
    destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
    if not destdirexists:
        if len(pats) > 1 or matchmod.patkind(pats[0]):
            raise util.Abort(_('with multiple sources, destination must be an '
                               'existing directory'))
        if util.endswithsep(dest):
            raise util.Abort(_('destination %s is not a directory') % dest)

    tfn = targetpathfn
    if after:
        tfn = targetpathafterfn
    copylist = []
    for pat in pats:
        srcs = walkpat(pat)
        if not srcs:
            continue
        copylist.append((tfn(pat, dest, srcs), srcs))
    if not copylist:
        raise util.Abort(_('no files to copy'))

    errors = 0
    for targetpath, srcs in copylist:
        for abssrc, relsrc, exact in srcs:
            if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
                errors += 1

    if errors:
        ui.warn(_('(consider using --after)\n'))

    return errors != 0
543 543
def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
            runargs=None, appendpid=False):
    '''Run a command as a service.'''

    # parent side of --daemon: re-exec ourselves detached and wait for
    # the child to signal readiness by removing the lock file
    if opts['daemon'] and not opts['daemon_pipefds']:
        # Signal child process startup with file removal
        lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-')
        os.close(lockfd)
        try:
            if not runargs:
                runargs = util.hgcmd() + sys.argv[1:]
            runargs.append('--daemon-pipefds=%s' % lockpath)
            # Don't pass --cwd to the child process, because we've already
            # changed directory.
            for i in xrange(1, len(runargs)):
                if runargs[i].startswith('--cwd='):
                    del runargs[i]
                    break
                elif runargs[i].startswith('--cwd'):
                    # separate-argument form: drop the flag and its value
                    del runargs[i:i + 2]
                    break
            def condfn():
                return not os.path.exists(lockpath)
            pid = util.rundetached(runargs, condfn)
            if pid < 0:
                raise util.Abort(_('child process failed to start'))
        finally:
            try:
                os.unlink(lockpath)
            except OSError, e:
                # already removed by the child is fine
                if e.errno != errno.ENOENT:
                    raise
        if parentfn:
            return parentfn(pid)
        else:
            return

    if initfn:
        initfn()

    if opts['pid_file']:
        mode = appendpid and 'a' or 'w'
        fp = open(opts['pid_file'], mode)
        fp.write(str(os.getpid()) + '\n')
        fp.close()

    # child side of --daemon: detach from the terminal and tell the
    # parent we are ready by unlinking the lock file
    if opts['daemon_pipefds']:
        lockpath = opts['daemon_pipefds']
        try:
            os.setsid()
        except AttributeError:
            # not available on this platform (e.g. Windows)
            pass
        os.unlink(lockpath)
        util.hidewindow()
        sys.stdout.flush()
        sys.stderr.flush()

        # redirect stdio to the null device / log file
        nullfd = os.open(util.nulldev, os.O_RDWR)
        logfilefd = nullfd
        if logfile:
            logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND)
        os.dup2(nullfd, 0)
        os.dup2(logfilefd, 1)
        os.dup2(logfilefd, 2)
        if nullfd not in (0, 1, 2):
            os.close(nullfd)
        if logfile and logfilefd not in (0, 1, 2):
            os.close(logfilefd)

    if runfn:
        return runfn()
615 615
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.'''

    total = len(revs)
    # width of the widest revision number, used to zero-pad %r
    revwidth = max([len(str(rev)) for rev in revs])

    def single(rev, seqno, fp):
        # write one changeset as a patch, to fp or to a per-rev file
        ctx = repo[rev]
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            # diff against the second parent instead of the first
            parents.reverse()
        prev = (parents and parents[0]) or nullid

        if not fp:
            fp = make_file(repo, template, node, total=total, seqno=seqno,
                           revwidth=revwidth, mode='ab')
        if fp != sys.stdout and hasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        # patch header: metadata consumed by "hg import"
        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % ctx.user())
        fp.write("# Date %d %d\n" % ctx.date())
        if branch and branch != 'default':
            fp.write("# Branch %s\n" % branch)
        fp.write("# Node ID %s\n" % hex(node))
        fp.write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            fp.write("# Parent %s\n" % hex(parents[1]))
        fp.write(ctx.description().rstrip())
        fp.write("\n\n")

        for chunk in patch.diff(repo, prev, node, opts=opts):
            fp.write(chunk)

    for seqno, rev in enumerate(revs):
        # seqno is 1-based for the %n filename escape
        single(rev, seqno + 1, fp)
655 655
def diffordiffstat(ui, repo, diffopts, node1, node2, match,
                   changes=None, stat=False, fp=None):
    '''show diff or diffstat.'''
    if fp is None:
        write = ui.write
    else:
        # writing to a plain file: drop the label keyword arguments
        def write(s, **kw):
            fp.write(s)

    if stat:
        # diffstat is computed over a zero-context diff
        diffopts = diffopts.copy(context=0)
        width = 80
        if not ui.plain():
            width = util.termwidth()
        chunks = patch.diff(repo, node1, node2, match, changes, diffopts)
        for chunk, label in patch.diffstatui(util.iterlines(chunks),
                                             width=width,
                                             git=diffopts.git):
            write(chunk, label=label)
    else:
        for chunk, label in patch.diffui(repo, node1, node2, match,
                                         changes, diffopts):
            write(chunk, label=label)
679 679
class changeset_printer(object):
    '''show changeset information when templating not requested.'''

    def __init__(self, ui, repo, patch, diffopts, buffered):
        self.ui = ui
        self.repo = repo
        self.buffered = buffered   # when True, show() stores output per rev
        self.patch = patch         # matcher selecting files to diff, or False
        self.diffopts = diffopts
        self.header = {}           # rev -> rendered header (buffered mode)
        self.hunk = {}             # rev -> rendered changeset (buffered mode)
        self.lastheader = None
        self.footer = None

    def flush(self, rev):
        # emit buffered output for rev; returns 1 if a hunk was printed
        if rev in self.header:
            h = self.header[rev]
            if h != self.lastheader:
                # identical consecutive headers are printed only once
                self.lastheader = h
                self.ui.write(h)
            del self.header[rev]
        if rev in self.hunk:
            self.ui.write(self.hunk[rev])
            del self.hunk[rev]
            return 1
        return 0

    def close(self):
        if self.footer:
            self.ui.write(self.footer)

    def show(self, ctx, copies=None, matchfn=None, **props):
        if self.buffered:
            # capture the rendering so flush() can emit it later, in order
            self.ui.pushbuffer()
            self._show(ctx, copies, matchfn, props)
            self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True)
        else:
            self._show(ctx, copies, matchfn, props)

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''
        changenode = ctx.node()
        rev = ctx.rev()

        if self.ui.quiet:
            # quiet mode: just "rev:shorthash"
            self.ui.write("%d:%s\n" % (rev, short(changenode)),
                          label='log.node')
            return

        log = self.repo.changelog
        date = util.datestr(ctx.date())

        # full hashes under --debug, short ones otherwise
        hexfunc = self.ui.debugflag and hex or short

        parents = [(p, hexfunc(log.node(p)))
                   for p in self._meaningful_parentrevs(log, rev)]

        self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)),
                      label='log.changeset')

        branch = ctx.branch()
        # don't show the default branch name
        if branch != 'default':
            branch = encoding.tolocal(branch)
            self.ui.write(_("branch: %s\n") % branch,
                          label='log.branch')
        for tag in self.repo.nodetags(changenode):
            self.ui.write(_("tag: %s\n") % tag,
                          label='log.tag')
        for parent in parents:
            self.ui.write(_("parent: %d:%s\n") % parent,
                          label='log.parent')

        if self.ui.debugflag:
            mnode = ctx.manifestnode()
            self.ui.write(_("manifest: %d:%s\n") %
                          (self.repo.manifest.rev(mnode), hex(mnode)),
                          label='ui.debug log.manifest')
        self.ui.write(_("user: %s\n") % ctx.user(),
                      label='log.user')
        self.ui.write(_("date: %s\n") % date,
                      label='log.date')

        if self.ui.debugflag:
            # --debug: modified/added/removed file lists vs first parent
            files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
            for key, value in zip([_("files:"), _("files+:"), _("files-:")],
                                  files):
                if value:
                    self.ui.write("%-12s %s\n" % (key, " ".join(value)),
                                  label='ui.debug log.files')
        elif ctx.files() and self.ui.verbose:
            self.ui.write(_("files: %s\n") % " ".join(ctx.files()),
                          label='ui.note log.files')
        if copies and self.ui.verbose:
            copies = ['%s (%s)' % c for c in copies]
            self.ui.write(_("copies: %s\n") % ' '.join(copies),
                          label='ui.note log.copies')

        extra = ctx.extra()
        if extra and self.ui.debugflag:
            for key, value in sorted(extra.items()):
                self.ui.write(_("extra: %s=%s\n")
                              % (key, value.encode('string_escape')),
                              label='ui.debug log.extra')

        description = ctx.description().strip()
        if description:
            if self.ui.verbose:
                self.ui.write(_("description:\n"),
                              label='ui.note log.description')
                self.ui.write(description,
                              label='ui.note log.description')
                self.ui.write("\n\n")
            else:
                # terse mode: only the first line of the description
                self.ui.write(_("summary: %s\n") %
                              description.splitlines()[0],
                              label='log.summary')
        self.ui.write("\n")

        self.showpatch(changenode, matchfn)

    def showpatch(self, node, matchfn):
        # emit the diff and/or diffstat for node when requested
        if not matchfn:
            matchfn = self.patch
        if matchfn:
            stat = self.diffopts.get('stat')
            diff = self.diffopts.get('patch')
            diffopts = patch.diffopts(self.ui, self.diffopts)
            prev = self.repo.changelog.parents(node)[0]
            if stat:
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=True)
            if diff:
                if stat:
                    # blank line between diffstat and diff
                    self.ui.write("\n")
                diffordiffstat(self.ui, self.repo, diffopts, prev, node,
                               match=matchfn, stat=False)
            self.ui.write("\n")

    def _meaningful_parentrevs(self, log, rev):
        """Return list of meaningful (or all if debug) parentrevs for rev.

        For merges (two non-nullrev revisions) both parents are meaningful.
        Otherwise the first parent revision is considered meaningful if it
        is not the preceding revision.
        """
        parents = log.parentrevs(rev)
        if not self.ui.debugflag and parents[1] == nullrev:
            if parents[0] >= rev - 1:
                parents = []
            else:
                parents = [parents[0]]
        return parents
833 833
834 834
class changeset_templater(changeset_printer):
    '''format changeset information.'''

    def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
        changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
        # full node ids under --debug, 12-char short form otherwise
        formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
        defaulttempl = {
            'parent': '{rev}:{node|formatnode} ',
            'manifest': '{rev}:{node|formatnode}',
            'file_copy': '{name} ({source})',
            'extra': '{key}={value|stringescape}'
        }
        # filecopy is preserved for compatibility reasons
        defaulttempl['filecopy'] = defaulttempl['file_copy']
        self.t = templater.templater(mapfile, {'formatnode': formatnode},
                                     cache=defaulttempl)
        self.cache = {}   # shared cache passed to the template engine

    def use_template(self, t):
        '''set template string to use'''
        self.t.cache['changeset'] = t

    def _meaningful_parentrevs(self, ctx):
        """Return list of meaningful (or all if debug) parentrevs for rev.
        """
        parents = ctx.parents()
        if len(parents) > 1:
            return parents
        if self.ui.debugflag:
            # --debug always shows both parents, padding with the null rev
            return [parents[0], self.repo['null']]
        if parents[0].rev() >= ctx.rev() - 1:
            return []
        return parents

    def _show(self, ctx, copies, matchfn, props):
        '''show a single changeset or file revision'''

        showlist = templatekw.showlist

        # showparents() behaviour depends on ui trace level which
        # causes unexpected behaviours at templating level and makes
        # it harder to extract it in a standalone function. Its
        # behaviour cannot be changed so leave it here for now.
        def showparents(**args):
            ctx = args['ctx']
            parents = [[('rev', p.rev()), ('node', p.hex())]
                       for p in self._meaningful_parentrevs(ctx)]
            return showlist('parent', parents, **args)

        props = props.copy()
        props.update(templatekw.keywords)
        props['parents'] = showparents
        props['templ'] = self.t
        props['ctx'] = ctx
        props['repo'] = self.repo
        props['revcache'] = {'copies': copies}
        props['cache'] = self.cache

        # find correct templates for current mode

        tmplmodes = [
            (True, None),
            (self.ui.verbose, 'verbose'),
            (self.ui.quiet, 'quiet'),
            (self.ui.debugflag, 'debug'),
        ]

        # later entries override earlier ones, so e.g. changeset_debug
        # wins over changeset_verbose when both modes are active
        types = {'header': '', 'footer':'', 'changeset': 'changeset'}
        for mode, postfix in tmplmodes:
            for type in types:
                cur = postfix and ('%s_%s' % (type, postfix)) or type
                if mode and cur in self.t:
                    types[type] = cur

        try:

            # write header
            if types['header']:
                h = templater.stringify(self.t(types['header'], **props))
                if self.buffered:
                    self.header[ctx.rev()] = h
                else:
                    if self.lastheader != h:
                        self.lastheader = h
                        self.ui.write(h)

            # write changeset metadata, then patch if requested
            key = types['changeset']
            self.ui.write(templater.stringify(self.t(key, **props)))
            self.showpatch(ctx.node(), matchfn)

            if types['footer']:
                # the footer is rendered once and emitted by close()
                if not self.footer:
                    self.footer = templater.stringify(self.t(types['footer'],
                                                      **props))

        except KeyError, inst:
            msg = _("%s: no key named '%s'")
            raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
        except SyntaxError, inst:
            raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0]))
936 936
def show_changeset(ui, repo, opts, buffered=False):
    """show one changeset using template or regular display.

    Display format will be the first non-empty hit of:
    1. option 'template'
    2. option 'style'
    3. [ui] setting 'logtemplate'
    4. [ui] setting 'style'
    If all of these values are either the unset or the empty string,
    regular display via changeset_printer() is done.
    """
    # options
    patch = False
    if opts.get('patch') or opts.get('stat'):
        # show a diff/diffstat of every file in each changeset
        patch = matchall(repo)

    tmpl = opts.get('template')
    style = None
    if tmpl:
        tmpl = templater.parsestring(tmpl, quoted=False)
    else:
        style = opts.get('style')

    # ui settings
    if not (tmpl or style):
        tmpl = ui.config('ui', 'logtemplate')
        if tmpl:
            tmpl = templater.parsestring(tmpl)
        else:
            style = util.expandpath(ui.config('ui', 'style', ''))

    if not (tmpl or style):
        return changeset_printer(ui, repo, patch, opts, buffered)

    mapfile = None
    if style and not tmpl:
        mapfile = style
        # a bare style name refers to a bundled map-cmdline.<style> file
        if not os.path.split(mapfile)[0]:
            mapname = (templater.templatepath('map-cmdline.' + mapfile)
                       or templater.templatepath(mapfile))
            if mapname:
                mapfile = mapname

    try:
        t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
    except SyntaxError, inst:
        raise util.Abort(inst.args[0])
    if tmpl:
        t.use_template(tmpl)
    return t
987 987
988 988 def finddate(ui, repo, date):
989 989 """Find the tipmost changeset that matches the given date spec"""
990 990
991 991 df = util.matchdate(date)
992 992 m = matchall(repo)
993 993 results = {}
994 994
995 995 def prep(ctx, fns):
996 996 d = ctx.date()
997 997 if df(d[0]):
998 998 results[ctx.rev()] = d
999 999
1000 1000 for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
1001 1001 rev = ctx.rev()
1002 1002 if rev in results:
1003 1003 ui.status(_("Found revision %s from %s\n") %
1004 1004 (rev, util.datestr(results[rev])))
1005 1005 return str(rev)
1006 1006
1007 1007 raise util.Abort(_("revision matching date not found"))
1008 1008
1009 1009 def walkchangerevs(repo, match, opts, prepare):
1010 1010 '''Iterate over files and the revs in which they changed.
1011 1011
1012 1012 Callers most commonly need to iterate backwards over the history
1013 1013 in which they are interested. Doing so has awful (quadratic-looking)
1014 1014 performance, so we use iterators in a "windowed" way.
1015 1015
1016 1016 We walk a window of revisions in the desired order. Within the
1017 1017 window, we first walk forwards to gather data, then in the desired
1018 1018 order (usually backwards) to display it.
1019 1019
1020 1020 This function returns an iterator yielding contexts. Before
1021 1021 yielding each context, the iterator will first call the prepare
1022 1022 function on each context in the window in forward order.'''
1023 1023
1024 1024 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1025 1025 if start < end:
1026 1026 while start < end:
1027 1027 yield start, min(windowsize, end - start)
1028 1028 start += windowsize
1029 1029 if windowsize < sizelimit:
1030 1030 windowsize *= 2
1031 1031 else:
1032 1032 while start > end:
1033 1033 yield start, min(windowsize, start - end - 1)
1034 1034 start -= windowsize
1035 1035 if windowsize < sizelimit:
1036 1036 windowsize *= 2
1037 1037
1038 1038 follow = opts.get('follow') or opts.get('follow_first')
1039 1039
1040 1040 if not len(repo):
1041 1041 return []
1042 1042
1043 1043 if follow:
1044 1044 defrange = '%s:0' % repo['.'].rev()
1045 1045 else:
1046 1046 defrange = '-1:0'
1047 1047 revs = revrange(repo, opts['rev'] or [defrange])
1048 1048 if not revs:
1049 1049 return []
1050 1050 wanted = set()
1051 1051 slowpath = match.anypats() or (match.files() and opts.get('removed'))
1052 1052 fncache = {}
1053 1053 change = util.cachefunc(repo.changectx)
1054 1054
1055 1055 # First step is to fill wanted, the set of revisions that we want to yield.
1056 1056 # When it does not induce extra cost, we also fill fncache for revisions in
1057 1057 # wanted: a cache of filenames that were changed (ctx.files()) and that
1058 1058 # match the file filtering conditions.
1059 1059
1060 1060 if not slowpath and not match.files():
1061 1061 # No files, no patterns. Display all revs.
1062 1062 wanted = set(revs)
1063 1063 copies = []
1064 1064
1065 1065 if not slowpath:
1066 1066 # We only have to read through the filelog to find wanted revisions
1067 1067
1068 1068 minrev, maxrev = min(revs), max(revs)
1069 1069 def filerevgen(filelog, last):
1070 1070 """
1071 1071 Only files, no patterns. Check the history of each file.
1072 1072
1073 1073 Examines filelog entries within minrev, maxrev linkrev range
1074 1074 Returns an iterator yielding (linkrev, parentlinkrevs, copied)
1075 1075 tuples in backwards order
1076 1076 """
1077 1077 cl_count = len(repo)
1078 1078 revs = []
1079 1079 for j in xrange(0, last + 1):
1080 1080 linkrev = filelog.linkrev(j)
1081 1081 if linkrev < minrev:
1082 1082 continue
1083 1083 # only yield rev for which we have the changelog, it can
1084 1084 # happen while doing "hg log" during a pull or commit
1085 1085 if linkrev > maxrev or linkrev >= cl_count:
1086 1086 break
1087 1087
1088 1088 parentlinkrevs = []
1089 1089 for p in filelog.parentrevs(j):
1090 1090 if p != nullrev:
1091 1091 parentlinkrevs.append(filelog.linkrev(p))
1092 1092 n = filelog.node(j)
1093 1093 revs.append((linkrev, parentlinkrevs,
1094 1094 follow and filelog.renamed(n)))
1095 1095
1096 1096 return reversed(revs)
1097 1097 def iterfiles():
1098 1098 for filename in match.files():
1099 1099 yield filename, None
1100 1100 for filename_node in copies:
1101 1101 yield filename_node
1102 1102 for file_, node in iterfiles():
1103 1103 filelog = repo.file(file_)
1104 1104 if not len(filelog):
1105 1105 if node is None:
1106 1106 # A zero count may be a directory or deleted file, so
1107 1107 # try to find matching entries on the slow path.
1108 1108 if follow:
1109 1109 raise util.Abort(
1110 1110 _('cannot follow nonexistent file: "%s"') % file_)
1111 1111 slowpath = True
1112 1112 break
1113 1113 else:
1114 1114 continue
1115 1115
1116 1116 if node is None:
1117 1117 last = len(filelog) - 1
1118 1118 else:
1119 1119 last = filelog.rev(node)
1120 1120
1121 1121
1122 1122 # keep track of all ancestors of the file
1123 1123 ancestors = set([filelog.linkrev(last)])
1124 1124
1125 1125 # iterate from latest to oldest revision
1126 1126 for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
1127 1127 if rev not in ancestors:
1128 1128 continue
1129 1129 # XXX insert 1327 fix here
1130 1130 if flparentlinkrevs:
1131 1131 ancestors.update(flparentlinkrevs)
1132 1132
1133 1133 fncache.setdefault(rev, []).append(file_)
1134 1134 wanted.add(rev)
1135 1135 if copied:
1136 1136 copies.append(copied)
1137 1137 if slowpath:
1138 1138 # We have to read the changelog to match filenames against
1139 1139 # changed files
1140 1140
1141 1141 if follow:
1142 1142 raise util.Abort(_('can only follow copies/renames for explicit '
1143 1143 'filenames'))
1144 1144
1145 1145 # The slow path checks files modified in every changeset.
1146 1146 for i in sorted(revs):
1147 1147 ctx = change(i)
1148 1148 matches = filter(match, ctx.files())
1149 1149 if matches:
1150 1150 fncache[i] = matches
1151 1151 wanted.add(i)
1152 1152
1153 1153 class followfilter(object):
1154 1154 def __init__(self, onlyfirst=False):
1155 1155 self.startrev = nullrev
1156 1156 self.roots = set()
1157 1157 self.onlyfirst = onlyfirst
1158 1158
1159 1159 def match(self, rev):
1160 1160 def realparents(rev):
1161 1161 if self.onlyfirst:
1162 1162 return repo.changelog.parentrevs(rev)[0:1]
1163 1163 else:
1164 1164 return filter(lambda x: x != nullrev,
1165 1165 repo.changelog.parentrevs(rev))
1166 1166
1167 1167 if self.startrev == nullrev:
1168 1168 self.startrev = rev
1169 1169 return True
1170 1170
1171 1171 if rev > self.startrev:
1172 1172 # forward: all descendants
1173 1173 if not self.roots:
1174 1174 self.roots.add(self.startrev)
1175 1175 for parent in realparents(rev):
1176 1176 if parent in self.roots:
1177 1177 self.roots.add(rev)
1178 1178 return True
1179 1179 else:
1180 1180 # backwards: all parents
1181 1181 if not self.roots:
1182 1182 self.roots.update(realparents(self.startrev))
1183 1183 if rev in self.roots:
1184 1184 self.roots.remove(rev)
1185 1185 self.roots.update(realparents(rev))
1186 1186 return True
1187 1187
1188 1188 return False
1189 1189
1190 1190 # it might be worthwhile to do this in the iterator if the rev range
1191 1191 # is descending and the prune args are all within that range
1192 1192 for rev in opts.get('prune', ()):
1193 1193 rev = repo.changelog.rev(repo.lookup(rev))
1194 1194 ff = followfilter()
1195 1195 stop = min(revs[0], revs[-1])
1196 1196 for x in xrange(rev, stop - 1, -1):
1197 1197 if ff.match(x):
1198 1198 wanted.discard(x)
1199 1199
1200 1200 # Now that wanted is correctly initialized, we can iterate over the
1201 1201 # revision range, yielding only revisions in wanted.
1202 1202 def iterate():
1203 1203 if follow and not match.files():
1204 1204 ff = followfilter(onlyfirst=opts.get('follow_first'))
1205 1205 def want(rev):
1206 1206 return ff.match(rev) and rev in wanted
1207 1207 else:
1208 1208 def want(rev):
1209 1209 return rev in wanted
1210 1210
1211 1211 for i, window in increasing_windows(0, len(revs)):
1212 1212 nrevs = [rev for rev in revs[i:i + window] if want(rev)]
1213 1213 for rev in sorted(nrevs):
1214 1214 fns = fncache.get(rev)
1215 1215 ctx = change(rev)
1216 1216 if not fns:
1217 1217 def fns_generator():
1218 1218 for f in ctx.files():
1219 1219 if match(f):
1220 1220 yield f
1221 1221 fns = fns_generator()
1222 1222 prepare(ctx, fns)
1223 1223 for rev in nrevs:
1224 1224 yield change(rev)
1225 1225 return iterate()
1226 1226
1227 1227 def commit(ui, repo, commitfunc, pats, opts):
1228 1228 '''commit the specified files or all outstanding changes'''
1229 1229 date = opts.get('date')
1230 1230 if date:
1231 1231 opts['date'] = util.parsedate(date)
1232 1232 message = logmessage(opts)
1233 1233
1234 1234 # extract addremove carefully -- this function can be called from a command
1235 1235 # that doesn't support addremove
1236 1236 if opts.get('addremove'):
1237 1237 addremove(repo, pats, opts)
1238 1238
1239 1239 return commitfunc(ui, repo, message, match(repo, pats, opts), opts)
1240 1240
1241 1241 def commiteditor(repo, ctx, subs):
1242 1242 if ctx.description():
1243 1243 return ctx.description()
1244 1244 return commitforceeditor(repo, ctx, subs)
1245 1245
1246 1246 def commitforceeditor(repo, ctx, subs):
1247 1247 edittext = []
1248 1248 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
1249 1249 if ctx.description():
1250 1250 edittext.append(ctx.description())
1251 1251 edittext.append("")
1252 1252 edittext.append("") # Empty line between message and comments.
1253 1253 edittext.append(_("HG: Enter commit message."
1254 1254 " Lines beginning with 'HG:' are removed."))
1255 1255 edittext.append(_("HG: Leave message empty to abort commit."))
1256 1256 edittext.append("HG: --")
1257 1257 edittext.append(_("HG: user: %s") % ctx.user())
1258 1258 if ctx.p2():
1259 1259 edittext.append(_("HG: branch merge"))
1260 1260 if ctx.branch():
1261 1261 edittext.append(_("HG: branch '%s'")
1262 1262 % encoding.tolocal(ctx.branch()))
1263 1263 edittext.extend([_("HG: subrepo %s") % s for s in subs])
1264 1264 edittext.extend([_("HG: added %s") % f for f in added])
1265 1265 edittext.extend([_("HG: changed %s") % f for f in modified])
1266 1266 edittext.extend([_("HG: removed %s") % f for f in removed])
1267 1267 if not added and not modified and not removed:
1268 1268 edittext.append(_("HG: no files changed"))
1269 1269 edittext.append("")
1270 1270 # run editor in the repository root
1271 1271 olddir = os.getcwd()
1272 1272 os.chdir(repo.root)
1273 1273 text = repo.ui.edit("\n".join(edittext), ctx.user())
1274 1274 text = re.sub("(?m)^HG:.*\n", "", text)
1275 1275 os.chdir(olddir)
1276 1276
1277 1277 if not text.strip():
1278 1278 raise util.Abort(_("empty commit message"))
1279 1279
1280 1280 return text
@@ -1,588 +1,588
1 1 # revset.py - revision set queries for mercurial
2 2 #
3 3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import re
9 9 import parser, util, error, discovery
10 import match as _match
10 import match as matchmod
11 11 from i18n import _
12 12
13 13 elements = {
14 14 "(": (20, ("group", 1, ")"), ("func", 1, ")")),
15 15 "-": (19, ("negate", 19), ("minus", 19)),
16 16 "::": (17, ("dagrangepre", 17), ("dagrange", 17),
17 17 ("dagrangepost", 17)),
18 18 "..": (17, ("dagrangepre", 17), ("dagrange", 17),
19 19 ("dagrangepost", 17)),
20 20 ":": (15, ("rangepre", 15), ("range", 15), ("rangepost", 15)),
21 21 "not": (10, ("not", 10)),
22 22 "!": (10, ("not", 10)),
23 23 "and": (5, None, ("and", 5)),
24 24 "&": (5, None, ("and", 5)),
25 25 "or": (4, None, ("or", 4)),
26 26 "|": (4, None, ("or", 4)),
27 27 "+": (4, None, ("or", 4)),
28 28 ",": (2, None, ("list", 2)),
29 29 ")": (0, None, None),
30 30 "symbol": (0, ("symbol",), None),
31 31 "string": (0, ("string",), None),
32 32 "end": (0, None, None),
33 33 }
34 34
35 35 keywords = set(['and', 'or', 'not'])
36 36
37 37 def tokenize(program):
38 38 pos, l = 0, len(program)
39 39 while pos < l:
40 40 c = program[pos]
41 41 if c.isspace(): # skip inter-token whitespace
42 42 pass
43 43 elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully
44 44 yield ('::', None, pos)
45 45 pos += 1 # skip ahead
46 46 elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
47 47 yield ('..', None, pos)
48 48 pos += 1 # skip ahead
49 49 elif c in "():,-|&+!": # handle simple operators
50 50 yield (c, None, pos)
51 51 elif c in '"\'': # handle quoted strings
52 52 pos += 1
53 53 s = pos
54 54 while pos < l: # find closing quote
55 55 d = program[pos]
56 56 if d == '\\': # skip over escaped characters
57 57 pos += 2
58 58 continue
59 59 if d == c:
60 60 yield ('string', program[s:pos].decode('string-escape'), s)
61 61 break
62 62 pos += 1
63 63 else:
64 64 raise error.ParseError(_("unterminated string"), s)
65 65 elif c.isalnum() or c in '._' or ord(c) > 127: # gather up a symbol/keyword
66 66 s = pos
67 67 pos += 1
68 68 while pos < l: # find end of symbol
69 69 d = program[pos]
70 70 if not (d.isalnum() or d in "._" or ord(d) > 127):
71 71 break
72 72 if d == '.' and program[pos - 1] == '.': # special case for ..
73 73 pos -= 1
74 74 break
75 75 pos += 1
76 76 sym = program[s:pos]
77 77 if sym in keywords: # operator keywords
78 78 yield (sym, None, s)
79 79 else:
80 80 yield ('symbol', sym, s)
81 81 pos -= 1
82 82 else:
83 83 raise error.ParseError(_("syntax error"), pos)
84 84 pos += 1
85 85 yield ('end', None, pos)
86 86
87 87 # helpers
88 88
89 89 def getstring(x, err):
90 90 if x and (x[0] == 'string' or x[0] == 'symbol'):
91 91 return x[1]
92 92 raise error.ParseError(err)
93 93
94 94 def getlist(x):
95 95 if not x:
96 96 return []
97 97 if x[0] == 'list':
98 98 return getlist(x[1]) + [x[2]]
99 99 return [x]
100 100
101 101 def getargs(x, min, max, err):
102 102 l = getlist(x)
103 103 if len(l) < min or len(l) > max:
104 104 raise error.ParseError(err)
105 105 return l
106 106
107 107 def getset(repo, subset, x):
108 108 if not x:
109 109 raise error.ParseError(_("missing argument"))
110 110 return methods[x[0]](repo, subset, *x[1:])
111 111
112 112 # operator methods
113 113
114 114 def stringset(repo, subset, x):
115 115 x = repo[x].rev()
116 116 if x == -1 and len(subset) == len(repo):
117 117 return [-1]
118 118 if x in subset:
119 119 return [x]
120 120 return []
121 121
122 122 def symbolset(repo, subset, x):
123 123 if x in symbols:
124 124 raise error.ParseError(_("can't use %s here") % x)
125 125 return stringset(repo, subset, x)
126 126
127 127 def rangeset(repo, subset, x, y):
128 128 m = getset(repo, subset, x)
129 129 if not m:
130 130 m = getset(repo, range(len(repo)), x)
131 131
132 132 n = getset(repo, subset, y)
133 133 if not n:
134 134 n = getset(repo, range(len(repo)), y)
135 135
136 136 if not m or not n:
137 137 return []
138 138 m, n = m[0], n[-1]
139 139
140 140 if m < n:
141 141 r = range(m, n + 1)
142 142 else:
143 143 r = range(m, n - 1, -1)
144 144 s = set(subset)
145 145 return [x for x in r if x in s]
146 146
147 147 def andset(repo, subset, x, y):
148 148 return getset(repo, getset(repo, subset, x), y)
149 149
150 150 def orset(repo, subset, x, y):
151 151 s = set(getset(repo, subset, x))
152 152 s |= set(getset(repo, [r for r in subset if r not in s], y))
153 153 return [r for r in subset if r in s]
154 154
155 155 def notset(repo, subset, x):
156 156 s = set(getset(repo, subset, x))
157 157 return [r for r in subset if r not in s]
158 158
159 159 def listset(repo, subset, a, b):
160 160 raise error.ParseError(_("can't use a list in this context"))
161 161
162 162 def func(repo, subset, a, b):
163 163 if a[0] == 'symbol' and a[1] in symbols:
164 164 return symbols[a[1]](repo, subset, b)
165 165 raise error.ParseError(_("not a function: %s") % a[1])
166 166
167 167 # functions
168 168
169 169 def p1(repo, subset, x):
170 170 ps = set()
171 171 cl = repo.changelog
172 172 for r in getset(repo, subset, x):
173 173 ps.add(cl.parentrevs(r)[0])
174 174 return [r for r in subset if r in ps]
175 175
176 176 def p2(repo, subset, x):
177 177 ps = set()
178 178 cl = repo.changelog
179 179 for r in getset(repo, subset, x):
180 180 ps.add(cl.parentrevs(r)[1])
181 181 return [r for r in subset if r in ps]
182 182
183 183 def parents(repo, subset, x):
184 184 ps = set()
185 185 cl = repo.changelog
186 186 for r in getset(repo, subset, x):
187 187 ps.update(cl.parentrevs(r))
188 188 return [r for r in subset if r in ps]
189 189
190 190 def maxrev(repo, subset, x):
191 191 s = getset(repo, subset, x)
192 192 if s:
193 193 m = max(s)
194 194 if m in subset:
195 195 return [m]
196 196 return []
197 197
198 198 def minrev(repo, subset, x):
199 199 s = getset(repo, subset, x)
200 200 if s:
201 201 m = min(s)
202 202 if m in subset:
203 203 return [m]
204 204 return []
205 205
206 206 def limit(repo, subset, x):
207 207 l = getargs(x, 2, 2, _("limit wants two arguments"))
208 208 try:
209 209 lim = int(getstring(l[1], _("limit wants a number")))
210 210 except ValueError:
211 211 raise error.ParseError(_("limit expects a number"))
212 212 return getset(repo, subset, l[0])[:lim]
213 213
214 214 def children(repo, subset, x):
215 215 cs = set()
216 216 cl = repo.changelog
217 217 s = set(getset(repo, subset, x))
218 218 for r in xrange(0, len(repo)):
219 219 for p in cl.parentrevs(r):
220 220 if p in s:
221 221 cs.add(r)
222 222 return [r for r in subset if r in cs]
223 223
224 224 def branch(repo, subset, x):
225 225 s = getset(repo, range(len(repo)), x)
226 226 b = set()
227 227 for r in s:
228 228 b.add(repo[r].branch())
229 229 s = set(s)
230 230 return [r for r in subset if r in s or repo[r].branch() in b]
231 231
232 232 def ancestor(repo, subset, x):
233 233 l = getargs(x, 2, 2, _("ancestor wants two arguments"))
234 234 r = range(len(repo))
235 235 a = getset(repo, r, l[0])
236 236 b = getset(repo, r, l[1])
237 237 if len(a) != 1 or len(b) != 1:
238 238 raise error.ParseError(_("ancestor arguments must be single revisions"))
239 239 an = [repo[a[0]].ancestor(repo[b[0]]).rev()]
240 240
241 241 return [r for r in an if r in subset]
242 242
243 243 def ancestors(repo, subset, x):
244 244 args = getset(repo, range(len(repo)), x)
245 245 if not args:
246 246 return []
247 247 s = set(repo.changelog.ancestors(*args)) | set(args)
248 248 return [r for r in subset if r in s]
249 249
250 250 def descendants(repo, subset, x):
251 251 args = getset(repo, range(len(repo)), x)
252 252 if not args:
253 253 return []
254 254 s = set(repo.changelog.descendants(*args)) | set(args)
255 255 return [r for r in subset if r in s]
256 256
257 257 def follow(repo, subset, x):
258 258 getargs(x, 0, 0, _("follow takes no arguments"))
259 259 p = repo['.'].rev()
260 260 s = set(repo.changelog.ancestors(p)) | set([p])
261 261 return [r for r in subset if r in s]
262 262
263 263 def date(repo, subset, x):
264 264 ds = getstring(x, _("date wants a string"))
265 265 dm = util.matchdate(ds)
266 266 return [r for r in subset if dm(repo[r].date()[0])]
267 267
268 268 def keyword(repo, subset, x):
269 269 kw = getstring(x, _("keyword wants a string")).lower()
270 270 l = []
271 271 for r in subset:
272 272 c = repo[r]
273 273 t = " ".join(c.files() + [c.user(), c.description()])
274 274 if kw in t.lower():
275 275 l.append(r)
276 276 return l
277 277
278 278 def grep(repo, subset, x):
279 279 gr = re.compile(getstring(x, _("grep wants a string")))
280 280 l = []
281 281 for r in subset:
282 282 c = repo[r]
283 283 for e in c.files() + [c.user(), c.description()]:
284 284 if gr.search(e):
285 285 l.append(r)
286 286 continue
287 287 return l
288 288
289 289 def author(repo, subset, x):
290 290 n = getstring(x, _("author wants a string")).lower()
291 291 return [r for r in subset if n in repo[r].user().lower()]
292 292
293 293 def hasfile(repo, subset, x):
294 294 pat = getstring(x, _("file wants a pattern"))
295 m = _match.match(repo.root, repo.getcwd(), [pat])
295 m = matchmod.match(repo.root, repo.getcwd(), [pat])
296 296 s = []
297 297 for r in subset:
298 298 for f in repo[r].files():
299 299 if m(f):
300 300 s.append(r)
301 301 continue
302 302 return s
303 303
304 304 def contains(repo, subset, x):
305 305 pat = getstring(x, _("contains wants a pattern"))
306 m = _match.match(repo.root, repo.getcwd(), [pat])
306 m = matchmod.match(repo.root, repo.getcwd(), [pat])
307 307 s = []
308 308 if m.files() == [pat]:
309 309 for r in subset:
310 310 if pat in repo[r]:
311 311 s.append(r)
312 312 continue
313 313 else:
314 314 for r in subset:
315 315 for f in repo[r].manifest():
316 316 if m(f):
317 317 s.append(r)
318 318 continue
319 319 return s
320 320
321 321 def checkstatus(repo, subset, pat, field):
322 m = _match.match(repo.root, repo.getcwd(), [pat])
322 m = matchmod.match(repo.root, repo.getcwd(), [pat])
323 323 s = []
324 324 fast = (m.files() == [pat])
325 325 for r in subset:
326 326 c = repo[r]
327 327 if fast:
328 328 if pat not in c.files():
329 329 continue
330 330 else:
331 331 for f in c.files():
332 332 if m(f):
333 333 break
334 334 else:
335 335 continue
336 336 files = repo.status(c.p1().node(), c.node())[field]
337 337 if fast:
338 338 if pat in files:
339 339 s.append(r)
340 340 continue
341 341 else:
342 342 for f in files:
343 343 if m(f):
344 344 s.append(r)
345 345 continue
346 346 return s
347 347
348 348 def modifies(repo, subset, x):
349 349 pat = getstring(x, _("modifies wants a pattern"))
350 350 return checkstatus(repo, subset, pat, 0)
351 351
352 352 def adds(repo, subset, x):
353 353 pat = getstring(x, _("adds wants a pattern"))
354 354 return checkstatus(repo, subset, pat, 1)
355 355
356 356 def removes(repo, subset, x):
357 357 pat = getstring(x, _("removes wants a pattern"))
358 358 return checkstatus(repo, subset, pat, 2)
359 359
360 360 def merge(repo, subset, x):
361 361 getargs(x, 0, 0, _("merge takes no arguments"))
362 362 cl = repo.changelog
363 363 return [r for r in subset if cl.parentrevs(r)[1] != -1]
364 364
365 365 def closed(repo, subset, x):
366 366 getargs(x, 0, 0, _("closed takes no arguments"))
367 367 return [r for r in subset if repo[r].extra().get('close')]
368 368
369 369 def head(repo, subset, x):
370 370 getargs(x, 0, 0, _("head takes no arguments"))
371 371 hs = set()
372 372 for b, ls in repo.branchmap().iteritems():
373 373 hs.update(repo[h].rev() for h in ls)
374 374 return [r for r in subset if r in hs]
375 375
376 376 def reverse(repo, subset, x):
377 377 l = getset(repo, subset, x)
378 378 l.reverse()
379 379 return l
380 380
381 381 def present(repo, subset, x):
382 382 try:
383 383 return getset(repo, subset, x)
384 384 except error.RepoLookupError:
385 385 return []
386 386
387 387 def sort(repo, subset, x):
388 388 l = getargs(x, 1, 2, _("sort wants one or two arguments"))
389 389 keys = "rev"
390 390 if len(l) == 2:
391 391 keys = getstring(l[1], _("sort spec must be a string"))
392 392
393 393 s = l[0]
394 394 keys = keys.split()
395 395 l = []
396 396 def invert(s):
397 397 return "".join(chr(255 - ord(c)) for c in s)
398 398 for r in getset(repo, subset, s):
399 399 c = repo[r]
400 400 e = []
401 401 for k in keys:
402 402 if k == 'rev':
403 403 e.append(r)
404 404 elif k == '-rev':
405 405 e.append(-r)
406 406 elif k == 'branch':
407 407 e.append(c.branch())
408 408 elif k == '-branch':
409 409 e.append(invert(c.branch()))
410 410 elif k == 'desc':
411 411 e.append(c.description())
412 412 elif k == '-desc':
413 413 e.append(invert(c.description()))
414 414 elif k in 'user author':
415 415 e.append(c.user())
416 416 elif k in '-user -author':
417 417 e.append(invert(c.user()))
418 418 elif k == 'date':
419 419 e.append(c.date()[0])
420 420 elif k == '-date':
421 421 e.append(-c.date()[0])
422 422 else:
423 423 raise error.ParseError(_("unknown sort key %r") % k)
424 424 e.append(r)
425 425 l.append(e)
426 426 l.sort()
427 427 return [e[-1] for e in l]
428 428
429 429 def getall(repo, subset, x):
430 430 getargs(x, 0, 0, _("all takes no arguments"))
431 431 return subset
432 432
433 433 def heads(repo, subset, x):
434 434 s = getset(repo, subset, x)
435 435 ps = set(parents(repo, subset, x))
436 436 return [r for r in s if r not in ps]
437 437
438 438 def roots(repo, subset, x):
439 439 s = getset(repo, subset, x)
440 440 cs = set(children(repo, subset, x))
441 441 return [r for r in s if r not in cs]
442 442
443 443 def outgoing(repo, subset, x):
444 444 import hg # avoid start-up nasties
445 445 l = getargs(x, 0, 1, _("outgoing wants a repository path"))
446 446 dest = l and getstring(l[0], _("outgoing wants a repository path")) or ''
447 447 dest = repo.ui.expandpath(dest or 'default-push', dest or 'default')
448 448 dest, branches = hg.parseurl(dest)
449 449 other = hg.repository(hg.remoteui(repo, {}), dest)
450 450 repo.ui.pushbuffer()
451 451 o = discovery.findoutgoing(repo, other)
452 452 repo.ui.popbuffer()
453 453 cl = repo.changelog
454 454 o = set([cl.rev(r) for r in repo.changelog.nodesbetween(o, None)[0]])
455 455 return [r for r in subset if r in o]
456 456
457 457 def tagged(repo, subset, x):
458 458 getargs(x, 0, 0, _("tagged takes no arguments"))
459 459 cl = repo.changelog
460 460 s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip'])
461 461 return [r for r in subset if r in s]
462 462
463 463 symbols = {
464 464 "adds": adds,
465 465 "all": getall,
466 466 "ancestor": ancestor,
467 467 "ancestors": ancestors,
468 468 "author": author,
469 469 "branch": branch,
470 470 "children": children,
471 471 "closed": closed,
472 472 "contains": contains,
473 473 "date": date,
474 474 "descendants": descendants,
475 475 "file": hasfile,
476 476 "follow": follow,
477 477 "grep": grep,
478 478 "head": head,
479 479 "heads": heads,
480 480 "keyword": keyword,
481 481 "limit": limit,
482 482 "max": maxrev,
483 483 "min": minrev,
484 484 "merge": merge,
485 485 "modifies": modifies,
486 486 "outgoing": outgoing,
487 487 "p1": p1,
488 488 "p2": p2,
489 489 "parents": parents,
490 490 "present": present,
491 491 "removes": removes,
492 492 "reverse": reverse,
493 493 "roots": roots,
494 494 "sort": sort,
495 495 "tagged": tagged,
496 496 "user": author,
497 497 }
498 498
499 499 methods = {
500 500 "range": rangeset,
501 501 "string": stringset,
502 502 "symbol": symbolset,
503 503 "and": andset,
504 504 "or": orset,
505 505 "not": notset,
506 506 "list": listset,
507 507 "func": func,
508 508 }
509 509
510 510 def optimize(x, small):
511 511 if x == None:
512 512 return 0, x
513 513
514 514 smallbonus = 1
515 515 if small:
516 516 smallbonus = .5
517 517
518 518 op = x[0]
519 519 if op == 'minus':
520 520 return optimize(('and', x[1], ('not', x[2])), small)
521 521 elif op == 'dagrange':
522 522 return optimize(('and', ('func', ('symbol', 'descendants'), x[1]),
523 523 ('func', ('symbol', 'ancestors'), x[2])), small)
524 524 elif op == 'dagrangepre':
525 525 return optimize(('func', ('symbol', 'ancestors'), x[1]), small)
526 526 elif op == 'dagrangepost':
527 527 return optimize(('func', ('symbol', 'descendants'), x[1]), small)
528 528 elif op == 'rangepre':
529 529 return optimize(('range', ('string', '0'), x[1]), small)
530 530 elif op == 'rangepost':
531 531 return optimize(('range', x[1], ('string', 'tip')), small)
532 532 elif op == 'negate':
533 533 return optimize(('string',
534 534 '-' + getstring(x[1], _("can't negate that"))), small)
535 535 elif op in 'string symbol negate':
536 536 return smallbonus, x # single revisions are small
537 537 elif op == 'and' or op == 'dagrange':
538 538 wa, ta = optimize(x[1], True)
539 539 wb, tb = optimize(x[2], True)
540 540 w = min(wa, wb)
541 541 if wa > wb:
542 542 return w, (op, tb, ta)
543 543 return w, (op, ta, tb)
544 544 elif op == 'or':
545 545 wa, ta = optimize(x[1], False)
546 546 wb, tb = optimize(x[2], False)
547 547 if wb < wa:
548 548 wb, wa = wa, wb
549 549 return max(wa, wb), (op, ta, tb)
550 550 elif op == 'not':
551 551 o = optimize(x[1], not small)
552 552 return o[0], (op, o[1])
553 553 elif op == 'group':
554 554 return optimize(x[1], small)
555 555 elif op in 'range list':
556 556 wa, ta = optimize(x[1], small)
557 557 wb, tb = optimize(x[2], small)
558 558 return wa + wb, (op, ta, tb)
559 559 elif op == 'func':
560 560 f = getstring(x[1], _("not a symbol"))
561 561 wa, ta = optimize(x[2], small)
562 562 if f in "grep date user author keyword branch file":
563 563 w = 10 # slow
564 564 elif f in "modifies adds removes outgoing":
565 565 w = 30 # slower
566 566 elif f == "contains":
567 567 w = 100 # very slow
568 568 elif f == "ancestor":
569 569 w = 1 * smallbonus
570 570 elif f == "reverse limit":
571 571 w = 0
572 572 elif f in "sort":
573 573 w = 10 # assume most sorts look at changelog
574 574 else:
575 575 w = 1
576 576 return w + wa, (op, x[1], ta)
577 577 return 1, x
578 578
579 579 parse = parser.parser(tokenize, elements).parse
580 580
581 581 def match(spec):
582 582 if not spec:
583 583 raise error.ParseError(_("empty query"))
584 584 tree = parse(spec)
585 585 weight, tree = optimize(tree, True)
586 586 def mfunc(repo, subset):
587 587 return getset(repo, subset, tree)
588 588 return mfunc
@@ -1,326 +1,326
1 1 # wireproto.py - generic wire protocol support functions
2 2 #
3 3 # Copyright 2005-2010 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import urllib, tempfile, os, sys
9 9 from i18n import _
10 10 from node import bin, hex
11 11 import changegroup as changegroupmod
12 12 import repo, error, encoding, util, store
13 import pushkey as pushkey_
13 import pushkey as pushkeymod
14 14
15 15 # list of nodes encoding / decoding
16 16
def decodelist(l, sep=' '):
    """Split *l* on *sep* and turn each piece into a binary node."""
    parts = l.split(sep)
    return map(bin, parts)
19 19
def encodelist(l, sep=' '):
    """Render each node in *l* as hex and join them with *sep*."""
    return sep.join([hex(node) for node in l])
22 22
23 23 # client side
24 24
class wirerepository(repo.repository):
    """Client-side proxy speaking the hg wire protocol.

    Only command encoding and response parsing live here; the transport
    primitives (_call, _callstream, _callpush, _decompress, _abort,
    requirecap, capable) are expected from a subclass or mixin.
    """

    def lookup(self, key):
        # Reply format: "<success> <data>\n" where data is converted
        # with bin() on success and is an error message otherwise.
        self.requirecap('lookup', _('look up remote revision'))
        d = self._call("lookup", key=key)
        success, data = d[:-1].split(" ", 1)
        if int(success):
            return bin(data)
        self._abort(error.RepoError(data))

    def heads(self):
        # Reply is one space-separated node list terminated by a newline.
        d = self._call("heads")
        try:
            return decodelist(d[:-1])
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt;
            # presumably intentional "abort on any garbage" — confirm.
            self._abort(error.ResponseError(_("unexpected response:"), d))

    def branchmap(self):
        # Reply: one "<quoted-branchname> <node> <node> ..." line per branch.
        d = self._call("branchmap")
        try:
            branchmap = {}
            for branchpart in d.splitlines():
                branchname, branchheads = branchpart.split(' ', 1)
                branchname = urllib.unquote(branchname)
                # Earlier servers (1.3.x) send branch names in (their) local
                # charset. The best we can do is assume it's identical to our
                # own local charset, in case it's not utf-8.
                try:
                    branchname.decode('utf-8')
                except UnicodeDecodeError:
                    branchname = encoding.fromlocal(branchname)
                branchheads = decodelist(branchheads)
                branchmap[branchname] = branchheads
            return branchmap
        except TypeError:
            self._abort(error.ResponseError(_("unexpected response:"), d))

    def branches(self, nodes):
        # Reply: one encoded node list per requested node.
        n = encodelist(nodes)
        d = self._call("branches", nodes=n)
        try:
            br = [tuple(decodelist(b)) for b in d.splitlines()]
            return br
        except:
            # NOTE(review): bare except — see heads() above.
            self._abort(error.ResponseError(_("unexpected response:"), d))

    def between(self, pairs):
        # Pairs are encoded as "old-new"; batched to keep each request small.
        batch = 8 # avoid giant requests
        r = []
        for i in xrange(0, len(pairs), batch):
            n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
            d = self._call("between", pairs=n)
            try:
                # empty line -> empty list (no intermediate nodes)
                r.extend(l and decodelist(l) or [] for l in d.splitlines())
            except:
                # NOTE(review): bare except — see heads() above.
                self._abort(error.ResponseError(_("unexpected response:"), d))
        return r

    def pushkey(self, namespace, key, old, new):
        # Returns False when the server lacks the pushkey capability.
        if not self.capable('pushkey'):
            return False
        d = self._call("pushkey",
                       namespace=namespace, key=key, old=old, new=new)
        return bool(int(d))

    def listkeys(self, namespace):
        # Reply: "key\tvalue" lines, both fields string-escaped (Python 2).
        if not self.capable('pushkey'):
            return {}
        d = self._call("listkeys", namespace=namespace)
        r = {}
        for l in d.splitlines():
            k, v = l.split('\t')
            r[k.decode('string-escape')] = v.decode('string-escape')
        return r

    def stream_out(self):
        # Raw streaming clone data; parsing is left to the caller.
        return self._callstream('stream_out')

    def changegroup(self, nodes, kind):
        # Fetch a changegroup rooted at *nodes*; result is decompressed.
        n = encodelist(nodes)
        f = self._callstream("changegroup", roots=n)
        return self._decompress(f)

    def changegroupsubset(self, bases, heads, kind):
        # Fetch changes between *bases* and *heads* (requires capability).
        self.requirecap('changegroupsubset', _('look up remote changes'))
        bases = encodelist(bases)
        heads = encodelist(heads)
        return self._decompress(self._callstream("changegroupsubset",
                                                 bases=bases, heads=heads))

    def unbundle(self, cg, heads, source):
        '''Send cg (a readable file-like object representing the
        changegroup to push, typically a chunkbuffer object) to the
        remote server as a bundle. Return an integer indicating the
        result of the push (see localrepository.addchangegroup()).'''

        ret, output = self._callpush("unbundle", cg, heads=encodelist(heads))
        # An empty return code means the push itself failed.
        if ret == "":
            raise error.ResponseError(
                _('push failed:'), output)
        try:
            ret = int(ret)
        except ValueError:
            raise error.ResponseError(
                _('push failed (unexpected response):'), ret)

        # Relay the server's output to the user, line by line.
        for l in output.splitlines(True):
            self.ui.status(_('remote: '), l)
        return ret
133 133
134 134 # server side
135 135
class streamres(object):
    """Marker wrapper: a wire-protocol result to be streamed to the client."""
    def __init__(self, gen):
        # generator/iterable yielding the raw chunks to transmit
        self.gen = gen
139 139
class pushres(object):
    """Marker wrapper: the integer result of an unbundle (push) command."""
    def __init__(self, res):
        # result code as produced by repo.addchangegroup()
        self.res = res
143 143
def dispatch(repo, proto, command):
    """Look up *command* in the command table and invoke its handler."""
    handler, argspec = commands[command]
    decoded = proto.getargs(argspec)
    return handler(repo, proto, *decoded)
148 148
def between(repo, proto, pairs):
    """Serve 'between': one encoded node list per '-'-separated pair."""
    decoded = [decodelist(p, '-') for p in pairs.split(" ")]
    lines = []
    for nodes in repo.between(decoded):
        lines.append(encodelist(nodes) + "\n")
    return "".join(lines)
155 155
def branchmap(repo, proto):
    """Serve 'branchmap': one '<quoted-name> <nodes...>' line per branch."""
    lines = []
    for branch, nodes in repo.branchmap().iteritems():
        quoted = urllib.quote(branch)
        lines.append('%s %s' % (quoted, encodelist(nodes)))
    return '\n'.join(lines)
164 164
def branches(repo, proto, nodes):
    """Serve 'branches': one encoded node list per requested node."""
    out = []
    for b in repo.branches(decodelist(nodes)):
        out.append(encodelist(b) + "\n")
    return "".join(out)
171 171
def capabilities(repo, proto):
    """Return the space-separated capability string advertised to clients."""
    caps = ['lookup', 'changegroupsubset', 'branchmap', 'pushkey']
    if _allowstream(repo.ui):
        # advertise streaming clone together with the store format version
        caps.append('stream=%d' % repo.changelog.version)
    caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
    return ' '.join(caps)
178 178
def changegroup(repo, proto, roots):
    """Serve 'changegroup': stream a group rooted at *roots*."""
    cg = repo.changegroup(decodelist(roots), 'serve')
    return streamres(proto.groupchunks(cg))
183 183
def changegroupsubset(repo, proto, bases, heads):
    """Serve 'changegroupsubset': stream changes between bases and heads."""
    base_nodes = decodelist(bases)
    head_nodes = decodelist(heads)
    cg = repo.changegroupsubset(base_nodes, head_nodes, 'serve')
    return streamres(proto.groupchunks(cg))
189 189
def heads(repo, proto):
    """Serve 'heads': the repository heads as one encoded line."""
    return encodelist(repo.heads()) + "\n"
193 193
def hello(repo, proto):
    '''the hello command returns a set of lines describing various
    interesting things about the server, in an RFC822-like format.
    Currently the only one defined is "capabilities", which
    consists of a line in the form:

    capabilities: space separated list of tokens
    '''
    return "capabilities: %s\n" % capabilities(repo, proto)
203 203
def listkeys(repo, proto, namespace):
    """Serve 'listkeys': tab-separated, string-escaped key/value lines."""
    pairs = pushkeymod.list(repo, namespace).items()
    lines = ['%s\t%s' % (k.encode('string-escape'),
                         v.encode('string-escape')) for k, v in pairs]
    return '\n'.join(lines)
209 209
210 210 def lookup(repo, proto, key):
211 211 try:
212 212 r = hex(repo.lookup(key))
213 213 success = 1
214 214 except Exception, inst:
215 215 r = str(inst)
216 216 success = 0
217 217 return "%s %s\n" % (success, r)
218 218
def pushkey(repo, proto, namespace, key, old, new):
    """Serve 'pushkey': forward to the pushkey subsystem, reply '0' or '1'."""
    ok = pushkeymod.push(repo, namespace, key, old, new)
    return '%s\n' % int(ok)
222 222
def _allowstream(ui):
    # Streaming clone is permitted unless server.uncompressed is set to
    # false; the option is read untrusted and defaults to True.
    return ui.configbool('server', 'uncompressed', True, untrusted=True)
225 225
def stream(repo, proto):
    '''If the server supports streaming clone, it advertises the "stream"
    capability with a value representing the version and flags of the repo
    it is serving. Client checks to see if it understands the format.

    The format is simple: the server writes out a line with the amount
    of files, then the total amount of bytes to be transfered (separated
    by a space). Then, for each file, the server first writes the filename
    and filesize (separated by the null character), then the file contents.
    '''

    # streaming disabled by configuration: reply with error code 1
    if not _allowstream(repo.ui):
        return '1\n'

    entries = []
    total_bytes = 0
    try:
        # get consistent snapshot of repo, lock during scan
        lock = repo.lock()
        try:
            repo.ui.debug('scanning\n')
            for name, ename, size in repo.store.walk():
                entries.append((name, size))
                total_bytes += size
        finally:
            lock.release()
    except error.LockError:
        return '2\n' # error: 2

    def streamer(repo, entries, total):
        '''stream out all metadata files in repository.'''
        # NOTE(review): the 'total' parameter is unused — the generator
        # reads total_bytes from the enclosing scope instead.
        yield '0\n' # success
        repo.ui.debug('%d files, %d bytes to transfer\n' %
                      (len(entries), total_bytes))
        yield '%d %d\n' % (len(entries), total_bytes)
        for name, size in entries:
            repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
            # partially encode name over the wire for backwards compat
            yield '%s\0%d\n' % (store.encodedir(name), size)
            for chunk in util.filechunkiter(repo.sopener(name), limit=size):
                yield chunk

    return streamres(streamer(repo, entries, total_bytes))
269 269
def unbundle(repo, proto, heads):
    # Heads the client believes the server has; the literal list
    # ['force'] bypasses the synchronization check below.
    their_heads = decodelist(heads)

    def check_heads():
        heads = repo.heads()
        return their_heads == ['force'] or their_heads == heads

    # fail early if possible
    if not check_heads():
        return 'unsynced changes'

    # write bundle data to temporary file because it can be big
    fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
    fp = os.fdopen(fd, 'wb+')
    r = 0
    proto.redirect()
    try:
        proto.getfile(fp)
        lock = repo.lock()
        try:
            # re-check under the lock: heads may have moved meanwhile
            if not check_heads():
                # someone else committed/pushed/unbundled while we
                # were transferring data
                return 'unsynced changes'

            # push can proceed
            fp.seek(0)
            gen = changegroupmod.readbundle(fp, None)

            try:
                r = repo.addchangegroup(gen, 'serve', proto._client(),
                                        lock=lock)
            except util.Abort, inst:
                # report the abort to the client but keep r = 0
                sys.stderr.write("abort: %s\n" % inst)
        finally:
            lock.release()
        return pushres(r)

    finally:
        # always remove the temporary bundle file
        fp.close()
        os.unlink(tempname)
311 311
# Wire protocol command table: name -> (handler, argument spec string).
# dispatch() passes the spec to proto.getargs() to decode the arguments.
commands = {
    'between': (between, 'pairs'),
    'branchmap': (branchmap, ''),
    'branches': (branches, 'nodes'),
    'capabilities': (capabilities, ''),
    'changegroup': (changegroup, 'roots'),
    'changegroupsubset': (changegroupsubset, 'bases heads'),
    'heads': (heads, ''),
    'hello': (hello, ''),
    'listkeys': (listkeys, 'namespace'),
    'lookup': (lookup, 'key'),
    'pushkey': (pushkey, 'namespace key old new'),
    'stream_out': (stream, ''),
    'unbundle': (unbundle, 'heads'),
}
General Comments 0
You need to be logged in to leave comments. Login now