change locate to use relglobs by default...
Alexis S. L. Carvalho
r4195:e8ee8fde default
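The cmdutil.py hunk below threads a new `default` keyword through matchpats() and walk() down to util.cmdmatcher(), so a caller such as the locate command can ask for bare patterns to be treated as relative globs. A minimal sketch of a caller using the new keyword, assuming 'relglob' as the default pattern kind; the list_matches helper is illustrative only and not part of this changeset:

    # sketch only: with default='relglob', a bare name like 'foo.c'
    # behaves as 'relglob:foo.c' when walking the working directory
    from mercurial import cmdutil

    def list_matches(ui, repo, *pats, **opts):
        for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
                                                 default='relglob'):
            ui.write('%s\n' % rel)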
@@ -1,769 +1,770 @@
1 1 # cmdutil.py - help for command processing in mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), 'os sys')
12 12 demandload(globals(), 'mdiff util templater patch')
13 13
14 14 revrangesep = ':'
15 15
16 16 def revpair(repo, revs):
17 17 '''return pair of nodes, given list of revisions. second item can
18 18 be None, meaning use working dir.'''
19 19
20 20 def revfix(repo, val, defval):
21 21 if not val and val != 0 and defval is not None:
22 22 val = defval
23 23 return repo.lookup(val)
24 24
25 25 if not revs:
26 26 return repo.dirstate.parents()[0], None
27 27 end = None
28 28 if len(revs) == 1:
29 29 if revrangesep in revs[0]:
30 30 start, end = revs[0].split(revrangesep, 1)
31 31 start = revfix(repo, start, 0)
32 32 end = revfix(repo, end, repo.changelog.count() - 1)
33 33 else:
34 34 start = revfix(repo, revs[0], None)
35 35 elif len(revs) == 2:
36 36 if revrangesep in revs[0] or revrangesep in revs[1]:
37 37 raise util.Abort(_('too many revisions specified'))
38 38 start = revfix(repo, revs[0], None)
39 39 end = revfix(repo, revs[1], None)
40 40 else:
41 41 raise util.Abort(_('too many revisions specified'))
42 42 return start, end
43 43
44 44 def revrange(repo, revs):
45 45 """Yield revision as strings from a list of revision specifications."""
46 46
47 47 def revfix(repo, val, defval):
48 48 if not val and val != 0 and defval is not None:
49 49 return defval
50 50 return repo.changelog.rev(repo.lookup(val))
51 51
52 52 seen, l = {}, []
53 53 for spec in revs:
54 54 if revrangesep in spec:
55 55 start, end = spec.split(revrangesep, 1)
56 56 start = revfix(repo, start, 0)
57 57 end = revfix(repo, end, repo.changelog.count() - 1)
58 58 step = start > end and -1 or 1
59 59 for rev in xrange(start, end+step, step):
60 60 if rev in seen:
61 61 continue
62 62 seen[rev] = 1
63 63 l.append(rev)
64 64 else:
65 65 rev = revfix(repo, spec, None)
66 66 if rev in seen:
67 67 continue
68 68 seen[rev] = 1
69 69 l.append(rev)
70 70
71 71 return l
72 72
73 73 def make_filename(repo, pat, node,
74 74 total=None, seqno=None, revwidth=None, pathname=None):
75 75 node_expander = {
76 76 'H': lambda: hex(node),
77 77 'R': lambda: str(repo.changelog.rev(node)),
78 78 'h': lambda: short(node),
79 79 }
80 80 expander = {
81 81 '%': lambda: '%',
82 82 'b': lambda: os.path.basename(repo.root),
83 83 }
84 84
85 85 try:
86 86 if node:
87 87 expander.update(node_expander)
88 88 if node and revwidth is not None:
89 89 expander['r'] = (lambda:
90 90 str(repo.changelog.rev(node)).zfill(revwidth))
91 91 if total is not None:
92 92 expander['N'] = lambda: str(total)
93 93 if seqno is not None:
94 94 expander['n'] = lambda: str(seqno)
95 95 if total is not None and seqno is not None:
96 96 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
97 97 if pathname is not None:
98 98 expander['s'] = lambda: os.path.basename(pathname)
99 99 expander['d'] = lambda: os.path.dirname(pathname) or '.'
100 100 expander['p'] = lambda: pathname
101 101
102 102 newname = []
103 103 patlen = len(pat)
104 104 i = 0
105 105 while i < patlen:
106 106 c = pat[i]
107 107 if c == '%':
108 108 i += 1
109 109 c = pat[i]
110 110 c = expander[c]()
111 111 newname.append(c)
112 112 i += 1
113 113 return ''.join(newname)
114 114 except KeyError, inst:
115 115 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
116 116 inst.args[0])
117 117
118 118 def make_file(repo, pat, node=None,
119 119 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
120 120 if not pat or pat == '-':
121 121 return 'w' in mode and sys.stdout or sys.stdin
122 122 if hasattr(pat, 'write') and 'w' in mode:
123 123 return pat
124 124 if hasattr(pat, 'read') and 'r' in mode:
125 125 return pat
126 126 return open(make_filename(repo, pat, node, total, seqno, revwidth,
127 127 pathname),
128 128 mode)
129 129
130 def matchpats(repo, pats=[], opts={}, head='', globbed=False):
130 def matchpats(repo, pats=[], opts={}, head='', globbed=False, default=None):
131 131 cwd = repo.getcwd()
132 132 return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
133 opts.get('exclude'), head, globbed=globbed)
133 opts.get('exclude'), head, globbed=globbed,
134 default=default)
134 135
135 136 def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None,
136 globbed=False):
137 globbed=False, default=None):
137 138 files, matchfn, anypats = matchpats(repo, pats, opts, head,
138 globbed=globbed)
139 globbed=globbed, default=default)
139 140 exact = dict.fromkeys(files)
140 141 for src, fn in repo.walk(node=node, files=files, match=matchfn,
141 142 badmatch=badmatch):
142 143 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
143 144
144 145 def findrenames(repo, added=None, removed=None, threshold=0.5):
145 146 if added is None or removed is None:
146 147 added, removed = repo.status()[1:3]
147 148 changes = repo.changelog.read(repo.dirstate.parents()[0])
148 149 mf = repo.manifest.read(changes[0])
149 150 for a in added:
150 151 aa = repo.wread(a)
151 152 bestscore, bestname = None, None
152 153 for r in removed:
153 154 rr = repo.file(r).read(mf[r])
154 155 delta = mdiff.textdiff(aa, rr)
155 156 if len(delta) < len(aa):
156 157 myscore = 1.0 - (float(len(delta)) / len(aa))
157 158 if bestscore is None or myscore > bestscore:
158 159 bestscore, bestname = myscore, r
159 160 if bestname and bestscore >= threshold:
160 161 yield bestname, a, bestscore
161 162
162 163 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
163 164 similarity=None):
164 165 if dry_run is None:
165 166 dry_run = opts.get('dry_run')
166 167 if similarity is None:
167 168 similarity = float(opts.get('similarity') or 0)
168 169 add, remove = [], []
169 170 mapping = {}
170 171 for src, abs, rel, exact in walk(repo, pats, opts):
171 172 if src == 'f' and repo.dirstate.state(abs) == '?':
172 173 add.append(abs)
173 174 mapping[abs] = rel, exact
174 175 if repo.ui.verbose or not exact:
175 176 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
176 177 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
177 178 remove.append(abs)
178 179 mapping[abs] = rel, exact
179 180 if repo.ui.verbose or not exact:
180 181 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
181 182 if not dry_run:
182 183 repo.add(add, wlock=wlock)
183 184 repo.remove(remove, wlock=wlock)
184 185 if similarity > 0:
185 186 for old, new, score in findrenames(repo, add, remove, similarity):
186 187 oldrel, oldexact = mapping[old]
187 188 newrel, newexact = mapping[new]
188 189 if repo.ui.verbose or not oldexact or not newexact:
189 190 repo.ui.status(_('recording removal of %s as rename to %s '
190 191 '(%d%% similar)\n') %
191 192 (oldrel, newrel, score * 100))
192 193 if not dry_run:
193 194 repo.copy(old, new, wlock=wlock)
194 195
195 196 class changeset_printer(object):
196 197 '''show changeset information when templating not requested.'''
197 198
198 199 def __init__(self, ui, repo, patch, brinfo, buffered):
199 200 self.ui = ui
200 201 self.repo = repo
201 202 self.buffered = buffered
202 203 self.patch = patch
203 204 self.brinfo = brinfo
204 205 self.header = {}
205 206 self.hunk = {}
206 207 self.lastheader = None
207 208
208 209 def flush(self, rev):
209 210 if rev in self.header:
210 211 h = self.header[rev]
211 212 if h != self.lastheader:
212 213 self.lastheader = h
213 214 self.ui.write(h)
214 215 del self.header[rev]
215 216 if rev in self.hunk:
216 217 self.ui.write(self.hunk[rev])
217 218 del self.hunk[rev]
218 219 return 1
219 220 return 0
220 221
221 222 def show(self, rev=0, changenode=None, copies=None, **props):
222 223 if self.buffered:
223 224 self.ui.pushbuffer()
224 225 self._show(rev, changenode, copies, props)
225 226 self.hunk[rev] = self.ui.popbuffer()
226 227 else:
227 228 self._show(rev, changenode, copies, props)
228 229
229 230 def _show(self, rev, changenode, copies, props):
230 231 '''show a single changeset or file revision'''
231 232 log = self.repo.changelog
232 233 if changenode is None:
233 234 changenode = log.node(rev)
234 235 elif not rev:
235 236 rev = log.rev(changenode)
236 237
237 238 if self.ui.quiet:
238 239 self.ui.write("%d:%s\n" % (rev, short(changenode)))
239 240 return
240 241
241 242 changes = log.read(changenode)
242 243 date = util.datestr(changes[2])
243 244 extra = changes[5]
244 245 branch = extra.get("branch")
245 246
246 247 hexfunc = self.ui.debugflag and hex or short
247 248
248 249 parents = log.parentrevs(rev)
249 250 if not self.ui.debugflag:
250 251 if parents[1] == nullrev:
251 252 if parents[0] >= rev - 1:
252 253 parents = []
253 254 else:
254 255 parents = [parents[0]]
255 256 parents = [(p, hexfunc(log.node(p))) for p in parents]
256 257
257 258 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
258 259
259 260 if branch:
260 261 branch = util.tolocal(branch)
261 262 self.ui.write(_("branch: %s\n") % branch)
262 263 for tag in self.repo.nodetags(changenode):
263 264 self.ui.write(_("tag: %s\n") % tag)
264 265 for parent in parents:
265 266 self.ui.write(_("parent: %d:%s\n") % parent)
266 267
267 268 if self.brinfo:
268 269 br = self.repo.branchlookup([changenode])
269 270 if br:
270 271 self.ui.write(_("branch: %s\n") % " ".join(br[changenode]))
271 272
272 273 if self.ui.debugflag:
273 274 self.ui.write(_("manifest: %d:%s\n") %
274 275 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
275 276 self.ui.write(_("user: %s\n") % changes[1])
276 277 self.ui.write(_("date: %s\n") % date)
277 278
278 279 if self.ui.debugflag:
279 280 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
280 281 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
281 282 files):
282 283 if value:
283 284 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
284 285 elif changes[3] and self.ui.verbose:
285 286 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
286 287 if copies and self.ui.verbose:
287 288 copies = ['%s (%s)' % c for c in copies]
288 289 self.ui.write(_("copies: %s\n") % ' '.join(copies))
289 290
290 291 if extra and self.ui.debugflag:
291 292 extraitems = extra.items()
292 293 extraitems.sort()
293 294 for key, value in extraitems:
294 295 self.ui.write(_("extra: %s=%s\n")
295 296 % (key, value.encode('string_escape')))
296 297
297 298 description = changes[4].strip()
298 299 if description:
299 300 if self.ui.verbose:
300 301 self.ui.write(_("description:\n"))
301 302 self.ui.write(description)
302 303 self.ui.write("\n\n")
303 304 else:
304 305 self.ui.write(_("summary: %s\n") %
305 306 description.splitlines()[0])
306 307 self.ui.write("\n")
307 308
308 309 self.showpatch(changenode)
309 310
310 311 def showpatch(self, node):
311 312 if self.patch:
312 313 prev = self.repo.changelog.parents(node)[0]
313 314 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui)
314 315 self.ui.write("\n")
315 316
316 317 class changeset_templater(changeset_printer):
317 318 '''format changeset information.'''
318 319
319 320 def __init__(self, ui, repo, patch, brinfo, mapfile, buffered):
320 321 changeset_printer.__init__(self, ui, repo, patch, brinfo, buffered)
321 322 self.t = templater.templater(mapfile, templater.common_filters,
322 323 cache={'parent': '{rev}:{node|short} ',
323 324 'manifest': '{rev}:{node|short}',
324 325 'filecopy': '{name} ({source})'})
325 326
326 327 def use_template(self, t):
327 328 '''set template string to use'''
328 329 self.t.cache['changeset'] = t
329 330
330 331 def _show(self, rev, changenode, copies, props):
331 332 '''show a single changeset or file revision'''
332 333 log = self.repo.changelog
333 334 if changenode is None:
334 335 changenode = log.node(rev)
335 336 elif not rev:
336 337 rev = log.rev(changenode)
337 338
338 339 changes = log.read(changenode)
339 340
340 341 def showlist(name, values, plural=None, **args):
341 342 '''expand set of values.
342 343 name is name of key in template map.
343 344 values is list of strings or dicts.
344 345 plural is plural of name, if not simply name + 's'.
345 346
346 347 expansion works like this, given name 'foo'.
347 348
348 349 if values is empty, expand 'no_foos'.
349 350
350 351 if 'foo' not in template map, return values as a string,
351 352 joined by space.
352 353
353 354 expand 'start_foos'.
354 355
355 356 for each value, expand 'foo'. if 'last_foo' in template
356 357 map, expand it instead of 'foo' for last key.
357 358
358 359 expand 'end_foos'.
359 360 '''
360 361 if plural: names = plural
361 362 else: names = name + 's'
362 363 if not values:
363 364 noname = 'no_' + names
364 365 if noname in self.t:
365 366 yield self.t(noname, **args)
366 367 return
367 368 if name not in self.t:
368 369 if isinstance(values[0], str):
369 370 yield ' '.join(values)
370 371 else:
371 372 for v in values:
372 373 yield dict(v, **args)
373 374 return
374 375 startname = 'start_' + names
375 376 if startname in self.t:
376 377 yield self.t(startname, **args)
377 378 vargs = args.copy()
378 379 def one(v, tag=name):
379 380 try:
380 381 vargs.update(v)
381 382 except (AttributeError, ValueError):
382 383 try:
383 384 for a, b in v:
384 385 vargs[a] = b
385 386 except ValueError:
386 387 vargs[name] = v
387 388 return self.t(tag, **vargs)
388 389 lastname = 'last_' + name
389 390 if lastname in self.t:
390 391 last = values.pop()
391 392 else:
392 393 last = None
393 394 for v in values:
394 395 yield one(v)
395 396 if last is not None:
396 397 yield one(last, tag=lastname)
397 398 endname = 'end_' + names
398 399 if endname in self.t:
399 400 yield self.t(endname, **args)
400 401
401 402 def showbranches(**args):
402 403 branch = changes[5].get("branch")
403 404 if branch:
404 405 branch = util.tolocal(branch)
405 406 return showlist('branch', [branch], plural='branches', **args)
406 407 # add old style branches if requested
407 408 if self.brinfo:
408 409 br = self.repo.branchlookup([changenode])
409 410 if changenode in br:
410 411 return showlist('branch', br[changenode],
411 412 plural='branches', **args)
412 413
413 414 def showparents(**args):
414 415 parents = [[('rev', log.rev(p)), ('node', hex(p))]
415 416 for p in log.parents(changenode)
416 417 if self.ui.debugflag or p != nullid]
417 418 if (not self.ui.debugflag and len(parents) == 1 and
418 419 parents[0][0][1] == rev - 1):
419 420 return
420 421 return showlist('parent', parents, **args)
421 422
422 423 def showtags(**args):
423 424 return showlist('tag', self.repo.nodetags(changenode), **args)
424 425
425 426 def showextras(**args):
426 427 extras = changes[5].items()
427 428 extras.sort()
428 429 for key, value in extras:
429 430 args = args.copy()
430 431 args.update(dict(key=key, value=value))
431 432 yield self.t('extra', **args)
432 433
433 434 def showcopies(**args):
434 435 c = [{'name': x[0], 'source': x[1]} for x in copies]
435 436 return showlist('file_copy', c, plural='file_copies', **args)
436 437
437 438 if self.ui.debugflag:
438 439 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
439 440 def showfiles(**args):
440 441 return showlist('file', files[0], **args)
441 442 def showadds(**args):
442 443 return showlist('file_add', files[1], **args)
443 444 def showdels(**args):
444 445 return showlist('file_del', files[2], **args)
445 446 def showmanifest(**args):
446 447 args = args.copy()
447 448 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
448 449 node=hex(changes[0])))
449 450 return self.t('manifest', **args)
450 451 else:
451 452 def showfiles(**args):
452 453 return showlist('file', changes[3], **args)
453 454 showadds = ''
454 455 showdels = ''
455 456 showmanifest = ''
456 457
457 458 defprops = {
458 459 'author': changes[1],
459 460 'branches': showbranches,
460 461 'date': changes[2],
461 462 'desc': changes[4],
462 463 'file_adds': showadds,
463 464 'file_dels': showdels,
464 465 'files': showfiles,
465 466 'file_copies': showcopies,
466 467 'manifest': showmanifest,
467 468 'node': hex(changenode),
468 469 'parents': showparents,
469 470 'rev': rev,
470 471 'tags': showtags,
471 472 'extras': showextras,
472 473 }
473 474 props = props.copy()
474 475 props.update(defprops)
475 476
476 477 try:
477 478 if self.ui.debugflag and 'header_debug' in self.t:
478 479 key = 'header_debug'
479 480 elif self.ui.quiet and 'header_quiet' in self.t:
480 481 key = 'header_quiet'
481 482 elif self.ui.verbose and 'header_verbose' in self.t:
482 483 key = 'header_verbose'
483 484 elif 'header' in self.t:
484 485 key = 'header'
485 486 else:
486 487 key = ''
487 488 if key:
488 489 h = templater.stringify(self.t(key, **props))
489 490 if self.buffered:
490 491 self.header[rev] = h
491 492 else:
492 493 self.ui.write(h)
493 494 if self.ui.debugflag and 'changeset_debug' in self.t:
494 495 key = 'changeset_debug'
495 496 elif self.ui.quiet and 'changeset_quiet' in self.t:
496 497 key = 'changeset_quiet'
497 498 elif self.ui.verbose and 'changeset_verbose' in self.t:
498 499 key = 'changeset_verbose'
499 500 else:
500 501 key = 'changeset'
501 502 self.ui.write(templater.stringify(self.t(key, **props)))
502 503 self.showpatch(changenode)
503 504 except KeyError, inst:
504 505 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
505 506 inst.args[0]))
506 507 except SyntaxError, inst:
507 508 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
508 509
509 510 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
510 511 """show one changeset using template or regular display.
511 512
512 513 Display format will be the first non-empty hit of:
513 514 1. option 'template'
514 515 2. option 'style'
515 516 3. [ui] setting 'logtemplate'
516 517 4. [ui] setting 'style'
517 518 If all of these values are either unset or the empty string,
518 519 regular display via changeset_printer() is done.
519 520 """
520 521 # options
521 522 patch = False
522 523 if opts.get('patch'):
523 524 patch = matchfn or util.always
524 525
525 526 br = None
526 527 if opts.get('branches'):
527 528 ui.warn(_("the --branches option is deprecated, "
528 529 "please use 'hg branches' instead\n"))
529 530 br = True
530 531 tmpl = opts.get('template')
531 532 mapfile = None
532 533 if tmpl:
533 534 tmpl = templater.parsestring(tmpl, quoted=False)
534 535 else:
535 536 mapfile = opts.get('style')
536 537 # ui settings
537 538 if not mapfile:
538 539 tmpl = ui.config('ui', 'logtemplate')
539 540 if tmpl:
540 541 tmpl = templater.parsestring(tmpl)
541 542 else:
542 543 mapfile = ui.config('ui', 'style')
543 544
544 545 if tmpl or mapfile:
545 546 if mapfile:
546 547 if not os.path.split(mapfile)[0]:
547 548 mapname = (templater.templatepath('map-cmdline.' + mapfile)
548 549 or templater.templatepath(mapfile))
549 550 if mapname: mapfile = mapname
550 551 try:
551 552 t = changeset_templater(ui, repo, patch, br, mapfile, buffered)
552 553 except SyntaxError, inst:
553 554 raise util.Abort(inst.args[0])
554 555 if tmpl: t.use_template(tmpl)
555 556 return t
556 557 return changeset_printer(ui, repo, patch, br, buffered)
557 558
558 559 def finddate(ui, repo, date):
559 560 """Find the tipmost changeset that matches the given date spec"""
560 561 df = util.matchdate(date + " to " + date)
561 562 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
562 563 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
563 564 results = {}
564 565 for st, rev, fns in changeiter:
565 566 if st == 'add':
566 567 d = get(rev)[2]
567 568 if df(d[0]):
568 569 results[rev] = d
569 570 elif st == 'iter':
570 571 if rev in results:
571 572 ui.status("Found revision %s from %s\n" %
572 573 (rev, util.datestr(results[rev])))
573 574 return str(rev)
574 575
575 576 raise util.Abort(_("revision matching date not found"))
576 577
577 578 def walkchangerevs(ui, repo, pats, change, opts):
578 579 '''Iterate over files and the revs they changed in.
579 580
580 581 Callers most commonly need to iterate backwards over the history
581 582 they are interested in. Doing so has awful (quadratic-looking)
582 583 performance, so we use iterators in a "windowed" way.
583 584
584 585 We walk a window of revisions in the desired order. Within the
585 586 window, we first walk forwards to gather data, then in the desired
586 587 order (usually backwards) to display it.
587 588
588 589 This function returns an (iterator, matchfn) tuple. The iterator
589 590 yields 3-tuples. They will be of one of the following forms:
590 591
591 592 "window", incrementing, lastrev: stepping through a window,
592 593 positive if walking forwards through revs, last rev in the
593 594 sequence iterated over - use to reset state for the current window
594 595
595 596 "add", rev, fns: out-of-order traversal of the given file names
596 597 fns, which changed during revision rev - use to gather data for
597 598 possible display
598 599
599 600 "iter", rev, None: in-order traversal of the revs earlier iterated
600 601 over with "add" - use to display data'''
601 602
602 603 def increasing_windows(start, end, windowsize=8, sizelimit=512):
603 604 if start < end:
604 605 while start < end:
605 606 yield start, min(windowsize, end-start)
606 607 start += windowsize
607 608 if windowsize < sizelimit:
608 609 windowsize *= 2
609 610 else:
610 611 while start > end:
611 612 yield start, min(windowsize, start-end-1)
612 613 start -= windowsize
613 614 if windowsize < sizelimit:
614 615 windowsize *= 2
615 616
616 617 files, matchfn, anypats = matchpats(repo, pats, opts)
617 618 follow = opts.get('follow') or opts.get('follow_first')
618 619
619 620 if repo.changelog.count() == 0:
620 621 return [], matchfn
621 622
622 623 if follow:
623 624 defrange = '%s:0' % repo.changectx().rev()
624 625 else:
625 626 defrange = 'tip:0'
626 627 revs = revrange(repo, opts['rev'] or [defrange])
627 628 wanted = {}
628 629 slowpath = anypats or opts.get('removed')
629 630 fncache = {}
630 631
631 632 if not slowpath and not files:
632 633 # No files, no patterns. Display all revs.
633 634 wanted = dict.fromkeys(revs)
634 635 copies = []
635 636 if not slowpath:
636 637 # Only files, no patterns. Check the history of each file.
637 638 def filerevgen(filelog, node):
638 639 cl_count = repo.changelog.count()
639 640 if node is None:
640 641 last = filelog.count() - 1
641 642 else:
642 643 last = filelog.rev(node)
643 644 for i, window in increasing_windows(last, nullrev):
644 645 revs = []
645 646 for j in xrange(i - window, i + 1):
646 647 n = filelog.node(j)
647 648 revs.append((filelog.linkrev(n),
648 649 follow and filelog.renamed(n)))
649 650 revs.reverse()
650 651 for rev in revs:
651 652 # only yield rev for which we have the changelog, it can
652 653 # happen while doing "hg log" during a pull or commit
653 654 if rev[0] < cl_count:
654 655 yield rev
655 656 def iterfiles():
656 657 for filename in files:
657 658 yield filename, None
658 659 for filename_node in copies:
659 660 yield filename_node
660 661 minrev, maxrev = min(revs), max(revs)
661 662 for file_, node in iterfiles():
662 663 filelog = repo.file(file_)
663 664 # A zero count may be a directory or deleted file, so
664 665 # try to find matching entries on the slow path.
665 666 if filelog.count() == 0:
666 667 slowpath = True
667 668 break
668 669 for rev, copied in filerevgen(filelog, node):
669 670 if rev <= maxrev:
670 671 if rev < minrev:
671 672 break
672 673 fncache.setdefault(rev, [])
673 674 fncache[rev].append(file_)
674 675 wanted[rev] = 1
675 676 if follow and copied:
676 677 copies.append(copied)
677 678 if slowpath:
678 679 if follow:
679 680 raise util.Abort(_('can only follow copies/renames for explicit '
680 681 'file names'))
681 682
682 683 # The slow path checks files modified in every changeset.
683 684 def changerevgen():
684 685 for i, window in increasing_windows(repo.changelog.count()-1,
685 686 nullrev):
686 687 for j in xrange(i - window, i + 1):
687 688 yield j, change(j)[3]
688 689
689 690 for rev, changefiles in changerevgen():
690 691 matches = filter(matchfn, changefiles)
691 692 if matches:
692 693 fncache[rev] = matches
693 694 wanted[rev] = 1
694 695
695 696 class followfilter:
696 697 def __init__(self, onlyfirst=False):
697 698 self.startrev = nullrev
698 699 self.roots = []
699 700 self.onlyfirst = onlyfirst
700 701
701 702 def match(self, rev):
702 703 def realparents(rev):
703 704 if self.onlyfirst:
704 705 return repo.changelog.parentrevs(rev)[0:1]
705 706 else:
706 707 return filter(lambda x: x != nullrev,
707 708 repo.changelog.parentrevs(rev))
708 709
709 710 if self.startrev == nullrev:
710 711 self.startrev = rev
711 712 return True
712 713
713 714 if rev > self.startrev:
714 715 # forward: all descendants
715 716 if not self.roots:
716 717 self.roots.append(self.startrev)
717 718 for parent in realparents(rev):
718 719 if parent in self.roots:
719 720 self.roots.append(rev)
720 721 return True
721 722 else:
722 723 # backwards: all parents
723 724 if not self.roots:
724 725 self.roots.extend(realparents(self.startrev))
725 726 if rev in self.roots:
726 727 self.roots.remove(rev)
727 728 self.roots.extend(realparents(rev))
728 729 return True
729 730
730 731 return False
731 732
732 733 # it might be worthwhile to do this in the iterator if the rev range
733 734 # is descending and the prune args are all within that range
734 735 for rev in opts.get('prune', ()):
735 736 rev = repo.changelog.rev(repo.lookup(rev))
736 737 ff = followfilter()
737 738 stop = min(revs[0], revs[-1])
738 739 for x in xrange(rev, stop-1, -1):
739 740 if ff.match(x) and x in wanted:
740 741 del wanted[x]
741 742
742 743 def iterate():
743 744 if follow and not files:
744 745 ff = followfilter(onlyfirst=opts.get('follow_first'))
745 746 def want(rev):
746 747 if ff.match(rev) and rev in wanted:
747 748 return True
748 749 return False
749 750 else:
750 751 def want(rev):
751 752 return rev in wanted
752 753
753 754 for i, window in increasing_windows(0, len(revs)):
754 755 yield 'window', revs[0] < revs[-1], revs[-1]
755 756 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
756 757 srevs = list(nrevs)
757 758 srevs.sort()
758 759 for rev in srevs:
759 760 fns = fncache.get(rev)
760 761 if not fns:
761 762 def fns_generator():
762 763 for f in change(rev)[3]:
763 764 if matchfn(f):
764 765 yield f
765 766 fns = fns_generator()
766 767 yield 'add', rev, fns
767 768 for rev in nrevs:
768 769 yield 'iter', rev, None
769 770 return iterate(), matchfn
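Before the commands.py hunk, the 'window' / 'add' / 'iter' protocol documented in walkchangerevs() above is easiest to see from the consumer side. A minimal sketch, assuming opts carries the keys a log-style command would pass ('rev', 'follow', etc.); count_matching_revs is a hypothetical helper, not code from this changeset:

    from mercurial import cmdutil, util

    def count_matching_revs(ui, repo, pats, opts):
        # cached changeset reader, as finddate() does above
        get = util.cachefunc(lambda r: repo.changectx(r).changeset())
        changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
        gathered = {}   # per-window data collected during the 'add' pass
        shown = 0
        for st, rev, fns in changeiter:
            if st == 'window':
                # here rev is the 'incrementing' flag and fns the last rev
                gathered.clear()
            elif st == 'add':
                gathered[rev] = list(fns)          # fns may be a generator
            elif st == 'iter':
                ui.write('%d: %s\n' % (rev, ' '.join(gathered.get(rev, []))))
                shown += 1
        return shown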
@@ -1,3344 +1,3343 @@
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from demandload import demandload
9 9 from node import *
10 10 from i18n import gettext as _
11 11 demandload(globals(), "bisect os re sys signal imp urllib pdb shlex stat")
12 12 demandload(globals(), "fancyopts ui hg util lock revlog bundlerepo")
13 13 demandload(globals(), "difflib patch time help mdiff tempfile")
14 14 demandload(globals(), "traceback errno version atexit socket")
15 15 demandload(globals(), "archival changegroup cmdutil hgweb.server sshserver")
16 16
17 17 class UnknownCommand(Exception):
18 18 """Exception raised if command is not in the command table."""
19 19 class AmbiguousCommand(Exception):
20 20 """Exception raised if command shortcut matches more than one command."""
21 21
22 22 def bail_if_changed(repo):
23 23 modified, added, removed, deleted = repo.status()[:4]
24 24 if modified or added or removed or deleted:
25 25 raise util.Abort(_("outstanding uncommitted changes"))
26 26
27 27 def logmessage(opts):
28 28 """ get the log message according to -m and -l option """
29 29 message = opts['message']
30 30 logfile = opts['logfile']
31 31
32 32 if message and logfile:
33 33 raise util.Abort(_('options --message and --logfile are mutually '
34 34 'exclusive'))
35 35 if not message and logfile:
36 36 try:
37 37 if logfile == '-':
38 38 message = sys.stdin.read()
39 39 else:
40 40 message = open(logfile).read()
41 41 except IOError, inst:
42 42 raise util.Abort(_("can't read commit message '%s': %s") %
43 43 (logfile, inst.strerror))
44 44 return message
45 45
46 46 def setremoteconfig(ui, opts):
47 47 "copy remote options to ui tree"
48 48 if opts.get('ssh'):
49 49 ui.setconfig("ui", "ssh", opts['ssh'])
50 50 if opts.get('remotecmd'):
51 51 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
52 52
53 53 # Commands start here, listed alphabetically
54 54
55 55 def add(ui, repo, *pats, **opts):
56 56 """add the specified files on the next commit
57 57
58 58 Schedule files to be version controlled and added to the repository.
59 59
60 60 The files will be added to the repository at the next commit. To
61 61 undo an add before that, see hg revert.
62 62
63 63 If no names are given, add all files in the repository.
64 64 """
65 65
66 66 names = []
67 67 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
68 68 if exact:
69 69 if ui.verbose:
70 70 ui.status(_('adding %s\n') % rel)
71 71 names.append(abs)
72 72 elif repo.dirstate.state(abs) == '?':
73 73 ui.status(_('adding %s\n') % rel)
74 74 names.append(abs)
75 75 if not opts.get('dry_run'):
76 76 repo.add(names)
77 77
78 78 def addremove(ui, repo, *pats, **opts):
79 79 """add all new files, delete all missing files
80 80
81 81 Add all new files and remove all missing files from the repository.
82 82
83 83 New files are ignored if they match any of the patterns in .hgignore. As
84 84 with add, these changes take effect at the next commit.
85 85
86 86 Use the -s option to detect renamed files. With a parameter > 0,
87 87 this compares every removed file with every added file and records
88 88 those similar enough as renames. This option takes a percentage
89 89 between 0 (disabled) and 100 (files must be identical) as its
90 90 parameter. Detecting renamed files this way can be expensive.
91 91 """
92 92 sim = float(opts.get('similarity') or 0)
93 93 if sim < 0 or sim > 100:
94 94 raise util.Abort(_('similarity must be between 0 and 100'))
95 95 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
96 96
97 97 def annotate(ui, repo, *pats, **opts):
98 98 """show changeset information per file line
99 99
100 100 List changes in files, showing the revision id responsible for each line
101 101
102 102 This command is useful to discover who did a change or when a change took
103 103 place.
104 104
105 105 Without the -a option, annotate will avoid processing files it
106 106 detects as binary. With -a, annotate will generate an annotation
107 107 anyway, probably with undesirable results.
108 108 """
109 109 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
110 110
111 111 if not pats:
112 112 raise util.Abort(_('at least one file name or pattern required'))
113 113
114 114 opmap = [['user', lambda x: ui.shortuser(x.user())],
115 115 ['number', lambda x: str(x.rev())],
116 116 ['changeset', lambda x: short(x.node())],
117 117 ['date', getdate], ['follow', lambda x: x.path()]]
118 118 if (not opts['user'] and not opts['changeset'] and not opts['date']
119 119 and not opts['follow']):
120 120 opts['number'] = 1
121 121
122 122 ctx = repo.changectx(opts['rev'])
123 123
124 124 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
125 125 node=ctx.node()):
126 126 fctx = ctx.filectx(abs)
127 127 if not opts['text'] and util.binary(fctx.data()):
128 128 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
129 129 continue
130 130
131 131 lines = fctx.annotate(follow=opts.get('follow'))
132 132 pieces = []
133 133
134 134 for o, f in opmap:
135 135 if opts[o]:
136 136 l = [f(n) for n, dummy in lines]
137 137 if l:
138 138 m = max(map(len, l))
139 139 pieces.append(["%*s" % (m, x) for x in l])
140 140
141 141 if pieces:
142 142 for p, l in zip(zip(*pieces), lines):
143 143 ui.write("%s: %s" % (" ".join(p), l[1]))
144 144
145 145 def archive(ui, repo, dest, **opts):
146 146 '''create unversioned archive of a repository revision
147 147
148 148 By default, the revision used is the parent of the working
149 149 directory; use "-r" to specify a different revision.
150 150
151 151 To specify the type of archive to create, use "-t". Valid
152 152 types are:
153 153
154 154 "files" (default): a directory full of files
155 155 "tar": tar archive, uncompressed
156 156 "tbz2": tar archive, compressed using bzip2
157 157 "tgz": tar archive, compressed using gzip
158 158 "uzip": zip archive, uncompressed
159 159 "zip": zip archive, compressed using deflate
160 160
161 161 The exact name of the destination archive or directory is given
162 162 using a format string; see "hg help export" for details.
163 163
164 164 Each member added to an archive file has a directory prefix
165 165 prepended. Use "-p" to specify a format string for the prefix.
166 166 The default is the basename of the archive, with suffixes removed.
167 167 '''
168 168
169 169 node = repo.changectx(opts['rev']).node()
170 170 dest = cmdutil.make_filename(repo, dest, node)
171 171 if os.path.realpath(dest) == repo.root:
172 172 raise util.Abort(_('repository root cannot be destination'))
173 173 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
174 174 kind = opts.get('type') or 'files'
175 175 prefix = opts['prefix']
176 176 if dest == '-':
177 177 if kind == 'files':
178 178 raise util.Abort(_('cannot archive plain files to stdout'))
179 179 dest = sys.stdout
180 180 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
181 181 prefix = cmdutil.make_filename(repo, prefix, node)
182 182 archival.archive(repo, dest, node, kind, not opts['no_decode'],
183 183 matchfn, prefix)
184 184
185 185 def backout(ui, repo, rev, **opts):
186 186 '''reverse effect of earlier changeset
187 187
188 188 Commit the backed out changes as a new changeset. The new
189 189 changeset is a child of the backed out changeset.
190 190
191 191 If you back out a changeset other than the tip, a new head is
192 192 created. This head is the parent of the working directory. If
193 193 you back out an old changeset, your working directory will appear
194 194 old after the backout. You should merge the backout changeset
195 195 with another head.
196 196
197 197 The --merge option remembers the parent of the working directory
198 198 before starting the backout, then merges the new head with that
199 199 changeset afterwards. This saves you from doing the merge by
200 200 hand. The result of this merge is not committed, as for a normal
201 201 merge.'''
202 202
203 203 bail_if_changed(repo)
204 204 op1, op2 = repo.dirstate.parents()
205 205 if op2 != nullid:
206 206 raise util.Abort(_('outstanding uncommitted merge'))
207 207 node = repo.lookup(rev)
208 208 p1, p2 = repo.changelog.parents(node)
209 209 if p1 == nullid:
210 210 raise util.Abort(_('cannot back out a change with no parents'))
211 211 if p2 != nullid:
212 212 if not opts['parent']:
213 213 raise util.Abort(_('cannot back out a merge changeset without '
214 214 '--parent'))
215 215 p = repo.lookup(opts['parent'])
216 216 if p not in (p1, p2):
217 217 raise util.Abort(_('%s is not a parent of %s') %
218 218 (short(p), short(node)))
219 219 parent = p
220 220 else:
221 221 if opts['parent']:
222 222 raise util.Abort(_('cannot use --parent on non-merge changeset'))
223 223 parent = p1
224 224 hg.clean(repo, node, show_stats=False)
225 225 revert_opts = opts.copy()
226 226 revert_opts['date'] = None
227 227 revert_opts['all'] = True
228 228 revert_opts['rev'] = hex(parent)
229 229 revert(ui, repo, **revert_opts)
230 230 commit_opts = opts.copy()
231 231 commit_opts['addremove'] = False
232 232 if not commit_opts['message'] and not commit_opts['logfile']:
233 233 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
234 234 commit_opts['force_editor'] = True
235 235 commit(ui, repo, **commit_opts)
236 236 def nice(node):
237 237 return '%d:%s' % (repo.changelog.rev(node), short(node))
238 238 ui.status(_('changeset %s backs out changeset %s\n') %
239 239 (nice(repo.changelog.tip()), nice(node)))
240 240 if op1 != node:
241 241 if opts['merge']:
242 242 ui.status(_('merging with changeset %s\n') % nice(op1))
243 243 n = _lookup(repo, hex(op1))
244 244 hg.merge(repo, n)
245 245 else:
246 246 ui.status(_('the backout changeset is a new head - '
247 247 'do not forget to merge\n'))
248 248 ui.status(_('(use "backout --merge" '
249 249 'if you want to auto-merge)\n'))
250 250
251 251 def branch(ui, repo, label=None):
252 252 """set or show the current branch name
253 253
254 254 With <name>, set the current branch name. Otherwise, show the
255 255 current branch name.
256 256 """
257 257
258 258 if label is not None:
259 259 repo.opener("branch", "w").write(util.fromlocal(label) + '\n')
260 260 else:
261 261 b = util.tolocal(repo.workingctx().branch())
262 262 if b:
263 263 ui.write("%s\n" % b)
264 264
265 265 def branches(ui, repo):
266 266 """list repository named branches
267 267
268 268 List the repository's named branches.
269 269 """
270 270 b = repo.branchtags()
271 271 l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
272 272 l.sort()
273 273 for r, n, t in l:
274 274 hexfunc = ui.debugflag and hex or short
275 275 if ui.quiet:
276 276 ui.write("%s\n" % t)
277 277 else:
278 278 spaces = " " * (30 - util.locallen(t))
279 279 ui.write("%s%s %s:%s\n" % (t, spaces, -r, hexfunc(n)))
280 280
281 281 def bundle(ui, repo, fname, dest=None, **opts):
282 282 """create a changegroup file
283 283
284 284 Generate a compressed changegroup file collecting changesets not
285 285 found in the other repository.
286 286
287 287 If no destination repository is specified the destination is assumed
288 288 to have all the nodes specified by one or more --base parameters.
289 289
290 290 The bundle file can then be transferred using conventional means and
291 291 applied to another repository with the unbundle or pull command.
292 292 This is useful when direct push and pull are not available or when
293 293 exporting an entire repository is undesirable.
294 294
295 295 Applying bundles preserves all changeset contents including
296 296 permissions, copy/rename information, and revision history.
297 297 """
298 298 revs = opts.get('rev') or None
299 299 if revs:
300 300 revs = [repo.lookup(rev) for rev in revs]
301 301 base = opts.get('base')
302 302 if base:
303 303 if dest:
304 304 raise util.Abort(_("--base is incompatible with specifiying "
305 305 "a destination"))
306 306 base = [repo.lookup(rev) for rev in base]
307 307 # create the right base
308 308 # XXX: nodesbetween / changegroup* should be "fixed" instead
309 309 o = []
310 310 has = {nullid: None}
311 311 for n in base:
312 312 has.update(repo.changelog.reachable(n))
313 313 if revs:
314 314 visit = list(revs)
315 315 else:
316 316 visit = repo.changelog.heads()
317 317 seen = {}
318 318 while visit:
319 319 n = visit.pop(0)
320 320 parents = [p for p in repo.changelog.parents(n) if p not in has]
321 321 if len(parents) == 0:
322 322 o.insert(0, n)
323 323 else:
324 324 for p in parents:
325 325 if p not in seen:
326 326 seen[p] = 1
327 327 visit.append(p)
328 328 else:
329 329 setremoteconfig(ui, opts)
330 330 dest = ui.expandpath(dest or 'default-push', dest or 'default')
331 331 other = hg.repository(ui, dest)
332 332 o = repo.findoutgoing(other, force=opts['force'])
333 333
334 334 if revs:
335 335 cg = repo.changegroupsubset(o, revs, 'bundle')
336 336 else:
337 337 cg = repo.changegroup(o, 'bundle')
338 338 changegroup.writebundle(cg, fname, "HG10BZ")
339 339
340 340 def cat(ui, repo, file1, *pats, **opts):
341 341 """output the current or given revision of files
342 342
343 343 Print the specified files as they were at the given revision.
344 344 If no revision is given, the parent of the working directory is used,
345 345 or tip if no revision is checked out.
346 346
347 347 Output may be to a file, in which case the name of the file is
348 348 given using a format string. The formatting rules are the same as
349 349 for the export command, with the following additions:
350 350
351 351 %s basename of file being printed
352 352 %d dirname of file being printed, or '.' if in repo root
353 353 %p root-relative path name of file being printed
354 354 """
355 355 ctx = repo.changectx(opts['rev'])
356 356 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
357 357 ctx.node()):
358 358 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
359 359 fp.write(ctx.filectx(abs).data())
360 360
361 361 def clone(ui, source, dest=None, **opts):
362 362 """make a copy of an existing repository
363 363
364 364 Create a copy of an existing repository in a new directory.
365 365
366 366 If no destination directory name is specified, it defaults to the
367 367 basename of the source.
368 368
369 369 The location of the source is added to the new repository's
370 370 .hg/hgrc file, as the default to be used for future pulls.
371 371
372 372 For efficiency, hardlinks are used for cloning whenever the source
373 373 and destination are on the same filesystem (note this applies only
374 374 to the repository data, not to the checked out files). Some
375 375 filesystems, such as AFS, implement hardlinking incorrectly, but
376 376 do not report errors. In these cases, use the --pull option to
377 377 avoid hardlinking.
378 378
379 379 You can safely clone repositories and checked out files using full
380 380 hardlinks with
381 381
382 382 $ cp -al REPO REPOCLONE
383 383
384 384 which is the fastest way to clone. However, the operation is not
385 385 atomic (making sure REPO is not modified during the operation is
386 386 up to you) and you have to make sure your editor breaks hardlinks
387 387 (Emacs and most Linux Kernel tools do so).
388 388
389 389 If you use the -r option to clone up to a specific revision, no
390 390 subsequent revisions will be present in the cloned repository.
391 391 This option implies --pull, even on local repositories.
392 392
393 393 See pull for valid source format details.
394 394
395 395 It is possible to specify an ssh:// URL as the destination, but no
396 396 .hg/hgrc and working directory will be created on the remote side.
397 397 Look at the help text for the pull command for important details
398 398 about ssh:// URLs.
399 399 """
400 400 setremoteconfig(ui, opts)
401 401 hg.clone(ui, ui.expandpath(source), dest,
402 402 pull=opts['pull'],
403 403 stream=opts['uncompressed'],
404 404 rev=opts['rev'],
405 405 update=not opts['noupdate'])
406 406
407 407 def commit(ui, repo, *pats, **opts):
408 408 """commit the specified files or all outstanding changes
409 409
410 410 Commit changes to the given files into the repository.
411 411
412 412 If a list of files is omitted, all changes reported by "hg status"
413 413 will be committed.
414 414
415 415 If no commit message is specified, the editor configured in your hgrc
416 416 or in the EDITOR environment variable is started to enter a message.
417 417 """
418 418 message = logmessage(opts)
419 419
420 420 if opts['addremove']:
421 421 cmdutil.addremove(repo, pats, opts)
422 422 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
423 423 if pats:
424 424 status = repo.status(files=fns, match=match)
425 425 modified, added, removed, deleted, unknown = status[:5]
426 426 files = modified + added + removed
427 427 slist = None
428 428 for f in fns:
429 429 if f not in files:
430 430 rf = repo.wjoin(f)
431 431 if f in unknown:
432 432 raise util.Abort(_("file %s not tracked!") % rf)
433 433 try:
434 434 mode = os.lstat(rf)[stat.ST_MODE]
435 435 except OSError:
436 436 raise util.Abort(_("file %s not found!") % rf)
437 437 if stat.S_ISDIR(mode):
438 438 name = f + '/'
439 439 if slist is None:
440 440 slist = list(files)
441 441 slist.sort()
442 442 i = bisect.bisect(slist, name)
443 443 if i >= len(slist) or not slist[i].startswith(name):
444 444 raise util.Abort(_("no match under directory %s!")
445 445 % rf)
446 446 elif not stat.S_ISREG(mode):
447 447 raise util.Abort(_("can't commit %s: "
448 448 "unsupported file type!") % rf)
449 449 else:
450 450 files = []
451 451 try:
452 452 repo.commit(files, message, opts['user'], opts['date'], match,
453 453 force_editor=opts.get('force_editor'))
454 454 except ValueError, inst:
455 455 raise util.Abort(str(inst))
456 456
457 457 def docopy(ui, repo, pats, opts, wlock):
458 458 # called with the repo lock held
459 459 #
460 460 # hgsep => pathname that uses "/" to separate directories
461 461 # ossep => pathname that uses os.sep to separate directories
462 462 cwd = repo.getcwd()
463 463 errors = 0
464 464 copied = []
465 465 targets = {}
466 466
467 467 # abs: hgsep
468 468 # rel: ossep
469 469 # return: hgsep
470 470 def okaytocopy(abs, rel, exact):
471 471 reasons = {'?': _('is not managed'),
472 472 'a': _('has been marked for add'),
473 473 'r': _('has been marked for remove')}
474 474 state = repo.dirstate.state(abs)
475 475 reason = reasons.get(state)
476 476 if reason:
477 477 if state == 'a':
478 478 origsrc = repo.dirstate.copied(abs)
479 479 if origsrc is not None:
480 480 return origsrc
481 481 if exact:
482 482 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
483 483 else:
484 484 return abs
485 485
486 486 # origsrc: hgsep
487 487 # abssrc: hgsep
488 488 # relsrc: ossep
489 489 # target: ossep
490 490 def copy(origsrc, abssrc, relsrc, target, exact):
491 491 abstarget = util.canonpath(repo.root, cwd, target)
492 492 reltarget = util.pathto(cwd, abstarget)
493 493 prevsrc = targets.get(abstarget)
494 494 if prevsrc is not None:
495 495 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
496 496 (reltarget, util.localpath(abssrc),
497 497 util.localpath(prevsrc)))
498 498 return
499 499 if (not opts['after'] and os.path.exists(reltarget) or
500 500 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
501 501 if not opts['force']:
502 502 ui.warn(_('%s: not overwriting - file exists\n') %
503 503 reltarget)
504 504 return
505 505 if not opts['after'] and not opts.get('dry_run'):
506 506 os.unlink(reltarget)
507 507 if opts['after']:
508 508 if not os.path.exists(reltarget):
509 509 return
510 510 else:
511 511 targetdir = os.path.dirname(reltarget) or '.'
512 512 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
513 513 os.makedirs(targetdir)
514 514 try:
515 515 restore = repo.dirstate.state(abstarget) == 'r'
516 516 if restore and not opts.get('dry_run'):
517 517 repo.undelete([abstarget], wlock)
518 518 try:
519 519 if not opts.get('dry_run'):
520 520 util.copyfile(relsrc, reltarget)
521 521 restore = False
522 522 finally:
523 523 if restore:
524 524 repo.remove([abstarget], wlock)
525 525 except IOError, inst:
526 526 if inst.errno == errno.ENOENT:
527 527 ui.warn(_('%s: deleted in working copy\n') % relsrc)
528 528 else:
529 529 ui.warn(_('%s: cannot copy - %s\n') %
530 530 (relsrc, inst.strerror))
531 531 errors += 1
532 532 return
533 533 if ui.verbose or not exact:
534 534 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
535 535 targets[abstarget] = abssrc
536 536 if abstarget != origsrc and not opts.get('dry_run'):
537 537 repo.copy(origsrc, abstarget, wlock)
538 538 copied.append((abssrc, relsrc, exact))
539 539
540 540 # pat: ossep
541 541 # dest ossep
542 542 # srcs: list of (hgsep, hgsep, ossep, bool)
543 543 # return: function that takes hgsep and returns ossep
544 544 def targetpathfn(pat, dest, srcs):
545 545 if os.path.isdir(pat):
546 546 abspfx = util.canonpath(repo.root, cwd, pat)
547 547 abspfx = util.localpath(abspfx)
548 548 if destdirexists:
549 549 striplen = len(os.path.split(abspfx)[0])
550 550 else:
551 551 striplen = len(abspfx)
552 552 if striplen:
553 553 striplen += len(os.sep)
554 554 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
555 555 elif destdirexists:
556 556 res = lambda p: os.path.join(dest,
557 557 os.path.basename(util.localpath(p)))
558 558 else:
559 559 res = lambda p: dest
560 560 return res
561 561
562 562 # pat: ossep
563 563 # dest ossep
564 564 # srcs: list of (hgsep, hgsep, ossep, bool)
565 565 # return: function that takes hgsep and returns ossep
566 566 def targetpathafterfn(pat, dest, srcs):
567 567 if util.patkind(pat, None)[0]:
568 568 # a mercurial pattern
569 569 res = lambda p: os.path.join(dest,
570 570 os.path.basename(util.localpath(p)))
571 571 else:
572 572 abspfx = util.canonpath(repo.root, cwd, pat)
573 573 if len(abspfx) < len(srcs[0][0]):
574 574 # A directory. Either the target path contains the last
575 575 # component of the source path or it does not.
576 576 def evalpath(striplen):
577 577 score = 0
578 578 for s in srcs:
579 579 t = os.path.join(dest, util.localpath(s[0])[striplen:])
580 580 if os.path.exists(t):
581 581 score += 1
582 582 return score
583 583
584 584 abspfx = util.localpath(abspfx)
585 585 striplen = len(abspfx)
586 586 if striplen:
587 587 striplen += len(os.sep)
588 588 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
589 589 score = evalpath(striplen)
590 590 striplen1 = len(os.path.split(abspfx)[0])
591 591 if striplen1:
592 592 striplen1 += len(os.sep)
593 593 if evalpath(striplen1) > score:
594 594 striplen = striplen1
595 595 res = lambda p: os.path.join(dest,
596 596 util.localpath(p)[striplen:])
597 597 else:
598 598 # a file
599 599 if destdirexists:
600 600 res = lambda p: os.path.join(dest,
601 601 os.path.basename(util.localpath(p)))
602 602 else:
603 603 res = lambda p: dest
604 604 return res
605 605
606 606
607 607 pats = util.expand_glob(pats)
608 608 if not pats:
609 609 raise util.Abort(_('no source or destination specified'))
610 610 if len(pats) == 1:
611 611 raise util.Abort(_('no destination specified'))
612 612 dest = pats.pop()
613 613 destdirexists = os.path.isdir(dest)
614 614 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
615 615 raise util.Abort(_('with multiple sources, destination must be an '
616 616 'existing directory'))
617 617 if opts['after']:
618 618 tfn = targetpathafterfn
619 619 else:
620 620 tfn = targetpathfn
621 621 copylist = []
622 622 for pat in pats:
623 623 srcs = []
624 624 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
625 625 globbed=True):
626 626 origsrc = okaytocopy(abssrc, relsrc, exact)
627 627 if origsrc:
628 628 srcs.append((origsrc, abssrc, relsrc, exact))
629 629 if not srcs:
630 630 continue
631 631 copylist.append((tfn(pat, dest, srcs), srcs))
632 632 if not copylist:
633 633 raise util.Abort(_('no files to copy'))
634 634
635 635 for targetpath, srcs in copylist:
636 636 for origsrc, abssrc, relsrc, exact in srcs:
637 637 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
638 638
639 639 if errors:
640 640 ui.warn(_('(consider using --after)\n'))
641 641 return errors, copied
642 642
643 643 def copy(ui, repo, *pats, **opts):
644 644 """mark files as copied for the next commit
645 645
646 646 Mark dest as having copies of source files. If dest is a
647 647 directory, copies are put in that directory. If dest is a file,
648 648 there can only be one source.
649 649
650 650 By default, this command copies the contents of files as they
651 651 stand in the working directory. If invoked with --after, the
652 652 operation is recorded, but no copying is performed.
653 653
654 654 This command takes effect in the next commit. To undo a copy
655 655 before that, see hg revert.
656 656 """
657 657 wlock = repo.wlock(0)
658 658 errs, copied = docopy(ui, repo, pats, opts, wlock)
659 659 return errs
660 660
661 661 def debugancestor(ui, index, rev1, rev2):
662 662 """find the ancestor revision of two revisions in a given index"""
663 663 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
664 664 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
665 665 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
666 666
667 667 def debugcomplete(ui, cmd='', **opts):
668 668 """returns the completion list associated with the given command"""
669 669
670 670 if opts['options']:
671 671 options = []
672 672 otables = [globalopts]
673 673 if cmd:
674 674 aliases, entry = findcmd(ui, cmd)
675 675 otables.append(entry[1])
676 676 for t in otables:
677 677 for o in t:
678 678 if o[0]:
679 679 options.append('-%s' % o[0])
680 680 options.append('--%s' % o[1])
681 681 ui.write("%s\n" % "\n".join(options))
682 682 return
683 683
684 684 clist = findpossible(ui, cmd).keys()
685 685 clist.sort()
686 686 ui.write("%s\n" % "\n".join(clist))
687 687
688 688 def debugrebuildstate(ui, repo, rev=None):
689 689 """rebuild the dirstate as it would look like for the given revision"""
690 690 if not rev:
691 691 rev = repo.changelog.tip()
692 692 else:
693 693 rev = repo.lookup(rev)
694 694 change = repo.changelog.read(rev)
695 695 n = change[0]
696 696 files = repo.manifest.read(n)
697 697 wlock = repo.wlock()
698 698 repo.dirstate.rebuild(rev, files)
699 699
700 700 def debugcheckstate(ui, repo):
701 701 """validate the correctness of the current dirstate"""
702 702 parent1, parent2 = repo.dirstate.parents()
703 703 repo.dirstate.read()
704 704 dc = repo.dirstate.map
705 705 keys = dc.keys()
706 706 keys.sort()
707 707 m1n = repo.changelog.read(parent1)[0]
708 708 m2n = repo.changelog.read(parent2)[0]
709 709 m1 = repo.manifest.read(m1n)
710 710 m2 = repo.manifest.read(m2n)
711 711 errors = 0
712 712 for f in dc:
713 713 state = repo.dirstate.state(f)
714 714 if state in "nr" and f not in m1:
715 715 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
716 716 errors += 1
717 717 if state in "a" and f in m1:
718 718 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
719 719 errors += 1
720 720 if state in "m" and f not in m1 and f not in m2:
721 721 ui.warn(_("%s in state %s, but not in either manifest\n") %
722 722 (f, state))
723 723 errors += 1
724 724 for f in m1:
725 725 state = repo.dirstate.state(f)
726 726 if state not in "nrm":
727 727 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
728 728 errors += 1
729 729 if errors:
730 730 error = _(".hg/dirstate inconsistent with current parent's manifest")
731 731 raise util.Abort(error)
732 732
733 733 def showconfig(ui, repo, *values, **opts):
734 734 """show combined config settings from all hgrc files
735 735
736 736 With no args, print names and values of all config items.
737 737
738 738 With one arg of the form section.name, print just the value of
739 739 that config item.
740 740
741 741 With multiple args, print names and values of all config items
742 742 with matching section names."""
743 743
744 744 untrusted = bool(opts.get('untrusted'))
745 745 if values:
746 746 if len([v for v in values if '.' in v]) > 1:
747 747 raise util.Abort(_('only one config item permitted'))
748 748 for section, name, value in ui.walkconfig(untrusted=untrusted):
749 749 sectname = section + '.' + name
750 750 if values:
751 751 for v in values:
752 752 if v == section:
753 753 ui.write('%s=%s\n' % (sectname, value))
754 754 elif v == sectname:
755 755 ui.write(value, '\n')
756 756 else:
757 757 ui.write('%s=%s\n' % (sectname, value))
758 758
759 759 def debugsetparents(ui, repo, rev1, rev2=None):
760 760 """manually set the parents of the current working directory
761 761
762 762 This is useful for writing repository conversion tools, but should
763 763 be used with care.
764 764 """
765 765
766 766 if not rev2:
767 767 rev2 = hex(nullid)
768 768
769 769 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
770 770
771 771 def debugstate(ui, repo):
772 772 """show the contents of the current dirstate"""
773 773 repo.dirstate.read()
774 774 dc = repo.dirstate.map
775 775 keys = dc.keys()
776 776 keys.sort()
777 777 for file_ in keys:
778 778 if dc[file_][3] == -1:
779 779 # Pad or slice to locale representation
780 780 locale_len = len(time.strftime("%x %X", time.localtime(0)))
781 781 timestr = 'unset'
782 782 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
783 783 else:
784 784 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
785 785 ui.write("%c %3o %10d %s %s\n"
786 786 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
787 787 timestr, file_))
788 788 for f in repo.dirstate.copies():
789 789 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
790 790
791 791 def debugdata(ui, file_, rev):
792 792 """dump the contents of an data file revision"""
793 793 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
794 794 file_[:-2] + ".i", file_, 0)
795 795 try:
796 796 ui.write(r.revision(r.lookup(rev)))
797 797 except KeyError:
798 798 raise util.Abort(_('invalid revision identifier %s') % rev)
799 799
800 800 def debugdate(ui, date, range=None, **opts):
801 801 """parse and display a date"""
802 802 if opts["extended"]:
803 803 d = util.parsedate(date, util.extendeddateformats)
804 804 else:
805 805 d = util.parsedate(date)
806 806 ui.write("internal: %s %s\n" % d)
807 807 ui.write("standard: %s\n" % util.datestr(d))
808 808 if range:
809 809 m = util.matchdate(range)
810 810 ui.write("match: %s\n" % m(d[0]))
811 811
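# Example of the two output lines written above (the date itself is
# arbitrary): `hg debugdate "2006-12-25"` prints an "internal:" line with the
# Unix timestamp and timezone offset from util.parsedate(), then a
# "standard:" line with the same date re-rendered by util.datestr(); with a
# range argument a third "match:" line reports whether the date falls in it.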
812 812 def debugindex(ui, file_):
813 813 """dump the contents of an index file"""
814 814 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
815 815 ui.write(" rev offset length base linkrev" +
816 816 " nodeid p1 p2\n")
817 817 for i in xrange(r.count()):
818 818 node = r.node(i)
819 819 pp = r.parents(node)
820 820 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
821 821 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
822 822 short(node), short(pp[0]), short(pp[1])))
823 823
824 824 def debugindexdot(ui, file_):
825 825 """dump an index DAG as a .dot file"""
826 826 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
827 827 ui.write("digraph G {\n")
828 828 for i in xrange(r.count()):
829 829 node = r.node(i)
830 830 pp = r.parents(node)
831 831 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
832 832 if pp[1] != nullid:
833 833 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
834 834 ui.write("}\n")
835 835
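# Usage sketch (the revlog path is an assumption; it depends on whether the
# repository uses the newer "store" layout):
#     hg debugindexdot .hg/store/00changelog.i | dot -Tpng -o dag.png
# renders the revision DAG emitted above with graphviz.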
836 836 def debuginstall(ui):
837 837 '''test Mercurial installation'''
838 838
839 839 def writetemp(contents):
840 840 (fd, name) = tempfile.mkstemp()
841 841 f = os.fdopen(fd, "wb")
842 842 f.write(contents)
843 843 f.close()
844 844 return name
845 845
846 846 problems = 0
847 847
848 848 # encoding
849 849 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
850 850 try:
851 851 util.fromlocal("test")
852 852 except util.Abort, inst:
853 853 ui.write(" %s\n" % inst)
854 854 ui.write(_(" (check that your locale is properly set)\n"))
855 855 problems += 1
856 856
857 857 # compiled modules
858 858 ui.status(_("Checking extensions...\n"))
859 859 try:
860 860 import bdiff, mpatch, base85
861 861 except Exception, inst:
862 862 ui.write(" %s\n" % inst)
863 863 ui.write(_(" One or more extensions could not be found"))
864 864 ui.write(_(" (check that you compiled the extensions)\n"))
865 865 problems += 1
866 866
867 867 # templates
868 868 ui.status(_("Checking templates...\n"))
869 869 try:
870 870 import templater
871 871 t = templater.templater(templater.templatepath("map-cmdline.default"))
872 872 except Exception, inst:
873 873 ui.write(" %s\n" % inst)
874 874 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
875 875 problems += 1
876 876
877 877 # patch
878 878 ui.status(_("Checking patch...\n"))
879 879 path = os.environ.get('PATH', '')
880 880 patcher = util.find_in_path('gpatch', path,
881 881 util.find_in_path('patch', path, None))
882 882 if not patcher:
883 883 ui.write(_(" Can't find patch or gpatch in PATH\n"))
884 884 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
885 885 problems += 1
886 886 else:
887 887 # actually attempt a patch here
888 888 a = "1\n2\n3\n4\n"
889 889 b = "1\n2\n3\ninsert\n4\n"
890 890 d = mdiff.unidiff(a, None, b, None, "a")
891 891 fa = writetemp(a)
892 892 fd = writetemp(d)
893 893 fp = os.popen('%s %s %s' % (patcher, fa, fd))
894 894 files = []
895 895 output = ""
896 896 for line in fp:
897 897 output += line
898 898 if line.startswith('patching file '):
899 899 pf = util.parse_patch_output(line.rstrip())
900 900 files.append(pf)
901 901 if files != [fa]:
902 902 ui.write(_(" unexpected patch output!"))
903 903 ui.write(_(" (you may have an incompatible version of patch)\n"))
904 904 ui.write(output)
905 905 problems += 1
906 906 a = file(fa).read()
907 907 if a != b:
908 908 ui.write(_(" patch test failed!"))
909 909 ui.write(_(" (you may have an incompatible version of patch)\n"))
910 910 problems += 1
911 911 os.unlink(fa)
912 912 os.unlink(fd)
913 913
914 914 # merge helper
915 915 ui.status(_("Checking merge helper...\n"))
916 916 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
917 917 or "hgmerge")
918 918 cmdpath = util.find_in_path(cmd, path)
919 919 if not cmdpath:
920 920 cmdpath = util.find_in_path(cmd.split()[0], path)
921 921 if not cmdpath:
922 922 if cmd == 'hgmerge':
923 923 ui.write(_(" No merge helper set and can't find default"
924 924 " hgmerge script in PATH\n"))
925 925 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
926 926 else:
927 927 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
928 928 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
929 929 problems += 1
930 930 else:
931 931 # actually attempt a patch here
932 932 fa = writetemp("1\n2\n3\n4\n")
933 933 fl = writetemp("1\n2\n3\ninsert\n4\n")
934 934 fr = writetemp("begin\n1\n2\n3\n4\n")
935 935 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
936 936 if r:
937 937 ui.write(_(" got unexpected merge error %d!") % r)
938 938 problems += 1
939 939 m = file(fl).read()
940 940 if m != "begin\n1\n2\n3\ninsert\n4\n":
941 941 ui.write(_(" got unexpected merge results!"))
942 942 ui.write(_(" (your merge helper may have the"
943 943 " wrong argument order)\n"))
944 944 ui.write(m)
945 945 os.unlink(fa)
946 946 os.unlink(fl)
947 947 os.unlink(fr)
948 948
949 949 # editor
950 950 ui.status(_("Checking commit editor...\n"))
951 951 editor = (os.environ.get("HGEDITOR") or
952 952 ui.config("ui", "editor") or
953 953 os.environ.get("EDITOR", "vi"))
954 954 cmdpath = util.find_in_path(editor, path)
955 955 if not cmdpath:
956 956 cmdpath = util.find_in_path(editor.split()[0], path)
957 957 if not cmdpath:
958 958 if editor == 'vi':
959 959 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
960 960 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
961 961 else:
962 962 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
963 963 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
964 964 problems += 1
965 965
966 966 # check username
967 967 ui.status(_("Checking username...\n"))
968 968 user = os.environ.get("HGUSER")
969 969 if user is None:
970 970 user = ui.config("ui", "username")
971 971 if user is None:
972 972 user = os.environ.get("EMAIL")
973 973 if not user:
974 974 ui.warn(" ")
975 975 ui.username()
976 976 ui.write(_(" (specify a username in your .hgrc file)\n"))
977 977
978 978 if not problems:
979 979 ui.status(_("No problems detected\n"))
980 980 else:
981 981 ui.write(_("%s problems detected,"
982 982 " please check your install!\n") % problems)
983 983
984 984 return problems
985 985
986 986 def debugrename(ui, repo, file1, *pats, **opts):
987 987 """dump rename information"""
988 988
989 989 ctx = repo.changectx(opts.get('rev', 'tip'))
990 990 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
991 991 ctx.node()):
992 992 m = ctx.filectx(abs).renamed()
993 993 if m:
994 994 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
995 995 else:
996 996 ui.write(_("%s not renamed\n") % rel)
997 997
998 998 def debugwalk(ui, repo, *pats, **opts):
999 999 """show how files match on given patterns"""
1000 1000 items = list(cmdutil.walk(repo, pats, opts))
1001 1001 if not items:
1002 1002 return
1003 1003 fmt = '%%s %%-%ds %%-%ds %%s' % (
1004 1004 max([len(abs) for (src, abs, rel, exact) in items]),
1005 1005 max([len(rel) for (src, abs, rel, exact) in items]))
1006 1006 for src, abs, rel, exact in items:
1007 1007 line = fmt % (src, abs, rel, exact and 'exact' or '')
1008 1008 ui.write("%s\n" % line.rstrip())
1009 1009
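# The columns printed above are: the match source reported by cmdutil.walk,
# the repository-relative path, the path relative to the current directory,
# and the word "exact" when the file was named literally on the command line
# rather than matched by a pattern.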
1010 1010 def diff(ui, repo, *pats, **opts):
1011 1011 """diff repository (or selected files)
1012 1012
1013 1013 Show differences between revisions for the specified files.
1014 1014
1015 1015 Differences between files are shown using the unified diff format.
1016 1016
1017 1017 NOTE: diff may generate unexpected results for merges, as it will
1018 1018 default to comparing against the working directory's first parent
1019 1019 changeset if no revisions are specified.
1020 1020
1021 1021 When two revision arguments are given, then changes are shown
1022 1022 between those revisions. If only one revision is specified then
1023 1023 that revision is compared to the working directory, and, when no
1024 1024 revisions are specified, the working directory files are compared
1025 1025 to its parent.
1026 1026
1027 1027 Without the -a option, diff will avoid generating diffs of files
1028 1028 it detects as binary. With -a, diff will generate a diff anyway,
1029 1029 probably with undesirable results.
1030 1030 """
1031 1031 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1032 1032
1033 1033 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1034 1034
1035 1035 patch.diff(repo, node1, node2, fns, match=matchfn,
1036 1036 opts=patch.diffopts(ui, opts))
1037 1037
1038 1038 def export(ui, repo, *changesets, **opts):
1039 1039 """dump the header and diffs for one or more changesets
1040 1040
1041 1041 Print the changeset header and diffs for one or more revisions.
1042 1042
1043 1043 The information shown in the changeset header is: author,
1044 1044 changeset hash, parent(s) and commit comment.
1045 1045
1046 1046 NOTE: export may generate unexpected diff output for merge changesets,
1047 1047 as it will compare the merge changeset against its first parent only.
1048 1048
1049 1049 Output may be to a file, in which case the name of the file is
1050 1050 given using a format string. The formatting rules are as follows:
1051 1051
1052 1052 %% literal "%" character
1053 1053 %H changeset hash (40 hexadecimal digits)
1054 1054 %N number of patches being generated
1055 1055 %R changeset revision number
1056 1056 %b basename of the exporting repository
1057 1057 %h short-form changeset hash (12 hexadecimal digits)
1058 1058 %n zero-padded sequence number, starting at 1
1059 1059 %r zero-padded changeset revision number
1060 1060
1061 1061 Without the -a option, export will avoid generating diffs of files
1062 1062 it detects as binary. With -a, export will generate a diff anyway,
1063 1063 probably with undesirable results.
1064 1064
1065 1065 With the --switch-parent option, the diff will be against the second
1066 1066 parent. It can be useful to review a merge.
1067 1067 """
1068 1068 if not changesets:
1069 1069 raise util.Abort(_("export requires at least one changeset"))
1070 1070 revs = cmdutil.revrange(repo, changesets)
1071 1071 if len(revs) > 1:
1072 1072 ui.note(_('exporting patches:\n'))
1073 1073 else:
1074 1074 ui.note(_('exporting patch:\n'))
1075 1075 patch.export(repo, revs, template=opts['output'],
1076 1076 switch_parent=opts['switch_parent'],
1077 1077 opts=patch.diffopts(ui, opts))
1078 1078
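# Usage sketch of the output template (revision numbers and hashes are made
# up): `hg export -o "%R-%h.patch" 2 5` writes one file per revision, e.g.
# "2-9a3b5c7d9e1f.patch" and "5-0f1e2d3c4b5a.patch", each holding the
# changeset header and diff described in the docstring.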
1079 1079 def grep(ui, repo, pattern, *pats, **opts):
1080 1080 """search for a pattern in specified files and revisions
1081 1081
1082 1082 Search revisions of files for a regular expression.
1083 1083
1084 1084 This command behaves differently from Unix grep. It only accepts
1085 1085 Python/Perl regexps. It searches repository history, not the
1086 1086 working directory. It always prints the revision number in which
1087 1087 a match appears.
1088 1088
1089 1089 By default, grep only prints output for the first revision of a
1090 1090 file in which it finds a match. To get it to print every revision
1091 1091 that contains a change in match status ("-" for a match that
1092 1092 becomes a non-match, or "+" for a non-match that becomes a match),
1093 1093 use the --all flag.
1094 1094 """
1095 1095 reflags = 0
1096 1096 if opts['ignore_case']:
1097 1097 reflags |= re.I
1098 1098 regexp = re.compile(pattern, reflags)
1099 1099 sep, eol = ':', '\n'
1100 1100 if opts['print0']:
1101 1101 sep = eol = '\0'
1102 1102
1103 1103 fcache = {}
1104 1104 def getfile(fn):
1105 1105 if fn not in fcache:
1106 1106 fcache[fn] = repo.file(fn)
1107 1107 return fcache[fn]
1108 1108
1109 1109 def matchlines(body):
1110 1110 begin = 0
1111 1111 linenum = 0
1112 1112 while True:
1113 1113 match = regexp.search(body, begin)
1114 1114 if not match:
1115 1115 break
1116 1116 mstart, mend = match.span()
1117 1117 linenum += body.count('\n', begin, mstart) + 1
1118 1118 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1119 1119 lend = body.find('\n', mend)
1120 1120 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1121 1121 begin = lend + 1
1122 1122
1123 1123 class linestate(object):
1124 1124 def __init__(self, line, linenum, colstart, colend):
1125 1125 self.line = line
1126 1126 self.linenum = linenum
1127 1127 self.colstart = colstart
1128 1128 self.colend = colend
1129 1129
1130 1130 def __eq__(self, other):
1131 1131 return self.line == other.line
1132 1132
1133 1133 matches = {}
1134 1134 copies = {}
1135 1135 def grepbody(fn, rev, body):
1136 1136 matches[rev].setdefault(fn, [])
1137 1137 m = matches[rev][fn]
1138 1138 for lnum, cstart, cend, line in matchlines(body):
1139 1139 s = linestate(line, lnum, cstart, cend)
1140 1140 m.append(s)
1141 1141
1142 1142 def difflinestates(a, b):
1143 1143 sm = difflib.SequenceMatcher(None, a, b)
1144 1144 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1145 1145 if tag == 'insert':
1146 1146 for i in xrange(blo, bhi):
1147 1147 yield ('+', b[i])
1148 1148 elif tag == 'delete':
1149 1149 for i in xrange(alo, ahi):
1150 1150 yield ('-', a[i])
1151 1151 elif tag == 'replace':
1152 1152 for i in xrange(alo, ahi):
1153 1153 yield ('-', a[i])
1154 1154 for i in xrange(blo, bhi):
1155 1155 yield ('+', b[i])
1156 1156
1157 1157 prev = {}
1158 1158 def display(fn, rev, states, prevstates):
1159 1159 counts = {'-': 0, '+': 0}
1160 1160 filerevmatches = {}
1161 1161 if incrementing or not opts['all']:
1162 1162 a, b, r = prevstates, states, rev
1163 1163 else:
1164 1164 a, b, r = states, prevstates, prev.get(fn, -1)
1165 1165 for change, l in difflinestates(a, b):
1166 1166 cols = [fn, str(r)]
1167 1167 if opts['line_number']:
1168 1168 cols.append(str(l.linenum))
1169 1169 if opts['all']:
1170 1170 cols.append(change)
1171 1171 if opts['user']:
1172 1172 cols.append(ui.shortuser(get(r)[1]))
1173 1173 if opts['files_with_matches']:
1174 1174 c = (fn, r)
1175 1175 if c in filerevmatches:
1176 1176 continue
1177 1177 filerevmatches[c] = 1
1178 1178 else:
1179 1179 cols.append(l.line)
1180 1180 ui.write(sep.join(cols), eol)
1181 1181 counts[change] += 1
1182 1182 return counts['+'], counts['-']
1183 1183
1184 1184 fstate = {}
1185 1185 skip = {}
1186 1186 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1187 1187 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1188 1188 count = 0
1189 1189 incrementing = False
1190 1190 follow = opts.get('follow')
1191 1191 for st, rev, fns in changeiter:
1192 1192 if st == 'window':
1193 1193 incrementing = rev
1194 1194 matches.clear()
1195 1195 elif st == 'add':
1196 1196 mf = repo.changectx(rev).manifest()
1197 1197 matches[rev] = {}
1198 1198 for fn in fns:
1199 1199 if fn in skip:
1200 1200 continue
1201 1201 fstate.setdefault(fn, {})
1202 1202 try:
1203 1203 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1204 1204 if follow:
1205 1205 copied = getfile(fn).renamed(mf[fn])
1206 1206 if copied:
1207 1207 copies.setdefault(rev, {})[fn] = copied[0]
1208 1208 except KeyError:
1209 1209 pass
1210 1210 elif st == 'iter':
1211 1211 states = matches[rev].items()
1212 1212 states.sort()
1213 1213 for fn, m in states:
1214 1214 copy = copies.get(rev, {}).get(fn)
1215 1215 if fn in skip:
1216 1216 if copy:
1217 1217 skip[copy] = True
1218 1218 continue
1219 1219 if incrementing or not opts['all'] or fstate[fn]:
1220 1220 pos, neg = display(fn, rev, m, fstate[fn])
1221 1221 count += pos + neg
1222 1222 if pos and not opts['all']:
1223 1223 skip[fn] = True
1224 1224 if copy:
1225 1225 skip[copy] = True
1226 1226 fstate[fn] = m
1227 1227 if copy:
1228 1228 fstate[copy] = m
1229 1229 prev[fn] = rev
1230 1230
1231 1231 if not incrementing:
1232 1232 fstate = fstate.items()
1233 1233 fstate.sort()
1234 1234 for fn, state in fstate:
1235 1235 if fn in skip:
1236 1236 continue
1237 1237 if fn not in copies.get(prev[fn], {}):
1238 1238 display(fn, rev, {}, state)
1239 1239 return (count == 0 and 1) or 0
1240 1240
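# Output sketch (file name, revision and line are illustrative): matches are
# printed as sep-joined columns, by default
#     filename:revision:matching line text
# with the line number inserted as an extra column when --line-number is
# given; --print0 switches both separator and end-of-line to NUL so the
# output can be fed to "xargs -0".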
1241 1241 def heads(ui, repo, **opts):
1242 1242 """show current repository heads
1243 1243
1244 1244 Show all repository head changesets.
1245 1245
1246 1246 Repository "heads" are changesets that don't have child
1247 1247 changesets. They are where development generally takes place and
1248 1248 are the usual targets for update and merge operations.
1249 1249 """
1250 1250 if opts['rev']:
1251 1251 heads = repo.heads(repo.lookup(opts['rev']))
1252 1252 else:
1253 1253 heads = repo.heads()
1254 1254 displayer = cmdutil.show_changeset(ui, repo, opts)
1255 1255 for n in heads:
1256 1256 displayer.show(changenode=n)
1257 1257
1258 1258 def help_(ui, name=None, with_version=False):
1259 1259 """show help for a command, extension, or list of commands
1260 1260
1261 1261 With no arguments, print a list of commands and short help.
1262 1262
1263 1263 Given a command name, print help for that command.
1264 1264
1265 1265 Given an extension name, print help for that extension, and the
1266 1266 commands it provides."""
1267 1267 option_lists = []
1268 1268
1269 1269 def helpcmd(name):
1270 1270 if with_version:
1271 1271 version_(ui)
1272 1272 ui.write('\n')
1273 1273 aliases, i = findcmd(ui, name)
1274 1274 # synopsis
1275 1275 ui.write("%s\n\n" % i[2])
1276 1276
1277 1277 # description
1278 1278 doc = i[0].__doc__
1279 1279 if not doc:
1280 1280 doc = _("(No help text available)")
1281 1281 if ui.quiet:
1282 1282 doc = doc.splitlines(0)[0]
1283 1283 ui.write("%s\n" % doc.rstrip())
1284 1284
1285 1285 if not ui.quiet:
1286 1286 # aliases
1287 1287 if len(aliases) > 1:
1288 1288 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1289 1289
1290 1290 # options
1291 1291 if i[1]:
1292 1292 option_lists.append(("options", i[1]))
1293 1293
1294 1294 def helplist(select=None):
1295 1295 h = {}
1296 1296 cmds = {}
1297 1297 for c, e in table.items():
1298 1298 f = c.split("|", 1)[0]
1299 1299 if select and not select(f):
1300 1300 continue
1301 1301 if name == "shortlist" and not f.startswith("^"):
1302 1302 continue
1303 1303 f = f.lstrip("^")
1304 1304 if not ui.debugflag and f.startswith("debug"):
1305 1305 continue
1306 1306 doc = e[0].__doc__
1307 1307 if not doc:
1308 1308 doc = _("(No help text available)")
1309 1309 h[f] = doc.splitlines(0)[0].rstrip()
1310 1310 cmds[f] = c.lstrip("^")
1311 1311
1312 1312 fns = h.keys()
1313 1313 fns.sort()
1314 1314 m = max(map(len, fns))
1315 1315 for f in fns:
1316 1316 if ui.verbose:
1317 1317 commands = cmds[f].replace("|",", ")
1318 1318 ui.write(" %s:\n %s\n"%(commands, h[f]))
1319 1319 else:
1320 1320 ui.write(' %-*s %s\n' % (m, f, h[f]))
1321 1321
1322 1322 def helptopic(name):
1323 1323 v = None
1324 1324 for i in help.helptable:
1325 1325 l = i.split('|')
1326 1326 if name in l:
1327 1327 v = i
1328 1328 header = l[-1]
1329 1329 if not v:
1330 1330 raise UnknownCommand(name)
1331 1331
1332 1332 # description
1333 1333 doc = help.helptable[v]
1334 1334 if not doc:
1335 1335 doc = _("(No help text available)")
1336 1336 if callable(doc):
1337 1337 doc = doc()
1338 1338
1339 1339 ui.write("%s\n" % header)
1340 1340 ui.write("%s\n" % doc.rstrip())
1341 1341
1342 1342 def helpext(name):
1343 1343 try:
1344 1344 mod = findext(name)
1345 1345 except KeyError:
1346 1346 raise UnknownCommand(name)
1347 1347
1348 1348 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1349 1349 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1350 1350 for d in doc[1:]:
1351 1351 ui.write(d, '\n')
1352 1352
1353 1353 ui.status('\n')
1354 1354
1355 1355 try:
1356 1356 ct = mod.cmdtable
1357 1357 except AttributeError:
1358 1358 ui.status(_('no commands defined\n'))
1359 1359 return
1360 1360
1361 1361 if ui.verbose:
1362 1362 ui.status(_('list of commands:\n\n'))
1363 1363 else:
1364 1364 ui.status(_('list of commands (use "hg help -v %s" '
1365 1365 'to show aliases and global options):\n\n') % name)
1366 1366
1367 1367 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1368 1368 helplist(modcmds.has_key)
1369 1369
1370 1370 if name and name != 'shortlist':
1371 1371 i = None
1372 1372 for f in (helpcmd, helptopic, helpext):
1373 1373 try:
1374 1374 f(name)
1375 1375 i = None
1376 1376 break
1377 1377 except UnknownCommand, inst:
1378 1378 i = inst
1379 1379 if i:
1380 1380 raise i
1381 1381
1382 1382 else:
1383 1383 # program name
1384 1384 if ui.verbose or with_version:
1385 1385 version_(ui)
1386 1386 else:
1387 1387 ui.status(_("Mercurial Distributed SCM\n"))
1388 1388 ui.status('\n')
1389 1389
1390 1390 # list of commands
1391 1391 if name == "shortlist":
1392 1392 ui.status(_('basic commands (use "hg help" '
1393 1393 'for the full list or option "-v" for details):\n\n'))
1394 1394 elif ui.verbose:
1395 1395 ui.status(_('list of commands:\n\n'))
1396 1396 else:
1397 1397 ui.status(_('list of commands (use "hg help -v" '
1398 1398 'to show aliases and global options):\n\n'))
1399 1399
1400 1400 helplist()
1401 1401
1402 1402 # global options
1403 1403 if ui.verbose:
1404 1404 option_lists.append(("global options", globalopts))
1405 1405
1406 1406 # list all option lists
1407 1407 opt_output = []
1408 1408 for title, options in option_lists:
1409 1409 opt_output.append(("\n%s:\n" % title, None))
1410 1410 for shortopt, longopt, default, desc in options:
1411 1411 if "DEPRECATED" in desc and not ui.verbose: continue
1412 1412 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1413 1413 longopt and " --%s" % longopt),
1414 1414 "%s%s" % (desc,
1415 1415 default
1416 1416 and _(" (default: %s)") % default
1417 1417 or "")))
1418 1418
1419 1419 if opt_output:
1420 1420 opts_len = max([len(line[0]) for line in opt_output if line[1]])
1421 1421 for first, second in opt_output:
1422 1422 if second:
1423 1423 ui.write(" %-*s %s\n" % (opts_len, first, second))
1424 1424 else:
1425 1425 ui.write("%s\n" % first)
1426 1426
1427 1427 def identify(ui, repo):
1428 1428 """print information about the working copy
1429 1429
1430 1430 Print a short summary of the current state of the repo.
1431 1431
1432 1432 This summary identifies the repository state using one or two parent
1433 1433 hash identifiers, followed by a "+" if there are uncommitted changes
1434 1434 in the working directory, followed by a list of tags for this revision.
1435 1435 """
1436 1436 parents = [p for p in repo.dirstate.parents() if p != nullid]
1437 1437 if not parents:
1438 1438 ui.write(_("unknown\n"))
1439 1439 return
1440 1440
1441 1441 hexfunc = ui.debugflag and hex or short
1442 1442 modified, added, removed, deleted = repo.status()[:4]
1443 1443 output = ["%s%s" %
1444 1444 ('+'.join([hexfunc(parent) for parent in parents]),
1445 1445 (modified or added or removed or deleted) and "+" or "")]
1446 1446
1447 1447 if not ui.quiet:
1448 1448
1449 1449 branch = util.tolocal(repo.workingctx().branch())
1450 1450 if branch:
1451 1451 output.append("(%s)" % branch)
1452 1452
1453 1453 # multiple tags for a single parent separated by '/'
1454 1454 parenttags = ['/'.join(tags)
1455 1455 for tags in map(repo.nodetags, parents) if tags]
1456 1456 # tags for multiple parents separated by ' + '
1457 1457 if parenttags:
1458 1458 output.append(' + '.join(parenttags))
1459 1459
1460 1460 ui.write("%s\n" % ' '.join(output))
1461 1461
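# Output sketch (hash and tag are illustrative): `hg identify` on a clean
# checkout of the tip prints something like "9a3b5c7d9e1f tip"; a "+" is
# appended to the hash part when the working directory has uncommitted
# changes, and an uncommitted merge shows both parent hashes joined by "+".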
1462 1462 def import_(ui, repo, patch1, *patches, **opts):
1463 1463 """import an ordered set of patches
1464 1464
1465 1465 Import a list of patches and commit them individually.
1466 1466
1467 1467 If there are outstanding changes in the working directory, import
1468 1468 will abort unless given the -f flag.
1469 1469
1470 1470 You can import a patch straight from a mail message. Even patches
1471 1471 as attachments work (the body part must be of type text/plain or
1472 1472 text/x-patch to be used). The From and Subject headers of the email
1473 1473 message are used as the default committer and commit message. All
1474 1474 text/plain body parts before the first diff are added to the commit
1475 1475 message.
1476 1476
1477 1477 If the imported patch was generated by hg export, the user and
1478 1478 description from the patch override values from the message headers
1479 1479 and body. Values given on the command line with -m and -u override these.
1480 1480
1481 1481 To read a patch from standard input, use patch name "-".
1482 1482 """
1483 1483 patches = (patch1,) + patches
1484 1484
1485 1485 if not opts['force']:
1486 1486 bail_if_changed(repo)
1487 1487
1488 1488 d = opts["base"]
1489 1489 strip = opts["strip"]
1490 1490
1491 1491 wlock = repo.wlock()
1492 1492 lock = repo.lock()
1493 1493
1494 1494 for p in patches:
1495 1495 pf = os.path.join(d, p)
1496 1496
1497 1497 if pf == '-':
1498 1498 ui.status(_("applying patch from stdin\n"))
1499 1499 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1500 1500 else:
1501 1501 ui.status(_("applying %s\n") % p)
1502 1502 tmpname, message, user, date = patch.extract(ui, file(pf))
1503 1503
1504 1504 if tmpname is None:
1505 1505 raise util.Abort(_('no diffs found'))
1506 1506
1507 1507 try:
1508 1508 cmdline_message = logmessage(opts)
1509 1509 if cmdline_message:
1510 1510 # pickup the cmdline msg
1511 1511 message = cmdline_message
1512 1512 elif message:
1513 1513 # pickup the patch msg
1514 1514 message = message.strip()
1515 1515 else:
1516 1516 # launch the editor
1517 1517 message = None
1518 1518 ui.debug(_('message:\n%s\n') % message)
1519 1519
1520 1520 files = {}
1521 1521 try:
1522 1522 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1523 1523 files=files)
1524 1524 finally:
1525 1525 files = patch.updatedir(ui, repo, files, wlock=wlock)
1526 1526 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1527 1527 finally:
1528 1528 os.unlink(tmpname)
1529 1529
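# Usage sketch (paths are illustrative):
#     hg import --strip 1 ../fix.patch           # apply a patch file
#     hg export tip | hg -R ../other import -    # read a patch from stdin
# -m/-u on the command line override the message and user taken from the
# patch, as described in the docstring.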
1530 1530 def incoming(ui, repo, source="default", **opts):
1531 1531 """show new changesets found in source
1532 1532
1533 1533 Show new changesets found in the specified path/URL or the default
1534 1534 pull location. These are the changesets that would be pulled if a pull
1535 1535 was requested.
1536 1536
1537 1537 For remote repositories, using --bundle avoids downloading the changesets
1538 1538 twice if the incoming is followed by a pull.
1539 1539
1540 1540 See pull for valid source format details.
1541 1541 """
1542 1542 source = ui.expandpath(source)
1543 1543 setremoteconfig(ui, opts)
1544 1544
1545 1545 other = hg.repository(ui, source)
1546 1546 incoming = repo.findincoming(other, force=opts["force"])
1547 1547 if not incoming:
1548 1548 ui.status(_("no changes found\n"))
1549 1549 return
1550 1550
1551 1551 cleanup = None
1552 1552 try:
1553 1553 fname = opts["bundle"]
1554 1554 if fname or not other.local():
1555 1555 # create a bundle (uncompressed if other repo is not local)
1556 1556 cg = other.changegroup(incoming, "incoming")
1557 1557 bundletype = other.local() and "HG10BZ" or "HG10UN"
1558 1558 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1559 1559 # keep written bundle?
1560 1560 if opts["bundle"]:
1561 1561 cleanup = None
1562 1562 if not other.local():
1563 1563 # use the created uncompressed bundlerepo
1564 1564 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1565 1565
1566 1566 revs = None
1567 1567 if opts['rev']:
1568 1568 revs = [other.lookup(rev) for rev in opts['rev']]
1569 1569 o = other.changelog.nodesbetween(incoming, revs)[0]
1570 1570 if opts['newest_first']:
1571 1571 o.reverse()
1572 1572 displayer = cmdutil.show_changeset(ui, other, opts)
1573 1573 for n in o:
1574 1574 parents = [p for p in other.changelog.parents(n) if p != nullid]
1575 1575 if opts['no_merges'] and len(parents) == 2:
1576 1576 continue
1577 1577 displayer.show(changenode=n)
1578 1578 finally:
1579 1579 if hasattr(other, 'close'):
1580 1580 other.close()
1581 1581 if cleanup:
1582 1582 os.unlink(cleanup)
1583 1583
1584 1584 def init(ui, dest=".", **opts):
1585 1585 """create a new repository in the given directory
1586 1586
1587 1587 Initialize a new repository in the given directory. If the given
1588 1588 directory does not exist, it is created.
1589 1589
1590 1590 If no directory is given, the current directory is used.
1591 1591
1592 1592 It is possible to specify an ssh:// URL as the destination.
1593 1593 Look at the help text for the pull command for important details
1594 1594 about ssh:// URLs.
1595 1595 """
1596 1596 setremoteconfig(ui, opts)
1597 1597 hg.repository(ui, dest, create=1)
1598 1598
1599 1599 def locate(ui, repo, *pats, **opts):
1600 1600 """locate files matching specific patterns
1601 1601
1602 1602 Print all files under Mercurial control whose names match the
1603 1603 given patterns.
1604 1604
1605 This command searches the current directory and its
1606 subdirectories. To search an entire repository, move to the root
1607 of the repository.
1605 This command searches the entire repository by default. To search
1606 just the current directory and its subdirectories, use "--include .".
1608 1607
1609 1608 If no patterns are given to match, this command prints all file
1610 1609 names.
1611 1610
1612 1611 If you want to feed the output of this command into the "xargs"
1613 1612 command, use the "-0" option to both this command and "xargs".
1614 1613 This will avoid the problem of "xargs" treating single filenames
1615 1614 that contain white space as multiple filenames.
1616 1615 """
1617 1616 end = opts['print0'] and '\0' or '\n'
1618 1617 rev = opts['rev']
1619 1618 if rev:
1620 1619 node = repo.lookup(rev)
1621 1620 else:
1622 1621 node = None
1623 1622
1624 1623 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1625 head='(?:.*/|)'):
1624 default='relglob'):
1626 1625 if not node and repo.dirstate.state(abs) == '?':
1627 1626 continue
1628 1627 if opts['fullpath']:
1629 1628 ui.write(os.path.join(repo.root, abs), end)
1630 1629 else:
1631 1630 ui.write(((pats and rel) or abs), end)
1632 1631
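# Example of the relglob default used in the walk above (pattern is
# illustrative): `hg locate "*.py"` now matches Python files anywhere in the
# repository, not only under the current directory; pass --include . (as the
# docstring suggests) or an explicit "glob:" pattern to narrow the search.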
1633 1632 def log(ui, repo, *pats, **opts):
1634 1633 """show revision history of entire repository or files
1635 1634
1636 1635 Print the revision history of the specified files or the entire
1637 1636 project.
1638 1637
1639 1638 File history is shown without following rename or copy history of
1640 1639 files. Use -f/--follow with a file name to follow history across
1641 1640 renames and copies. --follow without a file name will only show
1642 1641 ancestors or descendants of the starting revision. --follow-first
1643 1642 only follows the first parent of merge revisions.
1644 1643
1645 1644 If no revision range is specified, the default is tip:0 unless
1646 1645 --follow is set, in which case the working directory parent is
1647 1646 used as the starting revision.
1648 1647
1649 1648 By default this command outputs: changeset id and hash, tags,
1650 1649 non-trivial parents, user, date and time, and a summary for each
1651 1650 commit. When the -v/--verbose switch is used, the list of changed
1652 1651 files and the full commit message are shown.
1653 1652
1654 1653 NOTE: log -p may generate unexpected diff output for merge
1655 1654 changesets, as it will compare the merge changeset against its
1656 1655 first parent only. Also, the files: list will only reflect files
1657 1656 that are different from BOTH parents.
1658 1657
1659 1658 """
1660 1659
1661 1660 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1662 1661 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1663 1662
1664 1663 if opts['limit']:
1665 1664 try:
1666 1665 limit = int(opts['limit'])
1667 1666 except ValueError:
1668 1667 raise util.Abort(_('limit must be a positive integer'))
1669 1668 if limit <= 0: raise util.Abort(_('limit must be positive'))
1670 1669 else:
1671 1670 limit = sys.maxint
1672 1671 count = 0
1673 1672
1674 1673 if opts['copies'] and opts['rev']:
1675 1674 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1676 1675 else:
1677 1676 endrev = repo.changelog.count()
1678 1677 rcache = {}
1679 1678 ncache = {}
1680 1679 dcache = []
1681 1680 def getrenamed(fn, rev, man):
1682 1681 '''looks up all renames for a file (up to endrev) the first
1683 1682 time the file is given. It indexes on the changerev and only
1684 1683 parses the manifest if linkrev != changerev.
1685 1684 Returns rename info for fn at changerev rev.'''
1686 1685 if fn not in rcache:
1687 1686 rcache[fn] = {}
1688 1687 ncache[fn] = {}
1689 1688 fl = repo.file(fn)
1690 1689 for i in xrange(fl.count()):
1691 1690 node = fl.node(i)
1692 1691 lr = fl.linkrev(node)
1693 1692 renamed = fl.renamed(node)
1694 1693 rcache[fn][lr] = renamed
1695 1694 if renamed:
1696 1695 ncache[fn][node] = renamed
1697 1696 if lr >= endrev:
1698 1697 break
1699 1698 if rev in rcache[fn]:
1700 1699 return rcache[fn][rev]
1701 1700 mr = repo.manifest.rev(man)
1702 1701 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1703 1702 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1704 1703 if not dcache or dcache[0] != man:
1705 1704 dcache[:] = [man, repo.manifest.readdelta(man)]
1706 1705 if fn in dcache[1]:
1707 1706 return ncache[fn].get(dcache[1][fn])
1708 1707 return None
1709 1708
1710 1709 df = False
1711 1710 if opts["date"]:
1712 1711 df = util.matchdate(opts["date"])
1713 1712
1714 1713
1715 1714 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1716 1715 for st, rev, fns in changeiter:
1717 1716 if st == 'add':
1718 1717 changenode = repo.changelog.node(rev)
1719 1718 parents = [p for p in repo.changelog.parentrevs(rev)
1720 1719 if p != nullrev]
1721 1720 if opts['no_merges'] and len(parents) == 2:
1722 1721 continue
1723 1722 if opts['only_merges'] and len(parents) != 2:
1724 1723 continue
1725 1724
1726 1725 if df:
1727 1726 changes = get(rev)
1728 1727 if not df(changes[2][0]):
1729 1728 continue
1730 1729
1731 1730 if opts['keyword']:
1732 1731 changes = get(rev)
1733 1732 miss = 0
1734 1733 for k in [kw.lower() for kw in opts['keyword']]:
1735 1734 if not (k in changes[1].lower() or
1736 1735 k in changes[4].lower() or
1737 1736 k in " ".join(changes[3][:20]).lower()):
1738 1737 miss = 1
1739 1738 break
1740 1739 if miss:
1741 1740 continue
1742 1741
1743 1742 copies = []
1744 1743 if opts.get('copies') and rev:
1745 1744 mf = get(rev)[0]
1746 1745 for fn in get(rev)[3]:
1747 1746 rename = getrenamed(fn, rev, mf)
1748 1747 if rename:
1749 1748 copies.append((fn, rename[0]))
1750 1749 displayer.show(rev, changenode, copies=copies)
1751 1750 elif st == 'iter':
1752 1751 if count == limit: break
1753 1752 if displayer.flush(rev):
1754 1753 count += 1
1755 1754
1756 1755 def manifest(ui, repo, rev=None):
1757 1756 """output the current or given revision of the project manifest
1758 1757
1759 1758 Print a list of version controlled files for the given revision.
1760 1759 If no revision is given, the first parent of the working directory
1761 1760 is used, or tip if no revision is checked out.
1762 1761
1763 1762 The manifest is the list of files being version controlled as of
1764 1763 that revision.
1765 1764
1766 1765 With -v flag, print file permissions. With --debug flag, print
1767 1766 file revision hashes.
1768 1767 """
1769 1768
1770 1769 m = repo.changectx(rev).manifest()
1771 1770 files = m.keys()
1772 1771 files.sort()
1773 1772
1774 1773 for f in files:
1775 1774 if ui.debugflag:
1776 1775 ui.write("%40s " % hex(m[f]))
1777 1776 if ui.verbose:
1778 1777 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1779 1778 ui.write("%s\n" % f)
1780 1779
1781 1780 def merge(ui, repo, node=None, force=None, branch=None):
1782 1781 """merge working directory with another revision
1783 1782
1784 1783 Merge the contents of the current working directory and the
1785 1784 requested revision. Files that changed between either parent are
1786 1785 marked as changed for the next commit and a commit must be
1787 1786 performed before any further updates are allowed.
1788 1787
1789 1788 If no revision is specified, and the working directory's parent is a
1790 1789 head revision with exactly one other head in the repository, that
1791 1790 other head is merged with by default. Otherwise, an explicit
1792 1791 revision to merge with must be provided.
1793 1792 """
1794 1793
1795 1794 if node or branch:
1796 1795 node = _lookup(repo, node, branch)
1797 1796 else:
1798 1797 heads = repo.heads()
1799 1798 if len(heads) > 2:
1800 1799 raise util.Abort(_('repo has %d heads - '
1801 1800 'please merge with an explicit rev') %
1802 1801 len(heads))
1803 1802 if len(heads) == 1:
1804 1803 raise util.Abort(_('there is nothing to merge - '
1805 1804 'use "hg update" instead'))
1806 1805 parent = repo.dirstate.parents()[0]
1807 1806 if parent not in heads:
1808 1807 raise util.Abort(_('working dir not at a head rev - '
1809 1808 'use "hg update" or merge with an explicit rev'))
1810 1809 node = parent == heads[0] and heads[-1] or heads[0]
1811 1810 return hg.merge(repo, node, force=force)
1812 1811
1813 1812 def outgoing(ui, repo, dest=None, **opts):
1814 1813 """show changesets not found in destination
1815 1814
1816 1815 Show changesets not found in the specified destination repository or
1817 1816 the default push location. These are the changesets that would be pushed
1818 1817 if a push was requested.
1819 1818
1820 1819 See pull for valid destination format details.
1821 1820 """
1822 1821 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1823 1822 setremoteconfig(ui, opts)
1824 1823 revs = None
1825 1824 if opts['rev']:
1826 1825 revs = [repo.lookup(rev) for rev in opts['rev']]
1827 1826
1828 1827 other = hg.repository(ui, dest)
1829 1828 o = repo.findoutgoing(other, force=opts['force'])
1830 1829 if not o:
1831 1830 ui.status(_("no changes found\n"))
1832 1831 return
1833 1832 o = repo.changelog.nodesbetween(o, revs)[0]
1834 1833 if opts['newest_first']:
1835 1834 o.reverse()
1836 1835 displayer = cmdutil.show_changeset(ui, repo, opts)
1837 1836 for n in o:
1838 1837 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1839 1838 if opts['no_merges'] and len(parents) == 2:
1840 1839 continue
1841 1840 displayer.show(changenode=n)
1842 1841
1843 1842 def parents(ui, repo, file_=None, **opts):
1844 1843 """show the parents of the working dir or revision
1845 1844
1846 1845 Print the working directory's parent revisions.
1847 1846 """
1848 1847 rev = opts.get('rev')
1849 1848 if rev:
1850 1849 if file_:
1851 1850 ctx = repo.filectx(file_, changeid=rev)
1852 1851 else:
1853 1852 ctx = repo.changectx(rev)
1854 1853 p = [cp.node() for cp in ctx.parents()]
1855 1854 else:
1856 1855 p = repo.dirstate.parents()
1857 1856
1858 1857 displayer = cmdutil.show_changeset(ui, repo, opts)
1859 1858 for n in p:
1860 1859 if n != nullid:
1861 1860 displayer.show(changenode=n)
1862 1861
1863 1862 def paths(ui, repo, search=None):
1864 1863 """show definition of symbolic path names
1865 1864
1866 1865 Show definition of symbolic path name NAME. If no name is given, show
1867 1866 definition of available names.
1868 1867
1869 1868 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1870 1869 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1871 1870 """
1872 1871 if search:
1873 1872 for name, path in ui.configitems("paths"):
1874 1873 if name == search:
1875 1874 ui.write("%s\n" % path)
1876 1875 return
1877 1876 ui.warn(_("not found!\n"))
1878 1877 return 1
1879 1878 else:
1880 1879 for name, path in ui.configitems("paths"):
1881 1880 ui.write("%s = %s\n" % (name, path))
1882 1881
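# Configuration sketch (URLs are illustrative): with an hgrc containing
#     [paths]
#     default = http://example.com/hg/project
#     default-push = ssh://hg@example.com/project
# `hg paths` lists both entries, `hg paths default` prints just that URL,
# and pull/push fall back to these names when no source/destination is given.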
1883 1882 def postincoming(ui, repo, modheads, optupdate):
1884 1883 if modheads == 0:
1885 1884 return
1886 1885 if optupdate:
1887 1886 if modheads == 1:
1888 1887 return hg.update(repo, repo.changelog.tip()) # update
1889 1888 else:
1890 1889 ui.status(_("not updating, since new heads added\n"))
1891 1890 if modheads > 1:
1892 1891 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1893 1892 else:
1894 1893 ui.status(_("(run 'hg update' to get a working copy)\n"))
1895 1894
1896 1895 def pull(ui, repo, source="default", **opts):
1897 1896 """pull changes from the specified source
1898 1897
1899 1898 Pull changes from a remote repository to a local one.
1900 1899
1901 1900 This finds all changes from the repository at the specified path
1902 1901 or URL and adds them to the local repository. By default, this
1903 1902 does not update the copy of the project in the working directory.
1904 1903
1905 1904 Valid URLs are of the form:
1906 1905
1907 1906 local/filesystem/path (or file://local/filesystem/path)
1908 1907 http://[user@]host[:port]/[path]
1909 1908 https://[user@]host[:port]/[path]
1910 1909 ssh://[user@]host[:port]/[path]
1911 1910 static-http://host[:port]/[path]
1912 1911
1913 1912 Paths in the local filesystem can either point to Mercurial
1914 1913 repositories or to bundle files (as created by 'hg bundle' or
1915 1914 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1916 1915 allows access to a Mercurial repository where you simply use a web
1917 1916 server to publish the .hg directory as static content.
1918 1917
1919 1918 Some notes about using SSH with Mercurial:
1920 1919 - SSH requires an accessible shell account on the destination machine
1921 1920 and a copy of hg in the remote path or specified with remotecmd.
1922 1921 - path is relative to the remote user's home directory by default.
1923 1922 Use an extra slash at the start of a path to specify an absolute path:
1924 1923 ssh://example.com//tmp/repository
1925 1924 - Mercurial doesn't use its own compression via SSH; the right thing
1926 1925 to do is to configure it in your ~/.ssh/config, e.g.:
1927 1926 Host *.mylocalnetwork.example.com
1928 1927 Compression no
1929 1928 Host *
1930 1929 Compression yes
1931 1930 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1932 1931 with the --ssh command line option.
1933 1932 """
1934 1933 source = ui.expandpath(source)
1935 1934 setremoteconfig(ui, opts)
1936 1935
1937 1936 other = hg.repository(ui, source)
1938 1937 ui.status(_('pulling from %s\n') % (source))
1939 1938 revs = None
1940 1939 if opts['rev']:
1941 1940 if 'lookup' in other.capabilities:
1942 1941 revs = [other.lookup(rev) for rev in opts['rev']]
1943 1942 else:
1944 1943 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1945 1944 raise util.Abort(error)
1946 1945 modheads = repo.pull(other, heads=revs, force=opts['force'])
1947 1946 return postincoming(ui, repo, modheads, opts['update'])
1948 1947
1949 1948 def push(ui, repo, dest=None, **opts):
1950 1949 """push changes to the specified destination
1951 1950
1952 1951 Push changes from the local repository to the given destination.
1953 1952
1954 1953 This is the symmetrical operation for pull. It helps to move
1955 1954 changes from the current repository to a different one. If the
1956 1955 destination is local this is identical to a pull in that directory
1957 1956 from the current one.
1958 1957
1959 1958 By default, push will refuse to run if it detects the result would
1960 1959 increase the number of remote heads. This generally indicates that
1961 1960 the client has forgotten to sync and merge before pushing.
1962 1961
1963 1962 Valid URLs are of the form:
1964 1963
1965 1964 local/filesystem/path (or file://local/filesystem/path)
1966 1965 ssh://[user@]host[:port]/[path]
1967 1966 http://[user@]host[:port]/[path]
1968 1967 https://[user@]host[:port]/[path]
1969 1968
1970 1969 Look at the help text for the pull command for important details
1971 1970 about ssh:// URLs.
1972 1971
1973 1972 Pushing to http:// and https:// URLs is only possible if this
1974 1973 feature is explicitly enabled on the remote Mercurial server.
1975 1974 """
1976 1975 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1977 1976 setremoteconfig(ui, opts)
1978 1977
1979 1978 other = hg.repository(ui, dest)
1980 1979 ui.status('pushing to %s\n' % (dest))
1981 1980 revs = None
1982 1981 if opts['rev']:
1983 1982 revs = [repo.lookup(rev) for rev in opts['rev']]
1984 1983 r = repo.push(other, opts['force'], revs=revs)
1985 1984 return r == 0
1986 1985
1987 1986 def rawcommit(ui, repo, *pats, **opts):
1988 1987 """raw commit interface (DEPRECATED)
1989 1988
1990 1989 (DEPRECATED)
1991 1990 Low-level commit, for use in helper scripts.
1992 1991
1993 1992 This command is not intended to be used by normal users, as it is
1994 1993 primarily useful for importing from other SCMs.
1995 1994
1996 1995 This command is now deprecated and will be removed in a future
1997 1996 release, please use debugsetparents and commit instead.
1998 1997 """
1999 1998
2000 1999 ui.warn(_("(the rawcommit command is deprecated)\n"))
2001 2000
2002 2001 message = logmessage(opts)
2003 2002
2004 2003 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2005 2004 if opts['files']:
2006 2005 files += open(opts['files']).read().splitlines()
2007 2006
2008 2007 parents = [repo.lookup(p) for p in opts['parent']]
2009 2008
2010 2009 try:
2011 2010 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2012 2011 except ValueError, inst:
2013 2012 raise util.Abort(str(inst))
2014 2013
2015 2014 def recover(ui, repo):
2016 2015 """roll back an interrupted transaction
2017 2016
2018 2017 Recover from an interrupted commit or pull.
2019 2018
2020 2019 This command tries to fix the repository status after an interrupted
2021 2020 operation. It should only be necessary when Mercurial suggests it.
2022 2021 """
2023 2022 if repo.recover():
2024 2023 return hg.verify(repo)
2025 2024 return 1
2026 2025
2027 2026 def remove(ui, repo, *pats, **opts):
2028 2027 """remove the specified files on the next commit
2029 2028
2030 2029 Schedule the indicated files for removal from the repository.
2031 2030
2032 2031 This only removes files from the current branch, not from the
2033 2032 entire project history. If the files still exist in the working
2034 2033 directory, they will be deleted from it. If invoked with --after,
2035 2034 files that have been manually deleted are marked as removed.
2036 2035
2037 2036 This command schedules the files to be removed at the next commit.
2038 2037 To undo a remove before that, see hg revert.
2039 2038
2040 2039 Modified files and added files are not removed by default. To
2041 2040 remove them, use the -f/--force option.
2042 2041 """
2043 2042 names = []
2044 2043 if not opts['after'] and not pats:
2045 2044 raise util.Abort(_('no files specified'))
2046 2045 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2047 2046 exact = dict.fromkeys(files)
2048 2047 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2049 2048 modified, added, removed, deleted, unknown = mardu
2050 2049 remove, forget = [], []
2051 2050 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2052 2051 reason = None
2053 2052 if abs not in deleted and opts['after']:
2054 2053 reason = _('is still present')
2055 2054 elif abs in modified and not opts['force']:
2056 2055 reason = _('is modified (use -f to force removal)')
2057 2056 elif abs in added:
2058 2057 if opts['force']:
2059 2058 forget.append(abs)
2060 2059 continue
2061 2060 reason = _('has been marked for add (use -f to force removal)')
2062 2061 elif abs in unknown:
2063 2062 reason = _('is not managed')
2064 2063 elif abs in removed:
2065 2064 continue
2066 2065 if reason:
2067 2066 if exact:
2068 2067 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2069 2068 else:
2070 2069 if ui.verbose or not exact:
2071 2070 ui.status(_('removing %s\n') % rel)
2072 2071 remove.append(abs)
2073 2072 repo.forget(forget)
2074 2073 repo.remove(remove, unlink=not opts['after'])
2075 2074
2076 2075 def rename(ui, repo, *pats, **opts):
2077 2076 """rename files; equivalent of copy + remove
2078 2077
2079 2078 Mark dest as copies of sources; mark sources for deletion. If
2080 2079 dest is a directory, copies are put in that directory. If dest is
2081 2080 a file, there can only be one source.
2082 2081
2083 2082 By default, this command copies the contents of files as they
2084 2083 stand in the working directory. If invoked with --after, the
2085 2084 operation is recorded, but no copying is performed.
2086 2085
2087 2086 This command takes effect in the next commit. To undo a rename
2088 2087 before that, see hg revert.
2089 2088 """
2090 2089 wlock = repo.wlock(0)
2091 2090 errs, copied = docopy(ui, repo, pats, opts, wlock)
2092 2091 names = []
2093 2092 for abs, rel, exact in copied:
2094 2093 if ui.verbose or not exact:
2095 2094 ui.status(_('removing %s\n') % rel)
2096 2095 names.append(abs)
2097 2096 if not opts.get('dry_run'):
2098 2097 repo.remove(names, True, wlock)
2099 2098 return errs
2100 2099
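# Usage sketch (file names are illustrative): `hg rename src.c dst.c` copies
# the file and schedules src.c for removal at the next commit; with --after
# the rename is only recorded, assuming dst.c has already been moved into
# place by other means.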
2101 2100 def revert(ui, repo, *pats, **opts):
2102 2101 """revert files or dirs to their states as of some revision
2103 2102
2104 2103 With no revision specified, revert the named files or directories
2105 2104 to the contents they had in the parent of the working directory.
2106 2105 This restores the contents of the affected files to an unmodified
2107 2106 state and unschedules adds, removes, copies, and renames. If the
2108 2107 working directory has two parents, you must explicitly specify the
2109 2108 revision to revert to.
2110 2109
2111 2110 Modified files are saved with a .orig suffix before reverting.
2112 2111 To disable these backups, use --no-backup.
2113 2112
2114 2113 Using the -r option, revert the given files or directories to their
2115 2114 contents as of a specific revision. This can be helpful to "roll
2116 2115 back" some or all of a change that should not have been committed.
2117 2116
2118 2117 Revert modifies the working directory. It does not commit any
2119 2118 changes, or change the parent of the working directory. If you
2120 2119 revert to a revision other than the parent of the working
2121 2120 directory, the reverted files will thus appear modified
2122 2121 afterwards.
2123 2122
2124 2123 If a file has been deleted, it is recreated. If the executable
2125 2124 mode of a file was changed, it is reset.
2126 2125
2127 2126 If names are given, all files matching the names are reverted.
2128 2127
2129 2128 If no arguments are given, no files are reverted.
2130 2129 """
2131 2130
2132 2131 if opts["date"]:
2133 2132 if opts["rev"]:
2134 2133 raise util.Abort(_("you can't specify a revision and a date"))
2135 2134 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2136 2135
2137 2136 if not pats and not opts['all']:
2138 2137 raise util.Abort(_('no files or directories specified; '
2139 2138 'use --all to revert the whole repo'))
2140 2139
2141 2140 parent, p2 = repo.dirstate.parents()
2142 2141 if not opts['rev'] and p2 != nullid:
2143 2142 raise util.Abort(_('uncommitted merge - please provide a '
2144 2143 'specific revision'))
2145 2144 node = repo.changectx(opts['rev']).node()
2146 2145 mf = repo.manifest.read(repo.changelog.read(node)[0])
2147 2146 if node == parent:
2148 2147 pmf = mf
2149 2148 else:
2150 2149 pmf = None
2151 2150
2152 2151 wlock = repo.wlock()
2153 2152
2154 2153 # need all matching names in dirstate and manifest of target rev,
2155 2154 # so have to walk both. do not print errors if files exist in one
2156 2155 # but not other.
2157 2156
2158 2157 names = {}
2159 2158 target_only = {}
2160 2159
2161 2160 # walk dirstate.
2162 2161
2163 2162 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2164 2163 badmatch=mf.has_key):
2165 2164 names[abs] = (rel, exact)
2166 2165 if src == 'b':
2167 2166 target_only[abs] = True
2168 2167
2169 2168 # walk target manifest.
2170 2169
2171 2170 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2172 2171 badmatch=names.has_key):
2173 2172 if abs in names: continue
2174 2173 names[abs] = (rel, exact)
2175 2174 target_only[abs] = True
2176 2175
2177 2176 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2178 2177 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2179 2178
2180 2179 revert = ([], _('reverting %s\n'))
2181 2180 add = ([], _('adding %s\n'))
2182 2181 remove = ([], _('removing %s\n'))
2183 2182 forget = ([], _('forgetting %s\n'))
2184 2183 undelete = ([], _('undeleting %s\n'))
2185 2184 update = {}
2186 2185
2187 2186 disptable = (
2188 2187 # dispatch table:
2189 2188 # file state
2190 2189 # action if in target manifest
2191 2190 # action if not in target manifest
2192 2191 # make backup if in target manifest
2193 2192 # make backup if not in target manifest
2194 2193 (modified, revert, remove, True, True),
2195 2194 (added, revert, forget, True, False),
2196 2195 (removed, undelete, None, False, False),
2197 2196 (deleted, revert, remove, False, False),
2198 2197 (unknown, add, None, True, False),
2199 2198 (target_only, add, None, False, False),
2200 2199 )
2201 2200
2202 2201 entries = names.items()
2203 2202 entries.sort()
2204 2203
2205 2204 for abs, (rel, exact) in entries:
2206 2205 mfentry = mf.get(abs)
2207 2206 def handle(xlist, dobackup):
2208 2207 xlist[0].append(abs)
2209 2208 update[abs] = 1
2210 2209 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2211 2210 bakname = "%s.orig" % rel
2212 2211 ui.note(_('saving current version of %s as %s\n') %
2213 2212 (rel, bakname))
2214 2213 if not opts.get('dry_run'):
2215 2214 util.copyfile(rel, bakname)
2216 2215 if ui.verbose or not exact:
2217 2216 ui.status(xlist[1] % rel)
2218 2217 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2219 2218 if abs not in table: continue
2220 2219 # file has changed in dirstate
2221 2220 if mfentry:
2222 2221 handle(hitlist, backuphit)
2223 2222 elif misslist is not None:
2224 2223 handle(misslist, backupmiss)
2225 2224 else:
2226 2225 if exact: ui.warn(_('file not managed: %s\n') % rel)
2227 2226 break
2228 2227 else:
2229 2228 # file has not changed in dirstate
2230 2229 if node == parent:
2231 2230 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2232 2231 continue
2233 2232 if pmf is None:
2234 2233 # only need parent manifest in this unlikely case,
2235 2234 # so do not read by default
2236 2235 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2237 2236 if abs in pmf:
2238 2237 if mfentry:
2239 2238 # if version of file is same in parent and target
2240 2239 # manifests, do nothing
2241 2240 if pmf[abs] != mfentry:
2242 2241 handle(revert, False)
2243 2242 else:
2244 2243 handle(remove, False)
2245 2244
2246 2245 if not opts.get('dry_run'):
2247 2246 repo.dirstate.forget(forget[0])
2248 2247 r = hg.revert(repo, node, update.has_key, wlock)
2249 2248 repo.dirstate.update(add[0], 'a')
2250 2249 repo.dirstate.update(undelete[0], 'n')
2251 2250 repo.dirstate.update(remove[0], 'r')
2252 2251 return r
2253 2252
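# Behaviour sketch (file name is illustrative): reverting a locally modified
# file, e.g. `hg revert foo.c`, takes the (modified, revert, remove, ...)
# row of the dispatch table above: foo.c is restored from the target
# manifest and the edited copy is kept as foo.c.orig unless --no-backup is
# given.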
2254 2253 def rollback(ui, repo):
2255 2254 """roll back the last transaction in this repository
2256 2255
2257 2256 Roll back the last transaction in this repository, restoring the
2258 2257 project to its state prior to the transaction.
2259 2258
2260 2259 Transactions are used to encapsulate the effects of all commands
2261 2260 that create new changesets or propagate existing changesets into a
2262 2261 repository. For example, the following commands are transactional,
2263 2262 and their effects can be rolled back:
2264 2263
2265 2264 commit
2266 2265 import
2267 2266 pull
2268 2267 push (with this repository as destination)
2269 2268 unbundle
2270 2269
2271 2270 This command should be used with care. There is only one level of
2272 2271 rollback, and there is no way to undo a rollback.
2273 2272
2274 2273 This command is not intended for use on public repositories. Once
2275 2274 changes are visible for pull by other users, rolling a transaction
2276 2275 back locally is ineffective (someone else may already have pulled
2277 2276 the changes). Furthermore, a race is possible with readers of the
2278 2277 repository; for example an in-progress pull from the repository
2279 2278 may fail if a rollback is performed.
2280 2279 """
2281 2280 repo.rollback()
2282 2281
2283 2282 def root(ui, repo):
2284 2283 """print the root (top) of the current working dir
2285 2284
2286 2285 Print the root directory of the current repository.
2287 2286 """
2288 2287 ui.write(repo.root + "\n")
2289 2288
2290 2289 def serve(ui, repo, **opts):
2291 2290 """export the repository via HTTP
2292 2291
2293 2292 Start a local HTTP repository browser and pull server.
2294 2293
2295 2294 By default, the server logs accesses to stdout and errors to
2296 2295 stderr. Use the "-A" and "-E" options to log to files.
2297 2296 """
2298 2297
2299 2298 if opts["stdio"]:
2300 2299 if repo is None:
2301 2300 raise hg.RepoError(_("There is no Mercurial repository here"
2302 2301 " (.hg not found)"))
2303 2302 s = sshserver.sshserver(ui, repo)
2304 2303 s.serve_forever()
2305 2304
2306 2305 parentui = ui.parentui or ui
2307 2306 optlist = ("name templates style address port ipv6"
2308 2307 " accesslog errorlog webdir_conf")
2309 2308 for o in optlist.split():
2310 2309 if opts[o]:
2311 2310 parentui.setconfig("web", o, str(opts[o]))
2312 2311
2313 2312 if repo is None and not ui.config("web", "webdir_conf"):
2314 2313 raise hg.RepoError(_("There is no Mercurial repository here"
2315 2314 " (.hg not found)"))
2316 2315
2317 2316 if opts['daemon'] and not opts['daemon_pipefds']:
2318 2317 rfd, wfd = os.pipe()
2319 2318 args = sys.argv[:]
2320 2319 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2321 2320 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2322 2321 args[0], args)
2323 2322 os.close(wfd)
2324 2323 os.read(rfd, 1)
2325 2324 os._exit(0)
2326 2325
2327 2326 httpd = hgweb.server.create_server(parentui, repo)
2328 2327
2329 2328 if ui.verbose:
2330 2329 if httpd.port != 80:
2331 2330 ui.status(_('listening at http://%s:%d/\n') %
2332 2331 (httpd.addr, httpd.port))
2333 2332 else:
2334 2333 ui.status(_('listening at http://%s/\n') % httpd.addr)
2335 2334
2336 2335 if opts['pid_file']:
2337 2336 fp = open(opts['pid_file'], 'w')
2338 2337 fp.write(str(os.getpid()) + '\n')
2339 2338 fp.close()
2340 2339
2341 2340 if opts['daemon_pipefds']:
2342 2341 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2343 2342 os.close(rfd)
2344 2343 os.write(wfd, 'y')
2345 2344 os.close(wfd)
2346 2345 sys.stdout.flush()
2347 2346 sys.stderr.flush()
2348 2347 fd = os.open(util.nulldev, os.O_RDWR)
2349 2348 if fd != 0: os.dup2(fd, 0)
2350 2349 if fd != 1: os.dup2(fd, 1)
2351 2350 if fd != 2: os.dup2(fd, 2)
2352 2351 if fd not in (0, 1, 2): os.close(fd)
2353 2352
2354 2353 httpd.serve_forever()
2355 2354
2356 2355 def status(ui, repo, *pats, **opts):
2357 2356 """show changed files in the working directory
2358 2357
2359 2358 Show status of files in the repository. If names are given, only
2360 2359 files that match are shown. Files that are clean or ignored are
2361 2360 not listed unless -c (clean), -i (ignored) or -A is given.
2362 2361
2363 2362 NOTE: status may appear to disagree with diff if permissions have
2364 2363 changed or a merge has occurred. The standard diff format does not
2365 2364 report permission changes and diff only reports changes relative
2366 2365 to one merge parent.
2367 2366
2368 2367 If one revision is given, it is used as the base revision.
2369 2368 If two revisions are given, the difference between them is shown.
2370 2369
2371 2370 The codes used to show the status of files are:
2372 2371 M = modified
2373 2372 A = added
2374 2373 R = removed
2375 2374 C = clean
2376 2375 ! = deleted, but still tracked
2377 2376 ? = not tracked
2378 2377 I = ignored (not shown by default)
2379 2378 = the previously added file was copied from here
2380 2379 """
2381 2380
2382 2381 all = opts['all']
2383 2382 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2384 2383
2385 2384 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2386 2385 cwd = (pats and repo.getcwd()) or ''
2387 2386 modified, added, removed, deleted, unknown, ignored, clean = [
2388 2387 n for n in repo.status(node1=node1, node2=node2, files=files,
2389 2388 match=matchfn,
2390 2389 list_ignored=all or opts['ignored'],
2391 2390 list_clean=all or opts['clean'])]
2392 2391
2393 2392 changetypes = (('modified', 'M', modified),
2394 2393 ('added', 'A', added),
2395 2394 ('removed', 'R', removed),
2396 2395 ('deleted', '!', deleted),
2397 2396 ('unknown', '?', unknown),
2398 2397 ('ignored', 'I', ignored))
2399 2398
2400 2399 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2401 2400
2402 2401 end = opts['print0'] and '\0' or '\n'
2403 2402
2404 2403 for opt, char, changes in ([ct for ct in explicit_changetypes
2405 2404 if all or opts[ct[0]]]
2406 2405 or changetypes):
2407 2406 if opts['no_status']:
2408 2407 format = "%%s%s" % end
2409 2408 else:
2410 2409 format = "%s %%s%s" % (char, end)
2411 2410
2412 2411 for f in changes:
2413 2412 ui.write(format % util.pathto(cwd, f))
2414 2413 if ((all or opts.get('copies')) and not opts.get('no_status')):
2415 2414 copied = repo.dirstate.copied(f)
2416 2415 if copied:
2417 2416 ui.write(' %s%s' % (util.pathto(cwd, copied), end))
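# a hedged sketch of the default output produced by the loop above
# (file names are illustrative):
#   M src/main.py
#   A docs/new.txt
#   ? scratch.tmp
# with --no-status the leading one-letter code is dropped, and with --print0
# each entry ends in a NUL byte instead of a newline (for use with xargs -0).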
2418 2417
2419 2418 def tag(ui, repo, name, rev_=None, **opts):
2420 2419 """add a tag for the current or given revision
2421 2420
2422 2421 Name a particular revision using <name>.
2423 2422
2424 2423 Tags are used to name particular revisions of the repository and are
2425 2424 very useful to compare different revisions, to go back to significant
2426 2425 earlier versions or to mark branch points as releases, etc.
2427 2426
2428 2427 If no revision is given, the parent of the working directory is used,
2429 2428 or tip if no revision is checked out.
2430 2429
2431 2430 To facilitate version control, distribution, and merging of tags,
2432 2431 they are stored as a file named ".hgtags" which is managed
2433 2432 similarly to other project files and can be hand-edited if
2434 2433 necessary. The file '.hg/localtags' is used for local tags (not
2435 2434 shared among repositories).
2436 2435 """
2437 2436 if name in ['tip', '.', 'null']:
2438 2437 raise util.Abort(_("the name '%s' is reserved") % name)
2439 2438 if rev_ is not None:
2440 2439 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2441 2440 "please use 'hg tag [-r REV] NAME' instead\n"))
2442 2441 if opts['rev']:
2443 2442 raise util.Abort(_("use only one form to specify the revision"))
2444 2443 if opts['rev']:
2445 2444 rev_ = opts['rev']
2446 2445 if not rev_ and repo.dirstate.parents()[1] != nullid:
2447 2446 raise util.Abort(_('uncommitted merge - please provide a '
2448 2447 'specific revision'))
2449 2448 r = repo.changectx(rev_).node()
2450 2449
2451 2450 message = opts['message']
2452 2451 if not message:
2453 2452 message = _('Added tag %s for changeset %s') % (name, short(r))
2454 2453
2455 2454 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2456 2455
2457 2456 def tags(ui, repo):
2458 2457 """list repository tags
2459 2458
2460 2459 List the repository tags.
2461 2460
2462 2461 This lists both regular and local tags.
2463 2462 """
2464 2463
2465 2464 l = repo.tagslist()
2466 2465 l.reverse()
2467 2466 hexfunc = ui.debugflag and hex or short
2468 2467 for t, n in l:
2469 2468 try:
2470 2469 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2471 2470 except KeyError:
2472 2471 r = " ?:?"
2473 2472 if ui.quiet:
2474 2473 ui.write("%s\n" % t)
2475 2474 else:
2476 2475 spaces = " " * (30 - util.locallen(t))
2477 2476 ui.write("%s%s %s\n" % (t, spaces, r))
2478 2477
2479 2478 def tip(ui, repo, **opts):
2480 2479 """show the tip revision
2481 2480
2482 2481 Show the tip revision.
2483 2482 """
2484 2483 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2485 2484
2486 2485 def unbundle(ui, repo, fname, **opts):
2487 2486 """apply a changegroup file
2488 2487
2489 2488 Apply a compressed changegroup file generated by the bundle
2490 2489 command.
2491 2490 """
2492 2491 if os.path.exists(fname):
2493 2492 f = open(fname, "rb")
2494 2493 else:
2495 2494 f = urllib.urlopen(fname)
2496 2495 gen = changegroup.readbundle(f, fname)
2497 2496 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2498 2497 return postincoming(ui, repo, modheads, opts['update'])
2499 2498
2500 2499 def update(ui, repo, node=None, clean=False, branch=None, date=None):
2501 2500 """update working directory
2502 2501
2503 2502 Update the working directory to the specified revision.
2504 2503
2505 2504 If there are no outstanding changes in the working directory and
2506 2505 there is a linear relationship between the current version and the
2507 2506 requested version, the result is the requested version.
2508 2507
2509 2508 To merge the working directory with another revision, use the
2510 2509 merge command.
2511 2510
2512 2511 By default, update will refuse to run if doing so would require
2513 2512 discarding local changes.
2514 2513 """
2515 2514 if date:
2516 2515 if node:
2517 2516 raise util.Abort(_("you can't specify a revision and a date"))
2518 2517 node = cmdutil.finddate(ui, repo, date)
2519 2518
2520 2519 node = _lookup(repo, node, branch)
2521 2520 if clean:
2522 2521 return hg.clean(repo, node)
2523 2522 else:
2524 2523 return hg.update(repo, node)
2525 2524
2526 2525 def _lookup(repo, node, branch=None):
2527 2526 if branch:
2528 2527 repo.ui.warn(_("the --branch option is deprecated, "
2529 2528 "please use 'hg branch' instead\n"))
2530 2529 br = repo.branchlookup(branch=branch)
2531 2530 found = []
2532 2531 for x in br:
2533 2532 if branch in br[x]:
2534 2533 found.append(x)
2535 2534 if len(found) > 1:
2536 2535 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2537 2536 for x in found:
2538 2537 cmdutil.show_changeset(ui, repo, {}).show(changenode=x)
2539 2538 raise util.Abort("")
2540 2539 if len(found) == 1:
2541 2540 node = found[0]
2542 2541 repo.ui.warn(_("Using head %s for branch %s\n")
2543 2542 % (short(node), branch))
2544 2543 else:
2545 2544 raise util.Abort(_("branch %s not found") % branch)
2546 2545 else:
2547 2546 if node:
2548 2547 node = repo.lookup(node)
2549 2548 else:
2550 2549 wc = repo.workingctx()
2551 2550 node = repo.branchtags()[wc.branch()]
2552 2551 return node
2553 2552
2554 2553 def verify(ui, repo):
2555 2554 """verify the integrity of the repository
2556 2555
2557 2556 Verify the integrity of the current repository.
2558 2557
2559 2558 This will perform an extensive check of the repository's
2560 2559 integrity, validating the hashes and checksums of each entry in
2561 2560 the changelog, manifest, and tracked files, as well as the
2562 2561 integrity of their crosslinks and indices.
2563 2562 """
2564 2563 return hg.verify(repo)
2565 2564
2566 2565 def version_(ui):
2567 2566 """output version and copyright information"""
2568 2567 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2569 2568 % version.get_version())
2570 2569 ui.status(_(
2571 2570 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
2572 2571 "This is free software; see the source for copying conditions. "
2573 2572 "There is NO\nwarranty; "
2574 2573 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2575 2574 ))
2576 2575
2577 2576 # Command options and aliases are listed here, alphabetically
2578 2577
2579 2578 globalopts = [
2580 2579 ('R', 'repository', '',
2581 2580 _('repository root directory or symbolic path name')),
2582 2581 ('', 'cwd', '', _('change working directory')),
2583 2582 ('y', 'noninteractive', None,
2584 2583 _('do not prompt, assume \'yes\' for any required answers')),
2585 2584 ('q', 'quiet', None, _('suppress output')),
2586 2585 ('v', 'verbose', None, _('enable additional output')),
2587 2586 ('', 'config', [], _('set/override config option')),
2588 2587 ('', 'debug', None, _('enable debugging output')),
2589 2588 ('', 'debugger', None, _('start debugger')),
2590 2589 ('', 'encoding', util._encoding, _('set the charset encoding')),
2591 2590 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2592 2591 ('', 'lsprof', None, _('print improved command execution profile')),
2593 2592 ('', 'traceback', None, _('print traceback on exception')),
2594 2593 ('', 'time', None, _('time how long the command takes')),
2595 2594 ('', 'profile', None, _('print command execution profile')),
2596 2595 ('', 'version', None, _('output version information and exit')),
2597 2596 ('h', 'help', None, _('display help and exit')),
2598 2597 ]
2599 2598
2600 2599 dryrunopts = [('n', 'dry-run', None,
2601 2600 _('do not perform actions, just print output'))]
2602 2601
2603 2602 remoteopts = [
2604 2603 ('e', 'ssh', '', _('specify ssh command to use')),
2605 2604 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2606 2605 ]
2607 2606
2608 2607 walkopts = [
2609 2608 ('I', 'include', [], _('include names matching the given patterns')),
2610 2609 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2611 2610 ]
2612 2611
2613 2612 commitopts = [
2614 2613 ('m', 'message', '', _('use <text> as commit message')),
2615 2614 ('l', 'logfile', '', _('read commit message from <file>')),
2616 2615 ]
2617 2616
2618 2617 table = {
2619 2618 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2620 2619 "addremove":
2621 2620 (addremove,
2622 2621 [('s', 'similarity', '',
2623 2622 _('guess renamed files by similarity (0<=s<=100)')),
2624 2623 ] + walkopts + dryrunopts,
2625 2624 _('hg addremove [OPTION]... [FILE]...')),
2626 2625 "^annotate":
2627 2626 (annotate,
2628 2627 [('r', 'rev', '', _('annotate the specified revision')),
2629 2628 ('f', 'follow', None, _('follow file copies and renames')),
2630 2629 ('a', 'text', None, _('treat all files as text')),
2631 2630 ('u', 'user', None, _('list the author')),
2632 2631 ('d', 'date', None, _('list the date')),
2633 2632 ('n', 'number', None, _('list the revision number (default)')),
2634 2633 ('c', 'changeset', None, _('list the changeset')),
2635 2634 ] + walkopts,
2636 2635 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
2637 2636 "archive":
2638 2637 (archive,
2639 2638 [('', 'no-decode', None, _('do not pass files through decoders')),
2640 2639 ('p', 'prefix', '', _('directory prefix for files in archive')),
2641 2640 ('r', 'rev', '', _('revision to distribute')),
2642 2641 ('t', 'type', '', _('type of distribution to create')),
2643 2642 ] + walkopts,
2644 2643 _('hg archive [OPTION]... DEST')),
2645 2644 "backout":
2646 2645 (backout,
2647 2646 [('', 'merge', None,
2648 2647 _('merge with old dirstate parent after backout')),
2649 2648 ('d', 'date', '', _('record datecode as commit date')),
2650 2649 ('', 'parent', '', _('parent to choose when backing out merge')),
2651 2650 ('u', 'user', '', _('record user as committer')),
2652 2651 ] + walkopts + commitopts,
2653 2652 _('hg backout [OPTION]... REV')),
2654 2653 "branch": (branch, [], _('hg branch [NAME]')),
2655 2654 "branches": (branches, [], _('hg branches')),
2656 2655 "bundle":
2657 2656 (bundle,
2658 2657 [('f', 'force', None,
2659 2658 _('run even when remote repository is unrelated')),
2660 2659 ('r', 'rev', [],
2661 2660 _('a changeset you would like to bundle')),
2662 2661 ('', 'base', [],
2663 2662 _('a base changeset to specify instead of a destination')),
2664 2663 ] + remoteopts,
2665 2664 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2666 2665 "cat":
2667 2666 (cat,
2668 2667 [('o', 'output', '', _('print output to file with formatted name')),
2669 2668 ('r', 'rev', '', _('print the given revision')),
2670 2669 ] + walkopts,
2671 2670 _('hg cat [OPTION]... FILE...')),
2672 2671 "^clone":
2673 2672 (clone,
2674 2673 [('U', 'noupdate', None, _('do not update the new working directory')),
2675 2674 ('r', 'rev', [],
2676 2675 _('a changeset you would like to have after cloning')),
2677 2676 ('', 'pull', None, _('use pull protocol to copy metadata')),
2678 2677 ('', 'uncompressed', None,
2679 2678 _('use uncompressed transfer (fast over LAN)')),
2680 2679 ] + remoteopts,
2681 2680 _('hg clone [OPTION]... SOURCE [DEST]')),
2682 2681 "^commit|ci":
2683 2682 (commit,
2684 2683 [('A', 'addremove', None,
2685 2684 _('mark new/missing files as added/removed before committing')),
2686 2685 ('d', 'date', '', _('record datecode as commit date')),
2687 2686 ('u', 'user', '', _('record user as committer')),
2688 2687 ] + walkopts + commitopts,
2689 2688 _('hg commit [OPTION]... [FILE]...')),
2690 2689 "copy|cp":
2691 2690 (copy,
2692 2691 [('A', 'after', None, _('record a copy that has already occurred')),
2693 2692 ('f', 'force', None,
2694 2693 _('forcibly copy over an existing managed file')),
2695 2694 ] + walkopts + dryrunopts,
2696 2695 _('hg copy [OPTION]... [SOURCE]... DEST')),
2697 2696 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2698 2697 "debugcomplete":
2699 2698 (debugcomplete,
2700 2699 [('o', 'options', None, _('show the command options'))],
2701 2700 _('debugcomplete [-o] CMD')),
2702 2701 "debuginstall": (debuginstall, [], _('debuginstall')),
2703 2702 "debugrebuildstate":
2704 2703 (debugrebuildstate,
2705 2704 [('r', 'rev', '', _('revision to rebuild to'))],
2706 2705 _('debugrebuildstate [-r REV] [REV]')),
2707 2706 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2708 2707 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2709 2708 "debugstate": (debugstate, [], _('debugstate')),
2710 2709 "debugdate":
2711 2710 (debugdate,
2712 2711 [('e', 'extended', None, _('try extended date formats'))],
2713 2712 _('debugdate [-e] DATE [RANGE]')),
2714 2713 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2715 2714 "debugindex": (debugindex, [], _('debugindex FILE')),
2716 2715 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2717 2716 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2718 2717 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2719 2718 "^diff":
2720 2719 (diff,
2721 2720 [('r', 'rev', [], _('revision')),
2722 2721 ('a', 'text', None, _('treat all files as text')),
2723 2722 ('p', 'show-function', None,
2724 2723 _('show which function each change is in')),
2725 2724 ('g', 'git', None, _('use git extended diff format')),
2726 2725 ('', 'nodates', None, _("don't include dates in diff headers")),
2727 2726 ('w', 'ignore-all-space', None,
2728 2727 _('ignore white space when comparing lines')),
2729 2728 ('b', 'ignore-space-change', None,
2730 2729 _('ignore changes in the amount of white space')),
2731 2730 ('B', 'ignore-blank-lines', None,
2732 2731 _('ignore changes whose lines are all blank')),
2733 2732 ] + walkopts,
2734 2733 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2735 2734 "^export":
2736 2735 (export,
2737 2736 [('o', 'output', '', _('print output to file with formatted name')),
2738 2737 ('a', 'text', None, _('treat all files as text')),
2739 2738 ('g', 'git', None, _('use git extended diff format')),
2740 2739 ('', 'nodates', None, _("don't include dates in diff headers")),
2741 2740 ('', 'switch-parent', None, _('diff against the second parent'))],
2742 2741 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2743 2742 "grep":
2744 2743 (grep,
2745 2744 [('0', 'print0', None, _('end fields with NUL')),
2746 2745 ('', 'all', None, _('print all revisions that match')),
2747 2746 ('f', 'follow', None,
2748 2747 _('follow changeset history, or file history across copies and renames')),
2749 2748 ('i', 'ignore-case', None, _('ignore case when matching')),
2750 2749 ('l', 'files-with-matches', None,
2751 2750 _('print only filenames and revs that match')),
2752 2751 ('n', 'line-number', None, _('print matching line numbers')),
2753 2752 ('r', 'rev', [], _('search in given revision range')),
2754 2753 ('u', 'user', None, _('print user who committed change')),
2755 2754 ] + walkopts,
2756 2755 _('hg grep [OPTION]... PATTERN [FILE]...')),
2757 2756 "heads":
2758 2757 (heads,
2759 2758 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2760 2759 ('', 'style', '', _('display using template map file')),
2761 2760 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2762 2761 ('', 'template', '', _('display with template'))],
2763 2762 _('hg heads [-r REV]')),
2764 2763 "help": (help_, [], _('hg help [COMMAND]')),
2765 2764 "identify|id": (identify, [], _('hg identify')),
2766 2765 "import|patch":
2767 2766 (import_,
2768 2767 [('p', 'strip', 1,
2769 2768 _('directory strip option for patch. This has the same\n'
2770 2769 'meaning as the corresponding patch option')),
2771 2770 ('b', 'base', '', _('base path (DEPRECATED)')),
2772 2771 ('f', 'force', None,
2773 2772 _('skip check for outstanding uncommitted changes'))] + commitopts,
2774 2773 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2775 2774 "incoming|in": (incoming,
2776 2775 [('M', 'no-merges', None, _('do not show merges')),
2777 2776 ('f', 'force', None,
2778 2777 _('run even when remote repository is unrelated')),
2779 2778 ('', 'style', '', _('display using template map file')),
2780 2779 ('n', 'newest-first', None, _('show newest record first')),
2781 2780 ('', 'bundle', '', _('file to store the bundles into')),
2782 2781 ('p', 'patch', None, _('show patch')),
2783 2782 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2784 2783 ('', 'template', '', _('display with template')),
2785 2784 ] + remoteopts,
2786 2785 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2787 2786 ' [--bundle FILENAME] [SOURCE]')),
2788 2787 "^init":
2789 2788 (init,
2790 2789 remoteopts,
2791 2790 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2792 2791 "locate":
2793 2792 (locate,
2794 2793 [('r', 'rev', '', _('search the repository as it stood at rev')),
2795 2794 ('0', 'print0', None,
2796 2795 _('end filenames with NUL, for use with xargs')),
2797 2796 ('f', 'fullpath', None,
2798 2797 _('print complete paths from the filesystem root')),
2799 2798 ] + walkopts,
2800 2799 _('hg locate [OPTION]... [PATTERN]...')),
2801 2800 "^log|history":
2802 2801 (log,
2803 2802 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2804 2803 ('f', 'follow', None,
2805 2804 _('follow changeset history, or file history across copies and renames')),
2806 2805 ('', 'follow-first', None,
2807 2806 _('only follow the first parent of merge changesets')),
2808 2807 ('d', 'date', '', _('show revs matching date spec')),
2809 2808 ('C', 'copies', None, _('show copied files')),
2810 2809 ('k', 'keyword', [], _('search for a keyword')),
2811 2810 ('l', 'limit', '', _('limit number of changes displayed')),
2812 2811 ('r', 'rev', [], _('show the specified revision or range')),
2813 2812 ('', 'removed', None, _('include revs where files were removed')),
2814 2813 ('M', 'no-merges', None, _('do not show merges')),
2815 2814 ('', 'style', '', _('display using template map file')),
2816 2815 ('m', 'only-merges', None, _('show only merges')),
2817 2816 ('p', 'patch', None, _('show patch')),
2818 2817 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2819 2818 ('', 'template', '', _('display with template')),
2820 2819 ] + walkopts,
2821 2820 _('hg log [OPTION]... [FILE]')),
2822 2821 "manifest": (manifest, [], _('hg manifest [REV]')),
2823 2822 "^merge":
2824 2823 (merge,
2825 2824 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
2826 2825 ('f', 'force', None, _('force a merge with outstanding changes'))],
2827 2826 _('hg merge [-f] [REV]')),
2828 2827 "outgoing|out": (outgoing,
2829 2828 [('M', 'no-merges', None, _('do not show merges')),
2830 2829 ('f', 'force', None,
2831 2830 _('run even when remote repository is unrelated')),
2832 2831 ('p', 'patch', None, _('show patch')),
2833 2832 ('', 'style', '', _('display using template map file')),
2834 2833 ('r', 'rev', [], _('a specific revision you would like to push')),
2835 2834 ('n', 'newest-first', None, _('show newest record first')),
2836 2835 ('', 'template', '', _('display with template')),
2837 2836 ] + remoteopts,
2838 2837 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2839 2838 "^parents":
2840 2839 (parents,
2841 2840 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2842 2841 ('r', 'rev', '', _('show parents from the specified rev')),
2843 2842 ('', 'style', '', _('display using template map file')),
2844 2843 ('', 'template', '', _('display with template'))],
2845 2844 _('hg parents [-r REV] [FILE]')),
2846 2845 "paths": (paths, [], _('hg paths [NAME]')),
2847 2846 "^pull":
2848 2847 (pull,
2849 2848 [('u', 'update', None,
2850 2849 _('update to new tip if changesets were pulled')),
2851 2850 ('f', 'force', None,
2852 2851 _('run even when remote repository is unrelated')),
2853 2852 ('r', 'rev', [],
2854 2853 _('a specific revision up to which you would like to pull')),
2855 2854 ] + remoteopts,
2856 2855 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2857 2856 "^push":
2858 2857 (push,
2859 2858 [('f', 'force', None, _('force push')),
2860 2859 ('r', 'rev', [], _('a specific revision you would like to push')),
2861 2860 ] + remoteopts,
2862 2861 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2863 2862 "debugrawcommit|rawcommit":
2864 2863 (rawcommit,
2865 2864 [('p', 'parent', [], _('parent')),
2866 2865 ('d', 'date', '', _('date code')),
2867 2866 ('u', 'user', '', _('user')),
2868 2867 ('F', 'files', '', _('file list'))
2869 2868 ] + commitopts,
2870 2869 _('hg debugrawcommit [OPTION]... [FILE]...')),
2871 2870 "recover": (recover, [], _('hg recover')),
2872 2871 "^remove|rm":
2873 2872 (remove,
2874 2873 [('A', 'after', None, _('record remove that has already occurred')),
2875 2874 ('f', 'force', None, _('remove file even if modified')),
2876 2875 ] + walkopts,
2877 2876 _('hg remove [OPTION]... FILE...')),
2878 2877 "rename|mv":
2879 2878 (rename,
2880 2879 [('A', 'after', None, _('record a rename that has already occurred')),
2881 2880 ('f', 'force', None,
2882 2881 _('forcibly copy over an existing managed file')),
2883 2882 ] + walkopts + dryrunopts,
2884 2883 _('hg rename [OPTION]... SOURCE... DEST')),
2885 2884 "^revert":
2886 2885 (revert,
2887 2886 [('a', 'all', None, _('revert all changes when no arguments given')),
2888 2887 ('d', 'date', '', _('tipmost revision matching date')),
2889 2888 ('r', 'rev', '', _('revision to revert to')),
2890 2889 ('', 'no-backup', None, _('do not save backup copies of files')),
2891 2890 ] + walkopts + dryrunopts,
2892 2891 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2893 2892 "rollback": (rollback, [], _('hg rollback')),
2894 2893 "root": (root, [], _('hg root')),
2895 2894 "showconfig|debugconfig":
2896 2895 (showconfig,
2897 2896 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2898 2897 _('showconfig [-u] [NAME]...')),
2899 2898 "^serve":
2900 2899 (serve,
2901 2900 [('A', 'accesslog', '', _('name of access log file to write to')),
2902 2901 ('d', 'daemon', None, _('run server in background')),
2903 2902 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2904 2903 ('E', 'errorlog', '', _('name of error log file to write to')),
2905 2904 ('p', 'port', 0, _('port to use (default: 8000)')),
2906 2905 ('a', 'address', '', _('address to use')),
2907 2906 ('n', 'name', '',
2908 2907 _('name to show in web pages (default: working dir)')),
2909 2908 ('', 'webdir-conf', '', _('name of the webdir config file'
2910 2909 ' (serve more than one repo)')),
2911 2910 ('', 'pid-file', '', _('name of file to write process ID to')),
2912 2911 ('', 'stdio', None, _('for remote clients')),
2913 2912 ('t', 'templates', '', _('web templates to use')),
2914 2913 ('', 'style', '', _('template style to use')),
2915 2914 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2916 2915 _('hg serve [OPTION]...')),
2917 2916 "^status|st":
2918 2917 (status,
2919 2918 [('A', 'all', None, _('show status of all files')),
2920 2919 ('m', 'modified', None, _('show only modified files')),
2921 2920 ('a', 'added', None, _('show only added files')),
2922 2921 ('r', 'removed', None, _('show only removed files')),
2923 2922 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2924 2923 ('c', 'clean', None, _('show only files without changes')),
2925 2924 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2926 2925 ('i', 'ignored', None, _('show only ignored files')),
2927 2926 ('n', 'no-status', None, _('hide status prefix')),
2928 2927 ('C', 'copies', None, _('show source of copied files')),
2929 2928 ('0', 'print0', None,
2930 2929 _('end filenames with NUL, for use with xargs')),
2931 2930 ('', 'rev', [], _('show difference from revision')),
2932 2931 ] + walkopts,
2933 2932 _('hg status [OPTION]... [FILE]...')),
2934 2933 "tag":
2935 2934 (tag,
2936 2935 [('l', 'local', None, _('make the tag local')),
2937 2936 ('m', 'message', '', _('message for tag commit log entry')),
2938 2937 ('d', 'date', '', _('record datecode as commit date')),
2939 2938 ('u', 'user', '', _('record user as committer')),
2940 2939 ('r', 'rev', '', _('revision to tag'))],
2941 2940 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2942 2941 "tags": (tags, [], _('hg tags')),
2943 2942 "tip":
2944 2943 (tip,
2945 2944 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2946 2945 ('', 'style', '', _('display using template map file')),
2947 2946 ('p', 'patch', None, _('show patch')),
2948 2947 ('', 'template', '', _('display with template'))],
2949 2948 _('hg tip [-p]')),
2950 2949 "unbundle":
2951 2950 (unbundle,
2952 2951 [('u', 'update', None,
2953 2952 _('update to new tip if changesets were unbundled'))],
2954 2953 _('hg unbundle [-u] FILE')),
2955 2954 "^update|up|checkout|co":
2956 2955 (update,
2957 2956 [('b', 'branch', '',
2958 2957 _('checkout the head of a specific branch (DEPRECATED)')),
2959 2958 ('C', 'clean', None, _('overwrite locally modified files')),
2960 2959 ('d', 'date', '', _('tipmost revision matching date'))],
2961 2960 _('hg update [-C] [-d DATE] [REV]')),
2962 2961 "verify": (verify, [], _('hg verify')),
2963 2962 "version": (version_, [], _('hg version')),
2964 2963 }
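# each entry above maps "name|alias..." (a leading "^" marks the command for
# the short help list) to a (function, options, synopsis) tuple; a minimal
# illustrative entry, not part of the real table, might look like:
#   "echo": (lambda ui, repo, *args: ui.write(" ".join(args) + "\n"),
#            [], _('hg echo [TEXT]...')),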
2965 2964
2966 2965 norepo = ("clone init version help debugancestor debugcomplete debugdata"
2967 2966 " debugindex debugindexdot debugdate debuginstall")
2968 2967 optionalrepo = ("paths serve showconfig")
2969 2968
2970 2969 def findpossible(ui, cmd):
2971 2970 """
2972 2971 Return cmd -> (aliases, command table entry)
2973 2972 for each matching command.
2974 2973 Return debug commands (or their aliases) only if no normal command matches.
2975 2974 """
2976 2975 choice = {}
2977 2976 debugchoice = {}
2978 2977 for e in table.keys():
2979 2978 aliases = e.lstrip("^").split("|")
2980 2979 found = None
2981 2980 if cmd in aliases:
2982 2981 found = cmd
2983 2982 elif not ui.config("ui", "strict"):
2984 2983 for a in aliases:
2985 2984 if a.startswith(cmd):
2986 2985 found = a
2987 2986 break
2988 2987 if found is not None:
2989 2988 if aliases[0].startswith("debug") or found.startswith("debug"):
2990 2989 debugchoice[found] = (aliases, table[e])
2991 2990 else:
2992 2991 choice[found] = (aliases, table[e])
2993 2992
2994 2993 if not choice and debugchoice:
2995 2994 choice = debugchoice
2996 2995
2997 2996 return choice
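# illustrative results, assuming the command table defined above and no
# "ui.strict" setting:
#   findpossible(ui, 'st')  -> {'st': ...}          # exact alias match
#   findpossible(ui, 'rem') -> {'remove': ...}      # unambiguous prefix
#   findpossible(ui, 're')  -> {'recover': ..., 'remove': ...,
#                               'rename': ..., 'revert': ...}   # ambiguous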
2998 2997
2999 2998 def findcmd(ui, cmd):
3000 2999 """Return (aliases, command table entry) for command string."""
3001 3000 choice = findpossible(ui, cmd)
3002 3001
3003 3002 if choice.has_key(cmd):
3004 3003 return choice[cmd]
3005 3004
3006 3005 if len(choice) > 1:
3007 3006 clist = choice.keys()
3008 3007 clist.sort()
3009 3008 raise AmbiguousCommand(cmd, clist)
3010 3009
3011 3010 if choice:
3012 3011 return choice.values()[0]
3013 3012
3014 3013 raise UnknownCommand(cmd)
3015 3014
3016 3015 def catchterm(*args):
3017 3016 raise util.SignalInterrupt
3018 3017
3019 3018 def run():
3020 3019 sys.exit(dispatch(sys.argv[1:]))
3021 3020
3022 3021 class ParseError(Exception):
3023 3022 """Exception raised on errors in parsing the command line."""
3024 3023
3025 3024 def parse(ui, args):
3026 3025 options = {}
3027 3026 cmdoptions = {}
3028 3027
3029 3028 try:
3030 3029 args = fancyopts.fancyopts(args, globalopts, options)
3031 3030 except fancyopts.getopt.GetoptError, inst:
3032 3031 raise ParseError(None, inst)
3033 3032
3034 3033 if args:
3035 3034 cmd, args = args[0], args[1:]
3036 3035 aliases, i = findcmd(ui, cmd)
3037 3036 cmd = aliases[0]
3038 3037 defaults = ui.config("defaults", cmd)
3039 3038 if defaults:
3040 3039 args = shlex.split(defaults) + args
3041 3040 c = list(i[1])
3042 3041 else:
3043 3042 cmd = None
3044 3043 c = []
3045 3044
3046 3045 # combine global options into local
3047 3046 for o in globalopts:
3048 3047 c.append((o[0], o[1], options[o[1]], o[3]))
3049 3048
3050 3049 try:
3051 3050 args = fancyopts.fancyopts(args, c, cmdoptions)
3052 3051 except fancyopts.getopt.GetoptError, inst:
3053 3052 raise ParseError(cmd, inst)
3054 3053
3055 3054 # separate global options back out
3056 3055 for o in globalopts:
3057 3056 n = o[1]
3058 3057 options[n] = cmdoptions[n]
3059 3058 del cmdoptions[n]
3060 3059
3061 3060 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3062 3061
3063 3062 external = {}
3064 3063
3065 3064 def findext(name):
3066 3065 '''return module with given extension name'''
3067 3066 try:
3068 3067 return sys.modules[external[name]]
3069 3068 except KeyError:
3070 3069 for k, v in external.iteritems():
3071 3070 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3072 3071 return sys.modules[v]
3073 3072 raise KeyError(name)
3074 3073
3075 3074 def load_extensions(ui):
3076 3075 added = []
3077 3076 for ext_name, load_from_name in ui.extensions():
3078 3077 if ext_name in external:
3079 3078 continue
3080 3079 try:
3081 3080 if load_from_name:
3082 3081 # the module will be loaded in sys.modules
3083 3082 # choose a unique name so that it doesn't
3084 3083 # conflict with other modules
3085 3084 module_name = "hgext_%s" % ext_name.replace('.', '_')
3086 3085 mod = imp.load_source(module_name, load_from_name)
3087 3086 else:
3088 3087 def importh(name):
3089 3088 mod = __import__(name)
3090 3089 components = name.split('.')
3091 3090 for comp in components[1:]:
3092 3091 mod = getattr(mod, comp)
3093 3092 return mod
3094 3093 try:
3095 3094 mod = importh("hgext.%s" % ext_name)
3096 3095 except ImportError:
3097 3096 mod = importh(ext_name)
3098 3097 external[ext_name] = mod.__name__
3099 3098 added.append((mod, ext_name))
3100 3099 except (util.SignalInterrupt, KeyboardInterrupt):
3101 3100 raise
3102 3101 except Exception, inst:
3103 3102 ui.warn(_("*** failed to import extension %s: %s\n") %
3104 3103 (ext_name, inst))
3105 3104 if ui.print_exc():
3106 3105 return 1
3107 3106
3108 3107 for mod, name in added:
3109 3108 uisetup = getattr(mod, 'uisetup', None)
3110 3109 if uisetup:
3111 3110 uisetup(ui)
3112 3111 reposetup = getattr(mod, 'reposetup', None)
3113 3112 if reposetup:
3114 3113 hg.repo_setup_hooks.append(reposetup)
3115 3114 cmdtable = getattr(mod, 'cmdtable', {})
3116 3115 for t in cmdtable:
3117 3116 if t in table:
3118 3117 ui.warn(_("module %s overrides %s\n") % (name, t))
3119 3118 table.update(cmdtable)
3120 3119
3121 3120 def parseconfig(config):
3122 3121 """parse the --config options from the command line"""
3123 3122 parsed = []
3124 3123 for cfg in config:
3125 3124 try:
3126 3125 name, value = cfg.split('=', 1)
3127 3126 section, name = name.split('.', 1)
3128 3127 if not section or not name:
3129 3128 raise IndexError
3130 3129 parsed.append((section, name, value))
3131 3130 except (IndexError, ValueError):
3132 3131 raise util.Abort(_('malformed --config option: %s') % cfg)
3133 3132 return parsed
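# a sketch of the expected behaviour (values are hypothetical):
#   parseconfig(['ui.username=alice', 'diff.git=1'])
#     -> [('ui', 'username', 'alice'), ('diff', 'git', '1')]
#   parseconfig(['garbage'])   # no '=' or '.' -> util.Abort('malformed ...')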
3134 3133
3135 3134 def dispatch(args):
3136 3135 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3137 3136 num = getattr(signal, name, None)
3138 3137 if num: signal.signal(num, catchterm)
3139 3138
3140 3139 try:
3141 3140 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3142 3141 except util.Abort, inst:
3143 3142 sys.stderr.write(_("abort: %s\n") % inst)
3144 3143 return -1
3145 3144
3146 3145 load_extensions(u)
3147 3146 u.addreadhook(load_extensions)
3148 3147
3149 3148 try:
3150 3149 cmd, func, args, options, cmdoptions = parse(u, args)
3151 3150 if options["encoding"]:
3152 3151 util._encoding = options["encoding"]
3153 3152 if options["encodingmode"]:
3154 3153 util._encodingmode = options["encodingmode"]
3155 3154 if options["time"]:
3156 3155 def get_times():
3157 3156 t = os.times()
3158 3157 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3159 3158 t = (t[0], t[1], t[2], t[3], time.clock())
3160 3159 return t
3161 3160 s = get_times()
3162 3161 def print_time():
3163 3162 t = get_times()
3164 3163 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3165 3164 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3166 3165 atexit.register(print_time)
3167 3166
3168 3167 # enter the debugger before command execution
3169 3168 if options['debugger']:
3170 3169 pdb.set_trace()
3171 3170
3172 3171 try:
3173 3172 if options['cwd']:
3174 3173 os.chdir(options['cwd'])
3175 3174
3176 3175 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3177 3176 not options["noninteractive"], options["traceback"],
3178 3177 parseconfig(options["config"]))
3179 3178
3180 3179 path = u.expandpath(options["repository"]) or ""
3181 3180 repo = path and hg.repository(u, path=path) or None
3182 3181 if repo and not repo.local():
3183 3182 raise util.Abort(_("repository '%s' is not local") % path)
3184 3183
3185 3184 if options['help']:
3186 3185 return help_(u, cmd, options['version'])
3187 3186 elif options['version']:
3188 3187 return version_(u)
3189 3188 elif not cmd:
3190 3189 return help_(u, 'shortlist')
3191 3190
3192 3191 if cmd not in norepo.split():
3193 3192 try:
3194 3193 if not repo:
3195 3194 repo = hg.repository(u, path=path)
3196 3195 u = repo.ui
3197 3196 except hg.RepoError:
3198 3197 if cmd not in optionalrepo.split():
3199 3198 raise
3200 3199 d = lambda: func(u, repo, *args, **cmdoptions)
3201 3200 else:
3202 3201 d = lambda: func(u, *args, **cmdoptions)
3203 3202
3204 3203 try:
3205 3204 if options['profile']:
3206 3205 import hotshot, hotshot.stats
3207 3206 prof = hotshot.Profile("hg.prof")
3208 3207 try:
3209 3208 try:
3210 3209 return prof.runcall(d)
3211 3210 except:
3212 3211 try:
3213 3212 u.warn(_('exception raised - generating '
3214 3213 'profile anyway\n'))
3215 3214 except:
3216 3215 pass
3217 3216 raise
3218 3217 finally:
3219 3218 prof.close()
3220 3219 stats = hotshot.stats.load("hg.prof")
3221 3220 stats.strip_dirs()
3222 3221 stats.sort_stats('time', 'calls')
3223 3222 stats.print_stats(40)
3224 3223 elif options['lsprof']:
3225 3224 try:
3226 3225 from mercurial import lsprof
3227 3226 except ImportError:
3228 3227 raise util.Abort(_(
3229 3228 'lsprof not available - install from '
3230 3229 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3231 3230 p = lsprof.Profiler()
3232 3231 p.enable(subcalls=True)
3233 3232 try:
3234 3233 return d()
3235 3234 finally:
3236 3235 p.disable()
3237 3236 stats = lsprof.Stats(p.getstats())
3238 3237 stats.sort()
3239 3238 stats.pprint(top=10, file=sys.stderr, climit=5)
3240 3239 else:
3241 3240 return d()
3242 3241 finally:
3243 3242 u.flush()
3244 3243 except:
3245 3244 # enter the debugger when we hit an exception
3246 3245 if options['debugger']:
3247 3246 pdb.post_mortem(sys.exc_info()[2])
3248 3247 u.print_exc()
3249 3248 raise
3250 3249 except ParseError, inst:
3251 3250 if inst.args[0]:
3252 3251 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3253 3252 help_(u, inst.args[0])
3254 3253 else:
3255 3254 u.warn(_("hg: %s\n") % inst.args[1])
3256 3255 help_(u, 'shortlist')
3257 3256 except AmbiguousCommand, inst:
3258 3257 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3259 3258 (inst.args[0], " ".join(inst.args[1])))
3260 3259 except UnknownCommand, inst:
3261 3260 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3262 3261 help_(u, 'shortlist')
3263 3262 except hg.RepoError, inst:
3264 3263 u.warn(_("abort: %s!\n") % inst)
3265 3264 except lock.LockHeld, inst:
3266 3265 if inst.errno == errno.ETIMEDOUT:
3267 3266 reason = _('timed out waiting for lock held by %s') % inst.locker
3268 3267 else:
3269 3268 reason = _('lock held by %s') % inst.locker
3270 3269 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3271 3270 except lock.LockUnavailable, inst:
3272 3271 u.warn(_("abort: could not lock %s: %s\n") %
3273 3272 (inst.desc or inst.filename, inst.strerror))
3274 3273 except revlog.RevlogError, inst:
3275 3274 u.warn(_("abort: %s!\n") % inst)
3276 3275 except util.SignalInterrupt:
3277 3276 u.warn(_("killed!\n"))
3278 3277 except KeyboardInterrupt:
3279 3278 try:
3280 3279 u.warn(_("interrupted!\n"))
3281 3280 except IOError, inst:
3282 3281 if inst.errno == errno.EPIPE:
3283 3282 if u.debugflag:
3284 3283 u.warn(_("\nbroken pipe\n"))
3285 3284 else:
3286 3285 raise
3287 3286 except socket.error, inst:
3288 3287 u.warn(_("abort: %s\n") % inst[1])
3289 3288 except IOError, inst:
3290 3289 if hasattr(inst, "code"):
3291 3290 u.warn(_("abort: %s\n") % inst)
3292 3291 elif hasattr(inst, "reason"):
3293 3292 try: # usually it is in the form (errno, strerror)
3294 3293 reason = inst.reason.args[1]
3295 3294 except: # it might be anything, for example a string
3296 3295 reason = inst.reason
3297 3296 u.warn(_("abort: error: %s\n") % reason)
3298 3297 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3299 3298 if u.debugflag:
3300 3299 u.warn(_("broken pipe\n"))
3301 3300 elif getattr(inst, "strerror", None):
3302 3301 if getattr(inst, "filename", None):
3303 3302 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3304 3303 else:
3305 3304 u.warn(_("abort: %s\n") % inst.strerror)
3306 3305 else:
3307 3306 raise
3308 3307 except OSError, inst:
3309 3308 if getattr(inst, "filename", None):
3310 3309 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3311 3310 else:
3312 3311 u.warn(_("abort: %s\n") % inst.strerror)
3313 3312 except util.UnexpectedOutput, inst:
3314 3313 u.warn(_("abort: %s") % inst[0])
3315 3314 if not isinstance(inst[1], basestring):
3316 3315 u.warn(" %r\n" % (inst[1],))
3317 3316 elif not inst[1]:
3318 3317 u.warn(_(" empty string\n"))
3319 3318 else:
3320 3319 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3321 3320 except util.Abort, inst:
3322 3321 u.warn(_("abort: %s\n") % inst)
3323 3322 except TypeError, inst:
3324 3323 # was this an argument error?
3325 3324 tb = traceback.extract_tb(sys.exc_info()[2])
3326 3325 if len(tb) > 2: # no
3327 3326 raise
3328 3327 u.debug(inst, "\n")
3329 3328 u.warn(_("%s: invalid arguments\n") % cmd)
3330 3329 help_(u, cmd)
3331 3330 except SystemExit, inst:
3332 3331 # Commands shouldn't sys.exit directly, but give a return code.
3333 3332 # Just in case, catch this and pass the exit code to the caller.
3334 3333 return inst.code
3335 3334 except:
3336 3335 u.warn(_("** unknown exception encountered, details follow\n"))
3337 3336 u.warn(_("** report bug details to "
3338 3337 "http://www.selenic.com/mercurial/bts\n"))
3339 3338 u.warn(_("** or mercurial@selenic.com\n"))
3340 3339 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3341 3340 % version.get_version())
3342 3341 raise
3343 3342
3344 3343 return -1
@@ -1,2013 +1,2016 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import *
9 9 from i18n import gettext as _
10 10 from demandload import *
11 11 import repo
12 12 demandload(globals(), "appendfile changegroup")
13 13 demandload(globals(), "changelog dirstate filelog manifest context")
14 14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 15 demandload(globals(), "os revlog time util")
16 16
17 17 class localrepository(repo.repository):
18 18 capabilities = ('lookup', 'changegroupsubset')
19 19 supported = ('revlogv1', 'store')
20 20 branchcache_features = ('unnamed',)
21 21
22 22 def __del__(self):
23 23 self.transhandle = None
24 24 def __init__(self, parentui, path=None, create=0):
25 25 repo.repository.__init__(self)
26 26 if not path:
27 27 p = os.getcwd()
28 28 while not os.path.isdir(os.path.join(p, ".hg")):
29 29 oldp = p
30 30 p = os.path.dirname(p)
31 31 if p == oldp:
32 32 raise repo.RepoError(_("There is no Mercurial repository"
33 33 " here (.hg not found)"))
34 34 path = p
35 35
36 36 self.root = os.path.realpath(path)
37 37 self.path = os.path.join(self.root, ".hg")
38 38 self.origroot = path
39 39 self.opener = util.opener(self.path)
40 40 self.wopener = util.opener(self.root)
41 41
42 42 if not os.path.isdir(self.path):
43 43 if create:
44 44 if not os.path.exists(path):
45 45 os.mkdir(path)
46 46 os.mkdir(self.path)
47 47 os.mkdir(os.path.join(self.path, "store"))
48 48 requirements = ("revlogv1", "store")
49 49 reqfile = self.opener("requires", "w")
50 50 for r in requirements:
51 51 reqfile.write("%s\n" % r)
52 52 reqfile.close()
53 53 # create an invalid changelog
54 54 self.opener("00changelog.i", "a").write(
55 55 '\0\0\0\2' # represents revlogv2
56 56 ' dummy changelog to prevent using the old repo layout'
57 57 )
58 58 else:
59 59 raise repo.RepoError(_("repository %s not found") % path)
60 60 elif create:
61 61 raise repo.RepoError(_("repository %s already exists") % path)
62 62 else:
63 63 # find requirements
64 64 try:
65 65 requirements = self.opener("requires").read().splitlines()
66 66 except IOError, inst:
67 67 if inst.errno != errno.ENOENT:
68 68 raise
69 69 requirements = []
70 70 # check them
71 71 for r in requirements:
72 72 if r not in self.supported:
73 73 raise repo.RepoError(_("requirement '%s' not supported") % r)
74 74
75 75 # setup store
76 76 if "store" in requirements:
77 77 self.encodefn = util.encodefilename
78 78 self.decodefn = util.decodefilename
79 79 self.spath = os.path.join(self.path, "store")
80 80 else:
81 81 self.encodefn = lambda x: x
82 82 self.decodefn = lambda x: x
83 83 self.spath = self.path
84 84 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
85 85
86 86 self.ui = ui.ui(parentui=parentui)
87 87 try:
88 88 self.ui.readconfig(self.join("hgrc"), self.root)
89 89 except IOError:
90 90 pass
91 91
92 92 v = self.ui.configrevlog()
93 93 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
94 94 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
95 95 fl = v.get('flags', None)
96 96 flags = 0
97 97 if fl != None:
98 98 for x in fl.split():
99 99 flags |= revlog.flagstr(x)
100 100 elif self.revlogv1:
101 101 flags = revlog.REVLOG_DEFAULT_FLAGS
102 102
103 103 v = self.revlogversion | flags
104 104 self.manifest = manifest.manifest(self.sopener, v)
105 105 self.changelog = changelog.changelog(self.sopener, v)
106 106
107 107 fallback = self.ui.config('ui', 'fallbackencoding')
108 108 if fallback:
109 109 util._fallbackencoding = fallback
110 110
111 111 # the changelog might not have the inline index flag
112 112 # on. If the format of the changelog is the same as found in
113 113 # .hgrc, apply any flags found in the .hgrc as well.
114 114 # Otherwise, just use the version from the changelog.
115 115 v = self.changelog.version
116 116 if v == self.revlogversion:
117 117 v |= flags
118 118 self.revlogversion = v
119 119
120 120 self.tagscache = None
121 121 self.branchcache = None
122 122 self.nodetagscache = None
123 123 self.encodepats = None
124 124 self.decodepats = None
125 125 self.transhandle = None
126 126
127 127 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
128 128
129 129 def url(self):
130 130 return 'file:' + self.root
131 131
132 132 def hook(self, name, throw=False, **args):
133 133 def callhook(hname, funcname):
134 134 '''call python hook. hook is callable object, looked up as
135 135 name in python module. if callable returns "true", hook
136 136 fails, else passes. if hook raises exception, treated as
137 137 hook failure. exception propagates if throw is "true".
138 138
139 139 reason for "true" meaning "hook failed" is so that
140 140 unmodified commands (e.g. mercurial.commands.update) can
141 141 be run as hooks without wrappers to convert return values.'''
142 142
143 143 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
144 144 d = funcname.rfind('.')
145 145 if d == -1:
146 146 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
147 147 % (hname, funcname))
148 148 modname = funcname[:d]
149 149 try:
150 150 obj = __import__(modname)
151 151 except ImportError:
152 152 try:
153 153 # extensions are loaded with hgext_ prefix
154 154 obj = __import__("hgext_%s" % modname)
155 155 except ImportError:
156 156 raise util.Abort(_('%s hook is invalid '
157 157 '(import of "%s" failed)') %
158 158 (hname, modname))
159 159 try:
160 160 for p in funcname.split('.')[1:]:
161 161 obj = getattr(obj, p)
162 162 except AttributeError, err:
163 163 raise util.Abort(_('%s hook is invalid '
164 164 '("%s" is not defined)') %
165 165 (hname, funcname))
166 166 if not callable(obj):
167 167 raise util.Abort(_('%s hook is invalid '
168 168 '("%s" is not callable)') %
169 169 (hname, funcname))
170 170 try:
171 171 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
172 172 except (KeyboardInterrupt, util.SignalInterrupt):
173 173 raise
174 174 except Exception, exc:
175 175 if isinstance(exc, util.Abort):
176 176 self.ui.warn(_('error: %s hook failed: %s\n') %
177 177 (hname, exc.args[0]))
178 178 else:
179 179 self.ui.warn(_('error: %s hook raised an exception: '
180 180 '%s\n') % (hname, exc))
181 181 if throw:
182 182 raise
183 183 self.ui.print_exc()
184 184 return True
185 185 if r:
186 186 if throw:
187 187 raise util.Abort(_('%s hook failed') % hname)
188 188 self.ui.warn(_('warning: %s hook failed\n') % hname)
189 189 return r
190 190
191 191 def runhook(name, cmd):
192 192 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
193 193 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
194 194 r = util.system(cmd, environ=env, cwd=self.root)
195 195 if r:
196 196 desc, r = util.explain_exit(r)
197 197 if throw:
198 198 raise util.Abort(_('%s hook %s') % (name, desc))
199 199 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
200 200 return r
201 201
202 202 r = False
203 203 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
204 204 if hname.split(".", 1)[0] == name and cmd]
205 205 hooks.sort()
206 206 for hname, cmd in hooks:
207 207 if cmd.startswith('python:'):
208 208 r = callhook(hname, cmd[7:].strip()) or r
209 209 else:
210 210 r = runhook(hname, cmd) or r
211 211 return r
212 212
213 213 tag_disallowed = ':\r\n'
214 214
215 215 def tag(self, name, node, message, local, user, date):
216 216 '''tag a revision with a symbolic name.
217 217
218 218 if local is True, the tag is stored in a per-repository file.
219 219 otherwise, it is stored in the .hgtags file, and a new
220 220 changeset is committed with the change.
221 221
222 222 keyword arguments:
223 223
224 224 local: whether to store tag in non-version-controlled file
225 225 (default False)
226 226
227 227 message: commit message to use if committing
228 228
229 229 user: name of user to use if committing
230 230
231 231 date: date tuple to use if committing'''
232 232
233 233 for c in self.tag_disallowed:
234 234 if c in name:
235 235 raise util.Abort(_('%r cannot be used in a tag name') % c)
236 236
237 237 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
238 238
239 239 if local:
240 240 # local tags are stored in the current charset
241 241 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
242 242 self.hook('tag', node=hex(node), tag=name, local=local)
243 243 return
244 244
245 245 for x in self.status()[:5]:
246 246 if '.hgtags' in x:
247 247 raise util.Abort(_('working copy of .hgtags is changed '
248 248 '(please commit .hgtags manually)'))
249 249
250 250 # committed tags are stored in UTF-8
251 251 line = '%s %s\n' % (hex(node), util.fromlocal(name))
252 252 self.wfile('.hgtags', 'ab').write(line)
253 253 if self.dirstate.state('.hgtags') == '?':
254 254 self.add(['.hgtags'])
255 255
256 256 self.commit(['.hgtags'], message, user, date)
257 257 self.hook('tag', node=hex(node), tag=name, local=local)
258 258
259 259 def tags(self):
260 260 '''return a mapping of tag to node'''
261 261 if not self.tagscache:
262 262 self.tagscache = {}
263 263
264 264 def parsetag(line, context):
265 265 if not line:
266 266 return
267 267 s = line.split(" ", 1)
268 268 if len(s) != 2:
269 269 self.ui.warn(_("%s: cannot parse entry\n") % context)
270 270 return
271 271 node, key = s
272 272 key = util.tolocal(key.strip()) # stored in UTF-8
273 273 try:
274 274 bin_n = bin(node)
275 275 except TypeError:
276 276 self.ui.warn(_("%s: node '%s' is not well formed\n") %
277 277 (context, node))
278 278 return
279 279 if bin_n not in self.changelog.nodemap:
280 280 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
281 281 (context, key))
282 282 return
283 283 self.tagscache[key] = bin_n
284 284
285 285 # read the tags file from each head, ending with the tip,
286 286 # and add each tag found to the map, with "newer" ones
287 287 # taking precedence
288 288 f = None
289 289 for rev, node, fnode in self._hgtagsnodes():
290 290 f = (f and f.filectx(fnode) or
291 291 self.filectx('.hgtags', fileid=fnode))
292 292 count = 0
293 293 for l in f.data().splitlines():
294 294 count += 1
295 295 parsetag(l, _("%s, line %d") % (str(f), count))
296 296
297 297 try:
298 298 f = self.opener("localtags")
299 299 count = 0
300 300 for l in f:
301 301 # localtags are stored in the local character set
302 302 # while the internal tag table is stored in UTF-8
303 303 l = util.fromlocal(l)
304 304 count += 1
305 305 parsetag(l, _("localtags, line %d") % count)
306 306 except IOError:
307 307 pass
308 308
309 309 self.tagscache['tip'] = self.changelog.tip()
310 310
311 311 return self.tagscache
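# the .hgtags and localtags entries parsed above are single lines of the
# form "<40-hex changeset id> <tag name>"; an illustrative (hypothetical)
# entry:
#   0123456789abcdef0123456789abcdef01234567 release-1.0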
312 312
313 313 def _hgtagsnodes(self):
314 314 heads = self.heads()
315 315 heads.reverse()
316 316 last = {}
317 317 ret = []
318 318 for node in heads:
319 319 c = self.changectx(node)
320 320 rev = c.rev()
321 321 try:
322 322 fnode = c.filenode('.hgtags')
323 323 except repo.LookupError:
324 324 continue
325 325 ret.append((rev, node, fnode))
326 326 if fnode in last:
327 327 ret[last[fnode]] = None
328 328 last[fnode] = len(ret) - 1
329 329 return [item for item in ret if item]
330 330
331 331 def tagslist(self):
332 332 '''return a list of tags ordered by revision'''
333 333 l = []
334 334 for t, n in self.tags().items():
335 335 try:
336 336 r = self.changelog.rev(n)
337 337 except:
338 338 r = -2 # sort to the beginning of the list if unknown
339 339 l.append((r, t, n))
340 340 l.sort()
341 341 return [(t, n) for r, t, n in l]
342 342
343 343 def nodetags(self, node):
344 344 '''return the tags associated with a node'''
345 345 if not self.nodetagscache:
346 346 self.nodetagscache = {}
347 347 for t, n in self.tags().items():
348 348 self.nodetagscache.setdefault(n, []).append(t)
349 349 return self.nodetagscache.get(node, [])
350 350
351 351 def _branchtags(self):
352 352 partial, last, lrev = self._readbranchcache()
353 353
354 354 tiprev = self.changelog.count() - 1
355 355 if lrev != tiprev:
356 356 self._updatebranchcache(partial, lrev+1, tiprev+1)
357 357 self._writebranchcache(partial, self.changelog.tip(), tiprev)
358 358
359 359 return partial
360 360
361 361 def branchtags(self):
362 362 if self.branchcache is not None:
363 363 return self.branchcache
364 364
365 365 self.branchcache = {} # avoid recursion in changectx
366 366 partial = self._branchtags()
367 367
368 368 # the branch cache is stored on disk as UTF-8, but in the local
369 369 # charset internally
370 370 for k, v in partial.items():
371 371 self.branchcache[util.tolocal(k)] = v
372 372 return self.branchcache
373 373
374 374 def _readbranchcache(self):
375 375 partial = {}
376 376 try:
377 377 f = self.opener("branches.cache")
378 378 lines = f.read().split('\n')
379 379 f.close()
380 380 features = lines.pop(0).strip()
381 381 if not features.startswith('features: '):
382 382 raise ValueError(_('branch cache: no features specified'))
383 383 features = features.split(' ', 1)[1].split()
384 384 missing_features = []
385 385 for feature in self.branchcache_features:
386 386 try:
387 387 features.remove(feature)
388 388 except ValueError, inst:
389 389 missing_features.append(feature)
390 390 if missing_features:
391 391 raise ValueError(_('branch cache: missing features: %s')
392 392 % ', '.join(missing_features))
393 393 if features:
394 394 raise ValueError(_('branch cache: unknown features: %s')
395 395 % ', '.join(features))
396 396 last, lrev = lines.pop(0).split(" ", 1)
397 397 last, lrev = bin(last), int(lrev)
398 398 if not (lrev < self.changelog.count() and
399 399 self.changelog.node(lrev) == last): # sanity check
400 400 # invalidate the cache
401 401 raise ValueError('Invalid branch cache: unknown tip')
402 402 for l in lines:
403 403 if not l: continue
404 404 node, label = l.split(" ", 1)
405 405 partial[label.strip()] = bin(node)
406 406 except (KeyboardInterrupt, util.SignalInterrupt):
407 407 raise
408 408 except Exception, inst:
409 409 if self.ui.debugflag:
410 410 self.ui.warn(str(inst), '\n')
411 411 partial, last, lrev = {}, nullid, nullrev
412 412 return partial, last, lrev
413 413
414 414 def _writebranchcache(self, branches, tip, tiprev):
415 415 try:
416 416 f = self.opener("branches.cache", "w")
417 417 f.write(" features: %s\n" % ' '.join(self.branchcache_features))
418 418 f.write("%s %s\n" % (hex(tip), tiprev))
419 419 for label, node in branches.iteritems():
420 420 f.write("%s %s\n" % (hex(node), label))
421 421 except IOError:
422 422 pass
423 423
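# A sketch of the branches.cache layout implied by the two methods above
# (assumed from _readbranchcache/_writebranchcache, not a documented
# format): a features line, a "<hex-tip> <tiprev>" line, then one
# "<hex-node> <branch-label>" line per branch. Nodes are kept as hex
# strings here to stay self-contained.
def write_branch_cache(fp, features, tiphex, tiprev, branches):
    fp.write(" features: %s\n" % " ".join(features))
    fp.write("%s %d\n" % (tiphex, tiprev))
    for label, nodehex in sorted(branches.items()):
        fp.write("%s %s\n" % (nodehex, label))

def read_branch_cache(fp):
    lines = fp.read().split("\n")
    features = lines.pop(0).strip().split(" ", 1)[1].split()
    tiphex, tiprev = lines.pop(0).split(" ", 1)
    branches = {}
    for l in lines:
        if not l:
            continue
        nodehex, label = l.split(" ", 1)
        branches[label.strip()] = nodehex
    return features, tiphex, int(tiprev), branches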
424 424 def _updatebranchcache(self, partial, start, end):
425 425 for r in xrange(start, end):
426 426 c = self.changectx(r)
427 427 b = c.branch()
428 428 partial[b] = c.node()
429 429
430 430 def lookup(self, key):
431 431 if key == '.':
432 432 key = self.dirstate.parents()[0]
433 433 if key == nullid:
434 434 raise repo.RepoError(_("no revision checked out"))
435 435 elif key == 'null':
436 436 return nullid
437 437 n = self.changelog._match(key)
438 438 if n:
439 439 return n
440 440 if key in self.tags():
441 441 return self.tags()[key]
442 442 if key in self.branchtags():
443 443 return self.branchtags()[key]
444 444 n = self.changelog._partialmatch(key)
445 445 if n:
446 446 return n
447 447 raise repo.RepoError(_("unknown revision '%s'") % key)
448 448
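# The resolution order in lookup() above ('.'/'null', exact id, tags,
# branch names, then an unambiguous prefix) can be pictured as a chain of
# table lookups; a toy sketch with plain dicts (the names here are
# illustrative assumptions, not the repository API):
def toy_lookup(key, nodes, tags, branches):
    # nodes: full hex id -> node; tags/branches: name -> node
    for table in (nodes, tags, branches):
        if key in table:
            return table[key]
    matches = [n for full, n in nodes.items() if full.startswith(key)]
    if len(matches) == 1:
        return matches[0]
    raise KeyError("unknown or ambiguous revision %r" % key)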
449 449 def dev(self):
450 450 return os.lstat(self.path).st_dev
451 451
452 452 def local(self):
453 453 return True
454 454
455 455 def join(self, f):
456 456 return os.path.join(self.path, f)
457 457
458 458 def sjoin(self, f):
459 459 f = self.encodefn(f)
460 460 return os.path.join(self.spath, f)
461 461
462 462 def wjoin(self, f):
463 463 return os.path.join(self.root, f)
464 464
465 465 def file(self, f):
466 466 if f[0] == '/':
467 467 f = f[1:]
468 468 return filelog.filelog(self.sopener, f, self.revlogversion)
469 469
470 470 def changectx(self, changeid=None):
471 471 return context.changectx(self, changeid)
472 472
473 473 def workingctx(self):
474 474 return context.workingctx(self)
475 475
476 476 def parents(self, changeid=None):
477 477 '''
478 478 get list of changectxs for parents of changeid or working directory
479 479 '''
480 480 if changeid is None:
481 481 pl = self.dirstate.parents()
482 482 else:
483 483 n = self.changelog.lookup(changeid)
484 484 pl = self.changelog.parents(n)
485 485 if pl[1] == nullid:
486 486 return [self.changectx(pl[0])]
487 487 return [self.changectx(pl[0]), self.changectx(pl[1])]
488 488
489 489 def filectx(self, path, changeid=None, fileid=None):
490 490 """changeid can be a changeset revision, node, or tag.
491 491 fileid can be a file revision or node."""
492 492 return context.filectx(self, path, changeid, fileid)
493 493
494 494 def getcwd(self):
495 495 return self.dirstate.getcwd()
496 496
497 497 def wfile(self, f, mode='r'):
498 498 return self.wopener(f, mode)
499 499
500 500 def wread(self, filename):
501 501 if self.encodepats == None:
502 502 l = []
503 503 for pat, cmd in self.ui.configitems("encode"):
504 504 mf = util.matcher(self.root, "", [pat], [], [])[1]
505 505 l.append((mf, cmd))
506 506 self.encodepats = l
507 507
508 508 data = self.wopener(filename, 'r').read()
509 509
510 510 for mf, cmd in self.encodepats:
511 511 if mf(filename):
512 512 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
513 513 data = util.filter(data, cmd)
514 514 break
515 515
516 516 return data
517 517
518 518 def wwrite(self, filename, data, fd=None):
519 519 if self.decodepats == None:
520 520 l = []
521 521 for pat, cmd in self.ui.configitems("decode"):
522 522 mf = util.matcher(self.root, "", [pat], [], [])[1]
523 523 l.append((mf, cmd))
524 524 self.decodepats = l
525 525
526 526 for mf, cmd in self.decodepats:
527 527 if mf(filename):
528 528 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
529 529 data = util.filter(data, cmd)
530 530 break
531 531
532 532 if fd:
533 533 return fd.write(data)
534 534 return self.wopener(filename, 'w').write(data)
535 535
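# A small sketch of the first-match filter dispatch wread/wwrite perform
# above, with fnmatch patterns and plain Python callables standing in for
# the [encode]/[decode] shell commands (an illustrative assumption):
import fnmatch

def apply_filters(filename, data, filters):
    # filters: list of (glob pattern, callable); the first match wins
    for pat, fn in filters:
        if fnmatch.fnmatch(filename, pat):
            return fn(data)
    return data

# example: normalize line endings for text files on the way in
# apply_filters("notes.txt", "a\r\nb\r\n", [("*.txt", lambda d: d.replace("\r\n", "\n"))])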
536 536 def transaction(self):
537 537 tr = self.transhandle
538 538 if tr != None and tr.running():
539 539 return tr.nest()
540 540
541 541 # save dirstate for rollback
542 542 try:
543 543 ds = self.opener("dirstate").read()
544 544 except IOError:
545 545 ds = ""
546 546 self.opener("journal.dirstate", "w").write(ds)
547 547
548 548 renames = [(self.sjoin("journal"), self.sjoin("undo")),
549 549 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
550 550 tr = transaction.transaction(self.ui.warn, self.sopener,
551 551 self.sjoin("journal"),
552 552 aftertrans(renames))
553 553 self.transhandle = tr
554 554 return tr
555 555
556 556 def recover(self):
557 557 l = self.lock()
558 558 if os.path.exists(self.sjoin("journal")):
559 559 self.ui.status(_("rolling back interrupted transaction\n"))
560 560 transaction.rollback(self.sopener, self.sjoin("journal"))
561 561 self.reload()
562 562 return True
563 563 else:
564 564 self.ui.warn(_("no interrupted transaction available\n"))
565 565 return False
566 566
567 567 def rollback(self, wlock=None):
568 568 if not wlock:
569 569 wlock = self.wlock()
570 570 l = self.lock()
571 571 if os.path.exists(self.sjoin("undo")):
572 572 self.ui.status(_("rolling back last transaction\n"))
573 573 transaction.rollback(self.sopener, self.sjoin("undo"))
574 574 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
575 575 self.reload()
576 576 self.wreload()
577 577 else:
578 578 self.ui.warn(_("no rollback information available\n"))
579 579
580 580 def wreload(self):
581 581 self.dirstate.read()
582 582
583 583 def reload(self):
584 584 self.changelog.load()
585 585 self.manifest.load()
586 586 self.tagscache = None
587 587 self.nodetagscache = None
588 588
589 589 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
590 590 desc=None):
591 591 try:
592 592 l = lock.lock(lockname, 0, releasefn, desc=desc)
593 593 except lock.LockHeld, inst:
594 594 if not wait:
595 595 raise
596 596 self.ui.warn(_("waiting for lock on %s held by %r\n") %
597 597 (desc, inst.locker))
598 598 # default to 600 seconds timeout
599 599 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
600 600 releasefn, desc=desc)
601 601 if acquirefn:
602 602 acquirefn()
603 603 return l
604 604
605 605 def lock(self, wait=1):
606 606 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
607 607 desc=_('repository %s') % self.origroot)
608 608
609 609 def wlock(self, wait=1):
610 610 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
611 611 self.wreload,
612 612 desc=_('working directory of %s') % self.origroot)
613 613
614 614 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
615 615 """
616 616 commit an individual file as part of a larger transaction
617 617 """
618 618
619 619 t = self.wread(fn)
620 620 fl = self.file(fn)
621 621 fp1 = manifest1.get(fn, nullid)
622 622 fp2 = manifest2.get(fn, nullid)
623 623
624 624 meta = {}
625 625 cp = self.dirstate.copied(fn)
626 626 if cp:
627 627 # Mark the new revision of this file as a copy of another
628 628 # file. This copy data will effectively act as a parent
629 629 # of this new revision. If this is a merge, the first
630 630 # parent will be the nullid (meaning "look up the copy data")
631 631 # and the second one will be the other parent. For example:
632 632 #
633 633 # 0 --- 1 --- 3 rev1 changes file foo
634 634 # \ / rev2 renames foo to bar and changes it
635 635 # \- 2 -/ rev3 should have bar with all changes and
636 636 # should record that bar descends from
637 637 # bar in rev2 and foo in rev1
638 638 #
639 639 # this allows this merge to succeed:
640 640 #
641 641 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
642 642 # \ / merging rev3 and rev4 should use bar@rev2
643 643 # \- 2 --- 4 as the merge base
644 644 #
645 645 meta["copy"] = cp
646 646 if not manifest2: # not a branch merge
647 647 meta["copyrev"] = hex(manifest1.get(cp, nullid))
648 648 fp2 = nullid
649 649 elif fp2 != nullid: # copied on remote side
650 650 meta["copyrev"] = hex(manifest1.get(cp, nullid))
651 651 elif fp1 != nullid: # copied on local side, reversed
652 652 meta["copyrev"] = hex(manifest2.get(cp))
653 653 fp2 = fp1
654 654 else: # directory rename
655 655 meta["copyrev"] = hex(manifest1.get(cp, nullid))
656 656 self.ui.debug(_(" %s: copy %s:%s\n") %
657 657 (fn, cp, meta["copyrev"]))
658 658 fp1 = nullid
659 659 elif fp2 != nullid:
660 660 # is one parent an ancestor of the other?
661 661 fpa = fl.ancestor(fp1, fp2)
662 662 if fpa == fp1:
663 663 fp1, fp2 = fp2, nullid
664 664 elif fpa == fp2:
665 665 fp2 = nullid
666 666
667 667 # is the file unmodified from the parent? report existing entry
668 668 if fp2 == nullid and not fl.cmp(fp1, t):
669 669 return fp1
670 670
671 671 changelist.append(fn)
672 672 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
673 673
674 674 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
675 675 if p1 is None:
676 676 p1, p2 = self.dirstate.parents()
677 677 return self.commit(files=files, text=text, user=user, date=date,
678 678 p1=p1, p2=p2, wlock=wlock)
679 679
680 680 def commit(self, files=None, text="", user=None, date=None,
681 681 match=util.always, force=False, lock=None, wlock=None,
682 682 force_editor=False, p1=None, p2=None, extra={}):
683 683
684 684 commit = []
685 685 remove = []
686 686 changed = []
687 687 use_dirstate = (p1 is None) # not rawcommit
688 688 extra = extra.copy()
689 689
690 690 if use_dirstate:
691 691 if files:
692 692 for f in files:
693 693 s = self.dirstate.state(f)
694 694 if s in 'nmai':
695 695 commit.append(f)
696 696 elif s == 'r':
697 697 remove.append(f)
698 698 else:
699 699 self.ui.warn(_("%s not tracked!\n") % f)
700 700 else:
701 701 changes = self.status(match=match)[:5]
702 702 modified, added, removed, deleted, unknown = changes
703 703 commit = modified + added
704 704 remove = removed
705 705 else:
706 706 commit = files
707 707
708 708 if use_dirstate:
709 709 p1, p2 = self.dirstate.parents()
710 710 update_dirstate = True
711 711 else:
712 712 p1, p2 = p1, p2 or nullid
713 713 update_dirstate = (self.dirstate.parents()[0] == p1)
714 714
715 715 c1 = self.changelog.read(p1)
716 716 c2 = self.changelog.read(p2)
717 717 m1 = self.manifest.read(c1[0]).copy()
718 718 m2 = self.manifest.read(c2[0])
719 719
720 720 if use_dirstate:
721 721 branchname = self.workingctx().branch()
722 722 try:
723 723 branchname = branchname.decode('UTF-8').encode('UTF-8')
724 724 except UnicodeDecodeError:
725 725 raise util.Abort(_('branch name not in UTF-8!'))
726 726 else:
727 727 branchname = ""
728 728
729 729 if use_dirstate:
730 730 oldname = c1[5].get("branch", "") # stored in UTF-8
731 731 if not commit and not remove and not force and p2 == nullid and \
732 732 branchname == oldname:
733 733 self.ui.status(_("nothing changed\n"))
734 734 return None
735 735
736 736 xp1 = hex(p1)
737 737 if p2 == nullid: xp2 = ''
738 738 else: xp2 = hex(p2)
739 739
740 740 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
741 741
742 742 if not wlock:
743 743 wlock = self.wlock()
744 744 if not lock:
745 745 lock = self.lock()
746 746 tr = self.transaction()
747 747
748 748 # check in files
749 749 new = {}
750 750 linkrev = self.changelog.count()
751 751 commit.sort()
752 752 for f in commit:
753 753 self.ui.note(f + "\n")
754 754 try:
755 755 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
756 756 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
757 757 except IOError:
758 758 if use_dirstate:
759 759 self.ui.warn(_("trouble committing %s!\n") % f)
760 760 raise
761 761 else:
762 762 remove.append(f)
763 763
764 764 # update manifest
765 765 m1.update(new)
766 766 remove.sort()
767 767
768 768 for f in remove:
769 769 if f in m1:
770 770 del m1[f]
771 771 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
772 772
773 773 # add changeset
774 774 new = new.keys()
775 775 new.sort()
776 776
777 777 user = user or self.ui.username()
778 778 if not text or force_editor:
779 779 edittext = []
780 780 if text:
781 781 edittext.append(text)
782 782 edittext.append("")
783 783 edittext.append("HG: user: %s" % user)
784 784 if p2 != nullid:
785 785 edittext.append("HG: branch merge")
786 786 edittext.extend(["HG: changed %s" % f for f in changed])
787 787 edittext.extend(["HG: removed %s" % f for f in remove])
788 788 if not changed and not remove:
789 789 edittext.append("HG: no files changed")
790 790 edittext.append("")
791 791 # run editor in the repository root
792 792 olddir = os.getcwd()
793 793 os.chdir(self.root)
794 794 text = self.ui.edit("\n".join(edittext), user)
795 795 os.chdir(olddir)
796 796
797 797 lines = [line.rstrip() for line in text.rstrip().splitlines()]
798 798 while lines and not lines[0]:
799 799 del lines[0]
800 800 if not lines:
801 801 return None
802 802 text = '\n'.join(lines)
803 803 if branchname:
804 804 extra["branch"] = branchname
805 805 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
806 806 user, date, extra)
807 807 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
808 808 parent2=xp2)
809 809 tr.close()
810 810
811 811 if use_dirstate or update_dirstate:
812 812 self.dirstate.setparents(n)
813 813 if use_dirstate:
814 814 self.dirstate.update(new, "n")
815 815 self.dirstate.forget(remove)
816 816
817 817 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
818 818 return n
819 819
820 820 def walk(self, node=None, files=[], match=util.always, badmatch=None):
821 821 '''
822 822 walk recursively through the directory tree or a given
823 823 changeset, finding all files matched by the match
824 824 function
825 825
826 826 results are yielded in a tuple (src, filename), where src
827 827 is one of:
828 828 'f' the file was found in the directory tree
829 829 'm' the file was only in the dirstate and not in the tree
830 830 'b' file was not found and matched badmatch
831 831 '''
832 832
833 833 if node:
834 834 fdict = dict.fromkeys(files)
835 # for dirstate.walk, files=['.'] means "walk the whole tree".
836 # follow that here, too
837 fdict.pop('.', None)
835 838 mdict = self.manifest.read(self.changelog.read(node)[0])
836 839 mfiles = mdict.keys()
837 840 mfiles.sort()
838 841 for fn in mfiles:
839 842 for ffn in fdict:
840 843 # match if the file is the exact name or a directory
841 844 if ffn == fn or fn.startswith("%s/" % ffn):
842 845 del fdict[ffn]
843 846 break
844 847 if match(fn):
845 848 yield 'm', fn
846 849 ffiles = fdict.keys()
847 850 ffiles.sort()
848 851 for fn in ffiles:
849 852 if badmatch and badmatch(fn):
850 853 if match(fn):
851 854 yield 'b', fn
852 855 else:
853 856 self.ui.warn(_('%s: No such file in rev %s\n') % (
854 857 util.pathto(self.getcwd(), fn), short(node)))
855 858 else:
856 859 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
857 860 yield src, fn
858 861
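# The directory test in walk() above (an exact name, or anything below
# that directory) reduces to a prefix check on whole path components:
def under(name, fn):
    return fn == name or fn.startswith(name + "/")

# under("src", "src/module.py") -> True
# under("src", "srcdir/file")   -> False (no partial-component matches)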
859 862 def status(self, node1=None, node2=None, files=[], match=util.always,
860 863 wlock=None, list_ignored=False, list_clean=False):
861 864 """return status of files between two nodes or node and working directory
862 865
863 866 If node1 is None, use the first dirstate parent instead.
864 867 If node2 is None, compare node1 with working directory.
865 868 """
866 869
867 870 def fcmp(fn, mf):
868 871 t1 = self.wread(fn)
869 872 return self.file(fn).cmp(mf.get(fn, nullid), t1)
870 873
871 874 def mfmatches(node):
872 875 change = self.changelog.read(node)
873 876 mf = self.manifest.read(change[0]).copy()
874 877 for fn in mf.keys():
875 878 if not match(fn):
876 879 del mf[fn]
877 880 return mf
878 881
879 882 modified, added, removed, deleted, unknown = [], [], [], [], []
880 883 ignored, clean = [], []
881 884
882 885 compareworking = False
883 886 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
884 887 compareworking = True
885 888
886 889 if not compareworking:
887 890 # read the manifest from node1 before the manifest from node2,
888 891 # so that we'll hit the manifest cache if we're going through
889 892 # all the revisions in parent->child order.
890 893 mf1 = mfmatches(node1)
891 894
892 895 # are we comparing the working directory?
893 896 if not node2:
894 897 if not wlock:
895 898 try:
896 899 wlock = self.wlock(wait=0)
897 900 except lock.LockException:
898 901 wlock = None
899 902 (lookup, modified, added, removed, deleted, unknown,
900 903 ignored, clean) = self.dirstate.status(files, match,
901 904 list_ignored, list_clean)
902 905
903 906 # are we comparing working dir against its parent?
904 907 if compareworking:
905 908 if lookup:
906 909 # do a full compare of any files that might have changed
907 910 mf2 = mfmatches(self.dirstate.parents()[0])
908 911 for f in lookup:
909 912 if fcmp(f, mf2):
910 913 modified.append(f)
911 914 else:
912 915 clean.append(f)
913 916 if wlock is not None:
914 917 self.dirstate.update([f], "n")
915 918 else:
916 919 # we are comparing working dir against non-parent
917 920 # generate a pseudo-manifest for the working dir
918 921 # XXX: create it in dirstate.py ?
919 922 mf2 = mfmatches(self.dirstate.parents()[0])
920 923 for f in lookup + modified + added:
921 924 mf2[f] = ""
922 925 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
923 926 for f in removed:
924 927 if f in mf2:
925 928 del mf2[f]
926 929 else:
927 930 # we are comparing two revisions
928 931 mf2 = mfmatches(node2)
929 932
930 933 if not compareworking:
931 934 # flush lists from dirstate before comparing manifests
932 935 modified, added, clean = [], [], []
933 936
934 937 # make sure to sort the files so we talk to the disk in a
935 938 # reasonable order
936 939 mf2keys = mf2.keys()
937 940 mf2keys.sort()
938 941 for fn in mf2keys:
939 942 if mf1.has_key(fn):
940 943 if mf1.flags(fn) != mf2.flags(fn) or \
941 944 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
942 945 modified.append(fn)
943 946 elif list_clean:
944 947 clean.append(fn)
945 948 del mf1[fn]
946 949 else:
947 950 added.append(fn)
948 951
949 952 removed = mf1.keys()
950 953
951 954 # sort and return results:
952 955 for l in modified, added, removed, deleted, unknown, ignored, clean:
953 956 l.sort()
954 957 return (modified, added, removed, deleted, unknown, ignored, clean)
955 958
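# The two-manifest comparison in status() above is essentially a dict
# diff; a simplified sketch that ignores the exec-flag and working-copy
# special cases handled by the real code:
def diff_manifests(mf1, mf2):
    # mf1, mf2: filename -> content hash
    mf1 = dict(mf1)
    modified, added, clean = [], [], []
    for fn, h in mf2.items():
        if fn in mf1:
            (modified if mf1[fn] != h else clean).append(fn)
            del mf1[fn]
        else:
            added.append(fn)
    removed = sorted(mf1)
    return sorted(modified), sorted(added), removed, sorted(clean)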
956 959 def add(self, list, wlock=None):
957 960 if not wlock:
958 961 wlock = self.wlock()
959 962 for f in list:
960 963 p = self.wjoin(f)
961 964 if not os.path.exists(p):
962 965 self.ui.warn(_("%s does not exist!\n") % f)
963 966 elif not os.path.isfile(p):
964 967 self.ui.warn(_("%s not added: only files supported currently\n")
965 968 % f)
966 969 elif self.dirstate.state(f) in 'an':
967 970 self.ui.warn(_("%s already tracked!\n") % f)
968 971 else:
969 972 self.dirstate.update([f], "a")
970 973
971 974 def forget(self, list, wlock=None):
972 975 if not wlock:
973 976 wlock = self.wlock()
974 977 for f in list:
975 978 if self.dirstate.state(f) not in 'ai':
976 979 self.ui.warn(_("%s not added!\n") % f)
977 980 else:
978 981 self.dirstate.forget([f])
979 982
980 983 def remove(self, list, unlink=False, wlock=None):
981 984 if unlink:
982 985 for f in list:
983 986 try:
984 987 util.unlink(self.wjoin(f))
985 988 except OSError, inst:
986 989 if inst.errno != errno.ENOENT:
987 990 raise
988 991 if not wlock:
989 992 wlock = self.wlock()
990 993 for f in list:
991 994 p = self.wjoin(f)
992 995 if os.path.exists(p):
993 996 self.ui.warn(_("%s still exists!\n") % f)
994 997 elif self.dirstate.state(f) == 'a':
995 998 self.dirstate.forget([f])
996 999 elif f not in self.dirstate:
997 1000 self.ui.warn(_("%s not tracked!\n") % f)
998 1001 else:
999 1002 self.dirstate.update([f], "r")
1000 1003
1001 1004 def undelete(self, list, wlock=None):
1002 1005 p = self.dirstate.parents()[0]
1003 1006 mn = self.changelog.read(p)[0]
1004 1007 m = self.manifest.read(mn)
1005 1008 if not wlock:
1006 1009 wlock = self.wlock()
1007 1010 for f in list:
1008 1011 if self.dirstate.state(f) not in "r":
1009 1012 self.ui.warn(_("%s not removed!\n") % f)
1010 1013 else:
1011 1014 t = self.file(f).read(m[f])
1012 1015 self.wwrite(f, t)
1013 1016 util.set_exec(self.wjoin(f), m.execf(f))
1014 1017 self.dirstate.update([f], "n")
1015 1018
1016 1019 def copy(self, source, dest, wlock=None):
1017 1020 p = self.wjoin(dest)
1018 1021 if not os.path.exists(p):
1019 1022 self.ui.warn(_("%s does not exist!\n") % dest)
1020 1023 elif not os.path.isfile(p):
1021 1024 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
1022 1025 else:
1023 1026 if not wlock:
1024 1027 wlock = self.wlock()
1025 1028 if self.dirstate.state(dest) == '?':
1026 1029 self.dirstate.update([dest], "a")
1027 1030 self.dirstate.copy(source, dest)
1028 1031
1029 1032 def heads(self, start=None):
1030 1033 heads = self.changelog.heads(start)
1031 1034 # sort the output in rev descending order
1032 1035 heads = [(-self.changelog.rev(h), h) for h in heads]
1033 1036 heads.sort()
1034 1037 return [n for (r, n) in heads]
1035 1038
1036 1039 # branchlookup returns a dict giving a list of branches for
1037 1040 # each head. A branch is defined as the tag of a node or
1038 1041 # the branch of the node's parents. If a node has multiple
1039 1042 # branch tags, tags are eliminated if they are visible from other
1040 1043 # branch tags.
1041 1044 #
1042 1045 # So, for this graph: a->b->c->d->e
1043 1046 # \ /
1044 1047 # aa -----/
1045 1048 # a has tag 2.6.12
1046 1049 # d has tag 2.6.13
1047 1050 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
1048 1051 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
1049 1052 # from the list.
1050 1053 #
1051 1054 # It is possible that more than one head will have the same branch tag.
1052 1055 # callers need to check the result for multiple heads under the same
1053 1056 # branch tag if that is a problem for them (ie checkout of a specific
1054 1057 # branch).
1055 1058 #
1056 1059 # passing in a specific branch will limit the depth of the search
1057 1060 # through the parents. It won't limit the branches returned in the
1058 1061 # result though.
1059 1062 def branchlookup(self, heads=None, branch=None):
1060 1063 if not heads:
1061 1064 heads = self.heads()
1062 1065 headt = [ h for h in heads ]
1063 1066 chlog = self.changelog
1064 1067 branches = {}
1065 1068 merges = []
1066 1069 seenmerge = {}
1067 1070
1068 1071 # traverse the tree once for each head, recording in the branches
1069 1072 # dict which tags are visible from this head. The branches
1070 1073 # dict also records which tags are visible from each tag
1071 1074 # while we traverse.
1072 1075 while headt or merges:
1073 1076 if merges:
1074 1077 n, found = merges.pop()
1075 1078 visit = [n]
1076 1079 else:
1077 1080 h = headt.pop()
1078 1081 visit = [h]
1079 1082 found = [h]
1080 1083 seen = {}
1081 1084 while visit:
1082 1085 n = visit.pop()
1083 1086 if n in seen:
1084 1087 continue
1085 1088 pp = chlog.parents(n)
1086 1089 tags = self.nodetags(n)
1087 1090 if tags:
1088 1091 for x in tags:
1089 1092 if x == 'tip':
1090 1093 continue
1091 1094 for f in found:
1092 1095 branches.setdefault(f, {})[n] = 1
1093 1096 branches.setdefault(n, {})[n] = 1
1094 1097 break
1095 1098 if n not in found:
1096 1099 found.append(n)
1097 1100 if branch in tags:
1098 1101 continue
1099 1102 seen[n] = 1
1100 1103 if pp[1] != nullid and n not in seenmerge:
1101 1104 merges.append((pp[1], [x for x in found]))
1102 1105 seenmerge[n] = 1
1103 1106 if pp[0] != nullid:
1104 1107 visit.append(pp[0])
1105 1108 # traverse the branches dict, eliminating branch tags from each
1106 1109 # head that are visible from another branch tag for that head.
1107 1110 out = {}
1108 1111 viscache = {}
1109 1112 for h in heads:
1110 1113 def visible(node):
1111 1114 if node in viscache:
1112 1115 return viscache[node]
1113 1116 ret = {}
1114 1117 visit = [node]
1115 1118 while visit:
1116 1119 x = visit.pop()
1117 1120 if x in viscache:
1118 1121 ret.update(viscache[x])
1119 1122 elif x not in ret:
1120 1123 ret[x] = 1
1121 1124 if x in branches:
1122 1125 visit[len(visit):] = branches[x].keys()
1123 1126 viscache[node] = ret
1124 1127 return ret
1125 1128 if h not in branches:
1126 1129 continue
1127 1130 # O(n^2), but somewhat limited. This only searches the
1128 1131 # tags visible from a specific head, not all the tags in the
1129 1132 # whole repo.
1130 1133 for b in branches[h]:
1131 1134 vis = False
1132 1135 for bb in branches[h].keys():
1133 1136 if b != bb:
1134 1137 if b in visible(bb):
1135 1138 vis = True
1136 1139 break
1137 1140 if not vis:
1138 1141 l = out.setdefault(h, [])
1139 1142 l[len(l):] = self.nodetags(b)
1140 1143 return out
1141 1144
1142 1145 def branches(self, nodes):
1143 1146 if not nodes:
1144 1147 nodes = [self.changelog.tip()]
1145 1148 b = []
1146 1149 for n in nodes:
1147 1150 t = n
1148 1151 while 1:
1149 1152 p = self.changelog.parents(n)
1150 1153 if p[1] != nullid or p[0] == nullid:
1151 1154 b.append((t, n, p[0], p[1]))
1152 1155 break
1153 1156 n = p[0]
1154 1157 return b
1155 1158
1156 1159 def between(self, pairs):
1157 1160 r = []
1158 1161
1159 1162 for top, bottom in pairs:
1160 1163 n, l, i = top, [], 0
1161 1164 f = 1
1162 1165
1163 1166 while n != bottom:
1164 1167 p = self.changelog.parents(n)[0]
1165 1168 if i == f:
1166 1169 l.append(n)
1167 1170 f = f * 2
1168 1171 n = p
1169 1172 i += 1
1170 1173
1171 1174 r.append(l)
1172 1175
1173 1176 return r
1174 1177
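# between() samples the chain from top down toward bottom at
# exponentially growing distances (1, 2, 4, 8, ...); those samples are
# what the discovery search in findincoming narrows later. A sketch with
# integers, where "first parent" is simply n - 1:
def sample_between(top, bottom):
    n, picked, i, f = top, [], 0, 1
    while n != bottom:
        p = n - 1
        if i == f:
            picked.append(n)
            f *= 2
        n = p
        i += 1
    return picked

# sample_between(20, 0) -> [19, 18, 16, 12, 4]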
1175 1178 def findincoming(self, remote, base=None, heads=None, force=False):
1176 1179 """Return list of roots of the subsets of missing nodes from remote
1177 1180
1178 1181 If base dict is specified, assume that these nodes and their parents
1179 1182 exist on the remote side and that no child of a node of base exists
1180 1183 in both remote and self.
1181 1184 Furthermore base will be updated to include the nodes that exist
1182 1185 in both self and remote but whose children do not exist in both.
1183 1186 If a list of heads is specified, return only nodes which are heads
1184 1187 or ancestors of these heads.
1185 1188
1186 1189 All the ancestors of base are in self and in remote.
1187 1190 All the descendants of the list returned are missing in self.
1188 1191 (and so we know that the rest of the nodes are missing in remote, see
1189 1192 outgoing)
1190 1193 """
1191 1194 m = self.changelog.nodemap
1192 1195 search = []
1193 1196 fetch = {}
1194 1197 seen = {}
1195 1198 seenbranch = {}
1196 1199 if base == None:
1197 1200 base = {}
1198 1201
1199 1202 if not heads:
1200 1203 heads = remote.heads()
1201 1204
1202 1205 if self.changelog.tip() == nullid:
1203 1206 base[nullid] = 1
1204 1207 if heads != [nullid]:
1205 1208 return [nullid]
1206 1209 return []
1207 1210
1208 1211 # assume we're closer to the tip than the root
1209 1212 # and start by examining the heads
1210 1213 self.ui.status(_("searching for changes\n"))
1211 1214
1212 1215 unknown = []
1213 1216 for h in heads:
1214 1217 if h not in m:
1215 1218 unknown.append(h)
1216 1219 else:
1217 1220 base[h] = 1
1218 1221
1219 1222 if not unknown:
1220 1223 return []
1221 1224
1222 1225 req = dict.fromkeys(unknown)
1223 1226 reqcnt = 0
1224 1227
1225 1228 # search through remote branches
1226 1229 # a 'branch' here is a linear segment of history, with four parts:
1227 1230 # head, root, first parent, second parent
1228 1231 # (a branch always has two parents (or none) by definition)
1229 1232 unknown = remote.branches(unknown)
1230 1233 while unknown:
1231 1234 r = []
1232 1235 while unknown:
1233 1236 n = unknown.pop(0)
1234 1237 if n[0] in seen:
1235 1238 continue
1236 1239
1237 1240 self.ui.debug(_("examining %s:%s\n")
1238 1241 % (short(n[0]), short(n[1])))
1239 1242 if n[0] == nullid: # found the end of the branch
1240 1243 pass
1241 1244 elif n in seenbranch:
1242 1245 self.ui.debug(_("branch already found\n"))
1243 1246 continue
1244 1247 elif n[1] and n[1] in m: # do we know the base?
1245 1248 self.ui.debug(_("found incomplete branch %s:%s\n")
1246 1249 % (short(n[0]), short(n[1])))
1247 1250 search.append(n) # schedule branch range for scanning
1248 1251 seenbranch[n] = 1
1249 1252 else:
1250 1253 if n[1] not in seen and n[1] not in fetch:
1251 1254 if n[2] in m and n[3] in m:
1252 1255 self.ui.debug(_("found new changeset %s\n") %
1253 1256 short(n[1]))
1254 1257 fetch[n[1]] = 1 # earliest unknown
1255 1258 for p in n[2:4]:
1256 1259 if p in m:
1257 1260 base[p] = 1 # latest known
1258 1261
1259 1262 for p in n[2:4]:
1260 1263 if p not in req and p not in m:
1261 1264 r.append(p)
1262 1265 req[p] = 1
1263 1266 seen[n[0]] = 1
1264 1267
1265 1268 if r:
1266 1269 reqcnt += 1
1267 1270 self.ui.debug(_("request %d: %s\n") %
1268 1271 (reqcnt, " ".join(map(short, r))))
1269 1272 for p in xrange(0, len(r), 10):
1270 1273 for b in remote.branches(r[p:p+10]):
1271 1274 self.ui.debug(_("received %s:%s\n") %
1272 1275 (short(b[0]), short(b[1])))
1273 1276 unknown.append(b)
1274 1277
1275 1278 # do binary search on the branches we found
1276 1279 while search:
1277 1280 n = search.pop(0)
1278 1281 reqcnt += 1
1279 1282 l = remote.between([(n[0], n[1])])[0]
1280 1283 l.append(n[1])
1281 1284 p = n[0]
1282 1285 f = 1
1283 1286 for i in l:
1284 1287 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1285 1288 if i in m:
1286 1289 if f <= 2:
1287 1290 self.ui.debug(_("found new branch changeset %s\n") %
1288 1291 short(p))
1289 1292 fetch[p] = 1
1290 1293 base[i] = 1
1291 1294 else:
1292 1295 self.ui.debug(_("narrowed branch search to %s:%s\n")
1293 1296 % (short(p), short(i)))
1294 1297 search.append((p, i))
1295 1298 break
1296 1299 p, f = i, f * 2
1297 1300
1298 1301 # sanity check our fetch list
1299 1302 for f in fetch.keys():
1300 1303 if f in m:
1301 1304 raise repo.RepoError(_("already have changeset ") + short(f))
1302 1305
1303 1306 if base.keys() == [nullid]:
1304 1307 if force:
1305 1308 self.ui.warn(_("warning: repository is unrelated\n"))
1306 1309 else:
1307 1310 raise util.Abort(_("repository is unrelated"))
1308 1311
1309 1312 self.ui.debug(_("found new changesets starting at ") +
1310 1313 " ".join([short(f) for f in fetch]) + "\n")
1311 1314
1312 1315 self.ui.debug(_("%d total queries\n") % reqcnt)
1313 1316
1314 1317 return fetch.keys()
1315 1318
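# A sketch of one narrowing pass of the branch search above: have() and
# the sample list are stand-ins (assumptions), not the peer protocol. The
# samples come from between() on the remote, closest to the unknown head
# first, with the known base appended last.
def narrow_once(head, samples, have):
    p, f = head, 1
    for i in samples:
        if have(i):
            if f <= 2:
                return 'fetch', p        # p is the earliest unknown node
            return 'search', (p, i)      # keep bisecting between p and i
        p, f = i, f * 2
    raise ValueError('the final sample is expected to be known')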
1316 1319 def findoutgoing(self, remote, base=None, heads=None, force=False):
1317 1320 """Return list of nodes that are roots of subsets not in remote
1318 1321
1319 1322 If base dict is specified, assume that these nodes and their parents
1320 1323 exist on the remote side.
1321 1324 If a list of heads is specified, return only nodes which are heads
1322 1325 or ancestors of these heads, and return a second element which
1323 1326 contains all remote heads which get new children.
1324 1327 """
1325 1328 if base == None:
1326 1329 base = {}
1327 1330 self.findincoming(remote, base, heads, force=force)
1328 1331
1329 1332 self.ui.debug(_("common changesets up to ")
1330 1333 + " ".join(map(short, base.keys())) + "\n")
1331 1334
1332 1335 remain = dict.fromkeys(self.changelog.nodemap)
1333 1336
1334 1337 # prune everything remote has from the tree
1335 1338 del remain[nullid]
1336 1339 remove = base.keys()
1337 1340 while remove:
1338 1341 n = remove.pop(0)
1339 1342 if n in remain:
1340 1343 del remain[n]
1341 1344 for p in self.changelog.parents(n):
1342 1345 remove.append(p)
1343 1346
1344 1347 # find every node whose parents have been pruned
1345 1348 subset = []
1346 1349 # find every remote head that will get new children
1347 1350 updated_heads = {}
1348 1351 for n in remain:
1349 1352 p1, p2 = self.changelog.parents(n)
1350 1353 if p1 not in remain and p2 not in remain:
1351 1354 subset.append(n)
1352 1355 if heads:
1353 1356 if p1 in heads:
1354 1357 updated_heads[p1] = True
1355 1358 if p2 in heads:
1356 1359 updated_heads[p2] = True
1357 1360
1358 1361 # this is the set of all roots we have to push
1359 1362 if heads:
1360 1363 return subset, updated_heads.keys()
1361 1364 else:
1362 1365 return subset
1363 1366
1364 1367 def pull(self, remote, heads=None, force=False, lock=None):
1365 1368 mylock = False
1366 1369 if not lock:
1367 1370 lock = self.lock()
1368 1371 mylock = True
1369 1372
1370 1373 try:
1371 1374 fetch = self.findincoming(remote, force=force)
1372 1375 if fetch == [nullid]:
1373 1376 self.ui.status(_("requesting all changes\n"))
1374 1377
1375 1378 if not fetch:
1376 1379 self.ui.status(_("no changes found\n"))
1377 1380 return 0
1378 1381
1379 1382 if heads is None:
1380 1383 cg = remote.changegroup(fetch, 'pull')
1381 1384 else:
1382 1385 if 'changegroupsubset' not in remote.capabilities:
1383 1386 raise util.Abort(_("Partial pull cannot be done because the other repository doesn't support changegroupsubset."))
1384 1387 cg = remote.changegroupsubset(fetch, heads, 'pull')
1385 1388 return self.addchangegroup(cg, 'pull', remote.url())
1386 1389 finally:
1387 1390 if mylock:
1388 1391 lock.release()
1389 1392
1390 1393 def push(self, remote, force=False, revs=None):
1391 1394 # there are two ways to push to remote repo:
1392 1395 #
1393 1396 # addchangegroup assumes local user can lock remote
1394 1397 # repo (local filesystem, old ssh servers).
1395 1398 #
1396 1399 # unbundle assumes local user cannot lock remote repo (new ssh
1397 1400 # servers, http servers).
1398 1401
1399 1402 if remote.capable('unbundle'):
1400 1403 return self.push_unbundle(remote, force, revs)
1401 1404 return self.push_addchangegroup(remote, force, revs)
1402 1405
1403 1406 def prepush(self, remote, force, revs):
1404 1407 base = {}
1405 1408 remote_heads = remote.heads()
1406 1409 inc = self.findincoming(remote, base, remote_heads, force=force)
1407 1410
1408 1411 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1409 1412 if revs is not None:
1410 1413 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1411 1414 else:
1412 1415 bases, heads = update, self.changelog.heads()
1413 1416
1414 1417 if not bases:
1415 1418 self.ui.status(_("no changes found\n"))
1416 1419 return None, 1
1417 1420 elif not force:
1418 1421 # check if we're creating new remote heads
1419 1422 # to be a remote head after push, node must be either
1420 1423 # - unknown locally
1421 1424 # - a local outgoing head descended from update
1422 1425 # - a remote head that's known locally and not
1423 1426 # ancestral to an outgoing head
1424 1427
1425 1428 warn = 0
1426 1429
1427 1430 if remote_heads == [nullid]:
1428 1431 warn = 0
1429 1432 elif not revs and len(heads) > len(remote_heads):
1430 1433 warn = 1
1431 1434 else:
1432 1435 newheads = list(heads)
1433 1436 for r in remote_heads:
1434 1437 if r in self.changelog.nodemap:
1435 1438 desc = self.changelog.heads(r, heads)
1436 1439 l = [h for h in heads if h in desc]
1437 1440 if not l:
1438 1441 newheads.append(r)
1439 1442 else:
1440 1443 newheads.append(r)
1441 1444 if len(newheads) > len(remote_heads):
1442 1445 warn = 1
1443 1446
1444 1447 if warn:
1445 1448 self.ui.warn(_("abort: push creates new remote branches!\n"))
1446 1449 self.ui.status(_("(did you forget to merge?"
1447 1450 " use push -f to force)\n"))
1448 1451 return None, 1
1449 1452 elif inc:
1450 1453 self.ui.warn(_("note: unsynced remote changes!\n"))
1451 1454
1452 1455
1453 1456 if revs is None:
1454 1457 cg = self.changegroup(update, 'push')
1455 1458 else:
1456 1459 cg = self.changegroupsubset(update, revs, 'push')
1457 1460 return cg, remote_heads
1458 1461
1459 1462 def push_addchangegroup(self, remote, force, revs):
1460 1463 lock = remote.lock()
1461 1464
1462 1465 ret = self.prepush(remote, force, revs)
1463 1466 if ret[0] is not None:
1464 1467 cg, remote_heads = ret
1465 1468 return remote.addchangegroup(cg, 'push', self.url())
1466 1469 return ret[1]
1467 1470
1468 1471 def push_unbundle(self, remote, force, revs):
1469 1472 # local repo finds heads on server, finds out what revs it
1470 1473 # must push. once revs transferred, if server finds it has
1471 1474 # different heads (someone else won commit/push race), server
1472 1475 # aborts.
1473 1476
1474 1477 ret = self.prepush(remote, force, revs)
1475 1478 if ret[0] is not None:
1476 1479 cg, remote_heads = ret
1477 1480 if force: remote_heads = ['force']
1478 1481 return remote.unbundle(cg, remote_heads, 'push')
1479 1482 return ret[1]
1480 1483
1481 1484 def changegroupinfo(self, nodes):
1482 1485 self.ui.note(_("%d changesets found\n") % len(nodes))
1483 1486 if self.ui.debugflag:
1484 1487 self.ui.debug(_("List of changesets:\n"))
1485 1488 for node in nodes:
1486 1489 self.ui.debug("%s\n" % hex(node))
1487 1490
1488 1491 def changegroupsubset(self, bases, heads, source):
1489 1492 """This function generates a changegroup consisting of all the nodes
1490 1493 that are descendants of any of the bases, and ancestors of any of
1491 1494 the heads.
1492 1495
1493 1496 It is fairly complex as determining which filenodes and which
1494 1497 manifest nodes need to be included for the changeset to be complete
1495 1498 is non-trivial.
1496 1499
1497 1500 Another wrinkle is doing the reverse, figuring out which changeset in
1498 1501 the changegroup a particular filenode or manifestnode belongs to."""
1499 1502
1500 1503 self.hook('preoutgoing', throw=True, source=source)
1501 1504
1502 1505 # Set up some initial variables
1503 1506 # Make it easy to refer to self.changelog
1504 1507 cl = self.changelog
1505 1508 # msng is short for missing - compute the list of changesets in this
1506 1509 # changegroup.
1507 1510 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1508 1511 self.changegroupinfo(msng_cl_lst)
1509 1512 # Some bases may turn out to be superfluous, and some heads may be
1510 1513 # too. nodesbetween will return the minimal set of bases and heads
1511 1514 # necessary to re-create the changegroup.
1512 1515
1513 1516 # Known heads are the list of heads that it is assumed the recipient
1514 1517 # of this changegroup will know about.
1515 1518 knownheads = {}
1516 1519 # We assume that all parents of bases are known heads.
1517 1520 for n in bases:
1518 1521 for p in cl.parents(n):
1519 1522 if p != nullid:
1520 1523 knownheads[p] = 1
1521 1524 knownheads = knownheads.keys()
1522 1525 if knownheads:
1523 1526 # Now that we know what heads are known, we can compute which
1524 1527 # changesets are known. The recipient must know about all
1525 1528 # changesets required to reach the known heads from the null
1526 1529 # changeset.
1527 1530 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1528 1531 junk = None
1529 1532 # Transform the list into an ersatz set.
1530 1533 has_cl_set = dict.fromkeys(has_cl_set)
1531 1534 else:
1532 1535 # If there were no known heads, the recipient cannot be assumed to
1533 1536 # know about any changesets.
1534 1537 has_cl_set = {}
1535 1538
1536 1539 # Make it easy to refer to self.manifest
1537 1540 mnfst = self.manifest
1538 1541 # We don't know which manifests are missing yet
1539 1542 msng_mnfst_set = {}
1540 1543 # Nor do we know which filenodes are missing.
1541 1544 msng_filenode_set = {}
1542 1545
1543 1546 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1544 1547 junk = None
1545 1548
1546 1549 # A changeset always belongs to itself, so the changenode lookup
1547 1550 # function for a changenode is identity.
1548 1551 def identity(x):
1549 1552 return x
1550 1553
1551 1554 # A function generating function. Sets up an environment for the
1552 1555 # inner function.
1553 1556 def cmp_by_rev_func(revlog):
1554 1557 # Compare two nodes by their revision number in the environment's
1555 1558 # revision history. Since the revision number both represents the
1556 1559 # most efficient order to read the nodes in, and represents a
1557 1560 # topological sorting of the nodes, this function is often useful.
1558 1561 def cmp_by_rev(a, b):
1559 1562 return cmp(revlog.rev(a), revlog.rev(b))
1560 1563 return cmp_by_rev
1561 1564
1562 1565 # If we determine that a particular file or manifest node must be a
1563 1566 # node that the recipient of the changegroup will already have, we can
1564 1567 # also assume the recipient will have all the parents. This function
1565 1568 # prunes them from the set of missing nodes.
1566 1569 def prune_parents(revlog, hasset, msngset):
1567 1570 haslst = hasset.keys()
1568 1571 haslst.sort(cmp_by_rev_func(revlog))
1569 1572 for node in haslst:
1570 1573 parentlst = [p for p in revlog.parents(node) if p != nullid]
1571 1574 while parentlst:
1572 1575 n = parentlst.pop()
1573 1576 if n not in hasset:
1574 1577 hasset[n] = 1
1575 1578 p = [p for p in revlog.parents(n) if p != nullid]
1576 1579 parentlst.extend(p)
1577 1580 for n in hasset:
1578 1581 msngset.pop(n, None)
1579 1582
1580 1583 # This is a function generating function used to set up an environment
1581 1584 # for the inner function to execute in.
1582 1585 def manifest_and_file_collector(changedfileset):
1583 1586 # This is an information gathering function that gathers
1584 1587 # information from each changeset node that goes out as part of
1585 1588 # the changegroup. The information gathered is a list of which
1586 1589 # manifest nodes are potentially required (the recipient may
1587 1590 # already have them) and total list of all files which were
1588 1591 # changed in any changeset in the changegroup.
1589 1592 #
1590 1593 # We also remember, for each manifest, the first changenode we saw
1591 1594 # that referenced it, so we can later determine which changenode
1592 1595 # 'owns' the manifest.
1593 1596 def collect_manifests_and_files(clnode):
1594 1597 c = cl.read(clnode)
1595 1598 for f in c[3]:
1596 1599 # This is to make sure we only have one instance of each
1597 1600 # filename string for each filename.
1598 1601 changedfileset.setdefault(f, f)
1599 1602 msng_mnfst_set.setdefault(c[0], clnode)
1600 1603 return collect_manifests_and_files
1601 1604
1602 1605 # Figure out which manifest nodes (of the ones we think might be part
1603 1606 # of the changegroup) the recipient must know about and remove them
1604 1607 # from the changegroup.
1605 1608 def prune_manifests():
1606 1609 has_mnfst_set = {}
1607 1610 for n in msng_mnfst_set:
1608 1611 # If a 'missing' manifest thinks it belongs to a changenode
1609 1612 # the recipient is assumed to have, obviously the recipient
1610 1613 # must have that manifest.
1611 1614 linknode = cl.node(mnfst.linkrev(n))
1612 1615 if linknode in has_cl_set:
1613 1616 has_mnfst_set[n] = 1
1614 1617 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1615 1618
1616 1619 # Use the information collected in collect_manifests_and_files to say
1617 1620 # which changenode any manifestnode belongs to.
1618 1621 def lookup_manifest_link(mnfstnode):
1619 1622 return msng_mnfst_set[mnfstnode]
1620 1623
1621 1624 # A function generating function that sets up the initial environment
1622 1625 # for the inner function.
1623 1626 def filenode_collector(changedfiles):
1624 1627 next_rev = [0]
1625 1628 # This gathers information from each manifestnode included in the
1626 1629 # changegroup about which filenodes the manifest node references
1627 1630 # so we can include those in the changegroup too.
1628 1631 #
1629 1632 # It also remembers which changenode each filenode belongs to. It
1630 1633 # does this by assuming the a filenode belongs to the changenode
1631 1634 # does this by assuming that a filenode belongs to the changenode
1632 1635 def collect_msng_filenodes(mnfstnode):
1633 1636 r = mnfst.rev(mnfstnode)
1634 1637 if r == next_rev[0]:
1635 1638 # If the last rev we looked at was the one just previous,
1636 1639 # we only need to see a diff.
1637 1640 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1638 1641 # For each line in the delta
1639 1642 for dline in delta.splitlines():
1640 1643 # get the filename and filenode for that line
1641 1644 f, fnode = dline.split('\0')
1642 1645 fnode = bin(fnode[:40])
1643 1646 f = changedfiles.get(f, None)
1644 1647 # And if the file is in the list of files we care
1645 1648 # about.
1646 1649 if f is not None:
1647 1650 # Get the changenode this manifest belongs to
1648 1651 clnode = msng_mnfst_set[mnfstnode]
1649 1652 # Create the set of filenodes for the file if
1650 1653 # there isn't one already.
1651 1654 ndset = msng_filenode_set.setdefault(f, {})
1652 1655 # And set the filenode's changelog node to the
1653 1656 # manifest's if it hasn't been set already.
1654 1657 ndset.setdefault(fnode, clnode)
1655 1658 else:
1656 1659 # Otherwise we need a full manifest.
1657 1660 m = mnfst.read(mnfstnode)
1658 1661 # For every file we care about.
1659 1662 for f in changedfiles:
1660 1663 fnode = m.get(f, None)
1661 1664 # If it's in the manifest
1662 1665 if fnode is not None:
1663 1666 # See comments above.
1664 1667 clnode = msng_mnfst_set[mnfstnode]
1665 1668 ndset = msng_filenode_set.setdefault(f, {})
1666 1669 ndset.setdefault(fnode, clnode)
1667 1670 # Remember the revision we hope to see next.
1668 1671 next_rev[0] = r + 1
1669 1672 return collect_msng_filenodes
1670 1673
1671 1674 # We have a list of filenodes we think we need for a file, let's remove
1672 1675 # all those we know the recipient must have.
1673 1676 def prune_filenodes(f, filerevlog):
1674 1677 msngset = msng_filenode_set[f]
1675 1678 hasset = {}
1676 1679 # If a 'missing' filenode thinks it belongs to a changenode we
1677 1680 # assume the recipient must have, then the recipient must have
1678 1681 # that filenode.
1679 1682 for n in msngset:
1680 1683 clnode = cl.node(filerevlog.linkrev(n))
1681 1684 if clnode in has_cl_set:
1682 1685 hasset[n] = 1
1683 1686 prune_parents(filerevlog, hasset, msngset)
1684 1687
1685 1688 # A function generating function that sets up a context for the
1686 1689 # inner function.
1687 1690 def lookup_filenode_link_func(fname):
1688 1691 msngset = msng_filenode_set[fname]
1689 1692 # Lookup the changenode the filenode belongs to.
1690 1693 def lookup_filenode_link(fnode):
1691 1694 return msngset[fnode]
1692 1695 return lookup_filenode_link
1693 1696
1694 1697 # Now that we have all these utility functions to help out and
1695 1698 # logically divide up the task, generate the group.
1696 1699 def gengroup():
1697 1700 # The set of changed files starts empty.
1698 1701 changedfiles = {}
1699 1702 # Create a changenode group generator that will call our functions
1700 1703 # back to lookup the owning changenode and collect information.
1701 1704 group = cl.group(msng_cl_lst, identity,
1702 1705 manifest_and_file_collector(changedfiles))
1703 1706 for chnk in group:
1704 1707 yield chnk
1705 1708
1706 1709 # The list of manifests has been collected by the generator
1707 1710 # calling our functions back.
1708 1711 prune_manifests()
1709 1712 msng_mnfst_lst = msng_mnfst_set.keys()
1710 1713 # Sort the manifestnodes by revision number.
1711 1714 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1712 1715 # Create a generator for the manifestnodes that calls our lookup
1713 1716 # and data collection functions back.
1714 1717 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1715 1718 filenode_collector(changedfiles))
1716 1719 for chnk in group:
1717 1720 yield chnk
1718 1721
1719 1722 # These are no longer needed, dereference and toss the memory for
1720 1723 # them.
1721 1724 msng_mnfst_lst = None
1722 1725 msng_mnfst_set.clear()
1723 1726
1724 1727 changedfiles = changedfiles.keys()
1725 1728 changedfiles.sort()
1726 1729 # Go through all our files in order sorted by name.
1727 1730 for fname in changedfiles:
1728 1731 filerevlog = self.file(fname)
1729 1732 # Toss out the filenodes that the recipient isn't really
1730 1733 # missing.
1731 1734 if msng_filenode_set.has_key(fname):
1732 1735 prune_filenodes(fname, filerevlog)
1733 1736 msng_filenode_lst = msng_filenode_set[fname].keys()
1734 1737 else:
1735 1738 msng_filenode_lst = []
1736 1739 # If any filenodes are left, generate the group for them,
1737 1740 # otherwise don't bother.
1738 1741 if len(msng_filenode_lst) > 0:
1739 1742 yield changegroup.genchunk(fname)
1740 1743 # Sort the filenodes by their revision #
1741 1744 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1742 1745 # Create a group generator and only pass in a changenode
1743 1746 # lookup function as we need to collect no information
1744 1747 # from filenodes.
1745 1748 group = filerevlog.group(msng_filenode_lst,
1746 1749 lookup_filenode_link_func(fname))
1747 1750 for chnk in group:
1748 1751 yield chnk
1749 1752 if msng_filenode_set.has_key(fname):
1750 1753 # Don't need this anymore, toss it to free memory.
1751 1754 del msng_filenode_set[fname]
1752 1755 # Signal that no more groups are left.
1753 1756 yield changegroup.closechunk()
1754 1757
1755 1758 if msng_cl_lst:
1756 1759 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1757 1760
1758 1761 return util.chunkbuffer(gengroup())
1759 1762
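# prune_parents above relies on a closure property: whatever the
# recipient has, it also has all ancestors of. A dict-based sketch, with
# a parents mapping standing in for the revlog (an assumption for
# illustration):
def prune(parents, hasset, missing):
    # parents: node -> tuple of parent nodes; hasset/missing: dicts as sets
    stack = list(hasset)
    while stack:
        n = stack.pop()
        for p in parents.get(n, ()):
            if p is not None and p not in hasset:
                hasset[p] = 1
                stack.append(p)
    for n in hasset:
        missing.pop(n, None)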
1760 1763 def changegroup(self, basenodes, source):
1761 1764 """Generate a changegroup of all nodes that we have that a recipient
1762 1765 doesn't.
1763 1766
1764 1767 This is much easier than the previous function as we can assume that
1765 1768 the recipient has any changenode we aren't sending them."""
1766 1769
1767 1770 self.hook('preoutgoing', throw=True, source=source)
1768 1771
1769 1772 cl = self.changelog
1770 1773 nodes = cl.nodesbetween(basenodes, None)[0]
1771 1774 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1772 1775 self.changegroupinfo(nodes)
1773 1776
1774 1777 def identity(x):
1775 1778 return x
1776 1779
1777 1780 def gennodelst(revlog):
1778 1781 for r in xrange(0, revlog.count()):
1779 1782 n = revlog.node(r)
1780 1783 if revlog.linkrev(n) in revset:
1781 1784 yield n
1782 1785
1783 1786 def changed_file_collector(changedfileset):
1784 1787 def collect_changed_files(clnode):
1785 1788 c = cl.read(clnode)
1786 1789 for fname in c[3]:
1787 1790 changedfileset[fname] = 1
1788 1791 return collect_changed_files
1789 1792
1790 1793 def lookuprevlink_func(revlog):
1791 1794 def lookuprevlink(n):
1792 1795 return cl.node(revlog.linkrev(n))
1793 1796 return lookuprevlink
1794 1797
1795 1798 def gengroup():
1796 1799 # construct a list of all changed files
1797 1800 changedfiles = {}
1798 1801
1799 1802 for chnk in cl.group(nodes, identity,
1800 1803 changed_file_collector(changedfiles)):
1801 1804 yield chnk
1802 1805 changedfiles = changedfiles.keys()
1803 1806 changedfiles.sort()
1804 1807
1805 1808 mnfst = self.manifest
1806 1809 nodeiter = gennodelst(mnfst)
1807 1810 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1808 1811 yield chnk
1809 1812
1810 1813 for fname in changedfiles:
1811 1814 filerevlog = self.file(fname)
1812 1815 nodeiter = gennodelst(filerevlog)
1813 1816 nodeiter = list(nodeiter)
1814 1817 if nodeiter:
1815 1818 yield changegroup.genchunk(fname)
1816 1819 lookup = lookuprevlink_func(filerevlog)
1817 1820 for chnk in filerevlog.group(nodeiter, lookup):
1818 1821 yield chnk
1819 1822
1820 1823 yield changegroup.closechunk()
1821 1824
1822 1825 if nodes:
1823 1826 self.hook('outgoing', node=hex(nodes[0]), source=source)
1824 1827
1825 1828 return util.chunkbuffer(gengroup())
1826 1829
1827 1830 def addchangegroup(self, source, srctype, url):
1828 1831 """add changegroup to repo.
1829 1832
1830 1833 return values:
1831 1834 - nothing changed or no source: 0
1832 1835 - more heads than before: 1+added heads (2..n)
1833 1836 - fewer heads than before: -1-removed heads (-2..-n)
1834 1837 - number of heads stays the same: 1
1835 1838 """
1836 1839 def csmap(x):
1837 1840 self.ui.debug(_("add changeset %s\n") % short(x))
1838 1841 return cl.count()
1839 1842
1840 1843 def revmap(x):
1841 1844 return cl.rev(x)
1842 1845
1843 1846 if not source:
1844 1847 return 0
1845 1848
1846 1849 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1847 1850
1848 1851 changesets = files = revisions = 0
1849 1852
1850 1853 tr = self.transaction()
1851 1854
1852 1855 # write changelog data to temp files so concurrent readers will not see
1853 1856 # an inconsistent view
1854 1857 cl = None
1855 1858 try:
1856 1859 cl = appendfile.appendchangelog(self.sopener,
1857 1860 self.changelog.version)
1858 1861
1859 1862 oldheads = len(cl.heads())
1860 1863
1861 1864 # pull off the changeset group
1862 1865 self.ui.status(_("adding changesets\n"))
1863 1866 cor = cl.count() - 1
1864 1867 chunkiter = changegroup.chunkiter(source)
1865 1868 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1866 1869 raise util.Abort(_("received changelog group is empty"))
1867 1870 cnr = cl.count() - 1
1868 1871 changesets = cnr - cor
1869 1872
1870 1873 # pull off the manifest group
1871 1874 self.ui.status(_("adding manifests\n"))
1872 1875 chunkiter = changegroup.chunkiter(source)
1873 1876 # no need to check for empty manifest group here:
1874 1877 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1875 1878 # no new manifest will be created and the manifest group will
1876 1879 # be empty during the pull
1877 1880 self.manifest.addgroup(chunkiter, revmap, tr)
1878 1881
1879 1882 # process the files
1880 1883 self.ui.status(_("adding file changes\n"))
1881 1884 while 1:
1882 1885 f = changegroup.getchunk(source)
1883 1886 if not f:
1884 1887 break
1885 1888 self.ui.debug(_("adding %s revisions\n") % f)
1886 1889 fl = self.file(f)
1887 1890 o = fl.count()
1888 1891 chunkiter = changegroup.chunkiter(source)
1889 1892 if fl.addgroup(chunkiter, revmap, tr) is None:
1890 1893 raise util.Abort(_("received file revlog group is empty"))
1891 1894 revisions += fl.count() - o
1892 1895 files += 1
1893 1896
1894 1897 cl.writedata()
1895 1898 finally:
1896 1899 if cl:
1897 1900 cl.cleanup()
1898 1901
1899 1902 # make changelog see real files again
1900 1903 self.changelog = changelog.changelog(self.sopener,
1901 1904 self.changelog.version)
1902 1905 self.changelog.checkinlinesize(tr)
1903 1906
1904 1907 newheads = len(self.changelog.heads())
1905 1908 heads = ""
1906 1909 if oldheads and newheads != oldheads:
1907 1910 heads = _(" (%+d heads)") % (newheads - oldheads)
1908 1911
1909 1912 self.ui.status(_("added %d changesets"
1910 1913 " with %d changes to %d files%s\n")
1911 1914 % (changesets, revisions, files, heads))
1912 1915
1913 1916 if changesets > 0:
1914 1917 self.hook('pretxnchangegroup', throw=True,
1915 1918 node=hex(self.changelog.node(cor+1)), source=srctype,
1916 1919 url=url)
1917 1920
1918 1921 tr.close()
1919 1922
1920 1923 if changesets > 0:
1921 1924 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1922 1925 source=srctype, url=url)
1923 1926
1924 1927 for i in xrange(cor + 1, cnr + 1):
1925 1928 self.hook("incoming", node=hex(self.changelog.node(i)),
1926 1929 source=srctype, url=url)
1927 1930
1928 1931 # never return 0 here:
1929 1932 if newheads < oldheads:
1930 1933 return newheads - oldheads - 1
1931 1934 else:
1932 1935 return newheads - oldheads + 1
1933 1936
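# Interpreting the return value computed above (a reading of the contract
# in the docstring, not an API): 0 is reserved for "nothing changed", so
# the head delta is shifted away from zero in both directions.
def describe_addchangegroup(ret):
    if ret == 0:
        return "no changes added"
    if ret == 1:
        return "changes added, head count unchanged"
    if ret > 1:
        return "changes added, %d new head(s)" % (ret - 1)
    return "changes added, %d head(s) merged away" % (-ret - 1)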
1934 1937
1935 1938 def stream_in(self, remote):
1936 1939 fp = remote.stream_out()
1937 1940 l = fp.readline()
1938 1941 try:
1939 1942 resp = int(l)
1940 1943 except ValueError:
1941 1944 raise util.UnexpectedOutput(
1942 1945 _('Unexpected response from remote server:'), l)
1943 1946 if resp == 1:
1944 1947 raise util.Abort(_('operation forbidden by server'))
1945 1948 elif resp == 2:
1946 1949 raise util.Abort(_('locking the remote repository failed'))
1947 1950 elif resp != 0:
1948 1951 raise util.Abort(_('the server sent an unknown error code'))
1949 1952 self.ui.status(_('streaming all changes\n'))
1950 1953 l = fp.readline()
1951 1954 try:
1952 1955 total_files, total_bytes = map(int, l.split(' ', 1))
1953 1956 except (ValueError, TypeError):
1954 1957 raise util.UnexpectedOutput(
1955 1958 _('Unexpected response from remote server:'), l)
1956 1959 self.ui.status(_('%d files to transfer, %s of data\n') %
1957 1960 (total_files, util.bytecount(total_bytes)))
1958 1961 start = time.time()
1959 1962 for i in xrange(total_files):
1960 1963 # XXX doesn't support '\n' or '\r' in filenames
1961 1964 l = fp.readline()
1962 1965 try:
1963 1966 name, size = l.split('\0', 1)
1964 1967 size = int(size)
1965 1968 except (ValueError, TypeError):
1966 1969 raise util.UnexpectedOutput(
1967 1970 _('Unexpected response from remote server:'), l)
1968 1971 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1969 1972 ofp = self.sopener(name, 'w')
1970 1973 for chunk in util.filechunkiter(fp, limit=size):
1971 1974 ofp.write(chunk)
1972 1975 ofp.close()
1973 1976 elapsed = time.time() - start
1974 1977 if elapsed <= 0:
1975 1978 elapsed = 0.001
1976 1979 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1977 1980 (util.bytecount(total_bytes), elapsed,
1978 1981 util.bytecount(total_bytes / elapsed)))
1979 1982 self.reload()
1980 1983 return len(self.heads()) + 1
1981 1984
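The loop above consumes a simple line-oriented wire format: one status line, one totals line, then a header line per file followed by exactly that many raw bytes. A minimal sketch of a producer for that format, assuming entries is an in-memory list of (name, data) pairs (illustrative only, not the actual server code):

    def stream_out_sketch(entries):
        # entries: list of (filename, data) pairs
        yield '0\n'                                    # status 0: stream allowed
        total_bytes = sum([len(d) for n, d in entries])
        yield '%d %d\n' % (len(entries), total_bytes)
        for name, data in entries:
            yield '%s\0%d\n' % (name, len(data))       # header: name NUL size
            yield data                                 # exactly `size` raw bytes follow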
1982 1985 def clone(self, remote, heads=[], stream=False):
1983 1986 '''clone remote repository.
1984 1987
1985 1988 keyword arguments:
1986 1989 heads: list of revs to clone (forces use of pull)
1987 1990 stream: use streaming clone if possible'''
1988 1991
1989 1992 # now, all clients that can request uncompressed clones can
1990 1993 # read repo formats supported by all servers that can serve
1991 1994 # them.
1992 1995
1993 1996 # if revlog format changes, client will have to check version
1994 1997 # and format flags on "stream" capability, and use
1995 1998 # uncompressed only if compatible.
1996 1999
1997 2000 if stream and not heads and remote.capable('stream'):
1998 2001 return self.stream_in(remote)
1999 2002 return self.pull(remote, heads)
2000 2003
2001 2004 # used to avoid circular references so destructors work
2002 2005 def aftertrans(files):
2003 2006 renamefiles = [tuple(t) for t in files]
2004 2007 def a():
2005 2008 for src, dest in renamefiles:
2006 2009 util.rename(src, dest)
2007 2010 return a
2008 2011
2009 2012 def instance(ui, path, create):
2010 2013 return localrepository(ui, util.drop_scheme('file', path), create)
2011 2014
2012 2015 def islocal(path):
2013 2016 return True
@@ -1,1378 +1,1379 b''
1 1 """
2 2 util.py - Mercurial utility functions and platform specific implementations
3 3
4 4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
6 6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7 7
8 8 This software may be used and distributed according to the terms
9 9 of the GNU General Public License, incorporated herein by reference.
10 10
11 11 This contains helper routines that are independent of the SCM core and hide
12 12 platform-specific details from the core.
13 13 """
14 14
15 15 from i18n import gettext as _
16 16 from demandload import *
17 17 demandload(globals(), "cStringIO errno getpass popen2 re shutil sys tempfile")
18 18 demandload(globals(), "os threading time calendar ConfigParser locale glob")
19 19
20 20 try:
21 21 _encoding = os.environ.get("HGENCODING") or locale.getpreferredencoding() \
22 22 or "ascii"
23 23 except locale.Error:
24 24 _encoding = 'ascii'
25 25 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
26 26 _fallbackencoding = 'ISO-8859-1'
27 27
28 28 def tolocal(s):
29 29 """
30 30 Convert a string from internal UTF-8 to local encoding
31 31
32 32 All internal strings should be UTF-8 but some repos before the
33 33 implementation of locale support may contain latin1 or possibly
34 34 other character sets. We attempt to decode everything strictly
35 35 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
36 36 replace unknown characters.
37 37 """
38 38 for e in ('UTF-8', _fallbackencoding):
39 39 try:
40 40 u = s.decode(e) # attempt strict decoding
41 41 return u.encode(_encoding, "replace")
42 42 except LookupError, k:
43 43 raise Abort(_("%s, please check your locale settings") % k)
44 44 except UnicodeDecodeError:
45 45 pass
46 46 u = s.decode("utf-8", "replace") # last ditch
47 47 return u.encode(_encoding, "replace")
48 48
49 49 def fromlocal(s):
50 50 """
51 51 Convert a string from the local character encoding to UTF-8
52 52
53 53 We attempt to decode strings using the encoding mode set by
54 54 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
55 55 characters will cause an error message. Other modes include
56 56 'replace', which replaces unknown characters with a special
57 57 Unicode character, and 'ignore', which drops the character.
58 58 """
59 59 try:
60 60 return s.decode(_encoding, _encodingmode).encode("utf-8")
61 61 except UnicodeDecodeError, inst:
62 62 sub = s[max(0, inst.start-10):inst.start+10]
63 63 raise Abort("decoding near '%s': %s!" % (sub, inst))
64 64 except LookupError, k:
65 65 raise Abort(_("%s, please check your locale settings") % k)
66 66
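A small round-trip sketch of the two helpers above, assuming for illustration that the local encoding resolves to Latin-1 (the actual result depends on HGENCODING and the locale):

    u = fromlocal('caf\xe9')   # Latin-1 input re-encoded as UTF-8: 'caf\xc3\xa9'
    s = tolocal(u)             # back to the local encoding: 'caf\xe9'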
67 67 def locallen(s):
68 68 """Find the length in characters of a local string"""
69 69 return len(s.decode(_encoding, "replace"))
70 70
71 71 def localsub(s, a, b=None):
72 72 try:
73 73 u = s.decode(_encoding, _encodingmode)
74 74 if b is not None:
75 75 u = u[a:b]
76 76 else:
77 77 u = u[:a]
78 78 return u.encode(_encoding, _encodingmode)
79 79 except UnicodeDecodeError, inst:
80 80 sub = s[max(0, inst.start-10):inst.start+10]
81 81 raise Abort(_("decoding near '%s': %s!\n") % (sub, inst))
82 82
83 83 # used by parsedate
84 84 defaultdateformats = (
85 85 '%Y-%m-%d %H:%M:%S',
86 86 '%Y-%m-%d %I:%M:%S%p',
87 87 '%Y-%m-%d %H:%M',
88 88 '%Y-%m-%d %I:%M%p',
89 89 '%Y-%m-%d',
90 90 '%m-%d',
91 91 '%m/%d',
92 92 '%m/%d/%y',
93 93 '%m/%d/%Y',
94 94 '%a %b %d %H:%M:%S %Y',
95 95 '%a %b %d %I:%M:%S%p %Y',
96 96 '%b %d %H:%M:%S %Y',
97 97 '%b %d %I:%M:%S%p %Y',
98 98 '%b %d %H:%M:%S',
99 99 '%b %d %I:%M:%S%p',
100 100 '%b %d %H:%M',
101 101 '%b %d %I:%M%p',
102 102 '%b %d %Y',
103 103 '%b %d',
104 104 '%H:%M:%S',
105 105 '%I:%M:%S%p',
106 106 '%H:%M',
107 107 '%I:%M%p',
108 108 )
109 109
110 110 extendeddateformats = defaultdateformats + (
111 111 "%Y",
112 112 "%Y-%m",
113 113 "%b",
114 114 "%b %Y",
115 115 )
116 116
117 117 class SignalInterrupt(Exception):
118 118 """Exception raised on SIGTERM and SIGHUP."""
119 119
120 120 # like SafeConfigParser but with case-sensitive keys
121 121 class configparser(ConfigParser.SafeConfigParser):
122 122 def optionxform(self, optionstr):
123 123 return optionstr
124 124
125 125 def cachefunc(func):
126 126 '''cache the result of function calls'''
127 127 # XXX doesn't handle keywords args
128 128 cache = {}
129 129 if func.func_code.co_argcount == 1:
130 130 # we gain a small amount of time because
131 131 # we don't need to pack/unpack the list
132 132 def f(arg):
133 133 if arg not in cache:
134 134 cache[arg] = func(arg)
135 135 return cache[arg]
136 136 else:
137 137 def f(*args):
138 138 if args not in cache:
139 139 cache[args] = func(*args)
140 140 return cache[args]
141 141
142 142 return f
143 143
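For illustration, a caller memoizes an expensive single-argument function like this (expensive_lookup is a hypothetical stand-in):

    def expensive_lookup(name):
        # stand-in for a costly filesystem or network lookup
        return name.upper()

    cached = cachefunc(expensive_lookup)
    cached('foo')   # computes and caches 'FOO'
    cached('foo')   # served from the cache; expensive_lookup is not called again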
144 144 def pipefilter(s, cmd):
145 145 '''filter string S through command CMD, returning its output'''
146 146 (pout, pin) = popen2.popen2(cmd, -1, 'b')
147 147 def writer():
148 148 try:
149 149 pin.write(s)
150 150 pin.close()
151 151 except IOError, inst:
152 152 if inst.errno != errno.EPIPE:
153 153 raise
154 154
155 155 # we should use select instead on UNIX, but this will work on most
156 156 # systems, including Windows
157 157 w = threading.Thread(target=writer)
158 158 w.start()
159 159 f = pout.read()
160 160 pout.close()
161 161 w.join()
162 162 return f
163 163
164 164 def tempfilter(s, cmd):
165 165 '''filter string S through a pair of temporary files with CMD.
166 166 CMD is used as a template to create the real command to be run,
167 167 with the strings INFILE and OUTFILE replaced by the real names of
168 168 the temporary files generated.'''
169 169 inname, outname = None, None
170 170 try:
171 171 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
172 172 fp = os.fdopen(infd, 'wb')
173 173 fp.write(s)
174 174 fp.close()
175 175 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
176 176 os.close(outfd)
177 177 cmd = cmd.replace('INFILE', inname)
178 178 cmd = cmd.replace('OUTFILE', outname)
179 179 code = os.system(cmd)
180 180 if code: raise Abort(_("command '%s' failed: %s") %
181 181 (cmd, explain_exit(code)))
182 182 return open(outname, 'rb').read()
183 183 finally:
184 184 try:
185 185 if inname: os.unlink(inname)
186 186 except: pass
187 187 try:
188 188 if outname: os.unlink(outname)
189 189 except: pass
190 190
191 191 filtertable = {
192 192 'tempfile:': tempfilter,
193 193 'pipe:': pipefilter,
194 194 }
195 195
196 196 def filter(s, cmd):
197 197 "filter a string through a command that transforms its input to its output"
198 198 for name, fn in filtertable.iteritems():
199 199 if cmd.startswith(name):
200 200 return fn(s, cmd[len(name):].lstrip())
201 201 return pipefilter(s, cmd)
202 202
203 203 def find_in_path(name, path, default=None):
204 204 '''find name in search path. path can be string (will be split
205 205 with os.pathsep), or iterable thing that returns strings. if name
206 206 found, return path to name. else return default.'''
207 207 if isinstance(path, str):
208 208 path = path.split(os.pathsep)
209 209 for p in path:
210 210 p_name = os.path.join(p, name)
211 211 if os.path.exists(p_name):
212 212 return p_name
213 213 return default
214 214
215 215 def binary(s):
216 216 """return true if a string is binary data using diff's heuristic"""
217 217 if s and '\0' in s[:4096]:
218 218 return True
219 219 return False
220 220
221 221 def unique(g):
222 222 """return the unique elements of iterable g"""
223 223 seen = {}
224 224 l = []
225 225 for f in g:
226 226 if f not in seen:
227 227 seen[f] = 1
228 228 l.append(f)
229 229 return l
230 230
231 231 class Abort(Exception):
232 232 """Raised if a command needs to print an error and exit."""
233 233
234 234 class UnexpectedOutput(Abort):
235 235 """Raised to print an error with part of output and exit."""
236 236
237 237 def always(fn): return True
238 238 def never(fn): return False
239 239
240 240 def expand_glob(pats):
241 241 '''On Windows, expand the implicit globs in a list of patterns'''
242 242 if os.name != 'nt':
243 243 return list(pats)
244 244 ret = []
245 245 for p in pats:
246 246 kind, name = patkind(p, None)
247 247 if kind is None:
248 248 globbed = glob.glob(name)
249 249 if globbed:
250 250 ret.extend(globbed)
251 251 continue
252 252 # if we couldn't expand the glob, just keep it around
253 253 ret.append(p)
254 254 return ret
255 255
256 256 def patkind(name, dflt_pat='glob'):
257 257 """Split a string into an optional pattern kind prefix and the
258 258 actual pattern."""
259 259 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
260 260 if name.startswith(prefix + ':'): return name.split(':', 1)
261 261 return dflt_pat, name
262 262
263 263 def globre(pat, head='^', tail='$'):
264 264 "convert a glob pattern into a regexp"
265 265 i, n = 0, len(pat)
266 266 res = ''
267 267 group = False
268 268 def peek(): return i < n and pat[i]
269 269 while i < n:
270 270 c = pat[i]
271 271 i = i+1
272 272 if c == '*':
273 273 if peek() == '*':
274 274 i += 1
275 275 res += '.*'
276 276 else:
277 277 res += '[^/]*'
278 278 elif c == '?':
279 279 res += '.'
280 280 elif c == '[':
281 281 j = i
282 282 if j < n and pat[j] in '!]':
283 283 j += 1
284 284 while j < n and pat[j] != ']':
285 285 j += 1
286 286 if j >= n:
287 287 res += '\\['
288 288 else:
289 289 stuff = pat[i:j].replace('\\','\\\\')
290 290 i = j + 1
291 291 if stuff[0] == '!':
292 292 stuff = '^' + stuff[1:]
293 293 elif stuff[0] == '^':
294 294 stuff = '\\' + stuff
295 295 res = '%s[%s]' % (res, stuff)
296 296 elif c == '{':
297 297 group = True
298 298 res += '(?:'
299 299 elif c == '}' and group:
300 300 res += ')'
301 301 group = False
302 302 elif c == ',' and group:
303 303 res += '|'
304 304 elif c == '\\':
305 305 p = peek()
306 306 if p:
307 307 i += 1
308 308 res += re.escape(p)
309 309 else:
310 310 res += re.escape(c)
311 311 else:
312 312 res += re.escape(c)
313 313 return head + res + tail
314 314
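A quick check of the translation above (patterns and file names are illustrative): '*' stops at '/', while '**' crosses directory boundaries.

    import re
    pat = re.compile(globre('*.c'))
    bool(pat.match('main.c'))                # True
    bool(pat.match('src/main.c'))            # False: '*' does not match '/'
    pat2 = re.compile(globre('src/**/*.c'))
    bool(pat2.match('src/lib/util/x.c'))     # True: '**' matches across '/'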
315 315 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
316 316
317 317 def pathto(n1, n2):
318 318 '''return the relative path from one place to another.
319 319 n1 should use os.sep to separate directories
320 320 n2 should use "/" to separate directories
321 321 returns an os.sep-separated path.
322 322 '''
323 323 if not n1: return localpath(n2)
324 324 a, b = n1.split(os.sep), n2.split('/')
325 325 a.reverse()
326 326 b.reverse()
327 327 while a and b and a[-1] == b[-1]:
328 328 a.pop()
329 329 b.pop()
330 330 b.reverse()
331 331 return os.sep.join((['..'] * len(a)) + b)
332 332
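For example, on a POSIX system (where os.sep is '/'):

    pathto('foo/bar', 'baz/qux')   # -> '../../baz/qux'
    pathto('', 'baz/qux')          # -> 'baz/qux' (empty n1 means the repo root)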
333 333 def canonpath(root, cwd, myname):
334 334 """return the canonical path of myname, given cwd and root"""
335 335 if root == os.sep:
336 336 rootsep = os.sep
337 337 elif root.endswith(os.sep):
338 338 rootsep = root
339 339 else:
340 340 rootsep = root + os.sep
341 341 name = myname
342 342 if not os.path.isabs(name):
343 343 name = os.path.join(root, cwd, name)
344 344 name = os.path.normpath(name)
345 345 if name != rootsep and name.startswith(rootsep):
346 346 name = name[len(rootsep):]
347 347 audit_path(name)
348 348 return pconvert(name)
349 349 elif name == root:
350 350 return ''
351 351 else:
352 352 # Determine whether `name' is in the hierarchy at or beneath `root',
353 353 # by iterating name=dirname(name) until that causes no change (can't
354 354 # check name == '/', because that doesn't work on windows). For each
355 355 # `name', compare dev/inode numbers. If they match, the list `rel'
356 356 # holds the reversed list of components making up the relative file
357 357 # name we want.
358 358 root_st = os.stat(root)
359 359 rel = []
360 360 while True:
361 361 try:
362 362 name_st = os.stat(name)
363 363 except OSError:
364 364 break
365 365 if samestat(name_st, root_st):
366 366 if not rel:
367 367 # name was actually the same as root (maybe a symlink)
368 368 return ''
369 369 rel.reverse()
370 370 name = os.path.join(*rel)
371 371 audit_path(name)
372 372 return pconvert(name)
373 373 dirname, basename = os.path.split(name)
374 374 rel.append(basename)
375 375 if dirname == name:
376 376 break
377 377 name = dirname
378 378
379 379 raise Abort('%s not under root' % myname)
380 380
381 381 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], head='', src=None):
382 382 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
383 383
384 384 def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], head='',
385 src=None, globbed=False):
386 if not globbed:
385 src=None, globbed=False, default=None):
386 default = default or 'relpath'
387 if default == 'relpath' and not globbed:
387 388 names = expand_glob(names)
388 return _matcher(canonroot, cwd, names, inc, exc, head, 'relpath', src)
389 return _matcher(canonroot, cwd, names, inc, exc, head, default, src)
389 390
390 391 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
391 392 """build a function to match a set of file patterns
392 393
393 394 arguments:
394 395 canonroot - the canonical root of the tree you're matching against
395 396 cwd - the current working directory, if relevant
396 397 names - patterns to find
397 398 inc - patterns to include
398 399 exc - patterns to exclude
399 400 head - a regex to prepend to patterns to control whether a match is rooted
400 401 dflt_pat - if a pattern in names has no explicit type, assume this one
401 402 src - where these patterns came from (e.g. .hgignore)
402 403
403 404 a pattern is one of:
404 405 'glob:<glob>' - a glob relative to cwd
405 406 're:<regexp>' - a regular expression
406 407 'path:<path>' - a path relative to canonroot
407 408 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
408 409 'relpath:<path>' - a path relative to cwd
409 410 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
410 411 '<something>' - one of the cases above, selected by the dflt_pat argument
411 412
412 413 returns:
413 414 a 3-tuple containing
414 415 - list of roots (places where one should start a recursive walk of the fs);
415 416 this often matches the explicit non-pattern names passed in, but also
416 417 includes the initial part of glob: patterns that has no glob characters
417 418 - a bool match(filename) function
418 419 - a bool indicating if any patterns were passed in
419 420
420 421 todo:
421 422 make head regex a rooted bool
422 423 """
423 424
424 425 def contains_glob(name):
425 426 for c in name:
426 427 if c in _globchars: return True
427 428 return False
428 429
429 430 def regex(kind, name, tail):
430 431 '''convert a pattern into a regular expression'''
431 432 if not name:
432 433 return ''
433 434 if kind == 're':
434 435 return name
435 436 elif kind == 'path':
436 437 return '^' + re.escape(name) + '(?:/|$)'
437 438 elif kind == 'relglob':
438 439 return head + globre(name, '(?:|.*/)', '(?:/|$)')
439 440 elif kind == 'relpath':
440 441 return head + re.escape(name) + '(?:/|$)'
441 442 elif kind == 'relre':
442 443 if name.startswith('^'):
443 444 return name
444 445 return '.*' + name
445 446 return head + globre(name, '', tail)
446 447
447 448 def matchfn(pats, tail):
448 449 """build a matching function from a set of patterns"""
449 450 if not pats:
450 451 return
451 452 matches = []
452 453 for k, p in pats:
453 454 try:
454 455 pat = '(?:%s)' % regex(k, p, tail)
455 456 matches.append(re.compile(pat).match)
456 457 except re.error:
457 458 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
458 459 else: raise Abort("invalid pattern (%s): %s" % (k, p))
459 460
460 461 def buildfn(text):
461 462 for m in matches:
462 463 r = m(text)
463 464 if r:
464 465 return r
465 466
466 467 return buildfn
467 468
468 469 def globprefix(pat):
469 470 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
470 471 root = []
471 472 for p in pat.split('/'):
472 473 if contains_glob(p): break
473 474 root.append(p)
474 475 return '/'.join(root) or '.'
475 476
476 477 def normalizepats(names, default):
477 478 pats = []
478 479 files = []
479 480 roots = []
480 481 anypats = False
481 482 for kind, name in [patkind(p, default) for p in names]:
482 483 if kind in ('glob', 'relpath'):
483 484 name = canonpath(canonroot, cwd, name)
484 485 elif kind in ('relglob', 'path'):
485 486 name = normpath(name)
486 487 if kind in ('glob', 're', 'relglob', 'relre'):
487 488 pats.append((kind, name))
488 489 anypats = True
489 490 if kind == 'glob':
490 491 root = globprefix(name)
491 492 roots.append(root)
492 493 elif kind in ('relpath', 'path'):
493 494 files.append((kind, name))
494 495 roots.append(name)
495 496 elif kind == 'relglob':
496 497 roots.append('.')
497 498 return roots, pats + files, anypats
498 499
499 500 roots, pats, anypats = normalizepats(names, dflt_pat)
500 501
501 502 patmatch = matchfn(pats, '$') or always
502 503 incmatch = always
503 504 if inc:
504 505 dummy, inckinds, dummy = normalizepats(inc, 'glob')
505 506 incmatch = matchfn(inckinds, '(?:/|$)')
506 507 excmatch = lambda fn: False
507 508 if exc:
508 509 dummy, exckinds, dummy = normalizepats(exc, 'glob')
509 510 excmatch = matchfn(exckinds, '(?:/|$)')
510 511
511 512 return (roots,
512 513 lambda fn: (incmatch(fn) and not excmatch(fn) and patmatch(fn)),
513 514 (inc or exc or anypats) and True)
514 515
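A minimal sketch of how the new default argument to cmdmatcher above changes matching: with default='relglob', a bare pattern is treated as an unrooted glob and matches in every directory of the tree. The repository root and file names below are hypothetical.

    roots, match, anypats = cmdmatcher('/repo', cwd='', names=['*.py'],
                                       default='relglob')
    bool(match('setup.py'))        # True
    bool(match('src/lib/x.py'))    # True: relglob patterns are unrooted
    bool(match('README'))          # False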
515 516 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
516 517 '''enhanced shell command execution.
517 518 run with environment maybe modified, maybe in different dir.
518 519
519 520 if command fails and onerr is None, return status. if ui object,
520 521 print error message and return status, else raise onerr object as
521 522 exception.'''
522 523 def py2shell(val):
523 524 'convert python object into string that is useful to shell'
524 525 if val in (None, False):
525 526 return '0'
526 527 if val == True:
527 528 return '1'
528 529 return str(val)
529 530 oldenv = {}
530 531 for k in environ:
531 532 oldenv[k] = os.environ.get(k)
532 533 if cwd is not None:
533 534 oldcwd = os.getcwd()
534 535 origcmd = cmd
535 536 if os.name == 'nt':
536 537 cmd = '"%s"' % cmd
537 538 try:
538 539 for k, v in environ.iteritems():
539 540 os.environ[k] = py2shell(v)
540 541 if cwd is not None and oldcwd != cwd:
541 542 os.chdir(cwd)
542 543 rc = os.system(cmd)
543 544 if rc and onerr:
544 545 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
545 546 explain_exit(rc)[0])
546 547 if errprefix:
547 548 errmsg = '%s: %s' % (errprefix, errmsg)
548 549 try:
549 550 onerr.warn(errmsg + '\n')
550 551 except AttributeError:
551 552 raise onerr(errmsg)
552 553 return rc
553 554 finally:
554 555 for k, v in oldenv.iteritems():
555 556 if v is None:
556 557 del os.environ[k]
557 558 else:
558 559 os.environ[k] = v
559 560 if cwd is not None and oldcwd != cwd:
560 561 os.chdir(oldcwd)
561 562
562 563 def rename(src, dst):
563 564 """forcibly rename a file"""
564 565 try:
565 566 os.rename(src, dst)
566 567 except OSError, err:
567 568 # on windows, rename to existing file is not allowed, so we
568 569 # must delete destination first. but if file is open, unlink
569 570 # schedules it for delete but does not delete it. rename
570 571 # happens immediately even for open files, so we create
571 572 # temporary file, delete it, rename destination to that name,
572 573 # then delete that. then rename is safe to do.
573 574 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
574 575 os.close(fd)
575 576 os.unlink(temp)
576 577 os.rename(dst, temp)
577 578 os.unlink(temp)
578 579 os.rename(src, dst)
579 580
580 581 def unlink(f):
581 582 """unlink and remove the directory if it is empty"""
582 583 os.unlink(f)
583 584 # try removing directories that might now be empty
584 585 try:
585 586 os.removedirs(os.path.dirname(f))
586 587 except OSError:
587 588 pass
588 589
589 590 def copyfile(src, dest):
590 591 "copy a file, preserving mode"
591 592 try:
592 593 shutil.copyfile(src, dest)
593 594 shutil.copymode(src, dest)
594 595 except shutil.Error, inst:
595 596 raise Abort(str(inst))
596 597
597 598 def copyfiles(src, dst, hardlink=None):
598 599 """Copy a directory tree using hardlinks if possible"""
599 600
600 601 if hardlink is None:
601 602 hardlink = (os.stat(src).st_dev ==
602 603 os.stat(os.path.dirname(dst)).st_dev)
603 604
604 605 if os.path.isdir(src):
605 606 os.mkdir(dst)
606 607 for name in os.listdir(src):
607 608 srcname = os.path.join(src, name)
608 609 dstname = os.path.join(dst, name)
609 610 copyfiles(srcname, dstname, hardlink)
610 611 else:
611 612 if hardlink:
612 613 try:
613 614 os_link(src, dst)
614 615 except (IOError, OSError):
615 616 hardlink = False
616 617 shutil.copy(src, dst)
617 618 else:
618 619 shutil.copy(src, dst)
619 620
620 621 def audit_path(path):
621 622 """Abort if path contains dangerous components"""
622 623 parts = os.path.normcase(path).split(os.sep)
623 624 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
624 625 or os.pardir in parts):
625 626 raise Abort(_("path contains illegal component: %s\n") % path)
626 627
627 628 def _makelock_file(info, pathname):
628 629 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
629 630 os.write(ld, info)
630 631 os.close(ld)
631 632
632 633 def _readlock_file(pathname):
633 634 return posixfile(pathname).read()
634 635
635 636 def nlinks(pathname):
636 637 """Return number of hardlinks for the given file."""
637 638 return os.lstat(pathname).st_nlink
638 639
639 640 if hasattr(os, 'link'):
640 641 os_link = os.link
641 642 else:
642 643 def os_link(src, dst):
643 644 raise OSError(0, _("Hardlinks not supported"))
644 645
645 646 def fstat(fp):
646 647 '''stat file object that may not have fileno method.'''
647 648 try:
648 649 return os.fstat(fp.fileno())
649 650 except AttributeError:
650 651 return os.stat(fp.name)
651 652
652 653 posixfile = file
653 654
654 655 def is_win_9x():
655 656 '''return true if run on windows 95, 98 or me.'''
656 657 try:
657 658 return sys.getwindowsversion()[3] == 1
658 659 except AttributeError:
659 660 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
660 661
661 662 getuser_fallback = None
662 663
663 664 def getuser():
664 665 '''return name of current user'''
665 666 try:
666 667 return getpass.getuser()
667 668 except ImportError:
668 669 # import of pwd will fail on windows - try fallback
669 670 if getuser_fallback:
670 671 return getuser_fallback()
671 672 # raised if win32api not available
672 673 raise Abort(_('user name not available - set USERNAME '
673 674 'environment variable'))
674 675
675 676 def username(uid=None):
676 677 """Return the name of the user with the given uid.
677 678
678 679 If uid is None, return the name of the current user."""
679 680 try:
680 681 import pwd
681 682 if uid is None:
682 683 uid = os.getuid()
683 684 try:
684 685 return pwd.getpwuid(uid)[0]
685 686 except KeyError:
686 687 return str(uid)
687 688 except ImportError:
688 689 return None
689 690
690 691 def groupname(gid=None):
691 692 """Return the name of the group with the given gid.
692 693
693 694 If gid is None, return the name of the current group."""
694 695 try:
695 696 import grp
696 697 if gid is None:
697 698 gid = os.getgid()
698 699 try:
699 700 return grp.getgrgid(gid)[0]
700 701 except KeyError:
701 702 return str(gid)
702 703 except ImportError:
703 704 return None
704 705
705 706 # File system features
706 707
707 708 def checkfolding(path):
708 709 """
709 710 Check whether the given path is on a case-sensitive filesystem
710 711
711 712 Requires a path (like /foo/.hg) ending with a foldable final
712 713 directory component.
713 714 """
714 715 s1 = os.stat(path)
715 716 d, b = os.path.split(path)
716 717 p2 = os.path.join(d, b.upper())
717 718 if path == p2:
718 719 p2 = os.path.join(d, b.lower())
719 720 try:
720 721 s2 = os.stat(p2)
721 722 if s2 == s1:
722 723 return False
723 724 return True
724 725 except:
725 726 return True
726 727
727 728 # Platform specific variants
728 729 if os.name == 'nt':
729 730 demandload(globals(), "msvcrt")
730 731 nulldev = 'NUL:'
731 732
732 733 class winstdout:
733 734 '''stdout on windows misbehaves if sent through a pipe'''
734 735
735 736 def __init__(self, fp):
736 737 self.fp = fp
737 738
738 739 def __getattr__(self, key):
739 740 return getattr(self.fp, key)
740 741
741 742 def close(self):
742 743 try:
743 744 self.fp.close()
744 745 except: pass
745 746
746 747 def write(self, s):
747 748 try:
748 749 return self.fp.write(s)
749 750 except IOError, inst:
750 751 if inst.errno != 0: raise
751 752 self.close()
752 753 raise IOError(errno.EPIPE, 'Broken pipe')
753 754
754 755 def flush(self):
755 756 try:
756 757 return self.fp.flush()
757 758 except IOError, inst:
758 759 if inst.errno != errno.EINVAL: raise
759 760 self.close()
760 761 raise IOError(errno.EPIPE, 'Broken pipe')
761 762
762 763 sys.stdout = winstdout(sys.stdout)
763 764
764 765 def system_rcpath():
765 766 try:
766 767 return system_rcpath_win32()
767 768 except:
768 769 return [r'c:\mercurial\mercurial.ini']
769 770
770 771 def os_rcpath():
771 772 '''return default os-specific hgrc search path'''
772 773 path = system_rcpath()
773 774 path.append(user_rcpath())
774 775 userprofile = os.environ.get('USERPROFILE')
775 776 if userprofile:
776 777 path.append(os.path.join(userprofile, 'mercurial.ini'))
777 778 return path
778 779
779 780 def user_rcpath():
780 781 '''return os-specific hgrc search path to the user dir'''
781 782 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
782 783
783 784 def parse_patch_output(output_line):
784 785 """parses the output produced by patch and returns the file name"""
785 786 pf = output_line[14:]
786 787 if pf[0] == '`':
787 788 pf = pf[1:-1] # Remove the quotes
788 789 return pf
789 790
790 791 def testpid(pid):
791 792 '''return False if pid dead, True if running or not known'''
792 793 return True
793 794
794 795 def is_exec(f, last):
795 796 return last
796 797
797 798 def set_exec(f, mode):
798 799 pass
799 800
800 801 def set_binary(fd):
801 802 msvcrt.setmode(fd.fileno(), os.O_BINARY)
802 803
803 804 def pconvert(path):
804 805 return path.replace("\\", "/")
805 806
806 807 def localpath(path):
807 808 return path.replace('/', '\\')
808 809
809 810 def normpath(path):
810 811 return pconvert(os.path.normpath(path))
811 812
812 813 makelock = _makelock_file
813 814 readlock = _readlock_file
814 815
815 816 def samestat(s1, s2):
816 817 return False
817 818
818 819 # A sequence of backslashes is special iff it precedes a double quote:
819 820 # - if there's an even number of backslashes, the double quote is not
820 821 # quoted (i.e. it ends the quoted region)
821 822 # - if there's an odd number of backslashes, the double quote is quoted
822 823 # - in both cases, every pair of backslashes is unquoted into a single
823 824 # backslash
824 825 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
825 826 # So, to quote a string, we must surround it in double quotes, double
826 827 # the number of backslashes that precede double quotes and add another
827 828 # backslash before every double quote (being careful with the double
828 829 # quote we've appended to the end)
829 830 _quotere = None
830 831 def shellquote(s):
831 832 global _quotere
832 833 if _quotere is None:
833 834 _quotere = re.compile(r'(\\*)("|\\$)')
834 835 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
835 836
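For instance, in this Windows branch (results shown as the actual characters produced):

    shellquote('he said "hi"')   # -> "he said \"hi\""  (quotes escaped, result wrapped in quotes)
    shellquote('tail\\')         # -> "tail\\"           (trailing backslash doubled so it cannot escape the closing quote)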
836 837 def explain_exit(code):
837 838 return _("exited with status %d") % code, code
838 839
839 840 # if you change this stub into a real check, please try to implement the
840 841 # username and groupname functions above, too.
841 842 def isowner(fp, st=None):
842 843 return True
843 844
844 845 try:
845 846 # override functions with win32 versions if possible
846 847 from util_win32 import *
847 848 if not is_win_9x():
848 849 posixfile = posixfile_nt
849 850 except ImportError:
850 851 pass
851 852
852 853 else:
853 854 nulldev = '/dev/null'
854 855
855 856 def rcfiles(path):
856 857 rcs = [os.path.join(path, 'hgrc')]
857 858 rcdir = os.path.join(path, 'hgrc.d')
858 859 try:
859 860 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
860 861 if f.endswith(".rc")])
861 862 except OSError:
862 863 pass
863 864 return rcs
864 865
865 866 def os_rcpath():
866 867 '''return default os-specific hgrc search path'''
867 868 path = []
868 869 # old mod_python does not set sys.argv
869 870 if len(getattr(sys, 'argv', [])) > 0:
870 871 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
871 872 '/../etc/mercurial'))
872 873 path.extend(rcfiles('/etc/mercurial'))
873 874 path.append(os.path.expanduser('~/.hgrc'))
874 875 path = [os.path.normpath(f) for f in path]
875 876 return path
876 877
877 878 def parse_patch_output(output_line):
878 879 """parses the output produced by patch and returns the file name"""
879 880 pf = output_line[14:]
880 881 if pf.startswith("'") and pf.endswith("'") and " " in pf:
881 882 pf = pf[1:-1] # Remove the quotes
882 883 return pf
883 884
884 885 def is_exec(f, last):
885 886 """check whether a file is executable"""
886 887 return (os.lstat(f).st_mode & 0100 != 0)
887 888
888 889 def set_exec(f, mode):
889 890 s = os.lstat(f).st_mode
890 891 if (s & 0100 != 0) == mode:
891 892 return
892 893 if mode:
893 894 # Turn on +x for every +r bit when making a file executable
894 895 # and obey umask.
895 896 umask = os.umask(0)
896 897 os.umask(umask)
897 898 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
898 899 else:
899 900 os.chmod(f, s & 0666)
900 901
901 902 def set_binary(fd):
902 903 pass
903 904
904 905 def pconvert(path):
905 906 return path
906 907
907 908 def localpath(path):
908 909 return path
909 910
910 911 normpath = os.path.normpath
911 912 samestat = os.path.samestat
912 913
913 914 def makelock(info, pathname):
914 915 try:
915 916 os.symlink(info, pathname)
916 917 except OSError, why:
917 918 if why.errno == errno.EEXIST:
918 919 raise
919 920 else:
920 921 _makelock_file(info, pathname)
921 922
922 923 def readlock(pathname):
923 924 try:
924 925 return os.readlink(pathname)
925 926 except OSError, why:
926 927 if why.errno == errno.EINVAL:
927 928 return _readlock_file(pathname)
928 929 else:
929 930 raise
930 931
931 932 def shellquote(s):
932 933 return "'%s'" % s.replace("'", "'\\''")
933 934
934 935 def testpid(pid):
935 936 '''return False if pid dead, True if running or not sure'''
936 937 try:
937 938 os.kill(pid, 0)
938 939 return True
939 940 except OSError, inst:
940 941 return inst.errno != errno.ESRCH
941 942
942 943 def explain_exit(code):
943 944 """return a 2-tuple (desc, code) describing a process's status"""
944 945 if os.WIFEXITED(code):
945 946 val = os.WEXITSTATUS(code)
946 947 return _("exited with status %d") % val, val
947 948 elif os.WIFSIGNALED(code):
948 949 val = os.WTERMSIG(code)
949 950 return _("killed by signal %d") % val, val
950 951 elif os.WIFSTOPPED(code):
951 952 val = os.WSTOPSIG(code)
952 953 return _("stopped by signal %d") % val, val
953 954 raise ValueError(_("invalid exit code"))
954 955
955 956 def isowner(fp, st=None):
956 957 """Return True if the file object f belongs to the current user.
957 958
958 959 The return value of a util.fstat(f) may be passed as the st argument.
959 960 """
960 961 if st is None:
961 962 st = fstat(fp)
962 963 return st.st_uid == os.getuid()
963 964
964 965 def _buildencodefun():
965 966 e = '_'
966 967 win_reserved = [ord(x) for x in '\\:*?"<>|']
967 968 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
968 969 for x in (range(32) + range(126, 256) + win_reserved):
969 970 cmap[chr(x)] = "~%02x" % x
970 971 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
971 972 cmap[chr(x)] = e + chr(x).lower()
972 973 dmap = {}
973 974 for k, v in cmap.iteritems():
974 975 dmap[v] = k
975 976 def decode(s):
976 977 i = 0
977 978 while i < len(s):
978 979 for l in xrange(1, 4):
979 980 try:
980 981 yield dmap[s[i:i+l]]
981 982 i += l
982 983 break
983 984 except KeyError:
984 985 pass
985 986 else:
986 987 raise KeyError
987 988 return (lambda s: "".join([cmap[c] for c in s]),
988 989 lambda s: "".join(list(decode(s))))
989 990
990 991 encodefilename, decodefilename = _buildencodefun()
991 992
992 993 def encodedopener(openerfn, fn):
993 994 def o(path, *args, **kw):
994 995 return openerfn(fn(path), *args, **kw)
995 996 return o
996 997
997 998 def opener(base, audit=True):
998 999 """
999 1000 return a function that opens files relative to base
1000 1001
1001 1002 this function is used to hide the details of COW semantics and
1002 1003 remote file access from higher level code.
1003 1004 """
1004 1005 p = base
1005 1006 audit_p = audit
1006 1007
1007 1008 def mktempcopy(name):
1008 1009 d, fn = os.path.split(name)
1009 1010 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1010 1011 os.close(fd)
1011 1012 ofp = posixfile(temp, "wb")
1012 1013 try:
1013 1014 try:
1014 1015 ifp = posixfile(name, "rb")
1015 1016 except IOError, inst:
1016 1017 if not getattr(inst, 'filename', None):
1017 1018 inst.filename = name
1018 1019 raise
1019 1020 for chunk in filechunkiter(ifp):
1020 1021 ofp.write(chunk)
1021 1022 ifp.close()
1022 1023 ofp.close()
1023 1024 except:
1024 1025 try: os.unlink(temp)
1025 1026 except: pass
1026 1027 raise
1027 1028 st = os.lstat(name)
1028 1029 os.chmod(temp, st.st_mode)
1029 1030 return temp
1030 1031
1031 1032 class atomictempfile(posixfile):
1032 1033 """the file will only be copied when rename is called"""
1033 1034 def __init__(self, name, mode):
1034 1035 self.__name = name
1035 1036 self.temp = mktempcopy(name)
1036 1037 posixfile.__init__(self, self.temp, mode)
1037 1038 def rename(self):
1038 1039 if not self.closed:
1039 1040 posixfile.close(self)
1040 1041 rename(self.temp, localpath(self.__name))
1041 1042 def __del__(self):
1042 1043 if not self.closed:
1043 1044 try:
1044 1045 os.unlink(self.temp)
1045 1046 except: pass
1046 1047 posixfile.close(self)
1047 1048
1048 1049 class atomicfile(atomictempfile):
1049 1050 """the file will only be copied on close"""
1050 1051 def __init__(self, name, mode):
1051 1052 atomictempfile.__init__(self, name, mode)
1052 1053 def close(self):
1053 1054 self.rename()
1054 1055 def __del__(self):
1055 1056 self.rename()
1056 1057
1057 1058 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
1058 1059 if audit_p:
1059 1060 audit_path(path)
1060 1061 f = os.path.join(p, path)
1061 1062
1062 1063 if not text:
1063 1064 mode += "b" # for that other OS
1064 1065
1065 1066 if mode[0] != "r":
1066 1067 try:
1067 1068 nlink = nlinks(f)
1068 1069 except OSError:
1069 1070 d = os.path.dirname(f)
1070 1071 if not os.path.isdir(d):
1071 1072 os.makedirs(d)
1072 1073 else:
1073 1074 if atomic:
1074 1075 return atomicfile(f, mode)
1075 1076 elif atomictemp:
1076 1077 return atomictempfile(f, mode)
1077 1078 if nlink > 1:
1078 1079 rename(mktempcopy(f), f)
1079 1080 return posixfile(f, mode)
1080 1081
1081 1082 return o
1082 1083
1083 1084 class chunkbuffer(object):
1084 1085 """Allow arbitrary sized chunks of data to be efficiently read from an
1085 1086 iterator over chunks of arbitrary size."""
1086 1087
1087 1088 def __init__(self, in_iter, targetsize = 2**16):
1088 1089 """in_iter is the iterator that's iterating over the input chunks.
1089 1090 targetsize is how big a buffer to try to maintain."""
1090 1091 self.in_iter = iter(in_iter)
1091 1092 self.buf = ''
1092 1093 self.targetsize = int(targetsize)
1093 1094 if self.targetsize <= 0:
1094 1095 raise ValueError(_("targetsize must be greater than 0, was %d") %
1095 1096 targetsize)
1096 1097 self.iterempty = False
1097 1098
1098 1099 def fillbuf(self):
1099 1100 """Ignore target size; read every chunk from iterator until empty."""
1100 1101 if not self.iterempty:
1101 1102 collector = cStringIO.StringIO()
1102 1103 collector.write(self.buf)
1103 1104 for ch in self.in_iter:
1104 1105 collector.write(ch)
1105 1106 self.buf = collector.getvalue()
1106 1107 self.iterempty = True
1107 1108
1108 1109 def read(self, l):
1109 1110 """Read L bytes of data from the iterator of chunks of data.
1110 1111 Returns less than L bytes if the iterator runs dry."""
1111 1112 if l > len(self.buf) and not self.iterempty:
1112 1113 # Clamp to a multiple of self.targetsize
1113 1114 targetsize = self.targetsize * ((l // self.targetsize) + 1)
1114 1115 collector = cStringIO.StringIO()
1115 1116 collector.write(self.buf)
1116 1117 collected = len(self.buf)
1117 1118 for chunk in self.in_iter:
1118 1119 collector.write(chunk)
1119 1120 collected += len(chunk)
1120 1121 if collected >= targetsize:
1121 1122 break
1122 1123 if collected < targetsize:
1123 1124 self.iterempty = True
1124 1125 self.buf = collector.getvalue()
1125 1126 s, self.buf = self.buf[:l], buffer(self.buf, l)
1126 1127 return s
1127 1128
1128 1129 def filechunkiter(f, size=65536, limit=None):
1129 1130 """Create a generator that produces the data in the file size
1130 1131 (default 65536) bytes at a time, up to optional limit (default is
1131 1132 to read all data). Chunks may be less than size bytes if the
1132 1133 chunk is the last chunk in the file, or the file is a socket or
1133 1134 some other type of file that sometimes reads less data than is
1134 1135 requested."""
1135 1136 assert size >= 0
1136 1137 assert limit is None or limit >= 0
1137 1138 while True:
1138 1139 if limit is None: nbytes = size
1139 1140 else: nbytes = min(limit, size)
1140 1141 s = nbytes and f.read(nbytes)
1141 1142 if not s: break
1142 1143 if limit: limit -= len(s)
1143 1144 yield s
1144 1145
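A sketch of how a caller copies an exact number of bytes from a stream with it, as stream_in does above (file names are hypothetical):

    src = open('payload.bin', 'rb')
    dst = open('payload.copy', 'wb')
    for chunk in filechunkiter(src, limit=1024):
        dst.write(chunk)   # copies at most 1024 bytes of src
    dst.close()
    src.close()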
1145 1146 def makedate():
1146 1147 lt = time.localtime()
1147 1148 if lt[8] == 1 and time.daylight:
1148 1149 tz = time.altzone
1149 1150 else:
1150 1151 tz = time.timezone
1151 1152 return time.mktime(lt), tz
1152 1153
1153 1154 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
1154 1155 """represent a (unixtime, offset) tuple as a localized time.
1155 1156 unixtime is seconds since the epoch, and offset is the time zone's
1156 1157 number of seconds away from UTC. if timezone is false, do not
1157 1158 append time zone to string."""
1158 1159 t, tz = date or makedate()
1159 1160 s = time.strftime(format, time.gmtime(float(t) - tz))
1160 1161 if timezone:
1161 1162 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
1162 1163 return s
1163 1164
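For example, in the C locale:

    datestr((0, 0))        # 'Thu Jan 01 00:00:00 1970 +0000'
    datestr((0, -3600))    # 'Thu Jan 01 01:00:00 1970 +0100'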
1164 1165 def strdate(string, format, defaults):
1165 1166 """parse a localized time string and return a (unixtime, offset) tuple.
1166 1167 if the string cannot be parsed, ValueError is raised."""
1167 1168 def timezone(string):
1168 1169 tz = string.split()[-1]
1169 1170 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1170 1171 tz = int(tz)
1171 1172 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
1172 1173 return offset
1173 1174 if tz == "GMT" or tz == "UTC":
1174 1175 return 0
1175 1176 return None
1176 1177
1177 1178 # NOTE: unixtime = localunixtime + offset
1178 1179 offset, date = timezone(string), string
1179 1180 if offset != None:
1180 1181 date = " ".join(string.split()[:-1])
1181 1182
1182 1183 # add missing elements from defaults
1183 1184 for part in defaults:
1184 1185 found = [True for p in part if ("%"+p) in format]
1185 1186 if not found:
1186 1187 date += "@" + defaults[part]
1187 1188 format += "@%" + part[0]
1188 1189
1189 1190 timetuple = time.strptime(date, format)
1190 1191 localunixtime = int(calendar.timegm(timetuple))
1191 1192 if offset is None:
1192 1193 # local timezone
1193 1194 unixtime = int(time.mktime(timetuple))
1194 1195 offset = unixtime - localunixtime
1195 1196 else:
1196 1197 unixtime = localunixtime + offset
1197 1198 return unixtime, offset
1198 1199
1199 1200 def parsedate(string, formats=None, defaults=None):
1200 1201 """parse a localized time string and return a (unixtime, offset) tuple.
1201 1202 The date may be a "unixtime offset" string or in one of the specified
1202 1203 formats."""
1203 1204 if not string:
1204 1205 return 0, 0
1205 1206 if not formats:
1206 1207 formats = defaultdateformats
1207 1208 string = string.strip()
1208 1209 try:
1209 1210 when, offset = map(int, string.split(' '))
1210 1211 except ValueError:
1211 1212 # fill out defaults
1212 1213 if not defaults:
1213 1214 defaults = {}
1214 1215 now = makedate()
1215 1216 for part in "d mb yY HI M S".split():
1216 1217 if part not in defaults:
1217 1218 if part[0] in "HMS":
1218 1219 defaults[part] = "00"
1219 1220 elif part[0] in "dm":
1220 1221 defaults[part] = "1"
1221 1222 else:
1222 1223 defaults[part] = datestr(now, "%" + part[0], False)
1223 1224
1224 1225 for format in formats:
1225 1226 try:
1226 1227 when, offset = strdate(string, format, defaults)
1227 1228 except ValueError:
1228 1229 pass
1229 1230 else:
1230 1231 break
1231 1232 else:
1232 1233 raise Abort(_('invalid date: %r ') % string)
1233 1234 # validate explicit (probably user-specified) date and
1234 1235 # time zone offset. values must fit in signed 32 bits for
1235 1236 # current 32-bit linux runtimes. timezones go from UTC-12
1236 1237 # to UTC+14
1237 1238 if abs(when) > 0x7fffffff:
1238 1239 raise Abort(_('date exceeds 32 bits: %d') % when)
1239 1240 if offset < -50400 or offset > 43200:
1240 1241 raise Abort(_('impossible time zone offset: %d') % offset)
1241 1242 return when, offset
1242 1243
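Two representative inputs (the second depends on the local time zone, so only its shape is shown):

    parsedate('1165432709 0')           # -> (1165432709, 0): the "unixtime offset" form
    parsedate('2006-12-06 13:18:29')    # -> (unixtime, local offset), via defaultdateformats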
1243 1244 def matchdate(date):
1244 1245 """Return a function that matches a given date match specifier
1245 1246
1246 1247 Formats include:
1247 1248
1248 1249 '{date}' match a given date to the accuracy provided
1249 1250
1250 1251 '<{date}' on or before a given date
1251 1252
1252 1253 '>{date}' on or after a given date
1253 1254
1254 1255 """
1255 1256
1256 1257 def lower(date):
1257 1258 return parsedate(date, extendeddateformats)[0]
1258 1259
1259 1260 def upper(date):
1260 1261 d = dict(mb="12", HI="23", M="59", S="59")
1261 1262 for days in "31 30 29".split():
1262 1263 try:
1263 1264 d["d"] = days
1264 1265 return parsedate(date, extendeddateformats, d)[0]
1265 1266 except:
1266 1267 pass
1267 1268 d["d"] = "28"
1268 1269 return parsedate(date, extendeddateformats, d)[0]
1269 1270
1270 1271 if date[0] == "<":
1271 1272 when = upper(date[1:])
1272 1273 return lambda x: x <= when
1273 1274 elif date[0] == ">":
1274 1275 when = lower(date[1:])
1275 1276 return lambda x: x >= when
1276 1277 elif date[0] == "-":
1277 1278 try:
1278 1279 days = int(date[1:])
1279 1280 except ValueError:
1280 1281 raise Abort(_("invalid day spec: %s") % date[1:])
1281 1282 when = makedate()[0] - days * 3600 * 24
1282 1283 return lambda x: x >= when
1283 1284 elif " to " in date:
1284 1285 a, b = date.split(" to ")
1285 1286 start, stop = lower(a), upper(b)
1286 1287 return lambda x: x >= start and x <= stop
1287 1288 else:
1288 1289 start, stop = lower(date), upper(date)
1289 1290 return lambda x: x >= start and x <= stop
1290 1291
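A couple of sample matchers (dates are illustrative):

    m = matchdate('>2006-12-01')
    m(parsedate('2006-12-06 13:18:29')[0])   # True: on or after Dec 1 2006
    matchdate('-7')(makedate()[0])           # True: within the last 7 days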
1291 1292 def shortuser(user):
1292 1293 """Return a short representation of a user name or email address."""
1293 1294 f = user.find('@')
1294 1295 if f >= 0:
1295 1296 user = user[:f]
1296 1297 f = user.find('<')
1297 1298 if f >= 0:
1298 1299 user = user[f+1:]
1299 1300 f = user.find(' ')
1300 1301 if f >= 0:
1301 1302 user = user[:f]
1302 1303 f = user.find('.')
1303 1304 if f >= 0:
1304 1305 user = user[:f]
1305 1306 return user
1306 1307
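Sample inputs (addresses are illustrative):

    shortuser('John Doe <john.doe@example.com>')   # -> 'john'
    shortuser('john@example.com')                  # -> 'john'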
1307 1308 def ellipsis(text, maxlength=400):
1308 1309 """Trim string to at most maxlength (default: 400) characters."""
1309 1310 if len(text) <= maxlength:
1310 1311 return text
1311 1312 else:
1312 1313 return "%s..." % (text[:maxlength-3])
1313 1314
1314 1315 def walkrepos(path):
1315 1316 '''yield every hg repository under path, recursively.'''
1316 1317 def errhandler(err):
1317 1318 if err.filename == path:
1318 1319 raise err
1319 1320
1320 1321 for root, dirs, files in os.walk(path, onerror=errhandler):
1321 1322 for d in dirs:
1322 1323 if d == '.hg':
1323 1324 yield root
1324 1325 dirs[:] = []
1325 1326 break
1326 1327
1327 1328 _rcpath = None
1328 1329
1329 1330 def rcpath():
1330 1331 '''return hgrc search path. if env var HGRCPATH is set, use it.
1331 1332 for each item in path, if directory, use files ending in .rc,
1332 1333 else use item.
1333 1334 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1334 1335 if no HGRCPATH, use default os-specific path.'''
1335 1336 global _rcpath
1336 1337 if _rcpath is None:
1337 1338 if 'HGRCPATH' in os.environ:
1338 1339 _rcpath = []
1339 1340 for p in os.environ['HGRCPATH'].split(os.pathsep):
1340 1341 if not p: continue
1341 1342 if os.path.isdir(p):
1342 1343 for f in os.listdir(p):
1343 1344 if f.endswith('.rc'):
1344 1345 _rcpath.append(os.path.join(p, f))
1345 1346 else:
1346 1347 _rcpath.append(p)
1347 1348 else:
1348 1349 _rcpath = os_rcpath()
1349 1350 return _rcpath
1350 1351
1351 1352 def bytecount(nbytes):
1352 1353 '''return byte count formatted as readable string, with units'''
1353 1354
1354 1355 units = (
1355 1356 (100, 1<<30, _('%.0f GB')),
1356 1357 (10, 1<<30, _('%.1f GB')),
1357 1358 (1, 1<<30, _('%.2f GB')),
1358 1359 (100, 1<<20, _('%.0f MB')),
1359 1360 (10, 1<<20, _('%.1f MB')),
1360 1361 (1, 1<<20, _('%.2f MB')),
1361 1362 (100, 1<<10, _('%.0f KB')),
1362 1363 (10, 1<<10, _('%.1f KB')),
1363 1364 (1, 1<<10, _('%.2f KB')),
1364 1365 (1, 1, _('%.0f bytes')),
1365 1366 )
1366 1367
1367 1368 for multiplier, divisor, format in units:
1368 1369 if nbytes >= divisor * multiplier:
1369 1370 return format % (nbytes / float(divisor))
1370 1371 return units[-1][2] % nbytes
1371 1372
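A few sample values:

    bytecount(0)             # '0 bytes'
    bytecount(12345)         # '12.1 KB'
    bytecount(1234567890)    # '1.15 GB'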
1372 1373 def drop_scheme(scheme, path):
1373 1374 sc = scheme + ':'
1374 1375 if path.startswith(sc):
1375 1376 path = path[len(sc):]
1376 1377 if path.startswith('//'):
1377 1378 path = path[2:]
1378 1379 return path
@@ -1,51 +1,50 b''
1 1 # basic operation
2 2 adding a
3 3 reverting a
4 4 changeset 2:b38a34ddfd9f backs out changeset 1:a820f4f40a57
5 5 a
6 6 # file that was removed is recreated
7 7 adding a
8 8 adding a
9 9 changeset 2:44cd84c7349a backs out changeset 1:76862dcce372
10 10 content
11 11 # backout of backout is as if nothing happened
12 12 removing a
13 13 changeset 3:0dd8a0ed5e99 backs out changeset 2:44cd84c7349a
14 14 cat: a: No such file or directory
15 15 # backout with merge
16 16 adding a
17 17 reverting a
18 18 changeset 3:6c77ecc28460 backs out changeset 1:314f55b1bf23
19 19 merging with changeset 2:b66ea5b77abb
20 20 merging a
21 21 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
22 22 (branch merge, don't forget to commit)
23 23 line 1
24 24 # backout should not back out subsequent changesets
25 25 adding a
26 26 adding b
27 27 reverting a
28 28 changeset 3:4cbb1e70196a backs out changeset 1:22bca4c721e5
29 29 the backout changeset is a new head - do not forget to merge
30 30 (use "backout --merge" if you want to auto-merge)
31 b: No such file or directory
32 31 adding a
33 32 adding b
34 33 adding c
35 34 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
36 35 adding d
37 36 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
38 37 (branch merge, don't forget to commit)
39 38 # backout of merge should fail
40 39 abort: cannot back out a merge changeset without --parent
41 40 # backout of merge with bad parent should fail
42 41 abort: cb9a9f314b8b is not a parent of b2f3bb92043e
43 42 # backout of non-merge with parent should fail
44 43 abort: cannot use --parent on non-merge changeset
45 44 # backout with valid parent should be ok
46 45 removing d
47 46 changeset 5:11fbd9be634c backs out changeset 4:b2f3bb92043e
48 47 rolling back last transaction
49 48 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
50 49 removing c
51 50 changeset 5:1a5f1a63bf2c backs out changeset 4:b2f3bb92043e
@@ -1,64 +1,62 b''
1 1 % new file
2 2 applying patch from stdin
3 3 % new empty file
4 4 applying patch from stdin
5 5 empty
6 6 % chmod +x
7 7 applying patch from stdin
8 8 % copy
9 9 applying patch from stdin
10 10 a
11 11 a
12 12 % rename
13 13 applying patch from stdin
14 14 copyx
15 15 empty
16 16 new
17 17 rename
18 18 % delete
19 19 applying patch from stdin
20 20 empty
21 21 new
22 22 rename
23 23 % regular diff
24 24 applying patch from stdin
25 25 % copy and modify
26 26 applying patch from stdin
27 27 a
28 28 a
29 29 b
30 30 a
31 31 a
32 32 % rename and modify
33 33 applying patch from stdin
34 copy2: No such file or directory
35 34 a
36 35 a
37 36 b
38 37 c
39 38 a
40 39 % one file renamed multiple times
41 40 applying patch from stdin
42 41 9 rename2 rename3 rename3-2 / rename3 (rename2)rename3-2 (rename2)
43 rename2: No such file or directory
44 42 rename3
45 43 rename3-2
46 44 a
47 45 a
48 46 b
49 47 c
50 48 a
51 49
52 50 a
53 51 a
54 52 b
55 53 c
56 54 a
57 55 % binary files and regular patch hunks
58 56 applying patch from stdin
59 57 foo
60 58 045c85ba38952325e126c70962cc0f9d9077bc67 644 binary
61 59 % many binary files
62 60 applying patch from stdin
63 61 045c85ba38952325e126c70962cc0f9d9077bc67 644 mbinary1
64 62 a874b471193996e7cb034bb301cac7bdaf3e3f46 644 mbinary2
@@ -1,27 +1,23 b''
1 1 adding a
2 2 adding b
3 3 adding t.h
4 4 adding t/x
5 5 a
6 NONEXISTENT: No such file or directory
7 6 a
8 7 b
9 8 t.h
10 9 t/x
11 a: No such file or directory
12 NONEXISTENT: No such file or directory
13 10 b
14 11 t.h
15 12 t/x
16 13 a
17 NONEXISTENT: No such file in rev ce18e5bc5cd3
18 14 a
19 15 b
20 16 t.h
21 17 t/x
22 18 % -I/-X with relative path should work
23 19 b
24 20 t.h
25 21 t/x
26 22 t/x
27 23 t/x