##// END OF EJS Templates
simplify flag handling...
Matt Mackall -
r6743:86e8187b default
parent child Browse files
Show More
@@ -1,567 +1,567 b''
1 1 # keyword.py - $Keyword$ expansion for Mercurial
2 2 #
3 3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7 #
8 8 # $Id$
9 9 #
10 10 # Keyword expansion hack against the grain of a DSCM
11 11 #
12 12 # There are many good reasons why this is not needed in a distributed
13 13 # SCM, still it may be useful in very small projects based on single
14 14 # files (like LaTeX packages), that are mostly addressed to an audience
15 15 # not running a version control system.
16 16 #
17 17 # For in-depth discussion refer to
18 18 # <http://www.selenic.com/mercurial/wiki/index.cgi/KeywordPlan>.
19 19 #
20 20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 21 #
22 22 # Binary files are not touched.
23 23 #
24 24 # Setup in hgrc:
25 25 #
26 26 # [extensions]
27 27 # # enable extension
28 28 # hgext.keyword =
29 29 #
30 30 # Files to act upon/ignore are specified in the [keyword] section.
31 31 # Customized keyword template mappings in the [keywordmaps] section.
32 32 #
33 33 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
34 34
35 35 '''keyword expansion in local repositories
36 36
37 37 This extension expands RCS/CVS-like or self-customized $Keywords$
38 38 in tracked text files selected by your configuration.
39 39
40 40 Keywords are only expanded in local repositories and not stored in
41 41 the change history. The mechanism can be regarded as a convenience
42 42 for the current user or for archive distribution.
43 43
44 44 Configuration is done in the [keyword] and [keywordmaps] sections
45 45 of hgrc files.
46 46
47 47 Example:
48 48
49 49 [keyword]
50 50 # expand keywords in every python file except those matching "x*"
51 51 **.py =
52 52 x* = ignore
53 53
54 54 Note: the more specific you are in your filename patterns
55 55 the less you lose speed in huge repos.
56 56
57 57 For [keywordmaps] template mapping and expansion demonstration and
58 58 control run "hg kwdemo".
59 59
60 60 An additional date template filter {date|utcdate} is provided.
61 61
62 62 The default template mappings (view with "hg kwdemo -d") can be replaced
63 63 with customized keywords and templates.
64 64 Again, run "hg kwdemo" to control the results of your config changes.
65 65
66 66 Before changing/disabling active keywords, run "hg kwshrink" to avoid
67 67 the risk of inadvertently storing expanded keywords in the change history.
68 68
69 69 To force expansion after enabling it, or a configuration change, run
70 70 "hg kwexpand".
71 71
72 72 Also, when committing with the record extension or using mq's qrecord, be aware
73 73 that keywords cannot be updated. Again, run "hg kwexpand" on the files in
74 74 question to update keyword expansions after all changes have been checked in.
75 75
76 76 Expansions spanning more than one line and incremental expansions,
77 77 like CVS' $Log$, are not supported. A keyword template map
78 78 "Log = {desc}" expands to the first line of the changeset description.
79 79 '''
80 80
81 81 from mercurial import commands, cmdutil, dispatch, filelog, revlog
82 82 from mercurial import patch, localrepo, templater, templatefilters, util
83 83 from mercurial.hgweb import webcommands
84 84 from mercurial.node import nullid, hex
85 85 from mercurial.i18n import _
86 86 import re, shutil, tempfile, time
87 87
# "hg kwdemo" may be run outside a repository (it builds its own temp repo)
commands.optionalrepo += ' kwdemo'

# hg commands that do not act on keywords
nokwcommands = ('add addremove annotate bundle copy export grep incoming init'
                ' log outgoing push rename rollback tip'
                ' convert email glog')

# hg commands that trigger expansion only when writing to working dir,
# not when reading filelog, and unexpand when reading from working dir
restricted = 'record qfold qimport qnew qpush qrefresh qrecord'
98 98
def utcdate(date):
    '''Returns hgdate in cvs-like UTC format.

    date is an hgdate pair (unixtime, tzoffset); only the timestamp is
    used, rendered as e.g. "2008/01/01 00:00:00" in UTC.'''
    timestamp = date[0]
    return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(timestamp))
102 102
# make keyword tools accessible
# shared module state: the active kwtemplater instance, the name of the
# running hg command, and include/exclude patterns from [keyword] config
kwtools = {'templater': None, 'hgcmd': '', 'inc': [], 'exc': ['.hg*']}
105 105
106 106
class kwtemplater(object):
    '''
    Sets up keyword templates, corresponding keyword regex, and
    provides keyword substitution functions.
    '''
    # default RCS/CVS-like keyword -> changeset template mappings;
    # replaced wholesale by the [keywordmaps] config section if present
    templates = {
        'Revision': '{node|short}',
        'Author': '{author|user}',
        'Date': '{date|utcdate}',
        'RCSFile': '{file|basename},v',
        'Source': '{root}/{file},v',
        'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
        'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
    }

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo
        # file matcher built from [keyword] include/exclude patterns
        self.matcher = util.matcher(repo.root,
                                    inc=kwtools['inc'], exc=kwtools['exc'])[1]
        # True while a command runs that must not expand on filelog reads
        self.restrict = kwtools['hgcmd'] in restricted.split()

        kwmaps = self.ui.configitems('keywordmaps')
        if kwmaps: # override default templates
            kwmaps = [(k, templater.parsestring(v, False))
                      for (k, v) in kwmaps]
            self.templates = dict(kwmaps)
        # regex matching $Keyword$ or an already expanded $Keyword: value $
        escaped = map(re.escape, self.templates.keys())
        kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
        self.re_kw = re.compile(kwpat)

        templatefilters.filters['utcdate'] = utcdate
        self.ct = cmdutil.changeset_templater(self.ui, self.repo,
                                              False, '', False)

    def getnode(self, path, fnode):
        '''Derives changenode from file path and filenode.'''
        # used by kwfilelog.read and kwexpand
        c = self.repo.filectx(path, fileid=fnode)
        return c.node()

    def substitute(self, data, path, node, subfunc):
        '''Replaces keywords in data with expanded template.

        subfunc is self.re_kw.sub or .subn; the return value is whatever
        that function returns (string, or (string, count) for subn).'''
        def kwsub(mobj):
            kw = mobj.group(1)
            self.ct.use_template(self.templates[kw])
            # render the template into a buffer and keep its first line only
            self.ui.pushbuffer()
            self.ct.show(changenode=node, root=self.repo.root, file=path)
            ekw = templatefilters.firstline(self.ui.popbuffer())
            return '$%s: %s $' % (kw, ekw)
        return subfunc(kwsub, data)

    def expand(self, path, node, data):
        '''Returns data with keywords expanded.'''
        # never expand in restricted mode, for unmatched paths, or binaries
        if not self.restrict and self.matcher(path) and not util.binary(data):
            changenode = self.getnode(path, node)
            return self.substitute(data, path, changenode, self.re_kw.sub)
        return data

    def iskwfile(self, path, islink):
        '''Returns true if path matches [keyword] pattern
        and is not a symbolic link.
        Caveat: localrepository._link fails on Windows.'''
        return self.matcher(path) and not islink(path)

    def overwrite(self, node, expand, files):
        '''Overwrites selected files expanding/shrinking keywords.

        node is the just-committed changeset (commit case) or None
        (kwexpand/kwshrink case); expand selects the direction.'''
        if node is not None: # commit
            ctx = self.repo.changectx(node)
            mf = ctx.manifest()
            # restrict to files both touched by and present in the commit
            files = [f for f in ctx.files() if f in mf]
            notify = self.ui.debug
        else: # kwexpand/kwshrink
            ctx = self.repo.changectx('.')
            mf = ctx.manifest()
            notify = self.ui.note
        candidates = [f for f in files if self.iskwfile(f, mf.linkf)]
        if candidates:
            self.restrict = True # do not expand when reading
            candidates.sort()
            action = expand and 'expanding' or 'shrinking'
            for f in candidates:
                fp = self.repo.file(f)
                data = fp.read(mf[f])
                if util.binary(data):
                    continue
                if expand:
                    changenode = node or self.getnode(f, mf[f])
                    data, found = self.substitute(data, f, changenode,
                                                  self.re_kw.subn)
                else:
                    found = self.re_kw.search(data)
                if found:
                    notify(_('overwriting %s %s keywords\n') % (f, action))
                    self.repo.wwrite(f, data, mf.flags(f))
                    # mark dirstate clean so the rewrite is not seen as a change
                    self.repo.dirstate.normal(f)
            self.restrict = False

    def shrinktext(self, text):
        '''Unconditionally removes all keyword substitutions from text.'''
        return self.re_kw.sub(r'$\1$', text)

    def shrink(self, fname, text):
        '''Returns text with all keyword substitutions removed.'''
        if self.matcher(fname) and not util.binary(text):
            return self.shrinktext(text)
        return text

    def shrinklines(self, fname, lines):
        '''Returns lines with keyword substitutions removed.'''
        if self.matcher(fname):
            text = ''.join(lines)
            if not util.binary(text):
                return self.shrinktext(text).splitlines(True)
        return lines

    def wread(self, fname, data):
        '''If in restricted mode returns data read from wdir with
        keyword substitutions removed.'''
        return self.restrict and self.shrink(fname, data) or data
227 227
class kwfilelog(filelog.filelog):
    '''
    Subclass of filelog to hook into its read, add, cmp methods.
    Keywords are "stored" unexpanded, and processed on reading.
    '''
    def __init__(self, opener, kwt, path):
        super(kwfilelog, self).__init__(opener, path)
        # kwt: the active kwtemplater; path: repo-relative file path
        self.kwt = kwt
        self.path = path

    def read(self, node):
        '''Expands keywords when reading filelog.'''
        data = super(kwfilelog, self).read(node)
        return self.kwt.expand(self.path, node, data)

    def add(self, text, meta, tr, link, p1=None, p2=None):
        '''Removes keyword substitutions when adding to filelog.'''
        text = self.kwt.shrink(self.path, text)
        return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)

    def cmp(self, node, text):
        '''Removes keyword substitutions for comparison.'''
        text = self.kwt.shrink(self.path, text)
        if self.renamed(node):
            # renamed entries carry copy metadata; compare raw stored text
            t2 = super(kwfilelog, self).read(node)
            return t2 != text
        return revlog.revlog.cmp(self, node, text)
255 255
def _status(ui, repo, kwt, *pats, **opts):
    '''Return working directory status when keyword handling is active,
    otherwise abort explaining why the [keyword] config is inactive.'''
    if not kwt:
        # no templater set up: patterns either matched nothing or are absent
        if ui.configitems('keyword'):
            raise util.Abort(_('[keyword] patterns cannot match'))
        raise util.Abort(_('no [keyword] patterns configured'))
    m = cmdutil.match(repo, pats, opts)
    return repo.status(match=m, list_clean=True)
265 265
def _kwfwrite(ui, repo, expand, *pats, **opts):
    '''Selects files and passes them to kwtemplater.overwrite.

    expand chooses between keyword expansion (True) and shrinking (False).
    Refuses to run on an uncommitted merge or with local changes, since
    overwrite rewrites working directory files in place.'''
    if repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    kwt = kwtools['templater']
    status = _status(ui, repo, kwt, *pats, **opts)
    modified, added, removed, deleted, unknown, ignored, clean = status
    if modified or added or removed or deleted:
        raise util.Abort(_('outstanding uncommitted changes'))
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        # only clean files are safe to rewrite
        kwt.overwrite(None, expand, clean)
    finally:
        # dropping the references releases the locks (refcount-based unlock)
        del wlock, lock
282 282
283 283
def demo(ui, repo, *args, **opts):
    '''print [keywordmaps] configuration and an expansion example

    Show current, custom, or default keyword template maps
    and their expansion.

    Extend current configuration by specifying maps as arguments
    and optionally by reading from an additional hgrc file.

    Override current keyword template maps with "default" option.
    '''
    def demostatus(stat):
        # print an indented status header line
        ui.status(_('\n\t%s\n') % stat)

    def demoitems(section, items):
        # print a config section in hgrc syntax
        ui.write('[%s]\n' % section)
        for k, v in items:
            ui.write('%s = %s\n' % (k, v))

    msg = 'hg keyword config and expansion example'
    kwstatus = 'current'
    fn = 'demo.txt'
    branchname = 'demobranch'
    # the whole demo runs inside a throwaway repository, removed at the end
    tmpdir = tempfile.mkdtemp('', 'kwdemo.')
    ui.note(_('creating temporary repo at %s\n') % tmpdir)
    repo = localrepo.localrepository(ui, tmpdir, True)
    ui.setconfig('keyword', fn, '')
    if args or opts.get('rcfile'):
        kwstatus = 'custom'
    if opts.get('rcfile'):
        ui.readconfig(opts.get('rcfile'))
    if opts.get('default'):
        kwstatus = 'default'
        kwmaps = kwtemplater.templates
        if ui.configitems('keywordmaps'):
            # override maps from optional rcfile
            for k, v in kwmaps.iteritems():
                ui.setconfig('keywordmaps', k, v)
    elif args:
        # simulate hgrc parsing
        rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
        fp = repo.opener('hgrc', 'w')
        fp.writelines(rcmaps)
        fp.close()
        ui.readconfig(repo.join('hgrc'))
    if not opts.get('default'):
        kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
    # activate the extension inside the demo repo
    uisetup(ui)
    reposetup(ui, repo)
    for k, v in ui.configitems('extensions'):
        if k.endswith('keyword'):
            extension = '%s = %s' % (k, v)
            break
    demostatus('config using %s keyword template maps' % kwstatus)
    ui.write('[extensions]\n%s\n' % extension)
    demoitems('keyword', ui.configitems('keyword'))
    demoitems('keywordmaps', kwmaps.iteritems())
    # write one unexpanded $Keyword$ per configured map into the demo file
    keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
    repo.wopener(fn, 'w').write(keywords)
    repo.add([fn])
    path = repo.wjoin(fn)
    ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
    ui.note(keywords)
    ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
    # silence branch command if not verbose
    quiet = ui.quiet
    ui.quiet = not ui.verbose
    commands.branch(ui, repo, branchname)
    ui.quiet = quiet
    # disable configured commit hooks so the demo commit has no side effects
    for name, cmd in ui.configitems('hooks'):
        if name.split('.', 1)[0].find('commit') > -1:
            repo.ui.setconfig('hooks', name, '')
    ui.note(_('unhooked all commit hooks\n'))
    ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
    repo.commit(text=msg)
    format = ui.verbose and ' in %s' % path or ''
    demostatus('%s keywords expanded%s' % (kwstatus, format))
    ui.write(repo.wread(fn))
    ui.debug(_('\nremoving temporary repo %s\n') % tmpdir)
    shutil.rmtree(tmpdir, ignore_errors=True)
364 364
def expand(ui, repo, *pats, **opts):
    '''expand keywords in working directory

    Run after (re)enabling keyword expansion.

    kwexpand refuses to run if given files contain local changes.
    '''
    # delegate to the shared writer with expansion switched on
    expanding = True
    _kwfwrite(ui, repo, expanding, *pats, **opts)
374 374
def files(ui, repo, *pats, **opts):
    '''print files currently configured for keyword expansion

    Crosscheck which files in working directory are potential targets for
    keyword expansion.
    That is, files matched by [keyword] config patterns but not symlinks.
    '''
    kwt = kwtools['templater']
    status = _status(ui, repo, kwt, *pats, **opts)
    modified, added, removed, deleted, unknown, ignored, clean = status
    files = modified + added + clean
    if opts.get('untracked'):
        files += unknown
    files.sort()
    wctx = repo.changectx(None)
    # symlinks never get expansion; detect via working context flags.
    # NOTE: the duplicate pre-change line calling wctx.fileflags(p) was a
    # leftover diff artifact; only the flags() call belongs here.
    islink = lambda p: 'l' in wctx.flags(p)
    kwfiles = [f for f in files if kwt.iskwfile(f, islink)]
    cwd = pats and repo.getcwd() or ''
    # status tuples: 'K' for expansion candidates, 'I' for ignored files
    kwfstats = not opts.get('ignore') and (('K', kwfiles),) or ()
    if opts.get('all') or opts.get('ignore'):
        kwfstats += (('I', [f for f in files if f not in kwfiles]),)
    for char, filenames in kwfstats:
        # prefix each path with its status flag when -a or verbose
        format = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
        for f in filenames:
            ui.write(format % repo.pathto(f, cwd))
400 400
def shrink(ui, repo, *pats, **opts):
    '''revert expanded keywords in working directory

    Run before changing/disabling active keywords
    or if you experience problems with "hg import" or "hg merge".

    kwshrink refuses to run if given files contain local changes.
    '''
    # delegate to the shared writer with expansion switched off
    expanding = False
    _kwfwrite(ui, repo, expanding, *pats, **opts)
411 411
412 412
def uisetup(ui):
    '''Collects [keyword] config in kwtools.
    Monkeypatches dispatch._parse if needed.'''

    # sort configured patterns into include and exclude lists
    for pat, opt in ui.configitems('keyword'):
        if opt == 'ignore':
            kwtools['exc'].append(pat)
        else:
            kwtools['inc'].append(pat)

    if not kwtools['inc']:
        # nothing to expand anywhere; leave dispatch untouched
        return

    dispatch_parse = dispatch._parse

    def kwdispatch_parse(ui, args):
        '''Monkeypatch dispatch._parse to obtain running hg command.'''
        parsed = dispatch_parse(ui, args)
        # remember the command name so reposetup/kwtemplater can consult it
        kwtools['hgcmd'] = parsed[0]
        return parsed

    dispatch._parse = kwdispatch_parse
432 432
def reposetup(ui, repo):
    '''Sets up repo as kwrepo for keyword substitution.
    Overrides file method to return kwfilelog instead of filelog
    if file matches user configuration.
    Wraps commit to overwrite configured files with updated
    keyword substitutions.
    Monkeypatches patch and webcommands.'''

    # bail out early when expansion cannot or must not happen here:
    # non-local repo, no include patterns, excluded command, a repo living
    # inside .hg, or a bundle repo
    try:
        if (not repo.local() or not kwtools['inc']
            or kwtools['hgcmd'] in nokwcommands.split()
            or '.hg' in util.splitpath(repo.root)
            or repo._url.startswith('bundle:')):
            return
    except AttributeError:
        pass

    kwtools['templater'] = kwt = kwtemplater(ui, repo)

    class kwrepo(repo.__class__):
        def file(self, f):
            if f[0] == '/':
                f = f[1:]
            # hand out keyword-aware filelogs
            return kwfilelog(self.sopener, kwt, f)

        def wread(self, filename):
            # shrink keywords when reading from wdir in restricted mode
            data = super(kwrepo, self).wread(filename)
            return kwt.wread(filename, data)

        def commit(self, files=None, text='', user=None, date=None,
                   match=None, force=False, force_editor=False,
                   p1=None, p2=None, extra={}, empty_ok=False):
            wlock = lock = None
            _p1 = _p2 = None
            try:
                wlock = self.wlock()
                lock = self.lock()
                # store and postpone commit hooks: keywords must be
                # overwritten in the working dir before hooks see the files
                commithooks = {}
                for name, cmd in ui.configitems('hooks'):
                    if name.split('.', 1)[0] == 'commit':
                        commithooks[name] = cmd
                        ui.setconfig('hooks', name, None)
                if commithooks:
                    # store parents for commit hook environment
                    if p1 is None:
                        _p1, _p2 = repo.dirstate.parents()
                    else:
                        _p1, _p2 = p1, p2 or nullid
                    _p1 = hex(_p1)
                    if _p2 == nullid:
                        _p2 = ''
                    else:
                        _p2 = hex(_p2)

                n = super(kwrepo, self).commit(files, text, user, date, match,
                                               force, force_editor, p1, p2,
                                               extra, empty_ok)

                # restore commit hooks
                for name, cmd in commithooks.iteritems():
                    ui.setconfig('hooks', name, cmd)
                if n is not None:
                    # expand keywords in the committed files, then fire the
                    # postponed commit hook with the saved parents
                    kwt.overwrite(n, True, None)
                    repo.hook('commit', node=n, parent1=_p1, parent2=_p2)
                return n
            finally:
                # dropping the references releases the locks
                del wlock, lock

    # monkeypatches
    def kwpatchfile_init(self, ui, fname, missing=False):
        '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
        rejects or conflicts due to expanded keywords in working dir.'''
        patchfile_init(self, ui, fname, missing)
        # shrink keywords read from working dir
        self.lines = kwt.shrinklines(self.fname, self.lines)

    def kw_diff(repo, node1=None, node2=None, match=None,
                fp=None, changes=None, opts=None):
        '''Monkeypatch patch.diff to avoid expansion except when
        comparing against working dir.'''
        if node2 is not None:
            kwt.matcher = util.never
        elif node1 is not None and node1 != repo.changectx('.').node():
            kwt.restrict = True
        patch_diff(repo, node1, node2, match, fp, changes, opts)

    def kwweb_annotate(web, req, tmpl):
        '''Wraps webcommands.annotate turning off keyword expansion.'''
        kwt.matcher = util.never
        return webcommands_annotate(web, req, tmpl)

    def kwweb_changeset(web, req, tmpl):
        '''Wraps webcommands.changeset turning off keyword expansion.'''
        kwt.matcher = util.never
        return webcommands_changeset(web, req, tmpl)

    def kwweb_filediff(web, req, tmpl):
        '''Wraps webcommands.filediff turning off keyword expansion.'''
        kwt.matcher = util.never
        return webcommands_filediff(web, req, tmpl)

    repo.__class__ = kwrepo

    # save the originals before patching so the wrappers can delegate
    patchfile_init = patch.patchfile.__init__
    patch_diff = patch.diff
    webcommands_annotate = webcommands.annotate
    webcommands_changeset = webcommands.changeset
    webcommands_filediff = webcommands.filediff

    patch.patchfile.__init__ = kwpatchfile_init
    patch.diff = kw_diff
    webcommands.annotate = kwweb_annotate
    webcommands.changeset = webcommands.rev = kwweb_changeset
    webcommands.filediff = webcommands.diff = kwweb_filediff
548 548
549 549
# command table: command name -> (function, option list, synopsis)
cmdtable = {
    'kwdemo':
        (demo,
         [('d', 'default', None, _('show default keyword template maps')),
          ('f', 'rcfile', [], _('read maps from rcfile'))],
         _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
    'kwexpand': (expand, commands.walkopts,
                 _('hg kwexpand [OPTION]... [FILE]...')),
    'kwfiles':
        (files,
         [('a', 'all', None, _('show keyword status flags of all files')),
          ('i', 'ignore', None, _('show files excluded from expansion')),
          ('u', 'untracked', None, _('additionally show untracked files')),
         ] + commands.walkopts,
         _('hg kwfiles [OPTION]... [FILE]...')),
    'kwshrink': (shrink, commands.walkopts,
                 _('hg kwshrink [OPTION]... [FILE]...')),
}
@@ -1,3339 +1,3339 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from repo import RepoError, NoCapability
10 10 from i18n import _
11 11 import os, re, sys, urllib
12 12 import hg, util, revlog, bundlerepo, extensions, copies
13 13 import difflib, patch, time, help, mdiff, tempfile
14 14 import version, socket
15 15 import archival, changegroup, cmdutil, hgweb.server, sshserver, hbisect
16 16 import merge as merge_
17 17
18 18 # Commands start here, listed alphabetically
19 19
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see hg revert.

    If no names are given, add all files in the repository.
    """

    bad = None
    exact = {}
    to_add = []
    m = cmdutil.match(repo, pats, opts)
    # silently skip names that match nothing
    m.bad = lambda x,y: True
    for abs in repo.walk(m):
        explicit = m.exact(abs)
        if not explicit and abs in repo.dirstate:
            # pattern-matched but already tracked: nothing to do
            continue
        # explicitly named files are announced only in verbose mode
        if not explicit or ui.verbose:
            ui.status(_('adding %s\n') % m.rel(abs))
        to_add.append(abs)
        if explicit:
            exact[abs] = 1
    if not opts.get('dry_run'):
        # report failures only for files the user named explicitly
        bad = [p for p in repo.add(to_add) if p in exact]
    return bad and 1 or 0
49 49
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s option to detect renamed files. With a parameter > 0,
    this compares every removed file with every added file and records
    those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its
    parameter. Detecting renamed files this way can be expensive.
    """
    raw = opts.get('similarity') or 0
    try:
        sim = float(raw)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if not (0 <= sim <= 100):
        raise util.Abort(_('similarity must be between 0 and 100'))
    # cmdutil expects similarity as a 0..1 fraction
    return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
71 71
def annotate(ui, repo, *pats, **opts):
    """show changeset information per file line

    List changes in files, showing the revision id responsible for each line

    This command is useful to discover who did a change or when a change took
    place.

    Without the -a option, annotate will avoid processing files it
    detects as binary. With -a, annotate will generate an annotation
    anyway, probably with undesirable results.
    """
    # short dates when quiet, full otherwise; cache per-changeset rendering
    datefunc = ui.quiet and util.shortdate or util.datestr
    getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one file name or pattern required'))

    # (option name, column formatter) pairs; x is (fctx, lineno)
    opmap = [('user', lambda x: ui.shortuser(x[0].user())),
             ('number', lambda x: str(x[0].rev())),
             ('changeset', lambda x: short(x[0].node())),
             ('date', getdate),
             ('follow', lambda x: x[0].path()),
            ]

    if (not opts['user'] and not opts['changeset'] and not opts['date']
        and not opts['follow']):
        # no column requested: default to revision numbers
        opts['number'] = 1

    linenumber = opts.get('line_number') is not None
    if (linenumber and (not opts['changeset']) and (not opts['number'])):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [func for op, func in opmap if opts.get(op)]
    if linenumber:
        # append the line number to the last requested column
        lastfunc = funcmap[-1]
        funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])

    ctx = repo.changectx(opts['rev'])

    m = cmdutil.match(repo, pats, opts)
    for abs in repo.walk(m, ctx.node()):
        fctx = ctx.filectx(abs)
        if not opts['text'] and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'),
                              linenumber=linenumber)
        pieces = []

        # render each column, right-aligned to its widest entry
        for f in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                m = max(map(len, l))
                pieces.append(["%*s" % (m, x) for x in l])

        # zip columns back into rows and emit "<cols>: <line content>"
        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
132 132
def archive(ui, repo, dest, **opts):
    '''create unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use "-r" to specify a different revision.

    To specify the type of archive to create, use "-t". Valid
    types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see "hg help export" for details.

    Each member added to an archive file has a directory prefix
    prepended. Use "-p" to specify a format string for the prefix.
    The default is the basename of the archive, with suffixes removed.
    '''

    ctx = repo.changectx(opts['rev'])
    if not ctx:
        raise util.Abort(_('repository has no revisions'))
    node = ctx.node()
    # expand format characters (%h, %r, ...) in the destination name
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    matchfn = cmdutil.match(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts['prefix']
    if dest == '-':
        # stream the archive to stdout; a plain directory cannot be streamed
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts['no_decode'],
                     matchfn, prefix)
175 175
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new
    changeset is a child of the backed out changeset.

    If you back out a changeset other than the tip, a new head is
    created. This head will be the new tip and you should merge this
    backout changeset with another head (current one by default).

    The --merge option remembers the parent of the working directory
    before starting the backout, then merges the new head with that
    changeset afterwards. This saves you from doing the merge by
    hand. The result of this merge is not committed, as for a normal
    merge.

    See 'hg help dates' for a list of formats valid for -d/--date.
    '''
    # node (positional) and rev (-r) are alternatives; accept exactly one
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # refuse to run with uncommitted local changes
    cmdutil.bail_if_changed(repo)
    node = repo.lookup(rev)

    op1, op2 = repo.dirstate.parents()
    # backed-out changeset must be an ancestor of the working dir parent
    a = repo.changelog.ancestor(op1, node)
    if a != node:
        raise util.Abort(_('cannot back out change on a different branch'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot back out a change with no parents'))
    if p2 != nullid:
        # backing out a merge needs an explicit --parent to revert against
        if not opts['parent']:
            raise util.Abort(_('cannot back out a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts['parent']:
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    hg.clean(repo, node, show_stats=False)
    repo.dirstate.setbranch(branch)
    # revert everything to the chosen parent, then commit the reversal
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert_opts['no_backup'] = None
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (short(node))
    commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        # "rev:shortnode" display form
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        # return to the pre-backout working dir parent
        hg.clean(repo, op1, show_stats=False)
        if opts['merge']:
            ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
            hg.merge(repo, hex(repo.changelog.tip()))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))
262 262
def bisect(ui, repo, rev=None, extra=None,
           reset=None, good=None, bad=None, skip=None, noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems.
    To use, mark the earliest changeset you know exhibits the problem
    as bad, then mark the latest changeset which is free from the
    problem as good. Bisect will update your working directory to a
    revision for testing. Once you have performed tests, mark the
    working directory as bad or good and bisect will either update to
    another candidate changeset or announce that it has found the bad
    revision.
    """
    # backward compatibility
    # accept the old positional form "hg bisect good|bad|reset|init"
    # by translating the first argument into the matching flag
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra or good + bad + skip + reset > 1:
        # flags are mutually exclusive and take no extra argument
        raise util.Abort("Incompatible arguments")

    if reset:
        # --reset: discard any saved bisection state and stop
        p = repo.join("bisect.state")
        if os.path.exists(p):
            os.unlink(p)
        return

    # load state
    # state file format: one "<kind> <node-hex>" line per marked revision
    state = {'good': [], 'bad': [], 'skip': []}
    if os.path.exists(repo.join("bisect.state")):
        for l in repo.opener("bisect.state"):
            kind, node = l[:-1].split()
            node = repo.lookup(node)
            if kind not in state:
                raise util.Abort(_("unknown bisect kind %s") % kind)
            state[kind].append(node)

    # update state
    # mark the given revision (default: working dir parent) as requested
    node = repo.lookup(rev or '.')
    if good:
        state['good'].append(node)
    elif bad:
        state['bad'].append(node)
    elif skip:
        state['skip'].append(node)

    # save state
    # rewrite the whole state file atomically under the working-dir lock
    f = repo.opener("bisect.state", "w", atomictemp=True)
    wlock = repo.wlock()
    try:
        for kind in state:
            for node in state[kind]:
                f.write("%s %s\n" % (kind, hex(node)))
        f.rename()
    finally:
        # dropping the reference releases the lock
        del wlock

    # nothing to bisect until at least one good and one bad mark exist
    if not state['good'] or not state['bad']:
        return

    # actually bisect
    node, changesets, good = hbisect.bisect(repo.changelog, state)
    if changesets == 0:
        # search converged: report the first good/bad revision found
        ui.write(_("The first %s revision is:\n") % (good and "good" or "bad"))
        displayer = cmdutil.show_changeset(ui, repo, {})
        displayer.show(changenode=node)
    elif node is not None:
        # compute the approximate number of remaining tests
        # (ceil(log2(changesets)) via repeated doubling)
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %s:%s "
                   "(%s changesets remaining, ~%s tests)\n")
                 % (rev, short(node), changesets, tests))
        if not noupdate:
            # refuse to clobber local modifications, then check out the
            # next candidate revision for the user to test
            cmdutil.bail_if_changed(repo)
            return hg.clean(repo, node)
346 346
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch does not exist in
    the repository until the next commit).

    Unless --force is specified, branch will not let you set a
    branch name that shadows an existing branch.

    Use the command 'hg update' to switch to an existing branch.
    """

    if not label:
        # no argument: just report the working directory's branch
        ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
        return

    # refuse to shadow an existing branch unless --force is given or the
    # name matches a branch of one of the working directory's parents
    if not opts.get('force') and label in repo.branchtags():
        parentbranches = [p.branch() for p in repo.changectx(None).parents()]
        if label not in parentbranches:
            raise util.Abort(_('a branch of the same name already exists'
                               ' (use --force to override)'))
    repo.dirstate.setbranch(util.fromlocal(label))
    ui.status(_('marked working directory as branch %s\n') % label)
369 369
def branches(ui, repo, active=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are
    inactive. If active is specified, only show active branches.

    A branch is considered active if it contains repository heads.

    Use the command 'hg update' to switch to an existing branch.
    """
    if ui.debugflag:
        hexfunc = hex
    else:
        hexfunc = short
    # branches owning one or more repository heads count as active
    activebranches = [util.tolocal(repo.changectx(n).branch())
                      for n in repo.heads()]
    branchlist = []
    for tag, node in repo.branchtags().items():
        branchlist.append((tag in activebranches,
                           repo.changelog.rev(node), tag))
    # newest (highest revision) first, active before inactive on ties
    branchlist.sort()
    branchlist.reverse()

    for isactive, node, tag in branchlist:
        if active and not isactive:
            continue
        if ui.quiet:
            ui.write("%s\n" % tag)
            continue
        # right-justify the revision so the hash column lines up
        rev = str(node).rjust(32 - util.locallen(tag))
        if isactive:
            inactivemark = ''
        else:
            inactivemark = ' (inactive)'
        ui.write("%s%s:%s%s\n" % (tag, rev, hexfunc(repo.lookup(node)),
                                  inactivemark))
397 397
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not
    found in the other repository.

    If no destination repository is specified the destination is
    assumed to have all the nodes specified by one or more --base
    parameters. To create a bundle containing all changesets, use
    --all (or --base null). To change the compression method applied,
    use the -t option (by default, bundles are compressed using bz2).

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command.
    This is useful when direct push and pull are not available or when
    exporting an entire repository is undesirable.

    Applying bundles preserves all changeset contents including
    permissions, copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    if opts.get('all'):
        # --all is shorthand for --base null (bundle everything)
        base = ['null']
    else:
        base = opts.get('base')
    if base:
        if dest:
            # fixed typo in this message ("specifiying")
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        # walk ancestors of the requested heads, pruning anything
        # reachable from a --base node; nodes whose parents are all
        # pruned become the roots of the outgoing set
        o = []
        has = {nullid: None}
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        # no --base: compare against the destination repository
        cmdutil.setremoteconfig(ui, opts)
        dest, revs, checkout = hg.parseurl(
            ui.expandpath(dest or 'default-push', dest or 'default'), revs)
        other = hg.repository(ui, dest)
        o = repo.findoutgoing(other, force=opts['force'])

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')

    # map the user-facing compression name to the on-disk bundle header
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))

    changegroup.writebundle(cg, fname, bundletype)
470 470
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are the same as
    for the export command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repo root
    %p root-relative path name of file being printed
    """
    ctx = repo.changectx(opts['rev'])
    matcher = cmdutil.match(repo, (file1,) + pats, opts)
    exitcode = 1
    for path in repo.walk(matcher, ctx.node()):
        # one output stream per matched file (may be stdout)
        fp = cmdutil.make_file(repo, opts['output'], ctx.node(),
                               pathname=path)
        data = ctx.filectx(path).data()
        if opts.get('decode'):
            data = repo.wwritedata(path, data)
        fp.write(data)
        exitcode = 0  # at least one file was printed
    return exitcode
497 497
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the
    basename of the source.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls.

    For efficiency, hardlinks are used for cloning whenever the source
    and destination are on the same filesystem (note this applies only
    to the repository data, not to the checked out files). Some
    filesystems, such as AFS, implement hardlinking incorrectly, but
    do not report errors. In these cases, use the --pull option to
    avoid hardlinking.

    In some cases, you can clone repositories and checked out files
    using full hardlinks with

    $ cp -al REPO REPOCLONE

    This is the fastest way to clone, but it is not always safe. The
    operation is not atomic (making sure REPO is not modified during
    the operation is up to you) and you have to make sure your editor
    breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
    this is not compatible with certain extensions that place their
    metadata under the .hg directory, such as mq.

    If you use the -r option to clone up to a specific revision, no
    subsequent revisions will be present in the cloned repository.
    This option implies --pull, even on local repositories.

    If the -U option is used, the new clone will contain only a repository
    (.hg) and no working copy (the working copy parent is the null revision).

    See pull for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    cmdutil.setremoteconfig(ui, opts)
    # gather the clone parameters before delegating to hg.clone
    cloneopts = {'pull': opts['pull'],
                 'stream': opts['uncompressed'],
                 'rev': opts['rev'],
                 'update': not opts['noupdate']}
    hg.clone(ui, source, dest, **cloneopts)
548 548
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository.

    If a list of files is omitted, all changes reported by "hg status"
    will be committed.

    If you are committing the result of a merge, do not provide any
    file names or -I/-X filters.

    If no commit message is specified, the configured editor is started to
    enter a message.

    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    # the actual commit action; cmdutil.commit handles pattern matching,
    # message/logfile resolution and invokes this with the final message
    def commitfunc(ui, repo, message, match, opts):
        return repo.commit(match.files(), message, opts['user'], opts['date'],
                           match, force_editor=opts.get('force_editor'))

    node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
    if not node:
        # nothing was committed
        return
    cl = repo.changelog
    rev = cl.rev(node)
    parents = cl.parentrevs(rev)
    if rev - 1 in parents:
        # one of the parents was the old tip
        return
    # warn when this commit created an additional head: either it has no
    # parents at all, or every parent's branch already had another head
    # besides the new changeset
    if (parents == (nullrev, nullrev) or
        len(cl.heads(cl.node(parents[0]))) > 1 and
        (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
        ui.status(_('created new head\n'))
582 582
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a
    directory, copies are put in that directory. If dest is a file,
    there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a copy
    before that, see hg revert.
    """
    # take the working-dir lock for the duration of the copy;
    # dropping the reference releases it
    wl = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts)
    finally:
        del wl
602 602
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs not in (2, 3):
        raise util.Abort(_('either two or three arguments required'))
    if nargs == 3:
        # explicit index file given: open it directly
        index, rev1, rev2 = args
        rl = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
        lookup = rl.lookup
    else:
        # two arguments: use the changelog of the current repository
        if not repo:
            raise util.Abort(_("There is no Mercurial repository here "
                               "(.hg not found)"))
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    anc = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write("%d:%s\n" % (rl.rev(anc), hex(anc)))
620 620
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if not opts['options']:
        # complete command names matching the given prefix
        names = cmdutil.findpossible(ui, cmd, table).keys()
        names.sort()
        ui.write("%s\n" % "\n".join(names))
        return

    # --options: list option flags instead (global ones, plus the
    # command's own options when a command is named)
    opttables = [globalopts]
    if cmd:
        aliases, entry = cmdutil.findcmd(ui, cmd, table)
        opttables.append(entry[1])
    flags = []
    for opttable in opttables:
        for opt in opttable:
            if opt[0]:
                flags.append('-%s' % opt[0])
            flags.append('--%s' % opt[1])
    ui.write("%s\n" % "\n".join(flags))
641 641
def debugfsinfo(ui, path = "."):
    """show information about the filesystem at path

    Probes exec-bit, symlink and case-folding support by creating a
    scratch file '.debugfsinfo' in the current directory.
    """
    # create the probe file and close the handle explicitly instead of
    # relying on garbage collection (file(...).write(...) left it open)
    fh = open('.debugfsinfo', 'w')
    try:
        fh.write('')
    finally:
        fh.close()
    try:
        ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
        ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
        ui.write('case-sensitive: %s\n' % (util.checkfolding('.debugfsinfo')
                                           and 'yes' or 'no'))
    finally:
        # always remove the scratch file, even if a probe fails
        os.unlink('.debugfsinfo')
649 649
def debugrebuildstate(ui, repo, rev=""):
    """rebuild the dirstate as it would look like for the given revision"""
    # an empty revision argument means the repository tip
    if rev == "":
        rev = repo.changelog.tip()
    manifest = repo.changectx(rev).manifest()
    wl = repo.wlock()
    try:
        repo.dirstate.rebuild(rev, manifest)
    finally:
        # dropping the reference releases the working-dir lock
        del wl
661 661
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    manifest1 = repo.changectx(parent1).manifest()
    manifest2 = repo.changectx(parent2).manifest()
    errors = 0
    # forward check: every dirstate entry (single-letter state) must be
    # consistent with the parent manifests
    for fname in repo.dirstate:
        state = repo.dirstate[fname]
        if state in "nr" and fname not in manifest1:
            ui.warn(_("%s in state %s, but not in manifest1\n") %
                    (fname, state))
            errors += 1
        if state in "a" and fname in manifest1:
            ui.warn(_("%s in state %s, but also in manifest1\n") %
                    (fname, state))
            errors += 1
        if state in "m" and fname not in manifest1 and fname not in manifest2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (fname, state))
            errors += 1
    # reverse check: every file in the first parent's manifest must be
    # tracked by the dirstate in an expected state
    for fname in manifest1:
        state = repo.dirstate[fname]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") %
                    (fname, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
688 688
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no args, print names and values of all config items.

    With one arg of the form section.name, print just the value of
    that config item.

    With multiple args, print names and values of all config items
    with matching section names."""

    untrusted = bool(opts.get('untrusted'))
    # at most one argument may name a single item (i.e. contain a dot)
    if values and len([v for v in values if '.' in v]) > 1:
        raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if not values:
            # no filters: print everything
            ui.write('%s=%s\n' % (sectname, value))
            continue
        for v in values:
            if v == section:
                # section match: print name and value
                ui.write('%s=%s\n' % (sectname, value))
            elif v == sectname:
                # exact item match: print just the value
                ui.write(value, '\n')
714 714
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care.
    """

    # the second parent defaults to the null revision
    rev2 = rev2 or hex(nullid)

    wl = repo.wlock()
    try:
        repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
    finally:
        # dropping the reference releases the working-dir lock
        del wl
730 730
def debugstate(ui, repo, nodates=None):
    """show the contents of the current dirstate"""
    # sort entries by file name for stable output
    k = repo.dirstate._map.items()
    k.sort()
    timestr = ""
    showdate = not nodates
    for file_, ent in k:
        # ent indices used below: [0] state char, [1] mode bits,
        # [2] printed as a decimal (presumably size — confirm against
        # dirstate format), [3] timestamp (-1 means unset)
        if showdate:
            if ent[3] == -1:
                # Pad or slice to locale representation
                locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
                timestr = 'unset'
                timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
            else:
                timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
        if ent[1] & 020000:
            # 020000 is the S_IFLNK bit: recorded mode marks a symlink
            mode = 'lnk'
        else:
            # otherwise show the permission bits in octal
            mode = '%3o' % (ent[1] & 0777)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    # copy/rename records are kept separately from the entry map
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
753 753
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # derive the ".i" index name from the given ".d" data file name
    indexname = file_[:-2] + ".i"
    rl = revlog.revlog(util.opener(os.getcwd(), audit=False), indexname)
    try:
        ui.write(rl.revision(rl.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
761 761
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    # parsed is a two-element tuple (fills "internal: %s %s")
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        matcher = util.matchdate(range)
        ui.write("match: %s\n" % matcher(parsed[0]))
773 773
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("   rev    offset  length   base linkrev" +
             " nodeid       p1           p2\n")
    for i in xrange(r.count()):
        node = r.node(i)
        try:
            pp = r.parents(node)
        except Exception:
            # was a bare "except:", which also swallowed KeyboardInterrupt
            # and SystemExit; on a damaged entry fall back to null parents
            # so the rest of the index can still be dumped
            pp = [nullid, nullid]
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
                short(node), short(pp[0]), short(pp[1])))
788 788
def debugindexdot(ui, file_):
    """dump an index DAG as a .dot file"""
    rl = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for rev in xrange(rl.count()):
        parent1, parent2 = rl.parents(rl.node(rev))
        # emit an edge from each (non-null second) parent to this rev
        ui.write("\t%d -> %d\n" % (rl.rev(parent1), rev))
        if parent2 != nullid:
            ui.write("\t%d -> %d\n" % (rl.rev(parent2), rev))
    ui.write("}\n")
799 799 ui.write("}\n")
800 800
def debuginstall(ui):
    '''test Mercurial installation'''

    # write contents to a fresh private temp file and return its path
    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    problems = 0

    # encoding
    # a failing round-trip through the local encoding means the locale
    # is not set up correctly
    ui.status(_("Checking encoding (%s)...\n") % util._encoding)
    try:
        util.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    # importing the C extension modules verifies they were built/installed
    ui.status(_("Checking extensions...\n"))
    try:
        import bdiff, mpatch, base85
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    # loading the default command-line template map verifies the
    # template files were installed where the templater can find them
    ui.status(_("Checking templates...\n"))
    try:
        import templater
        t = templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # patch
    # round-trip test: diff two small texts, apply the patch, and check
    # the patched file matches the target text
    ui.status(_("Checking patch...\n"))
    patchproblems = 0
    a = "1\n2\n3\n4\n"
    b = "1\n2\n3\ninsert\n4\n"
    fa = writetemp(a)
    d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
                      os.path.basename(fa))
    fd = writetemp(d)

    files = {}
    try:
        patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
    except util.Abort, e:
        ui.write(_(" patch call failed:\n"))
        ui.write(" " + str(e) + "\n")
        patchproblems += 1
    else:
        # exactly the one patched file should have been touched
        if list(files) != [os.path.basename(fa)]:
            ui.write(_(" unexpected patch output!\n"))
            patchproblems += 1
        a = file(fa).read()
        if a != b:
            ui.write(_(" patch test failed!\n"))
            patchproblems += 1

    if patchproblems:
        if ui.config('ui', 'patch'):
            # the user configured an external patch tool; blame that first
            ui.write(_(" (Current patch tool may be incompatible with patch,"
                       " or misconfigured. Please check your .hgrc file)\n"))
        else:
            ui.write(_(" Internal patcher failure, please report this error"
                       " to http://www.selenic.com/mercurial/bts\n"))
    problems += patchproblems

    os.unlink(fa)
    os.unlink(fd)

    # editor
    # the configured editor (or its first word, for commands with
    # arguments) must resolve to an executable on PATH
    ui.status(_("Checking commit editor...\n"))
    editor = ui.geteditor()
    cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        problems += 1

    # check username
    # same lookup order ui.username() uses: HGUSER, ui.username, EMAIL
    ui.status(_("Checking username...\n"))
    user = os.environ.get("HGUSER")
    if user is None:
        user = ui.config("ui", "username")
    if user is None:
        user = os.environ.get("EMAIL")
    if not user:
        # let ui.username() print its own diagnostic
        ui.warn(" ")
        ui.username()
        ui.write(_(" (specify a username in your .hgrc file)\n"))

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems
912 912
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    ctx = repo.changectx(opts.get('rev', 'tip'))
    matcher = cmdutil.match(repo, (file1,) + pats, opts)
    for path in repo.walk(matcher, ctx.node()):
        fctx = ctx.filectx(path)
        renamed = fctx.filelog().renamed(fctx.filenode())
        rel = matcher.rel(path)
        if not renamed:
            ui.write(_("%s not renamed\n") % rel)
        else:
            # renamed unpacks as (source path, source filenode)
            ui.write(_("%s renamed from %s:%s\n") % (rel, renamed[0],
                                                     hex(renamed[1])))
926 926
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    matcher = cmdutil.match(repo, pats, opts)
    matched = list(repo.walk(matcher))
    if not matched:
        return
    # size the columns to the longest absolute and relative names
    widest_abs = max([len(path) for path in matched])
    widest_rel = max([len(matcher.rel(path)) for path in matched])
    fmt = 'f %%-%ds %%-%ds %%s' % (widest_abs, widest_rel)
    for path in matched:
        if matcher.exact(path):
            flag = 'exact'
        else:
            flag = ''
        line = fmt % (path, matcher.rel(path), flag)
        ui.write("%s\n" % line.rstrip())
939 939
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will
    default to comparing against the working directory's first parent
    changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.

    Without the -a option, diff will avoid generating diffs of files
    it detects as binary. With -a, diff will generate a diff anyway,
    probably with undesirable results.
    """
    # resolve -r arguments into the pair of nodes to compare
    node1, node2 = cmdutil.revpair(repo, opts['rev'])
    matcher = cmdutil.match(repo, pats, opts)
    diffopts = patch.diffopts(ui, opts)
    patch.diff(repo, node1, node2, match=matcher, opts=diffopts)
965 965
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author,
    changeset hash, parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets,
    as it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    %% literal "%" character
    %H changeset hash (40 bytes of hexadecimal)
    %N number of patches being generated
    %R changeset revision number
    %b basename of the exporting repository
    %h short-form changeset hash (12 bytes of hexadecimal)
    %n zero-padded sequence number, starting at 1
    %r zero-padded changeset revision number

    Without the -a option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    # pick singular or plural note depending on how many revs matched
    if len(revs) > 1:
        msg = _('exporting patches:\n')
    else:
        msg = _('exporting patch:\n')
    ui.note(msg)
    patch.export(repo, revs, template=opts['output'],
                 switch_parent=opts['switch_parent'],
                 opts=patch.diffopts(ui, opts))
1006 1006
1007 1007 def grep(ui, repo, pattern, *pats, **opts):
1008 1008 """search for a pattern in specified files and revisions
1009 1009
1010 1010 Search revisions of files for a regular expression.
1011 1011
1012 1012 This command behaves differently than Unix grep. It only accepts
1013 1013 Python/Perl regexps. It searches repository history, not the
1014 1014 working directory. It always prints the revision number in which
1015 1015 a match appears.
1016 1016
1017 1017 By default, grep only prints output for the first revision of a
1018 1018 file in which it finds a match. To get it to print every revision
1019 1019 that contains a change in match status ("-" for a match that
1020 1020 becomes a non-match, or "+" for a non-match that becomes a match),
1021 1021 use the --all flag.
1022 1022 """
1023 1023 reflags = 0
1024 1024 if opts['ignore_case']:
1025 1025 reflags |= re.I
1026 1026 try:
1027 1027 regexp = re.compile(pattern, reflags)
1028 1028 except Exception, inst:
1029 1029 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1030 1030 return None
1031 1031 sep, eol = ':', '\n'
1032 1032 if opts['print0']:
1033 1033 sep = eol = '\0'
1034 1034
1035 1035 fcache = {}
1036 1036 def getfile(fn):
1037 1037 if fn not in fcache:
1038 1038 fcache[fn] = repo.file(fn)
1039 1039 return fcache[fn]
1040 1040
1041 1041 def matchlines(body):
1042 1042 begin = 0
1043 1043 linenum = 0
1044 1044 while True:
1045 1045 match = regexp.search(body, begin)
1046 1046 if not match:
1047 1047 break
1048 1048 mstart, mend = match.span()
1049 1049 linenum += body.count('\n', begin, mstart) + 1
1050 1050 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1051 1051 lend = body.find('\n', mend)
1052 1052 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1053 1053 begin = lend + 1
1054 1054
1055 1055 class linestate(object):
1056 1056 def __init__(self, line, linenum, colstart, colend):
1057 1057 self.line = line
1058 1058 self.linenum = linenum
1059 1059 self.colstart = colstart
1060 1060 self.colend = colend
1061 1061
1062 1062 def __hash__(self):
1063 1063 return hash((self.linenum, self.line))
1064 1064
1065 1065 def __eq__(self, other):
1066 1066 return self.line == other.line
1067 1067
1068 1068 matches = {}
1069 1069 copies = {}
1070 1070 def grepbody(fn, rev, body):
1071 1071 matches[rev].setdefault(fn, [])
1072 1072 m = matches[rev][fn]
1073 1073 for lnum, cstart, cend, line in matchlines(body):
1074 1074 s = linestate(line, lnum, cstart, cend)
1075 1075 m.append(s)
1076 1076
1077 1077 def difflinestates(a, b):
1078 1078 sm = difflib.SequenceMatcher(None, a, b)
1079 1079 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1080 1080 if tag == 'insert':
1081 1081 for i in xrange(blo, bhi):
1082 1082 yield ('+', b[i])
1083 1083 elif tag == 'delete':
1084 1084 for i in xrange(alo, ahi):
1085 1085 yield ('-', a[i])
1086 1086 elif tag == 'replace':
1087 1087 for i in xrange(alo, ahi):
1088 1088 yield ('-', a[i])
1089 1089 for i in xrange(blo, bhi):
1090 1090 yield ('+', b[i])
1091 1091
1092 1092 prev = {}
1093 1093 def display(fn, rev, states, prevstates):
1094 1094 datefunc = ui.quiet and util.shortdate or util.datestr
1095 1095 found = False
1096 1096 filerevmatches = {}
1097 1097 r = prev.get(fn, -1)
1098 1098 if opts['all']:
1099 1099 iter = difflinestates(states, prevstates)
1100 1100 else:
1101 1101 iter = [('', l) for l in prevstates]
1102 1102 for change, l in iter:
1103 1103 cols = [fn, str(r)]
1104 1104 if opts['line_number']:
1105 1105 cols.append(str(l.linenum))
1106 1106 if opts['all']:
1107 1107 cols.append(change)
1108 1108 if opts['user']:
1109 1109 cols.append(ui.shortuser(get(r)[1]))
1110 1110 if opts.get('date'):
1111 1111 cols.append(datefunc(get(r)[2]))
1112 1112 if opts['files_with_matches']:
1113 1113 c = (fn, r)
1114 1114 if c in filerevmatches:
1115 1115 continue
1116 1116 filerevmatches[c] = 1
1117 1117 else:
1118 1118 cols.append(l.line)
1119 1119 ui.write(sep.join(cols), eol)
1120 1120 found = True
1121 1121 return found
1122 1122
1123 1123 fstate = {}
1124 1124 skip = {}
1125 1125 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1126 1126 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1127 1127 found = False
1128 1128 follow = opts.get('follow')
1129 1129 for st, rev, fns in changeiter:
1130 1130 if st == 'window':
1131 1131 matches.clear()
1132 1132 elif st == 'add':
1133 1133 ctx = repo.changectx(rev)
1134 1134 matches[rev] = {}
1135 1135 for fn in fns:
1136 1136 if fn in skip:
1137 1137 continue
1138 1138 try:
1139 1139 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1140 1140 fstate.setdefault(fn, [])
1141 1141 if follow:
1142 1142 copied = getfile(fn).renamed(ctx.filenode(fn))
1143 1143 if copied:
1144 1144 copies.setdefault(rev, {})[fn] = copied[0]
1145 1145 except revlog.LookupError:
1146 1146 pass
1147 1147 elif st == 'iter':
1148 1148 states = matches[rev].items()
1149 1149 states.sort()
1150 1150 for fn, m in states:
1151 1151 copy = copies.get(rev, {}).get(fn)
1152 1152 if fn in skip:
1153 1153 if copy:
1154 1154 skip[copy] = True
1155 1155 continue
1156 1156 if fn in prev or fstate[fn]:
1157 1157 r = display(fn, rev, m, fstate[fn])
1158 1158 found = found or r
1159 1159 if r and not opts['all']:
1160 1160 skip[fn] = True
1161 1161 if copy:
1162 1162 skip[copy] = True
1163 1163 fstate[fn] = m
1164 1164 if copy:
1165 1165 fstate[copy] = m
1166 1166 prev[fn] = rev
1167 1167
1168 1168 fstate = fstate.items()
1169 1169 fstate.sort()
1170 1170 for fn, state in fstate:
1171 1171 if fn in skip:
1172 1172 continue
1173 1173 if fn not in copies.get(prev[fn], {}):
1174 1174 found = display(fn, rev, {}, state) or found
1175 1175 return (not found and 1) or 0
1176 1176
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    If branch or revisions names are given this will show the heads of
    the specified branches or the branches those revisions are tagged
    with.

    Repository "heads" are changesets that don't have child
    changesets. They are where development generally takes place and
    are the usual targets for update and merge operations.

    Branch heads are changesets that have a given branch tag, but have
    no child changesets with that tag. They are usually where
    development on the given branch takes place.
    """
    # Optional --rev limits the search to heads reachable from that rev.
    start = None
    if opts['rev']:
        start = repo.lookup(opts['rev'])
    if branchrevs:
        found = []
        # Each argument names a branch (directly or via a revision on it);
        # collect the heads of every distinct branch exactly once.
        seen = util.set()
        for branchrev in branchrevs:
            branch = repo.changectx(branchrev).branch()
            if branch in seen:
                continue
            seen.add(branch)
            bheads = repo.branchheads(branch, start)
            if not bheads:
                # Warn but keep going; other arguments may still match.
                if branch != branchrev:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts['rev']))
                else:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts['rev']))
            found.extend(bheads)
        heads = found
    else:
        # No branches/revs named: show repo-wide heads.
        heads = repo.heads(start)
    if not heads:
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for node in heads:
        displayer.show(changenode=node)
1224 1224
def help_(ui, name=None, with_version=False):
    """show help for a command, extension, or list of commands

    With no arguments, print a list of commands and short help.

    Given a command name, print help for that command.

    Given an extension name, print help for that extension, and the
    commands it provides."""
    # (title, options) pairs accumulated by the helpers below and
    # rendered in one pass at the end of the function.
    option_lists = []

    def addglobalopts(aliases):
        # Append the global-option list (verbose) or a one-line hint
        # telling the user how to get more help.
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    def helpcmd(name):
        # Print help for a single command; raises cmdutil.UnknownCommand
        # (via findcmd) if `name` is not a command.
        if with_version:
            version_(ui)
            ui.write('\n')

        try:
            aliases, i = cmdutil.findcmd(ui, name, table)
        except cmdutil.AmbiguousCommand, inst:
            # Ambiguous prefix: list every command it could match.
            select = lambda c: c.lstrip('^').startswith(inst.args[0])
            helplist(_('list of commands:\n\n'), select)
            return

        # synopsis
        ui.write("%s\n" % i[2])

        # aliases
        if not ui.quiet and len(aliases) > 1:
            ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

        # description
        doc = i[0].__doc__
        if not doc:
            doc = _("(No help text available)")
        if ui.quiet:
            # quiet mode: first line of the docstring only
            doc = doc.splitlines(0)[0]
        ui.write("\n%s\n" % doc.rstrip())

        if not ui.quiet:
            # options
            if i[1]:
                option_lists.append((_("options:\n"), i[1]))

            addglobalopts(False)

    def helplist(header, select=None):
        # Print a table of commands and their one-line summaries,
        # optionally filtered through `select`.
        h = {}      # command name -> one-line summary
        cmds = {}   # command name -> full "name|alias" spec
        for c, e in table.items():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if name == "shortlist" and not f.startswith("^"):
                # shortlist shows only commands flagged with a leading "^"
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                # hide debug commands unless --debug is in effect
                continue
            doc = e[0].__doc__
            if not doc:
                doc = _("(No help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        if not h:
            ui.status(_('no commands defined\n'))
            return

        ui.status(header)
        fns = h.keys()
        fns.sort()
        # column width = longest command name
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, h[f]))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        # Print help for a named topic from help.helptable; raises
        # cmdutil.UnknownCommand if no topic matches.
        v = None
        for i, d in help.helptable:
            l = i.split('|')
            if name in l:
                v = i
                header = l[-1]
                doc = d
        if not v:
            raise cmdutil.UnknownCommand(name)

        # description
        if not doc:
            doc = _("(No help text available)")
        if callable(doc):
            # some topics build their text lazily
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        # Print help for an enabled extension and the commands it adds;
        # raises cmdutil.UnknownCommand if there is no such extension.
        try:
            mod = extensions.find(name)
        except KeyError:
            raise cmdutil.UnknownCommand(name)

        doc = (mod.__doc__ or _('No help text available')).splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ct = {}

        modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
        helplist(_('list of commands:\n\n'), modcmds.has_key)

    if name and name != 'shortlist':
        # Try command, then topic, then extension; remember the last
        # UnknownCommand so it can be re-raised if nothing matched.
        i = None
        for f in (helpcmd, helptopic, helpext):
            try:
                f(name)
                i = None
                break
            except cmdutil.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        helplist(header)

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                               "%s%s" % (desc,
                                         default
                                         and _(" (default: %s)") % default
                                         or "")))

    if ui.verbose:
        ui.write(_("\nspecial help topics:\n"))
        topics = []
        for i, d in help.helptable:
            l = i.split('|')
            topics.append((", ".join(l[:-1]), l[-1]))
        topics_len = max([len(s[0]) for s in topics])
        for t, desc in topics:
            ui.write(" %-*s %s\n" % (topics_len, t, desc))

    if opt_output:
        # width of the widest option column that actually has a description
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
1422 1422
def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repo.

    With a path, do a lookup in another repository.

    This summary identifies the repository state using one or two parent
    hash identifiers, followed by a "+" if there are uncommitted changes
    in the working directory, a list of tags for this revision and a branch
    name for non-default branches.
    """

    if not repo and not source:
        raise util.Abort(_("There is no Mercurial repository here "
                           "(.hg not found)"))

    # full 40-char hashes under --debug, short hashes otherwise
    hexfunc = ui.debugflag and hex or short
    # no output selector given -> print the default summary
    default = not (num or id or branch or tags)
    output = []

    if source:
        # Remote lookup: only the changeset id can be queried.
        source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
        srepo = hg.repository(ui, source)
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(srepo.lookup(rev))]
    elif not rev:
        # Working directory: show parent(s), with "+" if files changed.
        ctx = repo.changectx(None)
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        # Explicit local revision.
        ctx = repo.changectx(rev)
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if not source and default and not ui.quiet:
        # default summary also shows a non-default branch name and tags
        b = util.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(util.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
1492 1492
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import
    will abort unless given the -f flag.

    You can import a patch straight from a mail message. Even patches
    as attachments work (body part must be type text/plain or
    text/x-patch to be used). From and Subject headers of email
    message are used as default committer and commit message. All
    text/plain body parts before first diff are added to commit
    message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values
    given on command line with -m and -u override these.

    If --exact is specified, import will set the working directory
    to the parent of each patch before applying it, and will abort
    if the resulting changeset has a different ID than the one
    recorded in the patch. This may happen due to character set
    problems or other deficiencies in the text patch format.

    To read a patch from standard input, use patch name "-".
    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # --exact requires a clean working dir; so does a plain import
    # unless --force was given.
    if opts.get('exact') or not opts['force']:
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    wlock = lock = None
    try:
        # working-dir lock first, then store lock (standard ordering)
        wlock = repo.wlock()
        lock = repo.lock()
        for p in patches:
            pf = os.path.join(d, p)

            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                data = patch.extract(ui, sys.stdin)
            else:
                ui.status(_("applying %s\n") % p)
                if os.path.exists(pf):
                    data = patch.extract(ui, file(pf, 'rb'))
                else:
                    # not a local file: treat the name as a URL
                    data = patch.extract(ui, urllib.urlopen(pf))
            # extract() writes the diff to a temp file and parses metadata
            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                # commit message precedence: command line > patch > editor
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.changectx(None).parents()
                if opts.get('exact'):
                    # --exact: check out the patch's recorded parent
                    # before applying
                    if not nodeid or not p1:
                        raise util.Abort(_('not a mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    # merge patch: adopt its parents if the first one
                    # matches the working dir; ignore unknown nodes
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                       files=files)
                finally:
                    # record adds/removes even if the patch failed midway
                    files = patch.updatedir(ui, repo, files)
                if not opts.get('no_commit'):
                    n = repo.commit(files, message, opts.get('user') or user,
                                    opts.get('date') or date)
                    if opts.get('exact'):
                        if hex(n) != nodeid:
                            # resulting hash differs from the recorded one:
                            # undo the commit and abort
                            repo.rollback()
                            raise util.Abort(_('patch is damaged'
                                               ' or loses information'))
                    # Force a dirstate write so that the next transaction
                    # backups an up-do-date file.
                    repo.dirstate.write()
            finally:
                os.unlink(tmpname)
    finally:
        del lock, wlock
1608 1608
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default
    pull location. These are the changesets that would be pulled if a pull
    was requested.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    limit = cmdutil.loglimit(opts)
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('comparing with %s\n') % util.hidepassword(source))
    if revs:
        revs = [other.lookup(rev) for rev in revs]
    incoming = repo.findincoming(other, heads=revs, force=opts["force"])
    if not incoming:
        # Remove a stale --bundle file from an earlier run, if any.
        # Only swallow OS-level unlink failures (file missing, empty
        # name); a bare except here would also hide KeyboardInterrupt.
        try:
            os.unlink(opts["bundle"])
        except OSError:
            pass
        ui.status(_("no changes found\n"))
        return 1

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)
            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts['newest_first']:
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        count = 0
        for n in o:
            if count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts['no_merges'] and len(parents) == 2:
                continue
            count += 1
            displayer.show(changenode=n)
    finally:
        # close the (possibly remote) peer and drop any temp bundle
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1674 1674
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given
    directory does not exist, it is created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination.
    Look at the help text for the pull command for important details
    about ssh:// URLs.
    """
    # push remote-related options (ssh command etc.) into the ui config
    # before touching the destination, which may be a remote URL
    cmdutil.setremoteconfig(ui, opts)
    hg.repository(ui, dest, create=1)
1689 1689
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print all files under Mercurial control whose names match the
    given patterns.

    This command searches the entire repository by default. To search
    just the current directory and its subdirectories, use
    "--include .".

    If no patterns are given to match, this command prints all file
    names.

    If you want to feed the output of this command into the "xargs"
    command, use the "-0" option to both this command and "xargs".
    This will avoid the problem of "xargs" treating single filenames
    that contain white space as multiple filenames.
    """
    # -0/--print0 emits NUL-separated names for safe use with xargs -0
    if opts['print0']:
        terminator = '\0'
    else:
        terminator = '\n'
    node = None
    if opts['rev']:
        node = repo.lookup(opts['rev'])

    found = False
    matcher = cmdutil.match(repo, pats, opts, default='relglob')
    # silence complaints about names that match nothing
    matcher.bad = lambda f, msg: False
    for abs in repo.walk(matcher, node):
        if not node and abs not in repo.dirstate:
            # working-dir walk: skip files not under version control
            continue
        if opts['fullpath']:
            ui.write(os.path.join(repo.root, abs), terminator)
        else:
            ui.write(((pats and matcher.rel(abs)) or abs), terminator)
        found = True

    # exit status 0 if anything was printed, 1 otherwise
    return (not found) and 1 or 0
1728 1728
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a file name to follow history across
    renames and copies. --follow without a file name will only show
    ancestors or descendants of the starting revision. --follow-first
    only follows the first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    See 'hg help dates' for a list of formats valid for -d/--date.

    By default this command outputs: changeset id and hash, tags,
    non-trivial parents, user, date and time, and a summary for each
    commit. When the -v/--verbose switch is used, the list of changed
    files and full commit message is shown.

    NOTE: log -p may generate unexpected diff output for merge
    changesets, as it will compare the merge changeset against its
    first parent only. Also, the files: list will only reflect files
    that are different from BOTH parents.

    """

    # memoized changeset accessor: get(rev) -> changeset tuple
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    limit = cmdutil.loglimit(opts)
    count = 0

    # highest rev we ever need rename data for (for --copies)
    if opts['copies'] and opts['rev']:
        endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
    else:
        endrev = repo.changelog.count()
    rcache = {}  # fn -> {changerev: rename info}
    ncache = {}  # fn -> {filenode: rename info}
    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # first request for fn: scan its filelog once and cache
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in xrange(fl.count()):
                node = fl.node(i)
                lr = fl.linkrev(node)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.

        try:
            return repo.changectx(rev).filectx(fn).renamed()
        except revlog.LookupError:
            pass
        return None

    df = False
    if opts["date"]:
        # df is a predicate matching dates inside the requested range
        df = util.matchdate(opts["date"])

    only_branches = opts['only_branch']

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    # changeiter is a windowed state machine: 'add' introduces a rev,
    # 'iter' flushes it in display order.
    for st, rev, fns in changeiter:
        if st == 'add':
            changenode = repo.changelog.node(rev)
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            if opts['no_merges'] and len(parents) == 2:
                continue
            if opts['only_merges'] and len(parents) != 2:
                continue

            if only_branches:
                # changeset tuple field 5 is the extra dict
                revbranch = get(rev)[5]['branch']
                if revbranch not in only_branches:
                    continue

            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            if opts['keyword']:
                # require every keyword in user, description, or file list
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            copies = []
            if opts.get('copies') and rev:
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(rev, changenode, copies=copies)
        elif st == 'iter':
            if count == limit: break
            if displayer.flush(rev):
                count += 1
1851 1851
def manifest(ui, repo, node=None, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The manifest is the list of files being version controlled. If no revision
    is given then the first parent of the working directory is used.

    With -v flag, print file permissions, symlink and executable bits. With
    --debug flag, print file revision hashes.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    # -r/--rev is an alias for the positional revision argument
    node = node or rev

    mf = repo.changectx(node).manifest()
    names = mf.keys()
    names.sort()

    for f in names:
        if ui.debugflag:
            ui.write("%40s " % hex(mf[f]))
        if ui.verbose:
            # "*" marks executables, "@" marks symlinks
            ftype = mf.execf(f) and "*" or mf.linkf(f) and "@" or " "
            fperm = mf.execf(f) and "755" or "644"
            ui.write("%3s %1s " % (fperm, ftype))
        ui.write("%s\n" % f)
1884 1884
def merge(ui, repo, node=None, force=None, rev=None):
    """merge working directory with another revision

    Merge the contents of the current working directory and the
    requested revision. Files that changed between either parent are
    marked as changed for the next commit and a commit must be
    performed before any further updates are allowed.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other head,
    the other head is merged with by default. Otherwise, an explicit
    revision to merge with must be provided.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = rev

    if not node:
        # No revision given: pick the other head of the current branch,
        # but only when that choice is unambiguous.
        branch = repo.changectx(None).branch()
        bheads = repo.branchheads()
        if len(bheads) > 2:
            raise util.Abort(_("branch '%s' has %d heads - "
                               "please merge with an explicit rev") %
                             (branch, len(bheads)))

        parent = repo.dirstate.parents()[0]
        if len(bheads) == 1:
            # single branch head: nothing on this branch to merge with
            if len(repo.heads()) > 1:
                raise util.Abort(_("branch '%s' has one head - "
                                   "please merge with an explicit rev") %
                                 branch)
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo.changectx(None).branch()):
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in bheads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        # exactly two heads here: merge with whichever is not the parent
        node = parent == bheads[0] and bheads[-1] or bheads[0]
    return hg.merge(repo, node, force=force)
1928 1928
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or
    the default push location. These are the changesets that would be pushed
    if a push was requested.

    See pull for valid destination format details.
    """
    limit = cmdutil.loglimit(opts)
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(ui, dest)
    ui.status(_('comparing with %s\n') % util.hidepassword(dest))
    o = repo.findoutgoing(other, force=opts['force'])
    if not o:
        ui.status(_("no changes found\n"))
        return 1
    # restrict to the revisions reachable from the requested heads
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts['newest_first']:
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    shown = 0
    for node in o:
        if shown >= limit:
            break
        parents = [p for p in repo.changelog.parents(node) if p != nullid]
        if opts['no_merges'] and len(parents) == 2:
            # --no-merges: skip changesets with two real parents
            continue
        shown += 1
        displayer.show(changenode=node)
1964 1964
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working dir or revision

    Print the working directory's parent revisions. If a
    revision is given via --rev, the parent of that revision
    will be printed. If a file argument is given, revision in
    which the file was last changed (before the working directory
    revision or the argument to --rev if given) is printed.
    """
    rev = opts.get('rev')
    if rev:
        ctx = repo.changectx(rev)
    else:
        # no --rev: use the working directory context
        ctx = repo.changectx(None)

    if file_:
        # must name exactly one file, no patterns
        m = cmdutil.match(repo, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise util.Abort(_('can only specify an explicit file name'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except revlog.LookupError:
                # file not present in this parent; skip it
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        fl = repo.file(file_)
        # map each file revision back to the changeset that introduced it
        p = [repo.lookup(fl.linkrev(fn)) for fn in filenodes]
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(changenode=n)
2004 2004
def paths(ui, repo, search=None):
    """show definition of symbolic path names

    Show definition of symbolic path name NAME. If no name is given, show
    definition of available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc
    and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
    """
    pathitems = ui.configitems("paths")
    if search:
        # Look up a single named path; report failure if it is unknown.
        for name, path in pathitems:
            if name == search:
                ui.write("%s\n" % util.hidepassword(path))
                return
        ui.warn(_("not found!\n"))
        return 1
    # No name given: list every configured path.
    for name, path in pathitems:
        ui.write("%s = %s\n" % (name, util.hidepassword(path)))
def postincoming(ui, repo, modheads, optupdate, checkout):
    """After changesets arrive, optionally update and advise the user."""
    # Nothing came in: stay silent.
    if modheads == 0:
        return
    if optupdate:
        # Safe to update automatically when at most one head changed,
        # or when a specific checkout revision was requested.
        if modheads <= 1 or checkout:
            return hg.update(repo, checkout)
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository. By default, this
    does not update the copy of the project in the working directory.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]
      ssh://[user@]host[:port]/[path]
      static-http://host[:port]/[path]

    Paths in the local filesystem can either point to Mercurial
    repositories or to bundle files (as created by 'hg bundle' or
    'hg incoming --bundle'). The static-http:// protocol, albeit slow,
    allows access to a Mercurial repository where you simply use a web
    server to publish the .hg directory as static content.

    An optional identifier after # indicates a particular branch, tag,
    or changeset to pull.

    Some notes about using SSH with Mercurial:
    - SSH requires an accessible shell account on the destination machine
      and a copy of hg in the remote path or specified with as remotecmd.
    - path is relative to the remote user's home directory by default.
      Use an extra slash at the start of a path to specify an absolute path:
        ssh://example.com//tmp/repository
    - Mercurial doesn't use its own compression via SSH; the right thing
      to do is to configure it in your ~/.ssh/config, e.g.:
        Host *.mylocalnetwork.example.com
          Compression no
        Host *
          Compression yes
      Alternatively specify "ssh -C" as your ssh command in your hgrc or
      with the --ssh command line option.
    """
    # parseurl splits a trailing #identifier into the revs to pull and
    # the revision to check out afterwards.
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, source)
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    if revs:
        try:
            # Resolve symbolic revisions on the remote side; not every
            # remote protocol supports lookup.
            revs = [other.lookup(rev) for rev in revs]
        except NoCapability:
            error = _("Other repository doesn't support revision lookup, "
                      "so a rev cannot be specified.")
            raise util.Abort(error)

    modheads = repo.pull(other, heads=revs, force=opts['force'])
    return postincoming(ui, repo, modheads, opts['update'], checkout)
2094 2094
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It helps to move
    changes from the current repository to a different one. If the
    destination is local this is identical to a pull in that directory
    from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the
    client has forgotten to pull and merge before pushing.

    Valid URLs are of the form:

      local/filesystem/path (or file://local/filesystem/path)
      ssh://[user@]host[:port]/[path]
      http://[user@]host[:port]/[path]
      https://[user@]host[:port]/[path]

    An optional identifier after # indicates a particular branch, tag,
    or changeset to push. If -r is used, the named changeset and all its
    ancestors will be pushed to the remote repository.

    Look at the help text for the pull command for important details
    about ssh:// URLs.

    Pushing to http:// and https:// URLs is only possible, if this
    feature is explicitly enabled on the remote Mercurial server.
    """
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
    cmdutil.setremoteconfig(ui, opts)

    other = hg.repository(ui, dest)
    # Mark the message for translation (_), consistent with pull/outgoing;
    # the original passed a bare string and was never translated.
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    r = repo.push(other, opts['force'], revs=revs)
    # repo.push returns 0 on failure; commands signal failure by returning 1,
    # so invert: True (== 1) when nothing was pushed.
    return r == 0
2136 2136
2137 2137 def rawcommit(ui, repo, *pats, **opts):
2138 2138 """raw commit interface (DEPRECATED)
2139 2139
2140 2140 (DEPRECATED)
2141 2141 Lowlevel commit, for use in helper scripts.
2142 2142
2143 2143 This command is not intended to be used by normal users, as it is
2144 2144 primarily useful for importing from other SCMs.
2145 2145
2146 2146 This command is now deprecated and will be removed in a future
2147 2147 release, please use debugsetparents and commit instead.
2148 2148 """
2149 2149
2150 2150 ui.warn(_("(the rawcommit command is deprecated)\n"))
2151 2151
2152 2152 message = cmdutil.logmessage(opts)
2153 2153
2154 2154 files = cmdutil.match(repo, pats, opts).files()
2155 2155 if opts['files']:
2156 2156 files += open(opts['files']).read().splitlines()
2157 2157
2158 2158 parents = [repo.lookup(p) for p in opts['parent']]
2159 2159
2160 2160 try:
2161 2161 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2162 2162 except ValueError, inst:
2163 2163 raise util.Abort(str(inst))
2164 2164
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # If nothing was recovered, report failure; otherwise verify the repo.
    if not repo.recover():
        return 1
    return hg.verify(repo)
2176 2176
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the entire
    project history. -A can be used to remove only files that have already
    been deleted, -f can be used to force deletion, and -Af can be used
    to remove files from the next revision without deleting them.

    The following table details the behavior of remove for different file
    states (columns) and option combinations (rows). The file states are
    Added, Clean, Modified and Missing (as reported by hg status). The
    actions are Warn, Remove (from branch) and Delete (from disk).

           A  C  M  !
    none   W  RD W  R
    -f     R  RD RD R
    -A     W  W  W  R
    -Af    R  R  R  R

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see hg revert.
    """

    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    m = cmdutil.match(repo, pats, opts)
    # Dicts used as sets of file names for the first five status
    # categories: Modified, Added, Removed, Deleted, Unknown ("mardu").
    mardu = map(dict.fromkeys, repo.status(match=m))[:5]
    modified, added, removed, deleted, unknown = mardu

    remove, forget = [], []
    for abs in repo.walk(m):

        reason = None
        # Already scheduled for removal or never tracked: nothing to do.
        if abs in removed or abs in unknown:
            continue

        # last column
        elif abs in deleted:
            remove.append(abs)

        # rest of the third row
        elif after and not force:
            reason = _('still exists (use -f to force removal)')

        # rest of the first column
        elif abs in added:
            if not force:
                reason = _('has been marked for add (use -f to force removal)')
            else:
                forget.append(abs)

        # rest of the third column
        elif abs in modified:
            if not force:
                reason = _('is modified (use -f to force removal)')
            else:
                remove.append(abs)

        # rest of the second column
        elif not reason:
            remove.append(abs)

        if reason:
            ui.warn(_('not removing %s: file %s\n') % (m.rel(abs), reason))
        elif ui.verbose or not m.exact(abs):
            ui.status(_('removing %s\n') % m.rel(abs))

    # Undo pending adds; drop the rest, deleting from disk unless --after.
    repo.forget(forget)
    repo.remove(remove, unlink=not after)
2250 2250
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If
    dest is a directory, copies are put in that directory. If dest is
    a file, there can only be one source.

    By default, this command copies the contents of files as they
    stand in the working directory. If invoked with --after, the
    operation is recorded, but no copying is performed.

    This command takes effect in the next commit. To undo a rename
    before that, see hg revert.
    """
    # Hold the working-dir lock (non-waiting) for the duration of the copy.
    wlock = repo.wlock(False)
    try:
        # A rename is a copy followed by removal of the source.
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        # Dropping the last reference releases the lock.
        del wlock
2270 2270
def resolve(ui, repo, *pats, **opts):
    """resolve file merges from a branch merge or update

    This command will attempt to resolve unresolved merges from the
    last update or merge command. This will use the local file
    revision preserved at the last update or merge to cleanly retry
    the file merge attempt. With no file or options specified, this
    command will attempt to resolve all unresolved files.

    The codes used to show the status of files are:
    U = unresolved
    R = resolved
    """
    # --list, --mark and --unmark are mutually exclusive.
    selected = [o for o in opts if opts[o]]
    if len(selected) > 1:
        raise util.Abort(_("too many options specified"))

    ms = merge_.mergestate(repo)
    m = cmdutil.match(repo, pats, opts)

    for f in ms:
        if not m(f):
            continue
        if opts.get("list"):
            ui.write("%s %s\n" % (ms[f].upper(), f))
        elif opts.get("mark"):
            ms.mark(f, "r")
        elif opts.get("unmark"):
            ms.mark(f, "u")
        else:
            # Retry the merge against the working dir and the other parent.
            wctx = repo.changectx(None)
            mctx = wctx.parents()[-1]
            ms.resolve(f, wctx, mctx)
2303 2303
def revert(ui, repo, *pats, **opts):
    """restore individual files or dirs to an earlier state

    (use update -r to check out earlier revisions, revert does not
    change the working dir parents)

    With no revision specified, revert the named files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of the affected files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify the
    revision to revert to.

    Using the -r option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll
    back" some or all of an earlier change.
    See 'hg help dates' for a list of formats valid for -d/--date.

    Revert modifies the working directory. It does not commit any
    changes, or change the parent of the working directory. If you
    revert to a revision other than the parent of the working
    directory, the reverted files will thus appear modified
    afterwards.

    If a file has been deleted, it is restored. If the executable
    mode of a file was changed, it is reset.

    If names are given, all files matching the names are reverted.
    If no arguments are given, no files are reverted.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup.
    """

    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        # Translate --date into the revision in effect at that date.
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts['all']:
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts['rev'] and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo.changectx(opts['rev'])
    node = ctx.node()
    mf = ctx.manifest()
    if node == parent:
        # Target is the working dir's parent: its manifest doubles as
        # the parent manifest.
        pmf = mf
    else:
        # Loaded lazily below, only in the unlikely case it is needed.
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    names = {}

    wlock = repo.wlock()
    try:
        # walk dirstate.
        files = []

        m = cmdutil.match(repo, pats, opts)
        # Suppress "file not found" errors during the dirstate walk.
        m.bad = lambda x,y: False
        for abs in repo.walk(m):
            names[abs] = m.rel(abs), m.exact(abs)

        # walk target manifest.

        def badfn(path, msg):
            # Only warn about a path if the dirstate walk did not already
            # find it (directly or as a directory prefix).
            if path in names:
                return False
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return False
            repo.ui.warn("%s: %s\n" % (m.rel(path), msg))
            return False

        m = cmdutil.match(repo, pats, opts)
        m.bad = badfn
        for abs in repo.walk(m, node=node):
            if abs not in names:
                names[abs] = m.rel(abs), m.exact(abs)

        m = cmdutil.matchfiles(repo, names)
        changes = repo.status(match=m)[:4]
        modified, added, removed, deleted = map(dict.fromkeys, changes)

        # if f is a rename, also revert the source
        cwd = repo.getcwd()
        for f in added:
            src = repo.dirstate.copied(f)
            if src and src not in names and repo.dirstate[src] == 'r':
                removed[src] = None
                names[src] = (repo.pathto(src, cwd), True)

        def removeforget(abs):
            # Choose the status message: files pending add are "forgotten",
            # tracked files are "removed".
            if repo.dirstate[abs] == 'a':
                return _('forgetting %s\n')
            return _('removing %s\n')

        # Each action is (list of files, status message or message factory).
        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], removeforget)
        undelete = ([], _('undeleting %s\n'))

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, remove, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            )

        entries = names.items()
        entries.sort()

        for abs, (rel, exact) in entries:
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            def handle(xlist, dobackup):
                # Queue the file for the chosen action, backing up the
                # current copy as <name>.orig unless disabled.
                xlist[0].append(abs)
                if dobackup and not opts['no_backup'] and util.lexists(target):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.copyfile(target, bakname)
                if ui.verbose or not exact:
                    msg = xlist[1]
                    if not isinstance(msg, basestring):
                        msg = msg(abs)
                    ui.status(msg % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                break
            else:
                # for-else: no dispatch entry matched, i.e. the file is
                # unchanged in the dirstate.
                if abs not in repo.dirstate:
                    if mfentry:
                        handle(add, True)
                    elif exact:
                        ui.warn(_('file not managed: %s\n') % rel)
                    continue
                # file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo.changectx(parent).manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if (pmf[abs] != mfentry or
                            pmf.flags(abs) != mf.flags(abs)):
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            def checkout(f):
                # Write the target revision's content (and flags) of f
                # into the working directory.
                fc = ctx[f]
                repo.wwrite(f, fc.data(), fc.flags())

            audit_path = util.path_auditor(repo.root)
            for f in remove[0]:
                if repo.dirstate[f] == 'a':
                    repo.dirstate.forget(f)
                    continue
                audit_path(f)
                try:
                    util.unlink(repo.wjoin(f))
                except OSError:
                    pass
                repo.dirstate.remove(f)

            normal = None
            if node == parent:
                # We're reverting to our parent. If possible, we'd like status
                # to report the file as clean. We have to use normallookup for
                # merges to avoid losing information about merged/dirty files.
                if p2 != nullid:
                    normal = repo.dirstate.normallookup
                else:
                    normal = repo.dirstate.normal
            for f in revert[0]:
                checkout(f)
                if normal:
                    normal(f)

            for f in add[0]:
                checkout(f)
                repo.dirstate.add(f)

            normal = repo.dirstate.normallookup
            if node == parent and p2 == nullid:
                normal = repo.dirstate.normal
            for f in undelete[0]:
                checkout(f)
                normal(f)

    finally:
        del wlock
2524 2524
def rollback(ui, repo):
    """roll back the last transaction

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository. For example, the following commands are transactional,
    and their effects can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.
    """
    # All the work happens in localrepository.rollback().
    repo.rollback()
2552 2552
def root(ui, repo):
    """print the root (top) of the current working dir

    Print the root directory of the current repository.
    """
    ui.write("%s\n" % repo.root)
2559 2559
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the "-A" and "-E" options to log to files.
    """

    if opts["stdio"]:
        # --stdio: speak the ssh wire protocol over stdin/stdout instead
        # of starting an HTTP server (used by remote ssh:// clients).
        if repo is None:
            raise RepoError(_("There is no Mercurial repository here"
                              " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    # Propagate command-line web options into the [web] config section so
    # the hgweb server picks them up.
    # NOTE(review): nesting of the repo.ui.setconfig call reconstructed
    # from mangled indentation as inside "if opts[o]:" — confirm.
    parentui = ui.parentui or ui
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog webdir_conf certificate")
    for o in optlist.split():
        if opts[o]:
            parentui.setconfig("web", o, str(opts[o]))
            if (repo is not None) and (repo.ui != parentui):
                repo.ui.setconfig("web", o, str(opts[o]))

    if repo is None and not ui.config("web", "webdir_conf"):
        raise RepoError(_("There is no Mercurial repository here"
                          " (.hg not found)"))

    class service:
        def init(self):
            util.set_signal_handler()
            self.httpd = hgweb.server.create_server(parentui, repo)

            # The listening banner below is only shown in verbose mode.
            if not ui.verbose: return

            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            # Omit the port suffix for the default HTTP port.
            port = ':%d' % self.httpd.port
            if port == ':80':
                port = ''

            bindaddr = self.httpd.addr
            if bindaddr == '0.0.0.0':
                bindaddr = '*'
            elif ':' in bindaddr: # IPv6
                bindaddr = '[%s]' % bindaddr

            fqaddr = self.httpd.fqaddr
            if ':' in fqaddr:
                fqaddr = '[%s]' % fqaddr
            ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
                      (fqaddr, port, prefix, bindaddr, self.httpd.port))

        def run(self):
            self.httpd.serve_forever()

    service = service()

    # cmdutil.service handles daemonizing / pidfile options.
    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2623 2623
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    source of a copy/move operation, are not listed unless -c (clean),
    -i (ignored), -C (copies) or -A is given. Unless options described
    with "show only ..." are given, the options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/-ignored.

    NOTE: status may appear to disagree with diff if permissions have
    changed or a merge has occurred. The standard diff format does not
    report permission changes and diff only reports changes relative
    to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the difference between them is shown.

    The codes used to show the status of files are:
    M = modified
    A = added
    R = removed
    C = clean
    ! = deleted, but still tracked
    ? = not tracked
    I = ignored
      = the previous added file was copied from here
    """

    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
    cwd = (pats and repo.getcwd()) or ''
    # --print0 terminates entries with NUL instead of newline.
    end = opts['print0'] and '\0' or '\n'
    copy = {}
    states = 'modified added removed deleted unknown ignored clean'.split()
    # States explicitly requested via options; default set depends on
    # --all and quiet mode.
    show = [k for k in states if opts[k]]
    if opts['all']:
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        show = ui.quiet and states[:4] or states[:5]

    stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
                       'ignored' in show, 'clean' in show, 'unknown' in show)
    # Pair each state name with its one-letter status code and file list.
    changestates = zip(states, 'MAR!?IC', stat)

    if (opts['all'] or opts['copies']) and not opts['no_status']:
        # Compute copy/rename sources so they can be listed under the
        # corresponding added files.
        ctxn = repo.changectx(nullid)
        ctx1 = repo.changectx(node1)
        ctx2 = repo.changectx(node2)
        added = stat[1]
        if node2 is None:
            added = stat[0] + stat[1] # merged?

        for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].items():
            if k in added:
                copy[k] = v
            elif v in added:
                copy[v] = k

    for state, char, files in changestates:
        if state in show:
            format = "%s %%s%s" % (char, end)
            if opts['no_status']:
                format = "%%s%s" % end

            for f in files:
                ui.write(format % repo.pathto(f, cwd))
                if f in copy:
                    # Copy source, indented under the copied file.
                    ui.write('  %s%s' % (repo.pathto(copy[f], cwd), end))
2694 2694
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are
    very useful to compare different revisions, to go back to significant
    earlier versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags,
    they are stored as a file named ".hgtags" which is managed
    similarly to other project files and can be hand-edited if
    necessary. The file '.hg/localtags' is used for local tags (not
    shared among repositories).

    See 'hg help dates' for a list of formats valid for -d/--date.
    """

    rev_ = "."
    names = (name1,) + names
    # dict.fromkeys de-duplicates; a length change means repeated names.
    if len(names) != len(dict.fromkeys(names)):
        raise util.Abort(_('tag names must be unique'))
    for n in names:
        if n in ['tip', '.', 'null']:
            raise util.Abort(_('the name \'%s\' is reserved') % n)
    if opts['rev'] and opts['remove']:
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts['rev']:
        rev_ = opts['rev']
    message = opts['message']
    if opts['remove']:
        # A tag is removed by re-tagging nullid; the tag must exist and be
        # of the matching kind (local vs global).
        expectedtype = opts['local'] and 'local' or 'global'
        for n in names:
            if not repo.tagtype(n):
                raise util.Abort(_('tag \'%s\' does not exist') % n)
            if repo.tagtype(n) != expectedtype:
                raise util.Abort(_('tag \'%s\' is not a %s tag') %
                                 (n, expectedtype))
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % ', '.join(names)
    elif not opts['force']:
        for n in names:
            if n in repo.tags():
                raise util.Abort(_('tag \'%s\' already exists '
                                   '(use -f to force)') % n)
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo.changectx(rev_).node()

    if not message:
        message = (_('Added tag %s for changeset %s') %
                   (', '.join(names), short(r)))

    date = opts.get('date')
    if date:
        date = util.parsedate(date)

    repo.tag(names, r, message, opts['local'], opts['user'], date)
2758 2758
def tags(ui, repo):
    """list repository tags

    List the repository tags.

    This lists both regular and local tags. When the -v/--verbose switch
    is used, a third column "local" is printed for local tags.
    """
    l = repo.tagslist()
    l.reverse()
    hexfunc = ui.debugflag and hex or short

    for t, n in l:
        if ui.quiet:
            ui.write("%s\n" % t)
            continue

        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except revlog.LookupError:
            # Tag points at a node missing from the changelog: show '?' in
            # place of the revision number. (The original only wrote output
            # in the try's else branch, so such tags were silently dropped.)
            r = "    ?:%s" % hn

        # Right-pad the tag name to a 30-column field.
        spaces = " " * (30 - util.locallen(t))
        tagtype = ""
        if ui.verbose and repo.tagtype(t) == 'local':
            tagtype = " local"
        ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2791 2791
def tip(ui, repo, **opts):
    """show the tip revision

    The tip revision (usually just called the tip) is the most
    recently added changeset in the repository, the most recently
    changed head.

    If you have just made a commit, that commit will be the tip. If
    you have just pulled changes from another repository, the tip of
    that repository becomes the current tip. The "tip" tag is special
    and cannot be renamed or assigned to a different changeset.
    """
    # The tip is the last revision: count() + nullrev == count() - 1.
    tiprev = repo.changelog.count() + nullrev
    displayer = cmdutil.show_changeset(ui, repo, opts)
    displayer.show(tiprev)
2805 2805
2806 2806 def unbundle(ui, repo, fname1, *fnames, **opts):
2807 2807 """apply one or more changegroup files
2808 2808
2809 2809 Apply one or more compressed changegroup files generated by the
2810 2810 bundle command.
2811 2811 """
2812 2812 fnames = (fname1,) + fnames
2813 2813
2814 2814 lock = None
2815 2815 try:
2816 2816 lock = repo.lock()
2817 2817 for fname in fnames:
2818 2818 if os.path.exists(fname):
2819 2819 f = open(fname, "rb")
2820 2820 else:
2821 2821 f = urllib.urlopen(fname)
2822 2822 gen = changegroup.readbundle(f, fname)
2823 2823 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2824 2824 finally:
2825 2825 del lock
2826 2826
2827 2827 return postincoming(ui, repo, modheads, opts['update'], None)
2828 2828
2829 2829 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2830 2830 """update working directory
2831 2831
2832 2832 Update the repository's working directory to the specified revision,
2833 2833 or the tip of the current branch if none is specified.
2834 2834
2835 2835 If the requested revision is a descendant of the working
2836 2836 directory, any outstanding changes in the working directory will
2837 2837 be merged into the result. If it is not directly descended but is
2838 2838 on the same named branch, update aborts with a suggestion to use
2839 2839 merge or update -C instead.
2840 2840
2841 2841 If the requested revision is on a different named branch and the
2842 2842 working directory is clean, update quietly switches branches.
2843 2843
2844 2844 See 'hg help dates' for a list of formats valid for --date.
2845 2845 """
2846 2846 if rev and node:
2847 2847 raise util.Abort(_("please specify just one revision"))
2848 2848
2849 2849 if not rev:
2850 2850 rev = node
2851 2851
2852 2852 if date:
2853 2853 if rev:
2854 2854 raise util.Abort(_("you can't specify a revision and a date"))
2855 2855 rev = cmdutil.finddate(ui, repo, date)
2856 2856
2857 2857 if clean:
2858 2858 return hg.clean(repo, rev)
2859 2859 else:
2860 2860 return hg.update(repo, rev)
2861 2861
2862 2862 def verify(ui, repo):
2863 2863 """verify the integrity of the repository
2864 2864
2865 2865 Verify the integrity of the current repository.
2866 2866
2867 2867 This will perform an extensive check of the repository's
2868 2868 integrity, validating the hashes and checksums of each entry in
2869 2869 the changelog, manifest, and tracked files, as well as the
2870 2870 integrity of their crosslinks and indices.
2871 2871 """
2872 2872 return hg.verify(repo)
2873 2873
2874 2874 def version_(ui):
2875 2875 """output version and copyright information"""
2876 2876 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2877 2877 % version.get_version())
2878 2878 ui.status(_(
2879 2879 "\nCopyright (C) 2005-2008 Matt Mackall <mpm@selenic.com> and others\n"
2880 2880 "This is free software; see the source for copying conditions. "
2881 2881 "There is NO\nwarranty; "
2882 2882 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2883 2883 ))
2884 2884
2885 2885 # Command options and aliases are listed here, alphabetically
2886 2886
2887 2887 globalopts = [
2888 2888 ('R', 'repository', '',
2889 2889 _('repository root directory or symbolic path name')),
2890 2890 ('', 'cwd', '', _('change working directory')),
2891 2891 ('y', 'noninteractive', None,
2892 2892 _('do not prompt, assume \'yes\' for any required answers')),
2893 2893 ('q', 'quiet', None, _('suppress output')),
2894 2894 ('v', 'verbose', None, _('enable additional output')),
2895 2895 ('', 'config', [], _('set/override config option')),
2896 2896 ('', 'debug', None, _('enable debugging output')),
2897 2897 ('', 'debugger', None, _('start debugger')),
2898 2898 ('', 'encoding', util._encoding, _('set the charset encoding')),
2899 2899 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2900 2900 ('', 'lsprof', None, _('print improved command execution profile')),
2901 2901 ('', 'traceback', None, _('print traceback on exception')),
2902 2902 ('', 'time', None, _('time how long the command takes')),
2903 2903 ('', 'profile', None, _('print command execution profile')),
2904 2904 ('', 'version', None, _('output version information and exit')),
2905 2905 ('h', 'help', None, _('display help and exit')),
2906 2906 ]
2907 2907
2908 2908 dryrunopts = [('n', 'dry-run', None,
2909 2909 _('do not perform actions, just print output'))]
2910 2910
2911 2911 remoteopts = [
2912 2912 ('e', 'ssh', '', _('specify ssh command to use')),
2913 2913 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2914 2914 ]
2915 2915
2916 2916 walkopts = [
2917 2917 ('I', 'include', [], _('include names matching the given patterns')),
2918 2918 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2919 2919 ]
2920 2920
2921 2921 commitopts = [
2922 2922 ('m', 'message', '', _('use <text> as commit message')),
2923 2923 ('l', 'logfile', '', _('read commit message from <file>')),
2924 2924 ]
2925 2925
2926 2926 commitopts2 = [
2927 2927 ('d', 'date', '', _('record datecode as commit date')),
2928 2928 ('u', 'user', '', _('record user as committer')),
2929 2929 ]
2930 2930
2931 2931 templateopts = [
2932 2932 ('', 'style', '', _('display using template map file')),
2933 2933 ('', 'template', '', _('display with template')),
2934 2934 ]
2935 2935
2936 2936 logopts = [
2937 2937 ('p', 'patch', None, _('show patch')),
2938 2938 ('l', 'limit', '', _('limit number of changes displayed')),
2939 2939 ('M', 'no-merges', None, _('do not show merges')),
2940 2940 ] + templateopts
2941 2941
2942 2942 diffopts = [
2943 2943 ('a', 'text', None, _('treat all files as text')),
2944 2944 ('g', 'git', None, _('use git extended diff format')),
2945 2945 ('', 'nodates', None, _("don't include dates in diff headers"))
2946 2946 ]
2947 2947
2948 2948 diffopts2 = [
2949 2949 ('p', 'show-function', None, _('show which function each change is in')),
2950 2950 ('w', 'ignore-all-space', None,
2951 2951 _('ignore white space when comparing lines')),
2952 2952 ('b', 'ignore-space-change', None,
2953 2953 _('ignore changes in the amount of white space')),
2954 2954 ('B', 'ignore-blank-lines', None,
2955 2955 _('ignore changes whose lines are all blank')),
2956 2956 ('U', 'unified', '', _('number of lines of context to show'))
2957 2957 ]
2958 2958
2959 2959 table = {
2960 2960 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2961 2961 "addremove":
2962 2962 (addremove,
2963 2963 [('s', 'similarity', '',
2964 2964 _('guess renamed files by similarity (0<=s<=100)')),
2965 2965 ] + walkopts + dryrunopts,
2966 2966 _('hg addremove [OPTION]... [FILE]...')),
2967 2967 "^annotate|blame":
2968 2968 (annotate,
2969 2969 [('r', 'rev', '', _('annotate the specified revision')),
2970 2970 ('f', 'follow', None, _('follow file copies and renames')),
2971 2971 ('a', 'text', None, _('treat all files as text')),
2972 2972 ('u', 'user', None, _('list the author (long with -v)')),
2973 2973 ('d', 'date', None, _('list the date (short with -q)')),
2974 2974 ('n', 'number', None, _('list the revision number (default)')),
2975 2975 ('c', 'changeset', None, _('list the changeset')),
2976 2976 ('l', 'line-number', None,
2977 2977 _('show line number at the first appearance'))
2978 2978 ] + walkopts,
2979 2979 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
2980 2980 "archive":
2981 2981 (archive,
2982 2982 [('', 'no-decode', None, _('do not pass files through decoders')),
2983 2983 ('p', 'prefix', '', _('directory prefix for files in archive')),
2984 2984 ('r', 'rev', '', _('revision to distribute')),
2985 2985 ('t', 'type', '', _('type of distribution to create')),
2986 2986 ] + walkopts,
2987 2987 _('hg archive [OPTION]... DEST')),
2988 2988 "backout":
2989 2989 (backout,
2990 2990 [('', 'merge', None,
2991 2991 _('merge with old dirstate parent after backout')),
2992 2992 ('', 'parent', '', _('parent to choose when backing out merge')),
2993 2993 ('r', 'rev', '', _('revision to backout')),
2994 2994 ] + walkopts + commitopts + commitopts2,
2995 2995 _('hg backout [OPTION]... [-r] REV')),
2996 2996 "bisect":
2997 2997 (bisect,
2998 2998 [('r', 'reset', False, _('reset bisect state')),
2999 2999 ('g', 'good', False, _('mark changeset good')),
3000 3000 ('b', 'bad', False, _('mark changeset bad')),
3001 3001 ('s', 'skip', False, _('skip testing changeset')),
3002 3002 ('U', 'noupdate', False, _('do not update to target'))],
3003 3003 _("hg bisect [-gbsr] [REV]")),
3004 3004 "branch":
3005 3005 (branch,
3006 3006 [('f', 'force', None,
3007 3007 _('set branch name even if it shadows an existing branch'))],
3008 3008 _('hg branch [-f] [NAME]')),
3009 3009 "branches":
3010 3010 (branches,
3011 3011 [('a', 'active', False,
3012 3012 _('show only branches that have unmerged heads'))],
3013 3013 _('hg branches [-a]')),
3014 3014 "bundle":
3015 3015 (bundle,
3016 3016 [('f', 'force', None,
3017 3017 _('run even when remote repository is unrelated')),
3018 3018 ('r', 'rev', [],
3019 3019 _('a changeset up to which you would like to bundle')),
3020 3020 ('', 'base', [],
3021 3021 _('a base changeset to specify instead of a destination')),
3022 3022 ('a', 'all', None, _('bundle all changesets in the repository')),
3023 3023 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3024 3024 ] + remoteopts,
3025 3025 _('hg bundle [-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3026 3026 "cat":
3027 3027 (cat,
3028 3028 [('o', 'output', '', _('print output to file with formatted name')),
3029 3029 ('r', 'rev', '', _('print the given revision')),
3030 3030 ('', 'decode', None, _('apply any matching decode filter')),
3031 3031 ] + walkopts,
3032 3032 _('hg cat [OPTION]... FILE...')),
3033 3033 "^clone":
3034 3034 (clone,
3035 3035 [('U', 'noupdate', None,
3036 3036 _('the clone will only contain a repository (no working copy)')),
3037 3037 ('r', 'rev', [],
3038 3038 _('a changeset you would like to have after cloning')),
3039 3039 ('', 'pull', None, _('use pull protocol to copy metadata')),
3040 3040 ('', 'uncompressed', None,
3041 3041 _('use uncompressed transfer (fast over LAN)')),
3042 3042 ] + remoteopts,
3043 3043 _('hg clone [OPTION]... SOURCE [DEST]')),
3044 3044 "^commit|ci":
3045 3045 (commit,
3046 3046 [('A', 'addremove', None,
3047 3047 _('mark new/missing files as added/removed before committing')),
3048 3048 ] + walkopts + commitopts + commitopts2,
3049 3049 _('hg commit [OPTION]... [FILE]...')),
3050 3050 "copy|cp":
3051 3051 (copy,
3052 3052 [('A', 'after', None, _('record a copy that has already occurred')),
3053 3053 ('f', 'force', None,
3054 3054 _('forcibly copy over an existing managed file')),
3055 3055 ] + walkopts + dryrunopts,
3056 3056 _('hg copy [OPTION]... [SOURCE]... DEST')),
3057 3057 "debugancestor": (debugancestor, [],
3058 3058 _('hg debugancestor [INDEX] REV1 REV2')),
3059 3059 "debugcheckstate": (debugcheckstate, [], _('hg debugcheckstate')),
3060 3060 "debugcomplete":
3061 3061 (debugcomplete,
3062 3062 [('o', 'options', None, _('show the command options'))],
3063 3063 _('hg debugcomplete [-o] CMD')),
3064 3064 "debugdate":
3065 3065 (debugdate,
3066 3066 [('e', 'extended', None, _('try extended date formats'))],
3067 3067 _('hg debugdate [-e] DATE [RANGE]')),
3068 3068 "debugdata": (debugdata, [], _('hg debugdata FILE REV')),
3069 3069 "debugfsinfo": (debugfsinfo, [], _('hg debugfsinfo [PATH]')),
3070 3070 "debugindex": (debugindex, [], _('hg debugindex FILE')),
3071 3071 "debugindexdot": (debugindexdot, [], _('hg debugindexdot FILE')),
3072 3072 "debuginstall": (debuginstall, [], _('hg debuginstall')),
3073 3073 "debugrawcommit|rawcommit":
3074 3074 (rawcommit,
3075 3075 [('p', 'parent', [], _('parent')),
3076 3076 ('F', 'files', '', _('file list'))
3077 3077 ] + commitopts + commitopts2,
3078 3078 _('hg debugrawcommit [OPTION]... [FILE]...')),
3079 3079 "debugrebuildstate":
3080 3080 (debugrebuildstate,
3081 3081 [('r', 'rev', '', _('revision to rebuild to'))],
3082 3082 _('hg debugrebuildstate [-r REV] [REV]')),
3083 3083 "debugrename":
3084 3084 (debugrename,
3085 3085 [('r', 'rev', '', _('revision to debug'))],
3086 3086 _('hg debugrename [-r REV] FILE')),
3087 3087 "debugsetparents":
3088 3088 (debugsetparents,
3089 3089 [],
3090 3090 _('hg debugsetparents REV1 [REV2]')),
3091 3091 "debugstate":
3092 3092 (debugstate,
3093 3093 [('', 'nodates', None, _('do not display the saved mtime'))],
3094 3094 _('hg debugstate [OPTS]')),
3095 3095 "debugwalk": (debugwalk, walkopts, _('hg debugwalk [OPTION]... [FILE]...')),
3096 3096 "^diff":
3097 3097 (diff,
3098 3098 [('r', 'rev', [], _('revision'))
3099 3099 ] + diffopts + diffopts2 + walkopts,
3100 3100 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3101 3101 "^export":
3102 3102 (export,
3103 3103 [('o', 'output', '', _('print output to file with formatted name')),
3104 3104 ('', 'switch-parent', None, _('diff against the second parent'))
3105 3105 ] + diffopts,
3106 3106 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
3107 3107 "grep":
3108 3108 (grep,
3109 3109 [('0', 'print0', None, _('end fields with NUL')),
3110 3110 ('', 'all', None, _('print all revisions that match')),
3111 3111 ('f', 'follow', None,
3112 3112 _('follow changeset history, or file history across copies and renames')),
3113 3113 ('i', 'ignore-case', None, _('ignore case when matching')),
3114 3114 ('l', 'files-with-matches', None,
3115 3115 _('print only filenames and revs that match')),
3116 3116 ('n', 'line-number', None, _('print matching line numbers')),
3117 3117 ('r', 'rev', [], _('search in given revision range')),
3118 3118 ('u', 'user', None, _('list the author (long with -v)')),
3119 3119 ('d', 'date', None, _('list the date (short with -q)')),
3120 3120 ] + walkopts,
3121 3121 _('hg grep [OPTION]... PATTERN [FILE]...')),
3122 3122 "heads":
3123 3123 (heads,
3124 3124 [('r', 'rev', '', _('show only heads which are descendants of rev')),
3125 3125 ] + templateopts,
3126 3126 _('hg heads [-r REV] [REV]...')),
3127 3127 "help": (help_, [], _('hg help [COMMAND]')),
3128 3128 "identify|id":
3129 3129 (identify,
3130 3130 [('r', 'rev', '', _('identify the specified rev')),
3131 3131 ('n', 'num', None, _('show local revision number')),
3132 3132 ('i', 'id', None, _('show global revision id')),
3133 3133 ('b', 'branch', None, _('show branch')),
3134 3134 ('t', 'tags', None, _('show tags'))],
3135 3135 _('hg identify [-nibt] [-r REV] [SOURCE]')),
3136 3136 "import|patch":
3137 3137 (import_,
3138 3138 [('p', 'strip', 1,
3139 3139 _('directory strip option for patch. This has the same\n'
3140 3140 'meaning as the corresponding patch option')),
3141 3141 ('b', 'base', '', _('base path')),
3142 3142 ('f', 'force', None,
3143 3143 _('skip check for outstanding uncommitted changes')),
3144 3144 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3145 3145 ('', 'exact', None,
3146 3146 _('apply patch to the nodes from which it was generated')),
3147 3147 ('', 'import-branch', None,
3148 3148 _('Use any branch information in patch (implied by --exact)'))] +
3149 3149 commitopts + commitopts2,
3150 3150 _('hg import [OPTION]... PATCH...')),
3151 3151 "incoming|in":
3152 3152 (incoming,
3153 3153 [('f', 'force', None,
3154 3154 _('run even when remote repository is unrelated')),
3155 3155 ('n', 'newest-first', None, _('show newest record first')),
3156 3156 ('', 'bundle', '', _('file to store the bundles into')),
3157 3157 ('r', 'rev', [],
3158 3158 _('a specific revision up to which you would like to pull')),
3159 3159 ] + logopts + remoteopts,
3160 3160 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
3161 3161 ' [--bundle FILENAME] [SOURCE]')),
3162 3162 "^init":
3163 3163 (init,
3164 3164 remoteopts,
3165 3165 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
3166 3166 "locate":
3167 3167 (locate,
3168 3168 [('r', 'rev', '', _('search the repository as it stood at rev')),
3169 3169 ('0', 'print0', None,
3170 3170 _('end filenames with NUL, for use with xargs')),
3171 3171 ('f', 'fullpath', None,
3172 3172 _('print complete paths from the filesystem root')),
3173 3173 ] + walkopts,
3174 3174 _('hg locate [OPTION]... [PATTERN]...')),
3175 3175 "^log|history":
3176 3176 (log,
3177 3177 [('f', 'follow', None,
3178 3178 _('follow changeset history, or file history across copies and renames')),
3179 3179 ('', 'follow-first', None,
3180 3180 _('only follow the first parent of merge changesets')),
3181 3181 ('d', 'date', '', _('show revs matching date spec')),
3182 3182 ('C', 'copies', None, _('show copied files')),
3183 3183 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3184 3184 ('r', 'rev', [], _('show the specified revision or range')),
3185 3185 ('', 'removed', None, _('include revs where files were removed')),
3186 3186 ('m', 'only-merges', None, _('show only merges')),
3187 3187 ('b', 'only-branch', [],
3188 3188 _('show only changesets within the given named branch')),
3189 3189 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3190 3190 ] + logopts + walkopts,
3191 3191 _('hg log [OPTION]... [FILE]')),
3192 3192 "manifest":
3193 3193 (manifest,
3194 3194 [('r', 'rev', '', _('revision to display'))],
3195 3195 _('hg manifest [-r REV]')),
3196 3196 "^merge":
3197 3197 (merge,
3198 3198 [('f', 'force', None, _('force a merge with outstanding changes')),
3199 3199 ('r', 'rev', '', _('revision to merge')),
3200 3200 ],
3201 3201 _('hg merge [-f] [[-r] REV]')),
3202 3202 "outgoing|out":
3203 3203 (outgoing,
3204 3204 [('f', 'force', None,
3205 3205 _('run even when remote repository is unrelated')),
3206 3206 ('r', 'rev', [],
3207 3207 _('a specific revision up to which you would like to push')),
3208 3208 ('n', 'newest-first', None, _('show newest record first')),
3209 3209 ] + logopts + remoteopts,
3210 3210 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3211 3211 "^parents":
3212 3212 (parents,
3213 3213 [('r', 'rev', '', _('show parents from the specified rev')),
3214 3214 ] + templateopts,
3215 3215 _('hg parents [-r REV] [FILE]')),
3216 3216 "paths": (paths, [], _('hg paths [NAME]')),
3217 3217 "^pull":
3218 3218 (pull,
3219 3219 [('u', 'update', None,
3220 3220 _('update to new tip if changesets were pulled')),
3221 3221 ('f', 'force', None,
3222 3222 _('run even when remote repository is unrelated')),
3223 3223 ('r', 'rev', [],
3224 3224 _('a specific revision up to which you would like to pull')),
3225 3225 ] + remoteopts,
3226 3226 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3227 3227 "^push":
3228 3228 (push,
3229 3229 [('f', 'force', None, _('force push')),
3230 3230 ('r', 'rev', [],
3231 3231 _('a specific revision up to which you would like to push')),
3232 3232 ] + remoteopts,
3233 3233 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3234 3234 "recover": (recover, [], _('hg recover')),
3235 3235 "^remove|rm":
3236 3236 (remove,
3237 3237 [('A', 'after', None, _('record delete for missing files')),
3238 3238 ('f', 'force', None,
3239 3239 _('remove (and delete) file even if added or modified')),
3240 3240 ] + walkopts,
3241 3241 _('hg remove [OPTION]... FILE...')),
3242 3242 "rename|mv":
3243 3243 (rename,
3244 3244 [('A', 'after', None, _('record a rename that has already occurred')),
3245 3245 ('f', 'force', None,
3246 3246 _('forcibly copy over an existing managed file')),
3247 3247 ] + walkopts + dryrunopts,
3248 3248 _('hg rename [OPTION]... SOURCE... DEST')),
3249 3249 "resolve":
3250 3250 (resolve,
3251 3251 [('l', 'list', None, _('list state of files needing merge')),
3252 3252 ('m', 'mark', None, _('mark files as resolved')),
3253 3253 ('u', 'unmark', None, _('unmark files as resolved'))],
3254 3254 ('hg resolve [OPTION] [FILES...]')),
3255 3255 "revert":
3256 3256 (revert,
3257 3257 [('a', 'all', None, _('revert all changes when no arguments given')),
3258 3258 ('d', 'date', '', _('tipmost revision matching date')),
3259 3259 ('r', 'rev', '', _('revision to revert to')),
3260 3260 ('', 'no-backup', None, _('do not save backup copies of files')),
3261 3261 ] + walkopts + dryrunopts,
3262 3262 _('hg revert [OPTION]... [-r REV] [NAME]...')),
3263 3263 "rollback": (rollback, [], _('hg rollback')),
3264 3264 "root": (root, [], _('hg root')),
3265 3265 "^serve":
3266 3266 (serve,
3267 3267 [('A', 'accesslog', '', _('name of access log file to write to')),
3268 3268 ('d', 'daemon', None, _('run server in background')),
3269 3269 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3270 3270 ('E', 'errorlog', '', _('name of error log file to write to')),
3271 3271 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3272 3272 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3273 3273 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3274 3274 ('n', 'name', '',
3275 3275 _('name to show in web pages (default: working dir)')),
3276 3276 ('', 'webdir-conf', '', _('name of the webdir config file'
3277 3277 ' (serve more than one repo)')),
3278 3278 ('', 'pid-file', '', _('name of file to write process ID to')),
3279 3279 ('', 'stdio', None, _('for remote clients')),
3280 3280 ('t', 'templates', '', _('web templates to use')),
3281 3281 ('', 'style', '', _('template style to use')),
3282 3282 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3283 3283 ('', 'certificate', '', _('SSL certificate file'))],
3284 3284 _('hg serve [OPTION]...')),
3285 3285 "showconfig|debugconfig":
3286 3286 (showconfig,
3287 3287 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3288 3288 _('hg showconfig [-u] [NAME]...')),
3289 3289 "^status|st":
3290 3290 (status,
3291 3291 [('A', 'all', None, _('show status of all files')),
3292 3292 ('m', 'modified', None, _('show only modified files')),
3293 3293 ('a', 'added', None, _('show only added files')),
3294 3294 ('r', 'removed', None, _('show only removed files')),
3295 3295 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3296 3296 ('c', 'clean', None, _('show only files without changes')),
3297 3297 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3298 3298 ('i', 'ignored', None, _('show only ignored files')),
3299 3299 ('n', 'no-status', None, _('hide status prefix')),
3300 3300 ('C', 'copies', None, _('show source of copied files')),
3301 3301 ('0', 'print0', None,
3302 3302 _('end filenames with NUL, for use with xargs')),
3303 3303 ('', 'rev', [], _('show difference from revision')),
3304 3304 ] + walkopts,
3305 3305 _('hg status [OPTION]... [FILE]...')),
3306 3306 "tag":
3307 3307 (tag,
3308 3308 [('f', 'force', None, _('replace existing tag')),
3309 3309 ('l', 'local', None, _('make the tag local')),
3310 3310 ('r', 'rev', '', _('revision to tag')),
3311 3311 ('', 'remove', None, _('remove a tag')),
3312 3312 # -l/--local is already there, commitopts cannot be used
3313 3313 ('m', 'message', '', _('use <text> as commit message')),
3314 3314 ] + commitopts2,
3315 3315 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3316 3316 "tags": (tags, [], _('hg tags')),
3317 3317 "tip":
3318 3318 (tip,
3319 3319 [('p', 'patch', None, _('show patch')),
3320 3320 ] + templateopts,
3321 3321 _('hg tip [-p]')),
3322 3322 "unbundle":
3323 3323 (unbundle,
3324 3324 [('u', 'update', None,
3325 3325 _('update to new tip if changesets were unbundled'))],
3326 3326 _('hg unbundle [-u] FILE...')),
3327 3327 "^update|up|checkout|co":
3328 3328 (update,
3329 3329 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3330 3330 ('d', 'date', '', _('tipmost revision matching date')),
3331 3331 ('r', 'rev', '', _('revision'))],
3332 3332 _('hg update [-C] [-d DATE] [[-r] REV]')),
3333 3333 "verify": (verify, [], _('hg verify')),
3334 3334 "version": (version_, [], _('hg version')),
3335 3335 }
3336 3336
3337 3337 norepo = ("clone init version help debugcomplete debugdata"
3338 3338 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3339 3339 optionalrepo = ("identify paths serve showconfig debugancestor")
@@ -1,757 +1,753 b''
1 1 # context.py - changeset and file context objects for mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import nullid, nullrev, short
9 9 from i18n import _
10 10 import ancestor, bdiff, revlog, util, os, errno
11 11
12 12 class changectx(object):
13 13 """A changecontext object makes access to data related to a particular
14 14 changeset convenient."""
15 15 def __init__(self, repo, changeid=''):
16 16 """changeid is a revision number, node, or tag"""
17 17 if changeid == '':
18 18 changeid = '.'
19 19 self._repo = repo
20 20 self._node = self._repo.lookup(changeid)
21 21 self._rev = self._repo.changelog.rev(self._node)
22 22
23 23 def __str__(self):
24 24 return short(self.node())
25 25
26 26 def __repr__(self):
27 27 return "<changectx %s>" % str(self)
28 28
29 29 def __hash__(self):
30 30 try:
31 31 return hash(self._rev)
32 32 except AttributeError:
33 33 return id(self)
34 34
35 35 def __eq__(self, other):
36 36 try:
37 37 return self._rev == other._rev
38 38 except AttributeError:
39 39 return False
40 40
41 41 def __ne__(self, other):
42 42 return not (self == other)
43 43
44 44 def __nonzero__(self):
45 45 return self._rev != nullrev
46 46
47 47 def __getattr__(self, name):
48 48 if name == '_changeset':
49 49 self._changeset = self._repo.changelog.read(self.node())
50 50 return self._changeset
51 51 elif name == '_manifest':
52 52 self._manifest = self._repo.manifest.read(self._changeset[0])
53 53 return self._manifest
54 54 elif name == '_manifestdelta':
55 55 md = self._repo.manifest.readdelta(self._changeset[0])
56 56 self._manifestdelta = md
57 57 return self._manifestdelta
58 58 elif name == '_parents':
59 59 p = self._repo.changelog.parents(self._node)
60 60 if p[1] == nullid:
61 61 p = p[:-1]
62 62 self._parents = [changectx(self._repo, x) for x in p]
63 63 return self._parents
64 64 else:
65 65 raise AttributeError, name
66 66
67 67 def __contains__(self, key):
68 68 return key in self._manifest
69 69
70 70 def __getitem__(self, key):
71 71 return self.filectx(key)
72 72
73 73 def __iter__(self):
74 74 a = self._manifest.keys()
75 75 a.sort()
76 76 for f in a:
77 77 yield f
78 78
79 79 def changeset(self): return self._changeset
80 80 def manifest(self): return self._manifest
81 81
82 82 def rev(self): return self._rev
83 83 def node(self): return self._node
84 84 def user(self): return self._changeset[1]
85 85 def date(self): return self._changeset[2]
86 86 def files(self): return self._changeset[3]
87 87 def description(self): return self._changeset[4]
88 88 def branch(self): return self._changeset[5].get("branch")
89 89 def extra(self): return self._changeset[5]
90 90 def tags(self): return self._repo.nodetags(self._node)
91 91
92 92 def parents(self):
93 93 """return contexts for each parent changeset"""
94 94 return self._parents
95 95
96 96 def children(self):
97 97 """return contexts for each child changeset"""
98 98 c = self._repo.changelog.children(self._node)
99 99 return [changectx(self._repo, x) for x in c]
100 100
101 101 def _fileinfo(self, path):
102 102 if '_manifest' in self.__dict__:
103 103 try:
104 104 return self._manifest[path], self._manifest.flags(path)
105 105 except KeyError:
106 106 raise revlog.LookupError(self._node, path,
107 107 _('not found in manifest'))
108 108 if '_manifestdelta' in self.__dict__ or path in self.files():
109 109 if path in self._manifestdelta:
110 110 return self._manifestdelta[path], self._manifestdelta.flags(path)
111 111 node, flag = self._repo.manifest.find(self._changeset[0], path)
112 112 if not node:
113 113 raise revlog.LookupError(self._node, path,
114 114 _('not found in manifest'))
115 115
116 116 return node, flag
117 117
118 118 def filenode(self, path):
119 119 return self._fileinfo(path)[0]
120 120
121 def fileflags(self, path):
121 def flags(self, path):
122 122 try:
123 123 return self._fileinfo(path)[1]
124 124 except revlog.LookupError:
125 125 return ''
126 126
127 127 def filectx(self, path, fileid=None, filelog=None):
128 128 """get a file context from this changeset"""
129 129 if fileid is None:
130 130 fileid = self.filenode(path)
131 131 return filectx(self._repo, path, fileid=fileid,
132 132 changectx=self, filelog=filelog)
133 133
134 134 def filectxs(self):
135 135 """generate a file context for each file in this changeset's
136 136 manifest"""
137 137 mf = self.manifest()
138 138 m = mf.keys()
139 139 m.sort()
140 140 for f in m:
141 141 yield self.filectx(f, fileid=mf[f])
142 142
143 143 def ancestor(self, c2):
144 144 """
145 145 return the ancestor context of self and c2
146 146 """
147 147 n = self._repo.changelog.ancestor(self._node, c2._node)
148 148 return changectx(self._repo, n)
149 149
150 150 class filectx(object):
151 151 """A filecontext object makes access to data related to a particular
152 152 filerevision convenient."""
153 153 def __init__(self, repo, path, changeid=None, fileid=None,
154 154 filelog=None, changectx=None):
155 155 """changeid can be a changeset revision, node, or tag.
156 156 fileid can be a file revision or node."""
157 157 self._repo = repo
158 158 self._path = path
159 159
160 160 assert (changeid is not None
161 161 or fileid is not None
162 162 or changectx is not None)
163 163
164 164 if filelog:
165 165 self._filelog = filelog
166 166
167 167 if changeid is not None:
168 168 self._changeid = changeid
169 169 if changectx is not None:
170 170 self._changectx = changectx
171 171 if fileid is not None:
172 172 self._fileid = fileid
173 173
174 174 def __getattr__(self, name):
175 175 if name == '_changectx':
176 176 self._changectx = changectx(self._repo, self._changeid)
177 177 return self._changectx
178 178 elif name == '_filelog':
179 179 self._filelog = self._repo.file(self._path)
180 180 return self._filelog
181 181 elif name == '_changeid':
182 182 if '_changectx' in self.__dict__:
183 183 self._changeid = self._changectx.rev()
184 184 else:
185 185 self._changeid = self._filelog.linkrev(self._filenode)
186 186 return self._changeid
187 187 elif name == '_filenode':
188 188 if '_fileid' in self.__dict__:
189 189 self._filenode = self._filelog.lookup(self._fileid)
190 190 else:
191 191 self._filenode = self._changectx.filenode(self._path)
192 192 return self._filenode
193 193 elif name == '_filerev':
194 194 self._filerev = self._filelog.rev(self._filenode)
195 195 return self._filerev
196 196 elif name == '_repopath':
197 197 self._repopath = self._path
198 198 return self._repopath
199 199 else:
200 200 raise AttributeError, name
201 201
202 202 def __nonzero__(self):
203 203 try:
204 204 n = self._filenode
205 205 return True
206 206 except revlog.LookupError:
207 207 # file is missing
208 208 return False
209 209
210 210 def __str__(self):
211 211 return "%s@%s" % (self.path(), short(self.node()))
212 212
213 213 def __repr__(self):
214 214 return "<filectx %s>" % str(self)
215 215
216 216 def __hash__(self):
217 217 try:
218 218 return hash((self._path, self._fileid))
219 219 except AttributeError:
220 220 return id(self)
221 221
222 222 def __eq__(self, other):
223 223 try:
224 224 return (self._path == other._path
225 225 and self._fileid == other._fileid)
226 226 except AttributeError:
227 227 return False
228 228
229 229 def __ne__(self, other):
230 230 return not (self == other)
231 231
232 232 def filectx(self, fileid):
233 233 '''opens an arbitrary revision of the file without
234 234 opening a new filelog'''
235 235 return filectx(self._repo, self._path, fileid=fileid,
236 236 filelog=self._filelog)
237 237
238 238 def filerev(self): return self._filerev
239 239 def filenode(self): return self._filenode
240 def fileflags(self): return self._changectx.fileflags(self._path)
241 def isexec(self): return 'x' in self.fileflags()
242 def islink(self): return 'l' in self.fileflags()
240 def flags(self): return self._changectx.flags(self._path)
241 def isexec(self): return 'x' in self.flags()
242 def islink(self): return 'l' in self.flags()
243 243 def filelog(self): return self._filelog
244 244
245 245 def rev(self):
246 246 if '_changectx' in self.__dict__:
247 247 return self._changectx.rev()
248 248 if '_changeid' in self.__dict__:
249 249 return self._changectx.rev()
250 250 return self._filelog.linkrev(self._filenode)
251 251
252 252 def linkrev(self): return self._filelog.linkrev(self._filenode)
253 253 def node(self): return self._changectx.node()
254 254 def user(self): return self._changectx.user()
255 255 def date(self): return self._changectx.date()
256 256 def files(self): return self._changectx.files()
257 257 def description(self): return self._changectx.description()
258 258 def branch(self): return self._changectx.branch()
259 259 def manifest(self): return self._changectx.manifest()
260 260 def changectx(self): return self._changectx
261 261
262 262 def data(self): return self._filelog.read(self._filenode)
263 263 def path(self): return self._path
264 264 def size(self): return self._filelog.size(self._filerev)
265 265
266 266 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
267 267
268 268 def renamed(self):
269 269 """check if file was actually renamed in this changeset revision
270 270
271 271 If rename logged in file revision, we report copy for changeset only
272 272 if file revisions linkrev points back to the changeset in question
273 273 or both changeset parents contain different file revisions.
274 274 """
275 275
276 276 renamed = self._filelog.renamed(self._filenode)
277 277 if not renamed:
278 278 return renamed
279 279
280 280 if self.rev() == self.linkrev():
281 281 return renamed
282 282
283 283 name = self.path()
284 284 fnode = self._filenode
285 285 for p in self._changectx.parents():
286 286 try:
287 287 if fnode == p.filenode(name):
288 288 return None
289 289 except revlog.LookupError:
290 290 pass
291 291 return renamed
292 292
293 293 def parents(self):
294 294 p = self._path
295 295 fl = self._filelog
296 296 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
297 297
298 298 r = self._filelog.renamed(self._filenode)
299 299 if r:
300 300 pl[0] = (r[0], r[1], None)
301 301
302 302 return [filectx(self._repo, p, fileid=n, filelog=l)
303 303 for p,n,l in pl if n != nullid]
304 304
305 305 def children(self):
306 306 # hard for renames
307 307 c = self._filelog.children(self._filenode)
308 308 return [filectx(self._repo, self._path, fileid=x,
309 309 filelog=self._filelog) for x in c]
310 310
311 311 def annotate(self, follow=False, linenumber=None):
312 312 '''returns a list of tuples of (ctx, line) for each line
313 313 in the file, where ctx is the filectx of the node where
314 314 that line was last changed.
315 315 This returns tuples of ((ctx, linenumber), line) for each line,
316 316 if "linenumber" parameter is NOT "None".
317 317 In such tuples, linenumber means one at the first appearance
318 318 in the managed file.
319 319 To reduce annotation cost,
320 320 this returns fixed value(False is used) as linenumber,
321 321 if "linenumber" parameter is "False".'''
322 322
323 323 def decorate_compat(text, rev):
324 324 return ([rev] * len(text.splitlines()), text)
325 325
326 326 def without_linenumber(text, rev):
327 327 return ([(rev, False)] * len(text.splitlines()), text)
328 328
329 329 def with_linenumber(text, rev):
330 330 size = len(text.splitlines())
331 331 return ([(rev, i) for i in xrange(1, size + 1)], text)
332 332
333 333 decorate = (((linenumber is None) and decorate_compat) or
334 334 (linenumber and with_linenumber) or
335 335 without_linenumber)
336 336
337 337 def pair(parent, child):
338 338 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
339 339 child[0][b1:b2] = parent[0][a1:a2]
340 340 return child
341 341
342 342 getlog = util.cachefunc(lambda x: self._repo.file(x))
343 343 def getctx(path, fileid):
344 344 log = path == self._path and self._filelog or getlog(path)
345 345 return filectx(self._repo, path, fileid=fileid, filelog=log)
346 346 getctx = util.cachefunc(getctx)
347 347
348 348 def parents(f):
349 349 # we want to reuse filectx objects as much as possible
350 350 p = f._path
351 351 if f._filerev is None: # working dir
352 352 pl = [(n.path(), n.filerev()) for n in f.parents()]
353 353 else:
354 354 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
355 355
356 356 if follow:
357 357 r = f.renamed()
358 358 if r:
359 359 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
360 360
361 361 return [getctx(p, n) for p, n in pl if n != nullrev]
362 362
363 363 # use linkrev to find the first changeset where self appeared
364 364 if self.rev() != self.linkrev():
365 365 base = self.filectx(self.filerev())
366 366 else:
367 367 base = self
368 368
369 369 # find all ancestors
370 370 needed = {base: 1}
371 371 visit = [base]
372 372 files = [base._path]
373 373 while visit:
374 374 f = visit.pop(0)
375 375 for p in parents(f):
376 376 if p not in needed:
377 377 needed[p] = 1
378 378 visit.append(p)
379 379 if p._path not in files:
380 380 files.append(p._path)
381 381 else:
382 382 # count how many times we'll use this
383 383 needed[p] += 1
384 384
385 385 # sort by revision (per file) which is a topological order
386 386 visit = []
387 387 for f in files:
388 388 fn = [(n.rev(), n) for n in needed.keys() if n._path == f]
389 389 visit.extend(fn)
390 390 visit.sort()
391 391 hist = {}
392 392
393 393 for r, f in visit:
394 394 curr = decorate(f.data(), f)
395 395 for p in parents(f):
396 396 if p != nullid:
397 397 curr = pair(hist[p], curr)
398 398 # trim the history of unneeded revs
399 399 needed[p] -= 1
400 400 if not needed[p]:
401 401 del hist[p]
402 402 hist[f] = curr
403 403
404 404 return zip(hist[f][0], hist[f][1].splitlines(1))
405 405
406 406 def ancestor(self, fc2):
407 407 """
408 408 find the common ancestor file context, if any, of self, and fc2
409 409 """
410 410
411 411 acache = {}
412 412
413 413 # prime the ancestor cache for the working directory
414 414 for c in (self, fc2):
415 415 if c._filerev == None:
416 416 pl = [(n.path(), n.filenode()) for n in c.parents()]
417 417 acache[(c._path, None)] = pl
418 418
419 419 flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
420 420 def parents(vertex):
421 421 if vertex in acache:
422 422 return acache[vertex]
423 423 f, n = vertex
424 424 if f not in flcache:
425 425 flcache[f] = self._repo.file(f)
426 426 fl = flcache[f]
427 427 pl = [(f, p) for p in fl.parents(n) if p != nullid]
428 428 re = fl.renamed(n)
429 429 if re:
430 430 pl.append(re)
431 431 acache[vertex] = pl
432 432 return pl
433 433
434 434 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
435 435 v = ancestor.ancestor(a, b, parents)
436 436 if v:
437 437 f, n = v
438 438 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
439 439
440 440 return None
441 441
442 442 class workingctx(changectx):
443 443 """A workingctx object makes access to data related to
444 444 the current working directory convenient.
445 445 parents - a pair of parent nodeids, or None to use the dirstate.
446 446 date - any valid date string or (unixtime, offset), or None.
447 447 user - username string, or None.
448 448 extra - a dictionary of extra values, or None.
449 449 changes - a list of file lists as returned by localrepo.status()
450 450 or None to use the repository status.
451 451 """
452 452 def __init__(self, repo, parents=None, text="", user=None, date=None,
453 453 extra=None, changes=None):
454 454 self._repo = repo
455 455 self._rev = None
456 456 self._node = None
457 457 self._text = text
458 458 if date:
459 459 self._date = util.parsedate(date)
460 460 else:
461 461 self._date = util.makedate()
462 462 if user:
463 463 self._user = user
464 464 else:
465 465 self._user = self._repo.ui.username()
466 466 if parents:
467 467 p1, p2 = parents
468 468 self._parents = [self._repo.changectx(p) for p in (p1, p2)]
469 469 if changes:
470 470 self._status = list(changes)
471 471
472 472 self._extra = {}
473 473 if extra:
474 474 self._extra = extra.copy()
475 475 if 'branch' not in self._extra:
476 476 branch = self._repo.dirstate.branch()
477 477 try:
478 478 branch = branch.decode('UTF-8').encode('UTF-8')
479 479 except UnicodeDecodeError:
480 480 raise util.Abort(_('branch name not in UTF-8!'))
481 481 self._extra['branch'] = branch
482 482 if self._extra['branch'] == '':
483 483 self._extra['branch'] = 'default'
484 484
485 485 def __str__(self):
486 486 return str(self._parents[0]) + "+"
487 487
488 488 def __nonzero__(self):
489 489 return True
490 490
491 491 def __getattr__(self, name):
492 492 if name == '_status':
493 493 self._status = self._repo.status()
494 494 return self._status
495 495 if name == '_manifest':
496 496 self._buildmanifest()
497 497 return self._manifest
498 498 elif name == '_parents':
499 499 p = self._repo.dirstate.parents()
500 500 if p[1] == nullid:
501 501 p = p[:-1]
502 502 self._parents = [changectx(self._repo, x) for x in p]
503 503 return self._parents
504 504 else:
505 505 raise AttributeError, name
506 506
507 507 def _buildmanifest(self):
508 508 """generate a manifest corresponding to the working directory"""
509 509
510 510 man = self._parents[0].manifest().copy()
511 511 copied = self._repo.dirstate.copies()
512 is_exec = util.execfunc(self._repo.root,
513 lambda p: man.execf(copied.get(p,p)))
514 is_link = util.linkfunc(self._repo.root,
515 lambda p: man.linkf(copied.get(p,p)))
512 cf = lambda x: man.flags(copied.get(x, x))
513 ff = self._repo.dirstate.flagfunc(cf)
516 514 modified, added, removed, deleted, unknown = self._status[:5]
517 515 for i, l in (("a", added), ("m", modified), ("u", unknown)):
518 516 for f in l:
519 517 man[f] = man.get(copied.get(f, f), nullid) + i
520 518 try:
521 man.set(f, is_exec(f), is_link(f))
519 man.set(f, ff(f))
522 520 except OSError:
523 521 pass
524 522
525 523 for f in deleted + removed:
526 524 if f in man:
527 525 del man[f]
528 526
529 527 self._manifest = man
530 528
531 529 def manifest(self): return self._manifest
532 530
533 531 def user(self): return self._user
534 532 def date(self): return self._date
535 533 def description(self): return self._text
536 534 def files(self):
537 535 f = self.modified() + self.added() + self.removed()
538 536 f.sort()
539 537 return f
540 538
541 539 def modified(self): return self._status[0]
542 540 def added(self): return self._status[1]
543 541 def removed(self): return self._status[2]
544 542 def deleted(self): return self._status[3]
545 543 def unknown(self): return self._status[4]
546 544 def clean(self): return self._status[5]
547 545 def branch(self): return self._extra['branch']
548 546 def extra(self): return self._extra
549 547
550 548 def tags(self):
551 549 t = []
552 550 [t.extend(p.tags()) for p in self.parents()]
553 551 return t
554 552
555 553 def children(self):
556 554 return []
557 555
558 def fileflags(self, path):
556 def flags(self, path):
559 557 if '_manifest' in self.__dict__:
560 558 try:
561 559 return self._manifest.flags(path)
562 560 except KeyError:
563 561 return ''
564 562
565 563 pnode = self._parents[0].changeset()[0]
566 564 orig = self._repo.dirstate.copies().get(path, path)
567 565 node, flag = self._repo.manifest.find(pnode, orig)
568 is_link = util.linkfunc(self._repo.root,
569 lambda p: flag and 'l' in flag)
570 is_exec = util.execfunc(self._repo.root,
571 lambda p: flag and 'x' in flag)
572 566 try:
573 return (is_link(path) and 'l' or '') + (is_exec(path) and 'x' or '')
567 ff = self._repo.dirstate.flagfunc(lambda x: flag or '')
568 return ff(path)
574 569 except OSError:
575 570 pass
576 571
577 572 if not node or path in self.deleted() or path in self.removed():
578 573 return ''
579 574 return flag
580 575
581 576 def filectx(self, path, filelog=None):
582 577 """get a file context from the working directory"""
583 578 return workingfilectx(self._repo, path, workingctx=self,
584 579 filelog=filelog)
585 580
586 581 def ancestor(self, c2):
587 582 """return the ancestor context of self and c2"""
588 583 return self._parents[0].ancestor(c2) # punt on two parents for now
589 584
590 585 class workingfilectx(filectx):
591 586 """A workingfilectx object makes access to data related to a particular
592 587 file in the working directory convenient."""
593 588 def __init__(self, repo, path, filelog=None, workingctx=None):
594 589 """changeid can be a changeset revision, node, or tag.
595 590 fileid can be a file revision or node."""
596 591 self._repo = repo
597 592 self._path = path
598 593 self._changeid = None
599 594 self._filerev = self._filenode = None
600 595
601 596 if filelog:
602 597 self._filelog = filelog
603 598 if workingctx:
604 599 self._changectx = workingctx
605 600
606 601 def __getattr__(self, name):
607 602 if name == '_changectx':
608 603 self._changectx = workingctx(self._repo)
609 604 return self._changectx
610 605 elif name == '_repopath':
611 606 self._repopath = (self._repo.dirstate.copied(self._path)
612 607 or self._path)
613 608 return self._repopath
614 609 elif name == '_filelog':
615 610 self._filelog = self._repo.file(self._repopath)
616 611 return self._filelog
617 612 else:
618 613 raise AttributeError, name
619 614
620 615 def __nonzero__(self):
621 616 return True
622 617
623 618 def __str__(self):
624 619 return "%s@%s" % (self.path(), self._changectx)
625 620
626 621 def filectx(self, fileid):
627 622 '''opens an arbitrary revision of the file without
628 623 opening a new filelog'''
629 624 return filectx(self._repo, self._repopath, fileid=fileid,
630 625 filelog=self._filelog)
631 626
632 627 def rev(self):
633 628 if '_changectx' in self.__dict__:
634 629 return self._changectx.rev()
635 630 return self._filelog.linkrev(self._filenode)
636 631
637 632 def data(self): return self._repo.wread(self._path)
638 633 def renamed(self):
639 634 rp = self._repopath
640 635 if rp == self._path:
641 636 return None
642 637 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
643 638
644 639 def parents(self):
645 640 '''return parent filectxs, following copies if necessary'''
646 641 p = self._path
647 642 rp = self._repopath
648 643 pcl = self._changectx._parents
649 644 fl = self._filelog
650 645 pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
651 646 if len(pcl) > 1:
652 647 if rp != p:
653 648 fl = None
654 649 pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
655 650
656 651 return [filectx(self._repo, p, fileid=n, filelog=l)
657 652 for p,n,l in pl if n != nullid]
658 653
659 654 def children(self):
660 655 return []
661 656
662 657 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
663 658 def date(self):
664 659 t, tz = self._changectx.date()
665 660 try:
666 661 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
667 662 except OSError, err:
668 663 if err.errno != errno.ENOENT: raise
669 664 return (t, tz)
670 665
671 666 def cmp(self, text): return self._repo.wread(self._path) == text
672 667
673 668 class memctx(object):
674 669 """A memctx is a subset of changectx supposed to be built on memory
675 670 and passed to commit functions.
676 671
677 672 NOTE: this interface and the related memfilectx are experimental and
678 673 may change without notice.
679 674
680 675 parents - a pair of parent nodeids.
681 676 filectxfn - a callable taking (repo, memctx, path) arguments and
682 677 returning a memctx object.
683 678 date - any valid date string or (unixtime, offset), or None.
684 679 user - username string, or None.
685 680 extra - a dictionary of extra values, or None.
686 681 """
687 682 def __init__(self, repo, parents, text, files, filectxfn, user=None,
688 683 date=None, extra=None):
689 684 self._repo = repo
690 685 self._rev = None
691 686 self._node = None
692 687 self._text = text
693 688 self._date = date and util.parsedate(date) or util.makedate()
694 689 self._user = user or self._repo.ui.username()
695 690 parents = [(p or nullid) for p in parents]
696 691 p1, p2 = parents
697 692 self._parents = [self._repo.changectx(p) for p in (p1, p2)]
698 693 files = list(files)
699 694 files.sort()
700 695 self._status = [files, [], [], [], []]
701 696 self._filectxfn = filectxfn
702 697
703 698 self._extra = extra and extra.copy() or {}
704 699 if 'branch' not in self._extra:
705 700 self._extra['branch'] = 'default'
706 701 elif self._extra.get('branch') == '':
707 702 self._extra['branch'] = 'default'
708 703
709 704 def __str__(self):
710 705 return str(self._parents[0]) + "+"
711 706
712 707 def __nonzero__(self):
713 708 return True
714 709
715 710 def user(self): return self._user
716 711 def date(self): return self._date
717 712 def description(self): return self._text
718 713 def files(self): return self.modified()
719 714 def modified(self): return self._status[0]
720 715 def added(self): return self._status[1]
721 716 def removed(self): return self._status[2]
722 717 def deleted(self): return self._status[3]
723 718 def unknown(self): return self._status[4]
724 719 def clean(self): return self._status[5]
725 720 def branch(self): return self._extra['branch']
726 721 def extra(self): return self._extra
722 def flags(self, f): return self[f].flags()
727 723
728 724 def parents(self):
729 725 """return contexts for each parent changeset"""
730 726 return self._parents
731 727
732 728 def filectx(self, path, filelog=None):
733 729 """get a file context from the working directory"""
734 730 return self._filectxfn(self._repo, self, path)
735 731
736 732 class memfilectx(object):
737 733 """A memfilectx is a subset of filectx supposed to be built by client
738 734 code and passed to commit functions.
739 735 """
740 736 def __init__(self, path, data, islink, isexec, copied):
741 737 """copied is the source file path, or None."""
742 738 self._path = path
743 739 self._data = data
744 740 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
745 741 self._copied = None
746 742 if copied:
747 743 self._copied = (copied, nullid)
748 744
749 745 def __nonzero__(self): return True
750 746 def __str__(self): return "%s@%s" % (self.path(), self._changectx)
751 747 def path(self): return self._path
752 748 def data(self): return self._data
753 def fileflags(self): return self._flags
749 def flags(self): return self._flags
754 750 def isexec(self): return 'x' in self._flags
755 751 def islink(self): return 'l' in self._flags
756 752 def renamed(self): return self._copied
757 753
@@ -1,670 +1,701 b''
1 1 """
2 2 dirstate.py - working directory tracking for mercurial
3 3
4 4 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5
6 6 This software may be used and distributed according to the terms
7 7 of the GNU General Public License, incorporated herein by reference.
8 8 """
9 9
10 10 from node import nullid
11 11 from i18n import _
12 12 import struct, os, bisect, stat, strutil, util, errno, ignore
13 13 import cStringIO, osutil, sys
14 14
15 15 _unknown = ('?', 0, 0, 0)
16 16 _format = ">cllll"
17 17
18 18 class dirstate(object):
19 19
20 20 def __init__(self, opener, ui, root):
21 21 self._opener = opener
22 22 self._root = root
23 23 self._dirty = False
24 24 self._dirtypl = False
25 25 self._ui = ui
26 26
27 27 def __getattr__(self, name):
28 28 if name == '_map':
29 29 self._read()
30 30 return self._map
31 31 elif name == '_copymap':
32 32 self._read()
33 33 return self._copymap
34 34 elif name == '_foldmap':
35 35 _foldmap = {}
36 36 for name in self._map:
37 37 norm = os.path.normcase(os.path.normpath(name))
38 38 _foldmap[norm] = name
39 39 self._foldmap = _foldmap
40 40 return self._foldmap
41 41 elif name == '_branch':
42 42 try:
43 43 self._branch = (self._opener("branch").read().strip()
44 44 or "default")
45 45 except IOError:
46 46 self._branch = "default"
47 47 return self._branch
48 48 elif name == '_pl':
49 49 self._pl = [nullid, nullid]
50 50 try:
51 51 st = self._opener("dirstate").read(40)
52 52 if len(st) == 40:
53 53 self._pl = st[:20], st[20:40]
54 54 except IOError, err:
55 55 if err.errno != errno.ENOENT: raise
56 56 return self._pl
57 57 elif name == '_dirs':
58 58 self._dirs = {}
59 59 for f in self._map:
60 60 if self[f] != 'r':
61 61 self._incpath(f)
62 62 return self._dirs
63 63 elif name == '_ignore':
64 64 files = [self._join('.hgignore')]
65 65 for name, path in self._ui.configitems("ui"):
66 66 if name == 'ignore' or name.startswith('ignore.'):
67 67 files.append(os.path.expanduser(path))
68 68 self._ignore = ignore.ignore(self._root, files, self._ui.warn)
69 69 return self._ignore
70 70 elif name == '_slash':
71 71 self._slash = self._ui.configbool('ui', 'slash') and os.sep != '/'
72 72 return self._slash
73 elif name == '_checklink':
74 self._checklink = util.checklink(self._root)
75 return self._checklink
73 76 elif name == '_checkexec':
74 77 self._checkexec = util.checkexec(self._root)
75 78 return self._checkexec
76 79 elif name == '_folding':
77 80 self._folding = not util.checkfolding(self._join('.hg'))
78 81 return self._folding
79 82 elif name == 'normalize':
80 83 if self._folding:
81 84 self.normalize = self._normalize
82 85 else:
83 86 self.normalize = lambda x: x
84 87 return self.normalize
85 88 else:
86 89 raise AttributeError, name
87 90
88 91 def _join(self, f):
89 92 return os.path.join(self._root, f)
90 93
91 94 def folding(self):
92 95 return self._folding
93 96
97 def flagfunc(self, fallback):
98 if self._checklink:
99 if self._checkexec:
100 def f(x):
101 p = os.path.join(self._root, x)
102 if os.path.islink(p):
103 return 'l'
104 if util.is_exec(p):
105 return 'x'
106 return ''
107 return f
108 def f(x):
109 if os.path.islink(os.path.join(self._root, x)):
110 return 'l'
111 if 'x' in fallback(x):
112 return 'x'
113 return ''
114 return f
115 if self._checkexec:
116 def f(x):
117 if 'l' in fallback(x):
118 return 'l'
119 if util.is_exec(os.path.join(self._root, x)):
120 return 'x'
121 return ''
122 return f
123 return fallback
124
94 125 def getcwd(self):
95 126 cwd = os.getcwd()
96 127 if cwd == self._root: return ''
97 128 # self._root ends with a path separator if self._root is '/' or 'C:\'
98 129 rootsep = self._root
99 130 if not util.endswithsep(rootsep):
100 131 rootsep += os.sep
101 132 if cwd.startswith(rootsep):
102 133 return cwd[len(rootsep):]
103 134 else:
104 135 # we're outside the repo. return an absolute path.
105 136 return cwd
106 137
107 138 def pathto(self, f, cwd=None):
108 139 if cwd is None:
109 140 cwd = self.getcwd()
110 141 path = util.pathto(self._root, cwd, f)
111 142 if self._slash:
112 143 return util.normpath(path)
113 144 return path
114 145
115 146 def __getitem__(self, key):
116 147 ''' current states:
117 148 n normal
118 149 m needs merging
119 150 r marked for removal
120 151 a marked for addition
121 152 ? not tracked'''
122 153 return self._map.get(key, ("?",))[0]
123 154
124 155 def __contains__(self, key):
125 156 return key in self._map
126 157
127 158 def __iter__(self):
128 159 a = self._map.keys()
129 160 a.sort()
130 161 for x in a:
131 162 yield x
132 163
133 164 def parents(self):
134 165 return self._pl
135 166
136 167 def branch(self):
137 168 return self._branch
138 169
139 170 def setparents(self, p1, p2=nullid):
140 171 self._dirty = self._dirtypl = True
141 172 self._pl = p1, p2
142 173
143 174 def setbranch(self, branch):
144 175 self._branch = branch
145 176 self._opener("branch", "w").write(branch + '\n')
146 177
147 178 def _read(self):
148 179 self._map = {}
149 180 self._copymap = {}
150 181 if not self._dirtypl:
151 182 self._pl = [nullid, nullid]
152 183 try:
153 184 st = self._opener("dirstate").read()
154 185 except IOError, err:
155 186 if err.errno != errno.ENOENT: raise
156 187 return
157 188 if not st:
158 189 return
159 190
160 191 if not self._dirtypl:
161 192 self._pl = [st[:20], st[20: 40]]
162 193
163 194 # deref fields so they will be local in loop
164 195 dmap = self._map
165 196 copymap = self._copymap
166 197 unpack = struct.unpack
167 198 e_size = struct.calcsize(_format)
168 199 pos1 = 40
169 200 l = len(st)
170 201
171 202 # the inner loop
172 203 while pos1 < l:
173 204 pos2 = pos1 + e_size
174 205 e = unpack(">cllll", st[pos1:pos2]) # a literal here is faster
175 206 pos1 = pos2 + e[4]
176 207 f = st[pos2:pos1]
177 208 if '\0' in f:
178 209 f, c = f.split('\0')
179 210 copymap[f] = c
180 211 dmap[f] = e # we hold onto e[4] because making a subtuple is slow
181 212
182 213 def invalidate(self):
183 214 for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split():
184 215 if a in self.__dict__:
185 216 delattr(self, a)
186 217 self._dirty = False
187 218
188 219 def copy(self, source, dest):
189 220 if source == dest:
190 221 return
191 222 self._dirty = True
192 223 self._copymap[dest] = source
193 224
194 225 def copied(self, file):
195 226 return self._copymap.get(file, None)
196 227
197 228 def copies(self):
198 229 return self._copymap
199 230
200 231 def _incpath(self, path):
201 232 c = path.rfind('/')
202 233 if c >= 0:
203 234 dirs = self._dirs
204 235 base = path[:c]
205 236 if base not in dirs:
206 237 self._incpath(base)
207 238 dirs[base] = 1
208 239 else:
209 240 dirs[base] += 1
210 241
211 242 def _decpath(self, path):
212 243 c = path.rfind('/')
213 244 if c >= 0:
214 245 base = path[:c]
215 246 dirs = self._dirs
216 247 if dirs[base] == 1:
217 248 del dirs[base]
218 249 self._decpath(base)
219 250 else:
220 251 dirs[base] -= 1
221 252
222 253 def _incpathcheck(self, f):
223 254 if '\r' in f or '\n' in f:
224 255 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
225 256 % f)
226 257 # shadows
227 258 if f in self._dirs:
228 259 raise util.Abort(_('directory %r already in dirstate') % f)
229 260 for c in strutil.rfindall(f, '/'):
230 261 d = f[:c]
231 262 if d in self._dirs:
232 263 break
233 264 if d in self._map and self[d] != 'r':
234 265 raise util.Abort(_('file %r in dirstate clashes with %r') %
235 266 (d, f))
236 267 self._incpath(f)
237 268
238 269 def _changepath(self, f, newstate, relaxed=False):
239 270 # handle upcoming path changes
240 271 oldstate = self[f]
241 272 if oldstate not in "?r" and newstate in "?r":
242 273 if "_dirs" in self.__dict__:
243 274 self._decpath(f)
244 275 return
245 276 if oldstate in "?r" and newstate not in "?r":
246 277 if relaxed and oldstate == '?':
247 278 # XXX
248 279 # in relaxed mode we assume the caller knows
249 280 # what it is doing, workaround for updating
250 281 # dir-to-file revisions
251 282 if "_dirs" in self.__dict__:
252 283 self._incpath(f)
253 284 return
254 285 self._incpathcheck(f)
255 286 return
256 287
257 288 def normal(self, f):
258 289 'mark a file normal and clean'
259 290 self._dirty = True
260 291 self._changepath(f, 'n', True)
261 292 s = os.lstat(self._join(f))
262 293 self._map[f] = ('n', s.st_mode, s.st_size, s.st_mtime, 0)
263 294 if f in self._copymap:
264 295 del self._copymap[f]
265 296
266 297 def normallookup(self, f):
267 298 'mark a file normal, but possibly dirty'
268 299 if self._pl[1] != nullid and f in self._map:
269 300 # if there is a merge going on and the file was either
270 301 # in state 'm' or dirty before being removed, restore that state.
271 302 entry = self._map[f]
272 303 if entry[0] == 'r' and entry[2] in (-1, -2):
273 304 source = self._copymap.get(f)
274 305 if entry[2] == -1:
275 306 self.merge(f)
276 307 elif entry[2] == -2:
277 308 self.normaldirty(f)
278 309 if source:
279 310 self.copy(source, f)
280 311 return
281 312 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
282 313 return
283 314 self._dirty = True
284 315 self._changepath(f, 'n', True)
285 316 self._map[f] = ('n', 0, -1, -1, 0)
286 317 if f in self._copymap:
287 318 del self._copymap[f]
288 319
289 320 def normaldirty(self, f):
290 321 'mark a file normal, but dirty'
291 322 self._dirty = True
292 323 self._changepath(f, 'n', True)
293 324 self._map[f] = ('n', 0, -2, -1, 0)
294 325 if f in self._copymap:
295 326 del self._copymap[f]
296 327
297 328 def add(self, f):
298 329 'mark a file added'
299 330 self._dirty = True
300 331 self._changepath(f, 'a')
301 332 self._map[f] = ('a', 0, -1, -1, 0)
302 333 if f in self._copymap:
303 334 del self._copymap[f]
304 335
305 336 def remove(self, f):
306 337 'mark a file removed'
307 338 self._dirty = True
308 339 self._changepath(f, 'r')
309 340 size = 0
310 341 if self._pl[1] != nullid and f in self._map:
311 342 entry = self._map[f]
312 343 if entry[0] == 'm':
313 344 size = -1
314 345 elif entry[0] == 'n' and entry[2] == -2:
315 346 size = -2
316 347 self._map[f] = ('r', 0, size, 0, 0)
317 348 if size == 0 and f in self._copymap:
318 349 del self._copymap[f]
319 350
320 351 def merge(self, f):
321 352 'mark a file merged'
322 353 self._dirty = True
323 354 s = os.lstat(self._join(f))
324 355 self._changepath(f, 'm', True)
325 356 self._map[f] = ('m', s.st_mode, s.st_size, s.st_mtime, 0)
326 357 if f in self._copymap:
327 358 del self._copymap[f]
328 359
329 360 def forget(self, f):
330 361 'forget a file'
331 362 self._dirty = True
332 363 try:
333 364 self._changepath(f, '?')
334 365 del self._map[f]
335 366 except KeyError:
336 367 self._ui.warn(_("not in dirstate: %s\n") % f)
337 368
338 369 def _normalize(self, path):
339 370 normpath = os.path.normcase(os.path.normpath(path))
340 371 if normpath in self._foldmap:
341 372 return self._foldmap[normpath]
342 373 elif os.path.exists(path):
343 374 self._foldmap[normpath] = util.fspath(path, self._root)
344 375 return self._foldmap[normpath]
345 376 else:
346 377 return path
347 378
348 379 def clear(self):
349 380 self._map = {}
350 381 if "_dirs" in self.__dict__:
351 382 delattr(self, "_dirs");
352 383 self._copymap = {}
353 384 self._pl = [nullid, nullid]
354 385 self._dirty = True
355 386
356 387 def rebuild(self, parent, files):
357 388 self.clear()
358 389 for f in files:
359 390 if files.execf(f):
360 391 self._map[f] = ('n', 0777, -1, 0, 0)
361 392 else:
362 393 self._map[f] = ('n', 0666, -1, 0, 0)
363 394 self._pl = (parent, nullid)
364 395 self._dirty = True
365 396
366 397 def write(self):
367 398 if not self._dirty:
368 399 return
369 400 st = self._opener("dirstate", "w", atomictemp=True)
370 401
371 402 try:
372 403 gran = int(self._ui.config('dirstate', 'granularity', 1))
373 404 except ValueError:
374 405 gran = 1
375 406 limit = sys.maxint
376 407 if gran > 0:
377 408 limit = util.fstat(st).st_mtime - gran
378 409
379 410 cs = cStringIO.StringIO()
380 411 copymap = self._copymap
381 412 pack = struct.pack
382 413 write = cs.write
383 414 write("".join(self._pl))
384 415 for f, e in self._map.iteritems():
385 416 if f in copymap:
386 417 f = "%s\0%s" % (f, copymap[f])
387 418 if e[3] > limit and e[0] == 'n':
388 419 e = (e[0], 0, -1, -1, 0)
389 420 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
390 421 write(e)
391 422 write(f)
392 423 st.write(cs.getvalue())
393 424 st.rename()
394 425 self._dirty = self._dirtypl = False
395 426
396 427 def _filter(self, files):
397 428 ret = {}
398 429 unknown = []
399 430
400 431 for x in files:
401 432 if x == '.':
402 433 return self._map.copy()
403 434 if x not in self._map:
404 435 unknown.append(x)
405 436 else:
406 437 ret[x] = self._map[x]
407 438
408 439 if not unknown:
409 440 return ret
410 441
411 442 b = self._map.keys()
412 443 b.sort()
413 444 blen = len(b)
414 445
415 446 for x in unknown:
416 447 bs = bisect.bisect(b, "%s%s" % (x, '/'))
417 448 while bs < blen:
418 449 s = b[bs]
419 450 if len(s) > len(x) and s.startswith(x):
420 451 ret[s] = self._map[s]
421 452 else:
422 453 break
423 454 bs += 1
424 455 return ret
425 456
426 457 def _supported(self, f, mode, verbose=False):
427 458 if stat.S_ISREG(mode) or stat.S_ISLNK(mode):
428 459 return True
429 460 if verbose:
430 461 kind = 'unknown'
431 462 if stat.S_ISCHR(mode): kind = _('character device')
432 463 elif stat.S_ISBLK(mode): kind = _('block device')
433 464 elif stat.S_ISFIFO(mode): kind = _('fifo')
434 465 elif stat.S_ISSOCK(mode): kind = _('socket')
435 466 elif stat.S_ISDIR(mode): kind = _('directory')
436 467 self._ui.warn(_('%s: unsupported file type (type is %s)\n')
437 468 % (self.pathto(f), kind))
438 469 return False
439 470
440 471 def _dirignore(self, f):
441 472 if f == '.':
442 473 return False
443 474 if self._ignore(f):
444 475 return True
445 476 for c in strutil.findall(f, '/'):
446 477 if self._ignore(f[:c]):
447 478 return True
448 479 return False
449 480
450 481 def walk(self, match):
451 482 # filter out the src and stat
452 483 for src, f, st in self.statwalk(match):
453 484 yield f
454 485
455 486 def statwalk(self, match, unknown=True, ignored=False):
456 487 '''
457 488 walk recursively through the directory tree, finding all files
458 489 matched by the match function
459 490
460 491 results are yielded in a tuple (src, filename, st), where src
461 492 is one of:
462 493 'f' the file was found in the directory tree
463 494 'm' the file was only in the dirstate and not in the tree
464 495
465 496 and st is the stat result if the file was found in the directory.
466 497 '''
467 498
468 499 def fwarn(f, msg):
469 500 self._ui.warn('%s: %s\n' % (self.pathto(ff), msg))
470 501 return False
471 502 badfn = fwarn
472 503 if hasattr(match, 'bad'):
473 504 badfn = match.bad
474 505
475 506 # walk all files by default
476 507 files = match.files()
477 508 if not files:
478 509 files = ['.']
479 510 dc = self._map.copy()
480 511 else:
481 512 files = util.unique(files)
482 513 dc = self._filter(files)
483 514
484 515 def imatch(file_):
485 516 if file_ not in dc and self._ignore(file_):
486 517 return False
487 518 return match(file_)
488 519
489 520 # TODO: don't walk unknown directories if unknown and ignored are False
490 521 ignore = self._ignore
491 522 dirignore = self._dirignore
492 523 if ignored:
493 524 imatch = match
494 525 ignore = util.never
495 526 dirignore = util.never
496 527
497 528 # self._root may end with a path separator when self._root == '/'
498 529 common_prefix_len = len(self._root)
499 530 if not util.endswithsep(self._root):
500 531 common_prefix_len += 1
501 532
502 533 normpath = util.normpath
503 534 listdir = osutil.listdir
504 535 lstat = os.lstat
505 536 bisect_left = bisect.bisect_left
506 537 isdir = os.path.isdir
507 538 pconvert = util.pconvert
508 539 join = os.path.join
509 540 s_isdir = stat.S_ISDIR
510 541 supported = self._supported
511 542 _join = self._join
512 543 known = {'.hg': 1}
513 544
514 545 # recursion free walker, faster than os.walk.
515 546 def findfiles(s):
516 547 work = [s]
517 548 wadd = work.append
518 549 found = []
519 550 add = found.append
520 551 if hasattr(match, 'dir'):
521 552 match.dir(normpath(s[common_prefix_len:]))
522 553 while work:
523 554 top = work.pop()
524 555 entries = listdir(top, stat=True)
525 556 # nd is the top of the repository dir tree
526 557 nd = normpath(top[common_prefix_len:])
527 558 if nd == '.':
528 559 nd = ''
529 560 else:
530 561 # do not recurse into a repo contained in this
531 562 # one. use bisect to find .hg directory so speed
532 563 # is good on big directory.
533 564 names = [e[0] for e in entries]
534 565 hg = bisect_left(names, '.hg')
535 566 if hg < len(names) and names[hg] == '.hg':
536 567 if isdir(join(top, '.hg')):
537 568 continue
538 569 for f, kind, st in entries:
539 570 np = pconvert(join(nd, f))
540 571 if np in known:
541 572 continue
542 573 known[np] = 1
543 574 p = join(top, f)
544 575 # don't trip over symlinks
545 576 if kind == stat.S_IFDIR:
546 577 if not ignore(np):
547 578 wadd(p)
548 579 if hasattr(match, 'dir'):
549 580 match.dir(np)
550 581 if np in dc and match(np):
551 582 add((np, 'm', st))
552 583 elif imatch(np):
553 584 if supported(np, st.st_mode):
554 585 add((np, 'f', st))
555 586 elif np in dc:
556 587 add((np, 'm', st))
557 588 found.sort()
558 589 return found
559 590
560 591 # step one, find all files that match our criteria
561 592 files.sort()
562 593 for ff in files:
563 594 nf = normpath(ff)
564 595 f = _join(ff)
565 596 try:
566 597 st = lstat(f)
567 598 except OSError, inst:
568 599 found = False
569 600 for fn in dc:
570 601 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
571 602 found = True
572 603 break
573 604 if not found:
574 605 if inst.errno != errno.ENOENT:
575 606 fwarn(ff, inst.strerror)
576 607 elif badfn(ff, inst.strerror) and imatch(nf):
577 608 yield 'f', ff, None
578 609 continue
579 610 if s_isdir(st.st_mode):
580 611 if not dirignore(nf):
581 612 for f, src, st in findfiles(f):
582 613 yield src, f, st
583 614 else:
584 615 if nf in known:
585 616 continue
586 617 known[nf] = 1
587 618 if match(nf):
588 619 if supported(ff, st.st_mode, verbose=True):
589 620 yield 'f', self.normalize(nf), st
590 621 elif ff in dc:
591 622 yield 'm', nf, st
592 623
593 624 # step two run through anything left in the dc hash and yield
594 625 # if we haven't already seen it
595 626 ks = dc.keys()
596 627 ks.sort()
597 628 for k in ks:
598 629 if k in known:
599 630 continue
600 631 known[k] = 1
601 632 if imatch(k):
602 633 yield 'm', k, None
603 634
604 635 def status(self, match, list_ignored, list_clean, list_unknown):
605 636 lookup, modified, added, unknown, ignored = [], [], [], [], []
606 637 removed, deleted, clean = [], [], []
607 638
608 639 _join = self._join
609 640 lstat = os.lstat
610 641 cmap = self._copymap
611 642 dmap = self._map
612 643 ladd = lookup.append
613 644 madd = modified.append
614 645 aadd = added.append
615 646 uadd = unknown.append
616 647 iadd = ignored.append
617 648 radd = removed.append
618 649 dadd = deleted.append
619 650 cadd = clean.append
620 651
621 652 for src, fn, st in self.statwalk(match, unknown=list_unknown,
622 653 ignored=list_ignored):
623 654 if fn not in dmap:
624 655 if (list_ignored or match.exact(fn)) and self._dirignore(fn):
625 656 if list_ignored:
626 657 iadd(fn)
627 658 elif list_unknown:
628 659 uadd(fn)
629 660 continue
630 661
631 662 state, mode, size, time, foo = dmap[fn]
632 663
633 664 if src == 'm':
634 665 nonexistent = True
635 666 if not st:
636 667 try:
637 668 st = lstat(_join(fn))
638 669 except OSError, inst:
639 670 if inst.errno not in (errno.ENOENT, errno.ENOTDIR):
640 671 raise
641 672 st = None
642 673 # We need to re-check that it is a valid file
643 674 if st and self._supported(fn, st.st_mode):
644 675 nonexistent = False
645 676 if nonexistent and state in "nma":
646 677 dadd(fn)
647 678 continue
648 679 # check the common case first
649 680 if state == 'n':
650 681 if not st:
651 682 st = lstat(_join(fn))
652 683 if (size >= 0 and
653 684 (size != st.st_size
654 685 or ((mode ^ st.st_mode) & 0100 and self._checkexec))
655 686 or size == -2
656 687 or fn in self._copymap):
657 688 madd(fn)
658 689 elif time != int(st.st_mtime):
659 690 ladd(fn)
660 691 elif list_clean:
661 692 cadd(fn)
662 693 elif state == 'm':
663 694 madd(fn)
664 695 elif state == 'a':
665 696 aadd(fn)
666 697 elif state == 'r':
667 698 radd(fn)
668 699
669 700 return (lookup, modified, added, removed, deleted, unknown, ignored,
670 701 clean)
@@ -1,219 +1,219 b''
1 1 # filemerge.py - file-level merge handling for Mercurial
2 2 #
3 3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import nullrev, short
9 9 from i18n import _
10 10 import util, os, tempfile, simplemerge, re, filecmp
11 11
12 12 def _toolstr(ui, tool, part, default=""):
13 13 return ui.config("merge-tools", tool + "." + part, default)
14 14
15 15 def _toolbool(ui, tool, part, default=False):
16 16 return ui.configbool("merge-tools", tool + "." + part, default)
17 17
18 18 def _findtool(ui, tool):
19 19 if tool in ("internal:fail", "internal:local", "internal:other"):
20 20 return tool
21 21 k = _toolstr(ui, tool, "regkey")
22 22 if k:
23 23 p = util.lookup_reg(k, _toolstr(ui, tool, "regname"))
24 24 if p:
25 25 p = util.find_exe(p + _toolstr(ui, tool, "regappend"))
26 26 if p:
27 27 return p
28 28 return util.find_exe(_toolstr(ui, tool, "executable", tool))
29 29
30 30 def _picktool(repo, ui, path, binary, symlink):
31 31 def check(tool, pat, symlink, binary):
32 32 tmsg = tool
33 33 if pat:
34 34 tmsg += " specified for " + pat
35 35 if pat and not _findtool(ui, tool): # skip search if not matching
36 36 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
37 37 elif symlink and not _toolbool(ui, tool, "symlink"):
38 38 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
39 39 elif binary and not _toolbool(ui, tool, "binary"):
40 40 ui.warn(_("tool %s can't handle binary\n") % tmsg)
41 41 elif not util.gui() and _toolbool(ui, tool, "gui"):
42 42 ui.warn(_("tool %s requires a GUI\n") % tmsg)
43 43 else:
44 44 return True
45 45 return False
46 46
47 47 # HGMERGE takes precedence
48 48 hgmerge = os.environ.get("HGMERGE")
49 49 if hgmerge:
50 50 return (hgmerge, hgmerge)
51 51
52 52 # then patterns
53 53 for pat, tool in ui.configitems("merge-patterns"):
54 54 mf = util.matcher(repo.root, "", [pat], [], [])[1]
55 55 if mf(path) and check(tool, pat, symlink, False):
56 56 toolpath = _findtool(ui, tool)
57 57 return (tool, '"' + toolpath + '"')
58 58
59 59 # then merge tools
60 60 tools = {}
61 61 for k,v in ui.configitems("merge-tools"):
62 62 t = k.split('.')[0]
63 63 if t not in tools:
64 64 tools[t] = int(_toolstr(ui, t, "priority", "0"))
65 65 names = tools.keys()
66 66 tools = [(-p,t) for t,p in tools.items()]
67 67 tools.sort()
68 68 uimerge = ui.config("ui", "merge")
69 69 if uimerge:
70 70 if uimerge not in names:
71 71 return (uimerge, uimerge)
72 72 tools.insert(0, (None, uimerge)) # highest priority
73 73 tools.append((None, "hgmerge")) # the old default, if found
74 74 for p,t in tools:
75 75 toolpath = _findtool(ui, t)
76 76 if toolpath and check(t, None, symlink, binary):
77 77 return (t, '"' + toolpath + '"')
78 78 # internal merge as last resort
79 79 return (not (symlink or binary) and "internal:merge" or None, None)
80 80
81 81 def _eoltype(data):
82 82 "Guess the EOL type of a file"
83 83 if '\0' in data: # binary
84 84 return None
85 85 if '\r\n' in data: # Windows
86 86 return '\r\n'
87 87 if '\r' in data: # Old Mac
88 88 return '\r'
89 89 if '\n' in data: # UNIX
90 90 return '\n'
91 91 return None # unknown
92 92
93 93 def _matcheol(file, origfile):
94 94 "Convert EOL markers in a file to match origfile"
95 95 tostyle = _eoltype(open(origfile, "rb").read())
96 96 if tostyle:
97 97 data = open(file, "rb").read()
98 98 style = _eoltype(data)
99 99 if style:
100 100 newdata = data.replace(style, tostyle)
101 101 if newdata != data:
102 102 open(file, "wb").write(newdata)
103 103
104 104 def filemerge(repo, mynode, orig, fcd, fco, fca):
105 105 """perform a 3-way merge in the working directory
106 106
107 107 mynode = parent node before merge
108 108 orig = original local filename before merge
109 109 fco = other file context
110 110 fca = ancestor file context
111 111 fcd = local file context for current/destination file
112 112 """
113 113
114 114 def temp(prefix, ctx):
115 115 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
116 116 (fd, name) = tempfile.mkstemp(prefix=pre)
117 117 data = repo.wwritedata(ctx.path(), ctx.data())
118 118 f = os.fdopen(fd, "wb")
119 119 f.write(data)
120 120 f.close()
121 121 return name
122 122
123 123 def isbin(ctx):
124 124 try:
125 125 return util.binary(ctx.data())
126 126 except IOError:
127 127 return False
128 128
129 129 if not fco.cmp(fcd.data()): # files identical?
130 130 return None
131 131
132 132 ui = repo.ui
133 133 fd = fcd.path()
134 134 binary = isbin(fcd) or isbin(fco) or isbin(fca)
135 135 symlink = fcd.islink() or fco.islink()
136 136 tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
137 137 ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") %
138 138 (tool, fd, binary, symlink))
139 139
140 140 if not tool:
141 141 tool = "internal:local"
142 142 if ui.prompt(_(" no tool found to merge %s\n"
143 143 "keep (l)ocal or take (o)ther?") % fd,
144 144 _("[lo]"), _("l")) != _("l"):
145 145 tool = "internal:other"
146 146 if tool == "internal:local":
147 147 return 0
148 148 if tool == "internal:other":
149 repo.wwrite(fd, fco.data(), fco.fileflags())
149 repo.wwrite(fd, fco.data(), fco.flags())
150 150 return 0
151 151 if tool == "internal:fail":
152 152 return 1
153 153
154 154 # do the actual merge
155 155 a = repo.wjoin(fd)
156 156 b = temp("base", fca)
157 157 c = temp("other", fco)
158 158 out = ""
159 159 back = a + ".orig"
160 160 util.copyfile(a, back)
161 161
162 162 if orig != fco.path():
163 163 repo.ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
164 164 else:
165 165 repo.ui.status(_("merging %s\n") % fd)
166 166
167 167 repo.ui.debug(_("my %s other %s ancestor %s\n") % (fcd, fco, fca))
168 168
169 169 # do we attempt to simplemerge first?
170 170 if _toolbool(ui, tool, "premerge", not (binary or symlink)):
171 171 r = simplemerge.simplemerge(a, b, c, quiet=True)
172 172 if not r:
173 173 ui.debug(_(" premerge successful\n"))
174 174 os.unlink(back)
175 175 os.unlink(b)
176 176 os.unlink(c)
177 177 return 0
178 178 util.copyfile(back, a) # restore from backup and try again
179 179
180 180 env = dict(HG_FILE=fd,
181 181 HG_MY_NODE=short(mynode),
182 182 HG_OTHER_NODE=str(fco.changectx()),
183 183 HG_MY_ISLINK=fcd.islink(),
184 184 HG_OTHER_ISLINK=fco.islink(),
185 185 HG_BASE_ISLINK=fca.islink())
186 186
187 187 if tool == "internal:merge":
188 188 r = simplemerge.simplemerge(a, b, c, label=['local', 'other'])
189 189 else:
190 190 args = _toolstr(ui, tool, "args", '$local $base $other')
191 191 if "$output" in args:
192 192 out, a = a, back # read input from backup, write to original
193 193 replace = dict(local=a, base=b, other=c, output=out)
194 194 args = re.sub("\$(local|base|other|output)",
195 195 lambda x: '"%s"' % replace[x.group()[1:]], args)
196 196 r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)
197 197
198 198 if not r and _toolbool(ui, tool, "checkconflicts"):
199 199 if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data()):
200 200 r = 1
201 201
202 202 if not r and _toolbool(ui, tool, "checkchanged"):
203 203 if filecmp.cmp(repo.wjoin(fd), back):
204 204 if ui.prompt(_(" output file %s appears unchanged\n"
205 205 "was merge successful (yn)?") % fd,
206 206 _("[yn]"), _("n")) != _("y"):
207 207 r = 1
208 208
209 209 if _toolbool(ui, tool, "fixeol"):
210 210 _matcheol(repo.wjoin(fd), back)
211 211
212 212 if r:
213 213 repo.ui.warn(_("merging %s failed!\n") % fd)
214 214 else:
215 215 os.unlink(back)
216 216
217 217 os.unlink(b)
218 218 os.unlink(c)
219 219 return r
@@ -1,2132 +1,2123 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup
11 11 import changelog, dirstate, filelog, manifest, context, weakref
12 12 import lock, transaction, stat, errno, ui
13 13 import os, revlog, time, util, extensions, hook, inspect
14 14 import match as match_
15 15
16 16 class localrepository(repo.repository):
17 17 capabilities = util.set(('lookup', 'changegroupsubset'))
18 18 supported = ('revlogv1', 'store')
19 19
20 20 def __init__(self, parentui, path=None, create=0):
21 21 repo.repository.__init__(self)
22 22 self.root = os.path.realpath(path)
23 23 self.path = os.path.join(self.root, ".hg")
24 24 self.origroot = path
25 25 self.opener = util.opener(self.path)
26 26 self.wopener = util.opener(self.root)
27 27
28 28 if not os.path.isdir(self.path):
29 29 if create:
30 30 if not os.path.exists(path):
31 31 os.mkdir(path)
32 32 os.mkdir(self.path)
33 33 requirements = ["revlogv1"]
34 34 if parentui.configbool('format', 'usestore', True):
35 35 os.mkdir(os.path.join(self.path, "store"))
36 36 requirements.append("store")
37 37 # create an invalid changelog
38 38 self.opener("00changelog.i", "a").write(
39 39 '\0\0\0\2' # represents revlogv2
40 40 ' dummy changelog to prevent using the old repo layout'
41 41 )
42 42 reqfile = self.opener("requires", "w")
43 43 for r in requirements:
44 44 reqfile.write("%s\n" % r)
45 45 reqfile.close()
46 46 else:
47 47 raise repo.RepoError(_("repository %s not found") % path)
48 48 elif create:
49 49 raise repo.RepoError(_("repository %s already exists") % path)
50 50 else:
51 51 # find requirements
52 52 try:
53 53 requirements = self.opener("requires").read().splitlines()
54 54 except IOError, inst:
55 55 if inst.errno != errno.ENOENT:
56 56 raise
57 57 requirements = []
58 58 # check them
59 59 for r in requirements:
60 60 if r not in self.supported:
61 61 raise repo.RepoError(_("requirement '%s' not supported") % r)
62 62
63 63 # setup store
64 64 if "store" in requirements:
65 65 self.encodefn = util.encodefilename
66 66 self.decodefn = util.decodefilename
67 67 self.spath = os.path.join(self.path, "store")
68 68 else:
69 69 self.encodefn = lambda x: x
70 70 self.decodefn = lambda x: x
71 71 self.spath = self.path
72 72
73 73 try:
74 74 # files in .hg/ will be created using this mode
75 75 mode = os.stat(self.spath).st_mode
76 76 # avoid some useless chmods
77 77 if (0777 & ~util._umask) == (0777 & mode):
78 78 mode = None
79 79 except OSError:
80 80 mode = None
81 81
82 82 self._createmode = mode
83 83 self.opener.createmode = mode
84 84 sopener = util.opener(self.spath)
85 85 sopener.createmode = mode
86 86 self.sopener = util.encodedopener(sopener, self.encodefn)
87 87
88 88 self.ui = ui.ui(parentui=parentui)
89 89 try:
90 90 self.ui.readconfig(self.join("hgrc"), self.root)
91 91 extensions.loadall(self.ui)
92 92 except IOError:
93 93 pass
94 94
95 95 self.tagscache = None
96 96 self._tagstypecache = None
97 97 self.branchcache = None
98 98 self._ubranchcache = None # UTF-8 version of branchcache
99 99 self._branchcachetip = None
100 100 self.nodetagscache = None
101 101 self.filterpats = {}
102 102 self._datafilters = {}
103 103 self._transref = self._lockref = self._wlockref = None
104 104
105 105 def __getattr__(self, name):
106 106 if name == 'changelog':
107 107 self.changelog = changelog.changelog(self.sopener)
108 108 self.sopener.defversion = self.changelog.version
109 109 return self.changelog
110 110 if name == 'manifest':
111 111 self.changelog
112 112 self.manifest = manifest.manifest(self.sopener)
113 113 return self.manifest
114 114 if name == 'dirstate':
115 115 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
116 116 return self.dirstate
117 117 else:
118 118 raise AttributeError, name
119 119
120 120 def url(self):
121 121 return 'file:' + self.root
122 122
123 123 def hook(self, name, throw=False, **args):
124 124 return hook.hook(self.ui, self, name, throw, **args)
125 125
126 126 tag_disallowed = ':\r\n'
127 127
128 128 def _tag(self, names, node, message, local, user, date, parent=None,
129 129 extra={}):
130 130 use_dirstate = parent is None
131 131
132 132 if isinstance(names, str):
133 133 allchars = names
134 134 names = (names,)
135 135 else:
136 136 allchars = ''.join(names)
137 137 for c in self.tag_disallowed:
138 138 if c in allchars:
139 139 raise util.Abort(_('%r cannot be used in a tag name') % c)
140 140
141 141 for name in names:
142 142 self.hook('pretag', throw=True, node=hex(node), tag=name,
143 143 local=local)
144 144
145 145 def writetags(fp, names, munge, prevtags):
146 146 fp.seek(0, 2)
147 147 if prevtags and prevtags[-1] != '\n':
148 148 fp.write('\n')
149 149 for name in names:
150 150 m = munge and munge(name) or name
151 151 if self._tagstypecache and name in self._tagstypecache:
152 152 old = self.tagscache.get(name, nullid)
153 153 fp.write('%s %s\n' % (hex(old), m))
154 154 fp.write('%s %s\n' % (hex(node), m))
155 155 fp.close()
156 156
157 157 prevtags = ''
158 158 if local:
159 159 try:
160 160 fp = self.opener('localtags', 'r+')
161 161 except IOError, err:
162 162 fp = self.opener('localtags', 'a')
163 163 else:
164 164 prevtags = fp.read()
165 165
166 166 # local tags are stored in the current charset
167 167 writetags(fp, names, None, prevtags)
168 168 for name in names:
169 169 self.hook('tag', node=hex(node), tag=name, local=local)
170 170 return
171 171
172 172 if use_dirstate:
173 173 try:
174 174 fp = self.wfile('.hgtags', 'rb+')
175 175 except IOError, err:
176 176 fp = self.wfile('.hgtags', 'ab')
177 177 else:
178 178 prevtags = fp.read()
179 179 else:
180 180 try:
181 181 prevtags = self.filectx('.hgtags', parent).data()
182 182 except revlog.LookupError:
183 183 pass
184 184 fp = self.wfile('.hgtags', 'wb')
185 185 if prevtags:
186 186 fp.write(prevtags)
187 187
188 188 # committed tags are stored in UTF-8
189 189 writetags(fp, names, util.fromlocal, prevtags)
190 190
191 191 if use_dirstate and '.hgtags' not in self.dirstate:
192 192 self.add(['.hgtags'])
193 193
194 194 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent,
195 195 extra=extra)
196 196
197 197 for name in names:
198 198 self.hook('tag', node=hex(node), tag=name, local=local)
199 199
200 200 return tagnode
201 201
202 202 def tag(self, names, node, message, local, user, date):
203 203 '''tag a revision with one or more symbolic names.
204 204
205 205 names is a list of strings or, when adding a single tag, names may be a
206 206 string.
207 207
208 208 if local is True, the tags are stored in a per-repository file.
209 209 otherwise, they are stored in the .hgtags file, and a new
210 210 changeset is committed with the change.
211 211
212 212 keyword arguments:
213 213
214 214 local: whether to store tags in non-version-controlled file
215 215 (default False)
216 216
217 217 message: commit message to use if committing
218 218
219 219 user: name of user to use if committing
220 220
221 221 date: date tuple to use if committing'''
222 222
223 223 for x in self.status()[:5]:
224 224 if '.hgtags' in x:
225 225 raise util.Abort(_('working copy of .hgtags is changed '
226 226 '(please commit .hgtags manually)'))
227 227
228 228 self._tag(names, node, message, local, user, date)
229 229
230 230 def tags(self):
231 231 '''return a mapping of tag to node'''
232 232 if self.tagscache:
233 233 return self.tagscache
234 234
235 235 globaltags = {}
236 236 tagtypes = {}
237 237
238 238 def readtags(lines, fn, tagtype):
239 239 filetags = {}
240 240 count = 0
241 241
242 242 def warn(msg):
243 243 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
244 244
245 245 for l in lines:
246 246 count += 1
247 247 if not l:
248 248 continue
249 249 s = l.split(" ", 1)
250 250 if len(s) != 2:
251 251 warn(_("cannot parse entry"))
252 252 continue
253 253 node, key = s
254 254 key = util.tolocal(key.strip()) # stored in UTF-8
255 255 try:
256 256 bin_n = bin(node)
257 257 except TypeError:
258 258 warn(_("node '%s' is not well formed") % node)
259 259 continue
260 260 if bin_n not in self.changelog.nodemap:
261 261 warn(_("tag '%s' refers to unknown node") % key)
262 262 continue
263 263
264 264 h = []
265 265 if key in filetags:
266 266 n, h = filetags[key]
267 267 h.append(n)
268 268 filetags[key] = (bin_n, h)
269 269
270 270 for k, nh in filetags.items():
271 271 if k not in globaltags:
272 272 globaltags[k] = nh
273 273 tagtypes[k] = tagtype
274 274 continue
275 275
276 276 # we prefer the global tag if:
277 277 # it supercedes us OR
278 278 # mutual supercedes and it has a higher rank
279 279 # otherwise we win because we're tip-most
280 280 an, ah = nh
281 281 bn, bh = globaltags[k]
282 282 if (bn != an and an in bh and
283 283 (bn not in ah or len(bh) > len(ah))):
284 284 an = bn
285 285 ah.extend([n for n in bh if n not in ah])
286 286 globaltags[k] = an, ah
287 287 tagtypes[k] = tagtype
288 288
289 289 # read the tags file from each head, ending with the tip
290 290 f = None
291 291 for rev, node, fnode in self._hgtagsnodes():
292 292 f = (f and f.filectx(fnode) or
293 293 self.filectx('.hgtags', fileid=fnode))
294 294 readtags(f.data().splitlines(), f, "global")
295 295
296 296 try:
297 297 data = util.fromlocal(self.opener("localtags").read())
298 298 # localtags are stored in the local character set
299 299 # while the internal tag table is stored in UTF-8
300 300 readtags(data.splitlines(), "localtags", "local")
301 301 except IOError:
302 302 pass
303 303
304 304 self.tagscache = {}
305 305 self._tagstypecache = {}
306 306 for k,nh in globaltags.items():
307 307 n = nh[0]
308 308 if n != nullid:
309 309 self.tagscache[k] = n
310 310 self._tagstypecache[k] = tagtypes[k]
311 311 self.tagscache['tip'] = self.changelog.tip()
312 312 return self.tagscache
313 313
314 314 def tagtype(self, tagname):
315 315 '''
316 316 return the type of the given tag. result can be:
317 317
318 318 'local' : a local tag
319 319 'global' : a global tag
320 320 None : tag does not exist
321 321 '''
322 322
323 323 self.tags()
324 324
325 325 return self._tagstypecache.get(tagname)
326 326
327 327 def _hgtagsnodes(self):
328 328 heads = self.heads()
329 329 heads.reverse()
330 330 last = {}
331 331 ret = []
332 332 for node in heads:
333 333 c = self.changectx(node)
334 334 rev = c.rev()
335 335 try:
336 336 fnode = c.filenode('.hgtags')
337 337 except revlog.LookupError:
338 338 continue
339 339 ret.append((rev, node, fnode))
340 340 if fnode in last:
341 341 ret[last[fnode]] = None
342 342 last[fnode] = len(ret) - 1
343 343 return [item for item in ret if item]
344 344
345 345 def tagslist(self):
346 346 '''return a list of tags ordered by revision'''
347 347 l = []
348 348 for t, n in self.tags().items():
349 349 try:
350 350 r = self.changelog.rev(n)
351 351 except:
352 352 r = -2 # sort to the beginning of the list if unknown
353 353 l.append((r, t, n))
354 354 l.sort()
355 355 return [(t, n) for r, t, n in l]
356 356
357 357 def nodetags(self, node):
358 358 '''return the tags associated with a node'''
359 359 if not self.nodetagscache:
360 360 self.nodetagscache = {}
361 361 for t, n in self.tags().items():
362 362 self.nodetagscache.setdefault(n, []).append(t)
363 363 return self.nodetagscache.get(node, [])
364 364
365 365 def _branchtags(self, partial, lrev):
366 366 tiprev = self.changelog.count() - 1
367 367 if lrev != tiprev:
368 368 self._updatebranchcache(partial, lrev+1, tiprev+1)
369 369 self._writebranchcache(partial, self.changelog.tip(), tiprev)
370 370
371 371 return partial
372 372
373 373 def branchtags(self):
374 374 tip = self.changelog.tip()
375 375 if self.branchcache is not None and self._branchcachetip == tip:
376 376 return self.branchcache
377 377
378 378 oldtip = self._branchcachetip
379 379 self._branchcachetip = tip
380 380 if self.branchcache is None:
381 381 self.branchcache = {} # avoid recursion in changectx
382 382 else:
383 383 self.branchcache.clear() # keep using the same dict
384 384 if oldtip is None or oldtip not in self.changelog.nodemap:
385 385 partial, last, lrev = self._readbranchcache()
386 386 else:
387 387 lrev = self.changelog.rev(oldtip)
388 388 partial = self._ubranchcache
389 389
390 390 self._branchtags(partial, lrev)
391 391
392 392 # the branch cache is stored on disk as UTF-8, but in the local
393 393 # charset internally
394 394 for k, v in partial.items():
395 395 self.branchcache[util.tolocal(k)] = v
396 396 self._ubranchcache = partial
397 397 return self.branchcache
398 398
399 399 def _readbranchcache(self):
400 400 partial = {}
401 401 try:
402 402 f = self.opener("branch.cache")
403 403 lines = f.read().split('\n')
404 404 f.close()
405 405 except (IOError, OSError):
406 406 return {}, nullid, nullrev
407 407
408 408 try:
409 409 last, lrev = lines.pop(0).split(" ", 1)
410 410 last, lrev = bin(last), int(lrev)
411 411 if not (lrev < self.changelog.count() and
412 412 self.changelog.node(lrev) == last): # sanity check
413 413 # invalidate the cache
414 414 raise ValueError('invalidating branch cache (tip differs)')
415 415 for l in lines:
416 416 if not l: continue
417 417 node, label = l.split(" ", 1)
418 418 partial[label.strip()] = bin(node)
419 419 except (KeyboardInterrupt, util.SignalInterrupt):
420 420 raise
421 421 except Exception, inst:
422 422 if self.ui.debugflag:
423 423 self.ui.warn(str(inst), '\n')
424 424 partial, last, lrev = {}, nullid, nullrev
425 425 return partial, last, lrev
426 426
427 427 def _writebranchcache(self, branches, tip, tiprev):
428 428 try:
429 429 f = self.opener("branch.cache", "w", atomictemp=True)
430 430 f.write("%s %s\n" % (hex(tip), tiprev))
431 431 for label, node in branches.iteritems():
432 432 f.write("%s %s\n" % (hex(node), label))
433 433 f.rename()
434 434 except (IOError, OSError):
435 435 pass
436 436
437 437 def _updatebranchcache(self, partial, start, end):
438 438 for r in xrange(start, end):
439 439 c = self.changectx(r)
440 440 b = c.branch()
441 441 partial[b] = c.node()
442 442
443 443 def lookup(self, key):
444 444 if key == '.':
445 445 return self.dirstate.parents()[0]
446 446 elif key == 'null':
447 447 return nullid
448 448 n = self.changelog._match(key)
449 449 if n:
450 450 return n
451 451 if key in self.tags():
452 452 return self.tags()[key]
453 453 if key in self.branchtags():
454 454 return self.branchtags()[key]
455 455 n = self.changelog._partialmatch(key)
456 456 if n:
457 457 return n
458 458 try:
459 459 if len(key) == 20:
460 460 key = hex(key)
461 461 except:
462 462 pass
463 463 raise repo.RepoError(_("unknown revision '%s'") % key)
464 464
465 465 def local(self):
466 466 return True
467 467
468 468 def join(self, f):
469 469 return os.path.join(self.path, f)
470 470
471 471 def sjoin(self, f):
472 472 f = self.encodefn(f)
473 473 return os.path.join(self.spath, f)
474 474
475 475 def wjoin(self, f):
476 476 return os.path.join(self.root, f)
477 477
478 478 def rjoin(self, f):
479 479 return os.path.join(self.root, util.pconvert(f))
480 480
481 481 def file(self, f):
482 482 if f[0] == '/':
483 483 f = f[1:]
484 484 return filelog.filelog(self.sopener, f)
485 485
486 486 def changectx(self, changeid):
487 487 if changeid == None:
488 488 return context.workingctx(self)
489 489 return context.changectx(self, changeid)
490 490
491 491 def parents(self, changeid=None):
492 492 '''get list of changectxs for parents of changeid'''
493 493 return self.changectx(changeid).parents()
494 494
495 495 def filectx(self, path, changeid=None, fileid=None):
496 496 """changeid can be a changeset revision, node, or tag.
497 497 fileid can be a file revision or node."""
498 498 return context.filectx(self, path, changeid, fileid)
499 499
500 500 def getcwd(self):
501 501 return self.dirstate.getcwd()
502 502
503 503 def pathto(self, f, cwd=None):
504 504 return self.dirstate.pathto(f, cwd)
505 505
506 506 def wfile(self, f, mode='r'):
507 507 return self.wopener(f, mode)
508 508
509 509 def _link(self, f):
510 510 return os.path.islink(self.wjoin(f))
511 511
512 512 def _filter(self, filter, filename, data):
513 513 if filter not in self.filterpats:
514 514 l = []
515 515 for pat, cmd in self.ui.configitems(filter):
516 516 mf = util.matcher(self.root, "", [pat], [], [])[1]
517 517 fn = None
518 518 params = cmd
519 519 for name, filterfn in self._datafilters.iteritems():
520 520 if cmd.startswith(name):
521 521 fn = filterfn
522 522 params = cmd[len(name):].lstrip()
523 523 break
524 524 if not fn:
525 525 fn = lambda s, c, **kwargs: util.filter(s, c)
526 526 # Wrap old filters not supporting keyword arguments
527 527 if not inspect.getargspec(fn)[2]:
528 528 oldfn = fn
529 529 fn = lambda s, c, **kwargs: oldfn(s, c)
530 530 l.append((mf, fn, params))
531 531 self.filterpats[filter] = l
532 532
533 533 for mf, fn, cmd in self.filterpats[filter]:
534 534 if mf(filename):
535 535 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
536 536 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
537 537 break
538 538
539 539 return data
540 540
541 541 def adddatafilter(self, name, filter):
542 542 self._datafilters[name] = filter
543 543
544 544 def wread(self, filename):
545 545 if self._link(filename):
546 546 data = os.readlink(self.wjoin(filename))
547 547 else:
548 548 data = self.wopener(filename, 'r').read()
549 549 return self._filter("encode", filename, data)
550 550
551 551 def wwrite(self, filename, data, flags):
552 552 data = self._filter("decode", filename, data)
553 553 try:
554 554 os.unlink(self.wjoin(filename))
555 555 except OSError:
556 556 pass
557 557 self.wopener(filename, 'w').write(data)
558 558 util.set_flags(self.wjoin(filename), flags)
559 559
560 560 def wwritedata(self, filename, data):
561 561 return self._filter("decode", filename, data)
562 562
563 563 def transaction(self):
564 564 if self._transref and self._transref():
565 565 return self._transref().nest()
566 566
567 567 # abort here if the journal already exists
568 568 if os.path.exists(self.sjoin("journal")):
569 569 raise repo.RepoError(_("journal already exists - run hg recover"))
570 570
571 571 # save dirstate for rollback
572 572 try:
573 573 ds = self.opener("dirstate").read()
574 574 except IOError:
575 575 ds = ""
576 576 self.opener("journal.dirstate", "w").write(ds)
577 577 self.opener("journal.branch", "w").write(self.dirstate.branch())
578 578
579 579 renames = [(self.sjoin("journal"), self.sjoin("undo")),
580 580 (self.join("journal.dirstate"), self.join("undo.dirstate")),
581 581 (self.join("journal.branch"), self.join("undo.branch"))]
582 582 tr = transaction.transaction(self.ui.warn, self.sopener,
583 583 self.sjoin("journal"),
584 584 aftertrans(renames),
585 585 self._createmode)
586 586 self._transref = weakref.ref(tr)
587 587 return tr
588 588
589 589 def recover(self):
590 590 l = self.lock()
591 591 try:
592 592 if os.path.exists(self.sjoin("journal")):
593 593 self.ui.status(_("rolling back interrupted transaction\n"))
594 594 transaction.rollback(self.sopener, self.sjoin("journal"))
595 595 self.invalidate()
596 596 return True
597 597 else:
598 598 self.ui.warn(_("no interrupted transaction available\n"))
599 599 return False
600 600 finally:
601 601 del l
602 602
603 603 def rollback(self):
604 604 wlock = lock = None
605 605 try:
606 606 wlock = self.wlock()
607 607 lock = self.lock()
608 608 if os.path.exists(self.sjoin("undo")):
609 609 self.ui.status(_("rolling back last transaction\n"))
610 610 transaction.rollback(self.sopener, self.sjoin("undo"))
611 611 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
612 612 try:
613 613 branch = self.opener("undo.branch").read()
614 614 self.dirstate.setbranch(branch)
615 615 except IOError:
616 616 self.ui.warn(_("Named branch could not be reset, "
617 617 "current branch still is: %s\n")
618 618 % util.tolocal(self.dirstate.branch()))
619 619 self.invalidate()
620 620 self.dirstate.invalidate()
621 621 else:
622 622 self.ui.warn(_("no rollback information available\n"))
623 623 finally:
624 624 del lock, wlock
625 625
626 626 def invalidate(self):
627 627 for a in "changelog manifest".split():
628 628 if a in self.__dict__:
629 629 delattr(self, a)
630 630 self.tagscache = None
631 631 self._tagstypecache = None
632 632 self.nodetagscache = None
633 633 self.branchcache = None
634 634 self._ubranchcache = None
635 635 self._branchcachetip = None
636 636
637 637 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
638 638 try:
639 639 l = lock.lock(lockname, 0, releasefn, desc=desc)
640 640 except lock.LockHeld, inst:
641 641 if not wait:
642 642 raise
643 643 self.ui.warn(_("waiting for lock on %s held by %r\n") %
644 644 (desc, inst.locker))
645 645 # default to 600 seconds timeout
646 646 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
647 647 releasefn, desc=desc)
648 648 if acquirefn:
649 649 acquirefn()
650 650 return l
651 651
652 652 def lock(self, wait=True):
653 653 if self._lockref and self._lockref():
654 654 return self._lockref()
655 655
656 656 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
657 657 _('repository %s') % self.origroot)
658 658 self._lockref = weakref.ref(l)
659 659 return l
660 660
661 661 def wlock(self, wait=True):
662 662 if self._wlockref and self._wlockref():
663 663 return self._wlockref()
664 664
665 665 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
666 666 self.dirstate.invalidate, _('working directory of %s') %
667 667 self.origroot)
668 668 self._wlockref = weakref.ref(l)
669 669 return l
670 670
671 671 def filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
672 672 """
673 673 commit an individual file as part of a larger transaction
674 674 """
675 675
676 676 fn = fctx.path()
677 677 t = fctx.data()
678 678 fl = self.file(fn)
679 679 fp1 = manifest1.get(fn, nullid)
680 680 fp2 = manifest2.get(fn, nullid)
681 681
682 682 meta = {}
683 683 cp = fctx.renamed()
684 684 if cp and cp[0] != fn:
685 685 cp = cp[0]
686 686 # Mark the new revision of this file as a copy of another
687 687 # file. This copy data will effectively act as a parent
688 688 # of this new revision. If this is a merge, the first
689 689 # parent will be the nullid (meaning "look up the copy data")
690 690 # and the second one will be the other parent. For example:
691 691 #
692 692 # 0 --- 1 --- 3 rev1 changes file foo
693 693 # \ / rev2 renames foo to bar and changes it
694 694 # \- 2 -/ rev3 should have bar with all changes and
695 695 # should record that bar descends from
696 696 # bar in rev2 and foo in rev1
697 697 #
698 698 # this allows this merge to succeed:
699 699 #
700 700 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
701 701 # \ / merging rev3 and rev4 should use bar@rev2
702 702 # \- 2 --- 4 as the merge base
703 703 #
704 704 meta["copy"] = cp
705 705 if not manifest2: # not a branch merge
706 706 meta["copyrev"] = hex(manifest1[cp])
707 707 fp2 = nullid
708 708 elif fp2 != nullid: # copied on remote side
709 709 meta["copyrev"] = hex(manifest1[cp])
710 710 elif fp1 != nullid: # copied on local side, reversed
711 711 meta["copyrev"] = hex(manifest2[cp])
712 712 fp2 = fp1
713 713 elif cp in manifest2: # directory rename on local side
714 714 meta["copyrev"] = hex(manifest2[cp])
715 715 else: # directory rename on remote side
716 716 meta["copyrev"] = hex(manifest1[cp])
717 717 self.ui.debug(_(" %s: copy %s:%s\n") %
718 718 (fn, cp, meta["copyrev"]))
719 719 fp1 = nullid
720 720 elif fp2 != nullid:
721 721 # is one parent an ancestor of the other?
722 722 fpa = fl.ancestor(fp1, fp2)
723 723 if fpa == fp1:
724 724 fp1, fp2 = fp2, nullid
725 725 elif fpa == fp2:
726 726 fp2 = nullid
727 727
728 728 # is the file unmodified from the parent? report existing entry
729 729 if fp2 == nullid and not fl.cmp(fp1, t) and not meta:
730 730 return fp1
731 731
732 732 changelist.append(fn)
733 733 return fl.add(t, meta, tr, linkrev, fp1, fp2)
734 734
735 735 def rawcommit(self, files, text, user, date, p1=None, p2=None, extra={}):
736 736 if p1 is None:
737 737 p1, p2 = self.dirstate.parents()
738 738 return self.commit(files=files, text=text, user=user, date=date,
739 739 p1=p1, p2=p2, extra=extra, empty_ok=True)
740 740
741 741 def commit(self, files=None, text="", user=None, date=None,
742 742 match=None, force=False, force_editor=False,
743 743 p1=None, p2=None, extra={}, empty_ok=False):
744 744 wlock = lock = None
745 745 if files:
746 746 files = util.unique(files)
747 747 try:
748 748 wlock = self.wlock()
749 749 lock = self.lock()
750 750 use_dirstate = (p1 is None) # not rawcommit
751 751
752 752 if use_dirstate:
753 753 p1, p2 = self.dirstate.parents()
754 754 update_dirstate = True
755 755
756 756 if (not force and p2 != nullid and
757 757 (match and (match.files() or match.anypats()))):
758 758 raise util.Abort(_('cannot partially commit a merge '
759 759 '(do not specify files or patterns)'))
760 760
761 761 if files:
762 762 modified, removed = [], []
763 763 for f in files:
764 764 s = self.dirstate[f]
765 765 if s in 'nma':
766 766 modified.append(f)
767 767 elif s == 'r':
768 768 removed.append(f)
769 769 else:
770 770 self.ui.warn(_("%s not tracked!\n") % f)
771 771 changes = [modified, [], removed, [], []]
772 772 else:
773 773 changes = self.status(match=match)
774 774 else:
775 775 p1, p2 = p1, p2 or nullid
776 776 update_dirstate = (self.dirstate.parents()[0] == p1)
777 777 changes = [files, [], [], [], []]
778 778
779 779 wctx = context.workingctx(self, (p1, p2), text, user, date,
780 780 extra, changes)
781 781 return self._commitctx(wctx, force, force_editor, empty_ok,
782 782 use_dirstate, update_dirstate)
783 783 finally:
784 784 del lock, wlock
785 785
786 786 def commitctx(self, ctx):
787 787 wlock = lock = None
788 788 try:
789 789 wlock = self.wlock()
790 790 lock = self.lock()
791 791 return self._commitctx(ctx, force=True, force_editor=False,
792 792 empty_ok=True, use_dirstate=False,
793 793 update_dirstate=False)
794 794 finally:
795 795 del lock, wlock
796 796
797 797 def _commitctx(self, wctx, force=False, force_editor=False, empty_ok=False,
798 798 use_dirstate=True, update_dirstate=True):
799 799 tr = None
800 800 valid = 0 # don't save the dirstate if this isn't set
801 801 try:
802 802 commit = wctx.modified() + wctx.added()
803 803 remove = wctx.removed()
804 804 extra = wctx.extra().copy()
805 805 branchname = extra['branch']
806 806 user = wctx.user()
807 807 text = wctx.description()
808 808
809 809 p1, p2 = [p.node() for p in wctx.parents()]
810 810 c1 = self.changelog.read(p1)
811 811 c2 = self.changelog.read(p2)
812 812 m1 = self.manifest.read(c1[0]).copy()
813 813 m2 = self.manifest.read(c2[0])
814 814
815 815 if use_dirstate:
816 816 oldname = c1[5].get("branch") # stored in UTF-8
817 817 if (not commit and not remove and not force and p2 == nullid
818 818 and branchname == oldname):
819 819 self.ui.status(_("nothing changed\n"))
820 820 return None
821 821
822 822 xp1 = hex(p1)
823 823 if p2 == nullid: xp2 = ''
824 824 else: xp2 = hex(p2)
825 825
826 826 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
827 827
828 828 tr = self.transaction()
829 829 trp = weakref.proxy(tr)
830 830
831 831 # check in files
832 832 new = {}
833 833 changed = []
834 834 linkrev = self.changelog.count()
835 835 commit.sort()
836 836 for f in commit:
837 837 self.ui.note(f + "\n")
838 838 try:
839 839 fctx = wctx.filectx(f)
840 newflags = fctx.flags()
840 841 new[f] = self.filecommit(fctx, m1, m2, linkrev, trp, changed)
841 new_exec = fctx.isexec()
842 new_link = fctx.islink()
843 842 if ((not changed or changed[-1] != f) and
844 843 m2.get(f) != new[f]):
845 844 # mention the file in the changelog if some
846 845 # flag changed, even if there was no content
847 846 # change.
848 old_exec = m1.execf(f)
849 old_link = m1.linkf(f)
850 if old_exec != new_exec or old_link != new_link:
847 if m1.flags(f) != newflags:
851 848 changed.append(f)
852 m1.set(f, new_exec, new_link)
849 m1.set(f, newflags)
853 850 if use_dirstate:
854 851 self.dirstate.normal(f)
855 852
856 853 except (OSError, IOError):
857 854 if use_dirstate:
858 855 self.ui.warn(_("trouble committing %s!\n") % f)
859 856 raise
860 857 else:
861 858 remove.append(f)
862 859
863 860 # update manifest
864 861 m1.update(new)
865 862 remove.sort()
866 863 removed = []
867 864
868 865 for f in remove:
869 866 if f in m1:
870 867 del m1[f]
871 868 removed.append(f)
872 869 elif f in m2:
873 870 removed.append(f)
874 871 mn = self.manifest.add(m1, trp, linkrev, c1[0], c2[0],
875 872 (new, removed))
876 873
877 874 # add changeset
878 875 if (not empty_ok and not text) or force_editor:
879 876 edittext = []
880 877 if text:
881 878 edittext.append(text)
882 879 edittext.append("")
883 880 edittext.append(_("HG: Enter commit message."
884 881 " Lines beginning with 'HG:' are removed."))
885 882 edittext.append("HG: --")
886 883 edittext.append("HG: user: %s" % user)
887 884 if p2 != nullid:
888 885 edittext.append("HG: branch merge")
889 886 if branchname:
890 887 edittext.append("HG: branch '%s'" % util.tolocal(branchname))
891 888 edittext.extend(["HG: changed %s" % f for f in changed])
892 889 edittext.extend(["HG: removed %s" % f for f in removed])
893 890 if not changed and not remove:
894 891 edittext.append("HG: no files changed")
895 892 edittext.append("")
896 893 # run editor in the repository root
897 894 olddir = os.getcwd()
898 895 os.chdir(self.root)
899 896 text = self.ui.edit("\n".join(edittext), user)
900 897 os.chdir(olddir)
901 898
902 899 lines = [line.rstrip() for line in text.rstrip().splitlines()]
903 900 while lines and not lines[0]:
904 901 del lines[0]
905 902 if not lines and use_dirstate:
906 903 raise util.Abort(_("empty commit message"))
907 904 text = '\n'.join(lines)
908 905
909 906 n = self.changelog.add(mn, changed + removed, text, trp, p1, p2,
910 907 user, wctx.date(), extra)
911 908 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
912 909 parent2=xp2)
913 910 tr.close()
914 911
915 912 if self.branchcache:
916 913 self.branchtags()
917 914
918 915 if use_dirstate or update_dirstate:
919 916 self.dirstate.setparents(n)
920 917 if use_dirstate:
921 918 for f in removed:
922 919 self.dirstate.forget(f)
923 920 valid = 1 # our dirstate updates are complete
924 921
925 922 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
926 923 return n
927 924 finally:
928 925 if not valid: # don't save our updated dirstate
929 926 self.dirstate.invalidate()
930 927 del tr
931 928
932 929 def walk(self, match, node=None):
933 930 '''
934 931 walk recursively through the directory tree or a given
935 932 changeset, finding all files matched by the match
936 933 function
937 934 '''
938 935
939 936 if node:
940 937 fdict = dict.fromkeys(match.files())
941 938 # for dirstate.walk, files=['.'] means "walk the whole tree".
942 939 # follow that here, too
943 940 fdict.pop('.', None)
944 941 mdict = self.manifest.read(self.changelog.read(node)[0])
945 942 mfiles = mdict.keys()
946 943 mfiles.sort()
947 944 for fn in mfiles:
948 945 for ffn in fdict:
949 946 # match if the file is the exact name or a directory
950 947 if ffn == fn or fn.startswith("%s/" % ffn):
951 948 del fdict[ffn]
952 949 break
953 950 if match(fn):
954 951 yield fn
955 952 ffiles = fdict.keys()
956 953 ffiles.sort()
957 954 for fn in ffiles:
958 955 if match.bad(fn, 'No such file in rev ' + short(node)) \
959 956 and match(fn):
960 957 yield fn
961 958 else:
962 959 for fn in self.dirstate.walk(match):
963 960 yield fn
964 961
965 962 def status(self, node1=None, node2=None, match=None,
966 963 list_ignored=False, list_clean=False, list_unknown=True):
967 964 """return status of files between two nodes or node and working directory
968 965
969 966 If node1 is None, use the first dirstate parent instead.
970 967 If node2 is None, compare node1 with working directory.
971 968 """
972 969
973 970 def fcmp(fn, getnode):
974 971 t1 = self.wread(fn)
975 972 return self.file(fn).cmp(getnode(fn), t1)
976 973
977 974 def mfmatches(node):
978 975 change = self.changelog.read(node)
979 976 mf = self.manifest.read(change[0]).copy()
980 977 for fn in mf.keys():
981 978 if not match(fn):
982 979 del mf[fn]
983 980 return mf
984 981
985 982 if not match:
986 983 match = match_.always(self.root, self.getcwd())
987 984
988 985 modified, added, removed, deleted, unknown = [], [], [], [], []
989 986 ignored, clean = [], []
990 987
991 988 compareworking = False
992 989 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
993 990 compareworking = True
994 991
995 992 if not compareworking:
996 993 # read the manifest from node1 before the manifest from node2,
997 994 # so that we'll hit the manifest cache if we're going through
998 995 # all the revisions in parent->child order.
999 996 mf1 = mfmatches(node1)
1000 997
1001 998 # are we comparing the working directory?
1002 999 if not node2:
1003 1000 (lookup, modified, added, removed, deleted, unknown,
1004 1001 ignored, clean) = self.dirstate.status(match, list_ignored,
1005 1002 list_clean, list_unknown)
1006 1003 # are we comparing working dir against its parent?
1007 1004 if compareworking:
1008 1005 if lookup:
1009 1006 fixup = []
1010 1007 # do a full compare of any files that might have changed
1011 1008 ctx = self.changectx('')
1012 mexec = lambda f: 'x' in ctx.fileflags(f)
1013 mlink = lambda f: 'l' in ctx.fileflags(f)
1014 is_exec = util.execfunc(self.root, mexec)
1015 is_link = util.linkfunc(self.root, mlink)
1016 def flags(f):
1017 return is_link(f) and 'l' or is_exec(f) and 'x' or ''
1009 ff = self.dirstate.flagfunc(ctx.flags)
1018 1010 for f in lookup:
1019 if (f not in ctx or flags(f) != ctx.fileflags(f)
1011 if (f not in ctx or ff(f) != ctx.flags(f)
1020 1012 or ctx[f].cmp(self.wread(f))):
1021 1013 modified.append(f)
1022 1014 else:
1023 1015 fixup.append(f)
1024 1016 if list_clean:
1025 1017 clean.append(f)
1026 1018
1027 1019 # update dirstate for files that are actually clean
1028 1020 if fixup:
1029 1021 wlock = None
1030 1022 try:
1031 1023 try:
1032 1024 wlock = self.wlock(False)
1033 1025 except lock.LockException:
1034 1026 pass
1035 1027 if wlock:
1036 1028 for f in fixup:
1037 1029 self.dirstate.normal(f)
1038 1030 finally:
1039 1031 del wlock
1040 1032 else:
1041 1033 # we are comparing working dir against non-parent
1042 1034 # generate a pseudo-manifest for the working dir
1043 1035 # XXX: create it in dirstate.py ?
1044 1036 mf2 = mfmatches(self.dirstate.parents()[0])
1045 is_exec = util.execfunc(self.root, mf2.execf)
1046 is_link = util.linkfunc(self.root, mf2.linkf)
1037 ff = self.dirstate.flagfunc(mf2.flags)
1047 1038 for f in lookup + modified + added:
1048 1039 mf2[f] = ""
1049 mf2.set(f, is_exec(f), is_link(f))
1040 mf2.set(f, ff(f))
1050 1041 for f in removed:
1051 1042 if f in mf2:
1052 1043 del mf2[f]
1053 1044
1054 1045 else:
1055 1046 # we are comparing two revisions
1056 1047 mf2 = mfmatches(node2)
1057 1048
1058 1049 if not compareworking:
1059 1050 # flush lists from dirstate before comparing manifests
1060 1051 modified, added, clean = [], [], []
1061 1052
1062 1053 # make sure to sort the files so we talk to the disk in a
1063 1054 # reasonable order
1064 1055 mf2keys = mf2.keys()
1065 1056 mf2keys.sort()
1066 1057 getnode = lambda fn: mf1.get(fn, nullid)
1067 1058 for fn in mf2keys:
1068 1059 if fn in mf1:
1069 1060 if (mf1.flags(fn) != mf2.flags(fn) or
1070 1061 (mf1[fn] != mf2[fn] and
1071 1062 (mf2[fn] != "" or fcmp(fn, getnode)))):
1072 1063 modified.append(fn)
1073 1064 elif list_clean:
1074 1065 clean.append(fn)
1075 1066 del mf1[fn]
1076 1067 else:
1077 1068 added.append(fn)
1078 1069
1079 1070 removed = mf1.keys()
1080 1071
1081 1072 # sort and return results:
1082 1073 for l in modified, added, removed, deleted, unknown, ignored, clean:
1083 1074 l.sort()
1084 1075 return (modified, added, removed, deleted, unknown, ignored, clean)
1085 1076
1086 1077 def add(self, list):
1087 1078 wlock = self.wlock()
1088 1079 try:
1089 1080 rejected = []
1090 1081 for f in list:
1091 1082 p = self.wjoin(f)
1092 1083 try:
1093 1084 st = os.lstat(p)
1094 1085 except:
1095 1086 self.ui.warn(_("%s does not exist!\n") % f)
1096 1087 rejected.append(f)
1097 1088 continue
1098 1089 if st.st_size > 10000000:
1099 1090 self.ui.warn(_("%s: files over 10MB may cause memory and"
1100 1091 " performance problems\n"
1101 1092 "(use 'hg revert %s' to unadd the file)\n")
1102 1093 % (f, f))
1103 1094 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1104 1095 self.ui.warn(_("%s not added: only files and symlinks "
1105 1096 "supported currently\n") % f)
1106 1097 rejected.append(p)
1107 1098 elif self.dirstate[f] in 'amn':
1108 1099 self.ui.warn(_("%s already tracked!\n") % f)
1109 1100 elif self.dirstate[f] == 'r':
1110 1101 self.dirstate.normallookup(f)
1111 1102 else:
1112 1103 self.dirstate.add(f)
1113 1104 return rejected
1114 1105 finally:
1115 1106 del wlock
1116 1107
1117 1108 def forget(self, list):
1118 1109 wlock = self.wlock()
1119 1110 try:
1120 1111 for f in list:
1121 1112 if self.dirstate[f] != 'a':
1122 1113 self.ui.warn(_("%s not added!\n") % f)
1123 1114 else:
1124 1115 self.dirstate.forget(f)
1125 1116 finally:
1126 1117 del wlock
1127 1118
1128 1119 def remove(self, list, unlink=False):
1129 1120 wlock = None
1130 1121 try:
1131 1122 if unlink:
1132 1123 for f in list:
1133 1124 try:
1134 1125 util.unlink(self.wjoin(f))
1135 1126 except OSError, inst:
1136 1127 if inst.errno != errno.ENOENT:
1137 1128 raise
1138 1129 wlock = self.wlock()
1139 1130 for f in list:
1140 1131 if unlink and os.path.exists(self.wjoin(f)):
1141 1132 self.ui.warn(_("%s still exists!\n") % f)
1142 1133 elif self.dirstate[f] == 'a':
1143 1134 self.dirstate.forget(f)
1144 1135 elif f not in self.dirstate:
1145 1136 self.ui.warn(_("%s not tracked!\n") % f)
1146 1137 else:
1147 1138 self.dirstate.remove(f)
1148 1139 finally:
1149 1140 del wlock
1150 1141
1151 1142 def undelete(self, list):
1152 1143 wlock = None
1153 1144 try:
1154 1145 manifests = [self.manifest.read(self.changelog.read(p)[0])
1155 1146 for p in self.dirstate.parents() if p != nullid]
1156 1147 wlock = self.wlock()
1157 1148 for f in list:
1158 1149 if self.dirstate[f] != 'r':
1159 1150 self.ui.warn("%s not removed!\n" % f)
1160 1151 else:
1161 1152 m = f in manifests[0] and manifests[0] or manifests[1]
1162 1153 t = self.file(f).read(m[f])
1163 1154 self.wwrite(f, t, m.flags(f))
1164 1155 self.dirstate.normal(f)
1165 1156 finally:
1166 1157 del wlock
1167 1158
1168 1159 def copy(self, source, dest):
1169 1160 wlock = None
1170 1161 try:
1171 1162 p = self.wjoin(dest)
1172 1163 if not (os.path.exists(p) or os.path.islink(p)):
1173 1164 self.ui.warn(_("%s does not exist!\n") % dest)
1174 1165 elif not (os.path.isfile(p) or os.path.islink(p)):
1175 1166 self.ui.warn(_("copy failed: %s is not a file or a "
1176 1167 "symbolic link\n") % dest)
1177 1168 else:
1178 1169 wlock = self.wlock()
1179 1170 if dest not in self.dirstate:
1180 1171 self.dirstate.add(dest)
1181 1172 self.dirstate.copy(source, dest)
1182 1173 finally:
1183 1174 del wlock
1184 1175
1185 1176 def heads(self, start=None):
1186 1177 heads = self.changelog.heads(start)
1187 1178 # sort the output in rev descending order
1188 1179 heads = [(-self.changelog.rev(h), h) for h in heads]
1189 1180 heads.sort()
1190 1181 return [n for (r, n) in heads]
1191 1182
1192 1183 def branchheads(self, branch=None, start=None):
1193 1184 branch = branch is None and self.changectx(None).branch() or branch
1194 1185 branches = self.branchtags()
1195 1186 if branch not in branches:
1196 1187 return []
1197 1188 # The basic algorithm is this:
1198 1189 #
1199 1190 # Start from the branch tip since there are no later revisions that can
1200 1191 # possibly be in this branch, and the tip is a guaranteed head.
1201 1192 #
1202 1193 # Remember the tip's parents as the first ancestors, since these by
1203 1194 # definition are not heads.
1204 1195 #
1205 1196 # Step backwards from the brach tip through all the revisions. We are
1206 1197 # guaranteed by the rules of Mercurial that we will now be visiting the
1207 1198 # nodes in reverse topological order (children before parents).
1208 1199 #
1209 1200 # If a revision is one of the ancestors of a head then we can toss it
1210 1201 # out of the ancestors set (we've already found it and won't be
1211 1202 # visiting it again) and put its parents in the ancestors set.
1212 1203 #
1213 1204 # Otherwise, if a revision is in the branch it's another head, since it
1214 1205 # wasn't in the ancestor list of an existing head. So add it to the
1215 1206 # head list, and add its parents to the ancestor list.
1216 1207 #
1217 1208 # If it is not in the branch ignore it.
1218 1209 #
1219 1210 # Once we have a list of heads, use nodesbetween to filter out all the
1220 1211 # heads that cannot be reached from startrev. There may be a more
1221 1212 # efficient way to do this as part of the previous algorithm.
1222 1213
1223 1214 set = util.set
1224 1215 heads = [self.changelog.rev(branches[branch])]
1225 1216 # Don't care if ancestors contains nullrev or not.
1226 1217 ancestors = set(self.changelog.parentrevs(heads[0]))
1227 1218 for rev in xrange(heads[0] - 1, nullrev, -1):
1228 1219 if rev in ancestors:
1229 1220 ancestors.update(self.changelog.parentrevs(rev))
1230 1221 ancestors.remove(rev)
1231 1222 elif self.changectx(rev).branch() == branch:
1232 1223 heads.append(rev)
1233 1224 ancestors.update(self.changelog.parentrevs(rev))
1234 1225 heads = [self.changelog.node(rev) for rev in heads]
1235 1226 if start is not None:
1236 1227 heads = self.changelog.nodesbetween([start], heads)[2]
1237 1228 return heads
1238 1229
1239 1230 def branches(self, nodes):
1240 1231 if not nodes:
1241 1232 nodes = [self.changelog.tip()]
1242 1233 b = []
1243 1234 for n in nodes:
1244 1235 t = n
1245 1236 while 1:
1246 1237 p = self.changelog.parents(n)
1247 1238 if p[1] != nullid or p[0] == nullid:
1248 1239 b.append((t, n, p[0], p[1]))
1249 1240 break
1250 1241 n = p[0]
1251 1242 return b
1252 1243
1253 1244 def between(self, pairs):
1254 1245 r = []
1255 1246
1256 1247 for top, bottom in pairs:
1257 1248 n, l, i = top, [], 0
1258 1249 f = 1
1259 1250
1260 1251 while n != bottom:
1261 1252 p = self.changelog.parents(n)[0]
1262 1253 if i == f:
1263 1254 l.append(n)
1264 1255 f = f * 2
1265 1256 n = p
1266 1257 i += 1
1267 1258
1268 1259 r.append(l)
1269 1260
1270 1261 return r
1271 1262
1272 1263 def findincoming(self, remote, base=None, heads=None, force=False):
1273 1264 """Return list of roots of the subsets of missing nodes from remote
1274 1265
1275 1266 If base dict is specified, assume that these nodes and their parents
1276 1267 exist on the remote side and that no child of a node of base exists
1277 1268 in both remote and self.
1278 1269 Furthermore base will be updated to include the nodes that exists
1279 1270 in self and remote but no children exists in self and remote.
1280 1271 If a list of heads is specified, return only nodes which are heads
1281 1272 or ancestors of these heads.
1282 1273
1283 1274 All the ancestors of base are in self and in remote.
1284 1275 All the descendants of the list returned are missing in self.
1285 1276 (and so we know that the rest of the nodes are missing in remote, see
1286 1277 outgoing)
1287 1278 """
1288 1279 m = self.changelog.nodemap
1289 1280 search = []
1290 1281 fetch = {}
1291 1282 seen = {}
1292 1283 seenbranch = {}
1293 1284 if base == None:
1294 1285 base = {}
1295 1286
1296 1287 if not heads:
1297 1288 heads = remote.heads()
1298 1289
1299 1290 if self.changelog.tip() == nullid:
1300 1291 base[nullid] = 1
1301 1292 if heads != [nullid]:
1302 1293 return [nullid]
1303 1294 return []
1304 1295
1305 1296 # assume we're closer to the tip than the root
1306 1297 # and start by examining the heads
1307 1298 self.ui.status(_("searching for changes\n"))
1308 1299
1309 1300 unknown = []
1310 1301 for h in heads:
1311 1302 if h not in m:
1312 1303 unknown.append(h)
1313 1304 else:
1314 1305 base[h] = 1
1315 1306
1316 1307 if not unknown:
1317 1308 return []
1318 1309
1319 1310 req = dict.fromkeys(unknown)
1320 1311 reqcnt = 0
1321 1312
1322 1313 # search through remote branches
1323 1314 # a 'branch' here is a linear segment of history, with four parts:
1324 1315 # head, root, first parent, second parent
1325 1316 # (a branch always has two parents (or none) by definition)
1326 1317 unknown = remote.branches(unknown)
1327 1318 while unknown:
1328 1319 r = []
1329 1320 while unknown:
1330 1321 n = unknown.pop(0)
1331 1322 if n[0] in seen:
1332 1323 continue
1333 1324
1334 1325 self.ui.debug(_("examining %s:%s\n")
1335 1326 % (short(n[0]), short(n[1])))
1336 1327 if n[0] == nullid: # found the end of the branch
1337 1328 pass
1338 1329 elif n in seenbranch:
1339 1330 self.ui.debug(_("branch already found\n"))
1340 1331 continue
1341 1332 elif n[1] and n[1] in m: # do we know the base?
1342 1333 self.ui.debug(_("found incomplete branch %s:%s\n")
1343 1334 % (short(n[0]), short(n[1])))
1344 1335 search.append(n) # schedule branch range for scanning
1345 1336 seenbranch[n] = 1
1346 1337 else:
1347 1338 if n[1] not in seen and n[1] not in fetch:
1348 1339 if n[2] in m and n[3] in m:
1349 1340 self.ui.debug(_("found new changeset %s\n") %
1350 1341 short(n[1]))
1351 1342 fetch[n[1]] = 1 # earliest unknown
1352 1343 for p in n[2:4]:
1353 1344 if p in m:
1354 1345 base[p] = 1 # latest known
1355 1346
1356 1347 for p in n[2:4]:
1357 1348 if p not in req and p not in m:
1358 1349 r.append(p)
1359 1350 req[p] = 1
1360 1351 seen[n[0]] = 1
1361 1352
1362 1353 if r:
1363 1354 reqcnt += 1
1364 1355 self.ui.debug(_("request %d: %s\n") %
1365 1356 (reqcnt, " ".join(map(short, r))))
1366 1357 for p in xrange(0, len(r), 10):
1367 1358 for b in remote.branches(r[p:p+10]):
1368 1359 self.ui.debug(_("received %s:%s\n") %
1369 1360 (short(b[0]), short(b[1])))
1370 1361 unknown.append(b)
1371 1362
1372 1363 # do binary search on the branches we found
1373 1364 while search:
1374 1365 n = search.pop(0)
1375 1366 reqcnt += 1
1376 1367 l = remote.between([(n[0], n[1])])[0]
1377 1368 l.append(n[1])
1378 1369 p = n[0]
1379 1370 f = 1
1380 1371 for i in l:
1381 1372 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1382 1373 if i in m:
1383 1374 if f <= 2:
1384 1375 self.ui.debug(_("found new branch changeset %s\n") %
1385 1376 short(p))
1386 1377 fetch[p] = 1
1387 1378 base[i] = 1
1388 1379 else:
1389 1380 self.ui.debug(_("narrowed branch search to %s:%s\n")
1390 1381 % (short(p), short(i)))
1391 1382 search.append((p, i))
1392 1383 break
1393 1384 p, f = i, f * 2
1394 1385
1395 1386 # sanity check our fetch list
1396 1387 for f in fetch.keys():
1397 1388 if f in m:
1398 1389 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1399 1390
1400 1391 if base.keys() == [nullid]:
1401 1392 if force:
1402 1393 self.ui.warn(_("warning: repository is unrelated\n"))
1403 1394 else:
1404 1395 raise util.Abort(_("repository is unrelated"))
1405 1396
1406 1397 self.ui.debug(_("found new changesets starting at ") +
1407 1398 " ".join([short(f) for f in fetch]) + "\n")
1408 1399
1409 1400 self.ui.debug(_("%d total queries\n") % reqcnt)
1410 1401
1411 1402 return fetch.keys()
1412 1403
1413 1404 def findoutgoing(self, remote, base=None, heads=None, force=False):
1414 1405 """Return list of nodes that are roots of subsets not in remote
1415 1406
1416 1407 If base dict is specified, assume that these nodes and their parents
1417 1408 exist on the remote side.
1418 1409 If a list of heads is specified, return only nodes which are heads
1419 1410 or ancestors of these heads, and return a second element which
1420 1411 contains all remote heads which get new children.
1421 1412 """
1422 1413 if base == None:
1423 1414 base = {}
1424 1415 self.findincoming(remote, base, heads, force=force)
1425 1416
1426 1417 self.ui.debug(_("common changesets up to ")
1427 1418 + " ".join(map(short, base.keys())) + "\n")
1428 1419
1429 1420 remain = dict.fromkeys(self.changelog.nodemap)
1430 1421
1431 1422 # prune everything remote has from the tree
1432 1423 del remain[nullid]
1433 1424 remove = base.keys()
1434 1425 while remove:
1435 1426 n = remove.pop(0)
1436 1427 if n in remain:
1437 1428 del remain[n]
1438 1429 for p in self.changelog.parents(n):
1439 1430 remove.append(p)
1440 1431
1441 1432 # find every node whose parents have been pruned
1442 1433 subset = []
1443 1434 # find every remote head that will get new children
1444 1435 updated_heads = {}
1445 1436 for n in remain:
1446 1437 p1, p2 = self.changelog.parents(n)
1447 1438 if p1 not in remain and p2 not in remain:
1448 1439 subset.append(n)
1449 1440 if heads:
1450 1441 if p1 in heads:
1451 1442 updated_heads[p1] = True
1452 1443 if p2 in heads:
1453 1444 updated_heads[p2] = True
1454 1445
1455 1446 # this is the set of all roots we have to push
1456 1447 if heads:
1457 1448 return subset, updated_heads.keys()
1458 1449 else:
1459 1450 return subset
1460 1451
1461 1452 def pull(self, remote, heads=None, force=False):
1462 1453 lock = self.lock()
1463 1454 try:
1464 1455 fetch = self.findincoming(remote, heads=heads, force=force)
1465 1456 if fetch == [nullid]:
1466 1457 self.ui.status(_("requesting all changes\n"))
1467 1458
1468 1459 if not fetch:
1469 1460 self.ui.status(_("no changes found\n"))
1470 1461 return 0
1471 1462
1472 1463 if heads is None:
1473 1464 cg = remote.changegroup(fetch, 'pull')
1474 1465 else:
1475 1466 if 'changegroupsubset' not in remote.capabilities:
1476 1467 raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
1477 1468 cg = remote.changegroupsubset(fetch, heads, 'pull')
1478 1469 return self.addchangegroup(cg, 'pull', remote.url())
1479 1470 finally:
1480 1471 del lock
1481 1472
1482 1473 def push(self, remote, force=False, revs=None):
1483 1474 # there are two ways to push to remote repo:
1484 1475 #
1485 1476 # addchangegroup assumes local user can lock remote
1486 1477 # repo (local filesystem, old ssh servers).
1487 1478 #
1488 1479 # unbundle assumes local user cannot lock remote repo (new ssh
1489 1480 # servers, http servers).
1490 1481
1491 1482 if remote.capable('unbundle'):
1492 1483 return self.push_unbundle(remote, force, revs)
1493 1484 return self.push_addchangegroup(remote, force, revs)
1494 1485
1495 1486 def prepush(self, remote, force, revs):
1496 1487 base = {}
1497 1488 remote_heads = remote.heads()
1498 1489 inc = self.findincoming(remote, base, remote_heads, force=force)
1499 1490
1500 1491 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1501 1492 if revs is not None:
1502 1493 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1503 1494 else:
1504 1495 bases, heads = update, self.changelog.heads()
1505 1496
1506 1497 if not bases:
1507 1498 self.ui.status(_("no changes found\n"))
1508 1499 return None, 1
1509 1500 elif not force:
1510 1501 # check if we're creating new remote heads
1511 1502 # to be a remote head after push, node must be either
1512 1503 # - unknown locally
1513 1504 # - a local outgoing head descended from update
1514 1505 # - a remote head that's known locally and not
1515 1506 # ancestral to an outgoing head
1516 1507
1517 1508 warn = 0
1518 1509
1519 1510 if remote_heads == [nullid]:
1520 1511 warn = 0
1521 1512 elif not revs and len(heads) > len(remote_heads):
1522 1513 warn = 1
1523 1514 else:
1524 1515 newheads = list(heads)
1525 1516 for r in remote_heads:
1526 1517 if r in self.changelog.nodemap:
1527 1518 desc = self.changelog.heads(r, heads)
1528 1519 l = [h for h in heads if h in desc]
1529 1520 if not l:
1530 1521 newheads.append(r)
1531 1522 else:
1532 1523 newheads.append(r)
1533 1524 if len(newheads) > len(remote_heads):
1534 1525 warn = 1
1535 1526
1536 1527 if warn:
1537 1528 self.ui.warn(_("abort: push creates new remote heads!\n"))
1538 1529 self.ui.status(_("(did you forget to merge?"
1539 1530 " use push -f to force)\n"))
1540 1531 return None, 0
1541 1532 elif inc:
1542 1533 self.ui.warn(_("note: unsynced remote changes!\n"))
1543 1534
1544 1535
1545 1536 if revs is None:
1546 1537 cg = self.changegroup(update, 'push')
1547 1538 else:
1548 1539 cg = self.changegroupsubset(update, revs, 'push')
1549 1540 return cg, remote_heads
1550 1541
1551 1542 def push_addchangegroup(self, remote, force, revs):
1552 1543 lock = remote.lock()
1553 1544 try:
1554 1545 ret = self.prepush(remote, force, revs)
1555 1546 if ret[0] is not None:
1556 1547 cg, remote_heads = ret
1557 1548 return remote.addchangegroup(cg, 'push', self.url())
1558 1549 return ret[1]
1559 1550 finally:
1560 1551 del lock
1561 1552
1562 1553 def push_unbundle(self, remote, force, revs):
1563 1554 # local repo finds heads on server, finds out what revs it
1564 1555 # must push. once revs transferred, if server finds it has
1565 1556 # different heads (someone else won commit/push race), server
1566 1557 # aborts.
1567 1558
1568 1559 ret = self.prepush(remote, force, revs)
1569 1560 if ret[0] is not None:
1570 1561 cg, remote_heads = ret
1571 1562 if force: remote_heads = ['force']
1572 1563 return remote.unbundle(cg, remote_heads, 'push')
1573 1564 return ret[1]
1574 1565
1575 1566 def changegroupinfo(self, nodes, source):
1576 1567 if self.ui.verbose or source == 'bundle':
1577 1568 self.ui.status(_("%d changesets found\n") % len(nodes))
1578 1569 if self.ui.debugflag:
1579 1570 self.ui.debug(_("List of changesets:\n"))
1580 1571 for node in nodes:
1581 1572 self.ui.debug("%s\n" % hex(node))
1582 1573
1583 1574 def changegroupsubset(self, bases, heads, source, extranodes=None):
1584 1575 """This function generates a changegroup consisting of all the nodes
1585 1576 that are descendents of any of the bases, and ancestors of any of
1586 1577 the heads.
1587 1578
1588 1579 It is fairly complex as determining which filenodes and which
1589 1580 manifest nodes need to be included for the changeset to be complete
1590 1581 is non-trivial.
1591 1582
1592 1583 Another wrinkle is doing the reverse, figuring out which changeset in
1593 1584 the changegroup a particular filenode or manifestnode belongs to.
1594 1585
1595 1586 The caller can specify some nodes that must be included in the
1596 1587 changegroup using the extranodes argument. It should be a dict
1597 1588 where the keys are the filenames (or 1 for the manifest), and the
1598 1589 values are lists of (node, linknode) tuples, where node is a wanted
1599 1590 node and linknode is the changelog node that should be transmitted as
1600 1591 the linkrev.
1601 1592 """
1602 1593
1603 1594 self.hook('preoutgoing', throw=True, source=source)
1604 1595
1605 1596 # Set up some initial variables
1606 1597 # Make it easy to refer to self.changelog
1607 1598 cl = self.changelog
1608 1599 # msng is short for missing - compute the list of changesets in this
1609 1600 # changegroup.
1610 1601 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1611 1602 self.changegroupinfo(msng_cl_lst, source)
1612 1603 # Some bases may turn out to be superfluous, and some heads may be
1613 1604 # too. nodesbetween will return the minimal set of bases and heads
1614 1605 # necessary to re-create the changegroup.
1615 1606
1616 1607 # Known heads are the list of heads that it is assumed the recipient
1617 1608 # of this changegroup will know about.
1618 1609 knownheads = {}
1619 1610 # We assume that all parents of bases are known heads.
1620 1611 for n in bases:
1621 1612 for p in cl.parents(n):
1622 1613 if p != nullid:
1623 1614 knownheads[p] = 1
1624 1615 knownheads = knownheads.keys()
1625 1616 if knownheads:
1626 1617 # Now that we know what heads are known, we can compute which
1627 1618 # changesets are known. The recipient must know about all
1628 1619 # changesets required to reach the known heads from the null
1629 1620 # changeset.
1630 1621 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1631 1622 junk = None
1632 1623 # Transform the list into an ersatz set.
1633 1624 has_cl_set = dict.fromkeys(has_cl_set)
1634 1625 else:
1635 1626 # If there were no known heads, the recipient cannot be assumed to
1636 1627 # know about any changesets.
1637 1628 has_cl_set = {}
1638 1629
1639 1630 # Make it easy to refer to self.manifest
1640 1631 mnfst = self.manifest
1641 1632 # We don't know which manifests are missing yet
1642 1633 msng_mnfst_set = {}
1643 1634 # Nor do we know which filenodes are missing.
1644 1635 msng_filenode_set = {}
1645 1636
1646 1637 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1647 1638 junk = None
1648 1639
1649 1640 # A changeset always belongs to itself, so the changenode lookup
1650 1641 # function for a changenode is identity.
1651 1642 def identity(x):
1652 1643 return x
1653 1644
1654 1645 # A function generating function. Sets up an environment for the
1655 1646 # inner function.
1656 1647 def cmp_by_rev_func(revlog):
1657 1648 # Compare two nodes by their revision number in the environment's
1658 1649 # revision history. Since the revision number both represents the
1659 1650 # most efficient order to read the nodes in, and represents a
1660 1651 # topological sorting of the nodes, this function is often useful.
1661 1652 def cmp_by_rev(a, b):
1662 1653 return cmp(revlog.rev(a), revlog.rev(b))
1663 1654 return cmp_by_rev
1664 1655
1665 1656 # If we determine that a particular file or manifest node must be a
1666 1657 # node that the recipient of the changegroup will already have, we can
1667 1658 # also assume the recipient will have all the parents. This function
1668 1659 # prunes them from the set of missing nodes.
1669 1660 def prune_parents(revlog, hasset, msngset):
1670 1661 haslst = hasset.keys()
1671 1662 haslst.sort(cmp_by_rev_func(revlog))
1672 1663 for node in haslst:
1673 1664 parentlst = [p for p in revlog.parents(node) if p != nullid]
1674 1665 while parentlst:
1675 1666 n = parentlst.pop()
1676 1667 if n not in hasset:
1677 1668 hasset[n] = 1
1678 1669 p = [p for p in revlog.parents(n) if p != nullid]
1679 1670 parentlst.extend(p)
1680 1671 for n in hasset:
1681 1672 msngset.pop(n, None)
1682 1673
1683 1674 # This is a function generating function used to set up an environment
1684 1675 # for the inner function to execute in.
1685 1676 def manifest_and_file_collector(changedfileset):
1686 1677 # This is an information gathering function that gathers
1687 1678 # information from each changeset node that goes out as part of
1688 1679 # the changegroup. The information gathered is a list of which
1689 1680 # manifest nodes are potentially required (the recipient may
1690 1681 # already have them) and total list of all files which were
1691 1682 # changed in any changeset in the changegroup.
1692 1683 #
1693 1684 # We also remember the first changenode we saw any manifest
1694 1685 # referenced by so we can later determine which changenode 'owns'
1695 1686 # the manifest.
1696 1687 def collect_manifests_and_files(clnode):
1697 1688 c = cl.read(clnode)
1698 1689 for f in c[3]:
1699 1690 # This is to make sure we only have one instance of each
1700 1691 # filename string for each filename.
1701 1692 changedfileset.setdefault(f, f)
1702 1693 msng_mnfst_set.setdefault(c[0], clnode)
1703 1694 return collect_manifests_and_files
1704 1695
1705 1696 # Figure out which manifest nodes (of the ones we think might be part
1706 1697 # of the changegroup) the recipient must know about and remove them
1707 1698 # from the changegroup.
1708 1699 def prune_manifests():
1709 1700 has_mnfst_set = {}
1710 1701 for n in msng_mnfst_set:
1711 1702 # If a 'missing' manifest thinks it belongs to a changenode
1712 1703 # the recipient is assumed to have, obviously the recipient
1713 1704 # must have that manifest.
1714 1705 linknode = cl.node(mnfst.linkrev(n))
1715 1706 if linknode in has_cl_set:
1716 1707 has_mnfst_set[n] = 1
1717 1708 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1718 1709
1719 1710 # Use the information collected in collect_manifests_and_files to say
1720 1711 # which changenode any manifestnode belongs to.
1721 1712 def lookup_manifest_link(mnfstnode):
1722 1713 return msng_mnfst_set[mnfstnode]
1723 1714
1724 1715 # A function generating function that sets up the initial environment
1725 1716 # the inner function.
1726 1717 def filenode_collector(changedfiles):
1727 1718 next_rev = [0]
1728 1719 # This gathers information from each manifestnode included in the
1729 1720 # changegroup about which filenodes the manifest node references
1730 1721 # so we can include those in the changegroup too.
1731 1722 #
1732 1723 # It also remembers which changenode each filenode belongs to. It
1733 1724 # does this by assuming the a filenode belongs to the changenode
1734 1725 # the first manifest that references it belongs to.
1735 1726 def collect_msng_filenodes(mnfstnode):
1736 1727 r = mnfst.rev(mnfstnode)
1737 1728 if r == next_rev[0]:
1738 1729 # If the last rev we looked at was the one just previous,
1739 1730 # we only need to see a diff.
1740 1731 deltamf = mnfst.readdelta(mnfstnode)
1741 1732 # For each line in the delta
1742 1733 for f, fnode in deltamf.items():
1743 1734 f = changedfiles.get(f, None)
1744 1735 # And if the file is in the list of files we care
1745 1736 # about.
1746 1737 if f is not None:
1747 1738 # Get the changenode this manifest belongs to
1748 1739 clnode = msng_mnfst_set[mnfstnode]
1749 1740 # Create the set of filenodes for the file if
1750 1741 # there isn't one already.
1751 1742 ndset = msng_filenode_set.setdefault(f, {})
1752 1743 # And set the filenode's changelog node to the
1753 1744 # manifest's if it hasn't been set already.
1754 1745 ndset.setdefault(fnode, clnode)
1755 1746 else:
1756 1747 # Otherwise we need a full manifest.
1757 1748 m = mnfst.read(mnfstnode)
1758 1749 # For every file in we care about.
1759 1750 for f in changedfiles:
1760 1751 fnode = m.get(f, None)
1761 1752 # If it's in the manifest
1762 1753 if fnode is not None:
1763 1754 # See comments above.
1764 1755 clnode = msng_mnfst_set[mnfstnode]
1765 1756 ndset = msng_filenode_set.setdefault(f, {})
1766 1757 ndset.setdefault(fnode, clnode)
1767 1758 # Remember the revision we hope to see next.
1768 1759 next_rev[0] = r + 1
1769 1760 return collect_msng_filenodes
1770 1761
1771 1762 # We have a list of filenodes we think we need for a file, lets remove
1772 1763 # all those we now the recipient must have.
1773 1764 def prune_filenodes(f, filerevlog):
1774 1765 msngset = msng_filenode_set[f]
1775 1766 hasset = {}
1776 1767 # If a 'missing' filenode thinks it belongs to a changenode we
1777 1768 # assume the recipient must have, then the recipient must have
1778 1769 # that filenode.
1779 1770 for n in msngset:
1780 1771 clnode = cl.node(filerevlog.linkrev(n))
1781 1772 if clnode in has_cl_set:
1782 1773 hasset[n] = 1
1783 1774 prune_parents(filerevlog, hasset, msngset)
1784 1775
1785 1776 # A function generator function that sets up the a context for the
1786 1777 # inner function.
1787 1778 def lookup_filenode_link_func(fname):
1788 1779 msngset = msng_filenode_set[fname]
1789 1780 # Lookup the changenode the filenode belongs to.
1790 1781 def lookup_filenode_link(fnode):
1791 1782 return msngset[fnode]
1792 1783 return lookup_filenode_link
1793 1784
1794 1785 # Add the nodes that were explicitly requested.
1795 1786 def add_extra_nodes(name, nodes):
1796 1787 if not extranodes or name not in extranodes:
1797 1788 return
1798 1789
1799 1790 for node, linknode in extranodes[name]:
1800 1791 if node not in nodes:
1801 1792 nodes[node] = linknode
1802 1793
1803 1794 # Now that we have all theses utility functions to help out and
1804 1795 # logically divide up the task, generate the group.
1805 1796 def gengroup():
1806 1797 # The set of changed files starts empty.
1807 1798 changedfiles = {}
1808 1799 # Create a changenode group generator that will call our functions
1809 1800 # back to lookup the owning changenode and collect information.
1810 1801 group = cl.group(msng_cl_lst, identity,
1811 1802 manifest_and_file_collector(changedfiles))
1812 1803 for chnk in group:
1813 1804 yield chnk
1814 1805
1815 1806 # The list of manifests has been collected by the generator
1816 1807 # calling our functions back.
1817 1808 prune_manifests()
1818 1809 add_extra_nodes(1, msng_mnfst_set)
1819 1810 msng_mnfst_lst = msng_mnfst_set.keys()
1820 1811 # Sort the manifestnodes by revision number.
1821 1812 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1822 1813 # Create a generator for the manifestnodes that calls our lookup
1823 1814 # and data collection functions back.
1824 1815 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1825 1816 filenode_collector(changedfiles))
1826 1817 for chnk in group:
1827 1818 yield chnk
1828 1819
1829 1820 # These are no longer needed, dereference and toss the memory for
1830 1821 # them.
1831 1822 msng_mnfst_lst = None
1832 1823 msng_mnfst_set.clear()
1833 1824
1834 1825 if extranodes:
1835 1826 for fname in extranodes:
1836 1827 if isinstance(fname, int):
1837 1828 continue
1838 1829 add_extra_nodes(fname,
1839 1830 msng_filenode_set.setdefault(fname, {}))
1840 1831 changedfiles[fname] = 1
1841 1832 changedfiles = changedfiles.keys()
1842 1833 changedfiles.sort()
1843 1834 # Go through all our files in order sorted by name.
1844 1835 for fname in changedfiles:
1845 1836 filerevlog = self.file(fname)
1846 1837 if filerevlog.count() == 0:
1847 1838 raise util.Abort(_("empty or missing revlog for %s") % fname)
1848 1839 # Toss out the filenodes that the recipient isn't really
1849 1840 # missing.
1850 1841 if fname in msng_filenode_set:
1851 1842 prune_filenodes(fname, filerevlog)
1852 1843 msng_filenode_lst = msng_filenode_set[fname].keys()
1853 1844 else:
1854 1845 msng_filenode_lst = []
1855 1846 # If any filenodes are left, generate the group for them,
1856 1847 # otherwise don't bother.
1857 1848 if len(msng_filenode_lst) > 0:
1858 1849 yield changegroup.chunkheader(len(fname))
1859 1850 yield fname
1860 1851 # Sort the filenodes by their revision #
1861 1852 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1862 1853 # Create a group generator and only pass in a changenode
1863 1854 # lookup function as we need to collect no information
1864 1855 # from filenodes.
1865 1856 group = filerevlog.group(msng_filenode_lst,
1866 1857 lookup_filenode_link_func(fname))
1867 1858 for chnk in group:
1868 1859 yield chnk
1869 1860 if fname in msng_filenode_set:
1870 1861 # Don't need this anymore, toss it to free memory.
1871 1862 del msng_filenode_set[fname]
1872 1863 # Signal that no more groups are left.
1873 1864 yield changegroup.closechunk()
1874 1865
1875 1866 if msng_cl_lst:
1876 1867 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1877 1868
1878 1869 return util.chunkbuffer(gengroup())
1879 1870
1880 1871 def changegroup(self, basenodes, source):
1881 1872 """Generate a changegroup of all nodes that we have that a recipient
1882 1873 doesn't.
1883 1874
1884 1875 This is much easier than the previous function as we can assume that
1885 1876 the recipient has any changenode we aren't sending them."""
1886 1877
1887 1878 self.hook('preoutgoing', throw=True, source=source)
1888 1879
1889 1880 cl = self.changelog
1890 1881 nodes = cl.nodesbetween(basenodes, None)[0]
1891 1882 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1892 1883 self.changegroupinfo(nodes, source)
1893 1884
1894 1885 def identity(x):
1895 1886 return x
1896 1887
1897 1888 def gennodelst(revlog):
1898 1889 for r in xrange(0, revlog.count()):
1899 1890 n = revlog.node(r)
1900 1891 if revlog.linkrev(n) in revset:
1901 1892 yield n
1902 1893
1903 1894 def changed_file_collector(changedfileset):
1904 1895 def collect_changed_files(clnode):
1905 1896 c = cl.read(clnode)
1906 1897 for fname in c[3]:
1907 1898 changedfileset[fname] = 1
1908 1899 return collect_changed_files
1909 1900
1910 1901 def lookuprevlink_func(revlog):
1911 1902 def lookuprevlink(n):
1912 1903 return cl.node(revlog.linkrev(n))
1913 1904 return lookuprevlink
1914 1905
1915 1906 def gengroup():
1916 1907 # construct a list of all changed files
1917 1908 changedfiles = {}
1918 1909
1919 1910 for chnk in cl.group(nodes, identity,
1920 1911 changed_file_collector(changedfiles)):
1921 1912 yield chnk
1922 1913 changedfiles = changedfiles.keys()
1923 1914 changedfiles.sort()
1924 1915
1925 1916 mnfst = self.manifest
1926 1917 nodeiter = gennodelst(mnfst)
1927 1918 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1928 1919 yield chnk
1929 1920
1930 1921 for fname in changedfiles:
1931 1922 filerevlog = self.file(fname)
1932 1923 if filerevlog.count() == 0:
1933 1924 raise util.Abort(_("empty or missing revlog for %s") % fname)
1934 1925 nodeiter = gennodelst(filerevlog)
1935 1926 nodeiter = list(nodeiter)
1936 1927 if nodeiter:
1937 1928 yield changegroup.chunkheader(len(fname))
1938 1929 yield fname
1939 1930 lookup = lookuprevlink_func(filerevlog)
1940 1931 for chnk in filerevlog.group(nodeiter, lookup):
1941 1932 yield chnk
1942 1933
1943 1934 yield changegroup.closechunk()
1944 1935
1945 1936 if nodes:
1946 1937 self.hook('outgoing', node=hex(nodes[0]), source=source)
1947 1938
1948 1939 return util.chunkbuffer(gengroup())
1949 1940
1950 1941 def addchangegroup(self, source, srctype, url, emptyok=False):
1951 1942 """add changegroup to repo.
1952 1943
1953 1944 return values:
1954 1945 - nothing changed or no source: 0
1955 1946 - more heads than before: 1+added heads (2..n)
1956 1947 - less heads than before: -1-removed heads (-2..-n)
1957 1948 - number of heads stays the same: 1
1958 1949 """
1959 1950 def csmap(x):
1960 1951 self.ui.debug(_("add changeset %s\n") % short(x))
1961 1952 return cl.count()
1962 1953
1963 1954 def revmap(x):
1964 1955 return cl.rev(x)
1965 1956
1966 1957 if not source:
1967 1958 return 0
1968 1959
1969 1960 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1970 1961
1971 1962 changesets = files = revisions = 0
1972 1963
1973 1964 # write changelog data to temp files so concurrent readers will not see
1974 1965 # inconsistent view
1975 1966 cl = self.changelog
1976 1967 cl.delayupdate()
1977 1968 oldheads = len(cl.heads())
1978 1969
1979 1970 tr = self.transaction()
1980 1971 try:
1981 1972 trp = weakref.proxy(tr)
1982 1973 # pull off the changeset group
1983 1974 self.ui.status(_("adding changesets\n"))
1984 1975 cor = cl.count() - 1
1985 1976 chunkiter = changegroup.chunkiter(source)
1986 1977 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
1987 1978 raise util.Abort(_("received changelog group is empty"))
1988 1979 cnr = cl.count() - 1
1989 1980 changesets = cnr - cor
1990 1981
1991 1982 # pull off the manifest group
1992 1983 self.ui.status(_("adding manifests\n"))
1993 1984 chunkiter = changegroup.chunkiter(source)
1994 1985 # no need to check for empty manifest group here:
1995 1986 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1996 1987 # no new manifest will be created and the manifest group will
1997 1988 # be empty during the pull
1998 1989 self.manifest.addgroup(chunkiter, revmap, trp)
1999 1990
2000 1991 # process the files
2001 1992 self.ui.status(_("adding file changes\n"))
2002 1993 while 1:
2003 1994 f = changegroup.getchunk(source)
2004 1995 if not f:
2005 1996 break
2006 1997 self.ui.debug(_("adding %s revisions\n") % f)
2007 1998 fl = self.file(f)
2008 1999 o = fl.count()
2009 2000 chunkiter = changegroup.chunkiter(source)
2010 2001 if fl.addgroup(chunkiter, revmap, trp) is None:
2011 2002 raise util.Abort(_("received file revlog group is empty"))
2012 2003 revisions += fl.count() - o
2013 2004 files += 1
2014 2005
2015 2006 # make changelog see real files again
2016 2007 cl.finalize(trp)
2017 2008
2018 2009 newheads = len(self.changelog.heads())
2019 2010 heads = ""
2020 2011 if oldheads and newheads != oldheads:
2021 2012 heads = _(" (%+d heads)") % (newheads - oldheads)
2022 2013
2023 2014 self.ui.status(_("added %d changesets"
2024 2015 " with %d changes to %d files%s\n")
2025 2016 % (changesets, revisions, files, heads))
2026 2017
2027 2018 if changesets > 0:
2028 2019 self.hook('pretxnchangegroup', throw=True,
2029 2020 node=hex(self.changelog.node(cor+1)), source=srctype,
2030 2021 url=url)
2031 2022
2032 2023 tr.close()
2033 2024 finally:
2034 2025 del tr
2035 2026
2036 2027 if changesets > 0:
2037 2028 # forcefully update the on-disk branch cache
2038 2029 self.ui.debug(_("updating the branch cache\n"))
2039 2030 self.branchtags()
2040 2031 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
2041 2032 source=srctype, url=url)
2042 2033
2043 2034 for i in xrange(cor + 1, cnr + 1):
2044 2035 self.hook("incoming", node=hex(self.changelog.node(i)),
2045 2036 source=srctype, url=url)
2046 2037
2047 2038 # never return 0 here:
2048 2039 if newheads < oldheads:
2049 2040 return newheads - oldheads - 1
2050 2041 else:
2051 2042 return newheads - oldheads + 1
2052 2043
2053 2044
2054 2045 def stream_in(self, remote):
2055 2046 fp = remote.stream_out()
2056 2047 l = fp.readline()
2057 2048 try:
2058 2049 resp = int(l)
2059 2050 except ValueError:
2060 2051 raise util.UnexpectedOutput(
2061 2052 _('Unexpected response from remote server:'), l)
2062 2053 if resp == 1:
2063 2054 raise util.Abort(_('operation forbidden by server'))
2064 2055 elif resp == 2:
2065 2056 raise util.Abort(_('locking the remote repository failed'))
2066 2057 elif resp != 0:
2067 2058 raise util.Abort(_('the server sent an unknown error code'))
2068 2059 self.ui.status(_('streaming all changes\n'))
2069 2060 l = fp.readline()
2070 2061 try:
2071 2062 total_files, total_bytes = map(int, l.split(' ', 1))
2072 2063 except (ValueError, TypeError):
2073 2064 raise util.UnexpectedOutput(
2074 2065 _('Unexpected response from remote server:'), l)
2075 2066 self.ui.status(_('%d files to transfer, %s of data\n') %
2076 2067 (total_files, util.bytecount(total_bytes)))
2077 2068 start = time.time()
2078 2069 for i in xrange(total_files):
2079 2070 # XXX doesn't support '\n' or '\r' in filenames
2080 2071 l = fp.readline()
2081 2072 try:
2082 2073 name, size = l.split('\0', 1)
2083 2074 size = int(size)
2084 2075 except ValueError, TypeError:
2085 2076 raise util.UnexpectedOutput(
2086 2077 _('Unexpected response from remote server:'), l)
2087 2078 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2088 2079 ofp = self.sopener(name, 'w')
2089 2080 for chunk in util.filechunkiter(fp, limit=size):
2090 2081 ofp.write(chunk)
2091 2082 ofp.close()
2092 2083 elapsed = time.time() - start
2093 2084 if elapsed <= 0:
2094 2085 elapsed = 0.001
2095 2086 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2096 2087 (util.bytecount(total_bytes), elapsed,
2097 2088 util.bytecount(total_bytes / elapsed)))
2098 2089 self.invalidate()
2099 2090 return len(self.heads()) + 1
2100 2091
2101 2092 def clone(self, remote, heads=[], stream=False):
2102 2093 '''clone remote repository.
2103 2094
2104 2095 keyword arguments:
2105 2096 heads: list of revs to clone (forces use of pull)
2106 2097 stream: use streaming clone if possible'''
2107 2098
2108 2099 # now, all clients that can request uncompressed clones can
2109 2100 # read repo formats supported by all servers that can serve
2110 2101 # them.
2111 2102
2112 2103 # if revlog format changes, client will have to check version
2113 2104 # and format flags on "stream" capability, and use
2114 2105 # uncompressed only if compatible.
2115 2106
2116 2107 if stream and not heads and remote.capable('stream'):
2117 2108 return self.stream_in(remote)
2118 2109 return self.pull(remote, heads)
2119 2110
2120 2111 # used to avoid circular references so destructors work
2121 2112 def aftertrans(files):
2122 2113 renamefiles = [tuple(t) for t in files]
2123 2114 def a():
2124 2115 for src, dest in renamefiles:
2125 2116 util.rename(src, dest)
2126 2117 return a
2127 2118
2128 2119 def instance(ui, path, create):
2129 2120 return localrepository(ui, util.drop_scheme('file', path), create)
2130 2121
2131 2122 def islocal(path):
2132 2123 return True
@@ -1,207 +1,205 b''
1 1 # manifest.py - manifest revision class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from node import bin, hex, nullid
9 9 from revlog import revlog, RevlogError
10 10 from i18n import _
11 11 import array, struct, mdiff, parsers
12 12
13 13 class manifestdict(dict):
14 14 def __init__(self, mapping=None, flags=None):
15 15 if mapping is None: mapping = {}
16 16 if flags is None: flags = {}
17 17 dict.__init__(self, mapping)
18 18 self._flags = flags
19 19 def flags(self, f):
20 20 return self._flags.get(f, "")
21 21 def execf(self, f):
22 22 "test for executable in manifest flags"
23 23 return "x" in self.flags(f)
24 24 def linkf(self, f):
25 25 "test for symlink in manifest flags"
26 26 return "l" in self.flags(f)
27 def set(self, f, execf=False, linkf=False):
28 if linkf: self._flags[f] = "l"
29 elif execf: self._flags[f] = "x"
30 else: self._flags[f] = ""
27 def set(self, f, flags):
28 self._flags[f] = flags
31 29 def copy(self):
32 30 return manifestdict(dict.copy(self), dict.copy(self._flags))
33 31
34 32 class manifest(revlog):
35 33 def __init__(self, opener):
36 34 self.mapcache = None
37 35 self.listcache = None
38 36 revlog.__init__(self, opener, "00manifest.i")
39 37
40 38 def parse(self, lines):
41 39 mfdict = manifestdict()
42 40 parsers.parse_manifest(mfdict, mfdict._flags, lines)
43 41 return mfdict
44 42
45 43 def readdelta(self, node):
46 44 return self.parse(mdiff.patchtext(self.delta(node)))
47 45
48 46 def read(self, node):
49 47 if node == nullid: return manifestdict() # don't upset local cache
50 48 if self.mapcache and self.mapcache[0] == node:
51 49 return self.mapcache[1]
52 50 text = self.revision(node)
53 51 self.listcache = array.array('c', text)
54 52 mapping = self.parse(text)
55 53 self.mapcache = (node, mapping)
56 54 return mapping
57 55
58 56 def _search(self, m, s, lo=0, hi=None):
59 57 '''return a tuple (start, end) that says where to find s within m.
60 58
61 59 If the string is found m[start:end] are the line containing
62 60 that string. If start == end the string was not found and
63 61 they indicate the proper sorted insertion point. This was
64 62 taken from bisect_left, and modified to find line start/end as
65 63 it goes along.
66 64
67 65 m should be a buffer or a string
68 66 s is a string'''
69 67 def advance(i, c):
70 68 while i < lenm and m[i] != c:
71 69 i += 1
72 70 return i
73 71 lenm = len(m)
74 72 if not hi:
75 73 hi = lenm
76 74 while lo < hi:
77 75 mid = (lo + hi) // 2
78 76 start = mid
79 77 while start > 0 and m[start-1] != '\n':
80 78 start -= 1
81 79 end = advance(start, '\0')
82 80 if m[start:end] < s:
83 81 # we know that after the null there are 40 bytes of sha1
84 82 # this translates to the bisect lo = mid + 1
85 83 lo = advance(end + 40, '\n') + 1
86 84 else:
87 85 # this translates to the bisect hi = mid
88 86 hi = start
89 87 end = advance(lo, '\0')
90 88 found = m[lo:end]
91 89 if cmp(s, found) == 0:
92 90 # we know that after the null there are 40 bytes of sha1
93 91 end = advance(end + 40, '\n')
94 92 return (lo, end+1)
95 93 else:
96 94 return (lo, lo)
97 95
98 96 def find(self, node, f):
99 97 '''look up entry for a single file efficiently.
100 98 return (node, flags) pair if found, (None, None) if not.'''
101 99 if self.mapcache and node == self.mapcache[0]:
102 100 return self.mapcache[1].get(f), self.mapcache[1].flags(f)
103 101 text = self.revision(node)
104 102 start, end = self._search(text, f)
105 103 if start == end:
106 104 return None, None
107 105 l = text[start:end]
108 106 f, n = l.split('\0')
109 107 return bin(n[:40]), n[40:-1]
110 108
111 109 def add(self, map, transaction, link, p1=None, p2=None,
112 110 changed=None):
113 111 # apply the changes collected during the bisect loop to our addlist
114 112 # return a delta suitable for addrevision
115 113 def addlistdelta(addlist, x):
116 114 # start from the bottom up
117 115 # so changes to the offsets don't mess things up.
118 116 i = len(x)
119 117 while i > 0:
120 118 i -= 1
121 119 start = x[i][0]
122 120 end = x[i][1]
123 121 if x[i][2]:
124 122 addlist[start:end] = array.array('c', x[i][2])
125 123 else:
126 124 del addlist[start:end]
127 125 return "".join([struct.pack(">lll", d[0], d[1], len(d[2])) + d[2]
128 126 for d in x ])
129 127
130 128 def checkforbidden(f):
131 129 if '\n' in f or '\r' in f:
132 130 raise RevlogError(_("'\\n' and '\\r' disallowed in filenames"))
133 131
134 132 # if we're using the listcache, make sure it is valid and
135 133 # parented by the same node we're diffing against
136 134 if not (changed and self.listcache and p1 and self.mapcache[0] == p1):
137 135 files = map.keys()
138 136 files.sort()
139 137
140 138 for f in files:
141 139 checkforbidden(f)
142 140
143 141 # if this is changed to support newlines in filenames,
144 142 # be sure to check the templates/ dir again (especially *-raw.tmpl)
145 143 text = ["%s\000%s%s\n" % (f, hex(map[f]), map.flags(f))
146 144 for f in files]
147 145 self.listcache = array.array('c', "".join(text))
148 146 cachedelta = None
149 147 else:
150 148 addlist = self.listcache
151 149
152 150 for f in changed[0]:
153 151 checkforbidden(f)
154 152 # combine the changed lists into one list for sorting
155 153 work = [[x, 0] for x in changed[0]]
156 154 work[len(work):] = [[x, 1] for x in changed[1]]
157 155 work.sort()
158 156
159 157 delta = []
160 158 dstart = None
161 159 dend = None
162 160 dline = [""]
163 161 start = 0
164 162 # zero copy representation of addlist as a buffer
165 163 addbuf = buffer(addlist)
166 164
167 165 # start with a readonly loop that finds the offset of
168 166 # each line and creates the deltas
169 167 for w in work:
170 168 f = w[0]
171 169 # bs will either be the index of the item or the insert point
172 170 start, end = self._search(addbuf, f, start)
173 171 if w[1] == 0:
174 172 l = "%s\000%s%s\n" % (f, hex(map[f]), map.flags(f))
175 173 else:
176 174 l = ""
177 175 if start == end and w[1] == 1:
178 176 # item we want to delete was not found, error out
179 177 raise AssertionError(
180 178 _("failed to remove %s from manifest") % f)
181 179 if dstart != None and dstart <= start and dend >= start:
182 180 if dend < end:
183 181 dend = end
184 182 if l:
185 183 dline.append(l)
186 184 else:
187 185 if dstart != None:
188 186 delta.append([dstart, dend, "".join(dline)])
189 187 dstart = start
190 188 dend = end
191 189 dline = [l]
192 190
193 191 if dstart != None:
194 192 delta.append([dstart, dend, "".join(dline)])
195 193 # apply the delta to the addlist, and get a delta for addrevision
196 194 cachedelta = addlistdelta(addlist, delta)
197 195
198 196 # the delta is only valid if we've been processing the tip revision
199 197 if self.mapcache[0] != self.tip():
200 198 cachedelta = None
201 199 self.listcache = addlist
202 200
203 201 n = self.addrevision(buffer(self.listcache), transaction, link,
204 202 p1, p2, cachedelta)
205 203 self.mapcache = (n, map)
206 204
207 205 return n
@@ -1,1347 +1,1337 b''
1 1 # patch.py - patch file parsing routines
2 2 #
3 3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 from i18n import _
10 10 from node import hex, nullid, short
11 11 import base85, cmdutil, mdiff, util, revlog, diffhelpers, copies
12 12 import cStringIO, email.Parser, os, popen2, re, errno
13 13 import sys, tempfile, zlib
14 14
15 15 class PatchError(Exception):
16 16 pass
17 17
18 18 class NoHunks(PatchError):
19 19 pass
20 20
21 21 # helper functions
22 22
23 23 def copyfile(src, dst, basedir=None):
24 24 if not basedir:
25 25 basedir = os.getcwd()
26 26
27 27 abssrc, absdst = [os.path.join(basedir, n) for n in (src, dst)]
28 28 if os.path.exists(absdst):
29 29 raise util.Abort(_("cannot create %s: destination already exists") %
30 30 dst)
31 31
32 32 targetdir = os.path.dirname(absdst)
33 33 if not os.path.isdir(targetdir):
34 34 os.makedirs(targetdir)
35 35
36 36 util.copyfile(abssrc, absdst)
37 37
38 38 # public functions
39 39
40 40 def extract(ui, fileobj):
41 41 '''extract patch from data read from fileobj.
42 42
43 43 patch can be a normal patch or contained in an email message.
44 44
45 45 return tuple (filename, message, user, date, node, p1, p2).
46 46 Any item in the returned tuple can be None. If filename is None,
47 47 fileobj did not contain a patch. Caller must unlink filename when done.'''
48 48
49 49 # attempt to detect the start of a patch
50 50 # (this heuristic is borrowed from quilt)
51 51 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
52 52 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
53 53 '(---|\*\*\*)[ \t])', re.MULTILINE)
54 54
55 55 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
56 56 tmpfp = os.fdopen(fd, 'w')
57 57 try:
58 58 msg = email.Parser.Parser().parse(fileobj)
59 59
60 60 subject = msg['Subject']
61 61 user = msg['From']
62 62 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
63 63 # should try to parse msg['Date']
64 64 date = None
65 65 nodeid = None
66 66 branch = None
67 67 parents = []
68 68
69 69 if subject:
70 70 if subject.startswith('[PATCH'):
71 71 pend = subject.find(']')
72 72 if pend >= 0:
73 73 subject = subject[pend+1:].lstrip()
74 74 subject = subject.replace('\n\t', ' ')
75 75 ui.debug('Subject: %s\n' % subject)
76 76 if user:
77 77 ui.debug('From: %s\n' % user)
78 78 diffs_seen = 0
79 79 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
80 80 message = ''
81 81 for part in msg.walk():
82 82 content_type = part.get_content_type()
83 83 ui.debug('Content-Type: %s\n' % content_type)
84 84 if content_type not in ok_types:
85 85 continue
86 86 payload = part.get_payload(decode=True)
87 87 m = diffre.search(payload)
88 88 if m:
89 89 hgpatch = False
90 90 ignoretext = False
91 91
92 92 ui.debug(_('found patch at byte %d\n') % m.start(0))
93 93 diffs_seen += 1
94 94 cfp = cStringIO.StringIO()
95 95 for line in payload[:m.start(0)].splitlines():
96 96 if line.startswith('# HG changeset patch'):
97 97 ui.debug(_('patch generated by hg export\n'))
98 98 hgpatch = True
99 99 # drop earlier commit message content
100 100 cfp.seek(0)
101 101 cfp.truncate()
102 102 subject = None
103 103 elif hgpatch:
104 104 if line.startswith('# User '):
105 105 user = line[7:]
106 106 ui.debug('From: %s\n' % user)
107 107 elif line.startswith("# Date "):
108 108 date = line[7:]
109 109 elif line.startswith("# Branch "):
110 110 branch = line[9:]
111 111 elif line.startswith("# Node ID "):
112 112 nodeid = line[10:]
113 113 elif line.startswith("# Parent "):
114 114 parents.append(line[10:])
115 115 elif line == '---' and gitsendmail:
116 116 ignoretext = True
117 117 if not line.startswith('# ') and not ignoretext:
118 118 cfp.write(line)
119 119 cfp.write('\n')
120 120 message = cfp.getvalue()
121 121 if tmpfp:
122 122 tmpfp.write(payload)
123 123 if not payload.endswith('\n'):
124 124 tmpfp.write('\n')
125 125 elif not diffs_seen and message and content_type == 'text/plain':
126 126 message += '\n' + payload
127 127 except:
128 128 tmpfp.close()
129 129 os.unlink(tmpname)
130 130 raise
131 131
132 132 if subject and not message.startswith(subject):
133 133 message = '%s\n%s' % (subject, message)
134 134 tmpfp.close()
135 135 if not diffs_seen:
136 136 os.unlink(tmpname)
137 137 return None, message, user, date, branch, None, None, None
138 138 p1 = parents and parents.pop(0) or None
139 139 p2 = parents and parents.pop(0) or None
140 140 return tmpname, message, user, date, branch, nodeid, p1, p2
141 141
142 142 GP_PATCH = 1 << 0 # we have to run patch
143 143 GP_FILTER = 1 << 1 # there's some copy/rename operation
144 144 GP_BINARY = 1 << 2 # there's a binary patch
145 145
146 146 def readgitpatch(fp, firstline=None):
147 147 """extract git-style metadata about patches from <patchname>"""
148 148 class gitpatch:
149 149 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
150 150 def __init__(self, path):
151 151 self.path = path
152 152 self.oldpath = None
153 153 self.mode = None
154 154 self.op = 'MODIFY'
155 155 self.lineno = 0
156 156 self.binary = False
157 157
158 158 def reader(fp, firstline):
159 159 if firstline is not None:
160 160 yield firstline
161 161 for line in fp:
162 162 yield line
163 163
164 164 # Filter patch for git information
165 165 gitre = re.compile('diff --git a/(.*) b/(.*)')
166 166 gp = None
167 167 gitpatches = []
168 168 # Can have a git patch with only metadata, causing patch to complain
169 169 dopatch = 0
170 170
171 171 lineno = 0
172 172 for line in reader(fp, firstline):
173 173 lineno += 1
174 174 if line.startswith('diff --git'):
175 175 m = gitre.match(line)
176 176 if m:
177 177 if gp:
178 178 gitpatches.append(gp)
179 179 src, dst = m.group(1, 2)
180 180 gp = gitpatch(dst)
181 181 gp.lineno = lineno
182 182 elif gp:
183 183 if line.startswith('--- '):
184 184 if gp.op in ('COPY', 'RENAME'):
185 185 dopatch |= GP_FILTER
186 186 gitpatches.append(gp)
187 187 gp = None
188 188 dopatch |= GP_PATCH
189 189 continue
190 190 if line.startswith('rename from '):
191 191 gp.op = 'RENAME'
192 192 gp.oldpath = line[12:].rstrip()
193 193 elif line.startswith('rename to '):
194 194 gp.path = line[10:].rstrip()
195 195 elif line.startswith('copy from '):
196 196 gp.op = 'COPY'
197 197 gp.oldpath = line[10:].rstrip()
198 198 elif line.startswith('copy to '):
199 199 gp.path = line[8:].rstrip()
200 200 elif line.startswith('deleted file'):
201 201 gp.op = 'DELETE'
202 202 elif line.startswith('new file mode '):
203 203 gp.op = 'ADD'
204 204 gp.mode = int(line.rstrip()[-6:], 8)
205 205 elif line.startswith('new mode '):
206 206 gp.mode = int(line.rstrip()[-6:], 8)
207 207 elif line.startswith('GIT binary patch'):
208 208 dopatch |= GP_BINARY
209 209 gp.binary = True
210 210 if gp:
211 211 gitpatches.append(gp)
212 212
213 213 if not gitpatches:
214 214 dopatch = GP_PATCH
215 215
216 216 return (dopatch, gitpatches)
217 217
218 218 def patch(patchname, ui, strip=1, cwd=None, files={}):
219 219 """apply <patchname> to the working directory.
220 220 returns whether patch was applied with fuzz factor."""
221 221 patcher = ui.config('ui', 'patch')
222 222 args = []
223 223 try:
224 224 if patcher:
225 225 return externalpatch(patcher, args, patchname, ui, strip, cwd,
226 226 files)
227 227 else:
228 228 try:
229 229 return internalpatch(patchname, ui, strip, cwd, files)
230 230 except NoHunks:
231 231 patcher = util.find_exe('gpatch') or util.find_exe('patch')
232 232 ui.debug('no valid hunks found; trying with %r instead\n' %
233 233 patcher)
234 234 if util.needbinarypatch():
235 235 args.append('--binary')
236 236 return externalpatch(patcher, args, patchname, ui, strip, cwd,
237 237 files)
238 238 except PatchError, err:
239 239 s = str(err)
240 240 if s:
241 241 raise util.Abort(s)
242 242 else:
243 243 raise util.Abort(_('patch failed to apply'))
244 244
245 245 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
246 246 """use <patcher> to apply <patchname> to the working directory.
247 247 returns whether patch was applied with fuzz factor."""
248 248
249 249 fuzz = False
250 250 if cwd:
251 251 args.append('-d %s' % util.shellquote(cwd))
252 252 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
253 253 util.shellquote(patchname)))
254 254
255 255 for line in fp:
256 256 line = line.rstrip()
257 257 ui.note(line + '\n')
258 258 if line.startswith('patching file '):
259 259 pf = util.parse_patch_output(line)
260 260 printed_file = False
261 261 files.setdefault(pf, (None, None))
262 262 elif line.find('with fuzz') >= 0:
263 263 fuzz = True
264 264 if not printed_file:
265 265 ui.warn(pf + '\n')
266 266 printed_file = True
267 267 ui.warn(line + '\n')
268 268 elif line.find('saving rejects to file') >= 0:
269 269 ui.warn(line + '\n')
270 270 elif line.find('FAILED') >= 0:
271 271 if not printed_file:
272 272 ui.warn(pf + '\n')
273 273 printed_file = True
274 274 ui.warn(line + '\n')
275 275 code = fp.close()
276 276 if code:
277 277 raise PatchError(_("patch command failed: %s") %
278 278 util.explain_exit(code)[0])
279 279 return fuzz
280 280
281 281 def internalpatch(patchobj, ui, strip, cwd, files={}):
282 282 """use builtin patch to apply <patchobj> to the working directory.
283 283 returns whether patch was applied with fuzz factor."""
284 284 try:
285 285 fp = file(patchobj, 'rb')
286 286 except TypeError:
287 287 fp = patchobj
288 288 if cwd:
289 289 curdir = os.getcwd()
290 290 os.chdir(cwd)
291 291 try:
292 292 ret = applydiff(ui, fp, files, strip=strip)
293 293 finally:
294 294 if cwd:
295 295 os.chdir(curdir)
296 296 if ret < 0:
297 297 raise PatchError
298 298 return ret > 0
299 299
300 300 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
301 301 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
302 302 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
303 303
304 304 class patchfile:
305 305 def __init__(self, ui, fname, missing=False):
306 306 self.fname = fname
307 307 self.ui = ui
308 308 self.lines = []
309 309 self.exists = False
310 310 self.missing = missing
311 311 if not missing:
312 312 try:
313 313 fp = file(fname, 'rb')
314 314 self.lines = fp.readlines()
315 315 self.exists = True
316 316 except IOError:
317 317 pass
318 318 else:
319 319 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
320 320
321 321 if not self.exists:
322 322 dirname = os.path.dirname(fname)
323 323 if dirname and not os.path.isdir(dirname):
324 324 os.makedirs(dirname)
325 325
326 326 self.hash = {}
327 327 self.dirty = 0
328 328 self.offset = 0
329 329 self.rej = []
330 330 self.fileprinted = False
331 331 self.printfile(False)
332 332 self.hunks = 0
333 333
334 334 def printfile(self, warn):
335 335 if self.fileprinted:
336 336 return
337 337 if warn or self.ui.verbose:
338 338 self.fileprinted = True
339 339 s = _("patching file %s\n") % self.fname
340 340 if warn:
341 341 self.ui.warn(s)
342 342 else:
343 343 self.ui.note(s)
344 344
345 345
346 346 def findlines(self, l, linenum):
347 347 # looks through the hash and finds candidate lines. The
348 348 # result is a list of line numbers sorted based on distance
349 349 # from linenum
350 350 def sorter(a, b):
351 351 vala = abs(a - linenum)
352 352 valb = abs(b - linenum)
353 353 return cmp(vala, valb)
354 354
355 355 try:
356 356 cand = self.hash[l]
357 357 except:
358 358 return []
359 359
360 360 if len(cand) > 1:
361 361 # resort our list of potentials forward then back.
362 362 cand.sort(sorter)
363 363 return cand
364 364
365 365 def hashlines(self):
366 366 self.hash = {}
367 367 for x in xrange(len(self.lines)):
368 368 s = self.lines[x]
369 369 self.hash.setdefault(s, []).append(x)
370 370
371 371 def write_rej(self):
372 372 # our rejects are a little different from patch(1). This always
373 373 # creates rejects in the same form as the original patch. A file
374 374 # header is inserted so that you can run the reject through patch again
375 375 # without having to type the filename.
376 376
377 377 if not self.rej:
378 378 return
379 379 if self.hunks != 1:
380 380 hunkstr = "s"
381 381 else:
382 382 hunkstr = ""
383 383
384 384 fname = self.fname + ".rej"
385 385 self.ui.warn(
386 386 _("%d out of %d hunk%s FAILED -- saving rejects to file %s\n") %
387 387 (len(self.rej), self.hunks, hunkstr, fname))
388 388 try: os.unlink(fname)
389 389 except:
390 390 pass
391 391 fp = file(fname, 'wb')
392 392 base = os.path.basename(self.fname)
393 393 fp.write("--- %s\n+++ %s\n" % (base, base))
394 394 for x in self.rej:
395 395 for l in x.hunk:
396 396 fp.write(l)
397 397 if l[-1] != '\n':
398 398 fp.write("\n\ No newline at end of file\n")
399 399
400 400 def write(self, dest=None):
401 401 if self.dirty:
402 402 if not dest:
403 403 dest = self.fname
404 404 st = None
405 405 try:
406 406 st = os.lstat(dest)
407 407 except OSError, inst:
408 408 if inst.errno != errno.ENOENT:
409 409 raise
410 410 if st and st.st_nlink > 1:
411 411 os.unlink(dest)
412 412 fp = file(dest, 'wb')
413 413 if st and st.st_nlink > 1:
414 414 os.chmod(dest, st.st_mode)
415 415 fp.writelines(self.lines)
416 416 fp.close()
417 417
418 418 def close(self):
419 419 self.write()
420 420 self.write_rej()
421 421
422 422 def apply(self, h, reverse):
423 423 if not h.complete():
424 424 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
425 425 (h.number, h.desc, len(h.a), h.lena, len(h.b),
426 426 h.lenb))
427 427
428 428 self.hunks += 1
429 429 if reverse:
430 430 h.reverse()
431 431
432 432 if self.missing:
433 433 self.rej.append(h)
434 434 return -1
435 435
436 436 if self.exists and h.createfile():
437 437 self.ui.warn(_("file %s already exists\n") % self.fname)
438 438 self.rej.append(h)
439 439 return -1
440 440
441 441 if isinstance(h, binhunk):
442 442 if h.rmfile():
443 443 os.unlink(self.fname)
444 444 else:
445 445 self.lines[:] = h.new()
446 446 self.offset += len(h.new())
447 447 self.dirty = 1
448 448 return 0
449 449
450 450 # fast case first, no offsets, no fuzz
451 451 old = h.old()
452 452 # patch starts counting at 1 unless we are adding the file
453 453 if h.starta == 0:
454 454 start = 0
455 455 else:
456 456 start = h.starta + self.offset - 1
457 457 orig_start = start
458 458 if diffhelpers.testhunk(old, self.lines, start) == 0:
459 459 if h.rmfile():
460 460 os.unlink(self.fname)
461 461 else:
462 462 self.lines[start : start + h.lena] = h.new()
463 463 self.offset += h.lenb - h.lena
464 464 self.dirty = 1
465 465 return 0
466 466
467 467 # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
468 468 self.hashlines()
469 469 if h.hunk[-1][0] != ' ':
470 470 # if the hunk tried to put something at the bottom of the file
471 471 # override the start line and use eof here
472 472 search_start = len(self.lines)
473 473 else:
474 474 search_start = orig_start
475 475
476 476 for fuzzlen in xrange(3):
477 477 for toponly in [ True, False ]:
478 478 old = h.old(fuzzlen, toponly)
479 479
480 480 cand = self.findlines(old[0][1:], search_start)
481 481 for l in cand:
482 482 if diffhelpers.testhunk(old, self.lines, l) == 0:
483 483 newlines = h.new(fuzzlen, toponly)
484 484 self.lines[l : l + len(old)] = newlines
485 485 self.offset += len(newlines) - len(old)
486 486 self.dirty = 1
487 487 if fuzzlen:
488 488 fuzzstr = "with fuzz %d " % fuzzlen
489 489 f = self.ui.warn
490 490 self.printfile(True)
491 491 else:
492 492 fuzzstr = ""
493 493 f = self.ui.note
494 494 offset = l - orig_start - fuzzlen
495 495 if offset == 1:
496 496 linestr = "line"
497 497 else:
498 498 linestr = "lines"
499 499 f(_("Hunk #%d succeeded at %d %s(offset %d %s).\n") %
500 500 (h.number, l+1, fuzzstr, offset, linestr))
501 501 return fuzzlen
502 502 self.printfile(True)
503 503 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
504 504 self.rej.append(h)
505 505 return -1
506 506
507 507 class hunk:
508 508 def __init__(self, desc, num, lr, context, create=False, remove=False):
509 509 self.number = num
510 510 self.desc = desc
511 511 self.hunk = [ desc ]
512 512 self.a = []
513 513 self.b = []
514 514 if context:
515 515 self.read_context_hunk(lr)
516 516 else:
517 517 self.read_unified_hunk(lr)
518 518 self.create = create
519 519 self.remove = remove and not create
520 520
521 521 def read_unified_hunk(self, lr):
522 522 m = unidesc.match(self.desc)
523 523 if not m:
524 524 raise PatchError(_("bad hunk #%d") % self.number)
525 525 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
526 526 if self.lena == None:
527 527 self.lena = 1
528 528 else:
529 529 self.lena = int(self.lena)
530 530 if self.lenb == None:
531 531 self.lenb = 1
532 532 else:
533 533 self.lenb = int(self.lenb)
534 534 self.starta = int(self.starta)
535 535 self.startb = int(self.startb)
536 536 diffhelpers.addlines(lr.fp, self.hunk, self.lena, self.lenb, self.a, self.b)
537 537 # if we hit eof before finishing out the hunk, the last line will
538 538 # be zero length. Lets try to fix it up.
539 539 while len(self.hunk[-1]) == 0:
540 540 del self.hunk[-1]
541 541 del self.a[-1]
542 542 del self.b[-1]
543 543 self.lena -= 1
544 544 self.lenb -= 1
545 545
546 546 def read_context_hunk(self, lr):
547 547 self.desc = lr.readline()
548 548 m = contextdesc.match(self.desc)
549 549 if not m:
550 550 raise PatchError(_("bad hunk #%d") % self.number)
551 551 foo, self.starta, foo2, aend, foo3 = m.groups()
552 552 self.starta = int(self.starta)
553 553 if aend == None:
554 554 aend = self.starta
555 555 self.lena = int(aend) - self.starta
556 556 if self.starta:
557 557 self.lena += 1
558 558 for x in xrange(self.lena):
559 559 l = lr.readline()
560 560 if l.startswith('---'):
561 561 lr.push(l)
562 562 break
563 563 s = l[2:]
564 564 if l.startswith('- ') or l.startswith('! '):
565 565 u = '-' + s
566 566 elif l.startswith(' '):
567 567 u = ' ' + s
568 568 else:
569 569 raise PatchError(_("bad hunk #%d old text line %d") %
570 570 (self.number, x))
571 571 self.a.append(u)
572 572 self.hunk.append(u)
573 573
574 574 l = lr.readline()
575 575 if l.startswith('\ '):
576 576 s = self.a[-1][:-1]
577 577 self.a[-1] = s
578 578 self.hunk[-1] = s
579 579 l = lr.readline()
580 580 m = contextdesc.match(l)
581 581 if not m:
582 582 raise PatchError(_("bad hunk #%d") % self.number)
583 583 foo, self.startb, foo2, bend, foo3 = m.groups()
584 584 self.startb = int(self.startb)
585 585 if bend == None:
586 586 bend = self.startb
587 587 self.lenb = int(bend) - self.startb
588 588 if self.startb:
589 589 self.lenb += 1
590 590 hunki = 1
591 591 for x in xrange(self.lenb):
592 592 l = lr.readline()
593 593 if l.startswith('\ '):
594 594 s = self.b[-1][:-1]
595 595 self.b[-1] = s
596 596 self.hunk[hunki-1] = s
597 597 continue
598 598 if not l:
599 599 lr.push(l)
600 600 break
601 601 s = l[2:]
602 602 if l.startswith('+ ') or l.startswith('! '):
603 603 u = '+' + s
604 604 elif l.startswith(' '):
605 605 u = ' ' + s
606 606 elif len(self.b) == 0:
607 607 # this can happen when the hunk does not add any lines
608 608 lr.push(l)
609 609 break
610 610 else:
611 611 raise PatchError(_("bad hunk #%d old text line %d") %
612 612 (self.number, x))
613 613 self.b.append(s)
614 614 while True:
615 615 if hunki >= len(self.hunk):
616 616 h = ""
617 617 else:
618 618 h = self.hunk[hunki]
619 619 hunki += 1
620 620 if h == u:
621 621 break
622 622 elif h.startswith('-'):
623 623 continue
624 624 else:
625 625 self.hunk.insert(hunki-1, u)
626 626 break
627 627
628 628 if not self.a:
629 629 # this happens when lines were only added to the hunk
630 630 for x in self.hunk:
631 631 if x.startswith('-') or x.startswith(' '):
632 632 self.a.append(x)
633 633 if not self.b:
634 634 # this happens when lines were only deleted from the hunk
635 635 for x in self.hunk:
636 636 if x.startswith('+') or x.startswith(' '):
637 637 self.b.append(x[1:])
638 638 # @@ -start,len +start,len @@
639 639 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
640 640 self.startb, self.lenb)
641 641 self.hunk[0] = self.desc
642 642
643 643 def reverse(self):
644 644 self.create, self.remove = self.remove, self.create
645 645 origlena = self.lena
646 646 origstarta = self.starta
647 647 self.lena = self.lenb
648 648 self.starta = self.startb
649 649 self.lenb = origlena
650 650 self.startb = origstarta
651 651 self.a = []
652 652 self.b = []
653 653 # self.hunk[0] is the @@ description
654 654 for x in xrange(1, len(self.hunk)):
655 655 o = self.hunk[x]
656 656 if o.startswith('-'):
657 657 n = '+' + o[1:]
658 658 self.b.append(o[1:])
659 659 elif o.startswith('+'):
660 660 n = '-' + o[1:]
661 661 self.a.append(n)
662 662 else:
663 663 n = o
664 664 self.b.append(o[1:])
665 665 self.a.append(o)
666 666 self.hunk[x] = o
667 667
668 668 def fix_newline(self):
669 669 diffhelpers.fix_newline(self.hunk, self.a, self.b)
670 670
671 671 def complete(self):
672 672 return len(self.a) == self.lena and len(self.b) == self.lenb
673 673
674 674 def createfile(self):
675 675 return self.starta == 0 and self.lena == 0 and self.create
676 676
677 677 def rmfile(self):
678 678 return self.startb == 0 and self.lenb == 0 and self.remove
679 679
680 680 def fuzzit(self, l, fuzz, toponly):
681 681 # this removes context lines from the top and bottom of list 'l'. It
682 682 # checks the hunk to make sure only context lines are removed, and then
683 683 # returns a new shortened list of lines.
684 684 fuzz = min(fuzz, len(l)-1)
685 685 if fuzz:
686 686 top = 0
687 687 bot = 0
688 688 hlen = len(self.hunk)
689 689 for x in xrange(hlen-1):
690 690 # the hunk starts with the @@ line, so use x+1
691 691 if self.hunk[x+1][0] == ' ':
692 692 top += 1
693 693 else:
694 694 break
695 695 if not toponly:
696 696 for x in xrange(hlen-1):
697 697 if self.hunk[hlen-bot-1][0] == ' ':
698 698 bot += 1
699 699 else:
700 700 break
701 701
702 702 # top and bot now count context in the hunk
703 703 # adjust them if either one is short
704 704 context = max(top, bot, 3)
705 705 if bot < context:
706 706 bot = max(0, fuzz - (context - bot))
707 707 else:
708 708 bot = min(fuzz, bot)
709 709 if top < context:
710 710 top = max(0, fuzz - (context - top))
711 711 else:
712 712 top = min(fuzz, top)
713 713
714 714 return l[top:len(l)-bot]
715 715 return l
716 716
717 717 def old(self, fuzz=0, toponly=False):
718 718 return self.fuzzit(self.a, fuzz, toponly)
719 719
720 720 def newctrl(self):
721 721 res = []
722 722 for x in self.hunk:
723 723 c = x[0]
724 724 if c == ' ' or c == '+':
725 725 res.append(x)
726 726 return res
727 727
728 728 def new(self, fuzz=0, toponly=False):
729 729 return self.fuzzit(self.b, fuzz, toponly)
730 730
731 731 class binhunk:
732 732 'A binary patch file. Only understands literals so far.'
733 733 def __init__(self, gitpatch):
734 734 self.gitpatch = gitpatch
735 735 self.text = None
736 736 self.hunk = ['GIT binary patch\n']
737 737
738 738 def createfile(self):
739 739 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
740 740
741 741 def rmfile(self):
742 742 return self.gitpatch.op == 'DELETE'
743 743
744 744 def complete(self):
745 745 return self.text is not None
746 746
747 747 def new(self):
748 748 return [self.text]
749 749
750 750 def extract(self, fp):
751 751 line = fp.readline()
752 752 self.hunk.append(line)
753 753 while line and not line.startswith('literal '):
754 754 line = fp.readline()
755 755 self.hunk.append(line)
756 756 if not line:
757 757 raise PatchError(_('could not extract binary patch'))
758 758 size = int(line[8:].rstrip())
759 759 dec = []
760 760 line = fp.readline()
761 761 self.hunk.append(line)
762 762 while len(line) > 1:
763 763 l = line[0]
764 764 if l <= 'Z' and l >= 'A':
765 765 l = ord(l) - ord('A') + 1
766 766 else:
767 767 l = ord(l) - ord('a') + 27
768 768 dec.append(base85.b85decode(line[1:-1])[:l])
769 769 line = fp.readline()
770 770 self.hunk.append(line)
771 771 text = zlib.decompress(''.join(dec))
772 772 if len(text) != size:
773 773 raise PatchError(_('binary patch is %d bytes, not %d') %
774 774 len(text), size)
775 775 self.text = text
776 776
777 777 def parsefilename(str):
778 778 # --- filename \t|space stuff
779 779 s = str[4:].rstrip('\r\n')
780 780 i = s.find('\t')
781 781 if i < 0:
782 782 i = s.find(' ')
783 783 if i < 0:
784 784 return s
785 785 return s[:i]
786 786
787 787 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
788 788 def pathstrip(path, count=1):
789 789 pathlen = len(path)
790 790 i = 0
791 791 if count == 0:
792 792 return '', path.rstrip()
793 793 while count > 0:
794 794 i = path.find('/', i)
795 795 if i == -1:
796 796 raise PatchError(_("unable to strip away %d dirs from %s") %
797 797 (count, path))
798 798 i += 1
799 799 # consume '//' in the path
800 800 while i < pathlen - 1 and path[i] == '/':
801 801 i += 1
802 802 count -= 1
803 803 return path[:i].lstrip(), path[i:].rstrip()
804 804
805 805 nulla = afile_orig == "/dev/null"
806 806 nullb = bfile_orig == "/dev/null"
807 807 abase, afile = pathstrip(afile_orig, strip)
808 808 gooda = not nulla and os.path.exists(afile)
809 809 bbase, bfile = pathstrip(bfile_orig, strip)
810 810 if afile == bfile:
811 811 goodb = gooda
812 812 else:
813 813 goodb = not nullb and os.path.exists(bfile)
814 814 createfunc = hunk.createfile
815 815 if reverse:
816 816 createfunc = hunk.rmfile
817 817 missing = not goodb and not gooda and not createfunc()
818 818 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
819 819 # diff is between a file and its backup. In this case, the original
820 820 # file should be patched (see original mpatch code).
821 821 isbackup = (abase == bbase and bfile.startswith(afile))
822 822 fname = None
823 823 if not missing:
824 824 if gooda and goodb:
825 825 fname = isbackup and afile or bfile
826 826 elif gooda:
827 827 fname = afile
828 828
829 829 if not fname:
830 830 if not nullb:
831 831 fname = isbackup and afile or bfile
832 832 elif not nulla:
833 833 fname = afile
834 834 else:
835 835 raise PatchError(_("undefined source and destination files"))
836 836
837 837 return fname, missing
838 838
839 839 class linereader:
840 840 # simple class to allow pushing lines back into the input stream
841 841 def __init__(self, fp):
842 842 self.fp = fp
843 843 self.buf = []
844 844
845 845 def push(self, line):
846 846 self.buf.append(line)
847 847
848 848 def readline(self):
849 849 if self.buf:
850 850 l = self.buf[0]
851 851 del self.buf[0]
852 852 return l
853 853 return self.fp.readline()
854 854
855 855 def iterhunks(ui, fp, sourcefile=None):
856 856 """Read a patch and yield the following events:
857 857 - ("file", afile, bfile, firsthunk): select a new target file.
858 858 - ("hunk", hunk): a new hunk is ready to be applied, follows a
859 859 "file" event.
860 860 - ("git", gitchanges): current diff is in git format, gitchanges
861 861 maps filenames to gitpatch records. Unique event.
862 862 """
863 863
864 864 def scangitpatch(fp, firstline):
865 865 '''git patches can modify a file, then copy that file to
866 866 a new file, but expect the source to be the unmodified form.
867 867 So we scan the patch looking for that case so we can do
868 868 the copies ahead of time.'''
869 869
870 870 pos = 0
871 871 try:
872 872 pos = fp.tell()
873 873 except IOError:
874 874 fp = cStringIO.StringIO(fp.read())
875 875
876 876 (dopatch, gitpatches) = readgitpatch(fp, firstline)
877 877 fp.seek(pos)
878 878
879 879 return fp, dopatch, gitpatches
880 880
881 881 changed = {}
882 882 current_hunk = None
883 883 afile = ""
884 884 bfile = ""
885 885 state = None
886 886 hunknum = 0
887 887 emitfile = False
888 888
889 889 git = False
890 890 gitre = re.compile('diff --git (a/.*) (b/.*)')
891 891
892 892 # our states
893 893 BFILE = 1
894 894 context = None
895 895 lr = linereader(fp)
896 896 dopatch = True
897 897 # gitworkdone is True if a git operation (copy, rename, ...) was
898 898 # performed already for the current file. Useful when the file
899 899 # section may have no hunk.
900 900 gitworkdone = False
901 901
902 902 while True:
903 903 newfile = False
904 904 x = lr.readline()
905 905 if not x:
906 906 break
907 907 if current_hunk:
908 908 if x.startswith('\ '):
909 909 current_hunk.fix_newline()
910 910 yield 'hunk', current_hunk
911 911 current_hunk = None
912 912 gitworkdone = False
913 913 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
914 914 ((context or context == None) and x.startswith('***************')))):
915 915 try:
916 916 if context == None and x.startswith('***************'):
917 917 context = True
918 918 gpatch = changed.get(bfile[2:], (None, None))[1]
919 919 create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
920 920 remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
921 921 current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
922 922 except PatchError, err:
923 923 ui.debug(err)
924 924 current_hunk = None
925 925 continue
926 926 hunknum += 1
927 927 if emitfile:
928 928 emitfile = False
929 929 yield 'file', (afile, bfile, current_hunk)
930 930 elif state == BFILE and x.startswith('GIT binary patch'):
931 931 current_hunk = binhunk(changed[bfile[2:]][1])
932 932 hunknum += 1
933 933 if emitfile:
934 934 emitfile = False
935 935 yield 'file', (afile, bfile, current_hunk)
936 936 current_hunk.extract(fp)
937 937 elif x.startswith('diff --git'):
938 938 # check for git diff, scanning the whole patch file if needed
939 939 m = gitre.match(x)
940 940 if m:
941 941 afile, bfile = m.group(1, 2)
942 942 if not git:
943 943 git = True
944 944 fp, dopatch, gitpatches = scangitpatch(fp, x)
945 945 yield 'git', gitpatches
946 946 for gp in gitpatches:
947 947 changed[gp.path] = (gp.op, gp)
948 948 # else error?
949 949 # copy/rename + modify should modify target, not source
950 950 gitop = changed.get(bfile[2:], (None, None))[0]
951 951 if gitop in ('COPY', 'DELETE', 'RENAME'):
952 952 afile = bfile
953 953 gitworkdone = True
954 954 newfile = True
955 955 elif x.startswith('---'):
956 956 # check for a unified diff
957 957 l2 = lr.readline()
958 958 if not l2.startswith('+++'):
959 959 lr.push(l2)
960 960 continue
961 961 newfile = True
962 962 context = False
963 963 afile = parsefilename(x)
964 964 bfile = parsefilename(l2)
965 965 elif x.startswith('***'):
966 966 # check for a context diff
967 967 l2 = lr.readline()
968 968 if not l2.startswith('---'):
969 969 lr.push(l2)
970 970 continue
971 971 l3 = lr.readline()
972 972 lr.push(l3)
973 973 if not l3.startswith("***************"):
974 974 lr.push(l2)
975 975 continue
976 976 newfile = True
977 977 context = True
978 978 afile = parsefilename(x)
979 979 bfile = parsefilename(l2)
980 980
981 981 if newfile:
982 982 emitfile = True
983 983 state = BFILE
984 984 hunknum = 0
985 985 if current_hunk:
986 986 if current_hunk.complete():
987 987 yield 'hunk', current_hunk
988 988 else:
989 989 raise PatchError(_("malformed patch %s %s") % (afile,
990 990 current_hunk.desc))
991 991
992 992 if hunknum == 0 and dopatch and not gitworkdone:
993 993 raise NoHunks
994 994
995 995 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False,
996 996 rejmerge=None, updatedir=None):
997 997 """reads a patch from fp and tries to apply it. The dict 'changed' is
998 998 filled in with all of the filenames changed by the patch. Returns 0
999 999 for a clean patch, -1 if any rejects were found and 1 if there was
1000 1000 any fuzz."""
1001 1001
1002 1002 rejects = 0
1003 1003 err = 0
1004 1004 current_file = None
1005 1005 gitpatches = None
1006 1006
1007 1007 def closefile():
1008 1008 if not current_file:
1009 1009 return 0
1010 1010 current_file.close()
1011 1011 if rejmerge:
1012 1012 rejmerge(current_file)
1013 1013 return len(current_file.rej)
1014 1014
1015 1015 for state, values in iterhunks(ui, fp, sourcefile):
1016 1016 if state == 'hunk':
1017 1017 if not current_file:
1018 1018 continue
1019 1019 current_hunk = values
1020 1020 ret = current_file.apply(current_hunk, reverse)
1021 1021 if ret >= 0:
1022 1022 changed.setdefault(current_file.fname, (None, None))
1023 1023 if ret > 0:
1024 1024 err = 1
1025 1025 elif state == 'file':
1026 1026 rejects += closefile()
1027 1027 afile, bfile, first_hunk = values
1028 1028 try:
1029 1029 if sourcefile:
1030 1030 current_file = patchfile(ui, sourcefile)
1031 1031 else:
1032 1032 current_file, missing = selectfile(afile, bfile, first_hunk,
1033 1033 strip, reverse)
1034 1034 current_file = patchfile(ui, current_file, missing)
1035 1035 except PatchError, err:
1036 1036 ui.warn(str(err) + '\n')
1037 1037 current_file, current_hunk = None, None
1038 1038 rejects += 1
1039 1039 continue
1040 1040 elif state == 'git':
1041 1041 gitpatches = values
1042 1042 for gp in gitpatches:
1043 1043 if gp.op in ('COPY', 'RENAME'):
1044 1044 copyfile(gp.oldpath, gp.path)
1045 1045 changed[gp.path] = (gp.op, gp)
1046 1046 else:
1047 1047 raise util.Abort(_('unsupported parser state: %s') % state)
1048 1048
1049 1049 rejects += closefile()
1050 1050
1051 1051 if updatedir and gitpatches:
1052 1052 updatedir(gitpatches)
1053 1053 if rejects:
1054 1054 return -1
1055 1055 return err
1056 1056
def diffopts(ui, opts={}, untrusted=False):
    """Build an mdiff.diffopts from command options, falling back to
    the [diff] section of the configuration for unset values."""
    def fromconfig(key, name=None, getter=ui.configbool):
        # a truthy command-line option wins over the config file
        val = opts.get(key)
        if val:
            return val
        return getter('diff', name or key, None, untrusted=untrusted)
    return mdiff.diffopts(
        text=opts.get('text'),
        git=fromconfig('git'),
        nodates=fromconfig('nodates'),
        showfunc=fromconfig('show_function', 'showfunc'),
        ignorews=fromconfig('ignore_all_space', 'ignorews'),
        ignorewsamount=fromconfig('ignore_space_change', 'ignorewsamount'),
        ignoreblanklines=fromconfig('ignore_blank_lines', 'ignoreblanklines'),
        context=fromconfig('unified', getter=ui.config))
1070 1070
def updatedir(ui, repo, patches):
    '''Update dirstate after patch application according to metadata'''
    if not patches:
        return
    copies = []
    removes = {}
    cfiles = patches.keys()
    cwd = repo.getcwd()
    if cwd:
        # addremove wants paths relative to the current directory
        cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
    for f in patches:
        ctype, gp = patches[f]
        if ctype == 'RENAME':
            copies.append((gp.oldpath, gp.path))
            removes[gp.oldpath] = 1
        elif ctype == 'COPY':
            copies.append((gp.oldpath, gp.path))
        elif ctype == 'DELETE':
            removes[gp.path] = 1
    # record copies/renames first (while sources still exist), then
    # drop rename sources and deleted files from the dirstate
    for src, dst in copies:
        repo.copy(src, dst)
    removes = removes.keys()
    if removes:
        removes.sort()
        repo.remove(removes, True)
    for f in patches:
        ctype, gp = patches[f]
        if gp and gp.mode:
            # translate git mode bits to hg flags: 'x' = executable,
            # 'l' = symlink (git modes 100755 / 120000)
            flags = ''
            if gp.mode & 0100:
                flags = 'x'
            elif gp.mode & 020000:
                flags = 'l'
            dst = os.path.join(repo.root, gp.path)
            # patch won't create empty files
            if ctype == 'ADD' and not os.path.exists(dst):
                repo.wwrite(gp.path, '', flags)
            else:
                util.set_flags(dst, flags)
    cmdutil.addremove(repo, cfiles)
    files = patches.keys()
    files.extend([r for r in removes if r not in files])
    files.sort()

    return files
1116 1116
def b85diff(to, tn):
    '''print base85-encoded binary diff'''
    def gitindex(text):
        # SHA-1 of the git blob for text; '0'*40 stands for "no file"
        if not text:
            return '0' * 40
        l = len(text)
        s = util.sha1('blob %d\0' % l)
        s.update(text)
        return s.hexdigest()

    def fmtline(line):
        # git encodes each chunk's length as one letter:
        # 'A'-'Z' for 1-26 bytes, 'a'-'z' for 27-52 bytes
        l = len(line)
        if l <= 26:
            l = chr(ord('A') + l - 1)
        else:
            l = chr(l - 26 + ord('a') - 1)
        return '%c%s\n' % (l, base85.b85encode(line, True))

    def chunk(text, csize=52):
        # yield text in csize-byte slices
        l = len(text)
        i = 0
        while i < l:
            yield text[i:i+csize]
            i += csize

    tohash = gitindex(to)
    tnhash = gitindex(tn)
    if tohash == tnhash:
        # identical blobs: nothing to emit
        return ""

    # TODO: deltas
    ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
           (tohash, tnhash, len(tn))]
    for l in chunk(zlib.compress(tn)):
        ret.append(fmtline(l))
    ret.append('\n')
    return ''.join(ret)
1154 1154
def diff(repo, node1=None, node2=None, match=None,
         fp=None, changes=None, opts=None):
    '''print diff of changes to files between two nodes, or node and
    working directory.

    if node1 is None, use first dirstate parent instead.
    if node2 is None, compare node1 with working directory.'''

    if not match:
        match = cmdutil.matchall(repo)

    if opts is None:
        opts = mdiff.defaultopts
    if fp is None:
        fp = repo.ui

    if not node1:
        node1 = repo.dirstate.parents()[0]

    # cache filelogs so each file's log is opened only once
    flcache = {}
    def getfilectx(f, ctx):
        flctx = ctx.filectx(f, filelog=flcache.get(f))
        if f not in flcache:
            flcache[f] = flctx._filelog
        return flctx

    # reading the data for node1 early allows it to play nicely
    # with repo.status and the revlog cache.
    ctx1 = repo.changectx(node1)
    # force manifest reading
    man1 = ctx1.manifest()
    date1 = util.datestr(ctx1.date())

    if not changes:
        changes = repo.status(node1, node2, match=match)[:5]
    modified, added, removed, deleted, unknown = changes

    if not modified and not added and not removed:
        return

    ctx2 = repo.changectx(node2)

    if repo.ui.quiet:
        r = None
    else:
        hexfunc = repo.ui.debugflag and hex or short
        r = [hexfunc(node) for node in [node1, node2] if node]

    if opts.git:
        # build a bidirectional copy map for rename/copy detection
        copy, diverge = copies.copies(repo, ctx1, ctx2, repo.changectx(nullid))
        for k, v in copy.items():
            copy[v] = k

    all = modified + added + removed
    all.sort()
    gone = {}

    # map hg flag -> git file mode
    gitmode = {'l': '120000', 'x': '100755', '': '100644'}

    for f in all:
        to = None
        tn = None
        dodiff = True
        header = []
        if f in man1:
            to = getfilectx(f, ctx1).data()
        if f not in removed:
            tn = getfilectx(f, ctx2).data()
        a, b = f, f
        if opts.git:
            def addmodehdr(header, omode, nmode):
                # emit old/new mode lines only when the mode changed
                if omode != nmode:
                    header.append('old mode %s\n' % omode)
                    header.append('new mode %s\n' % nmode)

            if f in added:
                mode = gitmode[ctx2.flags(f)]
                if f in copy:
                    a = copy[f]
                    omode = gitmode[man1.flags(a)]
                    addmodehdr(header, omode, mode)
                    # a rename is a copy whose source also disappeared;
                    # 'gone' makes sure we report the rename only once
                    if a in removed and a not in gone:
                        op = 'rename'
                        gone[a] = 1
                    else:
                        op = 'copy'
                    header.append('%s from %s\n' % (op, a))
                    header.append('%s to %s\n' % (op, f))
                    to = getfilectx(a, ctx1).data()
                else:
                    header.append('new file mode %s\n' % mode)
                if util.binary(tn):
                    dodiff = 'binary'
            elif f in removed:
                # have we already reported a copy above?
                if f in copy and copy[f] in added and copy[copy[f]] == f:
                    dodiff = False
                else:
                    header.append('deleted file mode %s\n' %
                                  gitmode[man1.flags(f)])
            else:
                omode = gitmode[man1.flags(f)]
                nmode = gitmode[ctx2.flags(f)]
                addmodehdr(header, omode, nmode)
                if util.binary(to) or util.binary(tn):
                    dodiff = 'binary'
            # git diffs never carry revision annotations
            r = None
            header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
        if dodiff:
            if dodiff == 'binary':
                text = b85diff(to, tn)
            else:
                text = mdiff.unidiff(to, date1,
                                     # ctx2 date may be dynamic
                                     tn, util.datestr(ctx2.date()),
                                     a, b, r, opts=opts)
            if text or len(header) > 1:
                fp.write(''.join(header))
                fp.write(text)
def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
           opts=None):
    '''export changesets as hg patches.'''

    total = len(revs)
    revwidth = max([len(str(rev)) for rev in revs])

    def single(rev, seqno, fp):
        # write one changeset, either to the given fp or to a file
        # named by expanding 'template'
        ctx = repo.changectx(rev)
        node = ctx.node()
        parents = [p.node() for p in ctx.parents() if p]
        branch = ctx.branch()
        if switch_parent:
            # diff against the second parent instead of the first
            parents.reverse()
        prev = (parents and parents[0]) or nullid

        if not fp:
            fp = cmdutil.make_file(repo, template, node, total=total,
                                   seqno=seqno, revwidth=revwidth)
        if fp != sys.stdout and hasattr(fp, 'name'):
            repo.ui.note("%s\n" % fp.name)

        # emit the "# HG changeset patch" metadata header
        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % ctx.user())
        fp.write("# Date %d %d\n" % ctx.date())
        if branch and (branch != 'default'):
            fp.write("# Branch %s\n" % branch)
        fp.write("# Node ID %s\n" % hex(node))
        fp.write("# Parent %s\n" % hex(prev))
        if len(parents) > 1:
            fp.write("# Parent %s\n" % hex(parents[1]))
        fp.write(ctx.description().rstrip())
        fp.write("\n\n")

        diff(repo, prev, node, fp=fp, opts=opts)
        if fp not in (sys.stdout, repo.ui):
            fp.close()

    for seqno, rev in enumerate(revs):
        single(rev, seqno+1, fp)
1325 1315
def diffstat(patchlines):
    """Feed patchlines to the external 'diffstat' tool and return its
    summary (summary line moved to the front), or None if diffstat is
    unavailable or fails."""
    if not util.find_exe('diffstat'):
        return
    fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
    try:
        p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
        for line in patchlines:
            p.tochild.write(line + "\n")
        p.tochild.close()
        if p.wait():
            # diffstat failed; close the otherwise-unwrapped fd so it
            # does not leak
            os.close(fd)
            return
        fp = os.fdopen(fd, 'r')
        stat = [line.lstrip() for line in fp]
        fp.close()
        # diffstat prints the totals last; show them first instead
        last = stat.pop()
        stat.insert(0, last)
        return ''.join(stat)
    finally:
        try:
            os.unlink(name)
        except OSError:
            pass
@@ -1,1903 +1,1891 b''
1 1 """
2 2 util.py - Mercurial utility functions and platform specfic implementations
3 3
4 4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7 7
8 8 This software may be used and distributed according to the terms
9 9 of the GNU General Public License, incorporated herein by reference.
10 10
11 11 This contains helper routines that are independent of the SCM core and hide
12 12 platform-specific details from the core.
13 13 """
14 14
15 15 from i18n import _
16 16 import cStringIO, errno, getpass, re, shutil, sys, tempfile
17 17 import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil
18 18 import imp, urlparse
19 19
# Python compatibility

# Python 2.3 has no builtin set/frozenset; fall back to the sets module
try:
    set = set
    frozenset = frozenset
except NameError:
    from sets import Set as set, ImmutableSet as frozenset
27 27
_md5 = None
def md5(s):
    """Return an md5 hash object over s.

    The implementation (hashlib on modern Pythons, the legacy md5
    module otherwise) is resolved once, on first use.
    """
    global _md5
    if _md5 is None:
        try:
            from hashlib import md5 as _impl
        except ImportError:
            from md5 import md5 as _impl
        _md5 = _impl
    return _md5(s)
39 39
_sha1 = None
def sha1(s):
    """Return a sha1 hash object over s.

    The implementation (hashlib on modern Pythons, the legacy sha
    module otherwise) is resolved once, on first use.
    """
    global _sha1
    if _sha1 is None:
        try:
            from hashlib import sha1 as _impl
        except ImportError:
            from sha import sha as _impl
        _sha1 = _impl
    return _sha1(s)
51 51
# figure out the local character encoding, preferring $HGENCODING
try:
    _encoding = os.environ.get("HGENCODING")
    if sys.platform == 'darwin' and not _encoding:
        # On darwin, getpreferredencoding ignores the locale environment and
        # always returns mac-roman. We override this if the environment is
        # not C (has been customized by the user).
        locale.setlocale(locale.LC_CTYPE, '')
        _encoding = locale.getlocale()[1]
    if not _encoding:
        _encoding = locale.getpreferredencoding() or 'ascii'
except locale.Error:
    _encoding = 'ascii'
# how undecodable bytes are treated: 'strict', 'replace' or 'ignore'
_encodingmode = os.environ.get("HGENCODINGMODE", "strict")
# encoding tried by tolocal() when strict UTF-8 decoding fails
_fallbackencoding = 'ISO-8859-1'
66 66
67 67 def tolocal(s):
68 68 """
69 69 Convert a string from internal UTF-8 to local encoding
70 70
71 71 All internal strings should be UTF-8 but some repos before the
72 72 implementation of locale support may contain latin1 or possibly
73 73 other character sets. We attempt to decode everything strictly
74 74 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
75 75 replace unknown characters.
76 76 """
77 77 for e in ('UTF-8', _fallbackencoding):
78 78 try:
79 79 u = s.decode(e) # attempt strict decoding
80 80 return u.encode(_encoding, "replace")
81 81 except LookupError, k:
82 82 raise Abort(_("%s, please check your locale settings") % k)
83 83 except UnicodeDecodeError:
84 84 pass
85 85 u = s.decode("utf-8", "replace") # last ditch
86 86 return u.encode(_encoding, "replace")
87 87
88 88 def fromlocal(s):
89 89 """
90 90 Convert a string from the local character encoding to UTF-8
91 91
92 92 We attempt to decode strings using the encoding mode set by
93 93 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
94 94 characters will cause an error message. Other modes include
95 95 'replace', which replaces unknown characters with a special
96 96 Unicode character, and 'ignore', which drops the character.
97 97 """
98 98 try:
99 99 return s.decode(_encoding, _encodingmode).encode("utf-8")
100 100 except UnicodeDecodeError, inst:
101 101 sub = s[max(0, inst.start-10):inst.start+10]
102 102 raise Abort("decoding near '%s': %s!" % (sub, inst))
103 103 except LookupError, k:
104 104 raise Abort(_("%s, please check your locale settings") % k)
105 105
def locallen(s):
    """Find the length in characters of a local string"""
    u = s.decode(_encoding, "replace")
    return len(u)
109 109
# used by parsedate: formats tried, in order, when parsing a user-supplied
# date string
defaultdateformats = (
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S',  # GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    '%I:%M:%SP',
    '%H:%M',
    '%I:%M%p',
    )

# additional, coarser formats accepted when matching date ranges
extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )

class SignalInterrupt(Exception):
    """Exception raised on SIGTERM and SIGHUP."""
147 147
# differences from SafeConfigParser:
# - case-sensitive keys
# - allows values that are not strings (this means that you may not
#   be able to save the configuration to a file)
class configparser(ConfigParser.SafeConfigParser):
    def optionxform(self, optionstr):
        # keep option names case-sensitive (the base class lowercases)
        return optionstr

    def set(self, section, option, value):
        # bypass SafeConfigParser.set's strings-only type check
        return ConfigParser.ConfigParser.set(self, section, option, value)

    def _interpolate(self, section, option, rawval, vars):
        # only interpolate string values; pass other types through
        if not isinstance(rawval, basestring):
            return rawval
        return ConfigParser.SafeConfigParser._interpolate(self, section,
                                                          option, rawval, vars)
164 164
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    memo = {}
    if func.func_code.co_argcount == 1:
        # single-argument fast path: avoids packing/unpacking an
        # args tuple on every call
        def cached(arg):
            if arg not in memo:
                memo[arg] = func(arg)
            return memo[arg]
    else:
        def cached(*args):
            if args not in memo:
                memo[args] = func(*args)
            return memo[args]

    return cached
183 183
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    (pin, pout) = os.popen2(cmd, 'b')
    def writer():
        # feed the input on a separate thread to avoid deadlocking on
        # full pipe buffers
        try:
            pin.write(s)
            pin.close()
        except IOError, inst:
            # EPIPE only means the command stopped reading early
            if inst.errno != errno.EPIPE:
                raise

    # we should use select instead on UNIX, but this will work on most
    # systems, including Windows
    w = threading.Thread(target=writer)
    w.start()
    f = pout.read()
    pout.close()
    w.join()
    return f
203 203
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if sys.platform == 'OpenVMS' and code & 1:
            # on OpenVMS an odd status means success
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explain_exit(code)))
        fp = open(outname, 'rb')
        try:
            return fp.read()
        finally:
            fp.close()
    finally:
        # best-effort cleanup of the temporary files; only swallow
        # filesystem errors, not arbitrary exceptions
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass
232 232
# maps a filter-spec prefix to the function implementing it
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }

def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    for name, fn in filtertable.iteritems():
        if cmd.startswith(name):
            return fn(s, cmd[len(name):].lstrip())
    # no recognized prefix: default to a plain pipe
    return pipefilter(s, cmd)
244 244
def binary(s):
    """return true if a string is binary data"""
    # a NUL byte anywhere in the string marks it as binary
    return bool(s) and '\0' in s
250 250
def unique(g):
    """return the uniq elements of iterable g"""
    seen = {}
    for item in g:
        seen[item] = None
    return seen.keys()
254 254
class Abort(Exception):
    """Raised if a command needs to print an error and exit."""

class UnexpectedOutput(Abort):
    """Raised to print an error with part of output and exit."""

# trivial predicates, used as default match functions
def always(fn): return True
def never(fn): return False
263 263
def expand_glob(pats):
    '''On Windows, expand the implicit globs in a list of patterns'''
    if os.name != 'nt':
        # other shells expand globs before we ever see them
        return list(pats)
    expanded = []
    for p in pats:
        kind, name = patkind(p, None)
        if kind is None:
            matches = glob.glob(name)
            if matches:
                expanded.extend(matches)
                continue
        # couldn't (or shouldn't) expand: keep the pattern verbatim
        expanded.append(p)
    return expanded
279 279
def patkind(name, default):
    """Split a string into an optional pattern kind prefix and the
    actual pattern.

    Always returns a (kind, pattern) tuple; kind is `default` when no
    known prefix is present.  (Previously a prefixed name produced a
    list while the fallback produced a tuple.)
    """
    for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
        if name.startswith(prefix + ':'):
            kind, pat = name.split(':', 1)
            return kind, pat
    return default, name
286 286
def globre(pat, head='^', tail='$'):
    "convert a glob pattern into a regexp"
    i, n = 0, len(pat)
    res = ''
    group = 0
    # look at the next character without consuming it (False at end)
    def peek(): return i < n and pat[i]
    while i < n:
        c = pat[i]
        i = i+1
        if c == '*':
            if peek() == '*':
                # '**' matches across directory separators
                i += 1
                res += '.*'
            else:
                # '*' stops at a '/'
                res += '[^/]*'
        elif c == '?':
            res += '.'
        elif c == '[':
            # character class: find the closing ']' (a leading '!' or
            # ']' is part of the class, not a terminator)
            j = i
            if j < n and pat[j] in '!]':
                j += 1
            while j < n and pat[j] != ']':
                j += 1
            if j >= n:
                # unmatched '[' is taken literally
                res += '\\['
            else:
                stuff = pat[i:j].replace('\\','\\\\')
                i = j + 1
                if stuff[0] == '!':
                    # glob negation -> regex negation
                    stuff = '^' + stuff[1:]
                elif stuff[0] == '^':
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        elif c == '{':
            # '{a,b}' alternation becomes '(?:a|b)'
            group += 1
            res += '(?:'
        elif c == '}' and group:
            res += ')'
            group -= 1
        elif c == ',' and group:
            res += '|'
        elif c == '\\':
            # backslash escapes the next character, if any
            p = peek()
            if p:
                i += 1
                res += re.escape(p)
            else:
                res += re.escape(c)
        else:
            res += re.escape(c)
    return head + res + tail
338 338
# characters whose presence makes a string a glob rather than a literal
_globchars = {'[': 1, '{': 1, '*': 1, '?': 1}

def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1: return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            # different drives (windows): no relative path exists
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    # strip the common prefix of the two paths...
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    # ...then climb out of what remains of n1 and descend into n2
    return os.sep.join((['..'] * len(a)) + b) or '.'
365 365
def canonpath(root, cwd, myname):
    """return the canonical path of myname, given cwd and root"""
    if root == os.sep:
        rootsep = os.sep
    elif endswithsep(root):
        rootsep = root
    else:
        rootsep = root + os.sep
    name = myname
    if not os.path.isabs(name):
        name = os.path.join(root, cwd, name)
    name = os.path.normpath(name)
    audit_path = path_auditor(root)
    if name != rootsep and name.startswith(rootsep):
        # fast path: name is textually inside the repository
        name = name[len(rootsep):]
        audit_path(name)
        return pconvert(name)
    elif name == root:
        return ''
    else:
        # Determine whether `name' is in the hierarchy at or beneath `root',
        # by iterating name=dirname(name) until that causes no change (can't
        # check name == '/', because that doesn't work on windows). For each
        # `name', compare dev/inode numbers. If they match, the list `rel'
        # holds the reversed list of components making up the relative file
        # name we want.
        root_st = os.stat(root)
        rel = []
        while True:
            try:
                name_st = os.stat(name)
            except OSError:
                break
            if samestat(name_st, root_st):
                if not rel:
                    # name was actually the same as root (maybe a symlink)
                    return ''
                rel.reverse()
                name = os.path.join(*rel)
                audit_path(name)
                return pconvert(name)
            dirname, basename = os.path.split(name)
            rel.append(basename)
            if dirname == name:
                break
            name = dirname

    raise Abort('%s not under root' % myname)
414 414
def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
    """build a function to match a set of file patterns

    arguments:
    canonroot - the canonical root of the tree you're matching against
    cwd - the current working directory, if relevant
    names - patterns to find
    inc - patterns to include
    exc - patterns to exclude
    dflt_pat - if a pattern in names has no explicit type, assume this one
    src - where these patterns came from (e.g. .hgignore)

    a pattern is one of:
    'glob:<glob>' - a glob relative to cwd
    're:<regexp>' - a regular expression
    'path:<path>' - a path relative to canonroot
    'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
    'relpath:<path>' - a path relative to cwd
    'relre:<regexp>' - a regexp that doesn't have to match the start of a name
    '<something>' - one of the cases above, selected by the dflt_pat argument

    returns:
    a 3-tuple containing
    - list of roots (places where one should start a recursive walk of the fs);
      this often matches the explicit non-pattern names passed in, but also
      includes the initial part of glob: patterns that has no glob characters
    - a bool match(filename) function
    - a bool indicating if any patterns were passed in
    """

    # a common case: no patterns at all
    if not names and not inc and not exc:
        return [], always, False

    def contains_glob(name):
        # True if any glob metacharacter appears in name
        for c in name:
            if c in _globchars: return True
        return False

    def regex(kind, name, tail):
        '''convert a pattern into a regular expression'''
        if not name:
            return ''
        if kind == 're':
            return name
        elif kind == 'path':
            return '^' + re.escape(name) + '(?:/|$)'
        elif kind == 'relglob':
            return globre(name, '(?:|.*/)', tail)
        elif kind == 'relpath':
            return re.escape(name) + '(?:/|$)'
        elif kind == 'relre':
            if name.startswith('^'):
                return name
            return '.*' + name
        return globre(name, '', tail)

    def matchfn(pats, tail):
        """build a matching function from a set of patterns"""
        if not pats:
            return
        try:
            pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
            if len(pat) > 20000:
                raise OverflowError()
            return re.compile(pat).match
        except OverflowError:
            # We're using a Python with a tiny regex engine and we
            # made it explode, so we'll divide the pattern list in two
            # until it works
            l = len(pats)
            if l < 2:
                raise
            a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
            return lambda s: a(s) or b(s)
        except re.error:
            # recompile each pattern individually to find the culprit
            for k, p in pats:
                try:
                    re.compile('(?:%s)' % regex(k, p, tail))
                except re.error:
                    if src:
                        raise Abort("%s: invalid pattern (%s): %s" %
                                    (src, k, p))
                    else:
                        raise Abort("invalid pattern (%s): %s" % (k, p))
            raise Abort("invalid pattern")

    def globprefix(pat):
        '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
        root = []
        for p in pat.split('/'):
            if contains_glob(p): break
            root.append(p)
        return '/'.join(root) or '.'

    def normalizepats(names, default):
        # resolve each pattern's kind, canonicalize its path, and
        # collect the walk roots and whether any real pattern was seen
        pats = []
        roots = []
        anypats = False
        for kind, name in [patkind(p, default) for p in names]:
            if kind in ('glob', 'relpath'):
                name = canonpath(canonroot, cwd, name)
            elif kind in ('relglob', 'path'):
                name = normpath(name)

            pats.append((kind, name))

            if kind in ('glob', 're', 'relglob', 'relre'):
                anypats = True

            if kind == 'glob':
                root = globprefix(name)
                roots.append(root)
            elif kind in ('relpath', 'path'):
                roots.append(name or '.')
            elif kind == 'relglob':
                roots.append('.')
        return roots, pats, anypats

    roots, pats, anypats = normalizepats(names, dflt_pat)

    patmatch = matchfn(pats, '$') or always
    incmatch = always
    if inc:
        dummy, inckinds, dummy = normalizepats(inc, 'glob')
        incmatch = matchfn(inckinds, '(?:/|$)')
    excmatch = lambda fn: False
    if exc:
        dummy, exckinds, dummy = normalizepats(exc, 'glob')
        excmatch = matchfn(exckinds, '(?:/|$)')

    if not names and inc and not exc:
        # common case: hgignore patterns
        match = incmatch
    else:
        match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)

    return (roots, match, (inc or exc or anypats) and True)
553 553
# cached path of the 'hg' executable; resolved lazily by hgexecutable()
_hgexecutable = None

def main_is_frozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    if hasattr(sys, "frozen"):       # new py2exe
        return True
    if hasattr(sys, "importers"):    # old py2exe
        return True
    return imp.is_frozen("__main__") # tools/freeze
565 565
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        hg = os.environ.get('HG')
        if hg:
            set_hgexecutable(hg)
        elif main_is_frozen():
            # in a frozen build, the running binary *is* hg
            set_hgexecutable(sys.executable)
        else:
            set_hgexecutable(find_exe('hg', 'hg'))
    return _hgexecutable
580 580
def set_hgexecutable(path):
    """set location of the 'hg' executable"""
    # updates the module-level cache read by hgexecutable()
    global _hgexecutable
    _hgexecutable = path
585 585
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status. if ui object,
    print error message and return status, else raise onerr object as
    exception.'''
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val in (None, False):
            return '0'
        if val == True:
            return '1'
        return str(val)
    # remember the environment entries we are about to override
    oldenv = {}
    for k in environ:
        oldenv[k] = os.environ.get(k)
    if cwd is not None:
        oldcwd = os.getcwd()
    origcmd = cmd
    if os.name == 'nt':
        # cmd.exe needs the whole command quoted as one argument
        cmd = '"%s"' % cmd
    try:
        for k, v in environ.iteritems():
            os.environ[k] = py2shell(v)
        os.environ['HG'] = hgexecutable()
        if cwd is not None and oldcwd != cwd:
            os.chdir(cwd)
        rc = os.system(cmd)
        if sys.platform == 'OpenVMS' and rc & 1:
            # on OpenVMS an odd status means success
            rc = 0
        if rc and onerr:
            errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                                explain_exit(rc)[0])
            if errprefix:
                errmsg = '%s: %s' % (errprefix, errmsg)
            try:
                # a ui object prints the error; anything else is raised
                onerr.warn(errmsg + '\n')
            except AttributeError:
                raise onerr(errmsg)
        return rc
    finally:
        # restore the environment and working directory
        for k, v in oldenv.iteritems():
            if v is None:
                del os.environ[k]
            else:
                os.environ[k] = v
        if cwd is not None and oldcwd != cwd:
            os.chdir(oldcwd)
635 635
# os.path.lexists is not available on python2.3
def lexists(filename):
    "test whether a file with this name exists. does not follow symlinks"
    try:
        os.lstat(filename)
    except OSError:
        # narrowed from a bare except: a bare clause would also swallow
        # KeyboardInterrupt/SystemExit
        return False
    return True
644 644
def rename(src, dst):
    """forcibly rename a file"""
    try:
        os.rename(src, dst)
        return
    except OSError:  # FIXME: check err (EEXIST ?)
        pass
    # Windows refuses to rename over an existing file, and unlinking an
    # open file only schedules the delete.  Rename works immediately even
    # on open files, so: move dst aside onto a fresh temp name, unlink
    # that, then the target name is free for the real rename.
    targetdir = os.path.dirname(dst) or '.'
    fd, temp = tempfile.mkstemp(dir=targetdir)
    os.close(fd)
    os.unlink(temp)
    os.rename(dst, temp)
    os.unlink(temp)
    os.rename(src, dst)
662 662
def unlink(f):
    """unlink and remove the directory if it is empty"""
    os.unlink(f)
    parent = os.path.dirname(f)
    try:
        # prune directories left empty by the removal; removedirs climbs
        # upward until it hits a non-empty ancestor
        os.removedirs(parent)
    except OSError:
        # parent still has entries - leave it alone
        pass
671 671
672 672 def copyfile(src, dest):
673 673 "copy a file, preserving mode"
674 674 if os.path.islink(src):
675 675 try:
676 676 os.unlink(dest)
677 677 except:
678 678 pass
679 679 os.symlink(os.readlink(src), dest)
680 680 else:
681 681 try:
682 682 shutil.copyfile(src, dest)
683 683 shutil.copymode(src, dest)
684 684 except shutil.Error, inst:
685 685 raise Abort(str(inst))
686 686
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    # default: only attempt hardlinks when src and dst live on the
    # same device
    if hardlink is None:
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            # pass the decision down so st_dev is only probed once
            copyfiles(srcname, dstname, hardlink)
    else:
        if hardlink:
            try:
                os_link(src, dst)
            except (IOError, OSError):
                # hardlinking failed: fall back to a plain copy
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
709 709
class path_auditor(object):
    '''ensure that a filesystem path contains no banned components.
    the following properties of a path are checked:

    - under top-level .hg
    - starts at the root of a windows drive
    - contains ".."
    - traverses a symlink (e.g. a/symlink_here/b)
    - inside a nested repository'''

    def __init__(self, root):
        # caches of paths/directory-prefixes that already passed the audit
        self.audited = set()
        self.auditeddir = set()
        self.root = root

    def __call__(self, path):
        if path in self.audited:
            return
        normpath = os.path.normcase(path)
        parts = splitpath(normpath)
        # cheap static checks first: drive letters, top-level .hg, ".."
        if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
            or os.pardir in parts):
            raise Abort(_("path contains illegal component: %s") % path)
        def check(prefix):
            # reject prefix if it is a symlink or a nested repo root
            curpath = os.path.join(self.root, prefix)
            try:
                st = os.lstat(curpath)
            except OSError, err:
                # EINVAL can be raised as invalid path syntax under win32.
                # They must be ignored for patterns can be checked too.
                if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
                    raise
            else:
                if stat.S_ISLNK(st.st_mode):
                    raise Abort(_('path %r traverses symbolic link %r') %
                                (path, prefix))
                elif (stat.S_ISDIR(st.st_mode) and
                      os.path.isdir(os.path.join(curpath, '.hg'))):
                    raise Abort(_('path %r is inside repo %r') %
                                (path, prefix))
        # check every ancestor directory of path, longest first, stopping
        # early at prefixes already known to be good
        parts.pop()
        prefixes = []
        for n in range(len(parts)):
            prefix = os.sep.join(parts)
            if prefix in self.auditeddir:
                break
            check(prefix)
            prefixes.append(prefix)
            parts.pop()

        self.audited.add(path)
        # only add prefixes to the cache after checking everything: we don't
        # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
        self.auditeddir.update(prefixes)
764 764
765 765 def _makelock_file(info, pathname):
766 766 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
767 767 os.write(ld, info)
768 768 os.close(ld)
769 769
770 770 def _readlock_file(pathname):
771 771 return posixfile(pathname).read()
772 772
773 773 def nlinks(pathname):
774 774 """Return number of hardlinks for the given file."""
775 775 return os.lstat(pathname).st_nlink
776 776
if hasattr(os, 'link'):
    os_link = os.link
else:
    # no hardlink support on this platform: fail like a syscall would
    def os_link(src, dst):
        raise OSError(0, _("Hardlinks not supported"))
782 782
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        fileno = fp.fileno
    except AttributeError:
        # no fileno method: fall back to stat'ing by name
        return os.stat(fp.name)
    return os.fstat(fileno())
789 789
# alias for the builtin; the win32 section below rebinds this to
# posixfile_nt on NT
posixfile = file

def openhardlinks():
    '''return true if it is safe to hold open file handles to hardlinks'''
    # default answer; overridden in the NT section below
    return True
795 795
# optional callable installed by platform code (e.g. win32) when
# getpass.getuser cannot work
getuser_fallback = None

def getuser():
    '''return name of current user'''
    try:
        return getpass.getuser()
    except ImportError:
        # import of pwd will fail on windows - try fallback
        if getuser_fallback:
            return getuser_fallback()
        # raised if win32api not available
        raise Abort(_('user name not available - set USERNAME '
                      'environment variable'))
809 809
810 810 def username(uid=None):
811 811 """Return the name of the user with the given uid.
812 812
813 813 If uid is None, return the name of the current user."""
814 814 try:
815 815 import pwd
816 816 if uid is None:
817 817 uid = os.getuid()
818 818 try:
819 819 return pwd.getpwuid(uid)[0]
820 820 except KeyError:
821 821 return str(uid)
822 822 except ImportError:
823 823 return None
824 824
825 825 def groupname(gid=None):
826 826 """Return the name of the group with the given gid.
827 827
828 828 If gid is None, return the name of the current group."""
829 829 try:
830 830 import grp
831 831 if gid is None:
832 832 gid = os.getgid()
833 833 try:
834 834 return grp.getgrgid(gid)[0]
835 835 except KeyError:
836 836 return str(gid)
837 837 except ImportError:
838 838 return None
839 839
840 840 # File system features
841 841
def checkfolding(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    st1 = os.stat(path)
    dirname, base = os.path.split(path)
    # build the case-swapped twin of the final component
    swapped = os.path.join(dirname, base.upper())
    if swapped == path:
        swapped = os.path.join(dirname, base.lower())
    try:
        st2 = os.stat(swapped)
    except:
        # the swapped name does not resolve: case-sensitive
        return True
    # same inode -> case-insensitive filesystem
    return st1 != st2
861 861
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name is either relative to root, or it is an absolute path starting
    with root. Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    Returns None if the path does not exist.
    '''
    # If name is absolute, make it relative
    if name.lower().startswith(root.lower()):
        l = len(root)
        if name[l] == os.sep or name[l] == os.altsep:
            l = l + 1
        name = name[l:]

    if not os.path.exists(os.path.join(root, name)):
        return None

    seps = os.sep
    if os.altsep:
        seps = seps + os.altsep
    # Protect backslashes. This gets silly very quickly.
    # Fix: str.replace returns a new string; the old code discarded the
    # result, leaving '\' unescaped inside the regex character class on
    # Windows (where os.sep is '\').
    seps = seps.replace('\\', '\\\\')
    pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normcase(os.path.normpath(root))
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            result.append(sep)
            continue

        # directory listings are cached: this is the expensive part
        if dir not in _fspathcache:
            _fspathcache[dir] = os.listdir(dir)
        contents = _fspathcache[dir]

        lpart = part.lower()
        for n in contents:
            if n.lower() == lpart:
                result.append(n)
                break
        else:
            # Cannot happen, as the file exists!
            result.append(part)
        dir = os.path.join(dir, lpart)

    return ''.join(result)
908 908
def checkexec(path):
    """
    Check whether the given path is on a filesystem with UNIX-like exec flags

    Requires a directory (like /foo/.hg)

    Returns False if the mode change does not stick or probing fails.
    """

    # VFAT on some Linux versions can flip mode but it doesn't persist
    # a FS remount. Frequently we can detect it if files are created
    # with exec bit on.

    try:
        EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        fh, fn = tempfile.mkstemp("", "", path)
        try:
            os.close(fh)
            m = os.stat(fn).st_mode & 0777
            # probe: toggle the exec bits and check whether the change
            # is actually reflected by a subsequent stat
            new_file_has_exec = m & EXECFLAGS
            os.chmod(fn, m ^ EXECFLAGS)
            exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
        finally:
            os.unlink(fn)
    except (IOError, OSError):
        # we don't care, the user probably won't be able to commit anyway
        return False
    return not (new_file_has_exec or exec_flags_cannot_flip)
935 935
936 def execfunc(path, fallback):
937 '''return an is_exec() function with default to fallback'''
938 if checkexec(path):
939 return lambda x: is_exec(os.path.join(path, x))
940 return fallback
941
def checklink(path):
    """check whether the given path is on a symlink-capable filesystem"""
    # mktemp is not racy because symlink creation will fail if the
    # file already exists
    probe = tempfile.mktemp(dir=path)
    try:
        os.symlink(".", probe)
        os.unlink(probe)
        return True
    except (OSError, AttributeError):
        # AttributeError: os.symlink missing on this platform
        return False
953 947
954 def linkfunc(path, fallback):
955 '''return an is_link() function with default to fallback'''
956 if checklink(path):
957 return lambda x: os.path.islink(os.path.join(path, x))
958 return fallback
959
960 948 _umask = os.umask(0)
961 949 os.umask(_umask)
962 950
def needbinarypatch():
    """return True if patches should be applied in binary mode by default."""
    # NOTE(review): only Windows - presumably to sidestep EOL translation;
    # confirm against the patch code
    return os.name == 'nt'
966 954
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(os.sep):
        return True
    # preserves the original truthiness semantics: on platforms without
    # an altsep this evaluates to os.altsep (falsy)
    return os.altsep and path.endswith(os.altsep)
970 958
def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if need.'''
    return path.split(os.sep)
978 966
def gui():
    '''Are we running in a GUI?'''
    if os.name == "nt" or os.name == "mac":
        return True
    # elsewhere a set DISPLAY implies a GUI; note the (truthy) DISPLAY
    # string itself is returned, not True
    return os.environ.get("DISPLAY")

def lookup_reg(key, name=None, scope=None):
    # registry lookups only exist on Windows; platform code overrides
    # this stub there
    return None
985 973
986 974 # Platform specific variants
987 975 if os.name == 'nt':
988 976 import msvcrt
989 977 nulldev = 'NUL:'
990 978
991 979 class winstdout:
992 980 '''stdout on windows misbehaves if sent through a pipe'''
993 981
994 982 def __init__(self, fp):
995 983 self.fp = fp
996 984
997 985 def __getattr__(self, key):
998 986 return getattr(self.fp, key)
999 987
1000 988 def close(self):
1001 989 try:
1002 990 self.fp.close()
1003 991 except: pass
1004 992
1005 993 def write(self, s):
1006 994 try:
1007 995 # This is workaround for "Not enough space" error on
1008 996 # writing large size of data to console.
1009 997 limit = 16000
1010 998 l = len(s)
1011 999 start = 0
1012 1000 while start < l:
1013 1001 end = start + limit
1014 1002 self.fp.write(s[start:end])
1015 1003 start = end
1016 1004 except IOError, inst:
1017 1005 if inst.errno != 0: raise
1018 1006 self.close()
1019 1007 raise IOError(errno.EPIPE, 'Broken pipe')
1020 1008
1021 1009 def flush(self):
1022 1010 try:
1023 1011 return self.fp.flush()
1024 1012 except IOError, inst:
1025 1013 if inst.errno != errno.EINVAL: raise
1026 1014 self.close()
1027 1015 raise IOError(errno.EPIPE, 'Broken pipe')
1028 1016
1029 1017 sys.stdout = winstdout(sys.stdout)
1030 1018
1031 1019 def _is_win_9x():
1032 1020 '''return true if run on windows 95, 98 or me.'''
1033 1021 try:
1034 1022 return sys.getwindowsversion()[3] == 1
1035 1023 except AttributeError:
1036 1024 return 'command' in os.environ.get('comspec', '')
1037 1025
1038 1026 def openhardlinks():
1039 1027 return not _is_win_9x and "win32api" in locals()
1040 1028
1041 1029 def system_rcpath():
1042 1030 try:
1043 1031 return system_rcpath_win32()
1044 1032 except:
1045 1033 return [r'c:\mercurial\mercurial.ini']
1046 1034
1047 1035 def user_rcpath():
1048 1036 '''return os-specific hgrc search path to the user dir'''
1049 1037 try:
1050 1038 path = user_rcpath_win32()
1051 1039 except:
1052 1040 home = os.path.expanduser('~')
1053 1041 path = [os.path.join(home, 'mercurial.ini'),
1054 1042 os.path.join(home, '.hgrc')]
1055 1043 userprofile = os.environ.get('USERPROFILE')
1056 1044 if userprofile:
1057 1045 path.append(os.path.join(userprofile, 'mercurial.ini'))
1058 1046 path.append(os.path.join(userprofile, '.hgrc'))
1059 1047 return path
1060 1048
1061 1049 def parse_patch_output(output_line):
1062 1050 """parses the output produced by patch and returns the file name"""
1063 1051 pf = output_line[14:]
1064 1052 if pf[0] == '`':
1065 1053 pf = pf[1:-1] # Remove the quotes
1066 1054 return pf
1067 1055
1068 1056 def sshargs(sshcmd, host, user, port):
1069 1057 '''Build argument list for ssh or Plink'''
1070 1058 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
1071 1059 args = user and ("%s@%s" % (user, host)) or host
1072 1060 return port and ("%s %s %s" % (args, pflag, port)) or args
1073 1061
1074 1062 def testpid(pid):
1075 1063 '''return False if pid dead, True if running or not known'''
1076 1064 return True
1077 1065
1078 1066 def set_flags(f, flags):
1079 1067 pass
1080 1068
1081 1069 def set_binary(fd):
1082 1070 # When run without console, pipes may expose invalid
1083 1071 # fileno(), usually set to -1.
1084 1072 if hasattr(fd, 'fileno') and fd.fileno() >= 0:
1085 1073 msvcrt.setmode(fd.fileno(), os.O_BINARY)
1086 1074
1087 1075 def pconvert(path):
1088 1076 return '/'.join(splitpath(path))
1089 1077
1090 1078 def localpath(path):
1091 1079 return path.replace('/', '\\')
1092 1080
1093 1081 def normpath(path):
1094 1082 return pconvert(os.path.normpath(path))
1095 1083
1096 1084 makelock = _makelock_file
1097 1085 readlock = _readlock_file
1098 1086
1099 1087 def samestat(s1, s2):
1100 1088 return False
1101 1089
1102 1090 # A sequence of backslashes is special iff it precedes a double quote:
1103 1091 # - if there's an even number of backslashes, the double quote is not
1104 1092 # quoted (i.e. it ends the quoted region)
1105 1093 # - if there's an odd number of backslashes, the double quote is quoted
1106 1094 # - in both cases, every pair of backslashes is unquoted into a single
1107 1095 # backslash
1108 1096 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
1109 1097 # So, to quote a string, we must surround it in double quotes, double
1110 1098 # the number of backslashes that preceed double quotes and add another
1111 1099 # backslash before every double quote (being careful with the double
1112 1100 # quote we've appended to the end)
1113 1101 _quotere = None
1114 1102 def shellquote(s):
1115 1103 global _quotere
1116 1104 if _quotere is None:
1117 1105 _quotere = re.compile(r'(\\*)("|\\$)')
1118 1106 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
1119 1107
1120 1108 def quotecommand(cmd):
1121 1109 """Build a command string suitable for os.popen* calls."""
1122 1110 # The extra quotes are needed because popen* runs the command
1123 1111 # through the current COMSPEC. cmd.exe suppress enclosing quotes.
1124 1112 return '"' + cmd + '"'
1125 1113
1126 1114 def popen(command, mode='r'):
1127 1115 # Work around "popen spawned process may not write to stdout
1128 1116 # under windows"
1129 1117 # http://bugs.python.org/issue1366
1130 1118 command += " 2> %s" % nulldev
1131 1119 return os.popen(quotecommand(command), mode)
1132 1120
1133 1121 def explain_exit(code):
1134 1122 return _("exited with status %d") % code, code
1135 1123
1136 1124 # if you change this stub into a real check, please try to implement the
1137 1125 # username and groupname functions above, too.
1138 1126 def isowner(fp, st=None):
1139 1127 return True
1140 1128
1141 1129 def find_in_path(name, path, default=None):
1142 1130 '''find name in search path. path can be string (will be split
1143 1131 with os.pathsep), or iterable thing that returns strings. if name
1144 1132 found, return path to name. else return default. name is looked up
1145 1133 using cmd.exe rules, using PATHEXT.'''
1146 1134 if isinstance(path, str):
1147 1135 path = path.split(os.pathsep)
1148 1136
1149 1137 pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
1150 1138 pathext = pathext.lower().split(os.pathsep)
1151 1139 isexec = os.path.splitext(name)[1].lower() in pathext
1152 1140
1153 1141 for p in path:
1154 1142 p_name = os.path.join(p, name)
1155 1143
1156 1144 if isexec and os.path.exists(p_name):
1157 1145 return p_name
1158 1146
1159 1147 for ext in pathext:
1160 1148 p_name_ext = p_name + ext
1161 1149 if os.path.exists(p_name_ext):
1162 1150 return p_name_ext
1163 1151 return default
1164 1152
1165 1153 def set_signal_handler():
1166 1154 try:
1167 1155 set_signal_handler_win32()
1168 1156 except NameError:
1169 1157 pass
1170 1158
1171 1159 try:
1172 1160 # override functions with win32 versions if possible
1173 1161 from util_win32 import *
1174 1162 if not _is_win_9x():
1175 1163 posixfile = posixfile_nt
1176 1164 except ImportError:
1177 1165 pass
1178 1166
1179 1167 else:
1180 1168 nulldev = '/dev/null'
1181 1169
1182 1170 def rcfiles(path):
1183 1171 rcs = [os.path.join(path, 'hgrc')]
1184 1172 rcdir = os.path.join(path, 'hgrc.d')
1185 1173 try:
1186 1174 rcs.extend([os.path.join(rcdir, f)
1187 1175 for f, kind in osutil.listdir(rcdir)
1188 1176 if f.endswith(".rc")])
1189 1177 except OSError:
1190 1178 pass
1191 1179 return rcs
1192 1180
1193 1181 def system_rcpath():
1194 1182 path = []
1195 1183 # old mod_python does not set sys.argv
1196 1184 if len(getattr(sys, 'argv', [])) > 0:
1197 1185 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
1198 1186 '/../etc/mercurial'))
1199 1187 path.extend(rcfiles('/etc/mercurial'))
1200 1188 return path
1201 1189
1202 1190 def user_rcpath():
1203 1191 return [os.path.expanduser('~/.hgrc')]
1204 1192
def parse_patch_output(output_line):
    """parses the output produced by patch and returns the file name"""
    pf = output_line[14:]  # strip the leading "patching file " prefix
    if os.sys.platform == 'OpenVMS':
        if pf[0] == '`':
            pf = pf[1:-1]  # Remove the quotes
    elif pf.startswith("'") and pf.endswith("'") and " " in pf:
        # GNU patch quotes names that contain spaces
        pf = pf[1:-1]  # Remove the quotes
    return pf
1215 1203
def sshargs(sshcmd, host, user, port):
    '''Build argument list for ssh'''
    if user:
        dest = "%s@%s" % (user, host)
    else:
        dest = host
    if port:
        return "%s -p %s" % (dest, port)
    return dest
1220 1208
def is_exec(f):
    """check whether a file is executable"""
    # owner-execute bit only (S_IXUSR == 0100), without following symlinks
    return bool(os.lstat(f).st_mode & stat.S_IXUSR)
1224 1212
1225 1213 def set_flags(f, flags):
1226 1214 s = os.lstat(f).st_mode
1227 1215 x = "x" in flags
1228 1216 l = "l" in flags
1229 1217 if l:
1230 1218 if not stat.S_ISLNK(s):
1231 1219 # switch file to link
1232 1220 data = file(f).read()
1233 1221 os.unlink(f)
1234 1222 os.symlink(data, f)
1235 1223 # no chmod needed at this point
1236 1224 return
1237 1225 if stat.S_ISLNK(s):
1238 1226 # switch link to file
1239 1227 data = os.readlink(f)
1240 1228 os.unlink(f)
1241 1229 file(f, "w").write(data)
1242 1230 s = 0666 & ~_umask # avoid restatting for chmod
1243 1231
1244 1232 sx = s & 0100
1245 1233 if x and not sx:
1246 1234 # Turn on +x for every +r bit when making a file executable
1247 1235 # and obey umask.
1248 1236 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
1249 1237 elif not x and sx:
1250 1238 # Turn off all +x bits
1251 1239 os.chmod(f, s & 0666)
1252 1240
1253 1241 def set_binary(fd):
1254 1242 pass
1255 1243
1256 1244 def pconvert(path):
1257 1245 return path
1258 1246
1259 1247 def localpath(path):
1260 1248 return path
1261 1249
1262 1250 normpath = os.path.normpath
1263 1251 samestat = os.path.samestat
1264 1252
1265 1253 def makelock(info, pathname):
1266 1254 try:
1267 1255 os.symlink(info, pathname)
1268 1256 except OSError, why:
1269 1257 if why.errno == errno.EEXIST:
1270 1258 raise
1271 1259 else:
1272 1260 _makelock_file(info, pathname)
1273 1261
1274 1262 def readlock(pathname):
1275 1263 try:
1276 1264 return os.readlink(pathname)
1277 1265 except OSError, why:
1278 1266 if why.errno in (errno.EINVAL, errno.ENOSYS):
1279 1267 return _readlock_file(pathname)
1280 1268 else:
1281 1269 raise
1282 1270
def shellquote(s):
    """quote a string for use as a POSIX shell word"""
    if os.sys.platform == 'OpenVMS':
        return '"%s"' % s
    # single-quote, with embedded quotes written as '\'' (close quote,
    # escaped quote, reopen quote)
    return "'%s'" % s.replace("'", "'\\''")
1288 1276
def quotecommand(cmd):
    """Build a command string suitable for os.popen* calls."""
    # POSIX shells need no extra outer quoting (unlike cmd.exe)
    return cmd

def popen(command, mode='r'):
    # plain passthrough; the NT variant redirects stderr
    return os.popen(command, mode)
1294 1282
1295 1283 def testpid(pid):
1296 1284 '''return False if pid dead, True if running or not sure'''
1297 1285 if os.sys.platform == 'OpenVMS':
1298 1286 return True
1299 1287 try:
1300 1288 os.kill(pid, 0)
1301 1289 return True
1302 1290 except OSError, inst:
1303 1291 return inst.errno != errno.ESRCH
1304 1292
1305 1293 def explain_exit(code):
1306 1294 """return a 2-tuple (desc, code) describing a process's status"""
1307 1295 if os.WIFEXITED(code):
1308 1296 val = os.WEXITSTATUS(code)
1309 1297 return _("exited with status %d") % val, val
1310 1298 elif os.WIFSIGNALED(code):
1311 1299 val = os.WTERMSIG(code)
1312 1300 return _("killed by signal %d") % val, val
1313 1301 elif os.WIFSTOPPED(code):
1314 1302 val = os.WSTOPSIG(code)
1315 1303 return _("stopped by signal %d") % val, val
1316 1304 raise ValueError(_("invalid exit code"))
1317 1305
1318 1306 def isowner(fp, st=None):
1319 1307 """Return True if the file object f belongs to the current user.
1320 1308
1321 1309 The return value of a util.fstat(f) may be passed as the st argument.
1322 1310 """
1323 1311 if st is None:
1324 1312 st = fstat(fp)
1325 1313 return st.st_uid == os.getuid()
1326 1314
def find_in_path(name, path, default=None):
    '''find name in search path. path can be string (will be split
    with os.pathsep), or iterable thing that returns strings. if name
    found, return path to name. else return default.'''
    if isinstance(path, str):
        path = path.split(os.pathsep)
    for directory in path:
        candidate = os.path.join(directory, name)
        if os.path.exists(candidate):
            return candidate
    return default
1338 1326
def set_signal_handler():
    # nothing to do on POSIX; the win32 section installs a console handler
    pass

def find_exe(name, default=None):
    '''find path of an executable.
    if name contains a path component, return it as is. otherwise,
    use normal executable search path.'''
    if os.sep in name or sys.platform == 'OpenVMS':
        # don't check the executable bit. if the file isn't
        # executable, whoever tries to actually run it will give a
        # much more useful error message.
        return name
    return find_in_path(name, os.environ.get('PATH', ''), default=default)
1353 1341
def _buildencodefun():
    '''build the (encode, decode) pair used for store filenames:
    A-Z become _a.._z, "_" becomes "__", and control chars, bytes above
    125 and Windows-reserved characters become ~XX hex escapes'''
    escape = '_'
    winreserved = [ord(c) for c in '\\:*?"<>|']
    cmap = dict((chr(x), chr(x)) for x in range(127))
    for x in list(range(32)) + list(range(126, 256)) + winreserved:
        cmap[chr(x)] = "~%02x" % x
    for x in list(range(ord("A"), ord("Z") + 1)) + [ord(escape)]:
        cmap[chr(x)] = escape + chr(x).lower()
    dmap = {}
    for k in cmap:
        dmap[cmap[k]] = k
    def decode(s):
        i = 0
        while i < len(s):
            # greedy: try 1-, 2- then 3-character escapes
            for l in range(1, 4):
                try:
                    yield dmap[s[i:i + l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                raise KeyError
    def encode(s):
        return "".join([cmap[c] for c in s])
    return (encode, lambda s: "".join(list(decode(s))))

encodefilename, decodefilename = _buildencodefun()
1381 1369
def encodedopener(openerfn, fn):
    """wrap an opener so every path is passed through fn first"""
    def wrapped(path, *args, **kw):
        return openerfn(fn(path), *args, **kw)
    return wrapped
1386 1374
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want.  If the original file already exists, just copy
    # its mode.  Otherwise, manually obey umask.
    try:
        st_mode = os.lstat(name).st_mode & 0777
    except OSError, inst:
        if inst.errno != errno.ENOENT:
            raise
        st_mode = createmode
        if st_mode is None:
            st_mode = ~_umask
        st_mode &= 0666
    os.chmod(temp, st_mode)
    if emptyok:
        # caller will truncate anyway: skip the content copy
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                # no original: the (empty) temp file is the copy
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except:
        # copying failed: remove the temp file before re-raising
        try: os.unlink(temp)
        except: pass
        raise
    return temp
1434 1422
class atomictempfile(posixfile):
    """file-like object that atomically updates a file

    All writes will be redirected to a temporary copy of the original
    file.  When rename is called, the copy is renamed to the original
    name, making the changes visible.
    """
    def __init__(self, name, mode, createmode):
        # the target name; writes go to self.temp until rename()
        self.__name = name
        self.temp = mktempcopy(name, emptyok=('w' in mode),
                               createmode=createmode)
        posixfile.__init__(self, self.temp, mode)

    def rename(self):
        # commit: close and move the temp copy onto the real name
        if not self.closed:
            posixfile.close(self)
            rename(self.temp, localpath(self.__name))

    def __del__(self):
        # abort path: if rename() was never called, discard the temp copy
        if not self.closed:
            try:
                os.unlink(self.temp)
            except: pass
            posixfile.close(self)
1459 1447
def makedirs(name, mode=None):
    """recursive directory creation with parent mode inheritance"""
    try:
        os.mkdir(name)
        if mode is not None:
            os.chmod(name, mode)
        return
    except OSError, err:
        if err.errno == errno.EEXIST:
            # already there - nothing to do
            return
        if err.errno != errno.ENOENT:
            raise
    # parent is missing: create it first, then retry this directory
    parent = os.path.abspath(os.path.dirname(name))
    makedirs(parent, mode)
    makedirs(name, mode)
1475 1463
1476 1464 class opener(object):
1477 1465 """Open files relative to a base directory
1478 1466
1479 1467 This class is used to hide the details of COW semantics and
1480 1468 remote file access from higher level code.
1481 1469 """
1482 1470 def __init__(self, base, audit=True):
1483 1471 self.base = base
1484 1472 if audit:
1485 1473 self.audit_path = path_auditor(base)
1486 1474 else:
1487 1475 self.audit_path = always
1488 1476 self.createmode = None
1489 1477
1490 1478 def __getattr__(self, name):
1491 1479 if name == '_can_symlink':
1492 1480 self._can_symlink = checklink(self.base)
1493 1481 return self._can_symlink
1494 1482 raise AttributeError(name)
1495 1483
1496 1484 def _fixfilemode(self, name):
1497 1485 if self.createmode is None:
1498 1486 return
1499 1487 os.chmod(name, self.createmode & 0666)
1500 1488
1501 1489 def __call__(self, path, mode="r", text=False, atomictemp=False):
1502 1490 self.audit_path(path)
1503 1491 f = os.path.join(self.base, path)
1504 1492
1505 1493 if not text and "b" not in mode:
1506 1494 mode += "b" # for that other OS
1507 1495
1508 1496 nlink = -1
1509 1497 if mode[0] != "r":
1510 1498 try:
1511 1499 nlink = nlinks(f)
1512 1500 except OSError:
1513 1501 nlink = 0
1514 1502 d = os.path.dirname(f)
1515 1503 if not os.path.isdir(d):
1516 1504 makedirs(d, self.createmode)
1517 1505 if atomictemp:
1518 1506 return atomictempfile(f, mode, self.createmode)
1519 1507 if nlink > 1:
1520 1508 rename(mktempcopy(f), f)
1521 1509 fp = posixfile(f, mode)
1522 1510 if nlink == 0:
1523 1511 self._fixfilemode(f)
1524 1512 return fp
1525 1513
1526 1514 def symlink(self, src, dst):
1527 1515 self.audit_path(dst)
1528 1516 linkname = os.path.join(self.base, dst)
1529 1517 try:
1530 1518 os.unlink(linkname)
1531 1519 except OSError:
1532 1520 pass
1533 1521
1534 1522 dirname = os.path.dirname(linkname)
1535 1523 if not os.path.exists(dirname):
1536 1524 makedirs(dirname, self.createmode)
1537 1525
1538 1526 if self._can_symlink:
1539 1527 try:
1540 1528 os.symlink(src, linkname)
1541 1529 except OSError, err:
1542 1530 raise OSError(err.errno, _('could not symlink to %r: %s') %
1543 1531 (src, err.strerror), linkname)
1544 1532 else:
1545 1533 f = self(dst, "w")
1546 1534 f.write(src)
1547 1535 f.close()
1548 1536 self._fixfilemode(dst)
1549 1537
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.iter = iter(in_iter)
        self.buf = ''
        self.targetsize = 2**16

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and self.iter:
            # refill: pull chunks until we have at least l bytes
            # (rounded up to targetsize to amortize the copying)
            # Clamp to a multiple of self.targetsize
            targetsize = max(l, self.targetsize)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                # source exhausted; mark it so we never iterate again
                self.iter = False
            self.buf = collector.getvalue()
        if len(self.buf) == l:
            s, self.buf = str(self.buf), ''
        else:
            # keep the tail as a buffer object to avoid copying it
            s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s
1583 1571
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None:
            nbytes = size
        else:
            nbytes = min(limit, size)
        # short-circuit: when nbytes is 0 we never issue a zero-length read
        chunk = nbytes and f.read(nbytes)
        if not chunk:
            break
        if limit:
            limit -= len(chunk)
        yield chunk
1600 1588
def makedate():
    """Return the current time as a (unixtime, tzoffset) pair."""
    now = time.localtime()
    # when DST is in effect, altzone carries the correct UTC offset
    if now.tm_isdst == 1 and time.daylight:
        offset = time.altzone
    else:
        offset = time.timezone
    return time.mktime(now), offset
1608 1596
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """Render a (unixtime, offset) pair as localized time text.

    unixtime is seconds since the epoch; offset is the zone's distance
    from UTC in seconds.  The placeholders %1 and %2 in format expand to
    the signed hours and the minutes of the zone offset respectively.
    When date is empty, the current time from makedate() is used.
    """
    when, offset = date or makedate()
    if "%1" in format or "%2" in format:
        # positive offsets are west of UTC and therefore print as '-'
        if offset > 0:
            sign = "-"
        else:
            sign = "+"
        minutes = abs(offset) / 60
        format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    return time.strftime(format, time.gmtime(float(when) - offset))
1622 1610
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into ISO 8601 date (YYYY-MM-DD)."""
    # fix: the standard is ISO 8601, not "iso 8631" as previously stated
    return datestr(date, format='%Y-%m-%d')
1626 1614
def strdate(string, format, defaults=None):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised.

    defaults maps strptime field groups (e.g. "d", "mb", "HI") to string
    values appended for any field missing from format.
    """
    # fix: the default used to be a mutable [] shared across calls; use a
    # None sentinel instead (an empty mapping behaves identically below)
    if defaults is None:
        defaults = {}

    def timezone(string):
        # recognize a trailing '+HHMM'/'-HHMM' numeric zone or GMT/UTC;
        # returns the offset in seconds west of UTC, or None if absent
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset != None:
        # drop the zone token so strptime only sees the date/time part
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # no explicit zone: interpret in the local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1662 1650
def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    Raises Abort when no format matches or the result is out of range.
    NOTE: a caller-supplied defaults dict is filled in (mutated) in place.
    """
    if not date:
        # empty input maps to the epoch at UTC
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        # already parsed; pass through unchanged
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        # fast path: internal "unixtime offset" representation
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        # field groups: day, month, year, hour, minute, second;
        # time-of-day fields default to "00", date fields to "now"
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                else:
                    defaults[part] = datestr(now, "%" + part[0])

        # try each candidate format until one parses (for/else: the else
        # branch fires only when no format succeeded)
        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1708 1696
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    '-{days}' within the last given number of days

    '{date} to {date}' within the inclusive range

    """

    def lower(date):
        # earliest timestamp a (possibly partial) date spec can mean
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # latest timestamp the spec can mean: try the longest month
        # lengths first, falling back when the day is invalid
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                # fix: was a bare "except:", which also swallowed
                # KeyboardInterrupt/SystemExit; parsedate signals an
                # unparsable date by raising Abort, so catch only that
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
1757 1745
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # strip in order: mail domain, anything before '<', then cut at the
    # first space or dot -- e.g. "John Doe <john.doe@x>" -> "john"
    pos = user.find('@')
    if pos != -1:
        user = user[:pos]
    pos = user.find('<')
    if pos != -1:
        user = user[pos + 1:]
    for sep in (' ', '.'):
        pos = user.find(sep)
        if pos != -1:
            user = user[:pos]
    return user
1773 1761
def email(author):
    '''Extract the email address part of an author string.'''
    # take what sits between '<' and '>'; when the brackets are absent
    # find() yields -1, so start becomes 0 and end None (whole string)
    end = author.find('>')
    if end == -1:
        end = None
    start = author.find('<') + 1
    return author[start:end]
1779 1767
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    # reserve three characters for the trailing dots
    return "%s..." % text[:maxlength - 3]
1786 1774
def walkrepos(path, followsym=False, seen_dirs=None):
    '''yield every hg repository under path, recursively.

    With followsym, symlinked directories are followed as well; seen_dirs
    accumulates visited directories' stat results so a symlink cycle is
    walked at most once.
    '''
    def errhandler(err):
        # only errors on the root path itself are fatal; deeper failures
        # are silently skipped by os.walk
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            # record dirname's stat in dirlst unless an identical
            # directory (per samestat) is already present; returns True
            # when the directory was new
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # platform cannot compare stat results; symlink following would
        # risk infinite loops, so disable it
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        if '.hg' in dirs:
            dirs[:] = [] # don't descend further
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
        elif followsym:
            # recurse through symlinked dirs not seen before; plain dirs
            # are kept for os.walk itself to descend into
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
1828 1816
1829 1817 _rcpath = None
1830 1818
def os_rcpath():
    '''Return the default OS-specific hgrc search path.'''
    # system-wide entries first, then the per-user ones, all normalized
    candidates = system_rcpath() + user_rcpath()
    return [os.path.normpath(p) for p in candidates]
1837 1825
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        # already computed on a previous call
        return _rcpath
    if 'HGRCPATH' not in os.environ:
        _rcpath = os_rcpath()
        return _rcpath
    _rcpath = []
    for entry in os.environ['HGRCPATH'].split(os.pathsep):
        if not entry:
            continue
        if os.path.isdir(entry):
            # a directory contributes every *.rc file it contains
            for fn, kind in osutil.listdir(entry):
                if fn.endswith('.rc'):
                    _rcpath.append(os.path.join(entry, fn))
        else:
            _rcpath.append(entry)
    return _rcpath
1859 1847
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    # ordered largest unit first; within a unit the precision shrinks as
    # the magnitude grows (>=100 GB: no decimals, >=1 GB: two decimals)
    units = (
        (100, 1 << 30, _('%.0f GB')),
        (10, 1 << 30, _('%.1f GB')),
        (1, 1 << 30, _('%.2f GB')),
        (100, 1 << 20, _('%.0f MB')),
        (10, 1 << 20, _('%.1f MB')),
        (1, 1 << 20, _('%.2f MB')),
        (100, 1 << 10, _('%.0f KB')),
        (10, 1 << 10, _('%.1f KB')),
        (1, 1 << 10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for factor, divisor, fmt in units:
        if nbytes >= divisor * factor:
            return fmt % (nbytes / float(divisor))
    # unreachable for nbytes >= 0, but mirrors the last (bytes) entry
    return units[-1][2] % nbytes
1880 1868
def drop_scheme(scheme, path):
    """Strip a leading 'scheme:' (and any '//') prefix from path."""
    prefix = scheme + ':'
    if path.startswith(prefix):
        path = path[len(prefix):]
        # also drop the authority marker, e.g. 'file://' -> ''
        if path.startswith('//'):
            path = path[2:]
    return path
1888 1876
def uirepr(s):
    """Return repr(s) with doubled backslashes collapsed."""
    rendered = repr(s)
    # repr() escapes each backslash; collapse the pairs so Windows
    # paths stay readable in user-facing output
    return rendered.replace('\\\\', '\\')
1892 1880
def hidepassword(url):
    '''hide user credential in a url string'''
    scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
    # mask only the password half of "user:password@host"
    netloc = re.sub('([^:]*):([^@]*)@(.*)', r'\1:***@\3', netloc)
    pieces = (scheme, netloc, path, params, query, fragment)
    return urlparse.urlunparse(pieces)
1898 1886
def removeauth(url):
    '''remove all authentication information from a url string'''
    pieces = list(urlparse.urlparse(url))
    # keep only the host part after '@'; when absent find() returns -1,
    # so the +1 slice keeps the whole netloc unchanged
    pieces[1] = pieces[1][pieces[1].find('@') + 1:]
    return urlparse.urlunparse(pieces)
General Comments 0
You need to be logged in to leave comments. Login now