##// END OF EJS Templates
Merge with stable
Matt Mackall -
r9102:bbc78cb1 merge default
parent child Browse files
Show More
@@ -1,542 +1,543 b''
1 1 # keyword.py - $Keyword$ expansion for Mercurial
2 2 #
3 3 # Copyright 2007, 2008 Christian Ebert <blacktrash@gmx.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7 #
8 8 # $Id$
9 9 #
10 10 # Keyword expansion hack against the grain of a DSCM
11 11 #
12 12 # There are many good reasons why this is not needed in a distributed
13 13 # SCM, still it may be useful in very small projects based on single
14 14 # files (like LaTeX packages), that are mostly addressed to an
15 15 # audience not running a version control system.
16 16 #
17 17 # For in-depth discussion refer to
18 18 # <http://mercurial.selenic.com/wiki/KeywordPlan>.
19 19 #
20 20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 21 #
22 22 # Binary files are not touched.
23 23 #
24 24 # Files to act upon/ignore are specified in the [keyword] section.
25 25 # Customized keyword template mappings in the [keywordmaps] section.
26 26 #
27 27 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
28 28
29 29 '''expand keywords in tracked files
30 30
31 31 This extension expands RCS/CVS-like or self-customized $Keywords$ in tracked
32 32 text files selected by your configuration.
33 33
34 34 Keywords are only expanded in local repositories and not stored in the change
35 35 history. The mechanism can be regarded as a convenience for the current user
36 36 or for archive distribution.
37 37
38 38 Configuration is done in the [keyword] and [keywordmaps] sections of hgrc
39 39 files.
40 40
41 41 Example:
42 42
43 43 [keyword]
44 44 # expand keywords in every python file except those matching "x*"
45 45 **.py =
46 46 x* = ignore
47 47
48 48 Note: the more specific you are in your filename patterns the less you lose
49 49 speed in huge repositories.
50 50
51 51 For [keywordmaps] template mapping and expansion demonstration and control run
52 52 "hg kwdemo".
53 53
54 54 An additional date template filter {date|utcdate} is provided.
55 55
56 56 The default template mappings (view with "hg kwdemo -d") can be replaced with
57 57 customized keywords and templates. Again, run "hg kwdemo" to control the
58 58 results of your config changes.
59 59
60 60 Before changing/disabling active keywords, run "hg kwshrink" to avoid the risk
61 61 of inadvertently storing expanded keywords in the change history.
62 62
63 63 To force expansion after enabling it, or a configuration change, run "hg
64 64 kwexpand".
65 65
66 66 Also, when committing with the record extension or using mq's qrecord, be
67 67 aware that keywords cannot be updated. Again, run "hg kwexpand" on the files
68 68 in question to update keyword expansions after all changes have been checked
69 69 in.
70 70
71 71 Expansions spanning more than one line and incremental expansions, like CVS'
72 72 $Log$, are not supported. A keyword template map "Log = {desc}" expands to the
73 73 first line of the changeset description.
74 74 '''
75 75
76 76 from mercurial import commands, cmdutil, dispatch, filelog, revlog, extensions
77 77 from mercurial import patch, localrepo, templater, templatefilters, util, match
78 78 from mercurial.hgweb import webcommands
79 79 from mercurial.lock import release
80 80 from mercurial.node import nullid
81 81 from mercurial.i18n import _
82 82 import re, shutil, tempfile, time
83 83
84 84 commands.optionalrepo += ' kwdemo'
85 85
86 86 # hg commands that do not act on keywords
87 87 nokwcommands = ('add addremove annotate bundle copy export grep incoming init'
88 88 ' log outgoing push rename rollback tip verify'
89 89 ' convert email glog')
90 90
91 91 # hg commands that trigger expansion only when writing to working dir,
92 92 # not when reading filelog, and unexpand when reading from working dir
93 93 restricted = 'merge record resolve qfold qimport qnew qpush qrefresh qrecord'
94 94
95 95 def utcdate(date):
96 96 '''Returns hgdate in cvs-like UTC format.'''
97 97 return time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(date[0]))
98 98
99 99 # make keyword tools accessible
100 100 kwtools = {'templater': None, 'hgcmd': '', 'inc': [], 'exc': ['.hg*']}
101 101
102 102
103 103 class kwtemplater(object):
104 104 '''
105 105 Sets up keyword templates, corresponding keyword regex, and
106 106 provides keyword substitution functions.
107 107 '''
108 108 templates = {
109 109 'Revision': '{node|short}',
110 110 'Author': '{author|user}',
111 111 'Date': '{date|utcdate}',
112 112 'RCSFile': '{file|basename},v',
113 113 'Source': '{root}/{file},v',
114 114 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
115 115 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
116 116 }
117 117
118 118 def __init__(self, ui, repo):
119 119 self.ui = ui
120 120 self.repo = repo
121 121 self.match = match.match(repo.root, '', [],
122 122 kwtools['inc'], kwtools['exc'])
123 123 self.restrict = kwtools['hgcmd'] in restricted.split()
124 124
125 125 kwmaps = self.ui.configitems('keywordmaps')
126 126 if kwmaps: # override default templates
127 127 self.templates = dict((k, templater.parsestring(v, False))
128 128 for k, v in kwmaps)
129 129 escaped = map(re.escape, self.templates.keys())
130 130 kwpat = r'\$(%s)(: [^$\n\r]*? )??\$' % '|'.join(escaped)
131 131 self.re_kw = re.compile(kwpat)
132 132
133 133 templatefilters.filters['utcdate'] = utcdate
134 134 self.ct = cmdutil.changeset_templater(self.ui, self.repo,
135 135 False, None, '', False)
136 136
137 137 def substitute(self, data, path, ctx, subfunc):
138 138 '''Replaces keywords in data with expanded template.'''
139 139 def kwsub(mobj):
140 140 kw = mobj.group(1)
141 141 self.ct.use_template(self.templates[kw])
142 142 self.ui.pushbuffer()
143 143 self.ct.show(ctx, root=self.repo.root, file=path)
144 144 ekw = templatefilters.firstline(self.ui.popbuffer())
145 145 return '$%s: %s $' % (kw, ekw)
146 146 return subfunc(kwsub, data)
147 147
148 148 def expand(self, path, node, data):
149 149 '''Returns data with keywords expanded.'''
150 150 if not self.restrict and self.match(path) and not util.binary(data):
151 151 ctx = self.repo.filectx(path, fileid=node).changectx()
152 152 return self.substitute(data, path, ctx, self.re_kw.sub)
153 153 return data
154 154
155 155 def iskwfile(self, path, flagfunc):
156 156 '''Returns true if path matches [keyword] pattern
157 157 and is not a symbolic link.
158 158 Caveat: localrepository._link fails on Windows.'''
159 159 return self.match(path) and not 'l' in flagfunc(path)
160 160
161 161 def overwrite(self, node, expand, files):
162 162 '''Overwrites selected files expanding/shrinking keywords.'''
163 163 ctx = self.repo[node]
164 164 mf = ctx.manifest()
165 165 if node is not None: # commit
166 166 files = [f for f in ctx.files() if f in mf]
167 167 notify = self.ui.debug
168 168 else: # kwexpand/kwshrink
169 169 notify = self.ui.note
170 170 candidates = [f for f in files if self.iskwfile(f, ctx.flags)]
171 171 if candidates:
172 172 self.restrict = True # do not expand when reading
173 173 msg = (expand and _('overwriting %s expanding keywords\n')
174 174 or _('overwriting %s shrinking keywords\n'))
175 175 for f in candidates:
176 176 fp = self.repo.file(f)
177 177 data = fp.read(mf[f])
178 178 if util.binary(data):
179 179 continue
180 180 if expand:
181 181 if node is None:
182 182 ctx = self.repo.filectx(f, fileid=mf[f]).changectx()
183 183 data, found = self.substitute(data, f, ctx,
184 184 self.re_kw.subn)
185 185 else:
186 186 found = self.re_kw.search(data)
187 187 if found:
188 188 notify(msg % f)
189 189 self.repo.wwrite(f, data, mf.flags(f))
190 190 if node is None:
191 191 self.repo.dirstate.normal(f)
192 192 self.restrict = False
193 193
194 194 def shrinktext(self, text):
195 195 '''Unconditionally removes all keyword substitutions from text.'''
196 196 return self.re_kw.sub(r'$\1$', text)
197 197
198 198 def shrink(self, fname, text):
199 199 '''Returns text with all keyword substitutions removed.'''
200 200 if self.match(fname) and not util.binary(text):
201 201 return self.shrinktext(text)
202 202 return text
203 203
204 204 def shrinklines(self, fname, lines):
205 205 '''Returns lines with keyword substitutions removed.'''
206 206 if self.match(fname):
207 207 text = ''.join(lines)
208 208 if not util.binary(text):
209 209 return self.shrinktext(text).splitlines(True)
210 210 return lines
211 211
212 212 def wread(self, fname, data):
213 213 '''If in restricted mode returns data read from wdir with
214 214 keyword substitutions removed.'''
215 215 return self.restrict and self.shrink(fname, data) or data
216 216
217 217 class kwfilelog(filelog.filelog):
218 218 '''
219 219 Subclass of filelog to hook into its read, add, cmp methods.
220 220 Keywords are "stored" unexpanded, and processed on reading.
221 221 '''
222 222 def __init__(self, opener, kwt, path):
223 223 super(kwfilelog, self).__init__(opener, path)
224 224 self.kwt = kwt
225 225 self.path = path
226 226
227 227 def read(self, node):
228 228 '''Expands keywords when reading filelog.'''
229 229 data = super(kwfilelog, self).read(node)
230 230 return self.kwt.expand(self.path, node, data)
231 231
232 232 def add(self, text, meta, tr, link, p1=None, p2=None):
233 233 '''Removes keyword substitutions when adding to filelog.'''
234 234 text = self.kwt.shrink(self.path, text)
235 235 return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
236 236
237 237 def cmp(self, node, text):
238 238 '''Removes keyword substitutions for comparison.'''
239 239 text = self.kwt.shrink(self.path, text)
240 240 if self.renamed(node):
241 241 t2 = super(kwfilelog, self).read(node)
242 242 return t2 != text
243 243 return revlog.revlog.cmp(self, node, text)
244 244
245 245 def _status(ui, repo, kwt, unknown, *pats, **opts):
246 246 '''Bails out if [keyword] configuration is not active.
247 247 Returns status of working directory.'''
248 248 if kwt:
249 249 match = cmdutil.match(repo, pats, opts)
250 250 return repo.status(match=match, unknown=unknown, clean=True)
251 251 if ui.configitems('keyword'):
252 252 raise util.Abort(_('[keyword] patterns cannot match'))
253 253 raise util.Abort(_('no [keyword] patterns configured'))
254 254
255 255 def _kwfwrite(ui, repo, expand, *pats, **opts):
256 256 '''Selects files and passes them to kwtemplater.overwrite.'''
257 257 if repo.dirstate.parents()[1] != nullid:
258 258 raise util.Abort(_('outstanding uncommitted merge'))
259 259 kwt = kwtools['templater']
260 260 status = _status(ui, repo, kwt, False, *pats, **opts)
261 261 modified, added, removed, deleted = status[:4]
262 262 if modified or added or removed or deleted:
263 263 raise util.Abort(_('outstanding uncommitted changes'))
264 264 wlock = lock = None
265 265 try:
266 266 wlock = repo.wlock()
267 267 lock = repo.lock()
268 268 kwt.overwrite(None, expand, status[6])
269 269 finally:
270 270 release(lock, wlock)
271 271
272 272 def demo(ui, repo, *args, **opts):
273 273 '''print [keywordmaps] configuration and an expansion example
274 274
275 275 Show current, custom, or default keyword template maps and their
276 276 expansions.
277 277
278 278 Extend current configuration by specifying maps as arguments and
279 279 optionally by reading from an additional hgrc file.
280 280
281 281 Override current keyword template maps with "default" option.
282 282 '''
283 283 def demoitems(section, items):
284 284 ui.write('[%s]\n' % section)
285 285 for k, v in items:
286 286 ui.write('%s = %s\n' % (k, v))
287 287
288 288 msg = 'hg keyword config and expansion example'
289 289 kwstatus = 'current'
290 290 fn = 'demo.txt'
291 291 branchname = 'demobranch'
292 292 tmpdir = tempfile.mkdtemp('', 'kwdemo.')
293 293 ui.note(_('creating temporary repository at %s\n') % tmpdir)
294 294 repo = localrepo.localrepository(ui, tmpdir, True)
295 295 ui.setconfig('keyword', fn, '')
296 296 if args or opts.get('rcfile'):
297 297 kwstatus = 'custom'
298 298 if opts.get('rcfile'):
299 299 ui.readconfig(opts.get('rcfile'))
300 300 if opts.get('default'):
301 301 kwstatus = 'default'
302 302 kwmaps = kwtemplater.templates
303 303 if ui.configitems('keywordmaps'):
304 304 # override maps from optional rcfile
305 305 for k, v in kwmaps.iteritems():
306 306 ui.setconfig('keywordmaps', k, v)
307 307 elif args:
308 308 # simulate hgrc parsing
309 309 rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
310 310 fp = repo.opener('hgrc', 'w')
311 311 fp.writelines(rcmaps)
312 312 fp.close()
313 313 ui.readconfig(repo.join('hgrc'))
314 314 if not opts.get('default'):
315 315 kwmaps = dict(ui.configitems('keywordmaps')) or kwtemplater.templates
316 316 uisetup(ui)
317 317 reposetup(ui, repo)
318 318 for k, v in ui.configitems('extensions'):
319 319 if k.endswith('keyword'):
320 320 extension = '%s = %s' % (k, v)
321 321 break
322 322 ui.status(_('\n\tconfig using %s keyword template maps\n') % kwstatus)
323 323 ui.write('[extensions]\n%s\n' % extension)
324 324 demoitems('keyword', ui.configitems('keyword'))
325 325 demoitems('keywordmaps', kwmaps.iteritems())
326 326 keywords = '$' + '$\n$'.join(kwmaps.keys()) + '$\n'
327 327 repo.wopener(fn, 'w').write(keywords)
328 328 repo.add([fn])
329 329 path = repo.wjoin(fn)
330 330 ui.note(_('\n%s keywords written to %s:\n') % (kwstatus, path))
331 331 ui.note(keywords)
332 332 ui.note('\nhg -R "%s" branch "%s"\n' % (tmpdir, branchname))
333 333 # silence branch command if not verbose
334 334 quiet = ui.quiet
335 335 ui.quiet = not ui.verbose
336 336 commands.branch(ui, repo, branchname)
337 337 ui.quiet = quiet
338 338 for name, cmd in ui.configitems('hooks'):
339 339 if name.split('.', 1)[0].find('commit') > -1:
340 340 repo.ui.setconfig('hooks', name, '')
341 341 ui.note(_('unhooked all commit hooks\n'))
342 342 ui.note('hg -R "%s" ci -m "%s"\n' % (tmpdir, msg))
343 343 repo.commit(text=msg)
344 344 fmt = ui.verbose and ' in %s' % path or ''
345 345 ui.status(_('\n\t%s keywords expanded%s\n') % (kwstatus, fmt))
346 346 ui.write(repo.wread(fn))
347 347 ui.debug(_('\nremoving temporary repository %s\n') % tmpdir)
348 348 shutil.rmtree(tmpdir, ignore_errors=True)
349 349
350 350 def expand(ui, repo, *pats, **opts):
351 351 '''expand keywords in the working directory
352 352
353 353 Run after (re)enabling keyword expansion.
354 354
355 355 kwexpand refuses to run if given files contain local changes.
356 356 '''
357 357 # 3rd argument sets expansion to True
358 358 _kwfwrite(ui, repo, True, *pats, **opts)
359 359
360 360 def files(ui, repo, *pats, **opts):
361 361 '''show files configured for keyword expansion
362 362
363 363 List which files in the working directory are matched by the [keyword]
364 364 configuration patterns.
365 365
366 366 Useful to prevent inadvertent keyword expansion and to speed up execution
367 367 by including only files that are actual candidates for expansion.
368 368
369 369 See "hg help keyword" on how to construct patterns both for inclusion and
370 370 exclusion of files.
371 371
372 372 Use -u/--untracked to list untracked files as well.
373 373
374 374 With -a/--all and -v/--verbose the codes used to show the status of files
375 375 are:
376 376 K = keyword expansion candidate
377 377 k = keyword expansion candidate (untracked)
378 378 I = ignored
379 379 i = ignored (untracked)
380 380 '''
381 381 kwt = kwtools['templater']
382 382 status = _status(ui, repo, kwt, opts.get('untracked'), *pats, **opts)
383 383 modified, added, removed, deleted, unknown, ignored, clean = status
384 384 files = sorted(modified + added + clean)
385 385 wctx = repo[None]
386 386 kwfiles = [f for f in files if kwt.iskwfile(f, wctx.flags)]
387 387 kwuntracked = [f for f in unknown if kwt.iskwfile(f, wctx.flags)]
388 388 cwd = pats and repo.getcwd() or ''
389 389 kwfstats = (not opts.get('ignore') and
390 390 (('K', kwfiles), ('k', kwuntracked),) or ())
391 391 if opts.get('all') or opts.get('ignore'):
392 392 kwfstats += (('I', [f for f in files if f not in kwfiles]),
393 393 ('i', [f for f in unknown if f not in kwuntracked]),)
394 394 for char, filenames in kwfstats:
395 395 fmt = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
396 396 for f in filenames:
397 397 ui.write(fmt % repo.pathto(f, cwd))
398 398
399 399 def shrink(ui, repo, *pats, **opts):
400 400 '''revert expanded keywords in the working directory
401 401
402 402 Run before changing/disabling active keywords or if you experience
403 403 problems with "hg import" or "hg merge".
404 404
405 405 kwshrink refuses to run if given files contain local changes.
406 406 '''
407 407 # 3rd argument sets expansion to False
408 408 _kwfwrite(ui, repo, False, *pats, **opts)
409 409
410 410
411 411 def uisetup(ui):
412 412 '''Collects [keyword] config in kwtools.
413 413 Monkeypatches dispatch._parse if needed.'''
414 414
415 415 for pat, opt in ui.configitems('keyword'):
416 416 if opt != 'ignore':
417 417 kwtools['inc'].append(pat)
418 418 else:
419 419 kwtools['exc'].append(pat)
420 420
421 421 if kwtools['inc']:
422 422 def kwdispatch_parse(orig, ui, args):
423 423 '''Monkeypatch dispatch._parse to obtain running hg command.'''
424 424 cmd, func, args, options, cmdoptions = orig(ui, args)
425 425 kwtools['hgcmd'] = cmd
426 426 return cmd, func, args, options, cmdoptions
427 427
428 428 extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
429 429
430 430 def reposetup(ui, repo):
431 431 '''Sets up repo as kwrepo for keyword substitution.
432 432 Overrides file method to return kwfilelog instead of filelog
433 433 if file matches user configuration.
434 434 Wraps commit to overwrite configured files with updated
435 435 keyword substitutions.
436 436 Monkeypatches patch and webcommands.'''
437 437
438 438 try:
439 439 if (not repo.local() or not kwtools['inc']
440 440 or kwtools['hgcmd'] in nokwcommands.split()
441 441 or '.hg' in util.splitpath(repo.root)
442 442 or repo._url.startswith('bundle:')):
443 443 return
444 444 except AttributeError:
445 445 pass
446 446
447 447 kwtools['templater'] = kwt = kwtemplater(ui, repo)
448 448
449 449 class kwrepo(repo.__class__):
450 450 def file(self, f):
451 451 if f[0] == '/':
452 452 f = f[1:]
453 453 return kwfilelog(self.sopener, kwt, f)
454 454
455 455 def wread(self, filename):
456 456 data = super(kwrepo, self).wread(filename)
457 457 return kwt.wread(filename, data)
458 458
459 def commit(self, text='', user=None, date=None, match=None,
460 force=False, editor=None, extra={}):
459 def commit(self, *args, **opts):
461 460 # use custom commitctx for user commands
462 461 # other extensions can still wrap repo.commitctx directly
463 repo.commitctx = self.kwcommitctx
464 return super(kwrepo, self).commit(text, user, date, match, force,
465 editor, extra)
462 self.commitctx = self.kwcommitctx
463 try:
464 return super(kwrepo, self).commit(*args, **opts)
465 finally:
466 del self.commitctx
466 467
467 468 def kwcommitctx(self, ctx, error=False):
468 469 wlock = lock = None
469 470 try:
470 471 wlock = self.wlock()
471 472 lock = self.lock()
472 473 # store and postpone commit hooks
473 474 commithooks = {}
474 475 for name, cmd in ui.configitems('hooks'):
475 476 if name.split('.', 1)[0] == 'commit':
476 477 commithooks[name] = cmd
477 478 ui.setconfig('hooks', name, None)
478 479 if commithooks:
479 480 # store parents for commit hooks
480 481 p1, p2 = ctx.p1(), ctx.p2()
481 482 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
482 483
483 484 n = super(kwrepo, self).commitctx(ctx, error)
484 485
485 486 kwt.overwrite(n, True, None)
486 487 if commithooks:
487 488 for name, cmd in commithooks.iteritems():
488 489 ui.setconfig('hooks', name, cmd)
489 repo.hook('commit', node=n, parent1=xp1, parent2=xp2)
490 self.hook('commit', node=n, parent1=xp1, parent2=xp2)
490 491 return n
491 492 finally:
492 493 release(lock, wlock)
493 494
494 495 # monkeypatches
495 496 def kwpatchfile_init(orig, self, ui, fname, opener,
496 497 missing=False, eol=None):
497 498 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
498 499 rejects or conflicts due to expanded keywords in working dir.'''
499 500 orig(self, ui, fname, opener, missing, eol)
500 501 # shrink keywords read from working dir
501 502 self.lines = kwt.shrinklines(self.fname, self.lines)
502 503
503 504 def kw_diff(orig, repo, node1=None, node2=None, match=None, changes=None,
504 505 opts=None):
505 506 '''Monkeypatch patch.diff to avoid expansion except when
506 507 comparing against working dir.'''
507 508 if node2 is not None:
508 509 kwt.match = util.never
509 510 elif node1 is not None and node1 != repo['.'].node():
510 511 kwt.restrict = True
511 512 return orig(repo, node1, node2, match, changes, opts)
512 513
513 514 def kwweb_skip(orig, web, req, tmpl):
514 515 '''Wraps webcommands.x turning off keyword expansion.'''
515 516 kwt.match = util.never
516 517 return orig(web, req, tmpl)
517 518
518 519 repo.__class__ = kwrepo
519 520
520 521 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
521 522 extensions.wrapfunction(patch, 'diff', kw_diff)
522 523 for c in 'annotate changeset rev filediff diff'.split():
523 524 extensions.wrapfunction(webcommands, c, kwweb_skip)
524 525
525 526 cmdtable = {
526 527 'kwdemo':
527 528 (demo,
528 529 [('d', 'default', None, _('show default keyword template maps')),
529 530 ('f', 'rcfile', [], _('read maps from rcfile'))],
530 531 _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
531 532 'kwexpand': (expand, commands.walkopts,
532 533 _('hg kwexpand [OPTION]... [FILE]...')),
533 534 'kwfiles':
534 535 (files,
535 536 [('a', 'all', None, _('show keyword status flags of all files')),
536 537 ('i', 'ignore', None, _('show files excluded from expansion')),
537 538 ('u', 'untracked', None, _('additionally show untracked files')),
538 539 ] + commands.walkopts,
539 540 _('hg kwfiles [OPTION]... [FILE]...')),
540 541 'kwshrink': (shrink, commands.walkopts,
541 542 _('hg kwshrink [OPTION]... [FILE]...')),
542 543 }
@@ -1,117 +1,116 b''
1 1 # win32mbcs.py -- MBCS filename support for Mercurial
2 2 #
3 3 # Copyright (c) 2008 Shun-ichi Goto <shunichi.goto@gmail.com>
4 4 #
5 5 # Version: 0.2
6 6 # Author: Shun-ichi Goto <shunichi.goto@gmail.com>
7 7 #
8 8 # This software may be used and distributed according to the terms of the
9 9 # GNU General Public License version 2, incorporated herein by reference.
10 10 #
11 11
12 12 '''allow the use of MBCS paths with problematic encodings
13 13
14 14 Some MBCS encodings are not good for some path operations (i.e. splitting
15 15 path, case conversion, etc.) with its encoded bytes. We call such a encoding
16 16 (i.e. shift_jis and big5) as "problematic encoding". This extension can be
17 17 used to fix the issue with those encodings by wrapping some functions to
18 18 convert to Unicode string before path operation.
19 19
20 20 This extension is useful for:
21 21 * Japanese Windows users using shift_jis encoding.
22 22 * Chinese Windows users using big5 encoding.
23 23 * All users who use a repository with one of problematic encodings on
24 24 case-insensitive file system.
25 25
26 26 This extension is not needed for:
27 27 * Any user who use only ASCII chars in path.
28 28 * Any user who do not use any of problematic encodings.
29 29
30 30 Note that there are some limitations on using this extension:
31 31 * You should use single encoding in one repository.
32 32 * You should set same encoding for the repository by locale or HGENCODING.
33 33
34 34 Path encoding conversion are done between Unicode and encoding.encoding which
35 35 is decided by Mercurial from current locale setting or HGENCODING.
36 36 '''
37 37
38 import os
38 import os, sys
39 39 from mercurial.i18n import _
40 40 from mercurial import util, encoding
41 41
42 42 def decode(arg):
43 43 if isinstance(arg, str):
44 44 uarg = arg.decode(encoding.encoding)
45 45 if arg == uarg.encode(encoding.encoding):
46 46 return uarg
47 47 raise UnicodeError("Not local encoding")
48 48 elif isinstance(arg, tuple):
49 49 return tuple(map(decode, arg))
50 50 elif isinstance(arg, list):
51 51 return map(decode, arg)
52 52 return arg
53 53
54 54 def encode(arg):
55 55 if isinstance(arg, unicode):
56 56 return arg.encode(encoding.encoding)
57 57 elif isinstance(arg, tuple):
58 58 return tuple(map(encode, arg))
59 59 elif isinstance(arg, list):
60 60 return map(encode, arg)
61 61 return arg
62 62
63 63 def wrapper(func, args):
64 64 # check argument is unicode, then call original
65 65 for arg in args:
66 66 if isinstance(arg, unicode):
67 67 return func(*args)
68 68
69 69 try:
70 70 # convert arguments to unicode, call func, then convert back
71 71 return encode(func(*decode(args)))
72 72 except UnicodeError:
73 73 # If not encoded with encoding.encoding, report it then
74 74 # continue with calling original function.
75 75 raise util.Abort(_("[win32mbcs] filename conversion fail with"
76 76 " %s encoding\n") % (encoding.encoding))
77 77
78 78 def wrapname(name):
79 idx = name.rfind('.')
80 module = name[:idx]
81 name = name[idx+1:]
82 module = globals()[module]
79 module, name = name.rsplit('.', 1)
80 module = sys.modules[module]
83 81 func = getattr(module, name)
84 82 def f(*args):
85 83 return wrapper(func, args)
86 84 try:
87 85 f.__name__ = func.__name__ # fail with python23
88 86 except Exception:
89 87 pass
90 88 setattr(module, name, f)
91 89
92 90 # List of functions to be wrapped.
93 91 # NOTE: os.path.dirname() and os.path.basename() are safe because
94 92 # they use result of os.path.split()
95 93 funcs = '''os.path.join os.path.split os.path.splitext
96 94 os.path.splitunc os.path.normpath os.path.normcase os.makedirs
97 util.endswithsep util.splitpath util.checkcase util.fspath'''
95 mercurial.util.endswithsep mercurial.util.splitpath mercurial.util.checkcase
96 mercurial.util.fspath mercurial.windows.pconvert'''
98 97
99 98 # codec and alias names of sjis and big5 to be faked.
100 99 problematic_encodings = '''big5 big5-tw csbig5 big5hkscs big5-hkscs
101 100 hkscs cp932 932 ms932 mskanji ms-kanji shift_jis csshiftjis shiftjis
102 101 sjis s_jis shift_jis_2004 shiftjis2004 sjis_2004 sjis2004
103 102 shift_jisx0213 shiftjisx0213 sjisx0213 s_jisx0213 950 cp950 ms950 '''
104 103
105 104 def reposetup(ui, repo):
106 105 # TODO: decide use of config section for this extension
107 106 if not os.path.supports_unicode_filenames:
108 107 ui.warn(_("[win32mbcs] cannot activate on this platform.\n"))
109 108 return
110 109
111 110 # fake is only for relevant environment.
112 111 if encoding.encoding.lower() in problematic_encodings.split():
113 112 for f in funcs.split():
114 113 wrapname(f)
115 114 ui.debug(_("[win32mbcs] activated with encoding: %s\n")
116 115 % encoding.encoding)
117 116
@@ -1,3518 +1,3507 b''
1 1 # commands.py - command processing for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from node import hex, nullid, nullrev, short
9 9 from lock import release
10 10 from i18n import _, gettext
11 11 import os, re, sys, subprocess, difflib, time, tempfile
12 12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
13 13 import patch, help, mdiff, url, encoding
14 14 import archival, changegroup, cmdutil, sshserver, hbisect
15 15 from hgweb import server
16 16 import merge as merge_
17 17
18 18 # Commands start here, listed alphabetically
19 19
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the repository.

    The files will be added to the repository at the next commit. To undo an
    add before that, see hg forget.

    If no names are given, add all files to the repository.
    """

    # Fix: dropped the dead local `exacts = {}` which was assigned and
    # never read in the original.
    bad = []
    names = []
    m = cmdutil.match(repo, pats, opts)
    oldbad = m.bad
    # record files the matcher rejects while delegating to the old handler
    m.bad = lambda x,y: bad.append(x) or oldbad(x,y)

    for f in repo.walk(m):
        exact = m.exact(f)
        if exact or f not in repo.dirstate:
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % m.rel(f))
    if not opts.get('dry_run'):
        # repo.add() returns the files it could not add
        bad += [f for f in repo.add(names) if f in m.files()]
    # exit status 1 if any file was rejected, 0 otherwise
    return bad and 1 or 0
47 47
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the repository.

    New files are ignored if they match any of the patterns in .hgignore. As
    with add, these changes take effect at the next commit.

    Use the -s/--similarity option to detect renamed files. With a parameter
    greater than 0, this compares every removed file with every added file and
    records those similar enough as renames. This option takes a percentage
    between 0 (disabled) and 100 (files must be identical) as its parameter.
    Detecting renamed files this way can be expensive.
    """
    # --similarity must parse as a number in the range [0, 100]
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if not (0 <= sim <= 100):
        raise util.Abort(_('similarity must be between 0 and 100'))
    # cmdutil expects a fraction, not a percentage
    return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
69 69
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for each line

    This command is useful for discovering when a change was made and by whom.

    Without the -a/--text option, annotate will avoid processing files it
    detects as binary. With -a, annotate will annotate the file anyway,
    although the results will probably be neither useful nor desirable.
    """
    datefunc = ui.quiet and util.shortdate or util.datestr
    getdate = util.cachefunc(lambda x: datefunc(x[0].date()))

    if not pats:
        raise util.Abort(_('at least one filename or pattern is required'))

    # map each display option to a formatter for one annotated line;
    # x is a (filectx, lineno) pair produced by fctx.annotate()
    opmap = [('user', lambda x: ui.shortuser(x[0].user())),
             ('number', lambda x: str(x[0].rev())),
             ('changeset', lambda x: short(x[0].node())),
             ('date', getdate),
             ('follow', lambda x: x[0].path()),
            ]

    # default to showing revision numbers when nothing else was requested
    if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
        and not opts.get('follow')):
        opts['number'] = 1

    linenumber = opts.get('line_number') is not None
    if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
        raise util.Abort(_('at least one of -n/-c is required for -l'))

    funcmap = [func for op, func in opmap if opts.get(op)]
    if linenumber:
        # append ":<lineno>" to whatever the last formatter produces
        lastfunc = funcmap[-1]
        funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])

    ctx = repo[opts.get('rev')]

    m = cmdutil.match(repo, pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        if not opts.get('text') and util.binary(fctx.data()):
            ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=opts.get('follow'),
                              linenumber=linenumber)
        pieces = []

        # build one right-aligned column per requested formatter
        for f in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                ml = max(map(len, l))
                pieces.append(["%*s" % (ml, x) for x in l])

        if pieces:
            for p, l in zip(zip(*pieces), lines):
                ui.write("%s: %s" % (" ".join(p), l[1]))
129 129
def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working directory; use
    -r/--rev to specify a different revision.

    To specify the type of archive to create, use -t/--type. Valid types are:

    "files" (default): a directory full of files
    "tar": tar archive, uncompressed
    "tbz2": tar archive, compressed using bzip2
    "tgz": tar archive, compressed using gzip
    "uzip": zip archive, uncompressed
    "zip": zip archive, compressed using deflate

    The exact name of the destination archive or directory is given using a
    format string; see 'hg help export' for details.

    Each member added to an archive file has a directory prefix prepended. Use
    -p/--prefix to specify a format string for the prefix. The default is the
    basename of the archive, with suffixes removed.
    '''

    ctx = repo[opts.get('rev')]
    if not ctx:
        raise util.Abort(_('no working directory: please specify a revision'))
    node = ctx.node()
    # expand %-style format specifiers in the destination name
    dest = cmdutil.make_filename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise util.Abort(_('repository root cannot be destination'))
    matchfn = cmdutil.match(repo, [], opts)
    kind = opts.get('type') or 'files'
    prefix = opts.get('prefix')
    if dest == '-':
        # writing to stdout only works for single-stream archive types
        if kind == 'files':
            raise util.Abort(_('cannot archive plain files to stdout'))
        dest = sys.stdout
        if not prefix: prefix = os.path.basename(repo.root) + '-%h'
    prefix = cmdutil.make_filename(repo, prefix, node)
    archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
                     matchfn, prefix)
171 171
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Commit the backed out changes as a new changeset. The new changeset is a
    child of the backed out changeset.

    If you backout a changeset other than the tip, a new head is created. This
    head will be the new tip and you should merge this backout changeset with
    another head.

    The --merge option remembers the parent of the working directory before
    starting the backout, then merges the new head with that changeset
    afterwards. This saves you from doing the merge by hand. The result of
    this merge is not committed, as with a normal merge.

    See 'hg help dates' for a list of formats valid for -d/--date.
    '''
    # node and rev are aliases; exactly one may be given
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise util.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.bail_if_changed(repo)
    node = repo.lookup(rev)

    op1, op2 = repo.dirstate.parents()
    # only ancestors of the working directory parent can be backed out
    a = repo.changelog.ancestor(op1, node)
    if a != node:
        raise util.Abort(_('cannot backout change on a different branch'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise util.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        # backing out a merge requires choosing which parent to revert to
        if not opts.get('parent'):
            raise util.Abort(_('cannot backout a merge changeset without '
                               '--parent'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise util.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise util.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    hg.clean(repo, node, show_stats=False)
    repo.dirstate.setbranch(branch)
    # revert the working directory to the chosen parent, then commit the
    # result as the backout changeset
    revert_opts = opts.copy()
    revert_opts['date'] = None
    revert_opts['all'] = True
    revert_opts['rev'] = hex(parent)
    revert_opts['no_backup'] = None
    revert(ui, repo, **revert_opts)
    commit_opts = opts.copy()
    commit_opts['addremove'] = False
    if not commit_opts['message'] and not commit_opts['logfile']:
        commit_opts['message'] = _("Backed out changeset %s") % (short(node))
        commit_opts['force_editor'] = True
    commit(ui, repo, **commit_opts)
    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if op1 != node:
        # restore the pre-backout working directory parent
        hg.clean(repo, op1, show_stats=False)
        if opts.get('merge'):
            ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
            hg.merge(repo, hex(repo.changelog.tip()))
        else:
            ui.status(_('the backout changeset is a new head - '
                        'do not forget to merge\n'))
            ui.status(_('(use "backout --merge" '
                        'if you want to auto-merge)\n'))
257 257
def bisect(ui, repo, rev=None, extra=None, command=None,
           reset=None, good=None, bad=None, skip=None, noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems. To use,
    mark the earliest changeset you know exhibits the problem as bad, then
    mark the latest changeset which is free from the problem as good. Bisect
    will update your working directory to a revision for testing (unless the
    -U/--noupdate option is specified). Once you have performed tests, mark
    the working directory as good or bad, and bisect will either update to
    another candidate changeset or announce that it has found the bad
    revision.

    As a shortcut, you can also use the revision argument to mark a revision
    as good or bad without checking it out first.

    If you supply a command, it will be used for automatic bisection. Its exit
    status will be used to mark revisions as good or bad: status 0 means good,
    125 means to skip the revision, 127 (command not found) will abort the
    bisection, and any other non-zero exit status means the revision is bad.
    """
    # report the changeset(s) the bisection narrowed the problem to
    def print_result(nodes, good):
        displayer = cmdutil.show_changeset(ui, repo, {})
        if len(nodes) == 1:
            # narrowed it down to a single revision
            if good:
                ui.write(_("The first good revision is:\n"))
            else:
                ui.write(_("The first bad revision is:\n"))
            displayer.show(repo[nodes[0]])
        else:
            # multiple possible revisions
            if good:
                ui.write(_("Due to skipped revisions, the first "
                           "good revision could be any of:\n"))
            else:
                ui.write(_("Due to skipped revisions, the first "
                           "bad revision could be any of:\n"))
            for n in nodes:
                displayer.show(repo[n])

    # abort unless both a good and a bad revision are known; returns True
    # when bisection can proceed, None when interactively collecting marks
    def check_state(state, interactive=True):
        if not state['good'] or not state['bad']:
            if (good or bad or skip or reset) and interactive:
                return
            if not state['good']:
                raise util.Abort(_('cannot bisect (no known good revisions)'))
            else:
                raise util.Abort(_('cannot bisect (no known bad revisions)'))
        return True

    # backward compatibility
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra or good + bad + skip + reset + bool(command) > 1:
        raise util.Abort(_('incompatible arguments'))

    if reset:
        # forget all collected marks and start over
        p = repo.join("bisect.state")
        if os.path.exists(p):
            os.unlink(p)
        return

    state = hbisect.load_state(repo)

    if command:
        # automatic mode: run the command, map its exit status to a mark,
        # and loop until the search space is exhausted
        commandpath = util.find_exe(command)
        if commandpath is None:
            raise util.Abort(_("cannot find executable: %s") % command)
        changesets = 1
        try:
            while changesets:
                # update state
                status = subprocess.call([commandpath])
                if status == 125:
                    transition = "skip"
                elif status == 0:
                    transition = "good"
                # status < 0 means process was killed
                elif status == 127:
                    raise util.Abort(_("failed to execute %s") % command)
                elif status < 0:
                    raise util.Abort(_("%s killed") % command)
                else:
                    transition = "bad"
                ctx = repo[rev or '.']
                state[transition].append(ctx.node())
                ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
                check_state(state, interactive=False)
                # bisect
                nodes, changesets, good = hbisect.bisect(repo.changelog, state)
                # update to next check
                cmdutil.bail_if_changed(repo)
                hg.clean(repo, nodes[0], show_stats=False)
        finally:
            # persist marks even when the command loop aborts
            hbisect.save_state(repo, state)
        return print_result(nodes, not status)

    # update state
    node = repo.lookup(rev or '.')
    if good:
        state['good'].append(node)
    elif bad:
        state['bad'].append(node)
    elif skip:
        state['skip'].append(node)

    hbisect.save_state(repo, state)

    if not check_state(state):
        return

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo.changelog, state)
    if changesets == 0:
        print_result(nodes, good)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %d:%s "
                   "(%d changesets remaining, ~%d tests)\n")
                 % (rev, short(node), changesets, tests))
        if not noupdate:
            cmdutil.bail_if_changed(repo)
            return hg.clean(repo, node)
395 395
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    With no argument, show the current branch name. With one argument, set the
    working directory branch name (the branch will not exist in the repository
    until the next commit). Standard practice recommends that primary
    development take place on the 'default' branch.

    Unless -f/--force is specified, branch will not let you set a branch name
    that already exists, even if it's inactive.

    Use -C/--clean to reset the working directory branch to that of the parent
    of the working directory, negating a previous branch change.

    Use the command 'hg update' to switch to an existing branch. Use 'hg
    commit --close-branch' to mark this branch as closed.
    """

    if opts.get('clean'):
        # -C: revert to the first parent's branch
        label = repo[None].parents()[0].branch()
        repo.dirstate.setbranch(label)
        ui.status(_('reset working directory to branch %s\n') % label)
    elif label:
        # refuse to reuse an existing branch name unless forced or the
        # name matches a parent of the working directory
        if not opts.get('force') and label in repo.branchtags():
            if label not in [p.branch() for p in repo.parents()]:
                raise util.Abort(_('a branch of the same name already exists'
                                   ' (use --force to override)'))
        repo.dirstate.setbranch(encoding.fromlocal(label))
        ui.status(_('marked working directory as branch %s\n') % label)
    else:
        # no argument: just print the current branch name
        ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
427 427
def branches(ui, repo, active=False, closed=False):
    """list repository named branches

    List the repository's named branches, indicating which ones are inactive.
    If -c/--closed is specified, also list branches which have been marked
    closed (see hg commit --close-branch).

    If -a/--active is specified, only show active branches. A branch is
    considered active if it contains repository heads.

    Use the command 'hg update' to switch to an existing branch.
    """

    hexfunc = ui.debugflag and hex or short
    activebranches = [encoding.tolocal(repo[n].branch())
                            for n in repo.heads()]
    # a branch is "active" when it owns a repository head that is still open
    def testactive(tag, node):
        realhead = tag in activebranches
        open = node in repo.branchheads(tag, closed=False)
        return realhead and open
    # sort: active first, then by descending revision number
    branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
                       for tag, node in repo.branchtags().items()],
                      reverse=True)

    for isactive, node, tag in branches:
        # NOTE: here 'node' is actually the revision number from the sort key
        if (not active) or isactive:
            if ui.quiet:
                ui.write("%s\n" % tag)
            else:
                hn = repo.lookup(node)
                if isactive:
                    notice = ''
                elif hn not in repo.branchheads(tag, closed=False):
                    if not closed:
                        continue
                    notice = ' (closed)'
                else:
                    notice = ' (inactive)'
                # pad so the rev:hash column lines up across rows
                rev = str(node).rjust(31 - encoding.colwidth(tag))
                data = tag, rev, hexfunc(hn), notice
                ui.write("%s %s:%s%s\n" % data)
469 469
def bundle(ui, repo, fname, dest=None, **opts):
    """create a changegroup file

    Generate a compressed changegroup file collecting changesets not known to
    be in another repository.

    If no destination repository is specified the destination is assumed to
    have all the nodes specified by one or more --base parameters. To create a
    bundle containing all changesets, use -a/--all (or --base null).

    You can change compression method with the -t/--type option. The available
    compression methods are: none, bzip2, and gzip (by default, bundles are
    compressed using bzip2).

    The bundle file can then be transferred using conventional means and
    applied to another repository with the unbundle or pull command. This is
    useful when direct push and pull are not available or when exporting an
    entire repository is undesirable.

    Applying bundles preserves all changeset contents including permissions,
    copy/rename information, and revision history.
    """
    revs = opts.get('rev') or None
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    if opts.get('all'):
        base = ['null']
    else:
        base = opts.get('base')
    if base:
        if dest:
            raise util.Abort(_("--base is incompatible with specifying "
                               "a destination"))
        base = [repo.lookup(rev) for rev in base]
        # create the right base
        # XXX: nodesbetween / changegroup* should be "fixed" instead
        o = []
        # 'has' holds every node reachable from a --base node
        has = set((nullid,))
        for n in base:
            has.update(repo.changelog.reachable(n))
        if revs:
            visit = list(revs)
        else:
            visit = repo.changelog.heads()
        seen = {}
        # walk ancestors from the heads, collecting roots whose parents are
        # all already in 'has'; those roots are the changegroup bases
        while visit:
            n = visit.pop(0)
            parents = [p for p in repo.changelog.parents(n) if p not in has]
            if len(parents) == 0:
                o.insert(0, n)
            else:
                for p in parents:
                    if p not in seen:
                        seen[p] = 1
                        visit.append(p)
    else:
        # no --base: compare against the (possibly defaulted) destination
        dest, revs, checkout = hg.parseurl(
            ui.expandpath(dest or 'default-push', dest or 'default'), revs)
        other = hg.repository(cmdutil.remoteui(repo, opts), dest)
        o = repo.findoutgoing(other, force=opts.get('force'))

    if revs:
        cg = repo.changegroupsubset(o, revs, 'bundle')
    else:
        cg = repo.changegroup(o, 'bundle')

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
    bundletype = btypes.get(bundletype)
    if bundletype not in changegroup.bundletypes:
        raise util.Abort(_('unknown bundle type specified with --type'))

    changegroup.writebundle(cg, fname, bundletype)
543 543
def cat(ui, repo, file1, *pats, **opts):
    """output the current or given revision of files

    Print the specified files as they were at the given revision. If no
    revision is given, the parent of the working directory is used, or tip if
    no revision is checked out.

    Output may be to a file, in which case the name of the file is given using
    a format string. The formatting rules are the same as for the export
    command, with the following additions:

    %s basename of file being printed
    %d dirname of file being printed, or '.' if in repository root
    %p root-relative path name of file being printed
    """
    ctx = repo[opts.get('rev')]
    matcher = cmdutil.match(repo, (file1,) + pats, opts)
    # exit status: 0 once at least one file has been printed, 1 otherwise
    status = 1
    for path in ctx.walk(matcher):
        out = cmdutil.make_file(repo, opts.get('output'), ctx.node(),
                                pathname=path)
        contents = ctx[path].data()
        if opts.get('decode'):
            contents = repo.wwritedata(path, contents)
        out.write(contents)
        status = 0
    return status
570 570
def clone(ui, source, dest=None, **opts):
    """make a copy of an existing repository

    Create a copy of an existing repository in a new directory.

    If no destination directory name is specified, it defaults to the basename
    of the source.

    The location of the source is added to the new repository's .hg/hgrc file,
    as the default to be used for future pulls.

    If you use the -r/--rev option to clone up to a specific revision, no
    subsequent revisions (including subsequent tags) will be present in the
    cloned repository. This option implies --pull, even on local repositories.

    By default, clone will check out the head of the 'default' branch. If the
    -U/--noupdate option is used, the new clone will contain only a repository
    (.hg) and no working copy (the working copy parent is the null revision).

    See 'hg help urls' for valid source format details.

    It is possible to specify an ssh:// URL as the destination, but no
    .hg/hgrc and working directory will be created on the remote side. Please
    see 'hg help urls' for important details about ssh:// URLs.

    For efficiency, hardlinks are used for cloning whenever the source and
    destination are on the same filesystem (note this applies only to the
    repository data, not to the checked out files). Some filesystems, such as
    AFS, implement hardlinking incorrectly, but do not report errors. In these
    cases, use the --pull option to avoid hardlinking.

    In some cases, you can clone repositories and checked out files using full
    hardlinks with

    $ cp -al REPO REPOCLONE

    This is the fastest way to clone, but it is not always safe. The operation
    is not atomic (making sure REPO is not modified during the operation is up
    to you) and you have to make sure your editor breaks hardlinks (Emacs and
    most Linux Kernel tools do so). Also, this is not compatible with certain
    extensions that place their metadata under the .hg directory, such as mq.
    """
    # thin wrapper: all real work happens in hg.clone()
    hg.clone(cmdutil.remoteui(ui, opts), source, dest,
             pull=opts.get('pull'),
             stream=opts.get('uncompressed'),
             rev=opts.get('rev'),
             update=not opts.get('noupdate'))
618 618
def commit(ui, repo, *pats, **opts):
    """commit the specified files or all outstanding changes

    Commit changes to the given files into the repository. Unlike a
    centralized RCS, this operation is a local operation. See hg push for a
    way to actively distribute your changes.

    If a list of files is omitted, all changes reported by "hg status" will be
    committed.

    If you are committing the result of a merge, do not provide any filenames
    or -I/-X filters.

    If no commit message is specified, the configured editor is started to
    prompt you for a message.

    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    extra = {}
    if opts.get('close_branch'):
        extra['close'] = 1
    e = cmdutil.commiteditor
    if opts.get('force_editor'):
        e = cmdutil.commitforceeditor

    def commitfunc(ui, repo, message, match, opts):
        return repo.commit(message, opts.get('user'), opts.get('date'), match,
                           editor=e, extra=extra)

    node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
    if not node:
        ui.status(_("nothing changed\n"))
        return
    cl = repo.changelog
    rev = cl.rev(node)
    parents = cl.parentrevs(rev)
    # decide whether this commit created a new head
    if rev - 1 in parents:
        # one of the parents was the old tip
        pass
    elif (parents == (nullrev, nullrev) or
          len(cl.heads(cl.node(parents[0]))) > 1 and
          (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
        ui.status(_('created new head\n'))

    if ui.debugflag:
        ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
    elif ui.verbose:
        ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
667 667
def copy(ui, repo, *pats, **opts):
    """mark files as copied for the next commit

    Mark dest as having copies of source files. If dest is a directory, copies
    are put in that directory. If dest is a file, the source must be a single
    file.

    By default, this command copies the contents of files as they exist in the
    working directory. If invoked with -A/--after, the operation is recorded,
    but no copying is performed.

    This command takes effect with the next commit. To undo a copy before
    that, see hg revert.
    """
    # wlock(False): same non-waiting acquisition as the original code;
    # the lock is always released, even when cmdutil.copy raises
    wlock = repo.wlock(False)
    try:
        ret = cmdutil.copy(ui, repo, pats, opts)
    finally:
        wlock.release()
    return ret
687 687
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs not in (2, 3):
        raise util.Abort(_('either two or three arguments required'))
    if nargs == 3:
        # an explicit index file was given: open it directly
        index, rev1, rev2 = args
        r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
        lookup = r.lookup
    else:
        # two arguments: use the current repository's changelog
        if not repo:
            raise util.Abort(_("There is no Mercurial repository here "
                               "(.hg not found)"))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    ancestor = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write("%d:%s\n" % (r.rev(ancestor), hex(ancestor)))
705 705
def debugcommands(ui, cmd='', *args):
    # dump every known command with its option names, alphabetically
    for name, vals in sorted(table.iteritems()):
        name = name.split('|')[0].strip('^')
        optnames = ', '.join([i[1] for i in vals[1]])
        ui.write('%s: %s\n' % (name, optnames))
711 711
def debugcomplete(ui, cmd='', **opts):
    """returns the completion list associated with the given command"""

    if opts.get('options'):
        # list option completions (global + command-specific) instead of
        # command names
        options = []
        otables = [globalopts]
        if cmd:
            aliases, entry = cmdutil.findcmd(cmd, table, False)
            otables.append(entry[1])
        for t in otables:
            for o in t:
                # o is (shortname, longname, default, help)
                if o[0]:
                    options.append('-%s' % o[0])
                options.append('--%s' % o[1])
        ui.write("%s\n" % "\n".join(options))
        return

    cmdlist = cmdutil.findpossible(cmd, table)
    if ui.verbose:
        cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
    ui.write("%s\n" % "\n".join(sorted(cmdlist)))
733 733
def debugfsinfo(ui, path = "."):
    # create a scratch file so filesystem case-sensitivity can be probed
    probe = '.debugfsinfo'
    open(probe, 'w').write('')
    def yesno(flag):
        return flag and 'yes' or 'no'
    ui.write('exec: %s\n' % yesno(util.checkexec(path)))
    ui.write('symlink: %s\n' % yesno(util.checklink(path)))
    ui.write('case-sensitive: %s\n' % yesno(util.checkcase(probe)))
    os.unlink(probe)
741 741
def debugrebuildstate(ui, repo, rev="tip"):
    """rebuild the dirstate as it would look like for the given revision"""
    ctx = repo[rev]
    # hold the working-directory lock while rewriting the dirstate
    wlock = repo.wlock()
    try:
        repo.dirstate.rebuild(ctx.node(), ctx.manifest())
    finally:
        wlock.release()
750 750
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # dirstate state letters used below: n=normal, a=added, r=removed,
    # m=merged; cross-check each against the parent manifests
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # reverse direction: every manifest file must be tracked sanely
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise util.Abort(error)
777 777
def showconfig(ui, repo, *values, **opts):
    """show combined config settings from all hgrc files

    With no arguments, print names and values of all config items.

    With one argument of the form section.name, print just the value of that
    config item.

    With multiple arguments, print names and values of all config items with
    matching section names.

    With --debug, the source (filename and line number) is printed for each
    config item.
    """

    untrusted = bool(opts.get('untrusted'))
    if values:
        # at most one fully-qualified "section.name" item may be requested
        if len([v for v in values if '.' in v]) > 1:
            raise util.Abort(_('only one config item permitted'))
    for section, name, value in ui.walkconfig(untrusted=untrusted):
        sectname = section + '.' + name
        if values:
            for v in values:
                if v == section:
                    # a bare section name matches every item in it
                    ui.debug('%s: ' %
                             ui.configsource(section, name, untrusted))
                    ui.write('%s=%s\n' % (sectname, value))
                elif v == sectname:
                    # exact item match: print the value only
                    ui.debug('%s: ' %
                             ui.configsource(section, name, untrusted))
                    ui.write(value, '\n')
        else:
            ui.debug('%s: ' %
                     ui.configsource(section, name, untrusted))
            ui.write('%s=%s\n' % (sectname, value))
813 813
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should be used
    with care.
    """

    # a missing second parent defaults to the null revision
    p2 = rev2 or hex(nullid)

    wlock = repo.wlock()
    try:
        repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(p2))
    finally:
        wlock.release()
830 830 def debugstate(ui, repo, nodates=None):
831 831 """show the contents of the current dirstate"""
832 832 timestr = ""
833 833 showdate = not nodates
834 834 for file_, ent in sorted(repo.dirstate._map.iteritems()):
835 835 if showdate:
836 836 if ent[3] == -1:
837 837 # Pad or slice to locale representation
838 838 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
839 839 timestr = 'unset'
840 840 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
841 841 else:
842 842 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
843 843 if ent[1] & 020000:
844 844 mode = 'lnk'
845 845 else:
846 846 mode = '%3o' % (ent[1] & 0777)
847 847 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
848 848 for f in repo.dirstate.copies():
849 849 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
850 850
def debugsub(ui, repo, rev=None):
    # dump the subrepository state (path, source, revision) recorded at a
    # revision; an empty rev string means the working directory
    if rev == '':
        rev = None
    for path, state in sorted(repo[rev].substate.items()):
        ui.write('path %s\n' % path)
        ui.write(' source %s\n' % state[0])
        ui.write(' revision %s\n' % state[1])
def debugdata(ui, file_, rev):
    """dump the contents of a data file revision"""
    # data files end in ".d"; the corresponding index shares the basename
    # with an ".i" suffix
    opener = util.opener(os.getcwd(), audit=False)
    r = revlog.revlog(opener, file_[:-2] + ".i")
    try:
        ui.write(r.revision(r.lookup(rev)))
    except KeyError:
        raise util.Abort(_('invalid revision identifier %s') % rev)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the additional date format list
    if opts["extended"]:
        parsed = util.parsedate(date, util.extendeddateformats)
    else:
        parsed = util.parsedate(date)
    ui.write("internal: %s %s\n" % parsed)
    ui.write("standard: %s\n" % util.datestr(parsed))
    if range:
        # report whether the parsed timestamp falls within the given range
        matchfn = util.matchdate(range)
        ui.write("match: %s\n" % matchfn(parsed[0]))
def debugindex(ui, file_):
    """dump the contents of an index file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write(" rev offset length base linkrev"
             " nodeid p1 p2\n")
    for i in r:
        node = r.node(i)
        try:
            pp = r.parents(node)
        except Exception:
            # Best-effort: show null parents for broken entries instead of
            # aborting the dump.  Narrowed from a bare 'except:' so that
            # KeyboardInterrupt/SystemExit still propagate.
            pp = [nullid, nullid]
        ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
            i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
            short(node), short(pp[0]), short(pp[1])))
def debugindexdot(ui, file_):
    """dump an index DAG as a graphviz dot file"""
    r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
    ui.write("digraph G {\n")
    for rev in r:
        node = r.node(rev)
        parents = r.parents(node)
        # one edge per parent; the null second parent is omitted
        ui.write("\t%d -> %d\n" % (r.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(parents[1]), rev))
    ui.write("}\n")
def debuginstall(ui):
    '''test Mercurial installation'''

    def writetemp(contents):
        # write contents to a fresh temp file and return its path;
        # callers are responsible for unlinking it
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, "wb")
        f.write(contents)
        f.close()
        return name

    # running total of detected problems; also the return value
    problems = 0

    # encoding
    ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
    try:
        encoding.fromlocal("test")
    except util.Abort, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (check that your locale is properly set)\n"))
        problems += 1

    # compiled modules
    ui.status(_("Checking extensions...\n"))
    try:
        import bdiff, mpatch, base85
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" One or more extensions could not be found"))
        ui.write(_(" (check that you compiled the extensions)\n"))
        problems += 1

    # templates
    ui.status(_("Checking templates...\n"))
    try:
        import templater
        templater.templater(templater.templatepath("map-cmdline.default"))
    except Exception, inst:
        ui.write(" %s\n" % inst)
        ui.write(_(" (templates seem to have been installed incorrectly)\n"))
        problems += 1

    # patch: apply a known-good diff via the configured patcher and check
    # the result round-trips exactly
    ui.status(_("Checking patch...\n"))
    patchproblems = 0
    a = "1\n2\n3\n4\n"
    b = "1\n2\n3\ninsert\n4\n"
    fa = writetemp(a)
    d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
                      os.path.basename(fa))
    fd = writetemp(d)

    files = {}
    try:
        patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
    except util.Abort, e:
        ui.write(_(" patch call failed:\n"))
        ui.write(" " + str(e) + "\n")
        patchproblems += 1
    else:
        # the patcher should have touched exactly our one temp file
        if list(files) != [os.path.basename(fa)]:
            ui.write(_(" unexpected patch output!\n"))
            patchproblems += 1
        a = open(fa).read()
        if a != b:
            ui.write(_(" patch test failed!\n"))
            patchproblems += 1

    if patchproblems:
        if ui.config('ui', 'patch'):
            # a user-configured external patch tool is the likely culprit
            ui.write(_(" (Current patch tool may be incompatible with patch,"
                       " or misconfigured. Please check your .hgrc file)\n"))
        else:
            ui.write(_(" Internal patcher failure, please report this error"
                       " to http://mercurial.selenic.com/bts/\n"))
    problems += patchproblems

    os.unlink(fa)
    os.unlink(fd)

    # editor
    ui.status(_("Checking commit editor...\n"))
    editor = ui.geteditor()
    # the editor setting may include arguments, so also try its first word
    cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
    if not cmdpath:
        if editor == 'vi':
            ui.write(_(" No commit editor set and can't find vi in PATH\n"))
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        else:
            ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
            ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
        problems += 1

    # check username: same lookup order ui.username() uses
    ui.status(_("Checking username...\n"))
    user = os.environ.get("HGUSER")
    if user is None:
        user = ui.config("ui", "username")
    if user is None:
        user = os.environ.get("EMAIL")
    if not user:
        ui.warn(" ")
        # ui.username() raises/warns with the detailed message itself
        ui.username()
        ui.write(_(" (specify a username in your .hgrc file)\n"))

    if not problems:
        ui.status(_("No problems detected\n"))
    else:
        ui.write(_("%s problems detected,"
                   " please check your install!\n") % problems)

    return problems
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    # context to inspect: --rev if given, else the working directory
    ctx = repo[opts.get('rev')]
    m = cmdutil.match(repo, (file1,) + pats, opts)
    # 'fn' replaces the original loop variable 'abs', which shadowed the
    # builtin abs()
    for fn in ctx.walk(m):
        fctx = ctx[fn]
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(fn)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = cmdutil.match(repo, pats, opts)
    items = list(repo.walk(m))
    if not items:
        return
    # column widths sized to the longest absolute and relative paths;
    # 'fn' replaces the original 'abs', which shadowed the builtin abs()
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(fn) for fn in items]),
        max([len(m.rel(fn)) for fn in items]))
    for fn in items:
        line = fmt % (fn, m.rel(fn), m.exact(fn) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    NOTE: diff may generate unexpected results for merges, as it will default
    to comparing against the working directory's first parent changeset if no
    revisions are specified.

    When two revision arguments are given, then changes are shown between
    those revisions. If only one revision is specified then that revision is
    compared to the working directory, and, when no revisions are specified,
    the working directory files are compared to its parent.

    Without the -a/--text option, diff will avoid generating diffs of files it
    detects as binary. With -a, diff will generate a diff anyway, probably
    with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff format.
    For more information, read 'hg help diffs'.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    # --rev and --change are mutually exclusive ways of picking endpoints
    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    if change:
        # --change: diff one revision against its first parent
        node2 = repo.lookup(change)
        node1 = repo[node2].parents()[0].node()
    else:
        node1, node2 = cmdutil.revpair(repo, revs)

    m = cmdutil.match(repo, pats, opts)
    diffopts = patch.diffopts(ui, opts)
    for chunk in patch.diff(repo, node1, node2, match=m, opts=diffopts):
        ui.write(chunk)
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.

    The information shown in the changeset header is: author, changeset hash,
    parent(s) and commit comment.

    NOTE: export may generate unexpected diff output for merge changesets, as
    it will compare the merge changeset against its first parent only.

    Output may be to a file, in which case the name of the file is given using
    a format string. The formatting rules are as follows:

    %% literal "%" character
    %H changeset hash (40 bytes of hexadecimal)
    %N number of patches being generated
    %R changeset revision number
    %b basename of the exporting repository
    %h short-form changeset hash (12 bytes of hexadecimal)
    %n zero-padded sequence number, starting at 1
    %r zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs of files
    it detects as binary. With -a, export will generate a diff anyway,
    probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff format.
    See 'hg help diffs' for more information.

    With the --switch-parent option, the diff will be against the second
    parent. It can be useful to review a merge.
    """
    if not changesets:
        raise util.Abort(_("export requires at least one changeset"))
    revs = cmdutil.revrange(repo, changesets)
    # pluralize the progress note depending on how many patches we emit
    ui.note(len(revs) > 1 and _('exporting patches:\n')
            or _('exporting patch:\n'))
    patch.export(repo, revs, template=opts.get('output'),
                 switch_parent=opts.get('switch_parent'),
                 opts=patch.diffopts(ui, opts))
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked after the next
    commit.

    This only removes files from the current branch, not from the entire
    project history, and it does not delete them from the working directory.

    To undo a forget before the next commit, see hg add.
    """

    if not pats:
        raise util.Abort(_('no files specified'))

    m = cmdutil.match(repo, pats, opts)
    st = repo.status(match=m, clean=True)
    # modified + added + deleted + clean: every matched file that is
    # currently tracked
    forgotten = sorted(st[0] + st[1] + st[3] + st[6])

    # warn about explicitly named files that are not tracked at all
    for f in m.files():
        if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
            ui.warn(_('not removing %s: file is already untracked\n')
                    % m.rel(f))

    for f in forgotten:
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    # unlink=False: stop tracking but leave the working copy alone
    repo.remove(forgotten, unlink=False)
def grep(ui, repo, pattern, *pats, **opts):
    """search for a pattern in specified files and revisions

    Search revisions of files for a regular expression.

    This command behaves differently than Unix grep. It only accepts
    Python/Perl regexps. It searches repository history, not the working
    directory. It always prints the revision number in which a match appears.

    By default, grep only prints output for the first revision of a file in
    which it finds a match. To get it to print every revision that contains a
    change in match status ("-" for a match that becomes a non-match, or "+"
    for a non-match that becomes a match), use the --all flag.
    """
    reflags = 0
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = re.compile(pattern, reflags)
    except Exception, inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return None
    # --print0 uses NUL as both field and record separator
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    # memoized filelog lookup, bounded by an LRU cache
    getfile = util.lrucachefunc(repo.file)

    def matchlines(body):
        # yield (linenum, colstart, colend, line) for every regexp match
        # in body, scanning forward one match at a time
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            begin = body.find('\n', mend) + 1 or len(body)
            lend = begin - 1
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]

    class linestate(object):
        # one matched line; equality/hashing deliberately ignore the match
        # columns so difflinestates compares lines by content
        def __init__(self, line, linenum, colstart, colend):
            self.line = line
            self.linenum = linenum
            self.colstart = colstart
            self.colend = colend

        def __hash__(self):
            return hash((self.linenum, self.line))

        def __eq__(self, other):
            return self.line == other.line

    # matches[rev][fn] -> list of linestate for that file at that revision
    matches = {}
    # copies[rev][fn] -> source name when fn was copied/renamed (with --follow)
    copies = {}
    def grepbody(fn, rev, body):
        matches[rev].setdefault(fn, [])
        m = matches[rev][fn]
        for lnum, cstart, cend, line in matchlines(body):
            s = linestate(line, lnum, cstart, cend)
            m.append(s)

    def difflinestates(a, b):
        # yield ('+'/'-', linestate) for lines whose match status changed
        # between parent (a) and child (b) states
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in xrange(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in xrange(alo, ahi):
                    yield ('-', a[i])
                for i in xrange(blo, bhi):
                    yield ('+', b[i])

    def display(fn, r, pstates, states):
        # print the matches for one file at one revision; returns True if
        # anything was written
        datefunc = ui.quiet and util.shortdate or util.datestr
        found = False
        filerevmatches = {}
        if opts.get('all'):
            iter = difflinestates(pstates, states)
        else:
            iter = [('', l) for l in states]
        for change, l in iter:
            cols = [fn, str(r)]
            if opts.get('line_number'):
                cols.append(str(l.linenum))
            if opts.get('all'):
                cols.append(change)
            if opts.get('user'):
                cols.append(ui.shortuser(get(r)[1]))
            if opts.get('date'):
                cols.append(datefunc(get(r)[2]))
            if opts.get('files_with_matches'):
                # emit each (file, rev) pair at most once
                c = (fn, r)
                if c in filerevmatches:
                    continue
                filerevmatches[c] = 1
            else:
                cols.append(l.line)
            ui.write(sep.join(cols), eol)
            found = True
        return found

    # files whose first match has been reported (unless --all); values of
    # True suppress further output for that name and any copy sources
    skip = {}
    revfiles = {}
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
    found = False
    follow = opts.get('follow')
    # walkchangerevs drives a two-phase protocol: 'add' collects match
    # state newest-first within a window, 'iter' replays it for display
    for st, rev, fns in changeiter:
        if st == 'window':
            matches.clear()
            revfiles.clear()
        elif st == 'add':
            ctx = repo[rev]
            pctx = ctx.parents()[0]
            parent = pctx.rev()
            matches.setdefault(rev, {})
            matches.setdefault(parent, {})
            files = revfiles.setdefault(rev, [])
            for fn in fns:
                flog = getfile(fn)
                try:
                    fnode = ctx.filenode(fn)
                except error.LookupError:
                    continue

                copied = flog.renamed(fnode)
                copy = follow and copied and copied[0]
                if copy:
                    copies.setdefault(rev, {})[fn] = copy
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                files.append(fn)

                if not matches[rev].has_key(fn):
                    grepbody(fn, rev, flog.read(fnode))

                # also grep the parent so 'iter' can diff match states
                pfn = copy or fn
                if not matches[parent].has_key(pfn):
                    try:
                        fnode = pctx.filenode(pfn)
                        grepbody(pfn, parent, flog.read(fnode))
                    except error.LookupError:
                        pass
        elif st == 'iter':
            parent = repo[rev].parents()[0].rev()
            for fn in sorted(revfiles.get(rev, [])):
                states = matches[rev][fn]
                copy = copies.get(rev, {}).get(fn)
                if fn in skip:
                    if copy:
                        skip[copy] = True
                    continue
                pstates = matches.get(parent, {}).get(copy or fn, [])
                if pstates or states:
                    r = display(fn, rev, pstates, states)
                    found = found or r
                    if r and not opts.get('all'):
                        # first hit reported; suppress older revisions
                        skip[fn] = True
                        if copy:
                            skip[copy] = True
def heads(ui, repo, *branchrevs, **opts):
    """show current repository heads or show branch heads

    With no arguments, show all repository head changesets.

    Repository "heads" are changesets that don't have child changesets. They
    are where development generally takes place and are the usual targets for
    update and merge operations.

    If one or more REV is given, the "branch heads" will be shown for the
    named branch associated with that revision. The name of the branch is
    called the revision's branch tag.

    Branch heads are revisions on a given named branch that do not have any
    descendants on the same branch. A branch head could be a true head or it
    could be the last changeset on a branch before a new branch was created.
    If none of the branch heads are true heads, the branch is considered
    inactive. If -c/--closed is specified, also show branch heads marked
    closed (see hg commit --close-branch).

    If STARTREV is specified only those heads (or branch heads) that are
    descendants of STARTREV will be displayed.
    """
    # start: only show heads that descend from this node (--rev)
    if opts.get('rev'):
        start = repo.lookup(opts['rev'])
    else:
        start = None
    closed = opts.get('closed')
    # _heads caches the repo-wide heads when --active filtering is on
    hideinactive, _heads = opts.get('active'), None
    if not branchrevs:
        # Assume we're looking repo-wide heads if no revs were specified.
        heads = repo.heads(start)
    else:
        if hideinactive:
            _heads = repo.heads(start)
        heads = []
        # each named branch is reported once even if several of the given
        # revisions resolve to it
        visitedset = set()
        for branchrev in branchrevs:
            branch = repo[branchrev].branch()
            if branch in visitedset:
                continue
            visitedset.add(branch)
            bheads = repo.branchheads(branch, start, closed=closed)
            if not bheads:
                # pick the warning that matches how the branch was selected
                if not opts.get('rev'):
                    ui.warn(_("no open branch heads on branch %s\n") % branch)
                elif branch != branchrev:
                    ui.warn(_("no changes on branch %s containing %s are "
                              "reachable from %s\n")
                            % (branch, branchrev, opts.get('rev')))
                else:
                    ui.warn(_("no changes on branch %s are reachable from %s\n")
                            % (branch, opts.get('rev')))
            if hideinactive:
                # --active: drop branch heads that are not true repo heads
                bheads = [bhead for bhead in bheads if bhead in _heads]
            heads.extend(bheads)
    if not heads:
        return 1
    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in heads:
        displayer.show(repo[n])
def help_(ui, name=None, with_version=False):
    """show help for a given topic or a help overview

    With no arguments, print a list of commands with short help messages.

    Given a topic, extension, or command name, print help for that topic.
    """
    # (title, options) pairs accumulated by the helpers below and printed
    # at the very end
    option_lists = []

    def addglobalopts(aliases):
        if ui.verbose:
            option_lists.append((_("global options:"), globalopts))
            if name == 'shortlist':
                option_lists.append((_('use "hg help" for the full list '
                                       'of commands'), ()))
        else:
            if name == 'shortlist':
                msg = _('use "hg help" for the full list of commands '
                        'or "hg -v" for details')
            elif aliases:
                msg = _('use "hg -v help%s" to show aliases and '
                        'global options') % (name and " " + name or "")
            else:
                msg = _('use "hg -v help %s" to show global options') % name
            option_lists.append((msg, ()))

    def helpcmd(name):
        # print detailed help for a single command
        if with_version:
            version_(ui)
            ui.write('\n')

        try:
            aliases, i = cmdutil.findcmd(name, table, False)
        except error.AmbiguousCommand, inst:
            # py3k fix: except vars can't be used outside the scope of the
            # except block, nor can be used inside a lambda. python issue4617
            prefix = inst.args[0]
            select = lambda c: c.lstrip('^').startswith(prefix)
            helplist(_('list of commands:\n\n'), select)
            return

        # synopsis
        if len(i) > 2:
            if i[2].startswith('hg'):
                ui.write("%s\n" % i[2])
            else:
                ui.write('hg %s %s\n' % (aliases[0], i[2]))
        else:
            ui.write('hg %s\n' % aliases[0])

        # aliases
        if not ui.quiet and len(aliases) > 1:
            ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))

        # description
        doc = gettext(i[0].__doc__)
        if not doc:
            doc = _("(no help text available)")
        if ui.quiet:
            doc = doc.splitlines(0)[0]
        ui.write("\n%s\n" % doc.rstrip())

        if not ui.quiet:
            # options
            if i[1]:
                option_lists.append((_("options:\n"), i[1]))

            addglobalopts(False)

    def helplist(header, select=None):
        # print a name->summary table of commands, optionally filtered by
        # the select predicate
        h = {}
        cmds = {}
        for c, e in table.iteritems():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if (not select and name != 'shortlist' and
                e[0].__module__ != __name__):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            # a leading '^' marks a command for the short list
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = gettext(e[0].__doc__)
            if not doc:
                doc = _("(no help text available)")
            h[f] = doc.splitlines(0)[0].rstrip()
            cmds[f] = c.lstrip("^")

        if not h:
            ui.status(_('no commands defined\n'))
            return

        ui.status(header)
        fns = sorted(h)
        m = max(map(len, fns))
        for f in fns:
            if ui.verbose:
                commands = cmds[f].replace("|",", ")
                ui.write(" %s:\n %s\n"%(commands, h[f]))
            else:
                ui.write(' %-*s %s\n' % (m, f, util.wrap(h[f], m + 4)))

        if name != 'shortlist':
            exts, maxlength = extensions.enabled()
            ui.write(help.listexts(_('enabled extensions:'), exts, maxlength))

        if not ui.quiet:
            addglobalopts(True)

    def helptopic(name):
        # print help for a general topic from help.helptable
        for names, header, doc in help.helptable:
            if name in names:
                break
        else:
            raise error.UnknownCommand(name)

        # description
        if not doc:
            doc = _("(no help text available)")
        if hasattr(doc, '__call__'):
            # some topics generate their text lazily
            doc = doc()

        ui.write("%s\n" % header)
        ui.write("%s\n" % doc.rstrip())

    def helpext(name):
        # print help for an extension plus the commands it provides
        try:
            mod = extensions.find(name)
        except KeyError:
            raise error.UnknownCommand(name)

        doc = gettext(mod.__doc__) or _('no help text available')
        doc = doc.splitlines(0)
        ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
        for d in doc[1:]:
            ui.write(d, '\n')

        ui.status('\n')

        try:
            ct = mod.cmdtable
        except AttributeError:
            ct = {}

        modcmds = set([c.split('|', 1)[0] for c in ct])
        helplist(_('list of commands:\n\n'), modcmds.__contains__)

    if name and name != 'shortlist':
        # try topic, then command, then extension; re-raise the last
        # UnknownCommand only if all three fail
        i = None
        for f in (helptopic, helpcmd, helpext):
            try:
                f(name)
                i = None
                break
            except error.UnknownCommand, inst:
                i = inst
        if i:
            raise i

    else:
        # program name
        if ui.verbose or with_version:
            version_(ui)
        else:
            ui.status(_("Mercurial Distributed SCM\n"))
        ui.status('\n')

        # list of commands
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        helplist(header)

    # list all option lists
    opt_output = []
    for title, options in option_lists:
        opt_output.append(("\n%s" % title, None))
        for shortopt, longopt, default, desc in options:
            if "DEPRECATED" in desc and not ui.verbose: continue
            opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
                                          longopt and " --%s" % longopt),
                              "%s%s" % (desc,
                                        default
                                        and _(" (default: %s)") % default
                                        or "")))

    if not name:
        ui.write(_("\nadditional help topics:\n\n"))
        topics = []
        for names, header, doc in help.helptable:
            # NOTE(review): the comprehension variable rebinds the outer
            # 'name' argument; harmless here since name is falsy in this
            # branch and unused afterwards
            names = [(-len(name), name) for name in names]
            names.sort()
            topics.append((names[0][1], header))
        topics_len = max([len(s[0]) for s in topics])
        for t, desc in topics:
            ui.write(" %-*s %s\n" % (topics_len, t, desc))

    if opt_output:
        opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
        for first, second in opt_output:
            if second:
                second = util.wrap(second, opts_len + 3)
                ui.write(" %-*s %s\n" % (opts_len, first, second))
            else:
                ui.write("%s\n" % first)
def identify(ui, repo, source=None,
             rev=None, num=None, id=None, branch=None, tags=None):
    """identify the working copy or specified revision

    With no revision, print a summary of the current state of the repository.

    Specifying a path to a repository root or Mercurial bundle will cause
    lookup to operate on that repository/bundle.

    This summary identifies the repository state using one or two parent hash
    identifiers, followed by a "+" if there are uncommitted changes in the
    working directory, a list of tags for this revision and a branch name for
    non-default branches.
    """

    if not repo and not source:
        raise util.Abort(_("There is no Mercurial repository here "
                           "(.hg not found)"))

    hexfunc = ui.debugflag and hex or short
    # no explicit field flags: show the default id + branch + tags summary
    default = not (num or id or branch or tags)
    output = []

    revs = []
    if source:
        # operate on the given repository/bundle instead of the local one
        source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
        repo = hg.repository(ui, source)

    if not repo.local():
        if not rev and revs:
            rev = revs[0]
        if not rev:
            rev = "tip"
        # remote repos can only be asked for node ids
        if num or branch or tags:
            raise util.Abort(
                "can't query remote revision number, branch, or tags")
        output = [hexfunc(repo.lookup(rev))]
    elif not rev:
        # working directory: report parent(s), '+' marking local changes
        ctx = repo[None]
        parents = ctx.parents()
        changed = False
        if default or id or num:
            changed = ctx.files() + ctx.deleted()
        if default or id:
            output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
                                (changed) and "+" or "")]
        if num:
            output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
                                    (changed) and "+" or ""))
    else:
        ctx = repo[rev]
        if default or id:
            output = [hexfunc(ctx.node())]
        if num:
            output.append(str(ctx.rev()))

    if repo.local() and default and not ui.quiet:
        # the default branch name is implicit and not shown
        b = encoding.tolocal(ctx.branch())
        if b != 'default':
            output.append("(%s)" % b)

        # multiple tags for a single parent separated by '/'
        t = "/".join(ctx.tags())
        if t:
            output.append(t)

    if branch:
        output.append(encoding.tolocal(ctx.branch()))

    if tags:
        output.extend(ctx.tags())

    ui.write("%s\n" % ' '.join(output))
def import_(ui, repo, patch1, *patches, **opts):
    """import an ordered set of patches

    Import a list of patches and commit them individually.

    If there are outstanding changes in the working directory, import will
    abort unless given the -f/--force flag.

    You can import a patch straight from a mail message. Even patches as
    attachments work (to use the body part, it must have type text/plain or
    text/x-patch). From and Subject headers of email message are used as
    default committer and commit message. All text/plain body parts before
    first diff are added to commit message.

    If the imported patch was generated by hg export, user and description
    from patch override values from message headers and body. Values given on
    command line with -m/--message and -u/--user override these.

    If --exact is specified, import will set the working directory to the
    parent of each patch before applying it, and will abort if the resulting
    changeset has a different ID than the one recorded in the patch. This may
    happen due to character set problems or other deficiencies in the text
    patch format.

    With -s/--similarity, hg will attempt to discover renames and copies in
    the patch in the same way as 'addremove'.

    To read a patch from standard input, use "-" as the patch name. If a URL
    is specified, the patch will be downloaded from it. See 'hg help dates'
    for a list of formats valid for -d/--date.
    """
    # Fold the mandatory first patch into one tuple with the rest.
    patches = (patch1,) + patches

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # --similarity drives rename/copy detection; validate it up front.
    try:
        sim = float(opts.get('similarity') or 0)
    except ValueError:
        raise util.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise util.Abort(_('similarity must be between 0 and 100'))

    # --exact needs a pristine working dir; plain import only refuses
    # to run on a dirty working dir unless --force is given.
    if opts.get('exact') or not opts.get('force'):
        cmdutil.bail_if_changed(repo)

    d = opts["base"]
    strip = opts["strip"]
    wlock = lock = None
    try:
        # Acquire working-dir lock before repo lock (project lock order).
        wlock = repo.wlock()
        lock = repo.lock()
        for p in patches:
            pf = os.path.join(d, p)

            # "-" means read the patch from stdin; otherwise pf may be a
            # local path or a URL.
            if pf == '-':
                ui.status(_("applying patch from stdin\n"))
                pf = sys.stdin
            else:
                ui.status(_("applying %s\n") % p)
                pf = url.open(ui, pf)
            # extract() writes the diff to a temp file and parses any
            # mail/export metadata out of the message.
            data = patch.extract(ui, pf)
            tmpname, message, user, date, branch, nodeid, p1, p2 = data

            if tmpname is None:
                raise util.Abort(_('no diffs found'))

            try:
                # Commit message priority: command line > patch > editor.
                cmdline_message = cmdutil.logmessage(opts)
                if cmdline_message:
                    # pickup the cmdline msg
                    message = cmdline_message
                elif message:
                    # pickup the patch msg
                    message = message.strip()
                else:
                    # launch the editor
                    message = None
                ui.debug(_('message:\n%s\n') % message)

                wp = repo.parents()
                if opts.get('exact'):
                    # --exact: check out the recorded parent before applying
                    # so the resulting node ID can be verified below.
                    if not nodeid or not p1:
                        raise util.Abort(_('not a Mercurial patch'))
                    p1 = repo.lookup(p1)
                    p2 = repo.lookup(p2 or hex(nullid))

                    if p1 != wp[0].node():
                        hg.clean(repo, p1)
                    repo.dirstate.setparents(p1, p2)
                elif p2:
                    # Merge patch: adopt both recorded parents if the first
                    # one matches the current checkout; unknown nodes are
                    # silently ignored (best effort).
                    try:
                        p1 = repo.lookup(p1)
                        p2 = repo.lookup(p2)
                        if p1 == wp[0].node():
                            repo.dirstate.setparents(p1, p2)
                    except error.RepoError:
                        pass
                if opts.get('exact') or opts.get('import_branch'):
                    repo.dirstate.setbranch(branch or 'default')

                files = {}
                try:
                    patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
                                files=files, eolmode=None)
                finally:
                    # Always record adds/removes (and rename detection via
                    # --similarity), even if applying partially failed.
                    files = patch.updatedir(ui, repo, files, similarity=sim/100.)
                if not opts.get('no_commit'):
                    m = cmdutil.matchfiles(repo, files or [])
                    n = repo.commit(message, opts.get('user') or user,
                                    opts.get('date') or date, match=m,
                                    editor=cmdutil.commiteditor)
                    if opts.get('exact'):
                        # Resulting node must match the one recorded in the
                        # patch, otherwise undo the commit.
                        if hex(n) != nodeid:
                            repo.rollback()
                            raise util.Abort(_('patch is damaged'
                                               ' or loses information'))
                    # Force a dirstate write so that the next transaction
                    # backs up an up-to-date file.
                    repo.dirstate.write()
            finally:
                os.unlink(tmpname)
    finally:
        release(lock, wlock)
1812 1801
def incoming(ui, repo, source="default", **opts):
    """show new changesets found in source

    Show new changesets found in the specified path/URL or the default pull
    location. These are the changesets that would have been pulled if a pull
    at the time you issued this command.

    For remote repository, using --bundle avoids downloading the changesets
    twice if the incoming is followed by a pull.

    See pull for valid source format details.
    """
    limit = cmdutil.loglimit(opts)
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
    other = hg.repository(cmdutil.remoteui(repo, opts), source)
    ui.status(_('comparing with %s\n') % url.hidepassword(source))
    if revs:
        revs = [other.lookup(rev) for rev in revs]
    common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
                                                       force=opts["force"])
    if not incoming:
        # Best-effort removal of a stale --bundle file. Only attempt it when
        # a bundle name was actually given, and only swallow the expected
        # filesystem error (previously a bare except hid everything,
        # including KeyboardInterrupt).
        if opts["bundle"]:
            try:
                os.unlink(opts["bundle"])
            except OSError:
                pass
        ui.status(_("no changes found\n"))
        return 1

    cleanup = None
    try:
        fname = opts["bundle"]
        if fname or not other.local():
            # create a bundle (uncompressed if other repo is not local)

            if revs is None and other.capable('changegroupsubset'):
                revs = rheads

            if revs is None:
                cg = other.changegroup(incoming, "incoming")
            else:
                cg = other.changegroupsubset(incoming, revs, 'incoming')
            bundletype = other.local() and "HG10BZ" or "HG10UN"
            fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
            # keep written bundle?
            if opts["bundle"]:
                cleanup = None
            if not other.local():
                # use the created uncompressed bundlerepo
                other = bundlerepo.bundlerepository(ui, repo.root, fname)

        o = other.changelog.nodesbetween(incoming, revs)[0]
        if opts.get('newest_first'):
            o.reverse()
        displayer = cmdutil.show_changeset(ui, other, opts)
        count = 0
        for n in o:
            if count >= limit:
                break
            parents = [p for p in other.changelog.parents(n) if p != nullid]
            if opts.get('no_merges') and len(parents) == 2:
                continue
            count += 1
            displayer.show(other[n])
    finally:
        # Close the (possibly bundle-backed) peer and delete the temporary
        # bundle unless the user asked to keep it via --bundle.
        if hasattr(other, 'close'):
            other.close()
        if cleanup:
            os.unlink(cleanup)
1881 1870
def init(ui, dest=".", **opts):
    """create a new repository in the given directory

    Initialize a new repository in the given directory. If the given directory
    does not exist, it will be created.

    If no directory is given, the current directory is used.

    It is possible to specify an ssh:// URL as the destination. See 'hg help
    urls' for more information.
    """
    # Build a remote-capable ui first, then let hg create the repository.
    remote_ui = cmdutil.remoteui(ui, opts)
    hg.repository(remote_ui, dest, create=1)
1894 1883
def locate(ui, repo, *pats, **opts):
    """locate files matching specific patterns

    Print files under Mercurial control in the working directory whose names
    match the given patterns.

    By default, this command searches all directories in the working
    directory. To search just the current directory and its subdirectories,
    use "--include .".

    If no patterns are given to match, this command prints the names of all
    files under Mercurial control in the working directory.

    If you want to feed the output of this command into the "xargs" command,
    use the -0 option to both this command and "xargs". This will avoid the
    problem of "xargs" treating single filenames that contain whitespace as
    multiple filenames.
    """
    # Record separator: NUL for --print0, newline otherwise.
    if opts.get('print0'):
        sep = '\0'
    else:
        sep = '\n'
    target = opts.get('rev') or None

    found = False
    matcher = cmdutil.match(repo, pats, opts, default='relglob')
    # Suppress "no such file" complaints for non-matching names.
    matcher.bad = lambda f, msg: False
    for abs in repo[target].walk(matcher):
        # When walking the working directory, only report tracked files.
        if not target and abs not in repo.dirstate:
            continue
        if opts.get('fullpath'):
            ui.write(repo.wjoin(abs), sep)
        else:
            ui.write(((pats and matcher.rel(abs)) or abs), sep)
        found = True

    if found:
        return 0
    return 1
1929 1918
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire project.

    File history is shown without following rename or copy history of files.
    Use -f/--follow with a filename to follow history across renames and
    copies. --follow without a filename will only show ancestors or
    descendants of the starting revision. --follow-first only follows the
    first parent of merge revisions.

    If no revision range is specified, the default is tip:0 unless --follow is
    set, in which case the working directory parent is used as the starting
    revision.

    See 'hg help dates' for a list of formats valid for -d/--date.

    By default this command prints revision number and changeset id, tags,
    non-trivial parents, user, date and time, and a summary for each commit.
    When the -v/--verbose switch is used, the list of changed files and full
    commit message are shown.

    NOTE: log -p/--patch may generate unexpected diff output for merge
    changesets, as it will only compare the merge changeset against its first
    parent. Also, only files different from BOTH parents will appear in
    files:.
    """

    # Memoize changeset data; it is consulted by several filters below.
    get = util.cachefunc(lambda r: repo[r].changeset())
    changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)

    limit = cmdutil.loglimit(opts)
    count = 0

    # Rename lookups (for --copies) only need to scan up to the highest
    # requested revision.
    if opts.get('copies') and opts.get('rev'):
        endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
    else:
        endrev = len(repo)
    # rcache: filename -> {changerev: rename info}
    # ncache: filename -> {filenode: rename info}
    rcache = {}
    ncache = {}
    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            # First query for this file: scan its filelog once and cache
            # rename info per linkrev / filenode.
            rcache[fn] = {}
            ncache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                node = fl.node(i)
                lr = fl.linkrev(i)
                renamed = fl.renamed(node)
                rcache[fn][lr] = renamed
                if renamed:
                    ncache[fn][node] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.

        try:
            return repo[rev][fn].renamed()
        except error.LookupError:
            pass
        return None

    # df becomes a date-match predicate when --date is given.
    df = False
    if opts["date"]:
        df = util.matchdate(opts["date"])

    only_branches = opts.get('only_branch')

    displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
    # The iterator yields ('add', rev, fns) when a revision enters the
    # window and ('iter', rev, None) when it should be emitted.
    for st, rev, fns in changeiter:
        if st == 'add':
            parents = [p for p in repo.changelog.parentrevs(rev)
                       if p != nullrev]
            # --no-merges / --only-merges filtering by parent count.
            if opts.get('no_merges') and len(parents) == 2:
                continue
            if opts.get('only_merges') and len(parents) != 2:
                continue

            # --only-branch filtering.
            if only_branches:
                revbranch = get(rev)[5]['branch']
                if revbranch not in only_branches:
                    continue

            # --date filtering (changeset tuple index 2 holds the date).
            if df:
                changes = get(rev)
                if not df(changes[2][0]):
                    continue

            # --keyword: case-insensitive match against user (1),
            # description (4) and file list (3) of the changeset tuple.
            if opts.get('keyword'):
                changes = get(rev)
                miss = 0
                for k in [kw.lower() for kw in opts['keyword']]:
                    if not (k in changes[1].lower() or
                            k in changes[4].lower() or
                            k in " ".join(changes[3]).lower()):
                        miss = 1
                        break
                if miss:
                    continue

            # --user: substring match against the committer field.
            if opts['user']:
                changes = get(rev)
                if not [k for k in opts['user'] if k in changes[1]]:
                    continue

            # --copies: collect (dest, source) rename pairs for display.
            copies = []
            if opts.get('copies') and rev:
                for fn in get(rev)[3]:
                    rename = getrenamed(fn, rev)
                    if rename:
                        copies.append((fn, rename[0]))
            displayer.show(context.changectx(repo, rev), copies=copies)
        elif st == 'iter':
            if count == limit: break
            if displayer.flush(rev):
                count += 1
2054 2043
def manifest(ui, repo, node=None, rev=None):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision. If no
    revision is given, the first parent of the working directory is used, or
    the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.
    """

    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    if not node:
        node = rev

    # Verbose-mode prefix per flag character: symlink, executable, regular.
    flag_prefix = {'l': '644 @ ', 'x': '755 * ', '': '644 '}
    ctx = repo[node]
    for path in ctx:
        if ui.debugflag:
            ui.write("%40s " % hex(ctx.manifest()[path]))
        if ui.verbose:
            ui.write(flag_prefix[ctx.flags(path)])
        ui.write("%s\n" % path)
2080 2069
def merge(ui, repo, node=None, **opts):
    """merge working directory with another revision

    The current working directory is updated with all changes made in the
    requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for the
    next commit and a commit must be performed before any further updates to
    the repository are allowed. The next commit will have two parents.

    If no revision is specified, the working directory's parent is a head
    revision, and the current branch contains exactly one other head, the
    other head is merged with by default. Otherwise, an explicit revision with
    which to merge with must be provided.
    """

    # The target may come positionally or via -r/--rev, but not both.
    if opts.get('rev') and node:
        raise util.Abort(_("please specify just one revision"))
    if not node:
        node = opts.get('rev')

    if not node:
        # No explicit target: pick the "other" head of the current branch,
        # which only works when the branch has exactly two heads and the
        # working dir sits on one of them.
        branch = repo.changectx(None).branch()
        bheads = repo.branchheads(branch)
        if len(bheads) > 2:
            raise util.Abort(_("branch '%s' has %d heads - "
                               "please merge with an explicit rev") %
                             (branch, len(bheads)))

        parent = repo.dirstate.parents()[0]
        if len(bheads) == 1:
            # A single branch head means there is nothing to merge on this
            # branch; other repo heads would need an explicit rev.
            if len(repo.heads()) > 1:
                raise util.Abort(_("branch '%s' has one head - "
                                   "please merge with an explicit rev") %
                                 branch)
            msg = _('there is nothing to merge')
            if parent != repo.lookup(repo[None].branch()):
                msg = _('%s - use "hg update" instead') % msg
            raise util.Abort(msg)

        if parent not in bheads:
            raise util.Abort(_('working dir not at a head rev - '
                               'use "hg update" or merge with an explicit rev'))
        # Merge with whichever of the two heads we are not currently on.
        node = parent == bheads[0] and bheads[-1] or bheads[0]

    if opts.get('preview'):
        # --preview: show the changesets that the merge would pull in
        # (everything between the common ancestor and the target), then
        # return without touching the working directory.
        p1 = repo['.']
        p2 = repo[node]
        common = p1.ancestor(p2)
        roots, heads = [common.node()], [p2.node()]
        displayer = cmdutil.show_changeset(ui, repo, opts)
        for node in repo.changelog.nodesbetween(roots=roots, heads=heads)[0]:
            displayer.show(repo[node])
        return 0

    return hg.merge(repo, node, force=opts.get('force'))
2137 2126
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in destination

    Show changesets not found in the specified destination repository or the
    default push location. These are the changesets that would be pushed if a
    push was requested.

    See pull for valid destination format details.
    """
    limit = cmdutil.loglimit(opts)
    # Resolve destination: explicit arg, else default-push, else default.
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    other = hg.repository(cmdutil.remoteui(repo, opts), dest)
    ui.status(_('comparing with %s\n') % url.hidepassword(dest))
    o = repo.findoutgoing(other, force=opts.get('force'))
    if not o:
        ui.status(_("no changes found\n"))
        return 1
    # Restrict the outgoing roots to the requested revisions, if any.
    o = repo.changelog.nodesbetween(o, revs)[0]
    if opts.get('newest_first'):
        o.reverse()
    displayer = cmdutil.show_changeset(ui, repo, opts)
    count = 0
    for n in o:
        # Honour --limit and --no-merges while displaying.
        if count >= limit:
            break
        parents = [p for p in repo.changelog.parents(n) if p != nullid]
        if opts.get('no_merges') and len(parents) == 2:
            continue
        count += 1
        displayer.show(repo[n])
2172 2161
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision

    Print the working directory's parent revisions. If a revision is given via
    -r/--rev, the parent of that revision will be printed. If a file argument
    is given, the revision in which the file was last changed (before the
    working directory revision or the argument to --rev if given) is printed.
    """
    # Base context: the given revision, or the working directory.
    rev = opts.get('rev')
    if rev:
        ctx = repo[rev]
    else:
        ctx = repo[None]

    if file_:
        # A file argument must name exactly one explicit file (no patterns).
        m = cmdutil.match(repo, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise util.Abort(_('can only specify an explicit filename'))
        file_ = m.files()[0]
        # Collect the file's node in each parent that actually has it.
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                pass
        if not filenodes:
            raise util.Abort(_("'%s' not found in manifest!") % file_)
        # Map each filenode back to the changeset that introduced it.
        fl = repo.file(file_)
        p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        # Skip the null parent (e.g. for a root changeset).
        if n != nullid:
            displayer.show(repo[n])
2211 2200
def paths(ui, repo, search=None):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given, show
    definition of all available names.

    Path names are defined in the [paths] section of /etc/mercurial/hgrc and
    $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.

    See 'hg help urls' for more information.
    """
    entries = ui.configitems("paths")
    if not search:
        # No name given: list every alias with its (password-hidden) URL.
        for alias, location in entries:
            ui.write("%s = %s\n" % (alias, url.hidepassword(location)))
        return
    # Look up the single requested alias.
    for alias, location in entries:
        if alias == search:
            ui.write("%s\n" % url.hidepassword(location))
            return
    ui.warn(_("not found!\n"))
    return 1
2233 2222
def postincoming(ui, repo, modheads, optupdate, checkout):
    # Shared epilogue for pull-like commands: optionally update the working
    # directory, otherwise print a hint about the next step.
    if modheads == 0:
        return
    if optupdate:
        safe = modheads <= 1 or len(repo.branchheads()) == 1
        if safe or checkout:
            return hg.update(repo, checkout)
        ui.status(_("not updating, since new heads added\n"))
    if modheads > 1:
        ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))
2246 2235
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path or URL
    and adds them to a local repository (the current one unless -R is
    specified). By default, this does not update the copy of the project in
    the working directory.

    Use hg incoming if you want to see what would have been added by a pull at
    the time you issued this command. If you then decide to added those
    changes to the repository, you should use pull -r X where X is the last
    changeset listed by hg incoming.

    If SOURCE is omitted, the 'default' path will be used. See 'hg help urls'
    for more information.
    """
    source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
    other = hg.repository(cmdutil.remoteui(repo, opts), source)
    ui.status(_('pulling from %s\n') % url.hidepassword(source))
    if revs:
        # Translate the requested revisions on the remote side; old remotes
        # may not support lookup at all.
        try:
            revs = [other.lookup(rev) for rev in revs]
        except error.CapabilityError:
            err = _("Other repository doesn't support revision lookup, "
                    "so a rev cannot be specified.")
            raise util.Abort(err)

    modheads = repo.pull(other, heads=revs, force=opts.get('force'))
    # Delegate post-pull update/hints to the shared helper.
    return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2278 2267
def push(ui, repo, dest=None, **opts):
    """push changes to the specified destination

    Push changes from the local repository to the given destination.

    This is the symmetrical operation for pull. It moves changes from the
    current repository to a different one. If the destination is local this is
    identical to a pull in that directory from the current one.

    By default, push will refuse to run if it detects the result would
    increase the number of remote heads. This generally indicates the user
    forgot to pull and merge before pushing.

    If -r/--rev is used, the named revision and all its ancestors will be
    pushed to the remote repository.

    Please see 'hg help urls' for important details about ssh:// URLs. If
    DESTINATION is omitted, a default path will be used.
    """
    # Resolve destination: explicit arg, else default-push, else default.
    dest, revs, checkout = hg.parseurl(
        ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
    other = hg.repository(cmdutil.remoteui(repo, opts), dest)
    ui.status(_('pushing to %s\n') % url.hidepassword(dest))
    if revs:
        revs = [repo.lookup(rev) for rev in revs]

    # push subrepos depth-first for coherent ordering
    c = repo['']
    subs = c.substate # only repos that are committed
    for s in sorted(subs):
        c.sub(s).push(opts.get('force'))

    # repo.push returns a truthy value on success; convert to the
    # command-level convention (True -> exit status 0).
    r = repo.push(other, opts.get('force'), revs=revs)
    return r == 0
2313 2302
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an interrupted
    operation. It should only be necessary when Mercurial suggests it.
    """
    # Nothing to recover: report failure status.
    if not repo.recover():
        return 1
    # Recovery happened; verify the repository afterwards.
    return hg.verify(repo)
2325 2314
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the repository.

    This only removes files from the current branch, not from the entire
    project history. -A/--after can be used to remove only files that have
    already been deleted, -f/--force can be used to force deletion, and -Af
    can be used to remove files from the next revision without deleting them
    from the working directory.

    The following table details the behavior of remove for different file
    states (columns) and option combinations (rows). The file states are Added
    [A], Clean [C], Modified [M] and Missing [!] (as reported by hg status).
    The actions are Warn, Remove (from branch) and Delete (from disk).

           A  C  M  !
    none   W  RD W  R
    -f     R  RD RD R
    -A     W  W  W  R
    -Af    R  R  R  R

    This command schedules the files to be removed at the next commit. To undo
    a remove before that, see hg revert.
    """

    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise util.Abort(_('no files specified'))

    m = cmdutil.match(repo, pats, opts)
    # Status tuple indices: 0=modified, 1=added, 3=deleted, 6=clean.
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    # Warn about explicitly named files Mercurial does not track
    # (directories are exempt since they expand to tracked contents).
    for f in m.files():
        if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
            ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))

    def warn(files, reason):
        # Tell the user which files are skipped and why.
        for f in files:
            ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
                    % (m.rel(f), reason))

    # Partition per the option table in the docstring:
    # "remove" = drop from branch (and disk unless --after),
    # "forget" = unschedule a pending add.
    if force:
        remove, forget = modified + deleted + clean, added
    elif after:
        remove, forget = deleted, []
        warn(modified + added + clean, _('still exists'))
    else:
        remove, forget = deleted + clean, []
        warn(modified, _('is modified'))
        warn(added, _('has been marked for add'))

    for f in sorted(remove + forget):
        if ui.verbose or not m.exact(f):
            ui.status(_('removing %s\n') % m.rel(f))

    repo.forget(forget)
    # unlink=False (-A/--after) keeps the working copy files on disk.
    repo.remove(remove, unlink=not after)
2385 2374
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest is a
    directory, copies are put in that directory. If dest is a file, there can
    only be one source.

    By default, this command copies the contents of files as they exist in the
    working directory. If invoked with -A/--after, the operation is recorded,
    but no copying is performed.

    This command takes effect at the next commit. To undo a rename before
    that, see hg revert.
    """
    # Hold the working-directory lock for the whole copy+remove operation.
    wdlock = repo.wlock(False)
    try:
        return cmdutil.copy(ui, repo, pats, opts, rename=True)
    finally:
        wdlock.release()
2405 2394
def resolve(ui, repo, *pats, **opts):
    """retry file merges from a merge or update

    This command will cleanly retry unresolved file merges using file
    revisions preserved from the last update or merge. To attempt to resolve
    all unresolved files, use the -a/--all switch.

    If a conflict is resolved manually, please note that the changes will be
    overwritten if the merge is retried with resolve. The -m/--mark switch
    should be used to mark the file as resolved.

    This command also allows listing resolved files and manually indicating
    whether or not files are resolved. All files must be marked as resolved
    before a commit is permitted.

    The codes used to show the status of files are:
    U = unresolved
    R = resolved
    """

    all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]

    # --list, --mark and --unmark are mutually exclusive modes.
    if (show and (mark or unmark)) or (mark and unmark):
        raise util.Abort(_("too many options specified"))
    if pats and all:
        raise util.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise util.Abort(_('no files or directories specified; '
                           'use --all to remerge all files'))

    ms = merge_.mergestate(repo)
    m = cmdutil.match(repo, pats, opts)

    # Walk the recorded merge state; only act on matching files.
    for f in ms:
        if m(f):
            if show:
                # Print resolution status: U (unresolved) or R (resolved).
                ui.write("%s %s\n" % (ms[f].upper(), f))
            elif mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                # Default mode: re-run the file merge against the second
                # parent of the working directory.
                wctx = repo[None]
                mctx = wctx.parents()[-1]

                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                util.copyfile(a, a + ".resolve")

                # resolve file
                ms.resolve(f, wctx, mctx)

                # replace filemerge's .orig file with our resolve file
                util.rename(a + ".resolve", a + ".orig")
2460 2449
def revert(ui, repo, *pats, **opts):
    """restore individual files or directories to an earlier state

    (Use update -r to check out earlier revisions, revert does not change the
    working directory parents.)

    With no revision specified, revert the named files or directories to the
    contents they had in the parent of the working directory. This restores
    the contents of the affected files to an unmodified state and unschedules
    adds, removes, copies, and renames. If the working directory has two
    parents, you must explicitly specify the revision to revert to.

    Using the -r/--rev option, revert the given files or directories to their
    contents as of a specific revision. This can be helpful to "roll back"
    some or all of an earlier change. See 'hg help dates' for a list of
    formats valid for -d/--date.

    Revert modifies the working directory. It does not commit any changes, or
    change the parent of the working directory. If you revert to a revision
    other than the parent of the working directory, the reverted files will
    thus appear modified afterwards.

    If a file has been deleted, it is restored. If the executable mode of a
    file was changed, it is reset.

    If names are given, all files matching the names are reverted. If no
    arguments are given, no files are reverted.

    Modified files are saved with a .orig suffix before reverting. To disable
    these backups, use --no-backup.
    """

    # --date is a convenience spelling of --rev: translate it up front.
    if opts["date"]:
        if opts["rev"]:
            raise util.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    if not pats and not opts.get('all'):
        raise util.Abort(_('no files or directories specified; '
                           'use --all to revert the whole repo'))

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    ctx = repo[opts.get('rev')]
    node = ctx.node()
    mf = ctx.manifest()
    # pmf (parent manifest) is read lazily below; when reverting to the
    # parent itself the target manifest doubles as the parent manifest.
    if node == parent:
        pmf = mf
    else:
        pmf = None

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other.

    # names maps abs path -> (relative path for display, exact match flag)
    names = {}

    wlock = repo.wlock()
    try:
        # walk dirstate.

        m = cmdutil.match(repo, pats, opts)
        # suppress "no such file" complaints on this first pass; the
        # manifest walk below reports them via badfn instead
        m.bad = lambda x,y: False
        for abs in repo.walk(m):
            names[abs] = m.rel(abs), m.exact(abs)

        # walk target manifest.

        def badfn(path, msg):
            # only warn about names found in neither walk; a path that is
            # (or contains) an already-collected file is fine
            if path in names:
                return
            path_ = path + '/'
            for f in names:
                if f.startswith(path_):
                    return
            ui.warn("%s: %s\n" % (m.rel(path), msg))

        m = cmdutil.match(repo, pats, opts)
        m.bad = badfn
        for abs in repo[node].walk(m):
            if abs not in names:
                names[abs] = m.rel(abs), m.exact(abs)

        m = cmdutil.matchfiles(repo, names)
        changes = repo.status(match=m)[:4]
        modified, added, removed, deleted = map(set, changes)

        # if f is a rename, also revert the source
        cwd = repo.getcwd()
        for f in added:
            src = repo.dirstate.copied(f)
            if src and src not in names and repo.dirstate[src] == 'r':
                removed.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        def removeforget(abs):
            # message depends on whether the file is a scheduled add
            if repo.dirstate[abs] == 'a':
                return _('forgetting %s\n')
            return _('removing %s\n')

        # action buckets: ([files], status message or message factory).
        # NOTE: 'revert' here intentionally shadows this function's name
        # for the remainder of the body.
        revert = ([], _('reverting %s\n'))
        add = ([], _('adding %s\n'))
        remove = ([], removeforget)
        undelete = ([], _('undeleting %s\n'))

        disptable = (
            # dispatch table:
            #   file state
            #   action if in target manifest
            #   action if not in target manifest
            #   make backup if in target manifest
            #   make backup if not in target manifest
            (modified, revert, remove, True, True),
            (added, revert, remove, True, False),
            (removed, undelete, None, False, False),
            (deleted, revert, remove, False, False),
            )

        for abs, (rel, exact) in sorted(names.items()):
            mfentry = mf.get(abs)
            target = repo.wjoin(abs)
            def handle(xlist, dobackup):
                # queue abs in the chosen bucket, optionally saving a
                # .orig backup, and report what will be done
                xlist[0].append(abs)
                if dobackup and not opts.get('no_backup') and util.lexists(target):
                    bakname = "%s.orig" % rel
                    ui.note(_('saving current version of %s as %s\n') %
                            (rel, bakname))
                    if not opts.get('dry_run'):
                        util.copyfile(target, bakname)
                if ui.verbose or not exact:
                    msg = xlist[1]
                    if not isinstance(msg, basestring):
                        msg = msg(abs)
                    ui.status(msg % rel)
            for table, hitlist, misslist, backuphit, backupmiss in disptable:
                if abs not in table: continue
                # file has changed in dirstate
                if mfentry:
                    handle(hitlist, backuphit)
                elif misslist is not None:
                    handle(misslist, backupmiss)
                break
            else:
                # for-else: no status set matched, i.e. the file is
                # unchanged in the dirstate
                if abs not in repo.dirstate:
                    if mfentry:
                        handle(add, True)
                    elif exact:
                        ui.warn(_('file not managed: %s\n') % rel)
                    continue
                # file has not changed in dirstate
                if node == parent:
                    if exact: ui.warn(_('no changes needed to %s\n') % rel)
                    continue
                if pmf is None:
                    # only need parent manifest in this unlikely case,
                    # so do not read by default
                    pmf = repo[parent].manifest()
                if abs in pmf:
                    if mfentry:
                        # if version of file is same in parent and target
                        # manifests, do nothing
                        if (pmf[abs] != mfentry or
                            pmf.flags(abs) != mf.flags(abs)):
                            handle(revert, False)
                    else:
                        handle(remove, False)

        if not opts.get('dry_run'):
            def checkout(f):
                # write f's content (and flags) from the target context
                fc = ctx[f]
                repo.wwrite(f, fc.data(), fc.flags())

            audit_path = util.path_auditor(repo.root)
            for f in remove[0]:
                if repo.dirstate[f] == 'a':
                    repo.dirstate.forget(f)
                    continue
                audit_path(f)
                try:
                    util.unlink(repo.wjoin(f))
                except OSError:
                    pass
                repo.dirstate.remove(f)

            normal = None
            if node == parent:
                # We're reverting to our parent. If possible, we'd like status
                # to report the file as clean. We have to use normallookup for
                # merges to avoid losing information about merged/dirty files.
                if p2 != nullid:
                    normal = repo.dirstate.normallookup
                else:
                    normal = repo.dirstate.normal
            for f in revert[0]:
                checkout(f)
                if normal:
                    normal(f)

            for f in add[0]:
                checkout(f)
                repo.dirstate.add(f)

            normal = repo.dirstate.normallookup
            if node == parent and p2 == nullid:
                normal = repo.dirstate.normal
            for f in undelete[0]:
                checkout(f)
                normal(f)

    finally:
        wlock.release()
2674 2663
def rollback(ui, repo):
    """roll back the last transaction

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also restore the
    dirstate at the time of the last transaction, losing any dirstate changes
    since that time. This command does not alter the working directory.

    Transactions are used to encapsulate the effects of all commands that
    create new changesets or propagate existing changesets into a repository.
    For example, the following commands are transactional, and their effects
    can be rolled back:

      commit
      import
      pull
      push (with this repository as destination)
      unbundle

    This command is not intended for use on public repositories. Once changes
    are visible for pull by other users, rolling a transaction back locally is
    ineffective (someone else may already have pulled the changes).
    Furthermore, a race is possible with readers of the repository; for
    example an in-progress pull from the repository may fail if a rollback is
    performed.
    """
    # Thin command wrapper: all the work happens in the repository object.
    repo.rollback()
2702 2691
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.
    """
    # Emit the repository root path followed by a newline.
    ui.write("%s\n" % repo.root)
2709 2698
def serve(ui, repo, **opts):
    """export the repository via HTTP

    Start a local HTTP repository browser and pull server.

    By default, the server logs accesses to stdout and errors to stderr. Use
    the -A/--accesslog and -E/--errorlog options to log to files.
    """

    # --stdio serves the ssh wire protocol over stdin/stdout instead of
    # starting an HTTP server; note serve_forever() does not return here.
    if opts["stdio"]:
        if repo is None:
            raise error.RepoError(_("There is no Mercurial repository here"
                                    " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    baseui = repo and repo.baseui or ui
    # mirror the relevant command-line options into the [web] config
    # section so the web machinery picks them up
    optlist = ("name templates style address port prefix ipv6"
               " accesslog errorlog webdir_conf certificate encoding")
    for o in optlist.split():
        if opts.get(o, None):
            baseui.setconfig("web", o, str(opts[o]))
            if (repo is not None) and (repo.ui != baseui):
                repo.ui.setconfig("web", o, str(opts[o]))

    # without a repo we can only serve a multi-repo webdir_conf setup
    if repo is None and not ui.config("web", "webdir_conf"):
        raise error.RepoError(_("There is no Mercurial repository here"
                                " (.hg not found)"))

    class service(object):
        def init(self):
            # create the HTTP server; in verbose mode also report the
            # human-readable URL it is listening on
            util.set_signal_handler()
            self.httpd = server.create_server(baseui, repo)

            if not ui.verbose: return

            if self.httpd.prefix:
                prefix = self.httpd.prefix.strip('/') + '/'
            else:
                prefix = ''

            port = ':%d' % self.httpd.port
            if port == ':80':
                # default HTTP port: omit it from the displayed URL
                port = ''

            bindaddr = self.httpd.addr
            if bindaddr == '0.0.0.0':
                bindaddr = '*'
            elif ':' in bindaddr: # IPv6
                bindaddr = '[%s]' % bindaddr

            fqaddr = self.httpd.fqaddr
            if ':' in fqaddr:
                fqaddr = '[%s]' % fqaddr
            ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
                      (fqaddr, port, prefix, bindaddr, self.httpd.port))

        def run(self):
            self.httpd.serve_forever()

    service = service()

    # cmdutil.service handles daemonization around init/run
    cmdutil.service(opts, initfn=service.init, runfn=service.run)
2773 2762
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only files
    that match are shown. Files that are clean or ignored or the source of a
    copy/move operation, are not listed unless -c/--clean, -i/--ignored,
    -C/--copies or -A/--all are given. Unless options described with "show
    only ..." are given, the options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files unless
    explicitly requested with -u/--unknown or -i/--ignored.

    NOTE: status may appear to disagree with diff if permissions have changed
    or a merge has occurred. The standard diff format does not report
    permission changes and diff only reports changes relative to one merge
    parent.

    If one revision is given, it is used as the base revision. If two
    revisions are given, the differences between them are shown.

    The codes used to show the status of files are:
      M = modified
      A = added
      R = removed
      C = clean
      ! = missing (deleted by non-hg command, but still tracked)
      ? = not tracked
      I = ignored
        = origin of the previous file listed as A (added)
    """

    node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
    cwd = (pats and repo.getcwd()) or ''
    # -0/--print0 terminates entries with NUL instead of newline
    end = opts.get('print0') and '\0' or '\n'
    copy = {}
    # order here matches the status-code string 'MAR!?IC' below
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        # default selection: -mard (quiet) or -mardu
        show = ui.quiet and states[:4] or states[:5]

    stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
                       'ignored' in show, 'clean' in show, 'unknown' in show)
    changestates = zip(states, 'MAR!?IC', stat)

    if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
        # collect copy/rename sources so they can be shown under -C/-A
        ctxn = repo[nullid]
        ctx1 = repo[node1]
        ctx2 = repo[node2]
        added = stat[1]
        if node2 is None:
            added = stat[0] + stat[1] # merged?

        for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
            if k in added:
                copy[k] = v
            elif v in added:
                copy[v] = k

    for state, char, files in changestates:
        if state in show:
            format = "%s %%s%s" % (char, end)
            if opts.get('no_status'):
                format = "%%s%s" % end

            for f in files:
                ui.write(format % repo.pathto(f, cwd))
                if f in copy:
                    # show the copy source on its own indented line
                    ui.write('  %s%s' % (repo.pathto(copy[f], cwd), end))
2844 2833
def tag(ui, repo, name1, *names, **opts):
    """add one or more tags for the current or given revision

    Name a particular revision using <name>.

    Tags are used to name particular revisions of the repository and are very
    useful to compare different revisions, to go back to significant earlier
    versions or to mark branch points as releases, etc.

    If no revision is given, the parent of the working directory is used, or
    tip if no revision is checked out.

    To facilitate version control, distribution, and merging of tags, they are
    stored as a file named ".hgtags" which is managed similarly to other
    project files and can be hand-edited if necessary. The file
    '.hg/localtags' is used for local tags (not shared among repositories).

    See 'hg help dates' for a list of formats valid for -d/--date.
    """

    # default target: the working directory's parent
    rev_ = "."
    names = (name1,) + names
    if len(names) != len(set(names)):
        raise util.Abort(_('tag names must be unique'))
    for n in names:
        # these names have fixed meanings and cannot be tags
        if n in ['tip', '.', 'null']:
            raise util.Abort(_('the name \'%s\' is reserved') % n)
    if opts.get('rev') and opts.get('remove'):
        raise util.Abort(_("--rev and --remove are incompatible"))
    if opts.get('rev'):
        rev_ = opts['rev']
    message = opts.get('message')
    if opts.get('remove'):
        # removal: every named tag must exist and be of the expected kind
        expectedtype = opts.get('local') and 'local' or 'global'
        for n in names:
            if not repo.tagtype(n):
                raise util.Abort(_('tag \'%s\' does not exist') % n)
            if repo.tagtype(n) != expectedtype:
                if expectedtype == 'global':
                    raise util.Abort(_('tag \'%s\' is not a global tag') % n)
                else:
                    raise util.Abort(_('tag \'%s\' is not a local tag') % n)
        # a tag is removed by re-tagging its name to the null revision
        rev_ = nullid
        if not message:
            message = _('Removed tag %s') % ', '.join(names)
    elif not opts.get('force'):
        for n in names:
            if n in repo.tags():
                raise util.Abort(_('tag \'%s\' already exists '
                                   '(use -f to force)') % n)
    if not rev_ and repo.dirstate.parents()[1] != nullid:
        raise util.Abort(_('uncommitted merge - please provide a '
                           'specific revision'))
    r = repo[rev_].node()

    if not message:
        message = (_('Added tag %s for changeset %s') %
                   (', '.join(names), short(r)))

    date = opts.get('date')
    if date:
        date = util.parsedate(date)

    repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
2909 2898
def tags(ui, repo):
    """list repository tags

    This lists both regular and local tags. When the -v/--verbose switch is
    used, a third column "local" is printed for local tags.
    """

    hexfunc = ui.debugflag and hex or short
    tagtype = ""

    for t, n in reversed(repo.tagslist()):
        if ui.quiet:
            ui.write("%s\n" % t)
            continue

        try:
            hn = hexfunc(n)
            r = "%5d:%s" % (repo.changelog.rev(n), hn)
        except error.LookupError:
            # The tag points to a node missing from the changelog (e.g.
            # after a strip). Previously the output below lived in an
            # else: clause, so such tags were silently skipped even
            # though this fallback label was computed; show them with an
            # unknown revision number instead. The leading spaces keep
            # the column aligned with the "%5d" format above.
            r = "    ?:%s" % hn

        spaces = " " * (30 - encoding.colwidth(t))
        if ui.verbose:
            if repo.tagtype(t) == 'local':
                tagtype = " local"
            else:
                tagtype = ""
        ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2938 2927
def tip(ui, repo, **opts):
    """show the tip revision

    The tip revision (usually just called the tip) is the changeset most
    recently added to the repository (and therefore the most recently changed
    head).

    If you have just made a commit, that commit will be the tip. If you have
    just pulled changes from another repository, the tip of that repository
    becomes the current tip. The "tip" tag is special and cannot be renamed or
    assigned to a different changeset.
    """
    # The tip is the highest-numbered revision: len(repo) - 1.
    displayer = cmdutil.show_changeset(ui, repo, opts)
    displayer.show(repo[len(repo) - 1])
2952 2941
def unbundle(ui, repo, fname1, *fnames, **opts):
    """apply one or more changegroup files

    Apply one or more compressed changegroup files generated by the bundle
    command.
    """
    # Apply the mandatory first bundle plus any additional ones, in order,
    # under a single repository lock.
    lock = repo.lock()
    try:
        for fname in (fname1,) + fnames:
            fobj = url.open(ui, fname)
            bundle = changegroup.readbundle(fobj, fname)
            # modheads from the last bundle applied drives the report below
            modheads = repo.addchangegroup(bundle, 'unbundle',
                                           'bundle:' + fname)
    finally:
        lock.release()

    return postincoming(ui, repo, modheads, opts.get('update'), None)
2971 2960
def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
    """update working directory

    Update the repository's working directory to the specified revision, or
    the tip of the current branch if none is specified. Use null as the
    revision to remove the working copy (like 'hg clone -U').

    When the working directory contains no uncommitted changes, it will be
    replaced by the state of the requested revision from the repository. When
    the requested revision is on a different branch, the working directory
    will additionally be switched to that branch.

    When there are uncommitted changes, use option -C/--clean to discard them,
    forcibly replacing the state of the working directory with the requested
    revision. Alternately, use -c/--check to abort.

    When there are uncommitted changes and option -C/--clean is not used, and
    the parent revision and requested revision are on the same branch, and one
    of them is an ancestor of the other, then the new working directory will
    contain the requested revision merged with the uncommitted changes.
    Otherwise, the update will fail with a suggestion to use 'merge' or
    'update -C' instead.

    If you want to update just one file to an older revision, use revert.

    See 'hg help dates' for a list of formats valid for -d/--date.
    """
    # the target may be given positionally or via -r, but not both
    if rev and node:
        raise util.Abort(_("please specify just one revision"))

    rev = rev or node

    if check and not clean:
        # we could use dirty() but we can ignore merge and branch trivia
        wc = repo[None]
        if wc.modified() or wc.added() or wc.removed():
            raise util.Abort(_("uncommitted local changes"))

    if date:
        if rev:
            raise util.Abort(_("you can't specify a revision and a date"))
        rev = cmdutil.finddate(ui, repo, date)

    # -C discards local changes; otherwise attempt a merging update
    return hg.clean(repo, rev) if clean else hg.update(repo, rev)
3020 3009
def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's integrity,
    validating the hashes and checksums of each entry in the changelog,
    manifest, and tracked files, as well as the integrity of their crosslinks
    and indices.
    """
    # Thin wrapper: all checking is done by hg.verify(); its result is
    # propagated to the caller as the command's return value.
    return hg.verify(repo)
3032 3021
def version_(ui):
    """output version and copyright information"""
    # The version line is always written; the copyright notice goes
    # through ui.status and so respects --quiet.
    banner = _("Mercurial Distributed SCM (version %s)\n") % util.version()
    ui.write(banner)
    notice = _(
        "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
        "This is free software; see the source for copying conditions. "
        "There is NO\nwarranty; "
        "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
    )
    ui.status(notice)
3043 3032
# Command options and aliases are listed here, alphabetically
#
# Each option entry is a tuple:
#   (short flag, long name, default value, help text)

# options accepted by every command (parsed before the command name)
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', encoding.encoding, _('set the charset encoding')),
    ('', 'encodingmode', encoding.encodingmode,
     _('set the charset encoding mode')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

# -n/--dry-run support for commands that can simulate their effect
dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

# options shared by commands that talk to a remote repository
remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
]

# -I/-X filename pattern filtering
walkopts = [
    ('I', 'include', [], _('include names matching the given patterns')),
    ('X', 'exclude', [], _('exclude names matching the given patterns')),
]

# commit message sources
commitopts = [
    ('m', 'message', '', _('use <text> as commit message')),
    ('l', 'logfile', '', _('read commit message from <file>')),
]

# commit metadata overrides (date/user)
commitopts2 = [
    ('d', 'date', '', _('record datecode as commit date')),
    ('u', 'user', '', _('record the specified user as committer')),
]

# changeset display templating
templateopts = [
    ('', 'style', '', _('display using template map file')),
    ('', 'template', '', _('display with template')),
]

# options shared by log-like commands
logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '', _('limit number of changes displayed')),
    ('M', 'no-merges', None, _('do not show merges')),
] + templateopts

# basic diff-generation options
diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'nodates', None, _("don't include dates in diff headers"))
]

# extended diff-generation options (whitespace handling, context)
diffopts2 = [
    ('p', 'show-function', None, _('show which function each change is in')),
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
    ('U', 'unified', '', _('number of lines of context to show'))
]

# -s/--similarity threshold for rename guessing (addremove, import)
similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'))
]
3123 3112
3124 3113 table = {
3125 3114 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3126 3115 "addremove":
3127 3116 (addremove, similarityopts + walkopts + dryrunopts,
3128 3117 _('[OPTION]... [FILE]...')),
3129 3118 "^annotate|blame":
3130 3119 (annotate,
3131 3120 [('r', 'rev', '', _('annotate the specified revision')),
3132 3121 ('f', 'follow', None, _('follow file copies and renames')),
3133 3122 ('a', 'text', None, _('treat all files as text')),
3134 3123 ('u', 'user', None, _('list the author (long with -v)')),
3135 3124 ('d', 'date', None, _('list the date (short with -q)')),
3136 3125 ('n', 'number', None, _('list the revision number (default)')),
3137 3126 ('c', 'changeset', None, _('list the changeset')),
3138 3127 ('l', 'line-number', None,
3139 3128 _('show line number at the first appearance'))
3140 3129 ] + walkopts,
3141 3130 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3142 3131 "archive":
3143 3132 (archive,
3144 3133 [('', 'no-decode', None, _('do not pass files through decoders')),
3145 3134 ('p', 'prefix', '', _('directory prefix for files in archive')),
3146 3135 ('r', 'rev', '', _('revision to distribute')),
3147 3136 ('t', 'type', '', _('type of distribution to create')),
3148 3137 ] + walkopts,
3149 3138 _('[OPTION]... DEST')),
3150 3139 "backout":
3151 3140 (backout,
3152 3141 [('', 'merge', None,
3153 3142 _('merge with old dirstate parent after backout')),
3154 3143 ('', 'parent', '', _('parent to choose when backing out merge')),
3155 3144 ('r', 'rev', '', _('revision to backout')),
3156 3145 ] + walkopts + commitopts + commitopts2,
3157 3146 _('[OPTION]... [-r] REV')),
3158 3147 "bisect":
3159 3148 (bisect,
3160 3149 [('r', 'reset', False, _('reset bisect state')),
3161 3150 ('g', 'good', False, _('mark changeset good')),
3162 3151 ('b', 'bad', False, _('mark changeset bad')),
3163 3152 ('s', 'skip', False, _('skip testing changeset')),
3164 3153 ('c', 'command', '', _('use command to check changeset state')),
3165 3154 ('U', 'noupdate', False, _('do not update to target'))],
3166 3155 _("[-gbsr] [-c CMD] [REV]")),
3167 3156 "branch":
3168 3157 (branch,
3169 3158 [('f', 'force', None,
3170 3159 _('set branch name even if it shadows an existing branch')),
3171 3160 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3172 3161 _('[-fC] [NAME]')),
3173 3162 "branches":
3174 3163 (branches,
3175 3164 [('a', 'active', False,
3176 3165 _('show only branches that have unmerged heads')),
3177 3166 ('c', 'closed', False,
3178 3167 _('show normal and closed heads'))],
3179 3168 _('[-a]')),
3180 3169 "bundle":
3181 3170 (bundle,
3182 3171 [('f', 'force', None,
3183 3172 _('run even when remote repository is unrelated')),
3184 3173 ('r', 'rev', [],
3185 3174 _('a changeset up to which you would like to bundle')),
3186 3175 ('', 'base', [],
3187 3176 _('a base changeset to specify instead of a destination')),
3188 3177 ('a', 'all', None, _('bundle all changesets in the repository')),
3189 3178 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3190 3179 ] + remoteopts,
3191 3180 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3192 3181 "cat":
3193 3182 (cat,
3194 3183 [('o', 'output', '', _('print output to file with formatted name')),
3195 3184 ('r', 'rev', '', _('print the given revision')),
3196 3185 ('', 'decode', None, _('apply any matching decode filter')),
3197 3186 ] + walkopts,
3198 3187 _('[OPTION]... FILE...')),
3199 3188 "^clone":
3200 3189 (clone,
3201 3190 [('U', 'noupdate', None,
3202 3191 _('the clone will only contain a repository (no working copy)')),
3203 3192 ('r', 'rev', [],
3204 3193 _('a changeset you would like to have after cloning')),
3205 3194 ('', 'pull', None, _('use pull protocol to copy metadata')),
3206 3195 ('', 'uncompressed', None,
3207 3196 _('use uncompressed transfer (fast over LAN)')),
3208 3197 ] + remoteopts,
3209 3198 _('[OPTION]... SOURCE [DEST]')),
3210 3199 "^commit|ci":
3211 3200 (commit,
3212 3201 [('A', 'addremove', None,
3213 3202 _('mark new/missing files as added/removed before committing')),
3214 3203 ('', 'close-branch', None,
3215 3204 _('mark a branch as closed, hiding it from the branch list')),
3216 3205 ] + walkopts + commitopts + commitopts2,
3217 3206 _('[OPTION]... [FILE]...')),
3218 3207 "copy|cp":
3219 3208 (copy,
3220 3209 [('A', 'after', None, _('record a copy that has already occurred')),
3221 3210 ('f', 'force', None,
3222 3211 _('forcibly copy over an existing managed file')),
3223 3212 ] + walkopts + dryrunopts,
3224 3213 _('[OPTION]... [SOURCE]... DEST')),
3225 3214 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3226 3215 "debugcheckstate": (debugcheckstate, []),
3227 3216 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3228 3217 "debugcomplete":
3229 3218 (debugcomplete,
3230 3219 [('o', 'options', None, _('show the command options'))],
3231 3220 _('[-o] CMD')),
3232 3221 "debugdate":
3233 3222 (debugdate,
3234 3223 [('e', 'extended', None, _('try extended date formats'))],
3235 3224 _('[-e] DATE [RANGE]')),
3236 3225 "debugdata": (debugdata, [], _('FILE REV')),
3237 3226 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3238 3227 "debugindex": (debugindex, [], _('FILE')),
3239 3228 "debugindexdot": (debugindexdot, [], _('FILE')),
3240 3229 "debuginstall": (debuginstall, []),
3241 3230 "debugrebuildstate":
3242 3231 (debugrebuildstate,
3243 3232 [('r', 'rev', '', _('revision to rebuild to'))],
3244 3233 _('[-r REV] [REV]')),
3245 3234 "debugrename":
3246 3235 (debugrename,
3247 3236 [('r', 'rev', '', _('revision to debug'))],
3248 3237 _('[-r REV] FILE')),
3249 3238 "debugsetparents":
3250 3239 (debugsetparents, [], _('REV1 [REV2]')),
3251 3240 "debugstate":
3252 3241 (debugstate,
3253 3242 [('', 'nodates', None, _('do not display the saved mtime'))],
3254 3243 _('[OPTION]...')),
3255 3244 "debugsub":
3256 3245 (debugsub,
3257 3246 [('r', 'rev', '', _('revision to check'))],
3258 3247 _('[-r REV] [REV]')),
3259 3248 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3260 3249 "^diff":
3261 3250 (diff,
3262 3251 [('r', 'rev', [], _('revision')),
3263 3252 ('c', 'change', '', _('change made by revision'))
3264 3253 ] + diffopts + diffopts2 + walkopts,
3265 3254 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3266 3255 "^export":
3267 3256 (export,
3268 3257 [('o', 'output', '', _('print output to file with formatted name')),
3269 3258 ('', 'switch-parent', None, _('diff against the second parent'))
3270 3259 ] + diffopts,
3271 3260 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3272 3261 "^forget":
3273 3262 (forget,
3274 3263 [] + walkopts,
3275 3264 _('[OPTION]... FILE...')),
3276 3265 "grep":
3277 3266 (grep,
3278 3267 [('0', 'print0', None, _('end fields with NUL')),
3279 3268 ('', 'all', None, _('print all revisions that match')),
3280 3269 ('f', 'follow', None,
3281 3270 _('follow changeset history, or file history across copies and renames')),
3282 3271 ('i', 'ignore-case', None, _('ignore case when matching')),
3283 3272 ('l', 'files-with-matches', None,
3284 3273 _('print only filenames and revisions that match')),
3285 3274 ('n', 'line-number', None, _('print matching line numbers')),
3286 3275 ('r', 'rev', [], _('search in given revision range')),
3287 3276 ('u', 'user', None, _('list the author (long with -v)')),
3288 3277 ('d', 'date', None, _('list the date (short with -q)')),
3289 3278 ] + walkopts,
3290 3279 _('[OPTION]... PATTERN [FILE]...')),
3291 3280 "heads":
3292 3281 (heads,
3293 3282 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3294 3283 ('a', 'active', False,
3295 3284 _('show only the active heads from open branches')),
3296 3285 ('c', 'closed', False,
3297 3286 _('show normal and closed heads')),
3298 3287 ] + templateopts,
3299 3288 _('[-r STARTREV] [REV]...')),
3300 3289 "help": (help_, [], _('[TOPIC]')),
3301 3290 "identify|id":
3302 3291 (identify,
3303 3292 [('r', 'rev', '', _('identify the specified revision')),
3304 3293 ('n', 'num', None, _('show local revision number')),
3305 3294 ('i', 'id', None, _('show global revision id')),
3306 3295 ('b', 'branch', None, _('show branch')),
3307 3296 ('t', 'tags', None, _('show tags'))],
3308 3297 _('[-nibt] [-r REV] [SOURCE]')),
3309 3298 "import|patch":
3310 3299 (import_,
3311 3300 [('p', 'strip', 1,
3312 3301 _('directory strip option for patch. This has the same '
3313 3302 'meaning as the corresponding patch option')),
3314 3303 ('b', 'base', '', _('base path')),
3315 3304 ('f', 'force', None,
3316 3305 _('skip check for outstanding uncommitted changes')),
3317 3306 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3318 3307 ('', 'exact', None,
3319 3308 _('apply patch to the nodes from which it was generated')),
3320 3309 ('', 'import-branch', None,
3321 3310 _('use any branch information in patch (implied by --exact)'))] +
3322 3311 commitopts + commitopts2 + similarityopts,
3323 3312 _('[OPTION]... PATCH...')),
3324 3313 "incoming|in":
3325 3314 (incoming,
3326 3315 [('f', 'force', None,
3327 3316 _('run even when remote repository is unrelated')),
3328 3317 ('n', 'newest-first', None, _('show newest record first')),
3329 3318 ('', 'bundle', '', _('file to store the bundles into')),
3330 3319 ('r', 'rev', [],
3331 3320 _('a specific revision up to which you would like to pull')),
3332 3321 ] + logopts + remoteopts,
3333 3322 _('[-p] [-n] [-M] [-f] [-r REV]...'
3334 3323 ' [--bundle FILENAME] [SOURCE]')),
3335 3324 "^init":
3336 3325 (init,
3337 3326 remoteopts,
3338 3327 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3339 3328 "locate":
3340 3329 (locate,
3341 3330 [('r', 'rev', '', _('search the repository as it stood at REV')),
3342 3331 ('0', 'print0', None,
3343 3332 _('end filenames with NUL, for use with xargs')),
3344 3333 ('f', 'fullpath', None,
3345 3334 _('print complete paths from the filesystem root')),
3346 3335 ] + walkopts,
3347 3336 _('[OPTION]... [PATTERN]...')),
3348 3337 "^log|history":
3349 3338 (log,
3350 3339 [('f', 'follow', None,
3351 3340 _('follow changeset history, or file history across copies and renames')),
3352 3341 ('', 'follow-first', None,
3353 3342 _('only follow the first parent of merge changesets')),
3354 3343 ('d', 'date', '', _('show revisions matching date spec')),
3355 3344 ('C', 'copies', None, _('show copied files')),
3356 3345 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3357 3346 ('r', 'rev', [], _('show the specified revision or range')),
3358 3347 ('', 'removed', None, _('include revisions where files were removed')),
3359 3348 ('m', 'only-merges', None, _('show only merges')),
3360 3349 ('u', 'user', [], _('revisions committed by user')),
3361 3350 ('b', 'only-branch', [],
3362 3351 _('show only changesets within the given named branch')),
3363 3352 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3364 3353 ] + logopts + walkopts,
3365 3354 _('[OPTION]... [FILE]')),
3366 3355 "manifest":
3367 3356 (manifest,
3368 3357 [('r', 'rev', '', _('revision to display'))],
3369 3358 _('[-r REV]')),
3370 3359 "^merge":
3371 3360 (merge,
3372 3361 [('f', 'force', None, _('force a merge with outstanding changes')),
3373 3362 ('r', 'rev', '', _('revision to merge')),
3374 3363 ('P', 'preview', None,
3375 3364 _('review revisions to merge (no merge is performed)'))],
3376 3365 _('[-f] [[-r] REV]')),
3377 3366 "outgoing|out":
3378 3367 (outgoing,
3379 3368 [('f', 'force', None,
3380 3369 _('run even when remote repository is unrelated')),
3381 3370 ('r', 'rev', [],
3382 3371 _('a specific revision up to which you would like to push')),
3383 3372 ('n', 'newest-first', None, _('show newest record first')),
3384 3373 ] + logopts + remoteopts,
3385 3374 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3386 3375 "^parents":
3387 3376 (parents,
3388 3377 [('r', 'rev', '', _('show parents from the specified revision')),
3389 3378 ] + templateopts,
3390 3379 _('[-r REV] [FILE]')),
3391 3380 "paths": (paths, [], _('[NAME]')),
3392 3381 "^pull":
3393 3382 (pull,
3394 3383 [('u', 'update', None,
3395 3384 _('update to new tip if changesets were pulled')),
3396 3385 ('f', 'force', None,
3397 3386 _('run even when remote repository is unrelated')),
3398 3387 ('r', 'rev', [],
3399 3388 _('a specific revision up to which you would like to pull')),
3400 3389 ] + remoteopts,
3401 3390 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3402 3391 "^push":
3403 3392 (push,
3404 3393 [('f', 'force', None, _('force push')),
3405 3394 ('r', 'rev', [],
3406 3395 _('a specific revision up to which you would like to push')),
3407 3396 ] + remoteopts,
3408 3397 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3409 3398 "recover": (recover, []),
3410 3399 "^remove|rm":
3411 3400 (remove,
3412 3401 [('A', 'after', None, _('record delete for missing files')),
3413 3402 ('f', 'force', None,
3414 3403 _('remove (and delete) file even if added or modified')),
3415 3404 ] + walkopts,
3416 3405 _('[OPTION]... FILE...')),
3417 3406 "rename|mv":
3418 3407 (rename,
3419 3408 [('A', 'after', None, _('record a rename that has already occurred')),
3420 3409 ('f', 'force', None,
3421 3410 _('forcibly copy over an existing managed file')),
3422 3411 ] + walkopts + dryrunopts,
3423 3412 _('[OPTION]... SOURCE... DEST')),
3424 3413 "resolve":
3425 3414 (resolve,
3426 3415 [('a', 'all', None, _('remerge all unresolved files')),
3427 3416 ('l', 'list', None, _('list state of files needing merge')),
3428 3417 ('m', 'mark', None, _('mark files as resolved')),
3429 3418 ('u', 'unmark', None, _('unmark files as resolved'))]
3430 3419 + walkopts,
3431 3420 _('[OPTION]... [FILE]...')),
3432 3421 "revert":
3433 3422 (revert,
3434 3423 [('a', 'all', None, _('revert all changes when no arguments given')),
3435 3424 ('d', 'date', '', _('tipmost revision matching date')),
3436 3425 ('r', 'rev', '', _('revision to revert to')),
3437 3426 ('', 'no-backup', None, _('do not save backup copies of files')),
3438 3427 ] + walkopts + dryrunopts,
3439 3428 _('[OPTION]... [-r REV] [NAME]...')),
3440 3429 "rollback": (rollback, []),
3441 3430 "root": (root, []),
3442 3431 "^serve":
3443 3432 (serve,
3444 3433 [('A', 'accesslog', '', _('name of access log file to write to')),
3445 3434 ('d', 'daemon', None, _('run server in background')),
3446 3435 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3447 3436 ('E', 'errorlog', '', _('name of error log file to write to')),
3448 3437 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3449 3438 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3450 3439 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3451 3440 ('n', 'name', '',
3452 3441 _('name to show in web pages (default: working directory)')),
3453 3442 ('', 'webdir-conf', '', _('name of the webdir config file'
3454 3443 ' (serve more than one repository)')),
3455 3444 ('', 'pid-file', '', _('name of file to write process ID to')),
3456 3445 ('', 'stdio', None, _('for remote clients')),
3457 3446 ('t', 'templates', '', _('web templates to use')),
3458 3447 ('', 'style', '', _('template style to use')),
3459 3448 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3460 3449 ('', 'certificate', '', _('SSL certificate file'))],
3461 3450 _('[OPTION]...')),
3462 3451 "showconfig|debugconfig":
3463 3452 (showconfig,
3464 3453 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3465 3454 _('[-u] [NAME]...')),
3466 3455 "^status|st":
3467 3456 (status,
3468 3457 [('A', 'all', None, _('show status of all files')),
3469 3458 ('m', 'modified', None, _('show only modified files')),
3470 3459 ('a', 'added', None, _('show only added files')),
3471 3460 ('r', 'removed', None, _('show only removed files')),
3472 3461 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3473 3462 ('c', 'clean', None, _('show only files without changes')),
3474 3463 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3475 3464 ('i', 'ignored', None, _('show only ignored files')),
3476 3465 ('n', 'no-status', None, _('hide status prefix')),
3477 3466 ('C', 'copies', None, _('show source of copied files')),
3478 3467 ('0', 'print0', None,
3479 3468 _('end filenames with NUL, for use with xargs')),
3480 3469 ('', 'rev', [], _('show difference from revision')),
3481 3470 ] + walkopts,
3482 3471 _('[OPTION]... [FILE]...')),
3483 3472 "tag":
3484 3473 (tag,
3485 3474 [('f', 'force', None, _('replace existing tag')),
3486 3475 ('l', 'local', None, _('make the tag local')),
3487 3476 ('r', 'rev', '', _('revision to tag')),
3488 3477 ('', 'remove', None, _('remove a tag')),
3489 3478 # -l/--local is already there, commitopts cannot be used
3490 3479 ('m', 'message', '', _('use <text> as commit message')),
3491 3480 ] + commitopts2,
3492 3481 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3493 3482 "tags": (tags, []),
3494 3483 "tip":
3495 3484 (tip,
3496 3485 [('p', 'patch', None, _('show patch')),
3497 3486 ('g', 'git', None, _('use git extended diff format')),
3498 3487 ] + templateopts,
3499 3488 _('[-p]')),
3500 3489 "unbundle":
3501 3490 (unbundle,
3502 3491 [('u', 'update', None,
3503 3492 _('update to new tip if changesets were unbundled'))],
3504 3493 _('[-u] FILE...')),
3505 3494 "^update|up|checkout|co":
3506 3495 (update,
3507 3496 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3508 3497 ('c', 'check', None, _('check for uncommitted changes')),
3509 3498 ('d', 'date', '', _('tipmost revision matching date')),
3510 3499 ('r', 'rev', '', _('revision'))],
3511 3500 _('[-C] [-d DATE] [[-r] REV]')),
3512 3501 "verify": (verify, []),
3513 3502 "version": (version_, []),
3514 3503 }
3515 3504
3516 3505 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3517 3506 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3518 3507 optionalrepo = ("identify paths serve showconfig debugancestor")
@@ -1,817 +1,818 b''
1 1 # context.py - changeset and file context objects for mercurial
2 2 #
3 3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from node import nullid, nullrev, short, hex
9 9 from i18n import _
10 10 import ancestor, bdiff, error, util, subrepo
11 11 import os, errno
12 12
13 13 propertycache = util.propertycache
14 14
15 15 class changectx(object):
16 16 """A changecontext object makes access to data related to a particular
17 17 changeset convenient."""
18 18 def __init__(self, repo, changeid=''):
19 19 """changeid is a revision number, node, or tag"""
20 20 if changeid == '':
21 21 changeid = '.'
22 22 self._repo = repo
23 23 if isinstance(changeid, (long, int)):
24 24 self._rev = changeid
25 25 self._node = self._repo.changelog.node(changeid)
26 26 else:
27 27 self._node = self._repo.lookup(changeid)
28 28 self._rev = self._repo.changelog.rev(self._node)
29 29
30 30 def __str__(self):
31 31 return short(self.node())
32 32
33 33 def __int__(self):
34 34 return self.rev()
35 35
36 36 def __repr__(self):
37 37 return "<changectx %s>" % str(self)
38 38
39 39 def __hash__(self):
40 40 try:
41 41 return hash(self._rev)
42 42 except AttributeError:
43 43 return id(self)
44 44
45 45 def __eq__(self, other):
46 46 try:
47 47 return self._rev == other._rev
48 48 except AttributeError:
49 49 return False
50 50
51 51 def __ne__(self, other):
52 52 return not (self == other)
53 53
54 54 def __nonzero__(self):
55 55 return self._rev != nullrev
56 56
57 57 @propertycache
58 58 def _changeset(self):
59 59 return self._repo.changelog.read(self.node())
60 60
61 61 @propertycache
62 62 def _manifest(self):
63 63 return self._repo.manifest.read(self._changeset[0])
64 64
65 65 @propertycache
66 66 def _manifestdelta(self):
67 67 return self._repo.manifest.readdelta(self._changeset[0])
68 68
69 69 @propertycache
70 70 def _parents(self):
71 71 p = self._repo.changelog.parentrevs(self._rev)
72 72 if p[1] == nullrev:
73 73 p = p[:-1]
74 74 return [changectx(self._repo, x) for x in p]
75 75
76 76 @propertycache
77 77 def substate(self):
78 78 return subrepo.state(self)
79 79
80 80 def __contains__(self, key):
81 81 return key in self._manifest
82 82
83 83 def __getitem__(self, key):
84 84 return self.filectx(key)
85 85
86 86 def __iter__(self):
87 87 for f in sorted(self._manifest):
88 88 yield f
89 89
90 90 def changeset(self): return self._changeset
91 91 def manifest(self): return self._manifest
92 92 def manifestnode(self): return self._changeset[0]
93 93
94 94 def rev(self): return self._rev
95 95 def node(self): return self._node
96 96 def hex(self): return hex(self._node)
97 97 def user(self): return self._changeset[1]
98 98 def date(self): return self._changeset[2]
99 99 def files(self): return self._changeset[3]
100 100 def description(self): return self._changeset[4]
101 101 def branch(self): return self._changeset[5].get("branch")
102 102 def extra(self): return self._changeset[5]
103 103 def tags(self): return self._repo.nodetags(self._node)
104 104
105 105 def parents(self):
106 106 """return contexts for each parent changeset"""
107 107 return self._parents
108 108
109 109 def p1(self):
110 110 return self._parents[0]
111 111
112 112 def p2(self):
113 113 if len(self._parents) == 2:
114 114 return self._parents[1]
115 115 return changectx(self._repo, -1)
116 116
117 117 def children(self):
118 118 """return contexts for each child changeset"""
119 119 c = self._repo.changelog.children(self._node)
120 120 return [changectx(self._repo, x) for x in c]
121 121
122 122 def ancestors(self):
123 123 for a in self._repo.changelog.ancestors(self._rev):
124 124 yield changectx(self._repo, a)
125 125
126 126 def descendants(self):
127 127 for d in self._repo.changelog.descendants(self._rev):
128 128 yield changectx(self._repo, d)
129 129
130 130 def _fileinfo(self, path):
131 131 if '_manifest' in self.__dict__:
132 132 try:
133 133 return self._manifest[path], self._manifest.flags(path)
134 134 except KeyError:
135 135 raise error.LookupError(self._node, path,
136 136 _('not found in manifest'))
137 137 if '_manifestdelta' in self.__dict__ or path in self.files():
138 138 if path in self._manifestdelta:
139 139 return self._manifestdelta[path], self._manifestdelta.flags(path)
140 140 node, flag = self._repo.manifest.find(self._changeset[0], path)
141 141 if not node:
142 142 raise error.LookupError(self._node, path,
143 143 _('not found in manifest'))
144 144
145 145 return node, flag
146 146
147 147 def filenode(self, path):
148 148 return self._fileinfo(path)[0]
149 149
150 150 def flags(self, path):
151 151 try:
152 152 return self._fileinfo(path)[1]
153 153 except error.LookupError:
154 154 return ''
155 155
156 156 def filectx(self, path, fileid=None, filelog=None):
157 157 """get a file context from this changeset"""
158 158 if fileid is None:
159 159 fileid = self.filenode(path)
160 160 return filectx(self._repo, path, fileid=fileid,
161 161 changectx=self, filelog=filelog)
162 162
163 163 def ancestor(self, c2):
164 164 """
165 165 return the ancestor context of self and c2
166 166 """
167 167 n = self._repo.changelog.ancestor(self._node, c2._node)
168 168 return changectx(self._repo, n)
169 169
170 170 def walk(self, match):
171 171 fset = set(match.files())
172 172 # for dirstate.walk, files=['.'] means "walk the whole tree".
173 173 # follow that here, too
174 174 fset.discard('.')
175 175 for fn in self:
176 176 for ffn in fset:
177 177 # match if the file is the exact name or a directory
178 178 if ffn == fn or fn.startswith("%s/" % ffn):
179 179 fset.remove(ffn)
180 180 break
181 181 if match(fn):
182 182 yield fn
183 183 for fn in sorted(fset):
184 184 if match.bad(fn, 'No such file in rev ' + str(self)) and match(fn):
185 185 yield fn
186 186
187 187 def sub(self, path):
188 188 return subrepo.subrepo(self, path)
189 189
190 190 class filectx(object):
191 191 """A filecontext object makes access to data related to a particular
192 192 filerevision convenient."""
193 193 def __init__(self, repo, path, changeid=None, fileid=None,
194 194 filelog=None, changectx=None):
195 195 """changeid can be a changeset revision, node, or tag.
196 196 fileid can be a file revision or node."""
197 197 self._repo = repo
198 198 self._path = path
199 199
200 200 assert (changeid is not None
201 201 or fileid is not None
202 202 or changectx is not None), \
203 203 ("bad args: changeid=%r, fileid=%r, changectx=%r"
204 204 % (changeid, fileid, changectx))
205 205
206 206 if filelog:
207 207 self._filelog = filelog
208 208
209 209 if changeid is not None:
210 210 self._changeid = changeid
211 211 if changectx is not None:
212 212 self._changectx = changectx
213 213 if fileid is not None:
214 214 self._fileid = fileid
215 215
216 216 @propertycache
217 217 def _changectx(self):
218 218 return changectx(self._repo, self._changeid)
219 219
220 220 @propertycache
221 221 def _filelog(self):
222 222 return self._repo.file(self._path)
223 223
224 224 @propertycache
225 225 def _changeid(self):
226 226 if '_changectx' in self.__dict__:
227 227 return self._changectx.rev()
228 228 else:
229 229 return self._filelog.linkrev(self._filerev)
230 230
231 231 @propertycache
232 232 def _filenode(self):
233 233 if '_fileid' in self.__dict__:
234 234 return self._filelog.lookup(self._fileid)
235 235 else:
236 236 return self._changectx.filenode(self._path)
237 237
238 238 @propertycache
239 239 def _filerev(self):
240 240 return self._filelog.rev(self._filenode)
241 241
242 242 @propertycache
243 243 def _repopath(self):
244 244 return self._path
245 245
246 246 def __nonzero__(self):
247 247 try:
248 248 self._filenode
249 249 return True
250 250 except error.LookupError:
251 251 # file is missing
252 252 return False
253 253
254 254 def __str__(self):
255 255 return "%s@%s" % (self.path(), short(self.node()))
256 256
257 257 def __repr__(self):
258 258 return "<filectx %s>" % str(self)
259 259
260 260 def __hash__(self):
261 261 try:
262 262 return hash((self._path, self._fileid))
263 263 except AttributeError:
264 264 return id(self)
265 265
266 266 def __eq__(self, other):
267 267 try:
268 268 return (self._path == other._path
269 269 and self._fileid == other._fileid)
270 270 except AttributeError:
271 271 return False
272 272
273 273 def __ne__(self, other):
274 274 return not (self == other)
275 275
276 276 def filectx(self, fileid):
277 277 '''opens an arbitrary revision of the file without
278 278 opening a new filelog'''
279 279 return filectx(self._repo, self._path, fileid=fileid,
280 280 filelog=self._filelog)
281 281
282 282 def filerev(self): return self._filerev
283 283 def filenode(self): return self._filenode
284 284 def flags(self): return self._changectx.flags(self._path)
285 285 def filelog(self): return self._filelog
286 286
287 287 def rev(self):
288 288 if '_changectx' in self.__dict__:
289 289 return self._changectx.rev()
290 290 if '_changeid' in self.__dict__:
291 291 return self._changectx.rev()
292 292 return self._filelog.linkrev(self._filerev)
293 293
294 294 def linkrev(self): return self._filelog.linkrev(self._filerev)
295 295 def node(self): return self._changectx.node()
296 def hex(self): return hex(self.node())
296 297 def user(self): return self._changectx.user()
297 298 def date(self): return self._changectx.date()
298 299 def files(self): return self._changectx.files()
299 300 def description(self): return self._changectx.description()
300 301 def branch(self): return self._changectx.branch()
301 302 def manifest(self): return self._changectx.manifest()
302 303 def changectx(self): return self._changectx
303 304
304 305 def data(self): return self._filelog.read(self._filenode)
305 306 def path(self): return self._path
306 307 def size(self): return self._filelog.size(self._filerev)
307 308
308 309 def cmp(self, text): return self._filelog.cmp(self._filenode, text)
309 310
310 311 def renamed(self):
311 312 """check if file was actually renamed in this changeset revision
312 313
313 314 If rename logged in file revision, we report copy for changeset only
314 315 if file revisions linkrev points back to the changeset in question
315 316 or both changeset parents contain different file revisions.
316 317 """
317 318
318 319 renamed = self._filelog.renamed(self._filenode)
319 320 if not renamed:
320 321 return renamed
321 322
322 323 if self.rev() == self.linkrev():
323 324 return renamed
324 325
325 326 name = self.path()
326 327 fnode = self._filenode
327 328 for p in self._changectx.parents():
328 329 try:
329 330 if fnode == p.filenode(name):
330 331 return None
331 332 except error.LookupError:
332 333 pass
333 334 return renamed
334 335
335 336 def parents(self):
336 337 p = self._path
337 338 fl = self._filelog
338 339 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
339 340
340 341 r = self._filelog.renamed(self._filenode)
341 342 if r:
342 343 pl[0] = (r[0], r[1], None)
343 344
344 345 return [filectx(self._repo, p, fileid=n, filelog=l)
345 346 for p,n,l in pl if n != nullid]
346 347
347 348 def children(self):
348 349 # hard for renames
349 350 c = self._filelog.children(self._filenode)
350 351 return [filectx(self._repo, self._path, fileid=x,
351 352 filelog=self._filelog) for x in c]
352 353
353 354 def annotate(self, follow=False, linenumber=None):
354 355 '''returns a list of tuples of (ctx, line) for each line
355 356 in the file, where ctx is the filectx of the node where
356 357 that line was last changed.
357 358 This returns tuples of ((ctx, linenumber), line) for each line,
358 359 if "linenumber" parameter is NOT "None".
359 360 In such tuples, linenumber means one at the first appearance
360 361 in the managed file.
361 362 To reduce annotation cost,
362 363 this returns fixed value(False is used) as linenumber,
363 364 if "linenumber" parameter is "False".'''
364 365
365 366 def decorate_compat(text, rev):
366 367 return ([rev] * len(text.splitlines()), text)
367 368
368 369 def without_linenumber(text, rev):
369 370 return ([(rev, False)] * len(text.splitlines()), text)
370 371
371 372 def with_linenumber(text, rev):
372 373 size = len(text.splitlines())
373 374 return ([(rev, i) for i in xrange(1, size + 1)], text)
374 375
375 376 decorate = (((linenumber is None) and decorate_compat) or
376 377 (linenumber and with_linenumber) or
377 378 without_linenumber)
378 379
379 380 def pair(parent, child):
380 381 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
381 382 child[0][b1:b2] = parent[0][a1:a2]
382 383 return child
383 384
384 getlog = util.cachefunc(lambda x: self._repo.file(x))
385 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
385 386 def getctx(path, fileid):
386 387 log = path == self._path and self._filelog or getlog(path)
387 388 return filectx(self._repo, path, fileid=fileid, filelog=log)
388 getctx = util.cachefunc(getctx)
389 getctx = util.lrucachefunc(getctx)
389 390
390 391 def parents(f):
391 392 # we want to reuse filectx objects as much as possible
392 393 p = f._path
393 394 if f._filerev is None: # working dir
394 395 pl = [(n.path(), n.filerev()) for n in f.parents()]
395 396 else:
396 397 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
397 398
398 399 if follow:
399 400 r = f.renamed()
400 401 if r:
401 402 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
402 403
403 404 return [getctx(p, n) for p, n in pl if n != nullrev]
404 405
405 406 # use linkrev to find the first changeset where self appeared
406 407 if self.rev() != self.linkrev():
407 408 base = self.filectx(self.filerev())
408 409 else:
409 410 base = self
410 411
411 412 # find all ancestors
412 413 needed = {base: 1}
413 414 visit = [base]
414 415 files = [base._path]
415 416 while visit:
416 417 f = visit.pop(0)
417 418 for p in parents(f):
418 419 if p not in needed:
419 420 needed[p] = 1
420 421 visit.append(p)
421 422 if p._path not in files:
422 423 files.append(p._path)
423 424 else:
424 425 # count how many times we'll use this
425 426 needed[p] += 1
426 427
427 428 # sort by revision (per file) which is a topological order
428 429 visit = []
429 430 for f in files:
430 431 fn = [(n.rev(), n) for n in needed if n._path == f]
431 432 visit.extend(fn)
432 433
433 434 hist = {}
434 435 for r, f in sorted(visit):
435 436 curr = decorate(f.data(), f)
436 437 for p in parents(f):
437 438 if p != nullid:
438 439 curr = pair(hist[p], curr)
439 440 # trim the history of unneeded revs
440 441 needed[p] -= 1
441 442 if not needed[p]:
442 443 del hist[p]
443 444 hist[f] = curr
444 445
445 446 return zip(hist[f][0], hist[f][1].splitlines(1))
446 447
447 448 def ancestor(self, fc2):
448 449 """
449 450 find the common ancestor file context, if any, of self, and fc2
450 451 """
451 452
452 453 acache = {}
453 454
454 455 # prime the ancestor cache for the working directory
455 456 for c in (self, fc2):
456 457 if c._filerev is None:
457 458 pl = [(n.path(), n.filenode()) for n in c.parents()]
458 459 acache[(c._path, None)] = pl
459 460
460 461 flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
461 462 def parents(vertex):
462 463 if vertex in acache:
463 464 return acache[vertex]
464 465 f, n = vertex
465 466 if f not in flcache:
466 467 flcache[f] = self._repo.file(f)
467 468 fl = flcache[f]
468 469 pl = [(f, p) for p in fl.parents(n) if p != nullid]
469 470 re = fl.renamed(n)
470 471 if re:
471 472 pl.append(re)
472 473 acache[vertex] = pl
473 474 return pl
474 475
475 476 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
476 477 v = ancestor.ancestor(a, b, parents)
477 478 if v:
478 479 f, n = v
479 480 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
480 481
481 482 return None
482 483
483 484 class workingctx(changectx):
484 485 """A workingctx object makes access to data related to
485 486 the current working directory convenient.
486 487 parents - a pair of parent nodeids, or None to use the dirstate.
487 488 date - any valid date string or (unixtime, offset), or None.
488 489 user - username string, or None.
489 490 extra - a dictionary of extra values, or None.
490 491 changes - a list of file lists as returned by localrepo.status()
491 492 or None to use the repository status.
492 493 """
493 494 def __init__(self, repo, parents=None, text="", user=None, date=None,
494 495 extra=None, changes=None):
495 496 self._repo = repo
496 497 self._rev = None
497 498 self._node = None
498 499 self._text = text
499 500 if date:
500 501 self._date = util.parsedate(date)
501 502 if user:
502 503 self._user = user
503 504 if parents:
504 505 self._parents = [changectx(self._repo, p) for p in parents]
505 506 if changes:
506 507 self._status = list(changes)
507 508
508 509 self._extra = {}
509 510 if extra:
510 511 self._extra = extra.copy()
511 512 if 'branch' not in self._extra:
512 513 branch = self._repo.dirstate.branch()
513 514 try:
514 515 branch = branch.decode('UTF-8').encode('UTF-8')
515 516 except UnicodeDecodeError:
516 517 raise util.Abort(_('branch name not in UTF-8!'))
517 518 self._extra['branch'] = branch
518 519 if self._extra['branch'] == '':
519 520 self._extra['branch'] = 'default'
520 521
521 522 def __str__(self):
522 523 return str(self._parents[0]) + "+"
523 524
524 525 def __nonzero__(self):
525 526 return True
526 527
527 528 def __contains__(self, key):
528 529 return self._repo.dirstate[key] not in "?r"
529 530
530 531 @propertycache
531 532 def _manifest(self):
532 533 """generate a manifest corresponding to the working directory"""
533 534
534 535 man = self._parents[0].manifest().copy()
535 536 copied = self._repo.dirstate.copies()
536 537 cf = lambda x: man.flags(copied.get(x, x))
537 538 ff = self._repo.dirstate.flagfunc(cf)
538 539 modified, added, removed, deleted, unknown = self._status[:5]
539 540 for i, l in (("a", added), ("m", modified), ("u", unknown)):
540 541 for f in l:
541 542 man[f] = man.get(copied.get(f, f), nullid) + i
542 543 try:
543 544 man.set(f, ff(f))
544 545 except OSError:
545 546 pass
546 547
547 548 for f in deleted + removed:
548 549 if f in man:
549 550 del man[f]
550 551
551 552 return man
552 553
553 554 @propertycache
554 555 def _status(self):
555 556 return self._repo.status(unknown=True)
556 557
557 558 @propertycache
558 559 def _user(self):
559 560 return self._repo.ui.username()
560 561
561 562 @propertycache
562 563 def _date(self):
563 564 return util.makedate()
564 565
565 566 @propertycache
566 567 def _parents(self):
567 568 p = self._repo.dirstate.parents()
568 569 if p[1] == nullid:
569 570 p = p[:-1]
570 571 self._parents = [changectx(self._repo, x) for x in p]
571 572 return self._parents
572 573
573 574 def manifest(self): return self._manifest
574 575
575 576 def user(self): return self._user or self._repo.ui.username()
576 577 def date(self): return self._date
577 578 def description(self): return self._text
578 579 def files(self):
579 580 return sorted(self._status[0] + self._status[1] + self._status[2])
580 581
581 582 def modified(self): return self._status[0]
582 583 def added(self): return self._status[1]
583 584 def removed(self): return self._status[2]
584 585 def deleted(self): return self._status[3]
585 586 def unknown(self): return self._status[4]
586 587 def clean(self): return self._status[5]
587 588 def branch(self): return self._extra['branch']
588 589 def extra(self): return self._extra
589 590
590 591 def tags(self):
591 592 t = []
592 593 [t.extend(p.tags()) for p in self.parents()]
593 594 return t
594 595
595 596 def children(self):
596 597 return []
597 598
598 599 def flags(self, path):
599 600 if '_manifest' in self.__dict__:
600 601 try:
601 602 return self._manifest.flags(path)
602 603 except KeyError:
603 604 return ''
604 605
605 606 pnode = self._parents[0].changeset()[0]
606 607 orig = self._repo.dirstate.copies().get(path, path)
607 608 node, flag = self._repo.manifest.find(pnode, orig)
608 609 try:
609 610 ff = self._repo.dirstate.flagfunc(lambda x: flag or '')
610 611 return ff(path)
611 612 except OSError:
612 613 pass
613 614
614 615 if not node or path in self.deleted() or path in self.removed():
615 616 return ''
616 617 return flag
617 618
618 619 def filectx(self, path, filelog=None):
619 620 """get a file context from the working directory"""
620 621 return workingfilectx(self._repo, path, workingctx=self,
621 622 filelog=filelog)
622 623
623 624 def ancestor(self, c2):
624 625 """return the ancestor context of self and c2"""
625 626 return self._parents[0].ancestor(c2) # punt on two parents for now
626 627
627 628 def walk(self, match):
628 629 return sorted(self._repo.dirstate.walk(match, True, False))
629 630
630 631 def dirty(self, missing=False):
631 632 "check whether a working directory is modified"
632 633
633 634 return (self.p2() or self.branch() != self.p1().branch() or
634 635 self.modified() or self.added() or self.removed() or
635 636 (missing and self.deleted()))
636 637
637 638 class workingfilectx(filectx):
638 639 """A workingfilectx object makes access to data related to a particular
639 640 file in the working directory convenient."""
640 641 def __init__(self, repo, path, filelog=None, workingctx=None):
641 642 """changeid can be a changeset revision, node, or tag.
642 643 fileid can be a file revision or node."""
643 644 self._repo = repo
644 645 self._path = path
645 646 self._changeid = None
646 647 self._filerev = self._filenode = None
647 648
648 649 if filelog:
649 650 self._filelog = filelog
650 651 if workingctx:
651 652 self._changectx = workingctx
652 653
653 654 @propertycache
654 655 def _changectx(self):
655 656 return workingctx(self._repo)
656 657
657 658 def __nonzero__(self):
658 659 return True
659 660
660 661 def __str__(self):
661 662 return "%s@%s" % (self.path(), self._changectx)
662 663
663 664 def data(self): return self._repo.wread(self._path)
664 665 def renamed(self):
665 666 rp = self._repo.dirstate.copied(self._path)
666 667 if not rp:
667 668 return None
668 669 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
669 670
670 671 def parents(self):
671 672 '''return parent filectxs, following copies if necessary'''
672 673 def filenode(ctx, path):
673 674 return ctx._manifest.get(path, nullid)
674 675
675 676 path = self._path
676 677 fl = self._filelog
677 678 pcl = self._changectx._parents
678 679 renamed = self.renamed()
679 680
680 681 if renamed:
681 682 pl = [renamed + (None,)]
682 683 else:
683 684 pl = [(path, filenode(pcl[0], path), fl)]
684 685
685 686 for pc in pcl[1:]:
686 687 pl.append((path, filenode(pc, path), fl))
687 688
688 689 return [filectx(self._repo, p, fileid=n, filelog=l)
689 690 for p,n,l in pl if n != nullid]
690 691
691 692 def children(self):
692 693 return []
693 694
694 695 def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
695 696 def date(self):
696 697 t, tz = self._changectx.date()
697 698 try:
698 699 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
699 700 except OSError, err:
700 701 if err.errno != errno.ENOENT: raise
701 702 return (t, tz)
702 703
703 704 def cmp(self, text): return self._repo.wread(self._path) == text
704 705
705 706 class memctx(object):
706 707 """Use memctx to perform in-memory commits via localrepo.commitctx().
707 708
708 709 Revision information is supplied at initialization time while
709 710 related files data and is made available through a callback
710 711 mechanism. 'repo' is the current localrepo, 'parents' is a
711 712 sequence of two parent revisions identifiers (pass None for every
712 713 missing parent), 'text' is the commit message and 'files' lists
713 714 names of files touched by the revision (normalized and relative to
714 715 repository root).
715 716
716 717 filectxfn(repo, memctx, path) is a callable receiving the
717 718 repository, the current memctx object and the normalized path of
718 719 requested file, relative to repository root. It is fired by the
719 720 commit function for every file in 'files', but calls order is
720 721 undefined. If the file is available in the revision being
721 722 committed (updated or added), filectxfn returns a memfilectx
722 723 object. If the file was removed, filectxfn raises an
723 724 IOError. Moved files are represented by marking the source file
724 725 removed and the new file added with copy information (see
725 726 memfilectx).
726 727
727 728 user receives the committer name and defaults to current
728 729 repository username, date is the commit date in any format
729 730 supported by util.parsedate() and defaults to current date, extra
730 731 is a dictionary of metadata or is left empty.
731 732 """
732 733 def __init__(self, repo, parents, text, files, filectxfn, user=None,
733 734 date=None, extra=None):
734 735 self._repo = repo
735 736 self._rev = None
736 737 self._node = None
737 738 self._text = text
738 739 self._date = date and util.parsedate(date) or util.makedate()
739 740 self._user = user
740 741 parents = [(p or nullid) for p in parents]
741 742 p1, p2 = parents
742 743 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
743 744 files = sorted(set(files))
744 745 self._status = [files, [], [], [], []]
745 746 self._filectxfn = filectxfn
746 747
747 748 self._extra = extra and extra.copy() or {}
748 749 if 'branch' not in self._extra:
749 750 self._extra['branch'] = 'default'
750 751 elif self._extra.get('branch') == '':
751 752 self._extra['branch'] = 'default'
752 753
753 754 def __str__(self):
754 755 return str(self._parents[0]) + "+"
755 756
756 757 def __int__(self):
757 758 return self._rev
758 759
759 760 def __nonzero__(self):
760 761 return True
761 762
762 763 def __getitem__(self, key):
763 764 return self.filectx(key)
764 765
765 766 def p1(self): return self._parents[0]
766 767 def p2(self): return self._parents[1]
767 768
768 769 def user(self): return self._user or self._repo.ui.username()
769 770 def date(self): return self._date
770 771 def description(self): return self._text
771 772 def files(self): return self.modified()
772 773 def modified(self): return self._status[0]
773 774 def added(self): return self._status[1]
774 775 def removed(self): return self._status[2]
775 776 def deleted(self): return self._status[3]
776 777 def unknown(self): return self._status[4]
777 778 def clean(self): return self._status[5]
778 779 def branch(self): return self._extra['branch']
779 780 def extra(self): return self._extra
780 781 def flags(self, f): return self[f].flags()
781 782
782 783 def parents(self):
783 784 """return contexts for each parent changeset"""
784 785 return self._parents
785 786
786 787 def filectx(self, path, filelog=None):
787 788 """get a file context from the working directory"""
788 789 return self._filectxfn(self._repo, self, path)
789 790
790 791 class memfilectx(object):
791 792 """memfilectx represents an in-memory file to commit.
792 793
793 794 See memctx for more details.
794 795 """
795 796 def __init__(self, path, data, islink, isexec, copied):
796 797 """
797 798 path is the normalized file path relative to repository root.
798 799 data is the file content as a string.
799 800 islink is True if the file is a symbolic link.
800 801 isexec is True if the file is executable.
801 802 copied is the source file path if current file was copied in the
802 803 revision being committed, or None."""
803 804 self._path = path
804 805 self._data = data
805 806 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
806 807 self._copied = None
807 808 if copied:
808 809 self._copied = (copied, nullid)
809 810
810 811 def __nonzero__(self): return True
811 812 def __str__(self): return "%s@%s" % (self.path(), self._changectx)
812 813 def path(self): return self._path
813 814 def data(self): return self._data
814 815 def flags(self): return self._flags
815 816 def isexec(self): return 'x' in self._flags
816 817 def islink(self): return 'l' in self._flags
817 818 def renamed(self): return self._copied
@@ -1,233 +1,233 b''
1 1 # copies.py - copy detection for Mercurial
2 2 #
3 3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from i18n import _
9 9 import util
10 10 import heapq
11 11
12 12 def _nonoverlap(d1, d2, d3):
13 13 "Return list of elements in d1 not in d2 or d3"
14 14 return sorted([d for d in d1 if d not in d3 and d not in d2])
15 15
16 16 def _dirname(f):
17 17 s = f.rfind("/")
18 18 if s == -1:
19 19 return ""
20 20 return f[:s]
21 21
22 22 def _dirs(files):
23 23 d = set()
24 24 for f in files:
25 25 f = _dirname(f)
26 26 while f not in d:
27 27 d.add(f)
28 28 f = _dirname(f)
29 29 return d
30 30
31 31 def _findoldnames(fctx, limit):
32 32 "find files that path was copied from, back to linkrev limit"
33 33 old = {}
34 34 seen = set()
35 35 orig = fctx.path()
36 36 visit = [(fctx, 0)]
37 37 while visit:
38 38 fc, depth = visit.pop()
39 39 s = str(fc)
40 40 if s in seen:
41 41 continue
42 42 seen.add(s)
43 43 if fc.path() != orig and fc.path() not in old:
44 44 old[fc.path()] = (depth, fc.path()) # remember depth
45 45 if fc.rev() is not None and fc.rev() < limit:
46 46 continue
47 47 visit += [(p, depth - 1) for p in fc.parents()]
48 48
49 49 # return old names sorted by depth
50 50 return [o[1] for o in sorted(old.values())]
51 51
52 52 def _findlimit(repo, a, b):
53 53 "find the earliest revision that's an ancestor of a or b but not both"
54 54 # basic idea:
55 55 # - mark a and b with different sides
56 56 # - if a parent's children are all on the same side, the parent is
57 57 # on that side, otherwise it is on no side
58 58 # - walk the graph in topological order with the help of a heap;
59 59 # - add unseen parents to side map
60 60 # - clear side of any parent that has children on different sides
61 61 # - track number of interesting revs that might still be on a side
62 62 # - track the lowest interesting rev seen
63 63 # - quit when interesting revs is zero
64 64
65 65 cl = repo.changelog
66 66 working = len(cl) # pseudo rev for the working directory
67 67 if a is None:
68 68 a = working
69 69 if b is None:
70 70 b = working
71 71
72 72 side = {a: -1, b: 1}
73 73 visit = [-a, -b]
74 74 heapq.heapify(visit)
75 75 interesting = len(visit)
76 76 limit = working
77 77
78 78 while interesting:
79 79 r = -heapq.heappop(visit)
80 80 if r == working:
81 81 parents = [cl.rev(p) for p in repo.dirstate.parents()]
82 82 else:
83 83 parents = cl.parentrevs(r)
84 84 for p in parents:
85 85 if p not in side:
86 86 # first time we see p; add it to visit
87 87 side[p] = side[r]
88 88 if side[p]:
89 89 interesting += 1
90 90 heapq.heappush(visit, -p)
91 91 elif side[p] and side[p] != side[r]:
92 92 # p was interesting but now we know better
93 93 side[p] = 0
94 94 interesting -= 1
95 95 if side[r]:
96 96 limit = r # lowest rev visited
97 97 interesting -= 1
98 98 return limit
99 99
100 100 def copies(repo, c1, c2, ca, checkdirs=False):
101 101 """
102 102 Find moves and copies between context c1 and c2
103 103 """
104 104 # avoid silly behavior for update from empty dir
105 105 if not c1 or not c2 or c1 == c2:
106 106 return {}, {}
107 107
108 108 # avoid silly behavior for parent -> working dir
109 109 if c2.node() is None and c1.node() == repo.dirstate.parents()[0]:
110 110 return repo.dirstate.copies(), {}
111 111
112 112 limit = _findlimit(repo, c1.rev(), c2.rev())
113 113 m1 = c1.manifest()
114 114 m2 = c2.manifest()
115 115 ma = ca.manifest()
116 116
117 117 def makectx(f, n):
118 118 if len(n) != 20: # in a working context?
119 119 if c1.rev() is None:
120 120 return c1.filectx(f)
121 121 return c2.filectx(f)
122 122 return repo.filectx(f, fileid=n)
123 ctx = util.cachefunc(makectx)
124 123
124 ctx = util.lrucachefunc(makectx)
125 125 copy = {}
126 126 fullcopy = {}
127 127 diverge = {}
128 128
129 129 def checkcopies(f, m1, m2):
130 130 '''check possible copies of f from m1 to m2'''
131 131 c1 = ctx(f, m1[f])
132 132 for of in _findoldnames(c1, limit):
133 133 fullcopy[f] = of # remember for dir rename detection
134 134 if of in m2: # original file not in other manifest?
135 135 # if the original file is unchanged on the other branch,
136 136 # no merge needed
137 137 if m2[of] != ma.get(of):
138 138 c2 = ctx(of, m2[of])
139 139 ca = c1.ancestor(c2)
140 140 # related and named changed on only one side?
141 141 if ca and (ca.path() == f or ca.path() == c2.path()):
142 142 if c1 != ca or c2 != ca: # merge needed?
143 143 copy[f] = of
144 144 elif of in ma:
145 145 diverge.setdefault(of, []).append(f)
146 146
147 147 repo.ui.debug(_(" searching for copies back to rev %d\n") % limit)
148 148
149 149 u1 = _nonoverlap(m1, m2, ma)
150 150 u2 = _nonoverlap(m2, m1, ma)
151 151
152 152 if u1:
153 153 repo.ui.debug(_(" unmatched files in local:\n %s\n")
154 154 % "\n ".join(u1))
155 155 if u2:
156 156 repo.ui.debug(_(" unmatched files in other:\n %s\n")
157 157 % "\n ".join(u2))
158 158
159 159 for f in u1:
160 160 checkcopies(f, m1, m2)
161 161 for f in u2:
162 162 checkcopies(f, m2, m1)
163 163
164 164 diverge2 = set()
165 165 for of, fl in diverge.items():
166 166 if len(fl) == 1:
167 167 del diverge[of] # not actually divergent
168 168 else:
169 169 diverge2.update(fl) # reverse map for below
170 170
171 171 if fullcopy:
172 172 repo.ui.debug(_(" all copies found (* = to merge, ! = divergent):\n"))
173 173 for f in fullcopy:
174 174 note = ""
175 175 if f in copy: note += "*"
176 176 if f in diverge2: note += "!"
177 177 repo.ui.debug(" %s -> %s %s\n" % (f, fullcopy[f], note))
178 178 del diverge2
179 179
180 180 if not fullcopy or not checkdirs:
181 181 return copy, diverge
182 182
183 183 repo.ui.debug(_(" checking for directory renames\n"))
184 184
185 185 # generate a directory move map
186 186 d1, d2 = _dirs(m1), _dirs(m2)
187 187 invalid = set()
188 188 dirmove = {}
189 189
190 190 # examine each file copy for a potential directory move, which is
191 191 # when all the files in a directory are moved to a new directory
192 192 for dst, src in fullcopy.iteritems():
193 193 dsrc, ddst = _dirname(src), _dirname(dst)
194 194 if dsrc in invalid:
195 195 # already seen to be uninteresting
196 196 continue
197 197 elif dsrc in d1 and ddst in d1:
198 198 # directory wasn't entirely moved locally
199 199 invalid.add(dsrc)
200 200 elif dsrc in d2 and ddst in d2:
201 201 # directory wasn't entirely moved remotely
202 202 invalid.add(dsrc)
203 203 elif dsrc in dirmove and dirmove[dsrc] != ddst:
204 204 # files from the same directory moved to two different places
205 205 invalid.add(dsrc)
206 206 else:
207 207 # looks good so far
208 208 dirmove[dsrc + "/"] = ddst + "/"
209 209
210 210 for i in invalid:
211 211 if i in dirmove:
212 212 del dirmove[i]
213 213 del d1, d2, invalid
214 214
215 215 if not dirmove:
216 216 return copy, diverge
217 217
218 218 for d in dirmove:
219 219 repo.ui.debug(_(" dir %s -> %s\n") % (d, dirmove[d]))
220 220
221 221 # check unaccounted nonoverlapping files against directory moves
222 222 for f in u1 + u2:
223 223 if f not in fullcopy:
224 224 for d in dirmove:
225 225 if f.startswith(d):
226 226 # new file added in a directory that was moved, move it
227 227 df = dirmove[d] + f[len(d):]
228 228 if df not in copy:
229 229 copy[f] = df
230 230 repo.ui.debug(_(" file %s -> %s\n") % (f, copy[f]))
231 231 break
232 232
233 233 return copy, diverge
@@ -1,2179 +1,2181 b''
1 1 # localrepo.py - read/write repository class for mercurial
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from node import bin, hex, nullid, nullrev, short
9 9 from i18n import _
10 10 import repo, changegroup, subrepo
11 11 import changelog, dirstate, filelog, manifest, context
12 12 import lock, transaction, store, encoding
13 13 import util, extensions, hook, error
14 14 import match as match_
15 15 import merge as merge_
16 16 from lock import release
17 17 import weakref, stat, errno, os, time, inspect
18 18 propertycache = util.propertycache
19 19
20 20 class localrepository(repo.repository):
21 21 capabilities = set(('lookup', 'changegroupsubset', 'branchmap'))
22 22 supported = set('revlogv1 store fncache shared'.split())
23 23
24 24 def __init__(self, baseui, path=None, create=0):
25 25 repo.repository.__init__(self)
26 26 self.root = os.path.realpath(path)
27 27 self.path = os.path.join(self.root, ".hg")
28 28 self.origroot = path
29 29 self.opener = util.opener(self.path)
30 30 self.wopener = util.opener(self.root)
31 31 self.baseui = baseui
32 32 self.ui = baseui.copy()
33 33
34 34 try:
35 35 self.ui.readconfig(self.join("hgrc"), self.root)
36 36 extensions.loadall(self.ui)
37 37 except IOError:
38 38 pass
39 39
40 40 if not os.path.isdir(self.path):
41 41 if create:
42 42 if not os.path.exists(path):
43 43 os.mkdir(path)
44 44 os.mkdir(self.path)
45 45 requirements = ["revlogv1"]
46 46 if self.ui.configbool('format', 'usestore', True):
47 47 os.mkdir(os.path.join(self.path, "store"))
48 48 requirements.append("store")
49 49 if self.ui.configbool('format', 'usefncache', True):
50 50 requirements.append("fncache")
51 51 # create an invalid changelog
52 52 self.opener("00changelog.i", "a").write(
53 53 '\0\0\0\2' # represents revlogv2
54 54 ' dummy changelog to prevent using the old repo layout'
55 55 )
56 56 reqfile = self.opener("requires", "w")
57 57 for r in requirements:
58 58 reqfile.write("%s\n" % r)
59 59 reqfile.close()
60 60 else:
61 61 raise error.RepoError(_("repository %s not found") % path)
62 62 elif create:
63 63 raise error.RepoError(_("repository %s already exists") % path)
64 64 else:
65 65 # find requirements
66 66 requirements = set()
67 67 try:
68 68 requirements = set(self.opener("requires").read().splitlines())
69 69 except IOError, inst:
70 70 if inst.errno != errno.ENOENT:
71 71 raise
72 72 for r in requirements - self.supported:
73 73 raise error.RepoError(_("requirement '%s' not supported") % r)
74 74
75 75 self.sharedpath = self.path
76 76 try:
77 77 s = os.path.realpath(self.opener("sharedpath").read())
78 78 if not os.path.exists(s):
79 79 raise error.RepoError(
80 80 _('.hg/sharedpath points to nonexistent directory %s') % s)
81 81 self.sharedpath = s
82 82 except IOError, inst:
83 83 if inst.errno != errno.ENOENT:
84 84 raise
85 85
86 86 self.store = store.store(requirements, self.sharedpath, util.opener)
87 87 self.spath = self.store.path
88 88 self.sopener = self.store.opener
89 89 self.sjoin = self.store.join
90 90 self.opener.createmode = self.store.createmode
91 91
92 92 self.tagscache = None
93 93 self._tagstypecache = None
94 94 self.branchcache = None
95 95 self._ubranchcache = None # UTF-8 version of branchcache
96 96 self._branchcachetip = None
97 97 self.nodetagscache = None
98 98 self.filterpats = {}
99 99 self._datafilters = {}
100 100 self._transref = self._lockref = self._wlockref = None
101 101
102 102 @propertycache
103 103 def changelog(self):
104 104 c = changelog.changelog(self.sopener)
105 105 if 'HG_PENDING' in os.environ:
106 106 p = os.environ['HG_PENDING']
107 107 if p.startswith(self.root):
108 108 c.readpending('00changelog.i.a')
109 109 self.sopener.defversion = c.version
110 110 return c
111 111
112 112 @propertycache
113 113 def manifest(self):
114 114 return manifest.manifest(self.sopener)
115 115
116 116 @propertycache
117 117 def dirstate(self):
118 118 return dirstate.dirstate(self.opener, self.ui, self.root)
119 119
120 120 def __getitem__(self, changeid):
121 121 if changeid is None:
122 122 return context.workingctx(self)
123 123 return context.changectx(self, changeid)
124 124
125 125 def __nonzero__(self):
126 126 return True
127 127
128 128 def __len__(self):
129 129 return len(self.changelog)
130 130
131 131 def __iter__(self):
132 132 for i in xrange(len(self)):
133 133 yield i
134 134
135 135 def url(self):
136 136 return 'file:' + self.root
137 137
138 138 def hook(self, name, throw=False, **args):
139 139 return hook.hook(self.ui, self, name, throw, **args)
140 140
141 141 tag_disallowed = ':\r\n'
142 142
143 143 def _tag(self, names, node, message, local, user, date, extra={}):
144 144 if isinstance(names, str):
145 145 allchars = names
146 146 names = (names,)
147 147 else:
148 148 allchars = ''.join(names)
149 149 for c in self.tag_disallowed:
150 150 if c in allchars:
151 151 raise util.Abort(_('%r cannot be used in a tag name') % c)
152 152
153 153 for name in names:
154 154 self.hook('pretag', throw=True, node=hex(node), tag=name,
155 155 local=local)
156 156
157 157 def writetags(fp, names, munge, prevtags):
158 158 fp.seek(0, 2)
159 159 if prevtags and prevtags[-1] != '\n':
160 160 fp.write('\n')
161 161 for name in names:
162 162 m = munge and munge(name) or name
163 163 if self._tagstypecache and name in self._tagstypecache:
164 164 old = self.tagscache.get(name, nullid)
165 165 fp.write('%s %s\n' % (hex(old), m))
166 166 fp.write('%s %s\n' % (hex(node), m))
167 167 fp.close()
168 168
169 169 prevtags = ''
170 170 if local:
171 171 try:
172 172 fp = self.opener('localtags', 'r+')
173 173 except IOError:
174 174 fp = self.opener('localtags', 'a')
175 175 else:
176 176 prevtags = fp.read()
177 177
178 178 # local tags are stored in the current charset
179 179 writetags(fp, names, None, prevtags)
180 180 for name in names:
181 181 self.hook('tag', node=hex(node), tag=name, local=local)
182 182 return
183 183
184 184 try:
185 185 fp = self.wfile('.hgtags', 'rb+')
186 186 except IOError:
187 187 fp = self.wfile('.hgtags', 'ab')
188 188 else:
189 189 prevtags = fp.read()
190 190
191 191 # committed tags are stored in UTF-8
192 192 writetags(fp, names, encoding.fromlocal, prevtags)
193 193
194 194 if '.hgtags' not in self.dirstate:
195 195 self.add(['.hgtags'])
196 196
197 197 m = match_.exact(self.root, '', ['.hgtags'])
198 198 tagnode = self.commit(message, user, date, extra=extra, match=m)
199 199
200 200 for name in names:
201 201 self.hook('tag', node=hex(node), tag=name, local=local)
202 202
203 203 return tagnode
204 204
205 205 def tag(self, names, node, message, local, user, date):
206 206 '''tag a revision with one or more symbolic names.
207 207
208 208 names is a list of strings or, when adding a single tag, names may be a
209 209 string.
210 210
211 211 if local is True, the tags are stored in a per-repository file.
212 212 otherwise, they are stored in the .hgtags file, and a new
213 213 changeset is committed with the change.
214 214
215 215 keyword arguments:
216 216
217 217 local: whether to store tags in non-version-controlled file
218 218 (default False)
219 219
220 220 message: commit message to use if committing
221 221
222 222 user: name of user to use if committing
223 223
224 224 date: date tuple to use if committing'''
225 225
226 226 for x in self.status()[:5]:
227 227 if '.hgtags' in x:
228 228 raise util.Abort(_('working copy of .hgtags is changed '
229 229 '(please commit .hgtags manually)'))
230 230
231 231 self.tags() # instantiate the cache
232 232 self._tag(names, node, message, local, user, date)
233 233
234 234 def tags(self):
235 235 '''return a mapping of tag to node'''
236 236 if self.tagscache:
237 237 return self.tagscache
238 238
239 239 globaltags = {}
240 240 tagtypes = {}
241 241
242 242 def readtags(lines, fn, tagtype):
243 243 filetags = {}
244 244 count = 0
245 245
246 246 def warn(msg):
247 247 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
248 248
249 249 for l in lines:
250 250 count += 1
251 251 if not l:
252 252 continue
253 253 s = l.split(" ", 1)
254 254 if len(s) != 2:
255 255 warn(_("cannot parse entry"))
256 256 continue
257 257 node, key = s
258 258 key = encoding.tolocal(key.strip()) # stored in UTF-8
259 259 try:
260 260 bin_n = bin(node)
261 261 except TypeError:
262 262 warn(_("node '%s' is not well formed") % node)
263 263 continue
264 264 if bin_n not in self.changelog.nodemap:
265 265 # silently ignore as pull -r might cause this
266 266 continue
267 267
268 268 h = []
269 269 if key in filetags:
270 270 n, h = filetags[key]
271 271 h.append(n)
272 272 filetags[key] = (bin_n, h)
273 273
274 274 for k, nh in filetags.iteritems():
275 275 if k not in globaltags:
276 276 globaltags[k] = nh
277 277 tagtypes[k] = tagtype
278 278 continue
279 279
280 280 # we prefer the global tag if:
281 281 # it supercedes us OR
282 282 # mutual supercedes and it has a higher rank
283 283 # otherwise we win because we're tip-most
284 284 an, ah = nh
285 285 bn, bh = globaltags[k]
286 286 if (bn != an and an in bh and
287 287 (bn not in ah or len(bh) > len(ah))):
288 288 an = bn
289 289 ah.extend([n for n in bh if n not in ah])
290 290 globaltags[k] = an, ah
291 291 tagtypes[k] = tagtype
292 292
293 293 seen = set()
294 294 f = None
295 295 ctxs = []
296 296 for node in self.heads():
297 297 try:
298 298 fnode = self[node].filenode('.hgtags')
299 299 except error.LookupError:
300 300 continue
301 301 if fnode not in seen:
302 302 seen.add(fnode)
303 303 if not f:
304 304 f = self.filectx('.hgtags', fileid=fnode)
305 305 else:
306 306 f = f.filectx(fnode)
307 307 ctxs.append(f)
308 308
309 309 # read the tags file from each head, ending with the tip
310 310 for f in reversed(ctxs):
311 311 readtags(f.data().splitlines(), f, "global")
312 312
313 313 try:
314 314 data = encoding.fromlocal(self.opener("localtags").read())
315 315 # localtags are stored in the local character set
316 316 # while the internal tag table is stored in UTF-8
317 317 readtags(data.splitlines(), "localtags", "local")
318 318 except IOError:
319 319 pass
320 320
321 321 self.tagscache = {}
322 322 self._tagstypecache = {}
323 323 for k, nh in globaltags.iteritems():
324 324 n = nh[0]
325 325 if n != nullid:
326 326 self.tagscache[k] = n
327 327 self._tagstypecache[k] = tagtypes[k]
328 328 self.tagscache['tip'] = self.changelog.tip()
329 329 return self.tagscache
330 330
331 331 def tagtype(self, tagname):
332 332 '''
333 333 return the type of the given tag. result can be:
334 334
335 335 'local' : a local tag
336 336 'global' : a global tag
337 337 None : tag does not exist
338 338 '''
339 339
340 340 self.tags()
341 341
342 342 return self._tagstypecache.get(tagname)
343 343
344 344 def tagslist(self):
345 345 '''return a list of tags ordered by revision'''
346 346 l = []
347 347 for t, n in self.tags().iteritems():
348 348 try:
349 349 r = self.changelog.rev(n)
350 350 except:
351 351 r = -2 # sort to the beginning of the list if unknown
352 352 l.append((r, t, n))
353 353 return [(t, n) for r, t, n in sorted(l)]
354 354
355 355 def nodetags(self, node):
356 356 '''return the tags associated with a node'''
357 357 if not self.nodetagscache:
358 358 self.nodetagscache = {}
359 359 for t, n in self.tags().iteritems():
360 360 self.nodetagscache.setdefault(n, []).append(t)
361 361 return self.nodetagscache.get(node, [])
362 362
363 363 def _branchtags(self, partial, lrev):
364 364 # TODO: rename this function?
365 365 tiprev = len(self) - 1
366 366 if lrev != tiprev:
367 367 self._updatebranchcache(partial, lrev+1, tiprev+1)
368 368 self._writebranchcache(partial, self.changelog.tip(), tiprev)
369 369
370 370 return partial
371 371
372 372 def branchmap(self):
373 373 tip = self.changelog.tip()
374 374 if self.branchcache is not None and self._branchcachetip == tip:
375 375 return self.branchcache
376 376
377 377 oldtip = self._branchcachetip
378 378 self._branchcachetip = tip
379 379 if self.branchcache is None:
380 380 self.branchcache = {} # avoid recursion in changectx
381 381 else:
382 382 self.branchcache.clear() # keep using the same dict
383 383 if oldtip is None or oldtip not in self.changelog.nodemap:
384 384 partial, last, lrev = self._readbranchcache()
385 385 else:
386 386 lrev = self.changelog.rev(oldtip)
387 387 partial = self._ubranchcache
388 388
389 389 self._branchtags(partial, lrev)
390 390 # this private cache holds all heads (not just tips)
391 391 self._ubranchcache = partial
392 392
393 393 # the branch cache is stored on disk as UTF-8, but in the local
394 394 # charset internally
395 395 for k, v in partial.iteritems():
396 396 self.branchcache[encoding.tolocal(k)] = v
397 397 return self.branchcache
398 398
399 399
400 400 def branchtags(self):
401 401 '''return a dict where branch names map to the tipmost head of
402 402 the branch, open heads come before closed'''
403 403 bt = {}
404 404 for bn, heads in self.branchmap().iteritems():
405 405 head = None
406 406 for i in range(len(heads)-1, -1, -1):
407 407 h = heads[i]
408 408 if 'close' not in self.changelog.read(h)[5]:
409 409 head = h
410 410 break
411 411 # no open heads were found
412 412 if head is None:
413 413 head = heads[-1]
414 414 bt[bn] = head
415 415 return bt
416 416
417 417
418 418 def _readbranchcache(self):
419 419 partial = {}
420 420 try:
421 421 f = self.opener("branchheads.cache")
422 422 lines = f.read().split('\n')
423 423 f.close()
424 424 except (IOError, OSError):
425 425 return {}, nullid, nullrev
426 426
427 427 try:
428 428 last, lrev = lines.pop(0).split(" ", 1)
429 429 last, lrev = bin(last), int(lrev)
430 430 if lrev >= len(self) or self[lrev].node() != last:
431 431 # invalidate the cache
432 432 raise ValueError('invalidating branch cache (tip differs)')
433 433 for l in lines:
434 434 if not l: continue
435 435 node, label = l.split(" ", 1)
436 436 partial.setdefault(label.strip(), []).append(bin(node))
437 437 except KeyboardInterrupt:
438 438 raise
439 439 except Exception, inst:
440 440 if self.ui.debugflag:
441 441 self.ui.warn(str(inst), '\n')
442 442 partial, last, lrev = {}, nullid, nullrev
443 443 return partial, last, lrev
444 444
445 445 def _writebranchcache(self, branches, tip, tiprev):
446 446 try:
447 447 f = self.opener("branchheads.cache", "w", atomictemp=True)
448 448 f.write("%s %s\n" % (hex(tip), tiprev))
449 449 for label, nodes in branches.iteritems():
450 450 for node in nodes:
451 451 f.write("%s %s\n" % (hex(node), label))
452 452 f.rename()
453 453 except (IOError, OSError):
454 454 pass
455 455
456 456 def _updatebranchcache(self, partial, start, end):
457 457 # collect new branch entries
458 458 newbranches = {}
459 459 for r in xrange(start, end):
460 460 c = self[r]
461 461 newbranches.setdefault(c.branch(), []).append(c.node())
462 462 # if older branchheads are reachable from new ones, they aren't
463 463 # really branchheads. Note checking parents is insufficient:
464 464 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
465 465 for branch, newnodes in newbranches.iteritems():
466 466 bheads = partial.setdefault(branch, [])
467 467 bheads.extend(newnodes)
468 468 if len(bheads) < 2:
469 469 continue
470 470 newbheads = []
471 471 # starting from tip means fewer passes over reachable
472 472 while newnodes:
473 473 latest = newnodes.pop()
474 474 if latest not in bheads:
475 475 continue
476 reachable = self.changelog.reachable(latest, bheads[0])
476 reachable = set()
477 for bh in bheads:
478 reachable |= self.changelog.reachable(latest, bh)
477 479 bheads = [b for b in bheads if b not in reachable]
478 480 newbheads.insert(0, latest)
479 481 bheads.extend(newbheads)
480 482 partial[branch] = bheads
481 483
482 484 def lookup(self, key):
483 485 if isinstance(key, int):
484 486 return self.changelog.node(key)
485 487 elif key == '.':
486 488 return self.dirstate.parents()[0]
487 489 elif key == 'null':
488 490 return nullid
489 491 elif key == 'tip':
490 492 return self.changelog.tip()
491 493 n = self.changelog._match(key)
492 494 if n:
493 495 return n
494 496 if key in self.tags():
495 497 return self.tags()[key]
496 498 if key in self.branchtags():
497 499 return self.branchtags()[key]
498 500 n = self.changelog._partialmatch(key)
499 501 if n:
500 502 return n
501 503
502 504 # can't find key, check if it might have come from damaged dirstate
503 505 if key in self.dirstate.parents():
504 506 raise error.Abort(_("working directory has unknown parent '%s'!")
505 507 % short(key))
506 508 try:
507 509 if len(key) == 20:
508 510 key = hex(key)
509 511 except:
510 512 pass
511 513 raise error.RepoError(_("unknown revision '%s'") % key)
512 514
513 515 def local(self):
514 516 return True
515 517
516 518 def join(self, f):
517 519 return os.path.join(self.path, f)
518 520
519 521 def wjoin(self, f):
520 522 return os.path.join(self.root, f)
521 523
522 524 def rjoin(self, f):
523 525 return os.path.join(self.root, util.pconvert(f))
524 526
525 527 def file(self, f):
526 528 if f[0] == '/':
527 529 f = f[1:]
528 530 return filelog.filelog(self.sopener, f)
529 531
530 532 def changectx(self, changeid):
531 533 return self[changeid]
532 534
533 535 def parents(self, changeid=None):
534 536 '''get list of changectxs for parents of changeid'''
535 537 return self[changeid].parents()
536 538
    def filectx(self, path, changeid=None, fileid=None):
        """return a file context for path.

        changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        return context.filectx(self, path, changeid, fileid)
541 543
    def getcwd(self):
        '''return the current working directory as seen by the dirstate'''
        return self.dirstate.getcwd()
544 546
    def pathto(self, f, cwd=None):
        '''render repo file f as a path relative to cwd (delegates to dirstate)'''
        return self.dirstate.pathto(f, cwd)
547 549
    def wfile(self, f, mode='r'):
        '''open working-directory file f in the given mode'''
        return self.wopener(f, mode)
550 552
    def _link(self, f):
        '''return True if working-directory file f is a symbolic link'''
        return os.path.islink(self.wjoin(f))
553 555
    def _filter(self, filter, filename, data):
        '''run data through the first matching pattern of the named
        filter chain ("encode" or "decode") and return the result

        The compiled (matcher, function, params) list for each filter
        name is built lazily from the ui config and cached in
        self.filterpats.  Only the first pattern matching filename is
        applied.
        '''
        if filter not in self.filterpats:
            l = []
            for pat, cmd in self.ui.configitems(filter):
                if cmd == '!':
                    # '!' disables filtering for this pattern
                    continue
                mf = match_.match(self.root, '', [pat])
                fn = None
                params = cmd
                # prefer a registered in-process data filter whose name
                # prefixes the command; the remainder becomes its params
                for name, filterfn in self._datafilters.iteritems():
                    if cmd.startswith(name):
                        fn = filterfn
                        params = cmd[len(name):].lstrip()
                        break
                if not fn:
                    # fall back to running cmd as an external filter
                    fn = lambda s, c, **kwargs: util.filter(s, c)
                # Wrap old filters not supporting keyword arguments
                if not inspect.getargspec(fn)[2]:
                    oldfn = fn
                    fn = lambda s, c, **kwargs: oldfn(s, c)
                l.append((mf, fn, params))
            self.filterpats[filter] = l

        for mf, fn, cmd in self.filterpats[filter]:
            if mf(filename):
                self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
                data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
                break

        return data
584 586
    def adddatafilter(self, name, filter):
        '''register a named in-process data filter for use in
        [encode]/[decode] configuration (see _filter)'''
        self._datafilters[name] = filter
587 589
588 590 def wread(self, filename):
589 591 if self._link(filename):
590 592 data = os.readlink(self.wjoin(filename))
591 593 else:
592 594 data = self.wopener(filename, 'r').read()
593 595 return self._filter("encode", filename, data)
594 596
595 597 def wwrite(self, filename, data, flags):
596 598 data = self._filter("decode", filename, data)
597 599 try:
598 600 os.unlink(self.wjoin(filename))
599 601 except OSError:
600 602 pass
601 603 if 'l' in flags:
602 604 self.wopener.symlink(data, filename)
603 605 else:
604 606 self.wopener(filename, 'w').write(data)
605 607 if 'x' in flags:
606 608 util.set_flags(self.wjoin(filename), False, True)
607 609
    def wwritedata(self, filename, data):
        '''return data as it would be written to the working directory
        (after "decode" filters); no file is actually written'''
        return self._filter("decode", filename, data)
610 612
    def transaction(self):
        '''start (or nest into) a store transaction and return it

        If a transaction is already running, a nested transaction is
        returned.  Otherwise the dirstate and branch are journaled so
        that rollback() can restore them, and a new transaction object
        is created over the store journal.
        '''
        # only a weakref is kept so an abandoned transaction can be
        # garbage collected (and aborted) without us pinning it
        tr = self._transref and self._transref() or None
        if tr and tr.running():
            return tr.nest()

        # abort here if the journal already exists
        if os.path.exists(self.sjoin("journal")):
            raise error.RepoError(_("journal already exists - run hg recover"))

        # save dirstate for rollback
        try:
            ds = self.opener("dirstate").read()
        except IOError:
            ds = ""
        self.opener("journal.dirstate", "w").write(ds)
        self.opener("journal.branch", "w").write(self.dirstate.branch())

        # on close, journal.* files become the undo.* files used by rollback
        renames = [(self.sjoin("journal"), self.sjoin("undo")),
                   (self.join("journal.dirstate"), self.join("undo.dirstate")),
                   (self.join("journal.branch"), self.join("undo.branch"))]
        tr = transaction.transaction(self.ui.warn, self.sopener,
                                     self.sjoin("journal"),
                                     aftertrans(renames),
                                     self.store.createmode)
        self._transref = weakref.ref(tr)
        return tr
637 639
    def recover(self):
        '''recover from an interrupted transaction by replaying the
        store journal; return True if a journal was rolled back'''
        lock = self.lock()
        try:
            if os.path.exists(self.sjoin("journal")):
                self.ui.status(_("rolling back interrupted transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("journal"), self.ui.warn)
                # drop caches that may reference undone revisions
                self.invalidate()
                return True
            else:
                self.ui.warn(_("no interrupted transaction available\n"))
                return False
        finally:
            lock.release()
651 653
    def rollback(self):
        '''undo the last completed transaction, restoring the store,
        the dirstate and (best effort) the current branch'''
        wlock = lock = None
        try:
            wlock = self.wlock()
            lock = self.lock()
            if os.path.exists(self.sjoin("undo")):
                self.ui.status(_("rolling back last transaction\n"))
                transaction.rollback(self.sopener, self.sjoin("undo"), self.ui.warn)
                util.rename(self.join("undo.dirstate"), self.join("dirstate"))
                try:
                    branch = self.opener("undo.branch").read()
                    self.dirstate.setbranch(branch)
                except IOError:
                    # undo.branch missing: keep the current branch and warn
                    self.ui.warn(_("Named branch could not be reset, "
                                   "current branch still is: %s\n")
                                 % encoding.tolocal(self.dirstate.branch()))
                # drop caches derived from the now-undone state
                self.invalidate()
                self.dirstate.invalidate()
            else:
                self.ui.warn(_("no rollback information available\n"))
        finally:
            release(lock, wlock)
674 676
675 677 def invalidate(self):
676 678 for a in "changelog manifest".split():
677 679 if a in self.__dict__:
678 680 delattr(self, a)
679 681 self.tagscache = None
680 682 self._tagstypecache = None
681 683 self.nodetagscache = None
682 684 self.branchcache = None
683 685 self._ubranchcache = None
684 686 self._branchcachetip = None
685 687
    def _lock(self, lockname, wait, releasefn, acquirefn, desc):
        '''acquire the lock file lockname and return the lock object

        First tries a non-blocking acquire; if the lock is held and
        wait is true, retries with a timeout (ui.timeout, default 600
        seconds) after warning the user.  acquirefn, if given, is
        called once the lock is held; releasefn is run on release.
        '''
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except error.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l
700 702
    def lock(self, wait=True):
        '''acquire (or re-enter) the store lock and return it

        The lock is held only via a weakref so that dropping all
        strong references releases it; re-entering an already-held
        lock just bumps its count.  Acquiring invalidates caches.
        '''
        l = self._lockref and self._lockref()
        if l is not None and l.held:
            l.lock()
            return l

        l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
                       _('repository %s') % self.origroot)
        self._lockref = weakref.ref(l)
        return l
711 713
    def wlock(self, wait=True):
        '''acquire (or re-enter) the working directory lock and return it

        On release the dirstate is written out; on acquire any cached
        dirstate is invalidated.  Like lock(), only a weakref to the
        lock object is kept.
        '''
        l = self._wlockref and self._wlockref()
        if l is not None and l.held:
            l.lock()
            return l

        l = self._lock(self.join("wlock"), wait, self.dirstate.write,
                       self.dirstate.invalidate, _('working directory of %s') %
                       self.origroot)
        self._wlockref = weakref.ref(l)
        return l
723 725
    def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        fctx: filectx holding the file's path, data and rename info
        manifest1/manifest2: manifests of the commit's parents
        (manifest2 is only populated for a merge)
        linkrev: changelog revision the new file revision links to
        tr: the running transaction
        changelist: list of changed file names, appended to in place

        Returns the new filelog node, or the existing first-parent
        node when only flags changed or nothing changed.
        """

        fname = fctx.path()
        text = fctx.data()
        flog = self.file(fname)
        fparent1 = manifest1.get(fname, nullid)
        # keep the original second parent around (fparent2o) to detect
        # flag-only merge changes at the end
        fparent2 = fparent2o = manifest2.get(fname, nullid)

        meta = {}
        copy = fctx.renamed()
        if copy and copy[0] != fname:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4        as the merge base
            #

            cfname = copy[0]
            crev = manifest1.get(cfname)
            newfparent = fparent2

            if manifest2: # branch merge
                if fparent2 == nullid or crev is None: # copied on remote side
                    if cfname in manifest2:
                        crev = manifest2[cfname]
                        newfparent = fparent1

            # find source in nearest ancestor if we've lost track
            if not crev:
                self.ui.debug(_(" %s: searching for copy revision for %s\n") %
                              (fname, cfname))
                for ancestor in self['.'].ancestors():
                    if cfname in ancestor:
                        crev = ancestor[cfname].filenode()
                        break

            self.ui.debug(_(" %s: copy %s:%s\n") % (fname, cfname, hex(crev)))
            meta["copy"] = cfname
            meta["copyrev"] = hex(crev)
            fparent1, fparent2 = nullid, newfparent
        elif fparent2 != nullid:
            # is one parent an ancestor of the other?
            fparentancestor = flog.ancestor(fparent1, fparent2)
            if fparentancestor == fparent1:
                fparent1, fparent2 = fparent2, nullid
            elif fparentancestor == fparent2:
                fparent2 = nullid

        # is the file changed?
        if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
            changelist.append(fname)
            return flog.add(text, meta, tr, linkrev, fparent1, fparent2)

        # are just the flags changed during merge?
        if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
            changelist.append(fname)

        return fparent1
798 800
    def commit(self, text="", user=None, date=None, match=None, force=False,
               editor=False, extra={}):
        """Add a new revision to current repository.

        Revision information is gathered from the working directory,
        match can be used to filter the committed files. If editor is
        supplied, it is called to get a commit message.

        Returns the node of the new changeset, or None when there is
        nothing to commit.
        """

        def fail(f, msg):
            raise util.Abort('%s: %s' % (f, msg))

        if not match:
            match = match_.always(self.root, '')

        if not force:
            # record visited directories and abort on bad files so we
            # can verify explicit patterns below
            vdirs = []
            match.dir = vdirs.append
            match.bad = fail

        wlock = self.wlock()
        try:
            p1, p2 = self.dirstate.parents()
            wctx = self[None]

            if (not force and p2 != nullid and match and
                (match.files() or match.anypats())):
                raise util.Abort(_('cannot partially commit a merge '
                                   '(do not specify files or patterns)'))

            # changes: (modified, added, removed, deleted, unknown,
            #           ignored, clean)
            changes = self.status(match=match, clean=force)
            if force:
                changes[0].extend(changes[6]) # mq may commit unchanged files

            # check subrepos
            subs = []
            for s in wctx.substate:
                if match(s) and wctx.sub(s).dirty():
                    subs.append(s)
            if subs and '.hgsubstate' not in changes[0]:
                changes[0].insert(0, '.hgsubstate')

            # make sure all explicit patterns are matched
            if not force and match.files():
                matched = set(changes[0] + changes[1] + changes[2])

                for f in match.files():
                    if f == '.' or f in matched or f in wctx.substate:
                        continue
                    if f in changes[3]: # missing
                        fail(f, _('file not found!'))
                    if f in vdirs: # visited directory
                        d = f + '/'
                        for mf in matched:
                            if mf.startswith(d):
                                break
                        else:
                            fail(f, _("no match under directory!"))
                    elif f not in self.dirstate:
                        fail(f, _("file not tracked!"))

            # nothing to commit (unless forcing, closing a branch head,
            # committing a merge, or changing branch)
            if (not force and not extra.get("close") and p2 == nullid
                and not (changes[0] or changes[1] or changes[2])
                and self[None].branch() == self['.'].branch()):
                return None

            ms = merge_.mergestate(self)
            for f in changes[0]:
                if f in ms and ms[f] == 'u':
                    raise util.Abort(_("unresolved merge conflicts "
                                       "(see hg resolve)"))

            cctx = context.workingctx(self, (p1, p2), text, user, date,
                                      extra, changes)
            if editor:
                cctx._text = editor(self, cctx, subs)

            # commit subs
            if subs:
                state = wctx.substate.copy()
                for s in subs:
                    self.ui.status(_('committing subrepository %s\n') % s)
                    sr = wctx.sub(s).commit(cctx._text, user, date)
                    state[s] = (state[s][0], sr)
                subrepo.writestate(self, state)

            ret = self.commitctx(cctx, True)

            # update dirstate and mergestate
            for f in changes[0] + changes[1]:
                self.dirstate.normal(f)
            for f in changes[2]:
                self.dirstate.forget(f)
            self.dirstate.setparents(ret)
            ms.reset()

            return ret

        finally:
            wlock.release()
899 901
    def commitctx(self, ctx, error=False):
        """Add a new revision to current repository.

        Revision information is passed via the context argument.
        With error=True, trouble reading a file's data (OSError or
        IOError) is reported and re-raised; with error=False such
        files are silently treated as removed.  Returns the new
        changelog node.
        """

        tr = lock = None
        removed = ctx.removed()
        p1, p2 = ctx.p1(), ctx.p2()
        m1 = p1.manifest().copy()
        m2 = p2.manifest()
        user = ctx.user()

        xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
        self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

        lock = self.lock()
        try:
            tr = self.transaction()
            # a proxy keeps filelogs from pinning the transaction alive
            trp = weakref.proxy(tr)

            # check in files
            new = {}
            changed = []
            linkrev = len(self)
            for f in sorted(ctx.modified() + ctx.added()):
                self.ui.note(f + "\n")
                try:
                    fctx = ctx[f]
                    new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
                                              changed)
                    m1.set(f, fctx.flags())
                except (OSError, IOError):
                    if error:
                        self.ui.warn(_("trouble committing %s!\n") % f)
                        raise
                    else:
                        removed.append(f)

            # update manifest
            m1.update(new)
            # only record removals of files that actually existed in a parent
            removed = [f for f in sorted(removed) if f in m1 or f in m2]
            drop = [f for f in removed if f in m1]
            for f in drop:
                del m1[f]
            mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
                                   p2.manifestnode(), (new, drop))

            # update changelog
            self.changelog.delayupdate()
            n = self.changelog.add(mn, changed + removed, ctx.description(),
                                   trp, p1.node(), p2.node(),
                                   user, ctx.date(), ctx.extra().copy())
            # pretxncommit hooks see the pending changelog via this callback
            p = lambda: self.changelog.writepending() and self.root or ""
            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                      parent2=xp2, pending=p)
            self.changelog.finalize(trp)
            tr.close()

            if self.branchcache:
                # refresh the branch cache to include the new changeset
                self.branchtags()

            self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
            return n
        finally:
            del tr
            lock.release()
967 969
    def walk(self, match, node=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function

        With node=None the working directory is walked.
        '''
        return self[node].walk(match)
975 977
    def status(self, node1='.', node2=None, match=None,
               ignored=False, clean=False, unknown=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns a tuple of sorted lists: (modified, added, removed,
        deleted, unknown, ignored, clean).  The last three are only
        populated when the corresponding flag is passed.
        """

        def mfmatches(ctx):
            # manifest of ctx restricted to files accepted by match
            mf = ctx.manifest().copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        if isinstance(node1, context.changectx):
            ctx1 = node1
        else:
            ctx1 = self[node1]
        if isinstance(node2, context.changectx):
            ctx2 = node2
        else:
            ctx2 = self[node2]

        working = ctx2.rev() is None
        parentworking = working and ctx1 == self['.']
        match = match or match_.always(self.root, self.getcwd())
        listignored, listclean, listunknown = ignored, clean, unknown

        # load earliest manifest first for caching reasons
        if not working and ctx2.rev() < ctx1.rev():
            ctx2.manifest()

        if not parentworking:
            def bad(f, msg):
                if f not in ctx1:
                    self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
            match.bad = bad

        if working: # we need to scan the working dir
            s = self.dirstate.status(match, listignored, listclean, listunknown)
            # cmp: files the dirstate could not decide about by stat alone
            cmp, modified, added, removed, deleted, unknown, ignored, clean = s

            # check for any possibly clean files
            if parentworking and cmp:
                fixup = []
                # do a full compare of any files that might have changed
                for f in sorted(cmp):
                    if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
                        or ctx1[f].cmp(ctx2[f].data())):
                        modified.append(f)
                    else:
                        fixup.append(f)

                if listclean:
                    clean += fixup

                # update dirstate for files that are actually clean
                if fixup:
                    try:
                        # updating the dirstate is optional
                        # so we don't wait on the lock
                        wlock = self.wlock(False)
                        try:
                            for f in fixup:
                                self.dirstate.normal(f)
                        finally:
                            wlock.release()
                    except error.LockError:
                        pass

        if not parentworking:
            mf1 = mfmatches(ctx1)
            if working:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                mf2 = mfmatches(self['.'])
                for f in cmp + modified + added:
                    mf2[f] = None
                    mf2.set(f, ctx2.flags(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]
            else:
                # we are comparing two revisions
                deleted, unknown, ignored = [], [], []
                mf2 = mfmatches(ctx2)

            modified, added, clean = [], [], []
            for fn in mf2:
                if fn in mf1:
                    # flag change, or node change confirmed by content
                    # comparison (mf2[fn] is None for working-dir files)
                    if (mf1.flags(fn) != mf2.flags(fn) or
                        (mf1[fn] != mf2[fn] and
                         (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
                        modified.append(fn)
                    elif listclean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)
            # whatever is left in mf1 was not seen in mf2: removed
            removed = mf1.keys()

        r = modified, added, removed, deleted, unknown, ignored, clean
        [l.sort() for l in r]
        return r
1081 1083
1082 1084 def add(self, list):
1083 1085 wlock = self.wlock()
1084 1086 try:
1085 1087 rejected = []
1086 1088 for f in list:
1087 1089 p = self.wjoin(f)
1088 1090 try:
1089 1091 st = os.lstat(p)
1090 1092 except:
1091 1093 self.ui.warn(_("%s does not exist!\n") % f)
1092 1094 rejected.append(f)
1093 1095 continue
1094 1096 if st.st_size > 10000000:
1095 1097 self.ui.warn(_("%s: files over 10MB may cause memory and"
1096 1098 " performance problems\n"
1097 1099 "(use 'hg revert %s' to unadd the file)\n")
1098 1100 % (f, f))
1099 1101 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1100 1102 self.ui.warn(_("%s not added: only files and symlinks "
1101 1103 "supported currently\n") % f)
1102 1104 rejected.append(p)
1103 1105 elif self.dirstate[f] in 'amn':
1104 1106 self.ui.warn(_("%s already tracked!\n") % f)
1105 1107 elif self.dirstate[f] == 'r':
1106 1108 self.dirstate.normallookup(f)
1107 1109 else:
1108 1110 self.dirstate.add(f)
1109 1111 return rejected
1110 1112 finally:
1111 1113 wlock.release()
1112 1114
1113 1115 def forget(self, list):
1114 1116 wlock = self.wlock()
1115 1117 try:
1116 1118 for f in list:
1117 1119 if self.dirstate[f] != 'a':
1118 1120 self.ui.warn(_("%s not added!\n") % f)
1119 1121 else:
1120 1122 self.dirstate.forget(f)
1121 1123 finally:
1122 1124 wlock.release()
1123 1125
    def remove(self, list, unlink=False):
        '''mark the given files removed in the dirstate

        With unlink=True the files are also deleted from the working
        directory first; a file that still exists afterwards is
        reported and left tracked.
        '''
        if unlink:
            for f in list:
                try:
                    util.unlink(self.wjoin(f))
                except OSError, inst:
                    # already-missing files are fine when unlinking
                    if inst.errno != errno.ENOENT:
                        raise
        wlock = self.wlock()
        try:
            for f in list:
                if unlink and os.path.exists(self.wjoin(f)):
                    # unlink above failed silently (e.g. a directory)
                    self.ui.warn(_("%s still exists!\n") % f)
                elif self.dirstate[f] == 'a':
                    # never committed: just forget the pending add
                    self.dirstate.forget(f)
                elif f not in self.dirstate:
                    self.ui.warn(_("%s not tracked!\n") % f)
                else:
                    self.dirstate.remove(f)
        finally:
            wlock.release()
1145 1147
    def undelete(self, list):
        '''restore files marked removed ('r') from their parent revision

        The file contents and flags are taken from whichever dirstate
        parent manifest contains the file, and the file is written
        back to the working directory and marked normal.
        '''
        manifests = [self.manifest.read(self.changelog.read(p)[0])
                     for p in self.dirstate.parents() if p != nullid]
        wlock = self.wlock()
        try:
            for f in list:
                if self.dirstate[f] != 'r':
                    self.ui.warn(_("%s not removed!\n") % f)
                else:
                    # prefer the first parent's manifest if it has f
                    m = f in manifests[0] and manifests[0] or manifests[1]
                    t = self.file(f).read(m[f])
                    self.wwrite(f, t, m.flags(f))
                    self.dirstate.normal(f)
        finally:
            wlock.release()
1161 1163
    def copy(self, source, dest):
        '''record in the dirstate that dest is a copy of source

        dest must already exist in the working directory as a file or
        symlink; it is added to tracking first if it is unknown or
        marked removed.
        '''
        p = self.wjoin(dest)
        if not (os.path.exists(p) or os.path.islink(p)):
            self.ui.warn(_("%s does not exist!\n") % dest)
        elif not (os.path.isfile(p) or os.path.islink(p)):
            self.ui.warn(_("copy failed: %s is not a file or a "
                           "symbolic link\n") % dest)
        else:
            wlock = self.wlock()
            try:
                if self.dirstate[dest] in '?r':
                    self.dirstate.add(dest)
                self.dirstate.copy(source, dest)
            finally:
                wlock.release()
1177 1179
1178 1180 def heads(self, start=None):
1179 1181 heads = self.changelog.heads(start)
1180 1182 # sort the output in rev descending order
1181 1183 heads = [(-self.changelog.rev(h), h) for h in heads]
1182 1184 return [n for (r, n) in sorted(heads)]
1183 1185
    def branchheads(self, branch=None, start=None, closed=False):
        '''return the heads of the named branch, highest revision first

        branch defaults to the working directory's branch.  With
        start, only heads reachable from start are returned.  Unless
        closed is true, heads whose changeset carries the 'close'
        extra are filtered out.  Returns [] for an unknown branch.
        '''
        if branch is None:
            branch = self[None].branch()
        branches = self.branchmap()
        if branch not in branches:
            return []
        bheads = branches[branch]
        # the cache returns heads ordered lowest to highest
        bheads.reverse()
        if start is not None:
            # filter out the heads that cannot be reached from startrev
            bheads = self.changelog.nodesbetween([start], bheads)[2]
        if not closed:
            bheads = [h for h in bheads if
                      ('close' not in self.changelog.read(h)[5])]
        return bheads
1200 1202
    def branches(self, nodes):
        '''return linear history segments ending at the given nodes

        For each node, walk first parents back to a merge or a root
        and emit a 4-tuple (head, root, first parent of root, second
        parent of root).  With no nodes, the changelog tip is used.
        Part of the wire discovery protocol.
        '''
        if not nodes:
            nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while 1:
                p = self.changelog.parents(n)
                # stop at a merge (two parents) or at a root
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b
1214 1216
1215 1217 def between(self, pairs):
1216 1218 r = []
1217 1219
1218 1220 for top, bottom in pairs:
1219 1221 n, l, i = top, [], 0
1220 1222 f = 1
1221 1223
1222 1224 while n != bottom and n != nullid:
1223 1225 p = self.changelog.parents(n)[0]
1224 1226 if i == f:
1225 1227 l.append(n)
1226 1228 f = f * 2
1227 1229 n = p
1228 1230 i += 1
1229 1231
1230 1232 r.append(l)
1231 1233
1232 1234 return r
1233 1235
    def findincoming(self, remote, base=None, heads=None, force=False):
        """Return list of roots of the subsets of missing nodes from remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore base will be updated to include the nodes that exists
        in self and remote but no children exists in self and remote.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        All the descendants of the list returned are missing in self.
        (and so we know that the rest of the nodes are missing in remote, see
        outgoing)
        """
        # convenience wrapper: the "missing roots" element of
        # findcommonincoming's result
        return self.findcommonincoming(remote, base, heads, force)[1]
1251 1253
    def findcommonincoming(self, remote, base=None, heads=None, force=False):
        """Return a tuple (common, missing roots, heads) used to identify
        missing nodes from remote.

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore base will be updated to include the nodes that exists
        in self and remote but no children exists in self and remote.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.

        Discovery proceeds in two phases over the wire protocol:
        first remote.branches() walks linear segments from the remote
        heads toward known nodes, then remote.between() binary
        searches each partially-known segment for the exact boundary.
        """
        m = self.changelog.nodemap
        search = []
        fetch = set()
        seen = set()
        seenbranch = set()
        if base is None:
            base = {}

        if not heads:
            heads = remote.heads()

        if self.changelog.tip() == nullid:
            # we are empty: everything the remote has is missing
            base[nullid] = 1
            if heads != [nullid]:
                return [nullid], [nullid], list(heads)
            return [nullid], [], []

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        heads = unknown
        if not unknown:
            # every remote head is already known locally
            return base.keys(), [], []

        req = set(unknown)
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid: # found the end of the branch
                    pass
                elif n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                elif n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n[0:2]) # schedule branch range for scanning
                    seenbranch.add(n)
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch.add(n[1]) # earliest unknown
                        for p in n[2:4]:
                            if p in m:
                                base[p] = 1 # latest known

                    # queue unknown parents for the next request batch
                    for p in n[2:4]:
                        if p not in req and p not in m:
                            r.append(p)
                            req.add(p)
                seen.add(n[0])

            if r:
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                            (reqcnt, " ".join(map(short, r))))
                # batch branch requests, ten nodes at a time
                for p in xrange(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        unknown.append(b)

        # do binary search on the branches we found
        while search:
            newsearch = []
            reqcnt += 1
            for n, l in zip(search, remote.between(search)):
                l.append(n[1])
                p = n[0]
                f = 1
                for i in l:
                    self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                    if i in m:
                        if f <= 2:
                            # adjacent samples: boundary found
                            self.ui.debug(_("found new branch changeset %s\n") %
                                              short(p))
                            fetch.add(p)
                            base[i] = 1
                        else:
                            self.ui.debug(_("narrowed branch search to %s:%s\n")
                                          % (short(p), short(i)))
                            newsearch.append((p, i))
                        break
                    p, f = i, f * 2
            search = newsearch

        # sanity check our fetch list
        for f in fetch:
            if f in m:
                raise error.RepoError(_("already have changeset ")
                                      + short(f[:4]))

        if base.keys() == [nullid]:
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.debug(_("found new changesets starting at ") +
                      " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return base.keys(), list(fetch), heads
1393 1395
    def findoutgoing(self, remote, base=None, heads=None, force=False):
        """Return list of nodes that are roots of subsets not in remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads, and return a second element which
        contains all remote heads which get new children.
        """
        if base is None:
            # no known-common set supplied: discover it from the remote
            base = {}
            self.findincoming(remote, base, heads, force=force)

        self.ui.debug(_("common changesets up to ")
                      + " ".join(map(short, base.keys())) + "\n")

        remain = set(self.changelog.nodemap)

        # prune everything remote has from the tree
        remain.remove(nullid)
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                remain.remove(n)
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        # find every remote head that will get new children
        updated_heads = set()
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)
            if heads:
                if p1 in heads:
                    updated_heads.add(p1)
                if p2 in heads:
                    updated_heads.add(p2)

        # this is the set of all roots we have to push
        if heads:
            return subset, list(updated_heads)
        else:
            return subset
1441 1443
    def pull(self, remote, heads=None, force=False):
        '''pull changes from the given remote repository

        heads, when given, restricts the pull to changesets that are
        ancestors of those heads (requires the remote to support
        changegroupsubset).  Returns the result of addchangegroup,
        or 0 when no changes were found.
        '''
        lock = self.lock()
        try:
            common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
                                                            force=force)
            if fetch == [nullid]:
                # local repository is empty
                self.ui.status(_("requesting all changes\n"))

            if not fetch:
                self.ui.status(_("no changes found\n"))
                return 0

            if heads is None and remote.capable('changegroupsubset'):
                # limit the pull to the remote heads found by discovery
                heads = rheads

            if heads is None:
                cg = remote.changegroup(fetch, 'pull')
            else:
                if not remote.capable('changegroupsubset'):
                    raise util.Abort(_("Partial pull cannot be done because "
                                       "other repository doesn't support "
                                       "changegroupsubset."))
                cg = remote.changegroupsubset(fetch, heads, 'pull')
            return self.addchangegroup(cg, 'pull', remote.url())
        finally:
            lock.release()
1468 1470
1469 1471 def push(self, remote, force=False, revs=None):
1470 1472 # there are two ways to push to remote repo:
1471 1473 #
1472 1474 # addchangegroup assumes local user can lock remote
1473 1475 # repo (local filesystem, old ssh servers).
1474 1476 #
1475 1477 # unbundle assumes local user cannot lock remote repo (new ssh
1476 1478 # servers, http servers).
1477 1479
1478 1480 if remote.capable('unbundle'):
1479 1481 return self.push_unbundle(remote, force, revs)
1480 1482 return self.push_addchangegroup(remote, force, revs)
1481 1483
1482 1484 def prepush(self, remote, force, revs):
1483 1485 common = {}
1484 1486 remote_heads = remote.heads()
1485 1487 inc = self.findincoming(remote, common, remote_heads, force=force)
1486 1488
1487 1489 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1488 1490 if revs is not None:
1489 1491 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1490 1492 else:
1491 1493 bases, heads = update, self.changelog.heads()
1492 1494
1493 1495 def checkbranch(lheads, rheads, updatelh):
1494 1496 '''
1495 1497 check whether there are more local heads than remote heads on
1496 1498 a specific branch.
1497 1499
1498 1500 lheads: local branch heads
1499 1501 rheads: remote branch heads
1500 1502 updatelh: outgoing local branch heads
1501 1503 '''
1502 1504
1503 1505 warn = 0
1504 1506
1505 1507 if not revs and len(lheads) > len(rheads):
1506 1508 warn = 1
1507 1509 else:
1508 1510 updatelheads = [self.changelog.heads(x, lheads)
1509 1511 for x in updatelh]
1510 1512 newheads = set(sum(updatelheads, [])) & set(lheads)
1511 1513
1512 1514 if not newheads:
1513 1515 return True
1514 1516
1515 1517 for r in rheads:
1516 1518 if r in self.changelog.nodemap:
1517 1519 desc = self.changelog.heads(r, heads)
1518 1520 l = [h for h in heads if h in desc]
1519 1521 if not l:
1520 1522 newheads.add(r)
1521 1523 else:
1522 1524 newheads.add(r)
1523 1525 if len(newheads) > len(rheads):
1524 1526 warn = 1
1525 1527
1526 1528 if warn:
1527 1529 if not rheads: # new branch requires --force
1528 1530 self.ui.warn(_("abort: push creates new"
1529 1531 " remote branch '%s'!\n") %
1530 1532 self[updatelh[0]].branch())
1531 1533 else:
1532 1534 self.ui.warn(_("abort: push creates new remote heads!\n"))
1533 1535
1534 1536 self.ui.status(_("(did you forget to merge?"
1535 1537 " use push -f to force)\n"))
1536 1538 return False
1537 1539 return True
1538 1540
1539 1541 if not bases:
1540 1542 self.ui.status(_("no changes found\n"))
1541 1543 return None, 1
1542 1544 elif not force:
1543 1545 # Check for each named branch if we're creating new remote heads.
1544 1546 # To be a remote head after push, node must be either:
1545 1547 # - unknown locally
1546 1548 # - a local outgoing head descended from update
1547 1549 # - a remote head that's known locally and not
1548 1550 # ancestral to an outgoing head
1549 1551 #
1550 1552 # New named branches cannot be created without --force.
1551 1553
1552 1554 if remote_heads != [nullid]:
1553 1555 if remote.capable('branchmap'):
1554 1556 localhds = {}
1555 1557 if not revs:
1556 1558 localhds = self.branchmap()
1557 1559 else:
1558 1560 for n in heads:
1559 1561 branch = self[n].branch()
1560 1562 if branch in localhds:
1561 1563 localhds[branch].append(n)
1562 1564 else:
1563 1565 localhds[branch] = [n]
1564 1566
1565 1567 remotehds = remote.branchmap()
1566 1568
1567 1569 for lh in localhds:
1568 1570 if lh in remotehds:
1569 1571 rheads = remotehds[lh]
1570 1572 else:
1571 1573 rheads = []
1572 1574 lheads = localhds[lh]
1573 1575 updatelh = [upd for upd in update
1574 1576 if self[upd].branch() == lh]
1575 1577 if not updatelh:
1576 1578 continue
1577 1579 if not checkbranch(lheads, rheads, updatelh):
1578 1580 return None, 0
1579 1581 else:
1580 1582 if not checkbranch(heads, remote_heads, update):
1581 1583 return None, 0
1582 1584
1583 1585 if inc:
1584 1586 self.ui.warn(_("note: unsynced remote changes!\n"))
1585 1587
1586 1588
1587 1589 if revs is None:
1588 1590 # use the fast path, no race possible on push
1589 1591 cg = self._changegroup(common.keys(), 'push')
1590 1592 else:
1591 1593 cg = self.changegroupsubset(update, revs, 'push')
1592 1594 return cg, remote_heads
1593 1595
1594 1596 def push_addchangegroup(self, remote, force, revs):
1595 1597 lock = remote.lock()
1596 1598 try:
1597 1599 ret = self.prepush(remote, force, revs)
1598 1600 if ret[0] is not None:
1599 1601 cg, remote_heads = ret
1600 1602 return remote.addchangegroup(cg, 'push', self.url())
1601 1603 return ret[1]
1602 1604 finally:
1603 1605 lock.release()
1604 1606
1605 1607 def push_unbundle(self, remote, force, revs):
1606 1608 # local repo finds heads on server, finds out what revs it
1607 1609 # must push. once revs transferred, if server finds it has
1608 1610 # different heads (someone else won commit/push race), server
1609 1611 # aborts.
1610 1612
1611 1613 ret = self.prepush(remote, force, revs)
1612 1614 if ret[0] is not None:
1613 1615 cg, remote_heads = ret
1614 1616 if force: remote_heads = ['force']
1615 1617 return remote.unbundle(cg, remote_heads, 'push')
1616 1618 return ret[1]
1617 1619
1618 1620 def changegroupinfo(self, nodes, source):
1619 1621 if self.ui.verbose or source == 'bundle':
1620 1622 self.ui.status(_("%d changesets found\n") % len(nodes))
1621 1623 if self.ui.debugflag:
1622 1624 self.ui.debug(_("list of changesets:\n"))
1623 1625 for node in nodes:
1624 1626 self.ui.debug("%s\n" % hex(node))
1625 1627
1626 1628 def changegroupsubset(self, bases, heads, source, extranodes=None):
1627 1629 """This function generates a changegroup consisting of all the nodes
1628 1630 that are descendents of any of the bases, and ancestors of any of
1629 1631 the heads.
1630 1632
1631 1633 It is fairly complex as determining which filenodes and which
1632 1634 manifest nodes need to be included for the changeset to be complete
1633 1635 is non-trivial.
1634 1636
1635 1637 Another wrinkle is doing the reverse, figuring out which changeset in
1636 1638 the changegroup a particular filenode or manifestnode belongs to.
1637 1639
1638 1640 The caller can specify some nodes that must be included in the
1639 1641 changegroup using the extranodes argument. It should be a dict
1640 1642 where the keys are the filenames (or 1 for the manifest), and the
1641 1643 values are lists of (node, linknode) tuples, where node is a wanted
1642 1644 node and linknode is the changelog node that should be transmitted as
1643 1645 the linkrev.
1644 1646 """
1645 1647
1646 1648 if extranodes is None:
1647 1649 # can we go through the fast path ?
1648 1650 heads.sort()
1649 1651 allheads = self.heads()
1650 1652 allheads.sort()
1651 1653 if heads == allheads:
1652 1654 common = []
1653 1655 # parents of bases are known from both sides
1654 1656 for n in bases:
1655 1657 for p in self.changelog.parents(n):
1656 1658 if p != nullid:
1657 1659 common.append(p)
1658 1660 return self._changegroup(common, source)
1659 1661
1660 1662 self.hook('preoutgoing', throw=True, source=source)
1661 1663
1662 1664 # Set up some initial variables
1663 1665 # Make it easy to refer to self.changelog
1664 1666 cl = self.changelog
1665 1667 # msng is short for missing - compute the list of changesets in this
1666 1668 # changegroup.
1667 1669 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1668 1670 self.changegroupinfo(msng_cl_lst, source)
1669 1671 # Some bases may turn out to be superfluous, and some heads may be
1670 1672 # too. nodesbetween will return the minimal set of bases and heads
1671 1673 # necessary to re-create the changegroup.
1672 1674
1673 1675 # Known heads are the list of heads that it is assumed the recipient
1674 1676 # of this changegroup will know about.
1675 1677 knownheads = set()
1676 1678 # We assume that all parents of bases are known heads.
1677 1679 for n in bases:
1678 1680 knownheads.update(cl.parents(n))
1679 1681 knownheads.discard(nullid)
1680 1682 knownheads = list(knownheads)
1681 1683 if knownheads:
1682 1684 # Now that we know what heads are known, we can compute which
1683 1685 # changesets are known. The recipient must know about all
1684 1686 # changesets required to reach the known heads from the null
1685 1687 # changeset.
1686 1688 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1687 1689 junk = None
1688 1690 # Transform the list into a set.
1689 1691 has_cl_set = set(has_cl_set)
1690 1692 else:
1691 1693 # If there were no known heads, the recipient cannot be assumed to
1692 1694 # know about any changesets.
1693 1695 has_cl_set = set()
1694 1696
1695 1697 # Make it easy to refer to self.manifest
1696 1698 mnfst = self.manifest
1697 1699 # We don't know which manifests are missing yet
1698 1700 msng_mnfst_set = {}
1699 1701 # Nor do we know which filenodes are missing.
1700 1702 msng_filenode_set = {}
1701 1703
1702 1704 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1703 1705 junk = None
1704 1706
1705 1707 # A changeset always belongs to itself, so the changenode lookup
1706 1708 # function for a changenode is identity.
1707 1709 def identity(x):
1708 1710 return x
1709 1711
1710 1712 # If we determine that a particular file or manifest node must be a
1711 1713 # node that the recipient of the changegroup will already have, we can
1712 1714 # also assume the recipient will have all the parents. This function
1713 1715 # prunes them from the set of missing nodes.
1714 1716 def prune_parents(revlog, hasset, msngset):
1715 1717 haslst = list(hasset)
1716 1718 haslst.sort(key=revlog.rev)
1717 1719 for node in haslst:
1718 1720 parentlst = [p for p in revlog.parents(node) if p != nullid]
1719 1721 while parentlst:
1720 1722 n = parentlst.pop()
1721 1723 if n not in hasset:
1722 1724 hasset.add(n)
1723 1725 p = [p for p in revlog.parents(n) if p != nullid]
1724 1726 parentlst.extend(p)
1725 1727 for n in hasset:
1726 1728 msngset.pop(n, None)
1727 1729
1728 1730 # This is a function generating function used to set up an environment
1729 1731 # for the inner function to execute in.
1730 1732 def manifest_and_file_collector(changedfileset):
1731 1733 # This is an information gathering function that gathers
1732 1734 # information from each changeset node that goes out as part of
1733 1735 # the changegroup. The information gathered is a list of which
1734 1736 # manifest nodes are potentially required (the recipient may
1735 1737 # already have them) and total list of all files which were
1736 1738 # changed in any changeset in the changegroup.
1737 1739 #
1738 1740 # We also remember the first changenode we saw any manifest
1739 1741 # referenced by so we can later determine which changenode 'owns'
1740 1742 # the manifest.
1741 1743 def collect_manifests_and_files(clnode):
1742 1744 c = cl.read(clnode)
1743 1745 for f in c[3]:
1744 1746 # This is to make sure we only have one instance of each
1745 1747 # filename string for each filename.
1746 1748 changedfileset.setdefault(f, f)
1747 1749 msng_mnfst_set.setdefault(c[0], clnode)
1748 1750 return collect_manifests_and_files
1749 1751
1750 1752 # Figure out which manifest nodes (of the ones we think might be part
1751 1753 # of the changegroup) the recipient must know about and remove them
1752 1754 # from the changegroup.
1753 1755 def prune_manifests():
1754 1756 has_mnfst_set = set()
1755 1757 for n in msng_mnfst_set:
1756 1758 # If a 'missing' manifest thinks it belongs to a changenode
1757 1759 # the recipient is assumed to have, obviously the recipient
1758 1760 # must have that manifest.
1759 1761 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1760 1762 if linknode in has_cl_set:
1761 1763 has_mnfst_set.add(n)
1762 1764 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1763 1765
1764 1766 # Use the information collected in collect_manifests_and_files to say
1765 1767 # which changenode any manifestnode belongs to.
1766 1768 def lookup_manifest_link(mnfstnode):
1767 1769 return msng_mnfst_set[mnfstnode]
1768 1770
1769 1771 # A function generating function that sets up the initial environment
1770 1772 # the inner function.
1771 1773 def filenode_collector(changedfiles):
1772 1774 next_rev = [0]
1773 1775 # This gathers information from each manifestnode included in the
1774 1776 # changegroup about which filenodes the manifest node references
1775 1777 # so we can include those in the changegroup too.
1776 1778 #
1777 1779 # It also remembers which changenode each filenode belongs to. It
1778 1780 # does this by assuming the a filenode belongs to the changenode
1779 1781 # the first manifest that references it belongs to.
1780 1782 def collect_msng_filenodes(mnfstnode):
1781 1783 r = mnfst.rev(mnfstnode)
1782 1784 if r == next_rev[0]:
1783 1785 # If the last rev we looked at was the one just previous,
1784 1786 # we only need to see a diff.
1785 1787 deltamf = mnfst.readdelta(mnfstnode)
1786 1788 # For each line in the delta
1787 1789 for f, fnode in deltamf.iteritems():
1788 1790 f = changedfiles.get(f, None)
1789 1791 # And if the file is in the list of files we care
1790 1792 # about.
1791 1793 if f is not None:
1792 1794 # Get the changenode this manifest belongs to
1793 1795 clnode = msng_mnfst_set[mnfstnode]
1794 1796 # Create the set of filenodes for the file if
1795 1797 # there isn't one already.
1796 1798 ndset = msng_filenode_set.setdefault(f, {})
1797 1799 # And set the filenode's changelog node to the
1798 1800 # manifest's if it hasn't been set already.
1799 1801 ndset.setdefault(fnode, clnode)
1800 1802 else:
1801 1803 # Otherwise we need a full manifest.
1802 1804 m = mnfst.read(mnfstnode)
1803 1805 # For every file in we care about.
1804 1806 for f in changedfiles:
1805 1807 fnode = m.get(f, None)
1806 1808 # If it's in the manifest
1807 1809 if fnode is not None:
1808 1810 # See comments above.
1809 1811 clnode = msng_mnfst_set[mnfstnode]
1810 1812 ndset = msng_filenode_set.setdefault(f, {})
1811 1813 ndset.setdefault(fnode, clnode)
1812 1814 # Remember the revision we hope to see next.
1813 1815 next_rev[0] = r + 1
1814 1816 return collect_msng_filenodes
1815 1817
1816 1818 # We have a list of filenodes we think we need for a file, lets remove
1817 1819 # all those we know the recipient must have.
1818 1820 def prune_filenodes(f, filerevlog):
1819 1821 msngset = msng_filenode_set[f]
1820 1822 hasset = set()
1821 1823 # If a 'missing' filenode thinks it belongs to a changenode we
1822 1824 # assume the recipient must have, then the recipient must have
1823 1825 # that filenode.
1824 1826 for n in msngset:
1825 1827 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1826 1828 if clnode in has_cl_set:
1827 1829 hasset.add(n)
1828 1830 prune_parents(filerevlog, hasset, msngset)
1829 1831
1830 1832 # A function generator function that sets up the a context for the
1831 1833 # inner function.
1832 1834 def lookup_filenode_link_func(fname):
1833 1835 msngset = msng_filenode_set[fname]
1834 1836 # Lookup the changenode the filenode belongs to.
1835 1837 def lookup_filenode_link(fnode):
1836 1838 return msngset[fnode]
1837 1839 return lookup_filenode_link
1838 1840
1839 1841 # Add the nodes that were explicitly requested.
1840 1842 def add_extra_nodes(name, nodes):
1841 1843 if not extranodes or name not in extranodes:
1842 1844 return
1843 1845
1844 1846 for node, linknode in extranodes[name]:
1845 1847 if node not in nodes:
1846 1848 nodes[node] = linknode
1847 1849
1848 1850 # Now that we have all theses utility functions to help out and
1849 1851 # logically divide up the task, generate the group.
1850 1852 def gengroup():
1851 1853 # The set of changed files starts empty.
1852 1854 changedfiles = {}
1853 1855 # Create a changenode group generator that will call our functions
1854 1856 # back to lookup the owning changenode and collect information.
1855 1857 group = cl.group(msng_cl_lst, identity,
1856 1858 manifest_and_file_collector(changedfiles))
1857 1859 for chnk in group:
1858 1860 yield chnk
1859 1861
1860 1862 # The list of manifests has been collected by the generator
1861 1863 # calling our functions back.
1862 1864 prune_manifests()
1863 1865 add_extra_nodes(1, msng_mnfst_set)
1864 1866 msng_mnfst_lst = msng_mnfst_set.keys()
1865 1867 # Sort the manifestnodes by revision number.
1866 1868 msng_mnfst_lst.sort(key=mnfst.rev)
1867 1869 # Create a generator for the manifestnodes that calls our lookup
1868 1870 # and data collection functions back.
1869 1871 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1870 1872 filenode_collector(changedfiles))
1871 1873 for chnk in group:
1872 1874 yield chnk
1873 1875
1874 1876 # These are no longer needed, dereference and toss the memory for
1875 1877 # them.
1876 1878 msng_mnfst_lst = None
1877 1879 msng_mnfst_set.clear()
1878 1880
1879 1881 if extranodes:
1880 1882 for fname in extranodes:
1881 1883 if isinstance(fname, int):
1882 1884 continue
1883 1885 msng_filenode_set.setdefault(fname, {})
1884 1886 changedfiles[fname] = 1
1885 1887 # Go through all our files in order sorted by name.
1886 1888 for fname in sorted(changedfiles):
1887 1889 filerevlog = self.file(fname)
1888 1890 if not len(filerevlog):
1889 1891 raise util.Abort(_("empty or missing revlog for %s") % fname)
1890 1892 # Toss out the filenodes that the recipient isn't really
1891 1893 # missing.
1892 1894 if fname in msng_filenode_set:
1893 1895 prune_filenodes(fname, filerevlog)
1894 1896 add_extra_nodes(fname, msng_filenode_set[fname])
1895 1897 msng_filenode_lst = msng_filenode_set[fname].keys()
1896 1898 else:
1897 1899 msng_filenode_lst = []
1898 1900 # If any filenodes are left, generate the group for them,
1899 1901 # otherwise don't bother.
1900 1902 if len(msng_filenode_lst) > 0:
1901 1903 yield changegroup.chunkheader(len(fname))
1902 1904 yield fname
1903 1905 # Sort the filenodes by their revision #
1904 1906 msng_filenode_lst.sort(key=filerevlog.rev)
1905 1907 # Create a group generator and only pass in a changenode
1906 1908 # lookup function as we need to collect no information
1907 1909 # from filenodes.
1908 1910 group = filerevlog.group(msng_filenode_lst,
1909 1911 lookup_filenode_link_func(fname))
1910 1912 for chnk in group:
1911 1913 yield chnk
1912 1914 if fname in msng_filenode_set:
1913 1915 # Don't need this anymore, toss it to free memory.
1914 1916 del msng_filenode_set[fname]
1915 1917 # Signal that no more groups are left.
1916 1918 yield changegroup.closechunk()
1917 1919
1918 1920 if msng_cl_lst:
1919 1921 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1920 1922
1921 1923 return util.chunkbuffer(gengroup())
1922 1924
1923 1925 def changegroup(self, basenodes, source):
1924 1926 # to avoid a race we use changegroupsubset() (issue1320)
1925 1927 return self.changegroupsubset(basenodes, self.heads(), source)
1926 1928
1927 1929 def _changegroup(self, common, source):
1928 1930 """Generate a changegroup of all nodes that we have that a recipient
1929 1931 doesn't.
1930 1932
1931 1933 This is much easier than the previous function as we can assume that
1932 1934 the recipient has any changenode we aren't sending them.
1933 1935
1934 1936 common is the set of common nodes between remote and self"""
1935 1937
1936 1938 self.hook('preoutgoing', throw=True, source=source)
1937 1939
1938 1940 cl = self.changelog
1939 1941 nodes = cl.findmissing(common)
1940 1942 revset = set([cl.rev(n) for n in nodes])
1941 1943 self.changegroupinfo(nodes, source)
1942 1944
1943 1945 def identity(x):
1944 1946 return x
1945 1947
1946 1948 def gennodelst(log):
1947 1949 for r in log:
1948 1950 if log.linkrev(r) in revset:
1949 1951 yield log.node(r)
1950 1952
1951 1953 def changed_file_collector(changedfileset):
1952 1954 def collect_changed_files(clnode):
1953 1955 c = cl.read(clnode)
1954 1956 changedfileset.update(c[3])
1955 1957 return collect_changed_files
1956 1958
1957 1959 def lookuprevlink_func(revlog):
1958 1960 def lookuprevlink(n):
1959 1961 return cl.node(revlog.linkrev(revlog.rev(n)))
1960 1962 return lookuprevlink
1961 1963
1962 1964 def gengroup():
1963 1965 # construct a list of all changed files
1964 1966 changedfiles = set()
1965 1967
1966 1968 for chnk in cl.group(nodes, identity,
1967 1969 changed_file_collector(changedfiles)):
1968 1970 yield chnk
1969 1971
1970 1972 mnfst = self.manifest
1971 1973 nodeiter = gennodelst(mnfst)
1972 1974 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1973 1975 yield chnk
1974 1976
1975 1977 for fname in sorted(changedfiles):
1976 1978 filerevlog = self.file(fname)
1977 1979 if not len(filerevlog):
1978 1980 raise util.Abort(_("empty or missing revlog for %s") % fname)
1979 1981 nodeiter = gennodelst(filerevlog)
1980 1982 nodeiter = list(nodeiter)
1981 1983 if nodeiter:
1982 1984 yield changegroup.chunkheader(len(fname))
1983 1985 yield fname
1984 1986 lookup = lookuprevlink_func(filerevlog)
1985 1987 for chnk in filerevlog.group(nodeiter, lookup):
1986 1988 yield chnk
1987 1989
1988 1990 yield changegroup.closechunk()
1989 1991
1990 1992 if nodes:
1991 1993 self.hook('outgoing', node=hex(nodes[0]), source=source)
1992 1994
1993 1995 return util.chunkbuffer(gengroup())
1994 1996
1995 1997 def addchangegroup(self, source, srctype, url, emptyok=False):
1996 1998 """add changegroup to repo.
1997 1999
1998 2000 return values:
1999 2001 - nothing changed or no source: 0
2000 2002 - more heads than before: 1+added heads (2..n)
2001 2003 - less heads than before: -1-removed heads (-2..-n)
2002 2004 - number of heads stays the same: 1
2003 2005 """
2004 2006 def csmap(x):
2005 2007 self.ui.debug(_("add changeset %s\n") % short(x))
2006 2008 return len(cl)
2007 2009
2008 2010 def revmap(x):
2009 2011 return cl.rev(x)
2010 2012
2011 2013 if not source:
2012 2014 return 0
2013 2015
2014 2016 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2015 2017
2016 2018 changesets = files = revisions = 0
2017 2019
2018 2020 # write changelog data to temp files so concurrent readers will not see
2019 2021 # inconsistent view
2020 2022 cl = self.changelog
2021 2023 cl.delayupdate()
2022 2024 oldheads = len(cl.heads())
2023 2025
2024 2026 tr = self.transaction()
2025 2027 try:
2026 2028 trp = weakref.proxy(tr)
2027 2029 # pull off the changeset group
2028 2030 self.ui.status(_("adding changesets\n"))
2029 2031 clstart = len(cl)
2030 2032 chunkiter = changegroup.chunkiter(source)
2031 2033 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
2032 2034 raise util.Abort(_("received changelog group is empty"))
2033 2035 clend = len(cl)
2034 2036 changesets = clend - clstart
2035 2037
2036 2038 # pull off the manifest group
2037 2039 self.ui.status(_("adding manifests\n"))
2038 2040 chunkiter = changegroup.chunkiter(source)
2039 2041 # no need to check for empty manifest group here:
2040 2042 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2041 2043 # no new manifest will be created and the manifest group will
2042 2044 # be empty during the pull
2043 2045 self.manifest.addgroup(chunkiter, revmap, trp)
2044 2046
2045 2047 # process the files
2046 2048 self.ui.status(_("adding file changes\n"))
2047 2049 while 1:
2048 2050 f = changegroup.getchunk(source)
2049 2051 if not f:
2050 2052 break
2051 2053 self.ui.debug(_("adding %s revisions\n") % f)
2052 2054 fl = self.file(f)
2053 2055 o = len(fl)
2054 2056 chunkiter = changegroup.chunkiter(source)
2055 2057 if fl.addgroup(chunkiter, revmap, trp) is None:
2056 2058 raise util.Abort(_("received file revlog group is empty"))
2057 2059 revisions += len(fl) - o
2058 2060 files += 1
2059 2061
2060 2062 newheads = len(cl.heads())
2061 2063 heads = ""
2062 2064 if oldheads and newheads != oldheads:
2063 2065 heads = _(" (%+d heads)") % (newheads - oldheads)
2064 2066
2065 2067 self.ui.status(_("added %d changesets"
2066 2068 " with %d changes to %d files%s\n")
2067 2069 % (changesets, revisions, files, heads))
2068 2070
2069 2071 if changesets > 0:
2070 2072 p = lambda: cl.writepending() and self.root or ""
2071 2073 self.hook('pretxnchangegroup', throw=True,
2072 2074 node=hex(cl.node(clstart)), source=srctype,
2073 2075 url=url, pending=p)
2074 2076
2075 2077 # make changelog see real files again
2076 2078 cl.finalize(trp)
2077 2079
2078 2080 tr.close()
2079 2081 finally:
2080 2082 del tr
2081 2083
2082 2084 if changesets > 0:
2083 2085 # forcefully update the on-disk branch cache
2084 2086 self.ui.debug(_("updating the branch cache\n"))
2085 2087 self.branchtags()
2086 2088 self.hook("changegroup", node=hex(cl.node(clstart)),
2087 2089 source=srctype, url=url)
2088 2090
2089 2091 for i in xrange(clstart, clend):
2090 2092 self.hook("incoming", node=hex(cl.node(i)),
2091 2093 source=srctype, url=url)
2092 2094
2093 2095 # never return 0 here:
2094 2096 if newheads < oldheads:
2095 2097 return newheads - oldheads - 1
2096 2098 else:
2097 2099 return newheads - oldheads + 1
2098 2100
2099 2101
2100 2102 def stream_in(self, remote):
2101 2103 fp = remote.stream_out()
2102 2104 l = fp.readline()
2103 2105 try:
2104 2106 resp = int(l)
2105 2107 except ValueError:
2106 2108 raise error.ResponseError(
2107 2109 _('Unexpected response from remote server:'), l)
2108 2110 if resp == 1:
2109 2111 raise util.Abort(_('operation forbidden by server'))
2110 2112 elif resp == 2:
2111 2113 raise util.Abort(_('locking the remote repository failed'))
2112 2114 elif resp != 0:
2113 2115 raise util.Abort(_('the server sent an unknown error code'))
2114 2116 self.ui.status(_('streaming all changes\n'))
2115 2117 l = fp.readline()
2116 2118 try:
2117 2119 total_files, total_bytes = map(int, l.split(' ', 1))
2118 2120 except (ValueError, TypeError):
2119 2121 raise error.ResponseError(
2120 2122 _('Unexpected response from remote server:'), l)
2121 2123 self.ui.status(_('%d files to transfer, %s of data\n') %
2122 2124 (total_files, util.bytecount(total_bytes)))
2123 2125 start = time.time()
2124 2126 for i in xrange(total_files):
2125 2127 # XXX doesn't support '\n' or '\r' in filenames
2126 2128 l = fp.readline()
2127 2129 try:
2128 2130 name, size = l.split('\0', 1)
2129 2131 size = int(size)
2130 2132 except (ValueError, TypeError):
2131 2133 raise error.ResponseError(
2132 2134 _('Unexpected response from remote server:'), l)
2133 2135 self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
2134 2136 # for backwards compat, name was partially encoded
2135 2137 ofp = self.sopener(store.decodedir(name), 'w')
2136 2138 for chunk in util.filechunkiter(fp, limit=size):
2137 2139 ofp.write(chunk)
2138 2140 ofp.close()
2139 2141 elapsed = time.time() - start
2140 2142 if elapsed <= 0:
2141 2143 elapsed = 0.001
2142 2144 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2143 2145 (util.bytecount(total_bytes), elapsed,
2144 2146 util.bytecount(total_bytes / elapsed)))
2145 2147 self.invalidate()
2146 2148 return len(self.heads()) + 1
2147 2149
2148 2150 def clone(self, remote, heads=[], stream=False):
2149 2151 '''clone remote repository.
2150 2152
2151 2153 keyword arguments:
2152 2154 heads: list of revs to clone (forces use of pull)
2153 2155 stream: use streaming clone if possible'''
2154 2156
2155 2157 # now, all clients that can request uncompressed clones can
2156 2158 # read repo formats supported by all servers that can serve
2157 2159 # them.
2158 2160
2159 2161 # if revlog format changes, client will have to check version
2160 2162 # and format flags on "stream" capability, and use
2161 2163 # uncompressed only if compatible.
2162 2164
2163 2165 if stream and not heads and remote.capable('stream'):
2164 2166 return self.stream_in(remote)
2165 2167 return self.pull(remote, heads)
2166 2168
2167 2169 # used to avoid circular references so destructors work
2168 2170 def aftertrans(files):
2169 2171 renamefiles = [tuple(t) for t in files]
2170 2172 def a():
2171 2173 for src, dest in renamefiles:
2172 2174 util.rename(src, dest)
2173 2175 return a
2174 2176
2175 2177 def instance(ui, path, create):
2176 2178 return localrepository(ui, util.drop_scheme('file', path), create)
2177 2179
2178 2180 def islocal(path):
2179 2181 return True
@@ -1,161 +1,161 b''
1 1 # transaction.py - simple journalling scheme for mercurial
2 2 #
3 3 # This transaction scheme is intended to gracefully handle program
4 4 # errors and interruptions. More serious failures like system crashes
5 5 # can be recovered with an fsck-like tool. As the whole repository is
6 6 # effectively log-structured, this should amount to simply truncating
7 7 # anything that isn't referenced in the changelog.
8 8 #
9 9 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
10 10 #
11 11 # This software may be used and distributed according to the terms of the
12 12 # GNU General Public License version 2, incorporated herein by reference.
13 13
14 14 from i18n import _
15 15 import os, errno
16 16 import error
17 17
18 18 def active(func):
19 19 def _active(self, *args, **kwds):
20 20 if self.count == 0:
21 21 raise error.Abort(_(
22 22 'cannot use transaction when it is already committed/aborted'))
23 23 return func(self, *args, **kwds)
24 24 return _active
25 25
26 26 def _playback(journal, report, opener, entries, unlink=True):
27 27 for f, o, ignore in entries:
28 28 if o or not unlink:
29 29 try:
30 30 opener(f, 'a').truncate(o)
31 31 except:
32 32 report(_("failed to truncate %s\n") % f)
33 33 raise
34 34 else:
35 35 try:
36 36 fn = opener(f).name
37 37 os.unlink(fn)
38 except OSError, inst:
38 except IOError, inst:
39 39 if inst.errno != errno.ENOENT:
40 40 raise
41 41 os.unlink(journal)
42 42
43 43 class transaction(object):
44 44 def __init__(self, report, opener, journal, after=None, createmode=None):
45 45 self.journal = None
46 46
47 47 self.count = 1
48 48 self.report = report
49 49 self.opener = opener
50 50 self.after = after
51 51 self.entries = []
52 52 self.map = {}
53 53 self.journal = journal
54 54 self._queue = []
55 55
56 56 self.file = open(self.journal, "w")
57 57 if createmode is not None:
58 58 os.chmod(self.journal, createmode & 0666)
59 59
60 60 def __del__(self):
61 61 if self.journal:
62 62 if self.entries: self._abort()
63 63 self.file.close()
64 64
65 65 @active
66 66 def startgroup(self):
67 67 self._queue.append([])
68 68
69 69 @active
70 70 def endgroup(self):
71 71 q = self._queue.pop()
72 72 d = ''.join(['%s\0%d\n' % (x[0], x[1]) for x in q])
73 73 self.entries.extend(q)
74 74 self.file.write(d)
75 75 self.file.flush()
76 76
77 77 @active
78 78 def add(self, file, offset, data=None):
79 79 if file in self.map: return
80 80
81 81 if self._queue:
82 82 self._queue[-1].append((file, offset, data))
83 83 return
84 84
85 85 self.entries.append((file, offset, data))
86 86 self.map[file] = len(self.entries) - 1
87 87 # add enough data to the journal to do the truncate
88 88 self.file.write("%s\0%d\n" % (file, offset))
89 89 self.file.flush()
90 90
91 91 @active
92 92 def find(self, file):
93 93 if file in self.map:
94 94 return self.entries[self.map[file]]
95 95 return None
96 96
97 97 @active
98 98 def replace(self, file, offset, data=None):
99 99 '''
100 100 replace can only replace already committed entries
101 101 that are not pending in the queue
102 102 '''
103 103
104 104 if file not in self.map:
105 105 raise KeyError(file)
106 106 index = self.map[file]
107 107 self.entries[index] = (file, offset, data)
108 108 self.file.write("%s\0%d\n" % (file, offset))
109 109 self.file.flush()
110 110
111 111 @active
112 112 def nest(self):
113 113 self.count += 1
114 114 return self
115 115
116 116 def running(self):
117 117 return self.count > 0
118 118
119 119 @active
120 120 def close(self):
121 121 self.count -= 1
122 122 if self.count != 0:
123 123 return
124 124 self.file.close()
125 125 self.entries = []
126 126 if self.after:
127 127 self.after()
128 128 else:
129 129 os.unlink(self.journal)
130 130 self.journal = None
131 131
132 132 @active
133 133 def abort(self):
134 134 self._abort()
135 135
136 136 def _abort(self):
137 137 self.count = 0
138 138 self.file.close()
139 139
140 140 if not self.entries: return
141 141
142 142 self.report(_("transaction abort!\n"))
143 143
144 144 try:
145 145 try:
146 146 _playback(self.journal, self.report, self.opener, self.entries, False)
147 147 self.report(_("rollback completed\n"))
148 148 except:
149 149 self.report(_("rollback failed - please run hg recover\n"))
150 150 finally:
151 151 self.journal = None
152 152
153 153
154 154 def rollback(opener, file, report):
155 155 entries = []
156 156
157 157 for l in open(file).readlines():
158 158 f, o = l.split('\0')
159 159 entries.append((f, int(o), None))
160 160
161 161 _playback(file, report, opener, entries)
@@ -1,1257 +1,1284 b''
1 1 # util.py - Mercurial utility functions and platform specfic implementations
2 2 #
3 3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2, incorporated herein by reference.
9 9
10 10 """Mercurial utility functions and platform specfic implementations.
11 11
12 12 This contains helper routines that are independent of the SCM core and
13 13 hide platform-specific details from the core.
14 14 """
15 15
16 16 from i18n import _
17 17 import error, osutil
18 18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
19 19 import os, stat, time, calendar, random, textwrap
20 20 import imp
21 21
22 22 # Python compatibility
23 23
def sha1(s):
    """Return a SHA-1 hash object for *s* (delegates to _fastsha1)."""
    return _fastsha1(s)

def _fastsha1(s):
    # On first use, locate the best available SHA-1 implementation
    # (hashlib on modern Pythons, the legacy sha module otherwise) and
    # rebind both module-level names directly to it, so every later
    # call skips this probe entirely.
    try:
        from hashlib import sha1 as _sha1
    except ImportError:
        from sha import sha as _sha1
    global _fastsha1, sha1
    _fastsha1 = sha1 = _sha1
    return _sha1(s)
38 38
import subprocess
closefds = os.name == 'posix'

def popen2(cmd):
    """Spawn *cmd* through the shell; return its (stdin, stdout) pipes.

    bufsize=-1 requests the system default buffer size: an unbuffered
    pipe (the bufsize=0 default) performs very poorly on Mac OS X
    (http://bugs.python.org/issue4194).
    """
    proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    return proc.stdin, proc.stdout
def popen3(cmd):
    """Spawn *cmd* through the shell; return (stdin, stdout, stderr) pipes.

    Uses the same system-default buffering as popen2 (see the note
    there about Mac OS X performance).
    """
    proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    return proc.stdin, proc.stdout, proc.stderr
55 55
def version():
    """Return version information if available."""
    # the __version__ module is generated at build time; a source
    # checkout that was never built simply does not have it
    try:
        import __version__
    except ImportError:
        return 'unknown'
    return __version__.version
63 63
64 64 # used by parsedate
65 65 defaultdateformats = (
66 66 '%Y-%m-%d %H:%M:%S',
67 67 '%Y-%m-%d %I:%M:%S%p',
68 68 '%Y-%m-%d %H:%M',
69 69 '%Y-%m-%d %I:%M%p',
70 70 '%Y-%m-%d',
71 71 '%m-%d',
72 72 '%m/%d',
73 73 '%m/%d/%y',
74 74 '%m/%d/%Y',
75 75 '%a %b %d %H:%M:%S %Y',
76 76 '%a %b %d %I:%M:%S%p %Y',
77 77 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
78 78 '%b %d %H:%M:%S %Y',
79 79 '%b %d %I:%M:%S%p %Y',
80 80 '%b %d %H:%M:%S',
81 81 '%b %d %I:%M:%S%p',
82 82 '%b %d %H:%M',
83 83 '%b %d %I:%M%p',
84 84 '%b %d %Y',
85 85 '%b %d',
86 86 '%H:%M:%S',
87 87 '%I:%M:%SP',
88 88 '%H:%M',
89 89 '%I:%M%p',
90 90 )
91 91
92 92 extendeddateformats = defaultdateformats + (
93 93 "%Y",
94 94 "%Y-%m",
95 95 "%b",
96 96 "%b %Y",
97 97 )
98 98
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX does not handle keyword arguments
    memo = {}
    if func.func_code.co_argcount == 1:
        # single-argument fast path: skips packing/unpacking an args tuple
        def cached(arg):
            if arg not in memo:
                memo[arg] = func(arg)
            return memo[arg]
    else:
        def cached(*args):
            if args not in memo:
                memo[args] = func(*args)
            return memo[args]

    return cached
117 117
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    results = {}
    order = []  # cached keys, least recently used first
    if func.func_code.co_argcount == 1:
        # single-argument fast path, mirroring cachefunc
        def cached(arg):
            if arg not in results:
                # evict the stalest entry once more than 20 are cached
                if len(results) > 20:
                    del results[order.pop(0)]
                results[arg] = func(arg)
            else:
                order.remove(arg)
            order.append(arg)
            return results[arg]
    else:
        def cached(*args):
            if args not in results:
                if len(results) > 20:
                    del results[order.pop(0)]
                results[args] = func(*args)
            else:
                order.remove(args)
            order.append(args)
            return results[args]

    return cached
144
class propertycache(object):
    """Descriptor that computes an attribute once and caches it.

    On first access the wrapped function is called and its result is
    stored on the instance under the function's name, shadowing the
    descriptor; later lookups are plain attribute reads.
    """
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        value = self.func(obj)
        setattr(obj, self.name, value)
        return value
126 153
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    stdout, _stderr = proc.communicate(s)
    return stdout
133 160
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if sys.platform == 'OpenVMS' and code & 1:
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explain_exit(code)))
        # fix: close the output file explicitly instead of leaking the
        # handle returned by open(...).read()
        fp = open(outname, 'rb')
        try:
            return fp.read()
        finally:
            fp.close()
    finally:
        # best-effort cleanup of the temporary files; fix: catch only
        # OSError instead of a bare except that would also swallow
        # KeyboardInterrupt/SystemExit
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass
162 189
# maps a command-prefix scheme to the function implementing it
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
}

def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # dispatch on a known scheme prefix; default to a plain pipe
    for prefix, fn in filtertable.iteritems():
        if cmd.startswith(prefix):
            return fn(s, cmd[len(prefix):].lstrip())
    return pipefilter(s, cmd)
174 201
def binary(s):
    """True when *s* is non-empty and contains a NUL byte."""
    if not s:
        return False
    return '\0' in s
178 205
def increasingchunks(source, min=1024, max=65536):
    '''yield no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        # floor(log2(x)), with log2(0) defined as 0
        if not x:
            return 0
        i = -1
        while x:
            x >>= 1
            i += 1
        return i

    pending = []
    size = 0
    for chunk in source:
        pending.append(chunk)
        size += len(chunk)
        if size < min:
            continue
        if min < max:
            # double the target, but jump ahead to the largest power of
            # two not exceeding what was actually collected, capped at max
            min = min << 1
            nmin = 1 << log2(size)
            if nmin > min:
                min = nmin
            if min > max:
                min = max
        yield ''.join(pending)
        pending = []
        size = 0
    if pending:
        yield ''.join(pending)
209 236
210 237 Abort = error.Abort
211 238
def always(fn):
    """Matcher that accepts every file name."""
    return True

def never(fn):
    """Matcher that rejects every file name."""
    return False
214 241
215 242 def pathto(root, n1, n2):
216 243 '''return the relative path from one place to another.
217 244 root should use os.sep to separate directories
218 245 n1 should use os.sep to separate directories
219 246 n2 should use "/" to separate directories
220 247 returns an os.sep-separated path.
221 248
222 249 If n1 is a relative path, it's assumed it's
223 250 relative to root.
224 251 n2 should always be relative to root.
225 252 '''
226 253 if not n1: return localpath(n2)
227 254 if os.path.isabs(n1):
228 255 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
229 256 return os.path.join(root, localpath(n2))
230 257 n2 = '/'.join((pconvert(root), n2))
231 258 a, b = splitpath(n1), n2.split('/')
232 259 a.reverse()
233 260 b.reverse()
234 261 while a and b and a[-1] == b[-1]:
235 262 a.pop()
236 263 b.pop()
237 264 b.reverse()
238 265 return os.sep.join((['..'] * len(a)) + b) or '.'
239 266
240 267 def canonpath(root, cwd, myname):
241 268 """return the canonical path of myname, given cwd and root"""
242 269 if root == os.sep:
243 270 rootsep = os.sep
244 271 elif endswithsep(root):
245 272 rootsep = root
246 273 else:
247 274 rootsep = root + os.sep
248 275 name = myname
249 276 if not os.path.isabs(name):
250 277 name = os.path.join(root, cwd, name)
251 278 name = os.path.normpath(name)
252 279 audit_path = path_auditor(root)
253 280 if name != rootsep and name.startswith(rootsep):
254 281 name = name[len(rootsep):]
255 282 audit_path(name)
256 283 return pconvert(name)
257 284 elif name == root:
258 285 return ''
259 286 else:
260 287 # Determine whether `name' is in the hierarchy at or beneath `root',
261 288 # by iterating name=dirname(name) until that causes no change (can't
262 289 # check name == '/', because that doesn't work on windows). For each
263 290 # `name', compare dev/inode numbers. If they match, the list `rel'
264 291 # holds the reversed list of components making up the relative file
265 292 # name we want.
266 293 root_st = os.stat(root)
267 294 rel = []
268 295 while True:
269 296 try:
270 297 name_st = os.stat(name)
271 298 except OSError:
272 299 break
273 300 if samestat(name_st, root_st):
274 301 if not rel:
275 302 # name was actually the same as root (maybe a symlink)
276 303 return ''
277 304 rel.reverse()
278 305 name = os.path.join(*rel)
279 306 audit_path(name)
280 307 return pconvert(name)
281 308 dirname, basename = os.path.split(name)
282 309 rel.append(basename)
283 310 if dirname == name:
284 311 break
285 312 name = dirname
286 313
287 314 raise Abort('%s not under root' % myname)
288 315
289 316 _hgexecutable = None
290 317
291 318 def main_is_frozen():
292 319 """return True if we are a frozen executable.
293 320
294 321 The code supports py2exe (most common, Windows only) and tools/freeze
295 322 (portable, not much used).
296 323 """
297 324 return (hasattr(sys, "frozen") or # new py2exe
298 325 hasattr(sys, "importers") or # old py2exe
299 326 imp.is_frozen("__main__")) # tools/freeze
300 327
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        # resolve lazily on first use and cache via set_hgexecutable
        env_hg = os.environ.get('HG')
        if env_hg:
            set_hgexecutable(env_hg)
        elif main_is_frozen():
            set_hgexecutable(sys.executable)
        else:
            set_hgexecutable(find_exe('hg') or 'hg')
    return _hgexecutable

def set_hgexecutable(path):
    """set location of the 'hg' executable"""
    global _hgexecutable
    _hgexecutable = path
320 347
321 348 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
322 349 '''enhanced shell command execution.
323 350 run with environment maybe modified, maybe in different dir.
324 351
325 352 if command fails and onerr is None, return status. if ui object,
326 353 print error message and return status, else raise onerr object as
327 354 exception.'''
328 355 def py2shell(val):
329 356 'convert python object into string that is useful to shell'
330 357 if val is None or val is False:
331 358 return '0'
332 359 if val is True:
333 360 return '1'
334 361 return str(val)
335 362 oldenv = {}
336 363 for k in environ:
337 364 oldenv[k] = os.environ.get(k)
338 365 if cwd is not None:
339 366 oldcwd = os.getcwd()
340 367 origcmd = cmd
341 368 if os.name == 'nt':
342 369 cmd = '"%s"' % cmd
343 370 try:
344 371 for k, v in environ.iteritems():
345 372 os.environ[k] = py2shell(v)
346 373 os.environ['HG'] = hgexecutable()
347 374 if cwd is not None and oldcwd != cwd:
348 375 os.chdir(cwd)
349 376 rc = os.system(cmd)
350 377 if sys.platform == 'OpenVMS' and rc & 1:
351 378 rc = 0
352 379 if rc and onerr:
353 380 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
354 381 explain_exit(rc)[0])
355 382 if errprefix:
356 383 errmsg = '%s: %s' % (errprefix, errmsg)
357 384 try:
358 385 onerr.warn(errmsg + '\n')
359 386 except AttributeError:
360 387 raise onerr(errmsg)
361 388 return rc
362 389 finally:
363 390 for k, v in oldenv.iteritems():
364 391 if v is None:
365 392 del os.environ[k]
366 393 else:
367 394 os.environ[k] = v
368 395 if cwd is not None and oldcwd != cwd:
369 396 os.chdir(oldcwd)
370 397
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # only translate the TypeError when it was raised by the
            # call frame itself (i.e. bad arguments) — a traceback of
            # depth one — not when it bubbled up from inside func
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check
382 409
383 410 # os.path.lexists is not available on python2.3
def lexists(filename):
    "test whether a file with this name exists. does not follow symlinks"
    try:
        os.lstat(filename)
    # fix: catch only OSError (missing file, permission, bad path)
    # instead of a bare except that also swallowed KeyboardInterrupt
    # and SystemExit
    except OSError:
        return False
    return True
391 418
def rename(src, dst):
    """forcibly rename a file"""
    try:
        os.rename(src, dst)
    except OSError:  # FIXME: check errno (EEXIST ?)
        # On windows, rename to an existing file is not allowed, so we
        # must delete the destination first. But if a file is open,
        # unlink only schedules the delete without performing it.
        # Rename happens immediately even for open files, so we rename
        # the destination to a random temporary name, delete that, and
        # only then rename src into place. The usual race this
        # introduces can't be avoided — we need the name to rename
        # into, not the file itself — but at worst the rename fails
        # and the current operation aborts.
        def tempname(prefix):
            for _attempt in range(10):
                candidate = '%s-%08x' % (prefix,
                                         random.randint(0, 0xffffffff))
                if not os.path.exists(candidate):
                    return candidate
            raise IOError(errno.EEXIST, "No usable temporary filename found")

        spare = tempname(dst)
        os.rename(dst, spare)
        os.unlink(spare)
        os.rename(src, dst)
422 449
def unlink(f):
    """unlink and remove the directory if it is empty"""
    os.unlink(f)
    # prune parent directories that are now empty; removedirs raises
    # OSError as soon as it hits a non-empty one, which we ignore
    try:
        os.removedirs(os.path.dirname(f))
    except OSError:
        pass
431 458
def copyfile(src, dest):
    "copy a file, preserving mode and atime/mtime"
    if os.path.islink(src):
        # recreate the symlink rather than copying its target
        try:
            os.unlink(dest)
        # fix: best-effort unlink should catch only OSError, not a bare
        # except that also swallows KeyboardInterrupt/SystemExit
        except OSError:
            pass
        os.symlink(os.readlink(src), dest)
    else:
        try:
            shutil.copyfile(src, dest)
            shutil.copystat(src, dest)
        except shutil.Error as inst:
            raise Abort(str(inst))
446 473
def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""
    if hardlink is None:
        # default: hardlink only when src and dst sit on the same device
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            copyfiles(os.path.join(src, name), os.path.join(dst, name),
                      hardlink)
    elif hardlink:
        try:
            os_link(src, dst)
        except (IOError, OSError):
            # fall back to plain copies for the rest of the tree
            hardlink = False
            shutil.copy(src, dst)
    else:
        shutil.copy(src, dst)
469 496
470 497 class path_auditor(object):
471 498 '''ensure that a filesystem path contains no banned components.
472 499 the following properties of a path are checked:
473 500
474 501 - under top-level .hg
475 502 - starts at the root of a windows drive
476 503 - contains ".."
477 504 - traverses a symlink (e.g. a/symlink_here/b)
478 505 - inside a nested repository'''
479 506
480 507 def __init__(self, root):
481 508 self.audited = set()
482 509 self.auditeddir = set()
483 510 self.root = root
484 511
485 512 def __call__(self, path):
486 513 if path in self.audited:
487 514 return
488 515 normpath = os.path.normcase(path)
489 516 parts = splitpath(normpath)
490 517 if (os.path.splitdrive(path)[0]
491 518 or parts[0].lower() in ('.hg', '.hg.', '')
492 519 or os.pardir in parts):
493 520 raise Abort(_("path contains illegal component: %s") % path)
494 521 if '.hg' in path.lower():
495 522 lparts = [p.lower() for p in parts]
496 523 for p in '.hg', '.hg.':
497 524 if p in lparts[1:]:
498 525 pos = lparts.index(p)
499 526 base = os.path.join(*parts[:pos])
500 527 raise Abort(_('path %r is inside repo %r') % (path, base))
501 528 def check(prefix):
502 529 curpath = os.path.join(self.root, prefix)
503 530 try:
504 531 st = os.lstat(curpath)
505 532 except OSError, err:
506 533 # EINVAL can be raised as invalid path syntax under win32.
507 534 # They must be ignored for patterns can be checked too.
508 535 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
509 536 raise
510 537 else:
511 538 if stat.S_ISLNK(st.st_mode):
512 539 raise Abort(_('path %r traverses symbolic link %r') %
513 540 (path, prefix))
514 541 elif (stat.S_ISDIR(st.st_mode) and
515 542 os.path.isdir(os.path.join(curpath, '.hg'))):
516 543 raise Abort(_('path %r is inside repo %r') %
517 544 (path, prefix))
518 545 parts.pop()
519 546 prefixes = []
520 547 while parts:
521 548 prefix = os.sep.join(parts)
522 549 if prefix in self.auditeddir:
523 550 break
524 551 check(prefix)
525 552 prefixes.append(prefix)
526 553 parts.pop()
527 554
528 555 self.audited.add(path)
529 556 # only add prefixes to the cache after checking everything: we don't
530 557 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
531 558 self.auditeddir.update(prefixes)
532 559
533 560 def nlinks(pathname):
534 561 """Return number of hardlinks for the given file."""
535 562 return os.lstat(pathname).st_nlink
536 563
537 564 if hasattr(os, 'link'):
538 565 os_link = os.link
539 566 else:
540 567 def os_link(src, dst):
541 568 raise OSError(0, _("Hardlinks not supported"))
542 569
543 570 def lookup_reg(key, name=None, scope=None):
544 571 return None
545 572
546 573 if os.name == 'nt':
547 574 from windows import *
548 575 else:
549 576 from posix import *
550 577
def makelock(info, pathname):
    """Create a lock at *pathname* carrying *info*.

    Prefers an atomic symlink whose target is *info*; on platforms
    without symlinks, falls back to exclusively creating a plain file
    containing *info*.
    """
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        # an existing lock is a hard failure; other symlink errors
        # fall through to the plain-file scheme
        if why.errno == errno.EEXIST:
            raise
    except AttributeError:  # no symlink in os
        pass

    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)
563 590
def readlock(pathname):
    """Return the information stored in the lock at *pathname*.

    Reads the symlink target when the lock is a symlink, otherwise
    falls back to reading the lock file's contents.
    """
    try:
        return os.readlink(pathname)
    except OSError as why:
        # EINVAL: not a symlink; ENOSYS: symlinks unsupported —
        # both mean "try the plain-file scheme"
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError:  # no symlink in os
        pass
    return posixfile(pathname).read()
573 600
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        # no fileno(): fall back to stat'ing the object by its name
        return os.stat(fp.name)
580 607
581 608 # File system features
582 609
def checkcase(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.stat(path)
    d, b = os.path.split(path)
    # build the case-folded twin of the final component
    p2 = os.path.join(d, b.upper())
    if path == p2:
        p2 = os.path.join(d, b.lower())
    try:
        s2 = os.stat(p2)
        if s2 == s1:
            # twin resolves to the same file -> case-insensitive
            return False
        return True
    # fix: catch only OSError (twin does not exist) instead of a bare
    # except that also swallowed KeyboardInterrupt/SystemExit
    except OSError:
        return True
602 629
603 630 _fspathcache = {}
604 631 def fspath(name, root):
605 632 '''Get name in the case stored in the filesystem
606 633
607 634 The name is either relative to root, or it is an absolute path starting
608 635 with root. Note that this function is unnecessary, and should not be
609 636 called, for case-sensitive filesystems (simply because it's expensive).
610 637 '''
611 638 # If name is absolute, make it relative
612 639 if name.lower().startswith(root.lower()):
613 640 l = len(root)
614 641 if name[l] == os.sep or name[l] == os.altsep:
615 642 l = l + 1
616 643 name = name[l:]
617 644
618 645 if not os.path.exists(os.path.join(root, name)):
619 646 return None
620 647
621 648 seps = os.sep
622 649 if os.altsep:
623 650 seps = seps + os.altsep
624 651 # Protect backslashes. This gets silly very quickly.
625 652 seps.replace('\\','\\\\')
626 653 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
627 654 dir = os.path.normcase(os.path.normpath(root))
628 655 result = []
629 656 for part, sep in pattern.findall(name):
630 657 if sep:
631 658 result.append(sep)
632 659 continue
633 660
634 661 if dir not in _fspathcache:
635 662 _fspathcache[dir] = os.listdir(dir)
636 663 contents = _fspathcache[dir]
637 664
638 665 lpart = part.lower()
639 666 for n in contents:
640 667 if n.lower() == lpart:
641 668 result.append(n)
642 669 break
643 670 else:
644 671 # Cannot happen, as the file exists!
645 672 result.append(part)
646 673 dir = os.path.join(dir, lpart)
647 674
648 675 return ''.join(result)
649 676
650 677 def checkexec(path):
651 678 """
652 679 Check whether the given path is on a filesystem with UNIX-like exec flags
653 680
654 681 Requires a directory (like /foo/.hg)
655 682 """
656 683
657 684 # VFAT on some Linux versions can flip mode but it doesn't persist
658 685 # a FS remount. Frequently we can detect it if files are created
659 686 # with exec bit on.
660 687
661 688 try:
662 689 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
663 690 fh, fn = tempfile.mkstemp("", "", path)
664 691 try:
665 692 os.close(fh)
666 693 m = os.stat(fn).st_mode & 0777
667 694 new_file_has_exec = m & EXECFLAGS
668 695 os.chmod(fn, m ^ EXECFLAGS)
669 696 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
670 697 finally:
671 698 os.unlink(fn)
672 699 except (IOError, OSError):
673 700 # we don't care, the user probably won't be able to commit anyway
674 701 return False
675 702 return not (new_file_has_exec or exec_flags_cannot_flip)
676 703
def checklink(path):
    """check whether the given path is on a symlink-capable filesystem"""
    # mktemp is not racy here because symlink creation fails if the
    # chosen name already exists
    name = tempfile.mktemp(dir=path)
    try:
        os.symlink(".", name)
        os.unlink(name)
        return True
    except (OSError, AttributeError):
        # OSError: filesystem refused; AttributeError: no os.symlink
        return False
688 715
def needbinarypatch():
    """return True if patches should be applied in binary mode by default."""
    return os.name == 'nt'
692 719
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(os.sep):
        return True
    # os.altsep is None on platforms with a single separator
    return os.altsep and path.endswith(os.altsep)
696 723
def splitpath(path):
    '''Split path by os.sep.

    os.altsep is deliberately ignored: this is just a readable
    alternative to "path.split(os.sep)". Run os.path.normpath() on the
    path first if needed.'''
    return path.split(os.sep)
704 731
def gui():
    '''Are we running in a GUI?'''
    # Windows and classic Mac always count as GUI; elsewhere the
    # DISPLAY environment variable decides (returned as-is, truthy
    # when set)
    if os.name in ("nt", "mac"):
        return True
    return os.environ.get("DISPLAY")
708 735
709 736 def mktempcopy(name, emptyok=False, createmode=None):
710 737 """Create a temporary file with the same contents from name
711 738
712 739 The permission bits are copied from the original file.
713 740
714 741 If the temporary file is going to be truncated immediately, you
715 742 can use emptyok=True as an optimization.
716 743
717 744 Returns the name of the temporary file.
718 745 """
719 746 d, fn = os.path.split(name)
720 747 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
721 748 os.close(fd)
722 749 # Temporary files are created with mode 0600, which is usually not
723 750 # what we want. If the original file already exists, just copy
724 751 # its mode. Otherwise, manually obey umask.
725 752 try:
726 753 st_mode = os.lstat(name).st_mode & 0777
727 754 except OSError, inst:
728 755 if inst.errno != errno.ENOENT:
729 756 raise
730 757 st_mode = createmode
731 758 if st_mode is None:
732 759 st_mode = ~umask
733 760 st_mode &= 0666
734 761 os.chmod(temp, st_mode)
735 762 if emptyok:
736 763 return temp
737 764 try:
738 765 try:
739 766 ifp = posixfile(name, "rb")
740 767 except IOError, inst:
741 768 if inst.errno == errno.ENOENT:
742 769 return temp
743 770 if not getattr(inst, 'filename', None):
744 771 inst.filename = name
745 772 raise
746 773 ofp = posixfile(temp, "wb")
747 774 for chunk in filechunkiter(ifp):
748 775 ofp.write(chunk)
749 776 ifp.close()
750 777 ofp.close()
751 778 except:
752 779 try: os.unlink(temp)
753 780 except: pass
754 781 raise
755 782 return temp
756 783
757 784 class atomictempfile(object):
758 785 """file-like object that atomically updates a file
759 786
760 787 All writes will be redirected to a temporary copy of the original
761 788 file. When rename is called, the copy is renamed to the original
762 789 name, making the changes visible.
763 790 """
764 791 def __init__(self, name, mode, createmode):
765 792 self.__name = name
766 793 self._fp = None
767 794 self.temp = mktempcopy(name, emptyok=('w' in mode),
768 795 createmode=createmode)
769 796 self._fp = posixfile(self.temp, mode)
770 797
771 798 def __getattr__(self, name):
772 799 return getattr(self._fp, name)
773 800
774 801 def rename(self):
775 802 if not self._fp.closed:
776 803 self._fp.close()
777 804 rename(self.temp, localpath(self.__name))
778 805
779 806 def __del__(self):
780 807 if not self._fp:
781 808 return
782 809 if not self._fp.closed:
783 810 try:
784 811 os.unlink(self.temp)
785 812 except: pass
786 813 self._fp.close()
787 814
def makedirs(name, mode=None):
    """recursive directory creation with parent mode inheritance"""
    try:
        os.mkdir(name)
        if mode is not None:
            os.chmod(name, mode)
        return
    except OSError as err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT:
            raise
    # parent is missing: create it first, then retry this directory
    parent = os.path.abspath(os.path.dirname(name))
    makedirs(parent, mode)
    makedirs(name, mode)
803 830
804 831 class opener(object):
805 832 """Open files relative to a base directory
806 833
807 834 This class is used to hide the details of COW semantics and
808 835 remote file access from higher level code.
809 836 """
810 837 def __init__(self, base, audit=True):
811 838 self.base = base
812 839 if audit:
813 840 self.audit_path = path_auditor(base)
814 841 else:
815 842 self.audit_path = always
816 843 self.createmode = None
817 844
818 845 def __getattr__(self, name):
819 846 if name == '_can_symlink':
820 847 self._can_symlink = checklink(self.base)
821 848 return self._can_symlink
822 849 raise AttributeError(name)
823 850
824 851 def _fixfilemode(self, name):
825 852 if self.createmode is None:
826 853 return
827 854 os.chmod(name, self.createmode & 0666)
828 855
829 856 def __call__(self, path, mode="r", text=False, atomictemp=False):
830 857 self.audit_path(path)
831 858 f = os.path.join(self.base, path)
832 859
833 860 if not text and "b" not in mode:
834 861 mode += "b" # for that other OS
835 862
836 863 nlink = -1
837 864 if mode not in ("r", "rb"):
838 865 try:
839 866 nlink = nlinks(f)
840 867 except OSError:
841 868 nlink = 0
842 869 d = os.path.dirname(f)
843 870 if not os.path.isdir(d):
844 871 makedirs(d, self.createmode)
845 872 if atomictemp:
846 873 return atomictempfile(f, mode, self.createmode)
847 874 if nlink > 1:
848 875 rename(mktempcopy(f), f)
849 876 fp = posixfile(f, mode)
850 877 if nlink == 0:
851 878 self._fixfilemode(f)
852 879 return fp
853 880
854 881 def symlink(self, src, dst):
855 882 self.audit_path(dst)
856 883 linkname = os.path.join(self.base, dst)
857 884 try:
858 885 os.unlink(linkname)
859 886 except OSError:
860 887 pass
861 888
862 889 dirname = os.path.dirname(linkname)
863 890 if not os.path.exists(dirname):
864 891 makedirs(dirname, self.createmode)
865 892
866 893 if self._can_symlink:
867 894 try:
868 895 os.symlink(src, linkname)
869 896 except OSError, err:
870 897 raise OSError(err.errno, _('could not symlink to %r: %s') %
871 898 (src, err.strerror), linkname)
872 899 else:
873 900 f = self(dst, "w")
874 901 f.write(src)
875 902 f.close()
876 903 self._fixfilemode(dst)
877 904
878 905 class chunkbuffer(object):
879 906 """Allow arbitrary sized chunks of data to be efficiently read from an
880 907 iterator over chunks of arbitrary size."""
881 908
882 909 def __init__(self, in_iter):
883 910 """in_iter is the iterator that's iterating over the input chunks.
884 911 targetsize is how big a buffer to try to maintain."""
885 912 self.iter = iter(in_iter)
886 913 self.buf = ''
887 914 self.targetsize = 2**16
888 915
889 916 def read(self, l):
890 917 """Read L bytes of data from the iterator of chunks of data.
891 918 Returns less than L bytes if the iterator runs dry."""
892 919 if l > len(self.buf) and self.iter:
893 920 # Clamp to a multiple of self.targetsize
894 921 targetsize = max(l, self.targetsize)
895 922 collector = cStringIO.StringIO()
896 923 collector.write(self.buf)
897 924 collected = len(self.buf)
898 925 for chunk in self.iter:
899 926 collector.write(chunk)
900 927 collected += len(chunk)
901 928 if collected >= targetsize:
902 929 break
903 930 if collected < targetsize:
904 931 self.iter = False
905 932 self.buf = collector.getvalue()
906 933 if len(self.buf) == l:
907 934 s, self.buf = str(self.buf), ''
908 935 else:
909 936 s, self.buf = self.buf[:l], buffer(self.buf, l)
910 937 return s
911 938
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        nbytes = size if limit is None else min(limit, size)
        # a zero request short-circuits to a falsy value without reading
        data = nbytes and f.read(nbytes)
        if not data:
            return
        if limit:
            limit -= len(data)
        yield data
928 955
def makedate():
    """Return the current local time as a (unixtime, tzoffset) pair."""
    lt = time.localtime()
    # use the DST-adjusted zone offset when DST is in effect
    tz = time.altzone if (lt[8] == 1 and time.daylight) else time.timezone
    return time.mktime(lt), tz
936 963
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.

    unixtime is seconds since the epoch; offset is the time zone's
    number of seconds away from UTC. The %1/%2 format extensions
    expand to the signed hour and minute halves of that offset.
    """
    t, tz = date or makedate()
    if "%1" in format or "%2" in format:
        # positive offsets are *behind* UTC, hence the inverted sign
        sign = "-" if tz > 0 else "+"
        minutes = abs(tz) // 60
        format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    return time.strftime(format, time.gmtime(float(t) - tz))

def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into iso 8631 date."""
    return datestr(date, format='%Y-%m-%d')
954 981
def strdate(string, format, defaults=None):
    """parse a localized time string and return a (unixtime, offset) tuple.

    if the string cannot be parsed, ValueError is raised.

    defaults, when given, maps strptime field groups to fallback
    string values appended when the format lacks those fields.
    """
    if defaults is None:
        # fix: avoid a mutable default argument; an empty mapping
        # means "no fallback fields", same as the old [] default
        defaults = {}

    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset is not None:
        # drop the trailing timezone token before strptime
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # no explicit zone: interpret in the local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
990 1017
991 1018 def parsedate(date, formats=None, defaults=None):
992 1019 """parse a localized date/time string and return a (unixtime, offset) tuple.
993 1020
994 1021 The date may be a "unixtime offset" string or in one of the specified
995 1022 formats. If the date already is a (unixtime, offset) tuple, it is returned.
996 1023 """
997 1024 if not date:
998 1025 return 0, 0
999 1026 if isinstance(date, tuple) and len(date) == 2:
1000 1027 return date
1001 1028 if not formats:
1002 1029 formats = defaultdateformats
1003 1030 date = date.strip()
1004 1031 try:
1005 1032 when, offset = map(int, date.split(' '))
1006 1033 except ValueError:
1007 1034 # fill out defaults
1008 1035 if not defaults:
1009 1036 defaults = {}
1010 1037 now = makedate()
1011 1038 for part in "d mb yY HI M S".split():
1012 1039 if part not in defaults:
1013 1040 if part[0] in "HMS":
1014 1041 defaults[part] = "00"
1015 1042 else:
1016 1043 defaults[part] = datestr(now, "%" + part[0])
1017 1044
1018 1045 for format in formats:
1019 1046 try:
1020 1047 when, offset = strdate(date, format, defaults)
1021 1048 except (ValueError, OverflowError):
1022 1049 pass
1023 1050 else:
1024 1051 break
1025 1052 else:
1026 1053 raise Abort(_('invalid date: %r ') % date)
1027 1054 # validate explicit (probably user-specified) date and
1028 1055 # time zone offset. values must fit in signed 32 bits for
1029 1056 # current 32-bit linux runtimes. timezones go from UTC-12
1030 1057 # to UTC+14
1031 1058 if abs(when) > 0x7fffffff:
1032 1059 raise Abort(_('date exceeds 32 bits: %d') % when)
1033 1060 if offset < -50400 or offset > 43200:
1034 1061 raise Abort(_('impossible time zone offset: %d') % offset)
1035 1062 return when, offset
1036 1063
1037 1064 def matchdate(date):
1038 1065 """Return a function that matches a given date match specifier
1039 1066
1040 1067 Formats include:
1041 1068
1042 1069 '{date}' match a given date to the accuracy provided
1043 1070
1044 1071 '<{date}' on or before a given date
1045 1072
1046 1073 '>{date}' on or after a given date
1047 1074
1048 1075 """
1049 1076
1050 1077 def lower(date):
1051 1078 d = dict(mb="1", d="1")
1052 1079 return parsedate(date, extendeddateformats, d)[0]
1053 1080
1054 1081 def upper(date):
1055 1082 d = dict(mb="12", HI="23", M="59", S="59")
1056 1083 for days in "31 30 29".split():
1057 1084 try:
1058 1085 d["d"] = days
1059 1086 return parsedate(date, extendeddateformats, d)[0]
1060 1087 except:
1061 1088 pass
1062 1089 d["d"] = "28"
1063 1090 return parsedate(date, extendeddateformats, d)[0]
1064 1091
1065 1092 date = date.strip()
1066 1093 if date[0] == "<":
1067 1094 when = upper(date[1:])
1068 1095 return lambda x: x <= when
1069 1096 elif date[0] == ">":
1070 1097 when = lower(date[1:])
1071 1098 return lambda x: x >= when
1072 1099 elif date[0] == "-":
1073 1100 try:
1074 1101 days = int(date[1:])
1075 1102 except ValueError:
1076 1103 raise Abort(_("invalid day spec: %s") % date[1:])
1077 1104 when = makedate()[0] - days * 3600 * 24
1078 1105 return lambda x: x >= when
1079 1106 elif " to " in date:
1080 1107 a, b = date.split(" to ")
1081 1108 start, stop = lower(a), upper(b)
1082 1109 return lambda x: x >= start and x <= stop
1083 1110 else:
1084 1111 start, stop = lower(date), upper(date)
1085 1112 return lambda x: x >= start and x <= stop
1086 1113
1087 1114 def shortuser(user):
1088 1115 """Return a short representation of a user name or email address."""
1089 1116 f = user.find('@')
1090 1117 if f >= 0:
1091 1118 user = user[:f]
1092 1119 f = user.find('<')
1093 1120 if f >= 0:
1094 1121 user = user[f+1:]
1095 1122 f = user.find(' ')
1096 1123 if f >= 0:
1097 1124 user = user[:f]
1098 1125 f = user.find('.')
1099 1126 if f >= 0:
1100 1127 user = user[:f]
1101 1128 return user
1102 1129
1103 1130 def email(author):
1104 1131 '''get email of author.'''
1105 1132 r = author.find('>')
1106 1133 if r == -1: r = None
1107 1134 return author[author.find('<')+1:r]
1108 1135
1109 1136 def ellipsis(text, maxlength=400):
1110 1137 """Trim string to at most maxlength (default: 400) characters."""
1111 1138 if len(text) <= maxlength:
1112 1139 return text
1113 1140 else:
1114 1141 return "%s..." % (text[:maxlength-3])
1115 1142
1116 1143 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1117 1144 '''yield every hg repository under path, recursively.'''
1118 1145 def errhandler(err):
1119 1146 if err.filename == path:
1120 1147 raise err
1121 1148 if followsym and hasattr(os.path, 'samestat'):
1122 1149 def _add_dir_if_not_there(dirlst, dirname):
1123 1150 match = False
1124 1151 samestat = os.path.samestat
1125 1152 dirstat = os.stat(dirname)
1126 1153 for lstdirstat in dirlst:
1127 1154 if samestat(dirstat, lstdirstat):
1128 1155 match = True
1129 1156 break
1130 1157 if not match:
1131 1158 dirlst.append(dirstat)
1132 1159 return not match
1133 1160 else:
1134 1161 followsym = False
1135 1162
1136 1163 if (seen_dirs is None) and followsym:
1137 1164 seen_dirs = []
1138 1165 _add_dir_if_not_there(seen_dirs, path)
1139 1166 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1140 1167 if '.hg' in dirs:
1141 1168 yield root # found a repository
1142 1169 qroot = os.path.join(root, '.hg', 'patches')
1143 1170 if os.path.isdir(os.path.join(qroot, '.hg')):
1144 1171 yield qroot # we have a patch queue repo here
1145 1172 if recurse:
1146 1173 # avoid recursing inside the .hg directory
1147 1174 dirs.remove('.hg')
1148 1175 else:
1149 1176 dirs[:] = [] # don't descend further
1150 1177 elif followsym:
1151 1178 newdirs = []
1152 1179 for d in dirs:
1153 1180 fname = os.path.join(root, d)
1154 1181 if _add_dir_if_not_there(seen_dirs, fname):
1155 1182 if os.path.islink(fname):
1156 1183 for hgname in walkrepos(fname, True, seen_dirs):
1157 1184 yield hgname
1158 1185 else:
1159 1186 newdirs.append(d)
1160 1187 dirs[:] = newdirs
1161 1188
1162 1189 _rcpath = None
1163 1190
1164 1191 def os_rcpath():
1165 1192 '''return default os-specific hgrc search path'''
1166 1193 path = system_rcpath()
1167 1194 path.extend(user_rcpath())
1168 1195 path = [os.path.normpath(f) for f in path]
1169 1196 return path
1170 1197
1171 1198 def rcpath():
1172 1199 '''return hgrc search path. if env var HGRCPATH is set, use it.
1173 1200 for each item in path, if directory, use files ending in .rc,
1174 1201 else use item.
1175 1202 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1176 1203 if no HGRCPATH, use default os-specific path.'''
1177 1204 global _rcpath
1178 1205 if _rcpath is None:
1179 1206 if 'HGRCPATH' in os.environ:
1180 1207 _rcpath = []
1181 1208 for p in os.environ['HGRCPATH'].split(os.pathsep):
1182 1209 if not p: continue
1183 1210 if os.path.isdir(p):
1184 1211 for f, kind in osutil.listdir(p):
1185 1212 if f.endswith('.rc'):
1186 1213 _rcpath.append(os.path.join(p, f))
1187 1214 else:
1188 1215 _rcpath.append(p)
1189 1216 else:
1190 1217 _rcpath = os_rcpath()
1191 1218 return _rcpath
1192 1219
1193 1220 def bytecount(nbytes):
1194 1221 '''return byte count formatted as readable string, with units'''
1195 1222
1196 1223 units = (
1197 1224 (100, 1<<30, _('%.0f GB')),
1198 1225 (10, 1<<30, _('%.1f GB')),
1199 1226 (1, 1<<30, _('%.2f GB')),
1200 1227 (100, 1<<20, _('%.0f MB')),
1201 1228 (10, 1<<20, _('%.1f MB')),
1202 1229 (1, 1<<20, _('%.2f MB')),
1203 1230 (100, 1<<10, _('%.0f KB')),
1204 1231 (10, 1<<10, _('%.1f KB')),
1205 1232 (1, 1<<10, _('%.2f KB')),
1206 1233 (1, 1, _('%.0f bytes')),
1207 1234 )
1208 1235
1209 1236 for multiplier, divisor, format in units:
1210 1237 if nbytes >= divisor * multiplier:
1211 1238 return format % (nbytes / float(divisor))
1212 1239 return units[-1][2] % nbytes
1213 1240
1214 1241 def drop_scheme(scheme, path):
1215 1242 sc = scheme + ':'
1216 1243 if path.startswith(sc):
1217 1244 path = path[len(sc):]
1218 1245 if path.startswith('//'):
1219 1246 path = path[2:]
1220 1247 return path
1221 1248
1222 1249 def uirepr(s):
1223 1250 # Avoid double backslash in Windows path repr()
1224 1251 return repr(s).replace('\\\\', '\\')
1225 1252
1226 1253 def termwidth():
1227 1254 if 'COLUMNS' in os.environ:
1228 1255 try:
1229 1256 return int(os.environ['COLUMNS'])
1230 1257 except ValueError:
1231 1258 pass
1232 1259 try:
1233 1260 import termios, array, fcntl
1234 1261 for dev in (sys.stdout, sys.stdin):
1235 1262 try:
1236 1263 try:
1237 1264 fd = dev.fileno()
1238 1265 except AttributeError:
1239 1266 continue
1240 1267 if not os.isatty(fd):
1241 1268 continue
1242 1269 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1243 1270 return array.array('h', arri)[1]
1244 1271 except ValueError:
1245 1272 pass
1246 1273 except ImportError:
1247 1274 pass
1248 1275 return 80
1249 1276
1250 1277 def wrap(line, hangindent, width=78):
1251 1278 padding = '\n' + ' ' * hangindent
1252 1279 return padding.join(textwrap.wrap(line, width=width - hangindent))
1253 1280
1254 1281 def iterlines(iterator):
1255 1282 for chunk in iterator:
1256 1283 for line in chunk.splitlines():
1257 1284 yield line
@@ -1,284 +1,282 b''
1 1 # windows.py - Windows utility function implementations for Mercurial
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2, incorporated herein by reference.
7 7
8 8 from i18n import _
9 9 import osutil, error
10 10 import errno, msvcrt, os, re, sys
11 11
12 12 nulldev = 'NUL:'
13 13 umask = 002
14 14
15 15 # wrap osutil.posixfile to provide friendlier exceptions
16 16 def posixfile(name, mode='r', buffering=-1):
17 17 try:
18 18 return osutil.posixfile(name, mode, buffering)
19 19 except WindowsError, err:
20 20 raise IOError(err.errno, err.strerror)
21 21 posixfile.__doc__ = osutil.posixfile.__doc__
22 22
23 23 class winstdout(object):
24 24 '''stdout on windows misbehaves if sent through a pipe'''
25 25
26 26 def __init__(self, fp):
27 27 self.fp = fp
28 28
29 29 def __getattr__(self, key):
30 30 return getattr(self.fp, key)
31 31
32 32 def close(self):
33 33 try:
34 34 self.fp.close()
35 35 except: pass
36 36
37 37 def write(self, s):
38 38 try:
39 39 # This is workaround for "Not enough space" error on
40 40 # writing large size of data to console.
41 41 limit = 16000
42 42 l = len(s)
43 43 start = 0
44 44 while start < l:
45 45 end = start + limit
46 46 self.fp.write(s[start:end])
47 47 start = end
48 48 except IOError, inst:
49 49 if inst.errno != 0: raise
50 50 self.close()
51 51 raise IOError(errno.EPIPE, 'Broken pipe')
52 52
53 53 def flush(self):
54 54 try:
55 55 return self.fp.flush()
56 56 except IOError, inst:
57 57 if inst.errno != errno.EINVAL: raise
58 58 self.close()
59 59 raise IOError(errno.EPIPE, 'Broken pipe')
60 60
61 61 sys.stdout = winstdout(sys.stdout)
62 62
63 63 def _is_win_9x():
64 64 '''return true if run on windows 95, 98 or me.'''
65 65 try:
66 66 return sys.getwindowsversion()[3] == 1
67 67 except AttributeError:
68 68 return 'command' in os.environ.get('comspec', '')
69 69
70 70 def openhardlinks():
71 71 return not _is_win_9x() and "win32api" in globals()
72 72
73 73 def system_rcpath():
74 74 try:
75 75 return system_rcpath_win32()
76 76 except:
77 77 return [r'c:\mercurial\mercurial.ini']
78 78
79 79 def user_rcpath():
80 80 '''return os-specific hgrc search path to the user dir'''
81 81 try:
82 82 path = user_rcpath_win32()
83 83 except:
84 84 home = os.path.expanduser('~')
85 85 path = [os.path.join(home, 'mercurial.ini'),
86 86 os.path.join(home, '.hgrc')]
87 87 userprofile = os.environ.get('USERPROFILE')
88 88 if userprofile:
89 89 path.append(os.path.join(userprofile, 'mercurial.ini'))
90 90 path.append(os.path.join(userprofile, '.hgrc'))
91 91 return path
92 92
93 93 def parse_patch_output(output_line):
94 94 """parses the output produced by patch and returns the filename"""
95 95 pf = output_line[14:]
96 96 if pf[0] == '`':
97 97 pf = pf[1:-1] # Remove the quotes
98 98 return pf
99 99
100 100 def sshargs(sshcmd, host, user, port):
101 101 '''Build argument list for ssh or Plink'''
102 102 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
103 103 args = user and ("%s@%s" % (user, host)) or host
104 104 return port and ("%s %s %s" % (args, pflag, port)) or args
105 105
106 106 def testpid(pid):
107 107 '''return False if pid dead, True if running or not known'''
108 108 return True
109 109
110 110 def set_flags(f, l, x):
111 111 pass
112 112
113 113 def set_binary(fd):
114 114 # When run without console, pipes may expose invalid
115 115 # fileno(), usually set to -1.
116 116 if hasattr(fd, 'fileno') and fd.fileno() >= 0:
117 117 msvcrt.setmode(fd.fileno(), os.O_BINARY)
118 118
119 119 def pconvert(path):
120 120 return '/'.join(path.split(os.sep))
121 121
122 122 def localpath(path):
123 123 return path.replace('/', '\\')
124 124
125 125 def normpath(path):
126 126 return pconvert(os.path.normpath(path))
127 127
128 128 def samestat(s1, s2):
129 129 return False
130 130
131 131 # A sequence of backslashes is special iff it precedes a double quote:
132 132 # - if there's an even number of backslashes, the double quote is not
133 133 # quoted (i.e. it ends the quoted region)
134 134 # - if there's an odd number of backslashes, the double quote is quoted
135 135 # - in both cases, every pair of backslashes is unquoted into a single
136 136 # backslash
137 137 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
138 138 # So, to quote a string, we must surround it in double quotes, double
139 139 # the number of backslashes that preceed double quotes and add another
140 140 # backslash before every double quote (being careful with the double
141 141 # quote we've appended to the end)
142 142 _quotere = None
143 143 def shellquote(s):
144 144 global _quotere
145 145 if _quotere is None:
146 146 _quotere = re.compile(r'(\\*)("|\\$)')
147 147 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
148 148
149 149 def quotecommand(cmd):
150 150 """Build a command string suitable for os.popen* calls."""
151 151 # The extra quotes are needed because popen* runs the command
152 152 # through the current COMSPEC. cmd.exe suppress enclosing quotes.
153 153 return '"' + cmd + '"'
154 154
155 155 def popen(command, mode='r'):
156 156 # Work around "popen spawned process may not write to stdout
157 157 # under windows"
158 158 # http://bugs.python.org/issue1366
159 159 command += " 2> %s" % nulldev
160 160 return os.popen(quotecommand(command), mode)
161 161
162 162 def explain_exit(code):
163 163 return _("exited with status %d") % code, code
164 164
165 165 # if you change this stub into a real check, please try to implement the
166 166 # username and groupname functions above, too.
167 167 def isowner(st):
168 168 return True
169 169
170 170 def find_exe(command):
171 171 '''Find executable for command searching like cmd.exe does.
172 172 If command is a basename then PATH is searched for command.
173 173 PATH isn't searched if command is an absolute or relative path.
174 174 An extension from PATHEXT is found and added if not present.
175 175 If command isn't found None is returned.'''
176 176 pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
177 177 pathexts = [ext for ext in pathext.lower().split(os.pathsep)]
178 178 if os.path.splitext(command)[1].lower() in pathexts:
179 179 pathexts = ['']
180 180
181 181 def findexisting(pathcommand):
182 182 'Will append extension (if needed) and return existing file'
183 183 for ext in pathexts:
184 184 executable = pathcommand + ext
185 185 if os.path.exists(executable):
186 186 return executable
187 187 return None
188 188
189 189 if os.sep in command:
190 190 return findexisting(command)
191 191
192 192 for path in os.environ.get('PATH', '').split(os.pathsep):
193 193 executable = findexisting(os.path.join(path, command))
194 194 if executable is not None:
195 195 return executable
196 196 return None
197 197
198 198 def set_signal_handler():
199 199 try:
200 200 set_signal_handler_win32()
201 201 except NameError:
202 202 pass
203 203
204 204 def statfiles(files):
205 205 '''Stat each file in files and yield stat or None if file does not exist.
206 206 Cluster and cache stat per directory to minimize number of OS stat calls.'''
207 207 ncase = os.path.normcase
208 208 sep = os.sep
209 209 dircache = {} # dirname -> filename -> status | None if file does not exist
210 210 for nf in files:
211 211 nf = ncase(nf)
212 pos = nf.rfind(sep)
213 if pos == -1:
214 dir, base = '.', nf
215 else:
216 dir, base = nf[:pos+1], nf[pos+1:]
212 dir, base = os.path.split(nf)
213 if not dir:
214 dir = '.'
217 215 cache = dircache.get(dir, None)
218 216 if cache is None:
219 217 try:
220 218 dmap = dict([(ncase(n), s)
221 219 for n, k, s in osutil.listdir(dir, True)])
222 220 except OSError, err:
223 221 # handle directory not found in Python version prior to 2.5
224 222 # Python <= 2.4 returns native Windows code 3 in errno
225 223 # Python >= 2.5 returns ENOENT and adds winerror field
226 224 # EINVAL is raised if dir is not a directory.
227 225 if err.errno not in (3, errno.ENOENT, errno.EINVAL,
228 226 errno.ENOTDIR):
229 227 raise
230 228 dmap = {}
231 229 cache = dircache.setdefault(dir, dmap)
232 230 yield cache.get(base, None)
233 231
234 232 def getuser():
235 233 '''return name of current user'''
236 234 raise error.Abort(_('user name not available - set USERNAME '
237 235 'environment variable'))
238 236
239 237 def username(uid=None):
240 238 """Return the name of the user with the given uid.
241 239
242 240 If uid is None, return the name of the current user."""
243 241 return None
244 242
245 243 def groupname(gid=None):
246 244 """Return the name of the group with the given gid.
247 245
248 246 If gid is None, return the name of the current group."""
249 247 return None
250 248
251 249 def _removedirs(name):
252 250 """special version of os.removedirs that does not remove symlinked
253 251 directories or junction points if they actually contain files"""
254 252 if osutil.listdir(name):
255 253 return
256 254 os.rmdir(name)
257 255 head, tail = os.path.split(name)
258 256 if not tail:
259 257 head, tail = os.path.split(head)
260 258 while head and tail:
261 259 try:
262 260 if osutil.listdir(name):
263 261 return
264 262 os.rmdir(head)
265 263 except:
266 264 break
267 265 head, tail = os.path.split(head)
268 266
269 267 def unlink(f):
270 268 """unlink and remove the directory if it is empty"""
271 269 os.unlink(f)
272 270 # try removing directories that might now be empty
273 271 try:
274 272 _removedirs(os.path.dirname(f))
275 273 except OSError:
276 274 pass
277 275
278 276 try:
279 277 # override functions with win32 versions if possible
280 278 from win32 import *
281 279 except ImportError:
282 280 pass
283 281
284 282 expandglobs = True
@@ -1,190 +1,202 b''
1 1 #!/bin/sh
2 2
3 3 # adjust to non-default HGPORT, e.g. with run-tests.py -j
4 4 hideport() { sed "s/localhost:$HGPORT/localhost:20059/"; }
5 5 hidehash() { sed "s/changeset 3:............ merges/changeset 3:... merges/"; }
6 6
7 7 echo "[extensions]" >> $HGRCPATH
8 8 echo "fetch=" >> $HGRCPATH
9 9
10 10 echo % test fetch with default branches only
11 11 hg init a
12 12 echo a > a/a
13 13 hg --cwd a commit -d '1 0' -Ama
14 14
15 15 hg clone a b
16 16 hg clone a c
17 17
18 18 echo b > a/b
19 19 hg --cwd a commit -d '2 0' -Amb
20 20 hg --cwd a parents -q
21 21
22 22 echo % should pull one change
23 23 hg --cwd b fetch ../a
24 24 hg --cwd b parents -q
25 25
26 26 echo c > c/c
27 27 hg --cwd c commit -d '3 0' -Amc
28 28
29 29 hg clone c d
30 30 hg clone c e
31 31
32 32 # We cannot use the default commit message if fetching from a local
33 33 # repo, because the path of the repo will be included in the commit
34 34 # message, making every commit appear different.
35 35
36 36 echo % should merge c into a
37 37 hg --cwd c fetch -d '4 0' -m 'automated merge' ../a
38 38 ls c
39 39
40 40 netstat -tnap 2>/dev/null | grep $HGPORT | grep LISTEN
41 41 hg --cwd a serve -a localhost -p $HGPORT -d --pid-file=hg.pid
42 42 cat a/hg.pid >> "$DAEMON_PIDS"
43 43
44 44 echo '% fetch over http, no auth'
45 45 hg --cwd d fetch -d '5 0' http://localhost:$HGPORT/ | hideport | hidehash
46 46 hg --cwd d tip --template '{desc}\n' | hideport
47 47
48 48 echo '% fetch over http with auth (should be hidden in desc)'
49 49 hg --cwd e fetch -d '5 0' http://user:password@localhost:$HGPORT/ | hideport | hidehash
50 50 hg --cwd e tip --template '{desc}\n' | hideport
51 51
52 52 hg clone a f
53 53 hg clone a g
54 54
55 55 echo f > f/f
56 56 hg --cwd f ci -d '6 0' -Amf
57 57
58 58 echo g > g/g
59 59 hg --cwd g ci -d '6 0' -Amg
60 60
61 61 hg clone -q f h
62 62 hg clone -q g i
63 63
64 64 echo % should merge f into g
65 65 hg --cwd g fetch -d '7 0' --switch -m 'automated merge' ../f
66 66
67 67 rm i/g
68 68 echo % should abort, because i is modified
69 69 hg --cwd i fetch ../h
70 70
71 71
72 72 echo % test fetch with named branches
73 73 hg init nbase
74 74 echo base > nbase/a
75 75 hg -R nbase ci -d '1 0' -Am base
76 76 hg -R nbase branch a
77 77 echo a > nbase/a
78 78 hg -R nbase ci -d '2 0' -m a
79 79 hg -R nbase up -C 0
80 80 hg -R nbase branch b
81 81 echo b > nbase/b
82 82 hg -R nbase ci -Ad '3 0' -m b
83 83
84 84 echo
85 85 echo % pull in change on foreign branch
86 86 hg clone nbase n1
87 87 hg clone nbase n2
88 88 hg -R n1 up -C a
89 89 echo aa > n1/a
90 90 hg -R n1 ci -d '4 0' -m a1
91 91
92 92 hg -R n2 up -C b
93 93 hg -R n2 fetch -d '9 0' -m 'merge' n1
94 94 echo '% parent should be 2 (no automatic update)'
95 95 hg -R n2 parents --template '{rev}\n'
96 96 rm -fr n1 n2
97 97
98 98 echo
99 99 echo % pull in changes on both foreign and local branches
100 100 hg clone nbase n1
101 101 hg clone nbase n2
102 102 hg -R n1 up -C a
103 103 echo aa > n1/a
104 104 hg -R n1 ci -d '4 0' -m a1
105 105 hg -R n1 up -C b
106 106 echo bb > n1/b
107 107 hg -R n1 ci -d '5 0' -m b1
108 108
109 109 hg -R n2 up -C b
110 110 hg -R n2 fetch -d '9 0' -m 'merge' n1
111 111 echo '% parent should be 4 (fast forward)'
112 112 hg -R n2 parents --template '{rev}\n'
113 113 rm -fr n1 n2
114 114
115 115 echo
116 116 echo '% pull changes on foreign (2 new heads) and local (1 new head) branches'
117 117 echo % with a local change
118 118 hg clone nbase n1
119 119 hg clone nbase n2
120 120 hg -R n1 up -C a
121 121 echo a1 > n1/a
122 122 hg -R n1 ci -d '4 0' -m a1
123 123 hg -R n1 up -C b
124 124 echo bb > n1/b
125 125 hg -R n1 ci -d '5 0' -m b1
126 126 hg -R n1 up -C 1
127 127 echo a2 > n1/a
128 128 hg -R n1 ci -d '6 0' -m a2
129 129
130 130 hg -R n2 up -C b
131 131 echo change >> n2/c
132 132 hg -R n2 ci -Ad '7 0' -m local
133 133 hg -R n2 fetch -d '9 0' -m 'merge' n1
134 134 echo '% parent should be 7 (new merge changeset)'
135 135 hg -R n2 parents --template '{rev}\n'
136 136 rm -fr n1 n2
137 137
138 138 echo '% pull in changes on foreign (merge of local branch) and local (2 new'
139 139 echo '% heads) with a local change'
140 140 hg clone nbase n1
141 141 hg clone nbase n2
142 142 hg -R n1 up -C a
143 143 hg -R n1 merge b
144 144 hg -R n1 ci -d '4 0' -m merge
145 145 hg -R n1 up -C 2
146 146 echo c > n1/a
147 147 hg -R n1 ci -d '5 0' -m c
148 148 hg -R n1 up -C 2
149 149 echo cc > n1/a
150 150 hg -R n1 ci -d '6 0' -m cc
151 151
152 152 hg -R n2 up -C b
153 153 echo change >> n2/b
154 154 hg -R n2 ci -Ad '7 0' -m local
155 155 hg -R n2 fetch -d '9 0' -m 'merge' n1
156 156 echo '% parent should be 3 (fetch did not merge anything)'
157 157 hg -R n2 parents --template '{rev}\n'
158 158 rm -fr n1 n2
159 159
160 160 echo % pull in change on different branch than dirstate
161 161 hg init n1
162 162 echo a > n1/a
163 163 hg -R n1 ci -Am initial
164 164 hg clone n1 n2
165 165 echo b > n1/a
166 166 hg -R n1 ci -m next
167 167 hg -R n2 branch topic
168 168 hg -R n2 fetch -d '0 0' -m merge n1
169 169 echo '% parent should be 0 (fetch did not update or merge anything)'
170 170 hg -R n2 parents --template '{rev}\n'
171 171 rm -fr n1 n2
172 172
173 173 echo % test fetch with inactive branches
174 174 hg init ib1
175 175 echo a > ib1/a
176 176 hg --cwd ib1 ci -Am base
177 177 hg --cwd ib1 branch second
178 178 echo b > ib1/b
179 179 hg --cwd ib1 ci -Am onsecond
180 180 hg --cwd ib1 branch -f default
181 181 echo c > ib1/c
182 182 hg --cwd ib1 ci -Am newdefault
183 183 hg clone ib1 ib2
184 184 echo % fetch should succeed
185 185 hg --cwd ib2 fetch ../ib1
186 186 rm -fr ib1 ib2
187 187
188 echo % test issue1726
189 hg init i1726r1
190 echo a > i1726r1/a
191 hg --cwd i1726r1 ci -Am base
192 hg clone i1726r1 i1726r2
193 echo b > i1726r1/a
194 hg --cwd i1726r1 ci -m second
195 echo c > i1726r2/a
196 hg --cwd i1726r2 ci -m third
197 HGMERGE=true hg --cwd i1726r2 fetch ../i1726r1 | sed 's/new changeset 3:[0-9a-zA-Z]\+/new changeset 3/'
198 hg --cwd i1726r2 heads default --template '{rev}\n'
199
188 200 "$TESTDIR/killdaemons.py"
189 201
190 202 true
@@ -1,192 +1,209 b''
1 1 % test fetch with default branches only
2 2 adding a
3 3 updating working directory
4 4 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
5 5 updating working directory
6 6 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
7 7 adding b
8 8 1:97d72e5f12c7
9 9 % should pull one change
10 10 pulling from ../a
11 11 searching for changes
12 12 adding changesets
13 13 adding manifests
14 14 adding file changes
15 15 added 1 changesets with 1 changes to 1 files
16 16 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
17 17 1:97d72e5f12c7
18 18 adding c
19 19 updating working directory
20 20 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
21 21 updating working directory
22 22 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
23 23 % should merge c into a
24 24 pulling from ../a
25 25 searching for changes
26 26 adding changesets
27 27 adding manifests
28 28 adding file changes
29 29 added 1 changesets with 1 changes to 1 files (+1 heads)
30 30 updating to 2:97d72e5f12c7
31 31 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
32 32 merging with 1:5e056962225c
33 33 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
34 34 new changeset 3:cd3a41621cf0 merges remote changes with local
35 35 a
36 36 b
37 37 c
38 38 % fetch over http, no auth
39 39 pulling from http://localhost:20059/
40 40 searching for changes
41 41 adding changesets
42 42 adding manifests
43 43 adding file changes
44 44 added 1 changesets with 1 changes to 1 files (+1 heads)
45 45 updating to 2:97d72e5f12c7
46 46 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
47 47 merging with 1:5e056962225c
48 48 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
49 49 new changeset 3:... merges remote changes with local
50 50 Automated merge with http://localhost:20059/
51 51 % fetch over http with auth (should be hidden in desc)
52 52 pulling from http://user:***@localhost:20059/
53 53 searching for changes
54 54 adding changesets
55 55 adding manifests
56 56 adding file changes
57 57 added 1 changesets with 1 changes to 1 files (+1 heads)
58 58 updating to 2:97d72e5f12c7
59 59 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
60 60 merging with 1:5e056962225c
61 61 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
62 62 new changeset 3:... merges remote changes with local
63 63 Automated merge with http://localhost:20059/
64 64 updating working directory
65 65 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
66 66 updating working directory
67 67 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
68 68 adding f
69 69 adding g
70 70 % should merge f into g
71 71 pulling from ../f
72 72 searching for changes
73 73 adding changesets
74 74 adding manifests
75 75 adding file changes
76 76 added 1 changesets with 1 changes to 1 files (+1 heads)
77 77 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
78 78 merging with 3:cc6a3744834d
79 79 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
80 80 new changeset 4:55aa4f32ec59 merges remote changes with local
81 81 % should abort, because i is modified
82 82 abort: working directory is missing some files
83 83 % test fetch with named branches
84 84 adding a
85 85 marked working directory as branch a
86 86 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
87 87 marked working directory as branch b
88 88 adding b
89 89 created new head
90 90
91 91 % pull in change on foreign branch
92 92 updating working directory
93 93 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
94 94 updating working directory
95 95 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
96 96 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
97 97 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
98 98 pulling from n1
99 99 searching for changes
100 100 adding changesets
101 101 adding manifests
102 102 adding file changes
103 103 added 1 changesets with 1 changes to 1 files
104 104 % parent should be 2 (no automatic update)
105 105 2
106 106
107 107 % pull in changes on both foreign and local branches
108 108 updating working directory
109 109 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
110 110 updating working directory
111 111 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
112 112 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
113 113 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
114 114 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
115 115 pulling from n1
116 116 searching for changes
117 117 adding changesets
118 118 adding manifests
119 119 adding file changes
120 120 added 2 changesets with 2 changes to 2 files
121 121 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
122 122 % parent should be 4 (fast forward)
123 123 4
124 124
125 125 % pull changes on foreign (2 new heads) and local (1 new head) branches
126 126 % with a local change
127 127 updating working directory
128 128 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
129 129 updating working directory
130 130 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
131 131 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
132 132 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
133 133 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
134 134 created new head
135 135 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
136 136 adding c
137 137 pulling from n1
138 138 searching for changes
139 139 adding changesets
140 140 adding manifests
141 141 adding file changes
142 142 added 3 changesets with 3 changes to 2 files (+2 heads)
143 143 updating to 5:708c6cce3d26
144 144 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
145 145 merging with 3:d83427717b1f
146 146 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
147 147 new changeset 7:48f1a33f52af merges remote changes with local
148 148 % parent should be 7 (new merge changeset)
149 149 7
150 150 % pull in changes on foreign (merge of local branch) and local (2 new
151 151 % heads) with a local change
152 152 updating working directory
153 153 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
154 154 updating working directory
155 155 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
156 156 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
157 157 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
158 158 (branch merge, don't forget to commit)
159 159 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
160 160 created new head
161 161 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
162 162 created new head
163 163 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
164 164 pulling from n1
165 165 searching for changes
166 166 adding changesets
167 167 adding manifests
168 168 adding file changes
169 169 added 3 changesets with 2 changes to 1 files (+2 heads)
170 170 not merging with 1 other new branch heads (use "hg heads ." and "hg merge" to merge them)
171 171 % parent should be 3 (fetch did not merge anything)
172 172 3
173 173 % pull in change on different branch than dirstate
174 174 adding a
175 175 updating working directory
176 176 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
177 177 marked working directory as branch topic
178 178 abort: working dir not at branch tip (use "hg update" to check out branch tip)
179 179 % parent should be 0 (fetch did not update or merge anything)
180 180 0
181 181 % test fetch with inactive branches
182 182 adding a
183 183 marked working directory as branch second
184 184 adding b
185 185 marked working directory as branch default
186 186 adding c
187 187 updating working directory
188 188 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
189 189 % fetch should succeed
190 190 pulling from ../ib1
191 191 searching for changes
192 192 no changes found
193 % test issue1726
194 adding a
195 updating working directory
196 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
197 pulling from ../i1726r1
198 searching for changes
199 adding changesets
200 adding manifests
201 adding file changes
202 added 1 changesets with 1 changes to 1 files (+1 heads)
203 updating to 2:7837755a2789
204 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
205 merging with 1:d1f0c6c48ebd
206 merging a
207 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
208 new changeset 3 merges remote changes with local
209 3
General Comments 0
You need to be logged in to leave comments. Login now