##// END OF EJS Templates
cleanup: migrate from re.escape to stringutil.reescape...
Augie Fackler -
r38494:67dc32d4 @56 default
parent child Browse files
Show More
@@ -1,815 +1,815
1 1 # keyword.py - $Keyword$ expansion for Mercurial
2 2 #
3 3 # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7 #
8 8 # $Id$
9 9 #
10 10 # Keyword expansion hack against the grain of a Distributed SCM
11 11 #
12 12 # There are many good reasons why this is not needed in a distributed
13 13 # SCM, still it may be useful in very small projects based on single
14 14 # files (like LaTeX packages), that are mostly addressed to an
15 15 # audience not running a version control system.
16 16 #
17 17 # For in-depth discussion refer to
18 18 # <https://mercurial-scm.org/wiki/KeywordPlan>.
19 19 #
20 20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 21 #
22 22 # Binary files are not touched.
23 23 #
24 24 # Files to act upon/ignore are specified in the [keyword] section.
25 25 # Customized keyword template mappings in the [keywordmaps] section.
26 26 #
27 27 # Run 'hg help keyword' and 'hg kwdemo' to get info on configuration.
28 28
29 29 '''expand keywords in tracked files
30 30
31 31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
32 32 tracked text files selected by your configuration.
33 33
34 34 Keywords are only expanded in local repositories and not stored in the
35 35 change history. The mechanism can be regarded as a convenience for the
36 36 current user or for archive distribution.
37 37
38 38 Keywords expand to the changeset data pertaining to the latest change
39 39 relative to the working directory parent of each file.
40 40
41 41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
42 42 sections of hgrc files.
43 43
44 44 Example::
45 45
46 46 [keyword]
47 47 # expand keywords in every python file except those matching "x*"
48 48 **.py =
49 49 x* = ignore
50 50
51 51 [keywordset]
52 52 # prefer svn- over cvs-like default keywordmaps
53 53 svn = True
54 54
55 55 .. note::
56 56
57 57 The more specific you are in your filename patterns the less you
58 58 lose speed in huge repositories.
59 59
60 60 For [keywordmaps] template mapping and expansion demonstration and
61 61 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
62 62 available templates and filters.
63 63
64 64 Three additional date template filters are provided:
65 65
66 66 :``utcdate``: "2006/09/18 15:13:13"
67 67 :``svnutcdate``: "2006-09-18 15:13:13Z"
68 68 :``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
69 69
70 70 The default template mappings (view with :hg:`kwdemo -d`) can be
71 71 replaced with customized keywords and templates. Again, run
72 72 :hg:`kwdemo` to control the results of your configuration changes.
73 73
74 74 Before changing/disabling active keywords, you must run :hg:`kwshrink`
75 75 to avoid storing expanded keywords in the change history.
76 76
77 77 To force expansion after enabling it, or a configuration change, run
78 78 :hg:`kwexpand`.
79 79
80 80 Expansions spanning more than one line and incremental expansions,
81 81 like CVS' $Log$, are not supported. A keyword template map "Log =
82 82 {desc}" expands to the first line of the changeset description.
83 83 '''
84 84
85 85
86 86 from __future__ import absolute_import
87 87
88 88 import os
89 89 import re
90 90 import weakref
91 91
92 92 from mercurial.i18n import _
93 93 from mercurial.hgweb import webcommands
94 94
95 95 from mercurial import (
96 96 cmdutil,
97 97 context,
98 98 dispatch,
99 99 error,
100 100 extensions,
101 101 filelog,
102 102 localrepo,
103 103 logcmdutil,
104 104 match,
105 105 patch,
106 106 pathutil,
107 107 pycompat,
108 108 registrar,
109 109 scmutil,
110 110 templatefilters,
111 111 templateutil,
112 112 util,
113 113 )
114 114 from mercurial.utils import (
115 115 dateutil,
116 116 stringutil,
117 117 )
118 118
# command table and registration helper for this extension's commands
cmdtable = {}
command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

# hg commands that do not act on keywords
nokwcommands = ('add addremove annotate bundle export grep incoming init log'
                ' outgoing push tip verify convert email glog')

# webcommands that do not act on keywords
nokwwebcommands = ('annotate changeset rev filediff diff comparison')

# hg commands that trigger expansion only when writing to working dir,
# not when reading filelog, and unexpand when reading from working dir
restricted = ('merge kwexpand kwshrink record qrecord resolve transplant'
              ' unshelve rebase graft backout histedit fetch')

# names of extensions using dorecord
recordextensions = 'record'

# labels used by the kwfiles formatter output (see files())
colortable = {
    'kwfiles.enabled': 'green bold',
    'kwfiles.deleted': 'cyan bold underline',
    'kwfiles.enabledunknown': 'green',
    'kwfiles.ignored': 'bold',
    'kwfiles.ignoredunknown': 'none'
}

templatefilter = registrar.templatefilter()

configtable = {}
configitem = registrar.configitem(configtable)

# [keywordset] svn: when True, prefer svn- over cvs-like default keyword maps
configitem('keywordset', 'svn',
           default=False,
)
# date like in cvs' $Date
@templatefilter('utcdate', intype=templateutil.date)
def utcdate(date):
    '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
    '''
    # zero out the timezone offset so the timestamp is rendered as UTC
    return dateutil.datestr((date[0], 0), '%Y/%m/%d %H:%M:%S')
# date like in svn's $Date
@templatefilter('svnisodate', intype=templateutil.date)
def svnisodate(date):
    '''Date. Returns a date in this format: "2009-08-18 13:00:13
    +0200 (Tue, 18 Aug 2009)".
    '''
    # unlike utcdate/svnutcdate the original timezone offset is kept
    fmt = '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)'
    return dateutil.datestr(date, fmt)
# date like in svn's $Id
@templatefilter('svnutcdate', intype=templateutil.date)
def svnutcdate(date):
    '''Date. Returns a UTC-date in this format: "2009-08-18
    11:00:13Z".
    '''
    # zero out the timezone offset so the timestamp is rendered as UTC
    return dateutil.datestr((date[0], 0), '%Y-%m-%d %H:%M:%SZ')
180 180
# make keyword tools accessible
# 'hgcmd' is filled in by uisetup()'s dispatch._parse wrapper with the
# name of the hg command currently running
kwtools = {'hgcmd': ''}
183 183
184 184 def _defaultkwmaps(ui):
185 185 '''Returns default keywordmaps according to keywordset configuration.'''
186 186 templates = {
187 187 'Revision': '{node|short}',
188 188 'Author': '{author|user}',
189 189 }
190 190 kwsets = ({
191 191 'Date': '{date|utcdate}',
192 192 'RCSfile': '{file|basename},v',
193 193 'RCSFile': '{file|basename},v', # kept for backwards compatibility
194 194 # with hg-keyword
195 195 'Source': '{root}/{file},v',
196 196 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
197 197 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
198 198 }, {
199 199 'Date': '{date|svnisodate}',
200 200 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
201 201 'LastChangedRevision': '{node|short}',
202 202 'LastChangedBy': '{author|user}',
203 203 'LastChangedDate': '{date|svnisodate}',
204 204 })
205 205 templates.update(kwsets[ui.configbool('keywordset', 'svn')])
206 206 return templates
207 207
208 208 def _shrinktext(text, subfunc):
209 209 '''Helper for keyword expansion removal in text.
210 210 Depending on subfunc also returns number of substitutions.'''
211 211 return subfunc(r'$\1$', text)
212 212
213 213 def _preselect(wstatus, changed):
214 214 '''Retrieves modified and added files from a working directory state
215 215 and returns the subset of each contained in given changed files
216 216 retrieved from a change context.'''
217 217 modified = [f for f in wstatus.modified if f in changed]
218 218 added = [f for f in wstatus.added if f in changed]
219 219 return modified, added
220 220
221 221
class kwtemplater(object):
    '''
    Sets up keyword templates, corresponding keyword regex, and
    provides keyword substitution functions.
    '''

    def __init__(self, ui, repo, inc, exc):
        self.ui = ui
        # weakref avoids a reference cycle repo -> kwt -> repo
        self._repo = weakref.ref(repo)
        # matcher built from [keyword] include/exclude patterns
        self.match = match.match(repo.root, '', [], inc, exc)
        # restricted mode: expand only on write to wdir, unexpand on read
        self.restrict = kwtools['hgcmd'] in restricted.split()
        # set by wrappers (kw_amend/kw_dorecord) around commit-like commands
        self.postcommit = False

        kwmaps = self.ui.configitems('keywordmaps')
        if kwmaps: # override default templates
            self.templates = dict(kwmaps)
        else:
            self.templates = _defaultkwmaps(self.ui)

    @property
    def repo(self):
        # dereference the weakref; may be None once the repo is gone
        return self._repo()

    @util.propertycache
    def escape(self):
        '''Returns bar-separated and escaped keywords.'''
        return '|'.join(map(stringutil.reescape, self.templates.keys()))

    @util.propertycache
    def rekw(self):
        '''Returns regex for unexpanded keywords.'''
        return re.compile(r'\$(%s)\$' % self.escape)

    @util.propertycache
    def rekwexp(self):
        '''Returns regex for expanded keywords.'''
        # '[^$\n\r]*?' keeps a match on a single line and non-greedy
        return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape)

    def substitute(self, data, path, ctx, subfunc):
        '''Replaces keywords in data with expanded template.'''
        def kwsub(mobj):
            kw = mobj.group(1)
            ct = logcmdutil.maketemplater(self.ui, self.repo,
                                          self.templates[kw])
            # render the template into a ui buffer, then keep only the
            # first line (multi-line expansions are unsupported)
            self.ui.pushbuffer()
            ct.show(ctx, root=self.repo.root, file=path)
            ekw = templatefilters.firstline(self.ui.popbuffer())
            return '$%s: %s $' % (kw, ekw)
        return subfunc(kwsub, data)

    def linkctx(self, path, fileid):
        '''Similar to filelog.linkrev, but returns a changectx.'''
        return self.repo.filectx(path, fileid=fileid).changectx()

    def expand(self, path, node, data):
        '''Returns data with keywords expanded.'''
        # never expand in restricted mode or for binary/unmatched files
        if (not self.restrict and self.match(path)
            and not stringutil.binary(data)):
            ctx = self.linkctx(path, node)
            return self.substitute(data, path, ctx, self.rekw.sub)
        return data

    def iskwfile(self, cand, ctx):
        '''Returns subset of candidates which are configured for keyword
        expansion but are not symbolic links.'''
        return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)]

    def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
        '''Overwrites selected files expanding/shrinking keywords.'''
        if self.restrict or lookup or self.postcommit: # exclude kw_copy
            candidates = self.iskwfile(candidates, ctx)
        if not candidates:
            return
        kwcmd = self.restrict and lookup # kwexpand/kwshrink
        # the manifest is only needed when reading data from the filelog
        # or when looking up the changectx of individual files
        if self.restrict or expand and lookup:
            mf = ctx.manifest()
        if self.restrict or rekw:
            # operate on unexpanded '$Keyword$' forms
            re_kw = self.rekw
        else:
            # operate on expanded '$Keyword: ... $' forms
            re_kw = self.rekwexp
        if expand:
            msg = _('overwriting %s expanding keywords\n')
        else:
            msg = _('overwriting %s shrinking keywords\n')
        for f in candidates:
            if self.restrict:
                data = self.repo.file(f).read(mf[f])
            else:
                data = self.repo.wread(f)
            if stringutil.binary(data):
                continue
            if expand:
                parents = ctx.parents()
                if lookup:
                    ctx = self.linkctx(f, mf[f])
                elif self.restrict and len(parents) > 1:
                    # merge commit
                    # in case of conflict f is in modified state during
                    # merge, even if f does not differ from f in parent
                    for p in parents:
                        if f in p and not p[f].cmp(ctx[f]):
                            ctx = p[f].changectx()
                            break
                data, found = self.substitute(data, f, ctx, re_kw.subn)
            elif self.restrict:
                found = re_kw.search(data)
            else:
                data, found = _shrinktext(data, re_kw.subn)
            if found:
                self.ui.note(msg % f)
                fp = self.repo.wvfs(f, "wb", atomictemp=True)
                fp.write(data)
                fp.close()
                if kwcmd:
                    # kwexpand/kwshrink must not show files as modified
                    self.repo.dirstate.normal(f)
                elif self.postcommit:
                    self.repo.dirstate.normallookup(f)

    def shrink(self, fname, text):
        '''Returns text with all keyword substitutions removed.'''
        if self.match(fname) and not stringutil.binary(text):
            return _shrinktext(text, self.rekwexp.sub)
        return text

    def shrinklines(self, fname, lines):
        '''Returns lines with keyword substitutions removed.'''
        if self.match(fname):
            text = ''.join(lines)
            if not stringutil.binary(text):
                return _shrinktext(text, self.rekwexp.sub).splitlines(True)
        return lines

    def wread(self, fname, data):
        '''If in restricted mode returns data read from wdir with
        keyword substitutions removed.'''
        if self.restrict:
            return self.shrink(fname, data)
        return data
360 360
class kwfilelog(filelog.filelog):
    '''
    Subclass of filelog to hook into its read, add, cmp methods.
    Keywords are "stored" unexpanded, and processed on reading.
    '''
    def __init__(self, opener, kwt, path):
        super(kwfilelog, self).__init__(opener, path)
        # kwtemplater instance driving expansion/unexpansion
        self.kwt = kwt
        # repo-relative file name, used for pattern matching
        self.path = path

    def read(self, node):
        '''Expands keywords when reading filelog.'''
        data = super(kwfilelog, self).read(node)
        # rename metadata must be returned untouched
        if self.renamed(node):
            return data
        return self.kwt.expand(self.path, node, data)

    def add(self, text, meta, tr, link, p1=None, p2=None):
        '''Removes keyword substitutions when adding to filelog.'''
        text = self.kwt.shrink(self.path, text)
        return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)

    def cmp(self, node, text):
        '''Removes keyword substitutions for comparison.'''
        # shrink so the comparison happens on the stored (unexpanded) form
        text = self.kwt.shrink(self.path, text)
        return super(kwfilelog, self).cmp(node, text)
387 387
def _status(ui, repo, wctx, kwt, *pats, **opts):
    '''Bails out if [keyword] configuration is not active.
    Returns status of working directory.'''
    if not kwt:
        # reposetup() declined to activate keyword handling for this repo
        if ui.configitems('keyword'):
            raise error.Abort(_('[keyword] patterns cannot match'))
        raise error.Abort(_('no [keyword] patterns configured'))
    byteopts = pycompat.byteskwargs(opts)
    showunknown = byteopts.get('unknown') or byteopts.get('all')
    return repo.status(match=scmutil.match(wctx, pats, byteopts),
                       clean=True, unknown=showunknown)
398 398
def _kwfwrite(ui, repo, expand, *pats, **opts):
    '''Selects files and passes them to kwtemplater.overwrite.

    expand=True expands keywords, expand=False shrinks them.  Refuses
    to run on an uncommitted merge or with local changes, so only
    clean files are ever rewritten.'''
    wctx = repo[None]
    if len(wctx.parents()) > 1:
        raise error.Abort(_('outstanding uncommitted merge'))
    kwt = getattr(repo, '_keywordkwt', None)
    with repo.wlock():
        status = _status(ui, repo, wctx, kwt, *pats, **opts)
        if status.modified or status.added or status.removed or status.deleted:
            raise error.Abort(_('outstanding uncommitted changes'))
        kwt.overwrite(wctx, status.clean, True, expand)
410 410
@command('kwdemo',
         [('d', 'default', None, _('show default keyword template maps')),
          ('f', 'rcfile', '',
           _('read maps from rcfile'), _('FILE'))],
         _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
         optionalrepo=True)
def demo(ui, repo, *args, **opts):
    '''print [keywordmaps] configuration and an expansion example

    Show current, custom, or default keyword template maps and their
    expansions.

    Extend the current configuration by specifying maps as arguments
    and using -f/--rcfile to source an external hgrc file.

    Use -d/--default to disable current configuration.

    See :hg:`help templates` for information on templates and filters.
    '''
    def demoitems(section, items):
        # print one config section in hgrc syntax
        ui.write('[%s]\n' % section)
        for k, v in sorted(items):
            ui.write('%s = %s\n' % (k, v))

    # the demo runs a real commit in a throw-away repository
    fn = 'demo.txt'
    tmpdir = pycompat.mkdtemp('', 'kwdemo.')
    ui.note(_('creating temporary repository at %s\n') % tmpdir)
    if repo is None:
        baseui = ui
    else:
        baseui = repo.baseui
    repo = localrepo.localrepository(baseui, tmpdir, True)
    ui.setconfig('keyword', fn, '', 'keyword')
    svn = ui.configbool('keywordset', 'svn')
    # explicitly set keywordset for demo output
    ui.setconfig('keywordset', 'svn', svn, 'keyword')

    uikwmaps = ui.configitems('keywordmaps')
    if args or opts.get(r'rcfile'):
        # custom maps from command line arguments and/or an rcfile
        ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
        if uikwmaps:
            ui.status(_('\textending current template maps\n'))
        if opts.get(r'default') or not uikwmaps:
            if svn:
                ui.status(_('\toverriding default svn keywordset\n'))
            else:
                ui.status(_('\toverriding default cvs keywordset\n'))
        if opts.get(r'rcfile'):
            ui.readconfig(opts.get('rcfile'))
        if args:
            # simulate hgrc parsing
            rcmaps = '[keywordmaps]\n%s\n' % '\n'.join(args)
            repo.vfs.write('hgrc', rcmaps)
            ui.readconfig(repo.vfs.join('hgrc'))
        kwmaps = dict(ui.configitems('keywordmaps'))
    elif opts.get(r'default'):
        # built-in defaults, ignoring any configured maps
        if svn:
            ui.status(_('\n\tconfiguration using default svn keywordset\n'))
        else:
            ui.status(_('\n\tconfiguration using default cvs keywordset\n'))
        kwmaps = _defaultkwmaps(ui)
        if uikwmaps:
            ui.status(_('\tdisabling current template maps\n'))
            for k, v in kwmaps.iteritems():
                ui.setconfig('keywordmaps', k, v, 'keyword')
    else:
        ui.status(_('\n\tconfiguration using current keyword template maps\n'))
        if uikwmaps:
            kwmaps = dict(uikwmaps)
        else:
            kwmaps = _defaultkwmaps(ui)

    # activate the extension on the temporary repo and commit a file
    # containing one unexpanded '$Keyword$' per configured map
    uisetup(ui)
    reposetup(ui, repo)
    ui.write(('[extensions]\nkeyword =\n'))
    demoitems('keyword', ui.configitems('keyword'))
    demoitems('keywordset', ui.configitems('keywordset'))
    demoitems('keywordmaps', kwmaps.iteritems())
    keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
    repo.wvfs.write(fn, keywords)
    repo[None].add([fn])
    ui.note(_('\nkeywords written to %s:\n') % fn)
    ui.note(keywords)
    with repo.wlock():
        repo.dirstate.setbranch('demobranch')
    # disable commit hooks so the demo commit cannot trigger them
    for name, cmd in ui.configitems('hooks'):
        if name.split('.', 1)[0].find('commit') > -1:
            repo.ui.setconfig('hooks', name, '', 'keyword')
    msg = _('hg keyword configuration and expansion example')
    ui.note(("hg ci -m '%s'\n" % msg))
    repo.commit(text=msg)
    ui.status(_('\n\tkeywords expanded\n'))
    ui.write(repo.wread(fn))
    # clean up the throw-away repository
    repo.wvfs.rmtree(repo.root)
505 505
@command('kwexpand',
         cmdutil.walkopts,
         _('hg kwexpand [OPTION]... [FILE]...'),
         inferrepo=True)
def expand(ui, repo, *pats, **opts):
    '''expand keywords in the working directory

    Run after (re)enabling keyword expansion.

    kwexpand refuses to run if given files contain local changes.
    '''
    # 3rd argument sets expansion to True; only clean files are rewritten
    # (_kwfwrite aborts on any outstanding changes)
    _kwfwrite(ui, repo, True, *pats, **opts)
519 519
@command('kwfiles',
         [('A', 'all', None, _('show keyword status flags of all files')),
          ('i', 'ignore', None, _('show files excluded from expansion')),
          ('u', 'unknown', None, _('only show unknown (not tracked) files')),
         ] + cmdutil.walkopts,
         _('hg kwfiles [OPTION]... [FILE]...'),
         inferrepo=True)
def files(ui, repo, *pats, **opts):
    '''show files configured for keyword expansion

    List which files in the working directory are matched by the
    [keyword] configuration patterns.

    Useful to prevent inadvertent keyword expansion and to speed up
    execution by including only files that are actual candidates for
    expansion.

    See :hg:`help keyword` on how to construct patterns both for
    inclusion and exclusion of files.

    With -A/--all and -v/--verbose the codes used to show the status
    of files are::

      K = keyword expansion candidate
      k = keyword expansion candidate (not tracked)
      I = ignored
      i = ignored (not tracked)
    '''
    kwt = getattr(repo, '_keywordkwt', None)
    wctx = repo[None]
    status = _status(ui, repo, wctx, kwt, *pats, **opts)
    if pats:
        cwd = repo.getcwd()
    else:
        cwd = ''
    files = []
    opts = pycompat.byteskwargs(opts)
    if not opts.get('unknown') or opts.get('all'):
        files = sorted(status.modified + status.added + status.clean)
    # partition candidates into expansion-enabled subsets
    kwfiles = kwt.iskwfile(files, wctx)
    kwdeleted = kwt.iskwfile(status.deleted, wctx)
    kwunknown = kwt.iskwfile(status.unknown, wctx)
    if not opts.get('ignore') or opts.get('all'):
        showfiles = kwfiles, kwdeleted, kwunknown
    else:
        showfiles = [], [], []
    if opts.get('all') or opts.get('ignore'):
        # append the ignored subsets; order must mirror kwlabels below
        showfiles += ([f for f in files if f not in kwfiles],
                      [f for f in status.unknown if f not in kwunknown])
    kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
    kwstates = zip(kwlabels, 'K!kIi', showfiles)
    fm = ui.formatter('kwfiles', opts)
    # '%.0s' swallows the status char unless -A/-v asks for it
    fmt = '%.0s%s\n'
    if opts.get('all') or ui.verbose:
        fmt = '%s %s\n'
    for kwstate, char, filenames in kwstates:
        label = 'kwfiles.' + kwstate
        for f in filenames:
            fm.startitem()
            fm.write('kwstatus path', fmt, char,
                     repo.pathto(f, cwd), label=label)
    fm.end()
582 582
@command('kwshrink',
         cmdutil.walkopts,
         _('hg kwshrink [OPTION]... [FILE]...'),
         inferrepo=True)
def shrink(ui, repo, *pats, **opts):
    '''revert expanded keywords in the working directory

    Must be run before changing/disabling active keywords.

    kwshrink refuses to run if given files contain local changes.
    '''
    # 3rd argument sets expansion to False; only clean files are rewritten
    # (_kwfwrite aborts on any outstanding changes)
    _kwfwrite(ui, repo, False, *pats, **opts)
596 596
597 597 # monkeypatches
598 598
def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
    '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
    rejects or conflicts due to expanded keywords in working dir.'''
    orig(self, ui, gp, backend, store, eolmode)
    repo = getattr(backend, 'repo', None)
    kwt = getattr(repo, '_keywordkwt', None)
    if kwt is not None:
        # unexpand keywords that were read from the working directory
        self.lines = kwt.shrinklines(self.fname, self.lines)
607 607
def kwdiff(orig, repo, *args, **kwargs):
    '''Monkeypatch patch.diff to avoid expansion.'''
    kwt = getattr(repo, '_keywordkwt', None)
    if kwt:
        saved = kwt.restrict
        # restricted mode makes reads from the working dir unexpand keywords
        kwt.restrict = True
    try:
        for chunk in orig(repo, *args, **kwargs):
            yield chunk
    finally:
        # restore the previous mode even if the wrapped generator raised
        if kwt:
            kwt.restrict = saved
620 620
def kwweb_skip(orig, web):
    '''Wraps webcommands.x turning off keyword expansion.'''
    kwt = getattr(web.repo, '_keywordkwt', None)
    if kwt:
        savedmatch = kwt.match
        # a never-matching matcher disables all keyword processing
        kwt.match = util.never
    try:
        for chunk in orig(web):
            yield chunk
    finally:
        # restore the real matcher even if the wrapped generator raised
        if kwt:
            kwt.match = savedmatch
633 633
def kw_amend(orig, ui, repo, old, extra, pats, opts):
    '''Wraps cmdutil.amend expanding keywords after amend.'''
    kwt = getattr(repo, '_keywordkwt', None)
    if kwt is None:
        return orig(ui, repo, old, extra, pats, opts)
    with repo.wlock():
        # postcommit suppresses expansion during the commit itself;
        # expansion happens explicitly below, after the amend succeeded
        kwt.postcommit = True
        newid = orig(ui, repo, old, extra, pats, opts)
        # orig returns old.node() when nothing changed; only re-expand
        # when a new changeset was actually created
        if newid != old.node():
            ctx = repo[newid]
            kwt.restrict = True
            kwt.overwrite(ctx, ctx.files(), False, True)
            kwt.restrict = False
        return newid
648 648
def kw_copy(orig, ui, repo, pats, opts, rename=False):
    '''Wraps cmdutil.copy so that copy/rename destinations do not
    contain expanded keywords.
    Note that the source of a regular file destination may also be a
    symlink:
    hg cp sym x                -> x is symlink
    cp sym x; hg cp -A sym x   -> x is file (maybe expanded keywords)
    For the latter we have to follow the symlink to find out whether its
    target is configured for expansion and we therefore must unexpand the
    keywords in the destination.'''
    kwt = getattr(repo, '_keywordkwt', None)
    if kwt is None:
        return orig(ui, repo, pats, opts, rename)
    with repo.wlock():
        orig(ui, repo, pats, opts, rename)
        if opts.get('dry_run'):
            # nothing was written, nothing to unexpand
            return
        wctx = repo[None]
        cwd = repo.getcwd()

        def haskwsource(dest):
            '''Returns true if dest is a regular file and configured for
            expansion or a symlink which points to a file configured for
            expansion. '''
            source = repo.dirstate.copied(dest)
            if 'l' in wctx.flags(source):
                # resolve the symlink to its real target before matching
                source = pathutil.canonpath(repo.root, cwd,
                                            os.path.realpath(source))
            return kwt.match(source)

        # only regular-file destinations whose source is expansion-enabled
        candidates = [f for f in repo.dirstate.copies() if
                      'l' not in wctx.flags(f) and haskwsource(f)]
        kwt.overwrite(wctx, candidates, False, False)
682 682
def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
    '''Wraps record.dorecord expanding keywords after recording.'''
    kwt = getattr(repo, '_keywordkwt', None)
    if kwt is None:
        return orig(ui, repo, commitfunc, *pats, **opts)
    with repo.wlock():
        # record returns 0 even when nothing has changed
        # therefore compare nodes before and after
        kwt.postcommit = True
        ctx = repo['.']
        wstatus = ctx.status()
        ret = orig(ui, repo, commitfunc, *pats, **opts)
        recctx = repo['.']
        if ctx != recctx:
            # a commit happened: expand keywords in the recorded files
            modified, added = _preselect(wstatus, recctx.files())
            kwt.restrict = False
            kwt.overwrite(recctx, modified, False, True)
            # rekw=True: added files still carry unexpanded '$Keyword$'
            kwt.overwrite(recctx, added, False, True, True)
            kwt.restrict = True
        return ret
703 703
def kwfilectx_cmp(orig, self, fctx):
    # wraps context.filectx.cmp; returns True when contents differ
    if fctx._customcmp:
        return fctx.cmp(self)
    kwt = getattr(self._repo, '_keywordkwt', None)
    if kwt is None:
        return orig(self, fctx)
    # keyword affects data size, comparing wdir and filelog size does
    # not make sense
    # NOTE: the condition relies on 'and' binding tighter than 'or';
    # self.size() - 4 presumably accounts for keyword framing overhead
    # ('$', ': ', ' $') -- confirm before changing
    if (fctx._filenode is None and
        (self._repo._encodefilterpats or
         kwt.match(fctx.path()) and 'l' not in fctx.flags() or
         self.size() - 4 == fctx.size()) or
        self.size() == fctx.size()):
        # fall back to a full content comparison on the stored form
        return self._filelog.cmp(self._filenode, fctx.data())
    return True
719 719
def uisetup(ui):
    ''' Monkeypatches dispatch._parse to retrieve user command.
    Overrides file method to return kwfilelog instead of filelog
    if file matches user configuration.
    Wraps commit to overwrite configured files with updated
    keyword substitutions.
    Monkeypatches patch and webcommands.'''

    def kwdispatch_parse(orig, ui, args):
        '''Monkeypatch dispatch._parse to obtain running hg command.'''
        cmd, func, args, options, cmdoptions = orig(ui, args)
        # remember the command so kwtemplater/reposetup can decide
        # whether to enable restricted mode or skip keyword handling
        kwtools['hgcmd'] = cmd
        return cmd, func, args, options, cmdoptions

    extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)

    # wrap the core operations that read from or write to the working
    # directory so keywords are (un)expanded at the right boundaries
    extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
    extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
    extensions.wrapfunction(patch, 'diff', kwdiff)
    extensions.wrapfunction(cmdutil, 'amend', kw_amend)
    extensions.wrapfunction(cmdutil, 'copy', kw_copy)
    extensions.wrapfunction(cmdutil, 'dorecord', kw_dorecord)
    # disable keyword expansion for the listed hgweb views
    for c in nokwwebcommands.split():
        extensions.wrapfunction(webcommands, c, kwweb_skip)
744 744
def reposetup(ui, repo):
    '''Sets up repo as kwrepo for keyword substitution.'''

    # bail out for non-local repos, commands that never act on keywords,
    # repos nested inside a .hg directory, and bundle repos
    try:
        if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
            or '.hg' in util.splitpath(repo.root)
            or repo._url.startswith('bundle:')):
            return
    except AttributeError:
        pass

    # split [keyword] configuration into include/exclude patterns;
    # '.hg*' files are always excluded
    inc, exc = [], ['.hg*']
    for pat, opt in ui.configitems('keyword'):
        if opt != 'ignore':
            inc.append(pat)
        else:
            exc.append(pat)
    if not inc:
        # no file is configured for expansion: leave the repo untouched
        return

    kwt = kwtemplater(ui, repo, inc, exc)

    class kwrepo(repo.__class__):
        def file(self, f):
            if f[0] == '/':
                f = f[1:]
            # serve a keyword-aware filelog for every tracked file
            return kwfilelog(self.svfs, kwt, f)

        def wread(self, filename):
            data = super(kwrepo, self).wread(filename)
            return kwt.wread(filename, data)

        def commit(self, *args, **opts):
            # use custom commitctx for user commands
            # other extensions can still wrap repo.commitctx directly
            self.commitctx = self.kwcommitctx
            try:
                return super(kwrepo, self).commit(*args, **opts)
            finally:
                del self.commitctx

        def kwcommitctx(self, ctx, error=False):
            n = super(kwrepo, self).commitctx(ctx, error)
            # no lock needed, only called from repo.commit() which already locks
            if not kwt.postcommit:
                restrict = kwt.restrict
                kwt.restrict = True
                # re-expand keywords in the files touched by the commit
                kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
                              False, True)
                kwt.restrict = restrict
            return n

        def rollback(self, dryrun=False, force=False):
            with self.wlock():
                origrestrict = kwt.restrict
                try:
                    if not dryrun:
                        # files of the changeset about to be rolled back
                        changed = self['.'].files()
                    ret = super(kwrepo, self).rollback(dryrun, force)
                    if not dryrun:
                        ctx = self['.']
                        modified, added = _preselect(ctx.status(), changed)
                        kwt.restrict = False
                        # modified files: re-expand against the new parent;
                        # added files: shrink, they are untracked again
                        kwt.overwrite(ctx, modified, True, True)
                        kwt.overwrite(ctx, added, True, False)
                    return ret
                finally:
                    kwt.restrict = origrestrict

    repo.__class__ = kwrepo
    # stash the templater so command wrappers can find it via the repo
    repo._keywordkwt = kwt
@@ -1,83 +1,83
1 1 # pointer.py - Git-LFS pointer serialization
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import re
11 11
12 12 from mercurial.i18n import _
13 13
14 14 from mercurial import (
15 15 error,
16 16 pycompat,
17 17 )
18 18 from mercurial.utils import (
19 19 stringutil,
20 20 )
21 21
class InvalidPointer(error.RevlogError):
    """Raised when git-lfs pointer text cannot be parsed or validated."""
24 24
class gitlfspointer(dict):
    """In-memory representation of a git-lfs pointer file.

    A pointer is a small "key value\\n" text blob; 'version', 'oid' and
    'size' are mandatory keys (see the git-lfs spec).
    """

    VERSION = 'https://git-lfs.github.com/spec/v1'

    def __init__(self, *args, **kwargs):
        self['version'] = self.VERSION
        super(gitlfspointer, self).__init__(*args)
        self.update(pycompat.byteskwargs(kwargs))

    @classmethod
    def deserialize(cls, text):
        """Parse pointer *text*; raise InvalidPointer on malformed input."""
        try:
            return cls(l.split(' ', 1) for l in text.splitlines()).validate()
        except ValueError: # l.split returns 1 item instead of 2
            raise InvalidPointer(_('cannot parse git-lfs text: %s')
                                 % stringutil.pprint(text))

    def serialize(self):
        # 'version' must come first; the remaining keys are sorted
        sortkeyfunc = lambda x: (x[0] != 'version', x)
        items = sorted(self.validate().iteritems(), key=sortkeyfunc)
        return ''.join('%s %s\n' % (k, v) for k, v in items)

    def oid(self):
        # 'oid' looks like 'sha256:<hex>'; return just the hex digest
        return self['oid'].split(':')[-1]

    def size(self):
        return int(self['size'])

    # regular expressions used by validate()
    # see https://github.com/git-lfs/git-lfs/blob/master/docs/spec.md
    _keyre = re.compile(br'\A[a-z0-9.-]+\Z')
    _valuere = re.compile(br'\A[^\n]*\Z')
    _requiredre = {
        'size': re.compile(br'\A[0-9]+\Z'),
        'oid': re.compile(br'\Asha256:[0-9a-f]{64}\Z'),
        # reescape (not re.escape) keeps the pattern a bytes literal
        'version': re.compile(br'\A%s\Z' % stringutil.reescape(VERSION)),
    }

    def validate(self):
        """raise InvalidPointer on error. return self if there is no error"""
        requiredcount = 0
        for k, v in self.iteritems():
            if k in self._requiredre:
                if not self._requiredre[k].match(v):
                    raise InvalidPointer(
                        _('unexpected lfs pointer value: %s=%s')
                        % (k, stringutil.pprint(v)))
                requiredcount += 1
            elif not self._keyre.match(k):
                raise InvalidPointer(_('unexpected lfs pointer key: %s') % k)
            if not self._valuere.match(v):
                raise InvalidPointer(_('unexpected lfs pointer value: %s=%s')
                                     % (k, stringutil.pprint(v)))
        if len(self._requiredre) != requiredcount:
            miss = sorted(set(self._requiredre.keys()).difference(self.keys()))
            raise InvalidPointer(_('missing lfs pointer keys: %s')
                                 % ', '.join(miss))
        return self

deserialize = gitlfspointer.deserialize
@@ -1,804 +1,804
1 1 # hgweb/webutil.py - utility library for the web interface.
2 2 #
3 3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from __future__ import absolute_import
10 10
11 11 import copy
12 12 import difflib
13 13 import os
14 14 import re
15 15
16 16 from ..i18n import _
17 17 from ..node import hex, nullid, short
18 18
19 19 from .common import (
20 20 ErrorResponse,
21 21 HTTP_BAD_REQUEST,
22 22 HTTP_NOT_FOUND,
23 23 paritygen,
24 24 )
25 25
26 26 from .. import (
27 27 context,
28 28 error,
29 29 match,
30 30 mdiff,
31 31 obsutil,
32 32 patch,
33 33 pathutil,
34 34 pycompat,
35 35 scmutil,
36 36 templatefilters,
37 37 templatekw,
38 38 templateutil,
39 39 ui as uimod,
40 40 util,
41 41 )
42 42
43 43 from ..utils import (
44 44 stringutil,
45 45 )
46 46
47 47 archivespecs = util.sortdict((
48 48 ('zip', ('application/zip', 'zip', '.zip', None)),
49 49 ('gz', ('application/x-gzip', 'tgz', '.tar.gz', None)),
50 50 ('bz2', ('application/x-bzip2', 'tbz2', '.tar.bz2', None)),
51 51 ))
52 52
def archivelist(ui, nodeid, url=None):
    """Return a mappinglist of archive types enabled in the configuration."""
    allowed = ui.configlist('web', 'allow-archive', untrusted=True)
    entries = []
    for typ, spec in archivespecs.iteritems():
        enabled = typ in allowed or ui.configbool('web', 'allow' + typ,
                                                  untrusted=True)
        if enabled:
            entries.append({
                'type': typ,
                'extension': spec[2],
                'node': nodeid,
                'url': url,
            })
    return templateutil.mappinglist(entries)
68 68
def up(p):
    """Return the parent directory of *p*, normalized to "/.../" form."""
    if not p.startswith("/"):
        p = "/" + p
    if p.endswith("/"):
        p = p[:-1]
    parent = os.path.dirname(p)
    if parent == "/":
        return "/"
    return parent + "/"
78 78
def _navseq(step, firststep=None):
    """Yield an increasing sequence of navigation jump sizes.

    Starts at *firststep* (if given; 20..40 is rounded up to 50), then
    continues forever with 1*10^k and 3*10^k.
    """
    if firststep:
        yield firststep
        if 20 <= firststep <= 40:
            firststep = 50
            yield firststep
    assert step > 0
    assert firststep > 0
    while step <= firststep:
        step *= 10
    while True:
        yield step
        yield 3 * step
        step *= 10
93 93
class revnav(object):
    """Generate navigation link data (e.g. +10/-100) over the changelog."""

    def __init__(self, repo):
        """Navigation generation object

        :repo: repo object we generate nav for
        """
        # used for hex generation
        self._revlog = repo.changelog

    def __nonzero__(self):
        """return True if any revision to navigate over"""
        return self._first() is not None

    __bool__ = __nonzero__

    def _first(self):
        """return the minimum non-filtered changeset or None"""
        try:
            return next(iter(self._revlog))
        except StopIteration:
            return None

    def hex(self, rev):
        return hex(self._revlog.node(rev))

    def gen(self, pos, pagelen, limit):
        """computes label and revision id for navigation link

        :pos: is the revision relative to which we generate navigation.
        :pagelen: the size of each navigation page
        :limit: how far shall we link

        The return is:
            - a single element mappinglist
            - containing a dictionary with a `before` and `after` key
            - values are dictionaries with `label` and `node` keys
        """
        if not self:
            # empty repo: both directions are empty
            return templateutil.mappinglist([
                {'before': templateutil.mappinglist([]),
                 'after': templateutil.mappinglist([])},
            ])

        targets = []
        for offset in _navseq(1, pagelen):
            if offset > limit:
                break
            targets.append(pos + offset)
            targets.append(pos - offset)
        targets.sort()

        first = self._first()
        navbefore = [{'label': '(%i)' % first, 'node': self.hex(first)}]
        navafter = []
        for rev in targets:
            if rev not in self._revlog:
                continue
            if pos < rev < limit:
                navafter.append({'label': '+%d' % abs(rev - pos),
                                 'node': self.hex(rev)})
            if 0 < rev < pos:
                navbefore.append({'label': '-%d' % abs(rev - pos),
                                  'node': self.hex(rev)})

        navafter.append({'label': 'tip', 'node': 'tip'})

        # TODO: maybe this can be a scalar object supporting tomap()
        return templateutil.mappinglist([
            {'before': templateutil.mappinglist(navbefore),
             'after': templateutil.mappinglist(navafter)},
        ])
167 167
class filerevnav(revnav):
    """revnav over a single file's filelog."""

    def __init__(self, repo, path):
        """Navigation generation object

        :repo: repo object we generate nav for
        :path: path of the file we generate nav for
        """
        # used for iteration
        self._changelog = repo.unfiltered().changelog
        # used for hex generation
        self._revlog = repo.file(path)

    def hex(self, rev):
        changerev = self._revlog.linkrev(rev)
        return hex(self._changelog.node(changerev))
183 183
184 184 # TODO: maybe this can be a wrapper class for changectx/filectx list, which
185 185 # yields {'ctx': ctx}
def _ctxsgen(context, ctxs):
    """Yield a template mapping for each changectx/filectx in *ctxs*."""
    for ctx in ctxs:
        mapping = {
            'node': ctx.hex(),
            'rev': ctx.rev(),
            'user': ctx.user(),
            'date': ctx.date(),
            'description': ctx.description(),
            'branch': ctx.branch(),
        }
        # filectxs additionally expose a path
        if util.safehasattr(ctx, 'path'):
            mapping['file'] = ctx.path()
        yield mapping
199 199
def _siblings(siblings=None, hiderev=None):
    """Wrap sibling changesets for templates, hiding a lone *hiderev*."""
    sibs = [s for s in (siblings or []) if s.node() != nullid]
    if len(sibs) == 1 and sibs[0].rev() == hiderev:
        sibs = []
    return templateutil.mappinggenerator(_ctxsgen, args=(sibs,))
207 207
def difffeatureopts(req, ui, section):
    """Build diff feature options, letting query parameters override config."""
    diffopts = patch.difffeatureopts(ui, untrusted=True,
                                     section=section, whitespace=True)

    for k in ('ignorews', 'ignorewsamount', 'ignorewseol', 'ignoreblanklines'):
        v = req.qsparams.get(k)
        if v is None:
            continue
        v = stringutil.parsebool(v)
        # an unparsable value still counts as "parameter present" -> True
        setattr(diffopts, k, v if v is not None else True)

    return diffopts
219 219
def annotate(req, fctx, ui):
    """Annotate *fctx* using the web 'annotate' diff settings."""
    return fctx.annotate(follow=True,
                         diffopts=difffeatureopts(req, ui, 'annotate'))
223 223
def parents(ctx, hide=None):
    """Parent links for *ctx*; a filectx prefers its introducing changeset."""
    if isinstance(ctx, context.basefilectx):
        introrev = ctx.introrev()
        if ctx.changectx().rev() != introrev:
            return _siblings([ctx.repo()[introrev]], hide)
    return _siblings(ctx.parents(), hide)
230 230
def children(ctx, hide=None):
    """Child links for *ctx*, optionally hiding revision *hide*."""
    return _siblings(ctx.children(), hide)
233 233
def renamelink(fctx):
    """Rename-source info for *fctx*, or an empty list when not renamed."""
    renamed = fctx.renamed()
    if not renamed:
        return templateutil.mappinglist([])
    return templateutil.mappinglist([{'file': renamed[0],
                                      'node': hex(renamed[1])}])
239 239
def nodetagsdict(repo, node):
    """Tags on *node* as a hybrid list."""
    tags = repo.nodetags(node)
    return templateutil.hybridlist(tags, name='name')
242 242
def nodebookmarksdict(repo, node):
    """Bookmarks on *node* as a hybrid list."""
    marks = repo.nodebookmarks(node)
    return templateutil.hybridlist(marks, name='name')
245 245
def nodebranchdict(repo, ctx):
    """Return [branch] when *ctx* is its branch's tip, else an empty list."""
    branch = ctx.branch()
    # If this is an empty repo, ctx.node() == nullid,
    # ctx.branch() == 'default'.
    try:
        branchnode = repo.branchtip(branch)
    except error.RepoLookupError:
        branchnode = None
    branches = [branch] if branchnode == ctx.node() else []
    return templateutil.hybridlist(branches, name='name')
258 258
def nodeinbranch(repo, ctx):
    """Return [branch] for a non-default, non-branch-tip changeset, else []."""
    branch = ctx.branch()
    try:
        branchnode = repo.branchtip(branch)
    except error.RepoLookupError:
        branchnode = None
    if branch != 'default' and branchnode != ctx.node():
        branches = [branch]
    else:
        branches = []
    return templateutil.hybridlist(branches, name='name')
269 269
def nodebranchnodefault(ctx):
    """Return [branch] unless *ctx* is on the default branch."""
    branch = ctx.branch()
    branches = [] if branch == 'default' else [branch]
    return templateutil.hybridlist(branches, name='name')
276 276
def _nodenamesgen(context, f, node, name):
    """Yield one {name: value} mapping per value returned by f(node)."""
    for value in f(node):
        yield {name: value}
280 280
def showtag(repo, t1, node=nullid):
    """Render the tags of *node* through template *t1*."""
    args = (repo.nodetags, node, 'tag')
    return templateutil.mappinggenerator(_nodenamesgen, args=args, name=t1)
284 284
def showbookmark(repo, t1, node=nullid):
    """Render the bookmarks of *node* through template *t1*."""
    args = (repo.nodebookmarks, node, 'bookmark')
    return templateutil.mappinggenerator(_nodenamesgen, args=args, name=t1)
288 288
def branchentries(repo, stripecount, limit=0):
    """Lazily emit branch-head mappings, most recent branch tip first."""
    tips = []
    heads = repo.heads()
    parity = paritygen(stripecount)
    sortkey = lambda item: (not item[1], item[0].rev())

    def entries(context):
        count = 0
        if not tips:
            # first call: collect the tip changectx of every branch
            for tag, hs, tip, closed in repo.branchmap().iterbranches():
                tips.append((repo[tip], closed))
        for ctx, closed in sorted(tips, key=sortkey, reverse=True):
            if limit > 0 and count >= limit:
                return
            count += 1
            if closed:
                status = 'closed'
            elif ctx.node() not in heads:
                status = 'inactive'
            else:
                status = 'open'
            yield {
                'parity': next(parity),
                'branch': ctx.branch(),
                'status': status,
                'node': ctx.hex(),
                'date': ctx.date()
            }

    return templateutil.mappinggenerator(entries)
319 319
def cleanpath(repo, path):
    """Normalize *path* to a canonical repo-relative path."""
    return pathutil.canonpath(repo.root, '', path.lstrip('/'))
323 323
def changectx(repo, req):
    """Resolve the changeset named by the 'node' query parameter (or tip).

    For "a:b" range syntax the right-hand side is used.
    """
    changeid = "tip"
    if 'node' in req.qsparams:
        changeid = req.qsparams['node']
        ipos = changeid.find(':')
        if ipos != -1:
            changeid = changeid[ipos + 1:]

    return scmutil.revsymbol(repo, changeid)
333 333
def basechangectx(repo, req):
    """Resolve the left side of an "a:b" node range, or None if absent."""
    if 'node' not in req.qsparams:
        return None
    changeid = req.qsparams['node']
    ipos = changeid.find(':')
    if ipos == -1:
        return None
    return scmutil.revsymbol(repo, changeid[:ipos])
343 343
def filectx(repo, req):
    """Resolve the filectx addressed by 'file' plus 'node' or 'filenode'."""
    if 'file' not in req.qsparams:
        raise ErrorResponse(HTTP_NOT_FOUND, 'file not given')
    path = cleanpath(repo, req.qsparams['file'])
    if 'node' in req.qsparams:
        changeid = req.qsparams['node']
    elif 'filenode' in req.qsparams:
        changeid = req.qsparams['filenode']
    else:
        raise ErrorResponse(HTTP_NOT_FOUND, 'node or filenode not given')
    try:
        return scmutil.revsymbol(repo, changeid)[path]
    except error.RepoError:
        # not a changeset symbol: treat it as a file revision id
        return repo.filectx(path, fileid=changeid)
360 360
def linerange(req):
    """Parse the 'linerange' query parameter ("from:to").

    Returns None when absent; raises ErrorResponse on duplicate or
    malformed values.
    """
    lranges = req.qsparams.getall('linerange')
    if not lranges:
        return None
    if len(lranges) > 1:
        raise ErrorResponse(HTTP_BAD_REQUEST,
                            'redundant linerange parameter')
    try:
        fromline, toline = map(int, lranges[0].split(':', 1))
    except ValueError:
        raise ErrorResponse(HTTP_BAD_REQUEST,
                            'invalid linerange parameter')
    try:
        return util.processlinerange(fromline, toline)
    except error.ParseError as exc:
        raise ErrorResponse(HTTP_BAD_REQUEST, pycompat.bytestr(exc))
377 377
def formatlinerange(fromline, toline):
    """Render a 0-based line range in the 1-based "from:to" form."""
    return '%d:%d' % (fromline + 1, toline)
380 380
def _succsandmarkersgen(context, mapping):
    """Yield successor/marker items, wrapping successors as sibling links."""
    repo = context.resource(mapping, 'repo')
    itemmappings = templatekw.showsuccsandmarkers(context, mapping)
    for item in itemmappings.tovalue(context, mapping):
        item['successors'] = _siblings(repo[successor]
                                       for successor in item['successors'])
        yield item
388 388
def succsandmarkers(context, mapping):
    """Template keyword: successors and markers of the current changeset."""
    return templateutil.mappinggenerator(_succsandmarkersgen, args=(mapping,))

# teach templater succsandmarkers is switched to (context, mapping) API
succsandmarkers._requires = {'repo', 'ctx'}
394 394
def _whyunstablegen(context, mapping):
    """Yield instability explanations, wrapping divergent nodes as siblings."""
    repo = context.resource(mapping, 'repo')
    ctx = context.resource(mapping, 'ctx')

    for entry in obsutil.whyunstable(repo, ctx):
        if entry.get('divergentnodes'):
            entry['divergentnodes'] = _siblings(entry['divergentnodes'])
        yield entry
404 404
def whyunstable(context, mapping):
    """Template keyword: explain why the current changeset is unstable."""
    return templateutil.mappinggenerator(_whyunstablegen, args=(mapping,))

whyunstable._requires = {'repo', 'ctx'}
409 409
def commonentry(repo, ctx):
    """Template mapping shared by changeset- and file-oriented views."""
    node = ctx.node()
    return {
        # TODO: perhaps ctx.changectx() should be assigned if ctx is a
        # filectx, but I'm not pretty sure if that would always work because
        # fctx.parents() != fctx.changectx.parents() for example.
        'ctx': ctx,
        'rev': ctx.rev(),
        'node': hex(node),
        'author': ctx.user(),
        'desc': ctx.description(),
        'date': ctx.date(),
        'extra': ctx.extra(),
        'phase': ctx.phasestr(),
        'obsolete': ctx.obsolete(),
        'succsandmarkers': succsandmarkers,
        'instabilities': templateutil.hybridlist(ctx.instabilities(),
                                                 name='instability'),
        'whyunstable': whyunstable,
        'branch': nodebranchnodefault(ctx),
        'inbranch': nodeinbranch(repo, ctx),
        'branches': nodebranchdict(repo, ctx),
        'tags': nodetagsdict(repo, node),
        'bookmarks': nodebookmarksdict(repo, node),
        'parent': lambda **x: parents(ctx),
        'child': lambda **x: children(ctx),
    }
437 437
def changelistentry(web, ctx):
    '''Obtain a dictionary to be used for entries in a changelist.

    This function is called when producing items for the "entries" list passed
    to the "shortlog" and "changelog" templates.
    '''
    repo = web.repo
    rev = ctx.rev()
    n = ctx.node()

    entry = commonentry(repo, ctx)
    entry.update(
        allparents=lambda **x: parents(ctx),
        parent=lambda **x: parents(ctx, rev - 1),
        child=lambda **x: children(ctx, rev + 1),
        changelogtag=showtag(repo, 'changelogtag', n),
        files=listfilediffs(ctx.files(), n, web.maxfiles),
    )
    return entry
459 459
def changelistentries(web, revs, maxcount, parityfn):
    """Emit up to *maxcount* changelist records for an iterable of revs."""
    repo = web.repo
    count = 0
    for rev in revs:
        if count >= maxcount:
            break
        count += 1
        entry = changelistentry(web, repo[rev])
        entry['parity'] = next(parityfn)
        yield entry
475 475
def symrevorshortnode(req, ctx):
    """Return the escaped 'node' query parameter, or ctx's short hash."""
    if 'node' in req.qsparams:
        return templatefilters.revescape(req.qsparams['node'])
    return short(ctx.node())
481 481
def _listfilesgen(context, ctx, stripecount):
    """Yield rendered file links for every file touched by *ctx*."""
    parity = paritygen(stripecount)
    for blockno, f in enumerate(ctx.files()):
        # a touched file absent from ctx gets the no-link template
        template = 'filenodelink' if f in ctx else 'filenolink'
        yield context.process(template, {
            'node': ctx.hex(),
            'file': f,
            'blockno': blockno + 1,
            'parity': next(parity),
        })
492 492
def changesetentry(web, ctx):
    '''Obtain a dictionary to be used to render the "changeset" template.'''
    showtags = showtag(web.repo, 'changesettag', ctx.node())
    showbookmarks = showbookmark(web.repo, 'changesetbookmark', ctx.node())
    showbranch = nodebranchnodefault(ctx)

    basectx = basechangectx(web.repo, web.req)
    if basectx is None:
        basectx = ctx.p1()

    # query parameter overrides the configured style
    style = web.config('web', 'style')
    if 'style' in web.req.qsparams:
        style = web.req.qsparams['style']

    diff = diffs(web, ctx, basectx, None, style)

    parity = paritygen(web.stripecount)
    diffstatsgen = diffstatgen(ctx, basectx)
    diffstats = diffstat(ctx, diffstatsgen, parity)

    return dict(
        diff=diff,
        symrev=symrevorshortnode(web.req, ctx),
        basenode=basectx.hex(),
        changesettag=showtags,
        changesetbookmark=showbookmarks,
        changesetbranch=showbranch,
        files=templateutil.mappedgenerator(_listfilesgen,
                                           args=(ctx, web.stripecount)),
        diffsummary=lambda **x: diffsummary(diffstatsgen),
        diffstat=diffstats,
        archives=web.archivelist(ctx.hex()),
        **pycompat.strkwargs(commonentry(web.repo, ctx)))
527 527
def _listfilediffsgen(context, files, node, max):
    """Yield file-diff links, truncated with an ellipsis after *max* files."""
    for f in files[:max]:
        yield context.process('filedifflink', {'node': hex(node), 'file': f})
    if len(files) > max:
        yield context.process('fileellipses', {})
533 533
def listfilediffs(files, node, max):
    """Return a mappedgenerator of file-diff links (truncated at *max*)."""
    return templateutil.mappedgenerator(_listfilediffsgen,
                                        args=(files, node, max))
537 537
def _prettyprintdifflines(context, lines, blockno, lineidprefix):
    """Render diff lines, classifying each by its leading character."""
    for lineno, line in enumerate(lines, 1):
        difflineno = "%d.%d" % (blockno, lineno)
        if line.startswith('+'):
            ltype = "difflineplus"
        elif line.startswith('-'):
            ltype = "difflineminus"
        elif line.startswith('@'):
            ltype = "difflineat"
        else:
            ltype = "diffline"
        yield context.process(ltype, {
            'line': line,
            'lineno': lineno,
            'lineid': lineidprefix + "l%s" % difflineno,
            'linenumber': "% 8s" % difflineno,
        })
555 555
def _diffsgen(context, repo, ctx, basectx, files, style, stripecount,
              linerange, lineidprefix):
    """Yield one diff-block mapping per file changed between two changesets."""
    if files:
        m = match.exact(repo.root, repo.getcwd(), files)
    else:
        m = match.always(repo.root, repo.getcwd())

    diffopts = patch.diffopts(repo.ui, untrusted=True)
    node1 = basectx.node()
    node2 = ctx.node()
    parity = paritygen(stripecount)

    diffhunks = patch.diffhunks(repo, node1, node2, m, opts=diffopts)
    for blockno, (fctx1, fctx2, header, hunks) in enumerate(diffhunks, 1):
        if style != 'raw':
            header = header[1:]
        lines = [h + '\n' for h in header]
        for hunkrange, hunklines in hunks:
            if linerange is not None and hunkrange is not None:
                s1, l1, s2, l2 = hunkrange
                # skip hunks outside the requested line range
                if not mdiff.hunkinrange((s2, l2), linerange):
                    continue
            lines.extend(hunklines)
        if lines:
            l = templateutil.mappedgenerator(_prettyprintdifflines,
                                             args=(lines, blockno,
                                                   lineidprefix))
            yield {
                'parity': next(parity),
                'blockno': blockno,
                'lines': l,
            }
588 588
def diffs(web, ctx, basectx, files, style, linerange=None, lineidprefix=''):
    """Return a 'diffblock' mappinggenerator for *ctx* against *basectx*."""
    args = (web.repo, ctx, basectx, files, style, web.stripecount,
            linerange, lineidprefix)
    return templateutil.mappinggenerator(_diffsgen, args=args, name='diffblock')
593 593
def _compline(type, leftlineno, leftline, rightlineno, rightline):
    """Build one side-by-side comparison line mapping.

    Either side may be absent (lineno falsy), in which case its fields
    are rendered empty.
    """
    lineid = ''
    if leftlineno:
        lineid += "l%d" % leftlineno
    if rightlineno:
        lineid += "r%d" % rightlineno
    llno = '%d' % leftlineno if leftlineno else ''
    rlno = '%d' % rightlineno if rightlineno else ''
    return {
        'type': type,
        'lineid': lineid,
        'leftlineno': leftlineno,
        'leftlinenumber': "% 6s" % llno,
        'leftline': leftline or '',
        'rightlineno': rightlineno,
        'rightlinenumber': "% 6s" % rlno,
        'rightline': rightline or '',
    }
609 609
def _getcompblockgen(context, leftlines, rightlines, opcodes):
    """Yield comparison lines for a list of SequenceMatcher opcodes."""
    for type, llo, lhi, rlo, rhi in opcodes:
        len1 = lhi - llo
        len2 = rhi - rlo
        count = min(len1, len2)
        # emit paired lines first
        for i in xrange(count):
            yield _compline(type=type,
                            leftlineno=llo + i + 1,
                            leftline=leftlines[llo + i],
                            rightlineno=rlo + i + 1,
                            rightline=rightlines[rlo + i])
        # then whichever side has surplus lines, unpaired
        if len1 > len2:
            for i in xrange(llo + count, lhi):
                yield _compline(type=type,
                                leftlineno=i + 1,
                                leftline=leftlines[i],
                                rightlineno=None,
                                rightline=None)
        elif len2 > len1:
            for i in xrange(rlo + count, rhi):
                yield _compline(type=type,
                                leftlineno=None,
                                leftline=None,
                                rightlineno=i + 1,
                                rightline=rightlines[i])
635 635
def _getcompblock(leftlines, rightlines, opcodes):
    """Wrap _getcompblockgen as a 'comparisonline' mappinggenerator."""
    return templateutil.mappinggenerator(
        _getcompblockgen, args=(leftlines, rightlines, opcodes),
        name='comparisonline')
640 640
def _comparegen(context, contextnum, leftlines, rightlines):
    '''Generator function that provides side-by-side comparison data.'''
    s = difflib.SequenceMatcher(None, leftlines, rightlines)
    if contextnum < 0:
        # negative context: compare the files in full
        yield {'lines': _getcompblock(leftlines, rightlines, s.get_opcodes())}
    else:
        for oc in s.get_grouped_opcodes(n=contextnum):
            yield {'lines': _getcompblock(leftlines, rightlines, oc)}
651 651
def compare(contextnum, leftlines, rightlines):
    """Return a 'comparisonblock' mappinggenerator for two line lists."""
    return templateutil.mappinggenerator(
        _comparegen, args=(contextnum, leftlines, rightlines),
        name='comparisonblock')
656 656
def diffstatgen(ctx, basectx):
    '''Generator function that provides the diffstat data.'''
    stats = patch.diffstatdata(
        util.iterlines(ctx.diff(basectx, noprefix=False)))
    maxname, maxtotal, addtotal, removetotal, binary = patch.diffstatsum(stats)
    while True:
        # yield the same tuple forever so several consumers can next() it
        yield stats, maxname, maxtotal, addtotal, removetotal, binary
665 665
def diffsummary(statgen):
    '''Return a short summary of the diff.'''
    stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen)
    return _(' %d files changed, %d insertions(+), %d deletions(-)\n') % (
        len(stats), addtotal, removetotal)
672 672
def _diffstattmplgen(context, ctx, statgen, parity):
    """Yield one rendered diffstat row per file in the diff."""
    stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen)
    files = ctx.files()

    def pct(i):
        # percentage of the largest per-file total, guarding empty diffs
        if maxtotal == 0:
            return 0
        return (float(i) / maxtotal) * 100

    fileno = 0
    for filename, adds, removes, isbinary in stats:
        template = 'diffstatlink' if filename in files else 'diffstatnolink'
        total = adds + removes
        fileno += 1
        yield context.process(template, {
            'node': ctx.hex(),
            'file': filename,
            'fileno': fileno,
            'total': total,
            'addpct': pct(adds),
            'removepct': pct(removes),
            'parity': next(parity),
        })
696 696
def diffstat(ctx, statgen, parity):
    '''Return a diffstat template for each file in the diff.'''
    return templateutil.mappedgenerator(_diffstattmplgen,
                                        args=(ctx, statgen, parity))
701 701
class sessionvars(templateutil.wrapped):
    """Wrapped mapping of query-string variables for URL generation.

    *start* is the separator emitted before the first variable ('?' by
    default); subsequent variables are separated by '&'.
    """

    def __init__(self, vars, start='?'):
        self._start = start
        self._vars = vars

    def __getitem__(self, key):
        return self._vars[key]

    def __setitem__(self, key, value):
        self._vars[key] = value

    def __copy__(self):
        return sessionvars(copy.copy(self._vars), self._start)

    def contains(self, context, mapping, item):
        item = templateutil.unwrapvalue(context, mapping, item)
        return item in self._vars

    def getmember(self, context, mapping, key):
        key = templateutil.unwrapvalue(context, mapping, key)
        return self._vars.get(key)

    def getmin(self, context, mapping):
        raise error.ParseError(_('not comparable'))

    def getmax(self, context, mapping):
        raise error.ParseError(_('not comparable'))

    def filter(self, context, mapping, select):
        # implement if necessary
        raise error.ParseError(_('not filterable'))

    def itermaps(self, context):
        separator = self._start
        for key, value in sorted(self._vars.iteritems()):
            yield {'name': key,
                   'value': pycompat.bytestr(value),
                   'separator': separator,
                   }
            separator = '&'

    def join(self, context, mapping, sep):
        # could be '{separator}{name}={value|urlescape}'
        raise error.ParseError(_('not displayable without template'))

    def show(self, context, mapping):
        # fix: join() takes (context, mapping, sep); the previous call
        # self.join(context, '') dropped mapping and would raise TypeError
        # instead of the intended ParseError
        return self.join(context, mapping, '')

    def tobool(self, context, mapping):
        return bool(self._vars)

    def tovalue(self, context, mapping):
        return self._vars
755 755
class wsgiui(uimod.ui):
    """ui subclass for WSGI use with a fixed terminal width."""
    def termwidth(self):
        # default termwidth breaks under mod_wsgi
        return 80
760 760
def getwebsubs(repo):
    """Build the (regexp, format) substitution table from the config.

    Patterns look like sed substitutions: s/regexp/format/[flags], with an
    arbitrary delimiter character after the leading "s".
    """
    websubtable = []
    websubdefs = repo.ui.configitems('websub')
    # we must maintain interhg backwards compatibility
    websubdefs += repo.ui.configitems('interhg')
    for key, pattern in websubdefs:
        # grab the delimiter from the character after the "s"
        unesc = pattern[1:2]
        delim = stringutil.reescape(unesc)

        # identify portions of the pattern, taking care to avoid escaped
        # delimiters. the replace format and flags are optional, but
        # delimiters are required.
        # (local renamed from 'match' to avoid shadowing the match module)
        mobj = re.match(
            br'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux])*$'
            % (delim, delim, delim), pattern)
        if not mobj:
            repo.ui.warn(_("websub: invalid pattern for %s: %s\n")
                         % (key, pattern))
            continue

        # we need to unescape the delimiter for regexp and format
        delim_re = re.compile(br'(?<!\\)\\%s' % delim)
        regexp = delim_re.sub(unesc, mobj.group(1))
        format = delim_re.sub(unesc, mobj.group(2))

        # the pattern allows for 6 regexp flags, so set them if necessary
        flagin = mobj.group(3)
        flags = 0
        if flagin:
            for flag in flagin.upper():
                flags |= re.__dict__[flag]

        try:
            regexp = re.compile(regexp, flags)
            websubtable.append((regexp, format))
        except re.error:
            repo.ui.warn(_("websub: invalid regexp for %s: %s\n")
                         % (key, regexp))
    return websubtable
801 801
def getgraphnode(repo, ctx):
    """Return the graph node marker string for *ctx*."""
    current = templatekw.getgraphnodecurrent(repo, ctx)
    symbol = templatekw.getgraphnodesymbol(ctx)
    return current + symbol
@@ -1,1032 +1,1032
1 1 # match.py - filename matching
2 2 #
3 3 # Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import, print_function
9 9
10 10 import copy
11 11 import os
12 12 import re
13 13
14 14 from .i18n import _
15 15 from . import (
16 16 encoding,
17 17 error,
18 18 pathutil,
19 19 pycompat,
20 20 util,
21 21 )
22 22 from .utils import (
23 23 stringutil,
24 24 )
25 25
# every pattern kind prefix understood by _patsplit()
allpatternkinds = ('re', 'glob', 'path', 'relglob', 'relpath', 'relre',
                   'listfile', 'listfile0', 'set', 'include', 'subinclude',
                   'rootfilesin')
# kinds whose patterns are interpreted relative to cwd rather than repo root
cwdrelativepatternkinds = ('relpath', 'glob')

propertycache = util.propertycache
32 32
def _rematcher(regex):
    '''compile the regexp with the best available regexp engine and return a
    matcher function'''
    m = util.re.compile(regex)
    try:
        # slightly faster, provided by facebook's re2 bindings
        return m.test_match
    except AttributeError:
        # stdlib re pattern object: fall back to its bound match method
        return m.match
42 42
def _expandsets(kindpats, ctx, listsubrepos):
    '''Returns the kindpats list with the 'set' patterns expanded.

    'set' entries are evaluated as fileset expressions against *ctx* (and,
    when listsubrepos is true, against each subrepo); matching file names are
    collected into the returned set. All other entries pass through.
    '''
    filenames = set()
    remaining = []

    for kind, pat, source in kindpats:
        if kind != 'set':
            remaining.append((kind, pat, source))
            continue
        if not ctx:
            raise error.ProgrammingError("fileset expression with no "
                                         "context")
        filenames.update(ctx.getfileset(pat))

        if listsubrepos:
            for subpath in ctx.substate:
                matched = ctx.sub(subpath).getfileset(pat)
                filenames.update(subpath + '/' + f for f in matched)

    return filenames, remaining
64 64
def _expandsubinclude(kindpats, root):
    '''Returns the list of subinclude matcher args and the kindpats without the
    subincludes in it.'''
    relmatchers = []
    other = []

    for kind, pat, source in kindpats:
        if kind == 'subinclude':
            # resolve the referenced pattern file relative to the file
            # that mentioned it
            sourceroot = pathutil.dirname(util.normpath(source))
            pat = util.pconvert(pat)
            path = pathutil.join(sourceroot, pat)

            newroot = pathutil.dirname(path)
            matcherargs = (newroot, '', [], ['include:%s' % path])

            prefix = pathutil.canonpath(root, root, newroot)
            if prefix:
                prefix += '/'
            relmatchers.append((prefix, matcherargs))
        else:
            other.append((kind, pat, source))

    return relmatchers, other
88 88
89 89 def _kindpatsalwaysmatch(kindpats):
90 90 """"Checks whether the kindspats match everything, as e.g.
91 91 'relpath:.' does.
92 92 """
93 93 for kind, pat, source in kindpats:
94 94 if pat != '' or kind not in ['relpath', 'glob']:
95 95 return False
96 96 return True
97 97
def match(root, cwd, patterns=None, include=None, exclude=None, default='glob',
          exact=False, auditor=None, ctx=None, listsubrepos=False, warn=None,
          badfn=None, icasefs=False):
    """build an object to match a set of file patterns

    arguments:
    root - the canonical root of the tree you're matching against
    cwd - the current working directory, if relevant
    patterns - patterns to find
    include - patterns to include (unless they are excluded)
    exclude - patterns to exclude (even if they are included)
    default - if a pattern in patterns has no explicit type, assume this one
    exact - patterns are actually filenames (include/exclude still apply)
    warn - optional function used for printing warnings
    badfn - optional bad() callback for this matcher instead of the default
    icasefs - make a matcher for wdir on case insensitive filesystems, which
        normalizes the given patterns to the case in the filesystem

    a pattern is one of:
    'glob:<glob>' - a glob relative to cwd
    're:<regexp>' - a regular expression
    'path:<path>' - a path relative to repository root, which is matched
                    recursively
    'rootfilesin:<path>' - a path relative to repository root, which is
                    matched non-recursively (will not match subdirectories)
    'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
    'relpath:<path>' - a path relative to cwd
    'relre:<regexp>' - a regexp that needn't match the start of a name
    'set:<fileset>' - a fileset expression
    'include:<path>' - a file of patterns to read and include
    'subinclude:<path>' - a file of patterns to match against files under
                          the same directory
    '<something>' - a pattern of the specified default type
    """
    normalize = _donormalize
    if icasefs:
        if exact:
            raise error.ProgrammingError("a case-insensitive exact matcher "
                                         "doesn't make sense")
        dirstate = ctx.repo().dirstate
        dsnormalize = dirstate.normalize

        # wrap _donormalize so every pattern is additionally folded to the
        # case stored in the filesystem/dirstate
        def normalize(patterns, default, root, cwd, auditor, warn):
            kp = _donormalize(patterns, default, root, cwd, auditor, warn)
            kindpats = []
            for kind, pats, source in kp:
                if kind not in ('re', 'relre'):  # regex can't be normalized
                    p = pats
                    pats = dsnormalize(pats)

                    # Preserve the original to handle a case only rename.
                    if p != pats and p in dirstate:
                        kindpats.append((kind, p, source))

                kindpats.append((kind, pats, source))
            return kindpats

    if exact:
        m = exactmatcher(root, cwd, patterns, badfn)
    elif patterns:
        kindpats = normalize(patterns, default, root, cwd, auditor, warn)
        if _kindpatsalwaysmatch(kindpats):
            m = alwaysmatcher(root, cwd, badfn, relativeuipath=True)
        else:
            m = patternmatcher(root, cwd, kindpats, ctx=ctx,
                               listsubrepos=listsubrepos, badfn=badfn)
    else:
        # It's a little strange that no patterns means to match everything.
        # Consider changing this to match nothing (probably using nevermatcher).
        m = alwaysmatcher(root, cwd, badfn)

    # -I/-X are layered on top of the primary matcher via composition
    if include:
        kindpats = normalize(include, 'glob', root, cwd, auditor, warn)
        im = includematcher(root, cwd, kindpats, ctx=ctx,
                            listsubrepos=listsubrepos, badfn=None)
        m = intersectmatchers(m, im)
    if exclude:
        kindpats = normalize(exclude, 'glob', root, cwd, auditor, warn)
        em = includematcher(root, cwd, kindpats, ctx=ctx,
                            listsubrepos=listsubrepos, badfn=None)
        m = differencematcher(m, em)
    return m
180 180
def exact(root, cwd, files, badfn=None):
    """Convenience constructor for a matcher matching exactly *files*."""
    m = exactmatcher(root, cwd, files, badfn=badfn)
    return m
183 183
def always(root, cwd):
    """Convenience constructor for a match-everything matcher."""
    m = alwaysmatcher(root, cwd)
    return m
186 186
def never(root, cwd):
    """Convenience constructor for a match-nothing matcher."""
    m = nevermatcher(root, cwd)
    return m
189 189
def badmatch(match, badfn):
    """Return a shallow copy of *match* whose bad() callback is *badfn*.

    The original matcher is left untouched.
    """
    replacement = copy.copy(match)
    replacement.bad = badfn
    return replacement
197 197
def _donormalize(patterns, default, root, cwd, auditor, warn):
    '''Convert 'kind:pat' from the patterns list to tuples with kind and
    normalized and rooted patterns and with listfiles expanded.'''
    kindpats = []
    for kind, pat in [_patsplit(p, default) for p in patterns]:
        if kind in cwdrelativepatternkinds:
            pat = pathutil.canonpath(root, cwd, pat, auditor)
        elif kind in ('relglob', 'path', 'rootfilesin'):
            pat = util.normpath(pat)
        elif kind in ('listfile', 'listfile0'):
            # pattern names a file containing one pattern per line
            # (NUL-separated for listfile0); recurse to normalize those
            try:
                files = util.readfile(pat)
                if kind == 'listfile0':
                    files = files.split('\0')
                else:
                    files = files.splitlines()
                files = [f for f in files if f]
            except EnvironmentError:
                raise error.Abort(_("unable to read file list (%s)") % pat)
            for k, p, source in _donormalize(files, default, root, cwd,
                                             auditor, warn):
                kindpats.append((k, p, pat))
            continue
        elif kind == 'include':
            try:
                fullpath = os.path.join(root, util.localpath(pat))
                includepats = readpatternfile(fullpath, warn)
                for k, p, source in _donormalize(includepats, default,
                                                 root, cwd, auditor, warn):
                    kindpats.append((k, p, source or pat))
            except error.Abort as inst:
                # prefix the aborting message with the offending file name
                raise error.Abort('%s: %s' % (pat, inst[0]))
            except IOError as inst:
                # unreadable include files are skipped, not fatal
                if warn:
                    warn(_("skipping unreadable pattern file '%s': %s\n") %
                         (pat, stringutil.forcebytestr(inst.strerror)))
            continue
        # else: re or relre - which cannot be normalized
        kindpats.append((kind, pat, ''))
    return kindpats
238 238
class basematcher(object):
    """Base class for all matchers; default behavior matches nothing.

    Subclasses override matchfn()/visitdir() and the capability predicates
    (always/isexact/prefix) to enable caller-side fast paths.
    """

    def __init__(self, root, cwd, badfn=None, relativeuipath=True):
        self._root = root
        self._cwd = cwd
        if badfn is not None:
            self.bad = badfn
        self._relativeuipath = relativeuipath

    def __call__(self, fn):
        return self.matchfn(fn)
    def __iter__(self):
        for f in self._files:
            yield f
    # Callbacks related to how the matcher is used by dirstate.walk.
    # Subscribers to these events must monkeypatch the matcher object.
    def bad(self, f, msg):
        '''Callback from dirstate.walk for each explicit file that can't be
        found/accessed, with an error message.'''

    # If an explicitdir is set, it will be called when an explicitly listed
    # directory is visited.
    explicitdir = None

    # If an traversedir is set, it will be called when a directory discovered
    # by recursive traversal is visited.
    traversedir = None

    def abs(self, f):
        '''Convert a repo path back to path that is relative to the root of the
        matcher.'''
        return f

    def rel(self, f):
        '''Convert repo path back to path that is relative to cwd of matcher.'''
        return util.pathto(self._root, self._cwd, f)

    def uipath(self, f):
        '''Convert repo path to a display path. If patterns or -I/-X were used
        to create this matcher, the display path will be relative to cwd.
        Otherwise it is relative to the root of the repo.'''
        return (self._relativeuipath and self.rel(f)) or self.abs(f)

    @propertycache
    def _files(self):
        return []

    def files(self):
        '''Explicitly listed files or patterns or roots:
        if no patterns or .always(): empty list,
        if exact: list exact files,
        if not .anypats(): list all files and dirs,
        else: optimal roots'''
        return self._files

    @propertycache
    def _fileset(self):
        # set view of _files for O(1) membership tests in exact()
        return set(self._files)

    def exact(self, f):
        '''Returns True if f is in .files().'''
        return f in self._fileset

    def matchfn(self, f):
        # default: match nothing; subclasses override
        return False

    def visitdir(self, dir):
        '''Decides whether a directory should be visited based on whether it
        has potential matches in it or one of its subdirectories. This is
        based on the match's primary, included, and excluded patterns.

        Returns the string 'all' if the given directory and all subdirectories
        should be visited. Otherwise returns True or False indicating whether
        the given directory should be visited.
        '''
        return True

    def always(self):
        '''Matcher will match everything and .files() will be empty --
        optimization might be possible.'''
        return False

    def isexact(self):
        '''Matcher will match exactly the list of files in .files() --
        optimization might be possible.'''
        return False

    def prefix(self):
        '''Matcher will match the paths in .files() recursively --
        optimization might be possible.'''
        return False

    def anypats(self):
        '''None of .always(), .isexact(), and .prefix() is true --
        optimizations will be difficult.'''
        return not self.always() and not self.isexact() and not self.prefix()
335 335
class alwaysmatcher(basematcher):
    '''Matches everything.'''

    def __init__(self, root, cwd, badfn=None, relativeuipath=False):
        super(alwaysmatcher, self).__init__(
            root, cwd, badfn, relativeuipath=relativeuipath)

    def __repr__(self):
        return r'<alwaysmatcher>'

    def matchfn(self, f):
        # every file matches
        return True

    def always(self):
        return True

    def visitdir(self, dir):
        # 'all': the walker never needs to consult this matcher below dir
        return 'all'
354 354
class nevermatcher(basematcher):
    '''Matches nothing.'''

    def __init__(self, root, cwd, badfn=None):
        super(nevermatcher, self).__init__(root, cwd, badfn)

    def __repr__(self):
        return r'<nevermatcher>'

    # It's a little weird to say that the nevermatcher is an exact matcher
    # or a prefix matcher, but it seems to make sense to let callers take
    # fast paths based on either. There will be no exact matches, nor any
    # prefixes (files() returns []), so fast paths iterating over them should
    # be efficient (and correct).
    def isexact(self):
        return True

    def prefix(self):
        return True

    def visitdir(self, dir):
        # nothing can match, so no directory is ever worth visiting
        return False
377 377
class patternmatcher(basematcher):
    """Matches file names against a list of (kind, pat, source) patterns,
    anchored at the end of the name ('$' globsuffix)."""

    def __init__(self, root, cwd, kindpats, ctx=None, listsubrepos=False,
                 badfn=None):
        super(patternmatcher, self).__init__(root, cwd, badfn)

        self._files = _explicitfiles(kindpats)
        self._prefix = _prefix(kindpats)
        self._pats, self.matchfn = _buildmatch(ctx, kindpats, '$', listsubrepos,
                                               root)

    @propertycache
    def _dirs(self):
        # all parent directories of the explicit files, plus the root
        return set(util.dirs(self._fileset)) | {'.'}

    def visitdir(self, dir):
        if self._prefix and dir in self._fileset:
            return 'all'
        return ('.' in self._fileset or
                dir in self._fileset or
                dir in self._dirs or
                any(parentdir in self._fileset
                    for parentdir in util.finddirs(dir)))

    def prefix(self):
        return self._prefix

    @encoding.strmethod
    def __repr__(self):
        return ('<patternmatcher patterns=%r>' % pycompat.bytestr(self._pats))
408 408
class includematcher(basematcher):
    """Matcher for -I/-X style include patterns; matches a name if the
    pattern matches the name itself or any of its parent directories
    ('(?:/|$)' globsuffix)."""

    def __init__(self, root, cwd, kindpats, ctx=None, listsubrepos=False,
                 badfn=None):
        super(includematcher, self).__init__(root, cwd, badfn)

        self._pats, self.matchfn = _buildmatch(ctx, kindpats, '(?:/|$)',
                                               listsubrepos, root)
        self._prefix = _prefix(kindpats)
        roots, dirs = _rootsanddirs(kindpats)
        # roots are directories which are recursively included.
        self._roots = set(roots)
        # dirs are directories which are non-recursively included.
        self._dirs = set(dirs)

    def visitdir(self, dir):
        if self._prefix and dir in self._roots:
            return 'all'
        return ('.' in self._roots or
                dir in self._roots or
                dir in self._dirs or
                any(parentdir in self._roots
                    for parentdir in util.finddirs(dir)))

    @encoding.strmethod
    def __repr__(self):
        return ('<includematcher includes=%r>' % pycompat.bytestr(self._pats))
436 436
class exactmatcher(basematcher):
    '''Matches the input files exactly. They are interpreted as paths, not
    patterns (so no kind-prefixes).
    '''

    def __init__(self, root, cwd, files, badfn=None):
        super(exactmatcher, self).__init__(root, cwd, badfn)
        # accept any iterable but keep a list snapshot
        self._files = files if isinstance(files, list) else list(files)

    matchfn = basematcher.exact

    @propertycache
    def _dirs(self):
        # parent directories of the listed files, plus the root
        return set(util.dirs(self._fileset)) | {'.'}

    def visitdir(self, dir):
        return dir in self._dirs

    def isexact(self):
        return True

    @encoding.strmethod
    def __repr__(self):
        return ('<exactmatcher files=%r>' % self._files)
465 465
class differencematcher(basematcher):
    '''Composes two matchers by matching if the first matches and the second
    does not.

    The second matcher's non-matching-attributes (root, cwd, bad, explicitdir,
    traversedir) are ignored.
    '''
    def __init__(self, m1, m2):
        super(differencematcher, self).__init__(m1._root, m1._cwd)
        self._m1 = m1
        self._m2 = m2
        self.bad = m1.bad
        self.explicitdir = m1.explicitdir
        self.traversedir = m1.traversedir

    def matchfn(self, f):
        return self._m1(f) and not self._m2(f)

    @propertycache
    def _files(self):
        if self.isexact():
            return [f for f in self._m1.files() if self(f)]
        # If m1 is not an exact matcher, we can't easily figure out the set of
        # files, because its files() are not always files. For example, if
        # m1 is "path:dir" and m2 is "rootfileins:.", we don't
        # want to remove "dir" from the set even though it would match m2,
        # because the "dir" in m1 may not be a file.
        return self._m1.files()

    def visitdir(self, dir):
        # if m2 excludes everything below dir, nothing there can match
        if self._m2.visitdir(dir) == 'all':
            return False
        return bool(self._m1.visitdir(dir))

    def isexact(self):
        return self._m1.isexact()

    @encoding.strmethod
    def __repr__(self):
        return ('<differencematcher m1=%r, m2=%r>' % (self._m1, self._m2))
506 506
def intersectmatchers(m1, m2):
    '''Composes two matchers by matching if both of them match.

    The second matcher's non-matching-attributes (root, cwd, bad, explicitdir,
    traversedir) are ignored. A None argument short-circuits to the other
    matcher; an always() matcher on either side avoids building a composite.
    '''
    if m1 is None or m2 is None:
        return m1 or m2
    if m1.always():
        res = copy.copy(m2)
        # TODO: Consider encapsulating these things in a class so there's only
        # one thing to copy from m1.
        res.bad = m1.bad
        res.explicitdir = m1.explicitdir
        res.traversedir = m1.traversedir
        res.abs = m1.abs
        res.rel = m1.rel
        res._relativeuipath |= m1._relativeuipath
        return res
    if m2.always():
        res = copy.copy(m1)
        res._relativeuipath |= m2._relativeuipath
        return res
    return intersectionmatcher(m1, m2)
531 531
class intersectionmatcher(basematcher):
    """Matches a file only if both wrapped matchers match it.

    Non-matching-attributes (root, cwd, bad, explicitdir, traversedir) are
    taken from the first matcher.
    """
    def __init__(self, m1, m2):
        super(intersectionmatcher, self).__init__(m1._root, m1._cwd)
        self._m1 = m1
        self._m2 = m2
        self.bad = m1.bad
        self.explicitdir = m1.explicitdir
        self.traversedir = m1.traversedir

    @propertycache
    def _files(self):
        if self.isexact():
            m1, m2 = self._m1, self._m2
            if not m1.isexact():
                m1, m2 = m2, m1
            return [f for f in m1.files() if m2(f)]
        # It neither m1 nor m2 is an exact matcher, we can't easily intersect
        # the set of files, because their files() are not always files. For
        # example, if intersecting a matcher "-I glob:foo.txt" with matcher of
        # "path:dir2", we don't want to remove "dir2" from the set.
        return self._m1.files() + self._m2.files()

    def matchfn(self, f):
        return self._m1(f) and self._m2(f)

    def visitdir(self, dir):
        visit1 = self._m1.visitdir(dir)
        if visit1 == 'all':
            return self._m2.visitdir(dir)
        # bool() because visit1=True + visit2='all' should not be 'all'
        return bool(visit1 and self._m2.visitdir(dir))

    def always(self):
        return self._m1.always() and self._m2.always()

    def isexact(self):
        return self._m1.isexact() or self._m2.isexact()

    @encoding.strmethod
    def __repr__(self):
        return ('<intersectionmatcher m1=%r, m2=%r>' % (self._m1, self._m2))
573 573
class subdirmatcher(basematcher):
    """Adapt a matcher to work on a subdirectory only.

    The paths are remapped to remove/insert the path as needed:

    >>> from . import pycompat
    >>> m1 = match(b'root', b'', [b'a.txt', b'sub/b.txt'])
    >>> m2 = subdirmatcher(b'sub', m1)
    >>> bool(m2(b'a.txt'))
    False
    >>> bool(m2(b'b.txt'))
    True
    >>> bool(m2.matchfn(b'a.txt'))
    False
    >>> bool(m2.matchfn(b'b.txt'))
    True
    >>> m2.files()
    ['b.txt']
    >>> m2.exact(b'b.txt')
    True
    >>> util.pconvert(m2.rel(b'b.txt'))
    'sub/b.txt'
    >>> def bad(f, msg):
    ...     print(pycompat.sysstr(b"%s: %s" % (f, msg)))
    >>> m1.bad = bad
    >>> m2.bad(b'x.txt', b'No such file')
    sub/x.txt: No such file
    >>> m2.abs(b'c.txt')
    'sub/c.txt'
    """

    def __init__(self, path, matcher):
        super(subdirmatcher, self).__init__(matcher._root, matcher._cwd)
        self._path = path
        self._matcher = matcher
        self._always = matcher.always()

        # keep only the wrapped matcher's files that live under path,
        # with the "path/" prefix stripped
        self._files = [f[len(path) + 1:] for f in matcher._files
                       if f.startswith(path + "/")]

        # If the parent repo had a path to this subrepo and the matcher is
        # a prefix matcher, this submatcher always matches.
        if matcher.prefix():
            self._always = any(f == path for f in matcher._files)

    def bad(self, f, msg):
        self._matcher.bad(self._path + "/" + f, msg)

    def abs(self, f):
        return self._matcher.abs(self._path + "/" + f)

    def rel(self, f):
        return self._matcher.rel(self._path + "/" + f)

    def uipath(self, f):
        return self._matcher.uipath(self._path + "/" + f)

    def matchfn(self, f):
        # Some information is lost in the superclass's constructor, so we
        # can not accurately create the matching function for the subdirectory
        # from the inputs. Instead, we override matchfn() and visitdir() to
        # call the original matcher with the subdirectory path prepended.
        return self._matcher.matchfn(self._path + "/" + f)

    def visitdir(self, dir):
        if dir == '.':
            dir = self._path
        else:
            dir = self._path + "/" + dir
        return self._matcher.visitdir(dir)

    def always(self):
        return self._always

    def prefix(self):
        return self._matcher.prefix() and not self._always

    @encoding.strmethod
    def __repr__(self):
        return ('<subdirmatcher path=%r, matcher=%r>' %
                (self._path, self._matcher))
655 655
class unionmatcher(basematcher):
    """A matcher that is the union of several matchers.

    The non-matching-attributes (root, cwd, bad, explicitdir, traversedir) are
    taken from the first matcher.
    """

    def __init__(self, matchers):
        first = matchers[0]
        super(unionmatcher, self).__init__(first._root, first._cwd)
        self.explicitdir = first.explicitdir
        self.traversedir = first.traversedir
        self._matchers = matchers

    def matchfn(self, f):
        # a file matches as soon as any sub-matcher accepts it
        return any(m(f) for m in self._matchers)

    def visitdir(self, dir):
        result = False
        for matcher in self._matchers:
            visit = matcher.visitdir(dir)
            if visit == 'all':
                # one matcher wants everything below dir; that dominates
                return visit
            result |= visit
        return result

    @encoding.strmethod
    def __repr__(self):
        return ('<unionmatcher matchers=%r>' % self._matchers)
688 688
def patkind(pattern, default=None):
    '''If pattern is 'kind:pat' with a known kind, return kind.'''
    kind, _pat = _patsplit(pattern, default)
    return kind
692 692
def _patsplit(pattern, default):
    """Split a string into the optional pattern kind prefix and the actual
    pattern. Unknown prefixes are treated as part of the pattern."""
    if ':' in pattern:
        prefix, rest = pattern.split(':', 1)
        if prefix in allpatternkinds:
            return prefix, rest
    return default, pattern
701 701
def _globre(pat):
    r'''Convert an extended glob string to a regexp string.

    >>> from . import pycompat
    >>> def bprint(s):
    ...     print(pycompat.sysstr(s))
    >>> bprint(_globre(br'?'))
    .
    >>> bprint(_globre(br'*'))
    [^/]*
    >>> bprint(_globre(br'**'))
    .*
    >>> bprint(_globre(br'**/a'))
    (?:.*/)?a
    >>> bprint(_globre(br'a/**/b'))
    a/(?:.*/)?b
    >>> bprint(_globre(br'[a*?!^][^b][!c]'))
    [a*?!^][\^b][^c]
    >>> bprint(_globre(br'{a,b}'))
    (?:a|b)
    >>> bprint(_globre(br'.\*\?'))
    \.\*\?
    '''
    i, n = 0, len(pat)
    res = ''
    group = 0
    escape = util.stringutil.reescape
    def peek():
        # next character without consuming it ('' / False at end of input)
        return i < n and pat[i:i + 1]
    while i < n:
        c = pat[i:i + 1]
        i += 1
        if c not in '*?[{},\\':
            # ordinary character: escape it for the regexp engine
            res += escape(c)
        elif c == '*':
            if peek() == '*':
                i += 1
                if peek() == '/':
                    # '**/' matches any (possibly empty) directory prefix
                    i += 1
                    res += '(?:.*/)?'
                else:
                    res += '.*'
            else:
                # single '*' never crosses a directory separator
                res += '[^/]*'
        elif c == '?':
            res += '.'
        elif c == '[':
            # scan ahead for the closing bracket of the character class
            j = i
            if j < n and pat[j:j + 1] in '!]':
                j += 1
            while j < n and pat[j:j + 1] != ']':
                j += 1
            if j >= n:
                # unterminated class: treat the '[' literally
                res += '\\['
            else:
                stuff = pat[i:j].replace('\\','\\\\')
                i = j + 1
                if stuff[0:1] == '!':
                    # glob negation '[!...]' becomes regexp '[^...]'
                    stuff = '^' + stuff[1:]
                elif stuff[0:1] == '^':
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        elif c == '{':
            group += 1
            res += '(?:'
        elif c == '}' and group:
            res += ')'
            group -= 1
        elif c == ',' and group:
            # ',' separates alternatives only inside a '{...}' group
            res += '|'
        elif c == '\\':
            p = peek()
            if p:
                i += 1
                res += escape(p)
            else:
                res += escape(c)
        else:
            res += escape(c)
    return res
782 782
def _regex(kind, pat, globsuffix):
    '''Convert a (normalized) pattern of any kind into a regular expression.
    globsuffix is appended to the regexp of globs.'''
    if not pat:
        return ''
    if kind == 're':
        return pat
    if kind in ('path', 'relpath'):
        if pat == '.':
            return ''
        # the path itself or anything below it
        return util.stringutil.reescape(pat) + '(?:/|$)'
    if kind == 'rootfilesin':
        # Pattern is a directory name; anything after it must be a
        # non-directory (direct child).
        escaped = '' if pat == '.' else util.stringutil.reescape(pat) + '/'
        return escaped + '[^/]+$'
    if kind == 'relglob':
        return '(?:|.*/)' + _globre(pat) + globsuffix
    if kind == 'relre':
        return pat if pat.startswith('^') else '.*' + pat
    return _globre(pat) + globsuffix
809 809
def _buildmatch(ctx, kindpats, globsuffix, listsubrepos, root):
    '''Return regexp string and a matcher function for kindpats.
    globsuffix is appended to the regexp of globs.'''
    matchfuncs = []

    subincludes, kindpats = _expandsubinclude(kindpats, root)
    if subincludes:
        submatchers = {}
        # lazily build one sub-matcher per subinclude prefix
        def matchsubinclude(f):
            for prefix, matcherargs in subincludes:
                if f.startswith(prefix):
                    mf = submatchers.get(prefix)
                    if mf is None:
                        mf = match(*matcherargs)
                        submatchers[prefix] = mf

                    if mf(f[len(prefix):]):
                        return True
            return False
        matchfuncs.append(matchsubinclude)

    fset, kindpats = _expandsets(kindpats, ctx, listsubrepos)
    if fset:
        # fileset results are a plain set; membership test is the matcher
        matchfuncs.append(fset.__contains__)

    regex = ''
    if kindpats:
        regex, mf = _buildregexmatch(kindpats, globsuffix)
        matchfuncs.append(mf)

    if len(matchfuncs) == 1:
        return regex, matchfuncs[0]
    else:
        # several sources: a file matches if any of them accepts it
        return regex, lambda f: any(mf(f) for mf in matchfuncs)
844 844
def _buildregexmatch(kindpats, globsuffix):
    """Build a match function from a list of kinds and kindpats,
    return regexp string and a matcher function."""
    try:
        regex = '(?:%s)' % '|'.join([_regex(k, p, globsuffix)
                                     for (k, p, s) in kindpats])
        if len(regex) > 20000:
            # force the divide-and-conquer fallback below for huge patterns
            raise OverflowError
        return regex, _rematcher(regex)
    except OverflowError:
        # We're using a Python with a tiny regex engine and we
        # made it explode, so we'll divide the pattern list in two
        # until it works
        l = len(kindpats)
        if l < 2:
            raise
        regexa, a = _buildregexmatch(kindpats[:l//2], globsuffix)
        regexb, b = _buildregexmatch(kindpats[l//2:], globsuffix)
        return regex, lambda s: a(s) or b(s)
    except re.error:
        # find and report the offending pattern individually
        for k, p, s in kindpats:
            try:
                _rematcher('(?:%s)' % _regex(k, p, globsuffix))
            except re.error:
                if s:
                    raise error.Abort(_("%s: invalid pattern (%s): %s") %
                                      (s, k, p))
                else:
                    raise error.Abort(_("invalid pattern (%s): %s") % (k, p))
        raise error.Abort(_("invalid pattern"))
875 875
876 876 def _patternrootsanddirs(kindpats):
877 877 '''Returns roots and directories corresponding to each pattern.
878 878
879 879 This calculates the roots and directories exactly matching the patterns and
880 880 returns a tuple of (roots, dirs) for each. It does not return other
881 881 directories which may also need to be considered, like the parent
882 882 directories.
883 883 '''
884 884 r = []
885 885 d = []
886 886 for kind, pat, source in kindpats:
887 887 if kind == 'glob': # find the non-glob prefix
888 888 root = []
889 889 for p in pat.split('/'):
890 890 if '[' in p or '{' in p or '*' in p or '?' in p:
891 891 break
892 892 root.append(p)
893 893 r.append('/'.join(root) or '.')
894 894 elif kind in ('relpath', 'path'):
895 895 r.append(pat or '.')
896 896 elif kind in ('rootfilesin',):
897 897 d.append(pat or '.')
898 898 else: # relglob, re, relre
899 899 r.append('.')
900 900 return r, d
901 901
def _roots(kindpats):
    '''Returns root directories to match recursively from the given patterns.'''
    recursive, _nonrecursive = _patternrootsanddirs(kindpats)
    return recursive
906 906
def _rootsanddirs(kindpats):
    '''Returns roots and exact directories from patterns.

    roots are directories to match recursively, whereas exact directories should
    be matched non-recursively. The returned (roots, dirs) tuple will also
    include directories that need to be implicitly considered as either, such as
    parent directories.

    >>> _rootsanddirs(
    ...     [(b'glob', b'g/h/*', b''), (b'glob', b'g/h', b''),
    ...      (b'glob', b'g*', b'')])
    (['g/h', 'g/h', '.'], ['g', '.'])
    >>> _rootsanddirs(
    ...     [(b'rootfilesin', b'g/h', b''), (b'rootfilesin', b'', b'')])
    ([], ['g/h', '.', 'g', '.'])
    >>> _rootsanddirs(
    ...     [(b'relpath', b'r', b''), (b'path', b'p/p', b''),
    ...      (b'path', b'', b'')])
    (['r', 'p/p', '.'], ['p', '.'])
    >>> _rootsanddirs(
    ...     [(b'relglob', b'rg*', b''), (b're', b're/', b''),
    ...      (b'relre', b'rr', b'')])
    (['.', '.', '.'], ['.'])
    '''
    r, d = _patternrootsanddirs(kindpats)

    # Append the parents as non-recursive/exact directories, since they must be
    # scanned to get to either the roots or the other exact directories.
    d.extend(util.dirs(d))
    d.extend(util.dirs(r))
    # util.dirs() does not include the root directory, so add it manually
    d.append('.')

    return r, d
941 941
def _explicitfiles(kindpats):
    '''Returns the potential explicit filenames from the patterns.

    >>> _explicitfiles([(b'path', b'foo/bar', b'')])
    ['foo/bar']
    >>> _explicitfiles([(b'rootfilesin', b'foo/bar', b'')])
    []
    '''
    # Keep only the pattern kinds where one can specify filenames (vs only
    # directory names).
    filable = [kp for kp in kindpats if kp[0] != 'rootfilesin']
    return _roots(filable)
954 954
955 955 def _prefix(kindpats):
956 956 '''Whether all the patterns match a prefix (i.e. recursively)'''
957 957 for kind, pat, source in kindpats:
958 958 if kind not in ('path', 'relpath'):
959 959 return False
960 960 return True
961 961
# lazily-compiled regexp used to strip comments from pattern files
_commentre = None

def readpatternfile(filepath, warn, sourceinfo=False):
    '''parse a pattern file, returning a list of
    patterns. These patterns should be given to compile()
    to be validated and converted into a match function.

    trailing white space is dropped.
    the escape character is backslash.
    comments start with #.
    empty lines are skipped.

    lines can be of the following formats:

    syntax: regexp # defaults following lines to non-rooted regexps
    syntax: glob # defaults following lines to non-rooted globs
    re:pattern # non-rooted regular expression
    glob:pattern # non-rooted glob
    pattern # pattern of the current default type

    if sourceinfo is set, returns a list of tuples:
    (pattern, lineno, originalline). This is useful to debug ignore patterns.
    '''

    syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:',
                'include': 'include', 'subinclude': 'subinclude'}
    syntax = 'relre:'
    patterns = []

    fp = open(filepath, 'rb')
    for lineno, line in enumerate(util.iterfile(fp), start=1):
        if "#" in line:
            global _commentre
            if not _commentre:
                _commentre = util.re.compile(br'((?:^|[^\\])(?:\\\\)*)#.*')
            # remove comments prefixed by an even number of escapes
            m = _commentre.search(line)
            if m:
                line = line[:m.end(1)]
            # fixup properly escaped comments that survived the above
            line = line.replace("\\#", "#")
        line = line.rstrip()
        if not line:
            continue

        if line.startswith('syntax:'):
            # change the default kind for following lines
            s = line[7:].strip()
            try:
                syntax = syntaxes[s]
            except KeyError:
                if warn:
                    warn(_("%s: ignoring invalid syntax '%s'\n") %
                         (filepath, s))
            continue

        # a per-line 'kind:' prefix overrides the current default syntax
        linesyntax = syntax
        for s, rels in syntaxes.iteritems():
            if line.startswith(rels):
                linesyntax = rels
                line = line[len(rels):]
                break
            elif line.startswith(s+':'):
                linesyntax = rels
                line = line[len(s) + 1:]
                break
        if sourceinfo:
            patterns.append((linesyntax + line, lineno, line))
        else:
            patterns.append(linesyntax + line)
    fp.close()
    return patterns
@@ -1,634 +1,635
1 1 # sshpeer.py - ssh repository proxy class for mercurial
2 2 #
3 3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import re
11 11 import uuid
12 12
13 13 from .i18n import _
14 14 from . import (
15 15 error,
16 16 pycompat,
17 17 util,
18 18 wireprotoserver,
19 19 wireprototypes,
20 20 wireprotov1peer,
21 21 wireprotov1server,
22 22 )
23 23 from .utils import (
24 24 procutil,
25 stringutil,
25 26 )
26 27
27 28 def _serverquote(s):
28 29 """quote a string for the remote shell ... which we assume is sh"""
29 30 if not s:
30 31 return s
31 32 if re.match('[a-zA-Z0-9@%_+=:,./-]*$', s):
32 33 return s
33 34 return "'%s'" % s.replace("'", "'\\''")
34 35
def _forwardoutput(ui, pipe):
    """Display all data currently available on ``pipe`` as remote output.

    This is non blocking."""
    if not pipe:
        return
    data = procutil.readpipe(pipe)
    if not data:
        return
    for line in data.splitlines():
        ui.status(_("remote: "), line, '\n')
44 45
class doublepipe(object):
    """Operate a side-channel pipe in addition of a main one

    The side-channel pipe contains server output to be forwarded to the user
    input. The double pipe will behave as the "main" pipe, but will ensure the
    content of the "side" pipe is properly processed while we wait for blocking
    call on the "main" pipe.

    If large amounts of data are read from "main", the forward will cease after
    the first bytes start to appear. This simplifies the implementation
    without affecting actual output of sshpeer too much as we rarely issue
    large read for data not yet emitted by the server.

    The main pipe is expected to be a 'bufferedinputpipe' from the util module
    that handle all the os specific bits. This class lives in this module
    because it focus on behavior specific to the ssh protocol."""

    def __init__(self, ui, main, side):
        # main: the pipe whose interface we mirror; side: server stderr.
        self._ui = ui
        self._main = main
        self._side = side

    def _wait(self):
        """wait until some data are available on main or side

        return a pair of boolean (ismainready, issideready)

        (This will only wait for data if the setup is supported by `util.poll`)
        """
        if (isinstance(self._main, util.bufferedinputpipe) and
            self._main.hasbuffer):
            # Main has data. Assume side is worth poking at.
            return True, True

        fds = [self._main.fileno(), self._side.fileno()]
        try:
            act = util.poll(fds)
        except NotImplementedError:
            # non supported yet case, assume all have data.
            act = fds
        return (self._main.fileno() in act, self._side.fileno() in act)

    def write(self, data):
        # Delegate to main, forwarding side output while blocked.
        return self._call('write', data)

    def read(self, size):
        r = self._call('read', size)
        if size != 0 and not r:
            # We've observed a condition that indicates the
            # stdout closed unexpectedly. Check stderr one
            # more time and snag anything that's there before
            # letting anyone know the main part of the pipe
            # closed prematurely.
            _forwardoutput(self._ui, self._side)
        return r

    def readline(self):
        return self._call('readline')

    def _call(self, methname, data=None):
        """call <methname> on "main", forward output of "side" while blocking
        """
        # data can be '' or 0
        if (data is not None and not data) or self._main.closed:
            _forwardoutput(self._ui, self._side)
            return ''
        while True:
            mainready, sideready = self._wait()
            if sideready:
                # Surface any server chatter before touching main.
                _forwardoutput(self._ui, self._side)
            if mainready:
                meth = getattr(self._main, methname)
                if data is None:
                    return meth()
                else:
                    return meth(data)

    def close(self):
        return self._main.close()

    def flush(self):
        return self._main.flush()
127 128
128 129 def _cleanuppipes(ui, pipei, pipeo, pipee):
129 130 """Clean up pipes used by an SSH connection."""
130 131 if pipeo:
131 132 pipeo.close()
132 133 if pipei:
133 134 pipei.close()
134 135
135 136 if pipee:
136 137 # Try to read from the err descriptor until EOF.
137 138 try:
138 139 for l in pipee:
139 140 ui.status(_('remote: '), l)
140 141 except (IOError, ValueError):
141 142 pass
142 143
143 144 pipee.close()
144 145
def _makeconnection(ui, sshcmd, args, remotecmd, path, sshenv=None):
    """Create an SSH connection to a server.

    Returns a tuple of (process, stdin, stdout, stderr) for the
    spawned process.
    """
    serveargs = '%s -R %s serve --stdio' % (_serverquote(remotecmd),
                                            _serverquote(path))
    cmd = '%s %s %s' % (sshcmd, args, procutil.shellquote(serveargs))

    ui.debug('running %s\n' % cmd)
    cmd = procutil.quotecommand(cmd)

    # Unbuffered pipes allow the use of 'select'; feel free to remove
    # buffering and select usage when we ultimately move to threading.
    stdin, stdout, stderr, proc = procutil.popen4(cmd, bufsize=0, env=sshenv)

    return proc, stdin, stdout, stderr
166 167
def _clientcapabilities():
    """Return the set of wire protocol capabilities of this client.

    Includes the supported compression engines as a ``comp=`` entry.
    """
    caps = {'partial-pull'}
    engines = util.compengines.supportedwireengines(util.CLIENTROLE)
    compnames = [engine.wireprotosupport().name for engine in engines]
    caps.add('comp=%s' % ','.join(compnames))
    return caps
177 178
def _performhandshake(ui, stdin, stdout, stderr):
    """Perform the initial handshake with a remote ``hg serve --stdio``.

    Writes probe commands to ``stdin``, sniffs ``stdout`` for a matching
    reply and returns a ``(protoname, capabilities)`` pair.  Raises
    ``error.RepoError`` when no recognizable response arrives.
    """
    def badresponse():
        # Flush any output on stderr.
        _forwardoutput(ui, stderr)

        msg = _('no suitable response from remote hg')
        hint = ui.config('ui', 'ssherrorhint')
        raise error.RepoError(msg, hint=hint)

    # The handshake consists of sending wire protocol commands in reverse
    # order of protocol implementation and then sniffing for a response
    # to one of them.
    #
    # Those commands (from oldest to newest) are:
    #
    # ``between``
    #   Asks for the set of revisions between a pair of revisions. Command
    #   present in all Mercurial server implementations.
    #
    # ``hello``
    #   Instructs the server to advertise its capabilities. Introduced in
    #   Mercurial 0.9.1.
    #
    # ``upgrade``
    #   Requests upgrade from default transport protocol version 1 to
    #   a newer version. Introduced in Mercurial 4.6 as an experimental
    #   feature.
    #
    # The ``between`` command is issued with a request for the null
    # range. If the remote is a Mercurial server, this request will
    # generate a specific response: ``1\n\n``. This represents the
    # wire protocol encoded value for ``\n``. We look for ``1\n\n``
    # in the output stream and know this is the response to ``between``
    # and we're at the end of our handshake reply.
    #
    # The response to the ``hello`` command will be a line with the
    # length of the value returned by that command followed by that
    # value. If the server doesn't support ``hello`` (which should be
    # rare), that line will be ``0\n``. Otherwise, the value will contain
    # RFC 822 like lines. Of these, the ``capabilities:`` line contains
    # the capabilities of the server.
    #
    # The ``upgrade`` command isn't really a command in the traditional
    # sense of version 1 of the transport because it isn't using the
    # proper mechanism for formatting insteads: instead, it just encodes
    # arguments on the line, delimited by spaces.
    #
    # The ``upgrade`` line looks like ``upgrade <token> <capabilities>``.
    # If the server doesn't support protocol upgrades, it will reply to
    # this line with ``0\n``. Otherwise, it emits an
    # ``upgraded <token> <protocol>`` line to both stdout and stderr.
    # Content immediately following this line describes additional
    # protocol and server state.
    #
    # In addition to the responses to our command requests, the server
    # may emit "banner" output on stdout. SSH servers are allowed to
    # print messages to stdout on login. Issuing commands on connection
    # allows us to flush this banner output from the server by scanning
    # for output to our well-known ``between`` command. Of course, if
    # the banner contains ``1\n\n``, this will throw off our detection.

    requestlog = ui.configbool('devel', 'debug.peer-request')

    # Generate a random token to help identify responses to version 2
    # upgrade request.
    token = pycompat.sysbytes(str(uuid.uuid4()))
    upgradecaps = [
        ('proto', wireprotoserver.SSHV2),
    ]
    upgradecaps = util.urlreq.urlencode(upgradecaps)

    try:
        pairsarg = '%s-%s' % ('0' * 40, '0' * 40)
        handshake = [
            'hello\n',
            'between\n',
            'pairs %d\n' % len(pairsarg),
            pairsarg,
        ]

        # Request upgrade to version 2 if configured.
        if ui.configbool('experimental', 'sshpeer.advertise-v2'):
            ui.debug('sending upgrade request: %s %s\n' % (token, upgradecaps))
            handshake.insert(0, 'upgrade %s %s\n' % (token, upgradecaps))

        if requestlog:
            ui.debug('devel-peer-request: hello+between\n')
            ui.debug('devel-peer-request:   pairs: %d bytes\n' % len(pairsarg))
        ui.debug('sending hello command\n')
        ui.debug('sending between command\n')

        stdin.write(''.join(handshake))
        stdin.flush()
    except IOError:
        badresponse()

    # Assume version 1 of wire protocol by default.
    protoname = wireprototypes.SSHV1
    # The token is random bytes, so it must be regex-escaped before being
    # interpolated into the pattern.
    reupgraded = re.compile(b'^upgraded %s (.*)$' % stringutil.reescape(token))

    lines = ['', 'dummy']
    max_noise = 500
    while lines[-1] and max_noise:
        try:
            l = stdout.readline()
            _forwardoutput(ui, stderr)

            # Look for reply to protocol upgrade request. It has a token
            # in it, so there should be no false positives.
            m = reupgraded.match(l)
            if m:
                protoname = m.group(1)
                ui.debug('protocol upgraded to %s\n' % protoname)
                # If an upgrade was handled, the ``hello`` and ``between``
                # requests are ignored. The next output belongs to the
                # protocol, so stop scanning lines.
                break

            # Otherwise it could be a banner, ``0\n`` response if server
            # doesn't support upgrade.

            if lines[-1] == '1\n' and l == '\n':
                break
            if l:
                ui.debug('remote: ', l)
            lines.append(l)
            max_noise -= 1
        except IOError:
            badresponse()
    else:
        # Loop exhausted without seeing a recognizable reply.
        badresponse()

    caps = set()

    # For version 1, we should see a ``capabilities`` line in response to the
    # ``hello`` command.
    if protoname == wireprototypes.SSHV1:
        for l in reversed(lines):
            # Look for response to ``hello`` command. Scan from the back so
            # we don't misinterpret banner output as the command reply.
            if l.startswith('capabilities:'):
                caps.update(l[:-1].split(':')[1].split())
                break
    elif protoname == wireprotoserver.SSHV2:
        # We see a line with number of bytes to follow and then a value
        # looking like ``capabilities: *``.
        line = stdout.readline()
        try:
            valuelen = int(line)
        except ValueError:
            badresponse()

        capsline = stdout.read(valuelen)
        if not capsline.startswith('capabilities: '):
            badresponse()

        ui.debug('remote: %s\n' % capsline)

        caps.update(capsline.split(':')[1].split())
        # Trailing newline.
        stdout.read(1)

    # Error if we couldn't find capabilities, this means:
    #
    # 1. Remote isn't a Mercurial server
    # 2. Remote is a <0.9.1 Mercurial server
    # 3. Remote is a future Mercurial server that dropped ``hello``
    #    and other attempted handshake mechanisms.
    if not caps:
        badresponse()

    # Flush any output on stderr before proceeding.
    _forwardoutput(ui, stderr)

    return protoname, caps
353 354
class sshv1peer(wireprotov1peer.wirepeer):
    def __init__(self, ui, url, proc, stdin, stdout, stderr, caps,
                 autoreadstderr=True):
        """Create a peer from an existing SSH connection.

        ``proc`` is a handle on the underlying SSH process.
        ``stdin``, ``stdout``, and ``stderr`` are handles on the stdio
        pipes for that process.
        ``caps`` is a set of capabilities supported by the remote.
        ``autoreadstderr`` denotes whether to automatically read from
        stderr and to forward its output.
        """
        self._url = url
        self.ui = ui
        # self._subprocess is unused. Keeping a handle on the process
        # holds a reference and prevents it from being garbage collected.
        self._subprocess = proc

        # And we hook up our "doublepipe" wrapper to allow querying
        # stderr any time we perform I/O.
        if autoreadstderr:
            stdout = doublepipe(ui, util.bufferedinputpipe(stdout), stderr)
            stdin = doublepipe(ui, stdin, stderr)

        self._pipeo = stdin
        self._pipei = stdout
        self._pipee = stderr
        self._caps = caps
        self._autoreadstderr = autoreadstderr

    # Commands that have a "framed" response where the first line of the
    # response contains the length of that response.
    _FRAMED_COMMANDS = {
        'batch',
    }

    # Begin of ipeerconnection interface.

    def url(self):
        return self._url

    def local(self):
        return None

    def peer(self):
        return self

    def canpush(self):
        return True

    def close(self):
        pass

    # End of ipeerconnection interface.

    # Begin of ipeercommands interface.

    def capabilities(self):
        return self._caps

    # End of ipeercommands interface.

    def _readerr(self):
        # Forward any pending server stderr output to the user.
        _forwardoutput(self.ui, self._pipee)

    def _abort(self, exception):
        # Tear down the connection before surfacing the error to the caller.
        self._cleanup()
        raise exception

    def _cleanup(self):
        _cleanuppipes(self.ui, self._pipei, self._pipeo, self._pipee)

    __del__ = _cleanup

    def _sendrequest(self, cmd, args, framed=False):
        """Send ``cmd`` with ``args`` over the wire.

        Returns a file-like object for reading the response; when
        ``framed`` the stream is capped to the advertised response length.
        """
        if (self.ui.debugflag
            and self.ui.configbool('devel', 'debug.peer-request')):
            dbg = self.ui.debug
            line = 'devel-peer-request: %s\n'
            dbg(line % cmd)
            for key, value in sorted(args.items()):
                if not isinstance(value, dict):
                    dbg(line % '  %s: %d bytes' % (key, len(value)))
                else:
                    for dk, dv in sorted(value.items()):
                        dbg(line % '  %s-%s: %d' % (key, dk, len(dv)))
        self.ui.debug("sending %s command\n" % cmd)
        self._pipeo.write("%s\n" % cmd)
        _func, names = wireprotov1server.commands[cmd]
        keys = names.split()
        wireargs = {}
        for k in keys:
            if k == '*':
                # '*' collects all remaining arguments into one dict value.
                wireargs['*'] = args
                break
            else:
                wireargs[k] = args[k]
                del args[k]
        for k, v in sorted(wireargs.iteritems()):
            self._pipeo.write("%s %d\n" % (k, len(v)))
            if isinstance(v, dict):
                for dk, dv in v.iteritems():
                    self._pipeo.write("%s %d\n" % (dk, len(dv)))
                    self._pipeo.write(dv)
            else:
                self._pipeo.write(v)
        self._pipeo.flush()

        # We know exactly how many bytes are in the response. So return a proxy
        # around the raw output stream that allows reading exactly this many
        # bytes. Callers then can read() without fear of overrunning the
        # response.
        if framed:
            amount = self._getamount()
            return util.cappedreader(self._pipei, amount)

        return self._pipei

    def _callstream(self, cmd, **args):
        args = pycompat.byteskwargs(args)
        return self._sendrequest(cmd, args, framed=cmd in self._FRAMED_COMMANDS)

    def _callcompressable(self, cmd, **args):
        args = pycompat.byteskwargs(args)
        return self._sendrequest(cmd, args, framed=cmd in self._FRAMED_COMMANDS)

    def _call(self, cmd, **args):
        args = pycompat.byteskwargs(args)
        return self._sendrequest(cmd, args, framed=True).read()

    def _callpush(self, cmd, fp, **args):
        # The server responds with an empty frame if the client should
        # continue submitting the payload.
        r = self._call(cmd, **args)
        if r:
            return '', r

        # The payload consists of frames with content followed by an empty
        # frame.
        for d in iter(lambda: fp.read(4096), ''):
            self._writeframed(d)
        self._writeframed("", flush=True)

        # In case of success, there is an empty frame and a frame containing
        # the integer result (as a string).
        # In case of error, there is a non-empty frame containing the error.
        r = self._readframed()
        if r:
            return '', r
        return self._readframed(), ''

    def _calltwowaystream(self, cmd, fp, **args):
        # The server responds with an empty frame if the client should
        # continue submitting the payload.
        r = self._call(cmd, **args)
        if r:
            # XXX needs to be made better
            raise error.Abort(_('unexpected remote reply: %s') % r)

        # The payload consists of frames with content followed by an empty
        # frame.
        for d in iter(lambda: fp.read(4096), ''):
            self._writeframed(d)
        self._writeframed("", flush=True)

        return self._pipei

    def _getamount(self):
        """Read and return the integer length prefix of a framed response."""
        l = self._pipei.readline()
        if l == '\n':
            # An empty frame here signals an out-of-band error.
            if self._autoreadstderr:
                self._readerr()
            msg = _('check previous remote output')
            self._abort(error.OutOfBandError(hint=msg))
        if self._autoreadstderr:
            self._readerr()
        try:
            return int(l)
        except ValueError:
            self._abort(error.ResponseError(_("unexpected response:"), l))

    def _readframed(self):
        """Read one length-prefixed frame; returns b'' for an empty frame."""
        size = self._getamount()
        if not size:
            return b''

        return self._pipei.read(size)

    def _writeframed(self, data, flush=False):
        """Write ``data`` as a length-prefixed frame."""
        self._pipeo.write("%d\n" % len(data))
        if data:
            self._pipeo.write(data)
        if flush:
            self._pipeo.flush()
        if self._autoreadstderr:
            self._readerr()
550 551
class sshv2peer(sshv1peer):
    """A peer that speaks version 2 of the transport protocol."""
    # Currently version 2 is identical to version 1 post handshake.
    # And handshake is performed before the peer is instantiated. So
    # we need no custom code.
556 557
def makepeer(ui, path, proc, stdin, stdout, stderr, autoreadstderr=True):
    """Make a peer instance from existing pipes.

    ``path`` and ``proc`` are stored on the eventual peer instance and may
    not be used for anything meaningful.

    ``stdin``, ``stdout``, and ``stderr`` are the pipes connected to the
    SSH server's stdio handles.

    This function is factored out to allow creating peers that don't
    actually spawn a new process. It is useful for starting SSH protocol
    servers and clients via non-standard means, which can be useful for
    testing.
    """
    try:
        protoname, caps = _performhandshake(ui, stdin, stdout, stderr)
    except Exception:
        _cleanuppipes(ui, stdout, stdin, stderr)
        raise

    # Pick the peer class matching the negotiated protocol version.
    if protoname == wireprototypes.SSHV1:
        peercls = sshv1peer
    elif protoname == wireprotoserver.SSHV2:
        peercls = sshv2peer
    else:
        _cleanuppipes(ui, stdout, stdin, stderr)
        raise error.RepoError(_('unknown version of SSH protocol: %s') %
                              protoname)

    return peercls(ui, path, proc, stdin, stdout, stderr, caps,
                   autoreadstderr=autoreadstderr)
587 588
def instance(ui, path, create, intents=None):
    """Create an SSH peer.

    The returned object conforms to the ``wireprotov1peer.wirepeer`` interface.
    """
    u = util.url(path, parsequery=False, parsefragment=False)
    if u.scheme != 'ssh' or not u.host or u.path is None:
        raise error.RepoError(_("couldn't parse location %s") % path)

    # Reject hostnames/paths that could be interpreted as ssh options.
    util.checksafessh(path)

    if u.passwd is not None:
        raise error.RepoError(_('password in URL not supported'))

    sshcmd = ui.config('ui', 'ssh')
    remotecmd = ui.config('ui', 'remotecmd')
    sshaddenv = dict(ui.configitems('sshenv'))
    sshenv = procutil.shellenviron(sshaddenv)
    remotepath = u.path or '.'

    args = procutil.sshargs(sshcmd, u.host, u.user, u.port)

    if create:
        # Run ``hg init`` on the remote before connecting for serving.
        cmd = '%s %s %s' % (sshcmd, args,
            procutil.shellquote('%s init %s' %
                (_serverquote(remotecmd), _serverquote(remotepath))))
        ui.debug('running %s\n' % cmd)
        res = ui.system(cmd, blockedtag='sshpeer', environ=sshenv)
        if res != 0:
            raise error.RepoError(_('could not create remote repo'))

    proc, stdin, stdout, stderr = _makeconnection(ui, sshcmd, args, remotecmd,
                                                  remotepath, sshenv)

    peer = makepeer(ui, path, proc, stdin, stdout, stderr)

    # Finally, if supported by the server, notify it about our own
    # capabilities.
    if 'protocaps' in peer.capabilities():
        try:
            peer._call("protocaps",
                       caps=' '.join(sorted(_clientcapabilities())))
        except IOError:
            peer._cleanup()
            raise error.RepoError(_('capability exchange failed'))

    return peer
@@ -1,877 +1,877
1 1 # sslutil.py - SSL handling for mercurial
2 2 #
3 3 # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 4 # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
5 5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2 or any later version.
9 9
10 10 from __future__ import absolute_import
11 11
12 12 import hashlib
13 13 import os
14 14 import re
15 15 import ssl
16 16
17 17 from .i18n import _
18 18 from . import (
19 19 error,
20 20 node,
21 21 pycompat,
22 22 util,
23 23 )
24 24 from .utils import (
25 25 procutil,
26 26 stringutil,
27 27 )
28 28
29 29 # Python 2.7.9+ overhauled the built-in SSL/TLS features of Python. It added
30 30 # support for TLS 1.1, TLS 1.2, SNI, system CA stores, etc. These features are
31 31 # all exposed via the "ssl" module.
32 32 #
33 33 # Depending on the version of Python being used, SSL/TLS support is either
34 34 # modern/secure or legacy/insecure. Many operations in this module have
35 35 # separate code paths depending on support in Python.
36 36
37 37 configprotocols = {
38 38 'tls1.0',
39 39 'tls1.1',
40 40 'tls1.2',
41 41 }
42 42
43 43 hassni = getattr(ssl, 'HAS_SNI', False)
44 44
45 45 # TLS 1.1 and 1.2 may not be supported if the OpenSSL Python is compiled
46 46 # against doesn't support them.
47 47 supportedprotocols = {'tls1.0'}
48 48 if util.safehasattr(ssl, 'PROTOCOL_TLSv1_1'):
49 49 supportedprotocols.add('tls1.1')
50 50 if util.safehasattr(ssl, 'PROTOCOL_TLSv1_2'):
51 51 supportedprotocols.add('tls1.2')
52 52
try:
    # ssl.SSLContext was added in 2.7.9 and presence indicates modern
    # SSL/TLS features are available.
    SSLContext = ssl.SSLContext
    modernssl = True
    _canloaddefaultcerts = util.safehasattr(SSLContext, 'load_default_certs')
except AttributeError:
    modernssl = False
    _canloaddefaultcerts = False

    # We implement SSLContext using the interface from the standard library.
    class SSLContext(object):
        def __init__(self, protocol):
            # From the public interface of SSLContext
            self.protocol = protocol
            self.check_hostname = False
            self.options = 0
            self.verify_mode = ssl.CERT_NONE

            # Used by our implementation.
            self._certfile = None
            self._keyfile = None
            self._certpassword = None
            self._cacerts = None
            self._ciphers = None

        def load_cert_chain(self, certfile, keyfile=None, password=None):
            # Store for use by wrap_socket(); no validation is performed here.
            self._certfile = certfile
            self._keyfile = keyfile
            self._certpassword = password

        def load_default_certs(self, purpose=None):
            # Legacy ssl module has no system certificate store to load from.
            pass

        def load_verify_locations(self, cafile=None, capath=None, cadata=None):
            if capath:
                raise error.Abort(_('capath not supported'))
            if cadata:
                raise error.Abort(_('cadata not supported'))

            self._cacerts = cafile

        def set_ciphers(self, ciphers):
            self._ciphers = ciphers

        def wrap_socket(self, socket, server_hostname=None, server_side=False):
            # server_hostname is unique to SSLContext.wrap_socket and is used
            # for SNI in that context. So there's nothing for us to do with it
            # in this legacy code since we don't support SNI.

            args = {
                r'keyfile': self._keyfile,
                r'certfile': self._certfile,
                r'server_side': server_side,
                r'cert_reqs': self.verify_mode,
                r'ssl_version': self.protocol,
                r'ca_certs': self._cacerts,
                r'ciphers': self._ciphers,
            }

            return ssl.wrap_socket(socket, **args)
114 114
def _hostsettings(ui, hostname):
    """Obtain security settings for a hostname.

    Returns a dict of settings relevant to that hostname.
    """
    bhostname = pycompat.bytesurl(hostname)
    s = {
        # Whether we should attempt to load default/available CA certs
        # if an explicit ``cafile`` is not defined.
        'allowloaddefaultcerts': True,
        # List of 2-tuple of (hash algorithm, hash).
        'certfingerprints': [],
        # Path to file containing concatenated CA certs. Used by
        # SSLContext.load_verify_locations().
        'cafile': None,
        # Whether certificate verification should be disabled.
        'disablecertverification': False,
        # Whether the legacy [hostfingerprints] section has data for this host.
        'legacyfingerprint': False,
        # PROTOCOL_* constant to use for SSLContext.__init__.
        'protocol': None,
        # String representation of minimum protocol to be used for UI
        # presentation.
        'protocolui': None,
        # ssl.CERT_* constant used by SSLContext.verify_mode.
        'verifymode': None,
        # Defines extra ssl.OP* bitwise options to set.
        'ctxoptions': None,
        # OpenSSL Cipher List to use (instead of default).
        'ciphers': None,
    }

    # Allow minimum TLS protocol to be specified in the config.
    def validateprotocol(protocol, key):
        if protocol not in configprotocols:
            raise error.Abort(
                _('unsupported protocol from hostsecurity.%s: %s') %
                (key, protocol),
                hint=_('valid protocols: %s') %
                     ' '.join(sorted(configprotocols)))

    # We default to TLS 1.1+ where we can because TLS 1.0 has known
    # vulnerabilities (like BEAST and POODLE). We allow users to downgrade to
    # TLS 1.0+ via config options in case a legacy server is encountered.
    if 'tls1.1' in supportedprotocols:
        defaultprotocol = 'tls1.1'
    else:
        # Let people know they are borderline secure.
        # We don't document this config option because we want people to see
        # the bold warnings on the web site.
        # internal config: hostsecurity.disabletls10warning
        if not ui.configbool('hostsecurity', 'disabletls10warning'):
            ui.warn(_('warning: connecting to %s using legacy security '
                      'technology (TLS 1.0); see '
                      'https://mercurial-scm.org/wiki/SecureConnections for '
                      'more info\n') % bhostname)
        defaultprotocol = 'tls1.0'

    # Global minimum first, then a per-host override.
    key = 'minimumprotocol'
    protocol = ui.config('hostsecurity', key, defaultprotocol)
    validateprotocol(protocol, key)

    key = '%s:minimumprotocol' % bhostname
    protocol = ui.config('hostsecurity', key, protocol)
    validateprotocol(protocol, key)

    # If --insecure is used, we allow the use of TLS 1.0 despite config options.
    # We always print a "connection security to %s is disabled..." message when
    # --insecure is used. So no need to print anything more here.
    if ui.insecureconnections:
        protocol = 'tls1.0'

    s['protocol'], s['ctxoptions'], s['protocolui'] = protocolsettings(protocol)

    ciphers = ui.config('hostsecurity', 'ciphers')
    ciphers = ui.config('hostsecurity', '%s:ciphers' % bhostname, ciphers)
    s['ciphers'] = ciphers

    # Look for fingerprints in [hostsecurity] section. Value is a list
    # of <alg>:<fingerprint> strings.
    fingerprints = ui.configlist('hostsecurity', '%s:fingerprints' % bhostname)
    for fingerprint in fingerprints:
        if not (fingerprint.startswith(('sha1:', 'sha256:', 'sha512:'))):
            raise error.Abort(_('invalid fingerprint for %s: %s') % (
                                  bhostname, fingerprint),
                              hint=_('must begin with "sha1:", "sha256:", '
                                     'or "sha512:"'))

        alg, fingerprint = fingerprint.split(':', 1)
        fingerprint = fingerprint.replace(':', '').lower()
        s['certfingerprints'].append((alg, fingerprint))

    # Fingerprints from [hostfingerprints] are always SHA-1.
    for fingerprint in ui.configlist('hostfingerprints', bhostname):
        fingerprint = fingerprint.replace(':', '').lower()
        s['certfingerprints'].append(('sha1', fingerprint))
        s['legacyfingerprint'] = True

    # If a host cert fingerprint is defined, it is the only thing that
    # matters. No need to validate CA certs.
    if s['certfingerprints']:
        s['verifymode'] = ssl.CERT_NONE
        s['allowloaddefaultcerts'] = False

    # If --insecure is used, don't take CAs into consideration.
    elif ui.insecureconnections:
        s['disablecertverification'] = True
        s['verifymode'] = ssl.CERT_NONE
        s['allowloaddefaultcerts'] = False

    if ui.configbool('devel', 'disableloaddefaultcerts'):
        s['allowloaddefaultcerts'] = False

    # If both fingerprints and a per-host ca file are specified, issue a warning
    # because users should not be surprised about what security is or isn't
    # being performed.
    cafile = ui.config('hostsecurity', '%s:verifycertsfile' % bhostname)
    if s['certfingerprints'] and cafile:
        ui.warn(_('(hostsecurity.%s:verifycertsfile ignored when host '
                  'fingerprints defined; using host fingerprints for '
                  'verification)\n') % bhostname)

    # Try to hook up CA certificate validation unless something above
    # makes it not necessary.
    if s['verifymode'] is None:
        # Look at per-host ca file first.
        if cafile:
            cafile = util.expandpath(cafile)
            if not os.path.exists(cafile):
                raise error.Abort(_('path specified by %s does not exist: %s') %
                                  ('hostsecurity.%s:verifycertsfile' % (
                                      bhostname,), cafile))
            s['cafile'] = cafile
        else:
            # Find global certificates file in config.
            cafile = ui.config('web', 'cacerts')

            if cafile:
                cafile = util.expandpath(cafile)
                if not os.path.exists(cafile):
                    raise error.Abort(_('could not find web.cacerts: %s') %
                                      cafile)
            elif s['allowloaddefaultcerts']:
                # CAs not defined in config. Try to find system bundles.
                cafile = _defaultcacerts(ui)
                if cafile:
                    ui.debug('using %s for CA file\n' % cafile)

            s['cafile'] = cafile

        # Require certificate validation if CA certs are being loaded and
        # verification hasn't been disabled above.
        if cafile or (_canloaddefaultcerts and s['allowloaddefaultcerts']):
            s['verifymode'] = ssl.CERT_REQUIRED
        else:
            # At this point we don't have a fingerprint, aren't being
            # explicitly insecure, and can't load CA certs. Connecting
            # is insecure. We allow the connection and abort during
            # validation (once we have the fingerprint to print to the
            # user).
            s['verifymode'] = ssl.CERT_NONE

    assert s['protocol'] is not None
    assert s['ctxoptions'] is not None
    assert s['verifymode'] is not None

    return s
282 282
def protocolsettings(protocol):
    """Resolve the protocol for a config value.

    Returns a 3-tuple of (protocol, options, ui value) where the first
    2 items are values used by SSLContext and the last is a string value
    of the ``minimumprotocol`` config option equivalent.
    """
    if protocol not in configprotocols:
        raise ValueError('protocol value not supported: %s' % protocol)

    # Despite its name, PROTOCOL_SSLv23 negotiates the highest version
    # both peers support, TLS included. Legacy stacks top out at TLS 1.0;
    # modern ones can reach TLS 1.2.
    #
    # The PROTOCOL_TLSv* constants each pin one exact TLS version. So the
    # way to express "this version or newer" is PROTOCOL_SSLv23 combined
    # with OP_NO_* exclusions set through SSLContext.options — and those
    # options only take effect when the real, modern SSLContext exists.
    if supportedprotocols == {'tls1.0'}:
        # This Python can only ever speak TLS 1.0, so a request for
        # anything newer cannot be honored.
        if protocol != 'tls1.0':
            raise error.Abort(_('current Python does not support protocol '
                                'setting %s') % protocol,
                              hint=_('upgrade Python or disable setting since '
                                     'only TLS 1.0 is supported'))

        return ssl.PROTOCOL_TLSv1, 0, 'tls1.0'

    # WARNING: returned options don't work unless the modern ssl module
    # is available. Be careful when adding options here.

    # SSLv2 and SSLv3 are broken. We ban them outright.
    options = ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3

    # For each minimum version, the extra OP_NO_* flags that exclude the
    # TLS versions below it; 'tls1.0' excludes nothing further.
    excludeflags = {
        'tls1.0': 0,
        'tls1.1': ssl.OP_NO_TLSv1,
        'tls1.2': ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1,
    }
    if protocol not in excludeflags:
        raise error.Abort(_('this should not happen'))
    options |= excludeflags[protocol]

    # Prevent CRIME.
    # There is no guarantee this attribute is defined on the module.
    options |= getattr(ssl, 'OP_NO_COMPRESSION', 0)

    return ssl.PROTOCOL_SSLv23, options, protocol
334 334
def wrapsocket(sock, keyfile, certfile, ui, serverhostname=None):
    """Add SSL/TLS to a socket.

    This is a glorified wrapper for ``ssl.wrap_socket()``. It makes sane
    choices based on what security options are available.

    In addition to the arguments supported by ``ssl.wrap_socket``, we allow
    the following additional arguments:

    * serverhostname - The expected hostname of the remote server. If the
      server (and client) support SNI, this tells the server which certificate
      to use.

    Returns the wrapped socket. A ``_hgstate`` dict (ui, settings, hostname
    and whether CA certificates were loaded) is attached to it for later
    consumption by ``validatesocket()``.
    """
    if not serverhostname:
        raise error.Abort(_('serverhostname argument is required'))

    # Fail early with an actionable message if the configured client
    # key/cert files are missing.
    for f in (keyfile, certfile):
        if f and not os.path.exists(f):
            raise error.Abort(
                _('certificate file (%s) does not exist; cannot connect to %s')
                % (f, pycompat.bytesurl(serverhostname)),
                hint=_('restore missing file or fix references '
                       'in Mercurial config'))

    # Resolve the per-host security policy (protocol, ciphers, fingerprints,
    # CA loading behavior) from the configuration.
    settings = _hostsettings(ui, serverhostname)

    # We can't use ssl.create_default_context() because it calls
    # load_default_certs() unless CA arguments are passed to it. We want to
    # have explicit control over CA loading because implicitly loading
    # CAs may undermine the user's intent. For example, a user may define a CA
    # bundle with a specific CA cert removed. If the system/default CA bundle
    # is loaded and contains that removed CA, you've just undone the user's
    # choice.
    sslcontext = SSLContext(settings['protocol'])

    # This is a no-op unless using modern ssl.
    sslcontext.options |= settings['ctxoptions']

    # This still works on our fake SSLContext.
    sslcontext.verify_mode = settings['verifymode']

    if settings['ciphers']:
        try:
            sslcontext.set_ciphers(pycompat.sysstr(settings['ciphers']))
        except ssl.SSLError as e:
            raise error.Abort(
                _('could not set ciphers: %s')
                % stringutil.forcebytestr(e.args[0]),
                hint=_('change cipher string (%s) in config') %
                settings['ciphers'])

    if certfile is not None:
        # Prompt interactively for the key passphrase if the key is
        # encrypted; ssl calls this callback on demand.
        def password():
            f = keyfile or certfile
            return ui.getpass(_('passphrase for %s: ') % f, '')
        sslcontext.load_cert_chain(certfile, keyfile, password)

    if settings['cafile'] is not None:
        try:
            sslcontext.load_verify_locations(cafile=settings['cafile'])
        except ssl.SSLError as e:
            if len(e.args) == 1: # pypy has different SSLError args
                msg = e.args[0]
            else:
                msg = e.args[1]
            raise error.Abort(_('error loading CA file %s: %s') % (
                              settings['cafile'], stringutil.forcebytestr(msg)),
                              hint=_('file is empty or malformed?'))
        caloaded = True
    elif settings['allowloaddefaultcerts']:
        # This is a no-op on old Python.
        sslcontext.load_default_certs()
        caloaded = True
    else:
        caloaded = False

    try:
        sslsocket = sslcontext.wrap_socket(sock, server_hostname=serverhostname)
    except ssl.SSLError as e:
        # If we're doing certificate verification and no CA certs are loaded,
        # that is almost certainly the reason why verification failed. Provide
        # a hint to the user.
        # Only modern ssl module exposes SSLContext.get_ca_certs() so we can
        # only show this warning if modern ssl is available.
        # The exception handler is here to handle bugs around cert attributes:
        # https://bugs.python.org/issue20916#msg213479. (See issues5313.)
        # When the main 20916 bug occurs, 'sslcontext.get_ca_certs()' is a
        # non-empty list, but the following conditional is otherwise True.
        try:
            if (caloaded and settings['verifymode'] == ssl.CERT_REQUIRED and
                modernssl and not sslcontext.get_ca_certs()):
                ui.warn(_('(an attempt was made to load CA certificates but '
                          'none were loaded; see '
                          'https://mercurial-scm.org/wiki/SecureConnections '
                          'for how to configure Mercurial to avoid this '
                          'error)\n'))
        except ssl.SSLError:
            pass
        # Try to print more helpful error messages for known failures.
        if util.safehasattr(e, 'reason'):
            # This error occurs when the client and server don't share a
            # common/supported SSL/TLS protocol. We've disabled SSLv2 and SSLv3
            # outright. Hopefully the reason for this error is that we require
            # TLS 1.1+ and the server only supports TLS 1.0. Whatever the
            # reason, try to emit an actionable warning.
            if e.reason == 'UNSUPPORTED_PROTOCOL':
                # We attempted TLS 1.0+.
                if settings['protocolui'] == 'tls1.0':
                    # We support more than just TLS 1.0+. If this happens,
                    # the likely scenario is either the client or the server
                    # is really old. (e.g. server doesn't support TLS 1.0+ or
                    # client doesn't support modern TLS versions introduced
                    # several years from when this comment was written).
                    if supportedprotocols != {'tls1.0'}:
                        ui.warn(_(
                            '(could not communicate with %s using security '
                            'protocols %s; if you are using a modern Mercurial '
                            'version, consider contacting the operator of this '
                            'server; see '
                            'https://mercurial-scm.org/wiki/SecureConnections '
                            'for more info)\n') % (
                                serverhostname,
                                ', '.join(sorted(supportedprotocols))))
                    else:
                        ui.warn(_(
                            '(could not communicate with %s using TLS 1.0; the '
                            'likely cause of this is the server no longer '
                            'supports TLS 1.0 because it has known security '
                            'vulnerabilities; see '
                            'https://mercurial-scm.org/wiki/SecureConnections '
                            'for more info)\n') % serverhostname)
                else:
                    # We attempted TLS 1.1+. We can only get here if the client
                    # supports the configured protocol. So the likely reason is
                    # the client wants better security than the server can
                    # offer.
                    ui.warn(_(
                        '(could not negotiate a common security protocol (%s+) '
                        'with %s; the likely cause is Mercurial is configured '
                        'to be more secure than the server can support)\n') % (
                        settings['protocolui'], serverhostname))
                    ui.warn(_('(consider contacting the operator of this '
                              'server and ask them to support modern TLS '
                              'protocol versions; or, set '
                              'hostsecurity.%s:minimumprotocol=tls1.0 to allow '
                              'use of legacy, less secure protocols when '
                              'communicating with this server)\n') %
                            serverhostname)
                    ui.warn(_(
                        '(see https://mercurial-scm.org/wiki/SecureConnections '
                        'for more info)\n'))

            elif (e.reason == 'CERTIFICATE_VERIFY_FAILED' and
                  pycompat.iswindows):

                ui.warn(_('(the full certificate chain may not be available '
                          'locally; see "hg help debugssl")\n'))
        # Warnings above are advisory only; always re-raise the SSL error.
        raise

    # check if wrap_socket failed silently because socket had been
    # closed
    # - see http://bugs.python.org/issue13721
    if not sslsocket.cipher():
        raise error.Abort(_('ssl connection failed'))

    sslsocket._hgstate = {
        'caloaded': caloaded,
        'hostname': serverhostname,
        'settings': settings,
        'ui': ui,
    }

    return sslsocket
508 508
def wrapserversocket(sock, ui, certfile=None, keyfile=None, cafile=None,
                     requireclientcert=False):
    """Wrap a socket for use by servers.

    ``certfile`` and ``keyfile`` specify the files containing the certificate's
    public and private keys, respectively. Both keys can be defined in the same
    file via ``certfile`` (the private key must come first in the file).

    ``cafile`` defines the path to certificate authorities.

    ``requireclientcert`` specifies whether to require client certificates.

    Typically ``cafile`` is only defined if ``requireclientcert`` is true.

    Returns the server-side wrapped socket. Raises ``error.Abort`` when a
    referenced certificate file is missing or the requested exact protocol
    is unavailable.
    """
    # This function is not used much by core Mercurial, so the error messaging
    # doesn't have to be as detailed as for wrapsocket().
    for f in (certfile, keyfile, cafile):
        if f and not os.path.exists(f):
            raise error.Abort(_('referenced certificate file (%s) does not '
                                'exist') % f)

    # Default policy: TLS 1.0 or newer, with SSLv2/SSLv3 excluded.
    protocol, options, _protocolui = protocolsettings('tls1.0')

    # This config option is intended for use in tests only. It is a giant
    # footgun to kill security. Don't define it.
    exactprotocol = ui.config('devel', 'serverexactprotocol')
    if exactprotocol == 'tls1.0':
        protocol = ssl.PROTOCOL_TLSv1
    elif exactprotocol == 'tls1.1':
        if 'tls1.1' not in supportedprotocols:
            raise error.Abort(_('TLS 1.1 not supported by this Python'))
        protocol = ssl.PROTOCOL_TLSv1_1
    elif exactprotocol == 'tls1.2':
        if 'tls1.2' not in supportedprotocols:
            raise error.Abort(_('TLS 1.2 not supported by this Python'))
        protocol = ssl.PROTOCOL_TLSv1_2
    elif exactprotocol:
        raise error.Abort(_('invalid value for serverexactprotocol: %s') %
                          exactprotocol)

    if modernssl:
        # We /could/ use create_default_context() here since it doesn't load
        # CAs when configured for client auth. However, it is hard-coded to
        # use ssl.PROTOCOL_SSLv23 which may not be appropriate here.
        sslcontext = SSLContext(protocol)
        sslcontext.options |= options

        # Improve forward secrecy.
        sslcontext.options |= getattr(ssl, 'OP_SINGLE_DH_USE', 0)
        sslcontext.options |= getattr(ssl, 'OP_SINGLE_ECDH_USE', 0)

        # Use the list of more secure ciphers if found in the ssl module.
        if util.safehasattr(ssl, '_RESTRICTED_SERVER_CIPHERS'):
            sslcontext.options |= getattr(ssl, 'OP_CIPHER_SERVER_PREFERENCE', 0)
            sslcontext.set_ciphers(ssl._RESTRICTED_SERVER_CIPHERS)
    else:
        # Fallback for the fake SSLContext shim on very old Pythons; the
        # computed options cannot be applied there.
        sslcontext = SSLContext(ssl.PROTOCOL_TLSv1)

    if requireclientcert:
        sslcontext.verify_mode = ssl.CERT_REQUIRED
    else:
        sslcontext.verify_mode = ssl.CERT_NONE

    if certfile or keyfile:
        sslcontext.load_cert_chain(certfile=certfile, keyfile=keyfile)

    if cafile:
        sslcontext.load_verify_locations(cafile=cafile)

    return sslcontext.wrap_socket(sock, server_side=True)
579 579
class wildcarderror(Exception):
    """Represents an error parsing wildcards in DNS name.

    Raised by ``_dnsnamematch()`` when a certificate DNS name contains
    more wildcard characters than permitted; callers surface the message
    to the user.
    """
582 582
def _dnsnamematch(dn, hostname, maxwildcards=1):
    """Match DNS names according RFC 6125 section 6.4.3.

    This code is effectively copied from CPython's ssl._dnsname_match.

    ``dn`` is a DNS name taken from the certificate; ``hostname`` is the
    name we expected to connect to. ``maxwildcards`` caps how many ``*``
    characters may appear in the leftmost label.

    Returns a bool indicating whether the expected hostname matches
    the value in ``dn``. Raises ``wildcarderror`` when the wildcard cap
    is exceeded.

    NOTE: the source view contained diff residue with both the old
    ``re.escape`` calls and their ``stringutil.reescape`` replacements;
    only the ``stringutil.reescape`` variants (which always return bytes)
    are kept, matching the intent of this changeset.
    """
    pats = []
    if not dn:
        return False
    dn = pycompat.bytesurl(dn)
    hostname = pycompat.bytesurl(hostname)

    pieces = dn.split('.')
    leftmost = pieces[0]
    remainder = pieces[1:]
    wildcards = leftmost.count('*')
    if wildcards > maxwildcards:
        raise wildcarderror(
            _('too many wildcards in certificate DNS name: %s') % dn)

    # speed up common case w/o wildcards
    if not wildcards:
        return dn.lower() == hostname.lower()

    # RFC 6125, section 6.4.3, subitem 1.
    # The client SHOULD NOT attempt to match a presented identifier in which
    # the wildcard character comprises a label other than the left-most label.
    if leftmost == '*':
        # When '*' is a fragment by itself, it matches a non-empty dotless
        # fragment.
        pats.append('[^.]+')
    elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
        # RFC 6125, section 6.4.3, subitem 3.
        # The client SHOULD NOT attempt to match a presented identifier
        # where the wildcard character is embedded within an A-label or
        # U-label of an internationalized domain name.
        pats.append(stringutil.reescape(leftmost))
    else:
        # Otherwise, '*' matches any dotless string, e.g. www*
        pats.append(stringutil.reescape(leftmost).replace(br'\*', '[^.]*'))

    # add the remaining fragments, ignore any wildcards
    for frag in remainder:
        pats.append(stringutil.reescape(frag))

    pat = re.compile(br'\A' + br'\.'.join(pats) + br'\Z', re.IGNORECASE)
    return pat.match(hostname) is not None
632 632
def _verifycert(cert, hostname):
    '''Verify that cert (in socket.getpeercert() format) matches hostname.
    CRLs is not handled.

    Returns error message if any problems are found and None on success.
    '''
    if not cert:
        return _('no certificate received')

    # Names from the certificate that were tried but did not match; used
    # for the final error message.
    candidates = []
    for field, val in cert.get(r'subjectAltName', []):
        if field != r'DNS':
            continue
        try:
            if _dnsnamematch(val, hostname):
                return
        except wildcarderror as e:
            return stringutil.forcebytestr(e.args[0])

        candidates.append(val)

    if not candidates:
        # The subject is only checked when there is no DNS in subjectAltName.
        for rdn in cert.get(r'subject', []):
            for field, val in rdn:
                # According to RFC 2818 the most specific Common Name must
                # be used.
                if field != r'commonName':
                    continue
                # 'subject' entries are unicode.
                try:
                    val = val.encode('ascii')
                except UnicodeEncodeError:
                    return _('IDN in certificate not supported')

                try:
                    if _dnsnamematch(val, hostname):
                        return
                except wildcarderror as e:
                    return stringutil.forcebytestr(e.args[0])

                candidates.append(val)

    candidates = [pycompat.bytesurl(d) for d in candidates]
    if candidates:
        # Joining a single name with ', ' yields that name unchanged, so
        # this covers both the one-name and many-name cases.
        return _('certificate is for %s') % ', '.join(candidates)
    return _('no commonName or subjectAltName found in certificate')
682 682
def _plainapplepython():
    """return true if this seems to be a pure Apple Python that
    * is unfrozen and presumably has the whole mercurial module in the file
      system
    * presumably is an Apple Python that uses Apple OpenSSL which has patches
      for using system certificate store CAs in addition to the provided
      cacerts file
    """
    # Guard clauses: must be macOS, not a frozen binary, and we must know
    # which interpreter executable is running.
    if not pycompat.isdarwin:
        return False
    if procutil.mainfrozen():
        return False
    if not pycompat.sysexecutable:
        return False
    # Apple ships its Python under these two well-known locations.
    exe = os.path.realpath(pycompat.sysexecutable).lower()
    return exe.startswith(('/usr/bin/python',
                           '/system/library/frameworks/python.framework/'))
697 697
# Well-known filesystem locations of CA certificate bundles, consulted as
# a last resort by _defaultcacerts() on non-Windows platforms when Python
# itself cannot load the system CA certificates.
_systemcacertpaths = [
    # RHEL, CentOS, and Fedora
    '/etc/pki/tls/certs/ca-bundle.trust.crt',
    # Debian, Ubuntu, Gentoo
    '/etc/ssl/certs/ca-certificates.crt',
]
704 704
def _defaultcacerts(ui):
    """return path to default CA certificates or None.

    It is assumed this function is called when the returned certificates
    file will actually be used to validate connections. Therefore this
    function may print warnings or debug messages assuming this usage.

    We don't print a message when the Python is able to load default
    CA certs because this scenario is detected at socket connect time.

    ``ui`` is only used here for emitting warning and debug messages.
    """
    # The "certifi" Python package provides certificates. If it is installed
    # and usable, assume the user intends it to be used and use it.
    try:
        import certifi
        certs = certifi.where()
        if os.path.exists(certs):
            ui.debug('using ca certificates from certifi\n')
            return certs
    except (ImportError, AttributeError):
        pass

    # On Windows, only the modern ssl module is capable of loading the system
    # CA certificates. If we're not capable of doing that, emit a warning
    # because we'll get a certificate verification error later and the lack
    # of loaded CA certificates will be the reason why.
    # Assertion: this code is only called if certificates are being verified.
    if pycompat.iswindows:
        if not _canloaddefaultcerts:
            ui.warn(_('(unable to load Windows CA certificates; see '
                      'https://mercurial-scm.org/wiki/SecureConnections for '
                      'how to configure Mercurial to avoid this message)\n'))

        return None

    # Apple's OpenSSL has patches that allow a specially constructed certificate
    # to load the system CA store. If we're running on Apple Python, use this
    # trick.
    if _plainapplepython():
        dummycert = os.path.join(
            os.path.dirname(pycompat.fsencode(__file__)), 'dummycert.pem')
        if os.path.exists(dummycert):
            return dummycert

    # The Apple OpenSSL trick isn't available to us. If Python isn't able to
    # load system certs, we're out of luck.
    if pycompat.isdarwin:
        # FUTURE Consider looking for Homebrew or MacPorts installed certs
        # files. Also consider exporting the keychain certs to a file during
        # Mercurial install.
        if not _canloaddefaultcerts:
            ui.warn(_('(unable to load CA certificates; see '
                      'https://mercurial-scm.org/wiki/SecureConnections for '
                      'how to configure Mercurial to avoid this message)\n'))
        return None

    # / is writable on Windows. Out of an abundance of caution make sure
    # we're not on Windows because paths from _systemcacerts could be installed
    # by non-admin users.
    assert not pycompat.iswindows

    # Try to find CA certificates in well-known locations. We print a warning
    # when using a found file because we don't want too much silent magic
    # for security settings. The expectation is that proper Mercurial
    # installs will have the CA certs path defined at install time and the
    # installer/packager will make an appropriate decision on the user's
    # behalf. We only get here and perform this setting as a feature of
    # last resort.
    if not _canloaddefaultcerts:
        for path in _systemcacertpaths:
            if os.path.isfile(path):
                ui.warn(_('(using CA certificates from %s; if you see this '
                          'message, your Mercurial install is not properly '
                          'configured; see '
                          'https://mercurial-scm.org/wiki/SecureConnections '
                          'for how to configure Mercurial to avoid this '
                          'message)\n') % path)
                return path

    ui.warn(_('(unable to load CA certificates; see '
              'https://mercurial-scm.org/wiki/SecureConnections for '
              'how to configure Mercurial to avoid this message)\n'))

    return None
788 788
def validatesocket(sock):
    """Validate a socket meets security requirements.

    The passed socket must have been created with ``wrapsocket()``.

    Raises ``error.Abort`` on any validation failure; returns None when the
    connection is acceptable (fingerprint matched, verification disabled,
    or the certificate verified against the expected hostname).
    """
    shost = sock._hgstate['hostname']
    host = pycompat.bytesurl(shost)
    ui = sock._hgstate['ui']
    settings = sock._hgstate['settings']

    try:
        peercert = sock.getpeercert(True)
        peercert2 = sock.getpeercert()
    except AttributeError:
        raise error.Abort(_('%s ssl connection error') % host)

    if not peercert:
        raise error.Abort(_('%s certificate error: '
                            'no certificate received') % host)

    if settings['disablecertverification']:
        # We don't print the certificate fingerprint because it shouldn't
        # be necessary: if the user requested certificate verification be
        # disabled, they presumably already saw a message about the inability
        # to verify the certificate and this message would have printed the
        # fingerprint. So printing the fingerprint here adds little to no
        # value.
        ui.warn(_('warning: connection security to %s is disabled per current '
                  'settings; communication is susceptible to eavesdropping '
                  'and tampering\n') % host)
        return

    # If a certificate fingerprint is pinned, use it and only it to
    # validate the remote cert.
    peerfingerprints = {
        'sha1': node.hex(hashlib.sha1(peercert).digest()),
        'sha256': node.hex(hashlib.sha256(peercert).digest()),
        'sha512': node.hex(hashlib.sha512(peercert).digest()),
    }

    def fmtfingerprint(s):
        # Render a hex digest as colon-separated byte pairs (aa:bb:...).
        return ':'.join([s[x:x + 2] for x in range(0, len(s), 2)])

    nicefingerprint = 'sha256:%s' % fmtfingerprint(peerfingerprints['sha256'])

    if settings['certfingerprints']:
        for hash, fingerprint in settings['certfingerprints']:
            if peerfingerprints[hash].lower() == fingerprint:
                ui.debug('%s certificate matched fingerprint %s:%s\n' %
                         (host, hash, fmtfingerprint(fingerprint)))
                if settings['legacyfingerprint']:
                    ui.warn(_('(SHA-1 fingerprint for %s found in legacy '
                              '[hostfingerprints] section; '
                              'if you trust this fingerprint, remove the old '
                              'SHA-1 fingerprint from [hostfingerprints] and '
                              'add the following entry to the new '
                              '[hostsecurity] section: %s:fingerprints=%s)\n') %
                            (host, host, nicefingerprint))
                return

        # Pinned fingerprint didn't match. This is a fatal error.
        if settings['legacyfingerprint']:
            section = 'hostfingerprint'
            nice = fmtfingerprint(peerfingerprints['sha1'])
        else:
            section = 'hostsecurity'
            # NOTE(review): 'hash' here is the loop variable left over from
            # the last iteration of the fingerprint loop above (it also
            # shadows the builtin) — so the reported algorithm is whichever
            # pinned entry was checked last; confirm this is intended.
            nice = '%s:%s' % (hash, fmtfingerprint(peerfingerprints[hash]))
        raise error.Abort(_('certificate for %s has unexpected '
                            'fingerprint %s') % (host, nice),
                          hint=_('check %s configuration') % section)

    # Security is enabled but no CAs are loaded. We can't establish trust
    # for the cert so abort.
    if not sock._hgstate['caloaded']:
        raise error.Abort(
            _('unable to verify security of %s (no loaded CA certificates); '
              'refusing to connect') % host,
            hint=_('see https://mercurial-scm.org/wiki/SecureConnections for '
                   'how to configure Mercurial to avoid this error or set '
                   'hostsecurity.%s:fingerprints=%s to trust this server') %
                 (host, nicefingerprint))

    msg = _verifycert(peercert2, shost)
    if msg:
        raise error.Abort(_('%s certificate error: %s') % (host, msg),
                          hint=_('set hostsecurity.%s:certfingerprints=%s '
                                 'config setting or use --insecure to connect '
                                 'insecurely') %
                               (host, nicefingerprint))
@@ -1,652 +1,652
1 1 $ hg init t
2 2 $ cd t
3 3 $ mkdir -p beans
4 4 $ for b in kidney navy turtle borlotti black pinto; do
5 5 > echo $b > beans/$b
6 6 > done
7 7 $ mkdir -p mammals/Procyonidae
8 8 $ for m in cacomistle coatimundi raccoon; do
9 9 > echo $m > mammals/Procyonidae/$m
10 10 > done
11 11 $ echo skunk > mammals/skunk
12 12 $ echo fennel > fennel
13 13 $ echo fenugreek > fenugreek
14 14 $ echo fiddlehead > fiddlehead
15 15 $ hg addremove
16 16 adding beans/black
17 17 adding beans/borlotti
18 18 adding beans/kidney
19 19 adding beans/navy
20 20 adding beans/pinto
21 21 adding beans/turtle
22 22 adding fennel
23 23 adding fenugreek
24 24 adding fiddlehead
25 25 adding mammals/Procyonidae/cacomistle
26 26 adding mammals/Procyonidae/coatimundi
27 27 adding mammals/Procyonidae/raccoon
28 28 adding mammals/skunk
29 29 $ hg commit -m "commit #0"
30 30
31 31 $ hg debugwalk -v
32 32 * matcher:
33 33 <alwaysmatcher>
34 34 f beans/black beans/black
35 35 f beans/borlotti beans/borlotti
36 36 f beans/kidney beans/kidney
37 37 f beans/navy beans/navy
38 38 f beans/pinto beans/pinto
39 39 f beans/turtle beans/turtle
40 40 f fennel fennel
41 41 f fenugreek fenugreek
42 42 f fiddlehead fiddlehead
43 43 f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
44 44 f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
45 45 f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
46 46 f mammals/skunk mammals/skunk
47 47 $ hg debugwalk -v -I.
48 48 * matcher:
49 49 <includematcher includes='(?:)'>
50 50 f beans/black beans/black
51 51 f beans/borlotti beans/borlotti
52 52 f beans/kidney beans/kidney
53 53 f beans/navy beans/navy
54 54 f beans/pinto beans/pinto
55 55 f beans/turtle beans/turtle
56 56 f fennel fennel
57 57 f fenugreek fenugreek
58 58 f fiddlehead fiddlehead
59 59 f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
60 60 f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
61 61 f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
62 62 f mammals/skunk mammals/skunk
63 63
64 64 $ cd mammals
65 65 $ hg debugwalk -v
66 66 * matcher:
67 67 <alwaysmatcher>
68 68 f beans/black ../beans/black
69 69 f beans/borlotti ../beans/borlotti
70 70 f beans/kidney ../beans/kidney
71 71 f beans/navy ../beans/navy
72 72 f beans/pinto ../beans/pinto
73 73 f beans/turtle ../beans/turtle
74 74 f fennel ../fennel
75 75 f fenugreek ../fenugreek
76 76 f fiddlehead ../fiddlehead
77 77 f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
78 78 f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
79 79 f mammals/Procyonidae/raccoon Procyonidae/raccoon
80 80 f mammals/skunk skunk
81 81 $ hg debugwalk -v -X ../beans
82 82 * matcher:
83 83 <differencematcher
84 84 m1=<alwaysmatcher>,
85 85 m2=<includematcher includes='(?:beans(?:/|$))'>>
86 86 f fennel ../fennel
87 87 f fenugreek ../fenugreek
88 88 f fiddlehead ../fiddlehead
89 89 f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
90 90 f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
91 91 f mammals/Procyonidae/raccoon Procyonidae/raccoon
92 92 f mammals/skunk skunk
93 93 $ hg debugwalk -v -I '*k'
94 94 * matcher:
95 <includematcher includes='(?:mammals\\/[^/]*k(?:/|$))'>
95 <includematcher includes='(?:mammals/[^/]*k(?:/|$))'>
96 96 f mammals/skunk skunk
97 97 $ hg debugwalk -v -I 'glob:*k'
98 98 * matcher:
99 <includematcher includes='(?:mammals\\/[^/]*k(?:/|$))'>
99 <includematcher includes='(?:mammals/[^/]*k(?:/|$))'>
100 100 f mammals/skunk skunk
101 101 $ hg debugwalk -v -I 'relglob:*k'
102 102 * matcher:
103 103 <includematcher includes='(?:(?:|.*/)[^/]*k(?:/|$))'>
104 104 f beans/black ../beans/black
105 105 f fenugreek ../fenugreek
106 106 f mammals/skunk skunk
107 107 $ hg debugwalk -v -I 'relglob:*k' .
108 108 * matcher:
109 109 <intersectionmatcher
110 110 m1=<patternmatcher patterns='(?:mammals(?:/|$))'>,
111 111 m2=<includematcher includes='(?:(?:|.*/)[^/]*k(?:/|$))'>>
112 112 f mammals/skunk skunk
113 113 $ hg debugwalk -v -I 're:.*k$'
114 114 * matcher:
115 115 <includematcher includes='(?:.*k$)'>
116 116 f beans/black ../beans/black
117 117 f fenugreek ../fenugreek
118 118 f mammals/skunk skunk
119 119 $ hg debugwalk -v -I 'relre:.*k$'
120 120 * matcher:
121 121 <includematcher includes='(?:.*.*k$)'>
122 122 f beans/black ../beans/black
123 123 f fenugreek ../fenugreek
124 124 f mammals/skunk skunk
125 125 $ hg debugwalk -v -I 'path:beans'
126 126 * matcher:
127 127 <includematcher includes='(?:beans(?:/|$))'>
128 128 f beans/black ../beans/black
129 129 f beans/borlotti ../beans/borlotti
130 130 f beans/kidney ../beans/kidney
131 131 f beans/navy ../beans/navy
132 132 f beans/pinto ../beans/pinto
133 133 f beans/turtle ../beans/turtle
134 134 $ hg debugwalk -v -I 'relpath:detour/../../beans'
135 135 * matcher:
136 136 <includematcher includes='(?:beans(?:/|$))'>
137 137 f beans/black ../beans/black
138 138 f beans/borlotti ../beans/borlotti
139 139 f beans/kidney ../beans/kidney
140 140 f beans/navy ../beans/navy
141 141 f beans/pinto ../beans/pinto
142 142 f beans/turtle ../beans/turtle
143 143
144 144 $ hg debugwalk -v 'rootfilesin:'
145 145 * matcher:
146 146 <patternmatcher patterns='(?:[^/]+$)'>
147 147 f fennel ../fennel
148 148 f fenugreek ../fenugreek
149 149 f fiddlehead ../fiddlehead
150 150 $ hg debugwalk -v -I 'rootfilesin:'
151 151 * matcher:
152 152 <includematcher includes='(?:[^/]+$)'>
153 153 f fennel ../fennel
154 154 f fenugreek ../fenugreek
155 155 f fiddlehead ../fiddlehead
156 156 $ hg debugwalk -v 'rootfilesin:.'
157 157 * matcher:
158 158 <patternmatcher patterns='(?:[^/]+$)'>
159 159 f fennel ../fennel
160 160 f fenugreek ../fenugreek
161 161 f fiddlehead ../fiddlehead
162 162 $ hg debugwalk -v -I 'rootfilesin:.'
163 163 * matcher:
164 164 <includematcher includes='(?:[^/]+$)'>
165 165 f fennel ../fennel
166 166 f fenugreek ../fenugreek
167 167 f fiddlehead ../fiddlehead
168 168 $ hg debugwalk -v -X 'rootfilesin:'
169 169 * matcher:
170 170 <differencematcher
171 171 m1=<alwaysmatcher>,
172 172 m2=<includematcher includes='(?:[^/]+$)'>>
173 173 f beans/black ../beans/black
174 174 f beans/borlotti ../beans/borlotti
175 175 f beans/kidney ../beans/kidney
176 176 f beans/navy ../beans/navy
177 177 f beans/pinto ../beans/pinto
178 178 f beans/turtle ../beans/turtle
179 179 f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
180 180 f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
181 181 f mammals/Procyonidae/raccoon Procyonidae/raccoon
182 182 f mammals/skunk skunk
183 183 $ hg debugwalk -v 'rootfilesin:fennel'
184 184 * matcher:
185 185 <patternmatcher patterns='(?:fennel/[^/]+$)'>
186 186 $ hg debugwalk -v -I 'rootfilesin:fennel'
187 187 * matcher:
188 188 <includematcher includes='(?:fennel/[^/]+$)'>
189 189 $ hg debugwalk -v 'rootfilesin:skunk'
190 190 * matcher:
191 191 <patternmatcher patterns='(?:skunk/[^/]+$)'>
192 192 $ hg debugwalk -v -I 'rootfilesin:skunk'
193 193 * matcher:
194 194 <includematcher includes='(?:skunk/[^/]+$)'>
195 195 $ hg debugwalk -v 'rootfilesin:beans'
196 196 * matcher:
197 197 <patternmatcher patterns='(?:beans/[^/]+$)'>
198 198 f beans/black ../beans/black
199 199 f beans/borlotti ../beans/borlotti
200 200 f beans/kidney ../beans/kidney
201 201 f beans/navy ../beans/navy
202 202 f beans/pinto ../beans/pinto
203 203 f beans/turtle ../beans/turtle
204 204 $ hg debugwalk -v -I 'rootfilesin:beans'
205 205 * matcher:
206 206 <includematcher includes='(?:beans/[^/]+$)'>
207 207 f beans/black ../beans/black
208 208 f beans/borlotti ../beans/borlotti
209 209 f beans/kidney ../beans/kidney
210 210 f beans/navy ../beans/navy
211 211 f beans/pinto ../beans/pinto
212 212 f beans/turtle ../beans/turtle
213 213 $ hg debugwalk -v 'rootfilesin:mammals'
214 214 * matcher:
215 215 <patternmatcher patterns='(?:mammals/[^/]+$)'>
216 216 f mammals/skunk skunk
217 217 $ hg debugwalk -v -I 'rootfilesin:mammals'
218 218 * matcher:
219 219 <includematcher includes='(?:mammals/[^/]+$)'>
220 220 f mammals/skunk skunk
221 221 $ hg debugwalk -v 'rootfilesin:mammals/'
222 222 * matcher:
223 223 <patternmatcher patterns='(?:mammals/[^/]+$)'>
224 224 f mammals/skunk skunk
225 225 $ hg debugwalk -v -I 'rootfilesin:mammals/'
226 226 * matcher:
227 227 <includematcher includes='(?:mammals/[^/]+$)'>
228 228 f mammals/skunk skunk
229 229 $ hg debugwalk -v -X 'rootfilesin:mammals'
230 230 * matcher:
231 231 <differencematcher
232 232 m1=<alwaysmatcher>,
233 233 m2=<includematcher includes='(?:mammals/[^/]+$)'>>
234 234 f beans/black ../beans/black
235 235 f beans/borlotti ../beans/borlotti
236 236 f beans/kidney ../beans/kidney
237 237 f beans/navy ../beans/navy
238 238 f beans/pinto ../beans/pinto
239 239 f beans/turtle ../beans/turtle
240 240 f fennel ../fennel
241 241 f fenugreek ../fenugreek
242 242 f fiddlehead ../fiddlehead
243 243 f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
244 244 f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
245 245 f mammals/Procyonidae/raccoon Procyonidae/raccoon
246 246
247 247 $ hg debugwalk -v .
248 248 * matcher:
249 249 <patternmatcher patterns='(?:mammals(?:/|$))'>
250 250 f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
251 251 f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
252 252 f mammals/Procyonidae/raccoon Procyonidae/raccoon
253 253 f mammals/skunk skunk
254 254 $ hg debugwalk -v -I.
255 255 * matcher:
256 256 <includematcher includes='(?:mammals(?:/|$))'>
257 257 f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
258 258 f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
259 259 f mammals/Procyonidae/raccoon Procyonidae/raccoon
260 260 f mammals/skunk skunk
261 261 $ hg debugwalk -v Procyonidae
262 262 * matcher:
263 <patternmatcher patterns='(?:mammals\\/Procyonidae(?:/|$))'>
263 <patternmatcher patterns='(?:mammals/Procyonidae(?:/|$))'>
264 264 f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
265 265 f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
266 266 f mammals/Procyonidae/raccoon Procyonidae/raccoon
267 267
268 268 $ cd Procyonidae
269 269 $ hg debugwalk -v .
270 270 * matcher:
271 <patternmatcher patterns='(?:mammals\\/Procyonidae(?:/|$))'>
271 <patternmatcher patterns='(?:mammals/Procyonidae(?:/|$))'>
272 272 f mammals/Procyonidae/cacomistle cacomistle
273 273 f mammals/Procyonidae/coatimundi coatimundi
274 274 f mammals/Procyonidae/raccoon raccoon
275 275 $ hg debugwalk -v ..
276 276 * matcher:
277 277 <patternmatcher patterns='(?:mammals(?:/|$))'>
278 278 f mammals/Procyonidae/cacomistle cacomistle
279 279 f mammals/Procyonidae/coatimundi coatimundi
280 280 f mammals/Procyonidae/raccoon raccoon
281 281 f mammals/skunk ../skunk
282 282 $ cd ..
283 283
284 284 $ hg debugwalk -v ../beans
285 285 * matcher:
286 286 <patternmatcher patterns='(?:beans(?:/|$))'>
287 287 f beans/black ../beans/black
288 288 f beans/borlotti ../beans/borlotti
289 289 f beans/kidney ../beans/kidney
290 290 f beans/navy ../beans/navy
291 291 f beans/pinto ../beans/pinto
292 292 f beans/turtle ../beans/turtle
293 293 $ hg debugwalk -v .
294 294 * matcher:
295 295 <patternmatcher patterns='(?:mammals(?:/|$))'>
296 296 f mammals/Procyonidae/cacomistle Procyonidae/cacomistle
297 297 f mammals/Procyonidae/coatimundi Procyonidae/coatimundi
298 298 f mammals/Procyonidae/raccoon Procyonidae/raccoon
299 299 f mammals/skunk skunk
300 300 $ hg debugwalk -v .hg
301 301 abort: path 'mammals/.hg' is inside nested repo 'mammals'
302 302 [255]
303 303 $ hg debugwalk -v ../.hg
304 304 abort: path contains illegal component: .hg
305 305 [255]
306 306 $ cd ..
307 307
308 308 $ hg debugwalk -v -Ibeans
309 309 * matcher:
310 310 <includematcher includes='(?:beans(?:/|$))'>
311 311 f beans/black beans/black
312 312 f beans/borlotti beans/borlotti
313 313 f beans/kidney beans/kidney
314 314 f beans/navy beans/navy
315 315 f beans/pinto beans/pinto
316 316 f beans/turtle beans/turtle
317 317 $ hg debugwalk -v -I '{*,{b,m}*/*}k'
318 318 * matcher:
319 <includematcher includes='(?:(?:[^/]*|(?:b|m)[^/]*\\/[^/]*)k(?:/|$))'>
319 <includematcher includes='(?:(?:[^/]*|(?:b|m)[^/]*/[^/]*)k(?:/|$))'>
320 320 f beans/black beans/black
321 321 f fenugreek fenugreek
322 322 f mammals/skunk mammals/skunk
323 323 $ hg debugwalk -v -Ibeans mammals
324 324 * matcher:
325 325 <intersectionmatcher
326 326 m1=<patternmatcher patterns='(?:mammals(?:/|$))'>,
327 327 m2=<includematcher includes='(?:beans(?:/|$))'>>
328 328 $ hg debugwalk -v -Inon-existent
329 329 * matcher:
330 330 <includematcher includes='(?:non\\-existent(?:/|$))'>
331 331 $ hg debugwalk -v -Inon-existent -Ibeans/black
332 332 * matcher:
333 <includematcher includes='(?:non\\-existent(?:/|$)|beans\\/black(?:/|$))'>
333 <includematcher includes='(?:non\\-existent(?:/|$)|beans/black(?:/|$))'>
334 334 f beans/black beans/black
335 335 $ hg debugwalk -v -Ibeans beans/black
336 336 * matcher:
337 337 <intersectionmatcher
338 m1=<patternmatcher patterns='(?:beans\\/black(?:/|$))'>,
338 m1=<patternmatcher patterns='(?:beans/black(?:/|$))'>,
339 339 m2=<includematcher includes='(?:beans(?:/|$))'>>
340 340 f beans/black beans/black exact
341 341 $ hg debugwalk -v -Ibeans/black beans
342 342 * matcher:
343 343 <intersectionmatcher
344 344 m1=<patternmatcher patterns='(?:beans(?:/|$))'>,
345 m2=<includematcher includes='(?:beans\\/black(?:/|$))'>>
345 m2=<includematcher includes='(?:beans/black(?:/|$))'>>
346 346 f beans/black beans/black
347 347 $ hg debugwalk -v -Xbeans/black beans
348 348 * matcher:
349 349 <differencematcher
350 350 m1=<patternmatcher patterns='(?:beans(?:/|$))'>,
351 m2=<includematcher includes='(?:beans\\/black(?:/|$))'>>
351 m2=<includematcher includes='(?:beans/black(?:/|$))'>>
352 352 f beans/borlotti beans/borlotti
353 353 f beans/kidney beans/kidney
354 354 f beans/navy beans/navy
355 355 f beans/pinto beans/pinto
356 356 f beans/turtle beans/turtle
357 357 $ hg debugwalk -v -Xbeans/black -Ibeans
358 358 * matcher:
359 359 <differencematcher
360 360 m1=<includematcher includes='(?:beans(?:/|$))'>,
361 m2=<includematcher includes='(?:beans\\/black(?:/|$))'>>
361 m2=<includematcher includes='(?:beans/black(?:/|$))'>>
362 362 f beans/borlotti beans/borlotti
363 363 f beans/kidney beans/kidney
364 364 f beans/navy beans/navy
365 365 f beans/pinto beans/pinto
366 366 f beans/turtle beans/turtle
367 367 $ hg debugwalk -v -Xbeans/black beans/black
368 368 * matcher:
369 369 <differencematcher
370 m1=<patternmatcher patterns='(?:beans\\/black(?:/|$))'>,
371 m2=<includematcher includes='(?:beans\\/black(?:/|$))'>>
370 m1=<patternmatcher patterns='(?:beans/black(?:/|$))'>,
371 m2=<includematcher includes='(?:beans/black(?:/|$))'>>
372 372 $ hg debugwalk -v -Xbeans/black -Ibeans/black
373 373 * matcher:
374 374 <differencematcher
375 m1=<includematcher includes='(?:beans\\/black(?:/|$))'>,
376 m2=<includematcher includes='(?:beans\\/black(?:/|$))'>>
375 m1=<includematcher includes='(?:beans/black(?:/|$))'>,
376 m2=<includematcher includes='(?:beans/black(?:/|$))'>>
377 377 $ hg debugwalk -v -Xbeans beans/black
378 378 * matcher:
379 379 <differencematcher
380 m1=<patternmatcher patterns='(?:beans\\/black(?:/|$))'>,
380 m1=<patternmatcher patterns='(?:beans/black(?:/|$))'>,
381 381 m2=<includematcher includes='(?:beans(?:/|$))'>>
382 382 $ hg debugwalk -v -Xbeans -Ibeans/black
383 383 * matcher:
384 384 <differencematcher
385 m1=<includematcher includes='(?:beans\\/black(?:/|$))'>,
385 m1=<includematcher includes='(?:beans/black(?:/|$))'>,
386 386 m2=<includematcher includes='(?:beans(?:/|$))'>>
387 387 $ hg debugwalk -v 'glob:mammals/../beans/b*'
388 388 * matcher:
389 <patternmatcher patterns='(?:beans\\/b[^/]*$)'>
389 <patternmatcher patterns='(?:beans/b[^/]*$)'>
390 390 f beans/black beans/black
391 391 f beans/borlotti beans/borlotti
392 392 $ hg debugwalk -v '-X*/Procyonidae' mammals
393 393 * matcher:
394 394 <differencematcher
395 395 m1=<patternmatcher patterns='(?:mammals(?:/|$))'>,
396 m2=<includematcher includes='(?:[^/]*\\/Procyonidae(?:/|$))'>>
396 m2=<includematcher includes='(?:[^/]*/Procyonidae(?:/|$))'>>
397 397 f mammals/skunk mammals/skunk
398 398 $ hg debugwalk -v path:mammals
399 399 * matcher:
400 400 <patternmatcher patterns='(?:mammals(?:/|$))'>
401 401 f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
402 402 f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
403 403 f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
404 404 f mammals/skunk mammals/skunk
405 405 $ hg debugwalk -v ..
406 406 abort: .. not under root '$TESTTMP/t'
407 407 [255]
408 408 $ hg debugwalk -v beans/../..
409 409 abort: beans/../.. not under root '$TESTTMP/t'
410 410 [255]
411 411 $ hg debugwalk -v .hg
412 412 abort: path contains illegal component: .hg
413 413 [255]
414 414 $ hg debugwalk -v beans/../.hg
415 415 abort: path contains illegal component: .hg
416 416 [255]
417 417 $ hg debugwalk -v beans/../.hg/data
418 418 abort: path contains illegal component: .hg/data
419 419 [255]
420 420 $ hg debugwalk -v beans/.hg
421 421 abort: path 'beans/.hg' is inside nested repo 'beans'
422 422 [255]
423 423
424 424 Test explicit paths and excludes:
425 425
426 426 $ hg debugwalk -v fennel -X fennel
427 427 * matcher:
428 428 <differencematcher
429 429 m1=<patternmatcher patterns='(?:fennel(?:/|$))'>,
430 430 m2=<includematcher includes='(?:fennel(?:/|$))'>>
431 431 $ hg debugwalk -v fennel -X 'f*'
432 432 * matcher:
433 433 <differencematcher
434 434 m1=<patternmatcher patterns='(?:fennel(?:/|$))'>,
435 435 m2=<includematcher includes='(?:f[^/]*(?:/|$))'>>
436 436 $ hg debugwalk -v beans/black -X 'path:beans'
437 437 * matcher:
438 438 <differencematcher
439 m1=<patternmatcher patterns='(?:beans\\/black(?:/|$))'>,
439 m1=<patternmatcher patterns='(?:beans/black(?:/|$))'>,
440 440 m2=<includematcher includes='(?:beans(?:/|$))'>>
441 441 $ hg debugwalk -v -I 'path:beans/black' -X 'path:beans'
442 442 * matcher:
443 443 <differencematcher
444 m1=<includematcher includes='(?:beans\\/black(?:/|$))'>,
444 m1=<includematcher includes='(?:beans/black(?:/|$))'>,
445 445 m2=<includematcher includes='(?:beans(?:/|$))'>>
446 446
447 447 Test absolute paths:
448 448
449 449 $ hg debugwalk -v `pwd`/beans
450 450 * matcher:
451 451 <patternmatcher patterns='(?:beans(?:/|$))'>
452 452 f beans/black beans/black
453 453 f beans/borlotti beans/borlotti
454 454 f beans/kidney beans/kidney
455 455 f beans/navy beans/navy
456 456 f beans/pinto beans/pinto
457 457 f beans/turtle beans/turtle
458 458 $ hg debugwalk -v `pwd`/..
459 459 abort: $TESTTMP/t/.. not under root '$TESTTMP/t'
460 460 [255]
461 461
462 462 Test patterns:
463 463
464 464 $ hg debugwalk -v glob:\*
465 465 * matcher:
466 466 <patternmatcher patterns='(?:[^/]*$)'>
467 467 f fennel fennel
468 468 f fenugreek fenugreek
469 469 f fiddlehead fiddlehead
470 470 #if eol-in-paths
471 471 $ echo glob:glob > glob:glob
472 472 $ hg addremove
473 473 adding glob:glob
474 474 warning: filename contains ':', which is reserved on Windows: 'glob:glob'
475 475 $ hg debugwalk -v glob:\*
476 476 * matcher:
477 477 <patternmatcher patterns='(?:[^/]*$)'>
478 478 f fennel fennel
479 479 f fenugreek fenugreek
480 480 f fiddlehead fiddlehead
481 481 f glob:glob glob:glob
482 482 $ hg debugwalk -v glob:glob
483 483 * matcher:
484 484 <patternmatcher patterns='(?:glob$)'>
485 485 glob: $ENOENT$
486 486 $ hg debugwalk -v glob:glob:glob
487 487 * matcher:
488 <patternmatcher patterns='(?:glob\\:glob$)'>
488 <patternmatcher patterns='(?:glob:glob$)'>
489 489 f glob:glob glob:glob exact
490 490 $ hg debugwalk -v path:glob:glob
491 491 * matcher:
492 <patternmatcher patterns='(?:glob\\:glob(?:/|$))'>
492 <patternmatcher patterns='(?:glob:glob(?:/|$))'>
493 493 f glob:glob glob:glob exact
494 494 $ rm glob:glob
495 495 $ hg addremove
496 496 removing glob:glob
497 497 #endif
498 498
499 499 $ hg debugwalk -v 'glob:**e'
500 500 * matcher:
501 501 <patternmatcher patterns='(?:.*e$)'>
502 502 f beans/turtle beans/turtle
503 503 f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
504 504
505 505 $ hg debugwalk -v 're:.*[kb]$'
506 506 * matcher:
507 507 <patternmatcher patterns='(?:.*[kb]$)'>
508 508 f beans/black beans/black
509 509 f fenugreek fenugreek
510 510 f mammals/skunk mammals/skunk
511 511
512 512 $ hg debugwalk -v path:beans/black
513 513 * matcher:
514 <patternmatcher patterns='(?:beans\\/black(?:/|$))'>
514 <patternmatcher patterns='(?:beans/black(?:/|$))'>
515 515 f beans/black beans/black exact
516 516 $ hg debugwalk -v path:beans//black
517 517 * matcher:
518 <patternmatcher patterns='(?:beans\\/black(?:/|$))'>
518 <patternmatcher patterns='(?:beans/black(?:/|$))'>
519 519 f beans/black beans/black exact
520 520
521 521 $ hg debugwalk -v relglob:Procyonidae
522 522 * matcher:
523 523 <patternmatcher patterns='(?:(?:|.*/)Procyonidae$)'>
524 524 $ hg debugwalk -v 'relglob:Procyonidae/**'
525 525 * matcher:
526 <patternmatcher patterns='(?:(?:|.*/)Procyonidae\\/.*$)'>
526 <patternmatcher patterns='(?:(?:|.*/)Procyonidae/.*$)'>
527 527 f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
528 528 f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
529 529 f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
530 530 $ hg debugwalk -v 'relglob:Procyonidae/**' fennel
531 531 * matcher:
532 <patternmatcher patterns='(?:(?:|.*/)Procyonidae\\/.*$|fennel(?:/|$))'>
532 <patternmatcher patterns='(?:(?:|.*/)Procyonidae/.*$|fennel(?:/|$))'>
533 533 f fennel fennel exact
534 534 f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
535 535 f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
536 536 f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
537 537 $ hg debugwalk -v beans 'glob:beans/*'
538 538 * matcher:
539 <patternmatcher patterns='(?:beans(?:/|$)|beans\\/[^/]*$)'>
539 <patternmatcher patterns='(?:beans(?:/|$)|beans/[^/]*$)'>
540 540 f beans/black beans/black
541 541 f beans/borlotti beans/borlotti
542 542 f beans/kidney beans/kidney
543 543 f beans/navy beans/navy
544 544 f beans/pinto beans/pinto
545 545 f beans/turtle beans/turtle
546 546 $ hg debugwalk -v 'glob:mamm**'
547 547 * matcher:
548 548 <patternmatcher patterns='(?:mamm.*$)'>
549 549 f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
550 550 f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
551 551 f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
552 552 f mammals/skunk mammals/skunk
553 553 $ hg debugwalk -v 'glob:mamm**' fennel
554 554 * matcher:
555 555 <patternmatcher patterns='(?:mamm.*$|fennel(?:/|$))'>
556 556 f fennel fennel exact
557 557 f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle
558 558 f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi
559 559 f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon
560 560 f mammals/skunk mammals/skunk
561 561 $ hg debugwalk -v 'glob:j*'
562 562 * matcher:
563 563 <patternmatcher patterns='(?:j[^/]*$)'>
564 564 $ hg debugwalk -v NOEXIST
565 565 * matcher:
566 566 <patternmatcher patterns='(?:NOEXIST(?:/|$))'>
567 567 NOEXIST: * (glob)
568 568
569 569 #if fifo
570 570 $ mkfifo fifo
571 571 $ hg debugwalk -v fifo
572 572 * matcher:
573 573 <patternmatcher patterns='(?:fifo(?:/|$))'>
574 574 fifo: unsupported file type (type is fifo)
575 575 #endif
576 576
577 577 $ rm fenugreek
578 578 $ hg debugwalk -v fenugreek
579 579 * matcher:
580 580 <patternmatcher patterns='(?:fenugreek(?:/|$))'>
581 581 f fenugreek fenugreek exact
582 582 $ hg rm fenugreek
583 583 $ hg debugwalk -v fenugreek
584 584 * matcher:
585 585 <patternmatcher patterns='(?:fenugreek(?:/|$))'>
586 586 f fenugreek fenugreek exact
587 587 $ touch new
588 588 $ hg debugwalk -v new
589 589 * matcher:
590 590 <patternmatcher patterns='(?:new(?:/|$))'>
591 591 f new new exact
592 592
593 593 $ mkdir ignored
594 594 $ touch ignored/file
595 595 $ echo '^ignored$' > .hgignore
596 596 $ hg debugwalk -v ignored
597 597 * matcher:
598 598 <patternmatcher patterns='(?:ignored(?:/|$))'>
599 599 $ hg debugwalk -v ignored/file
600 600 * matcher:
601 <patternmatcher patterns='(?:ignored\\/file(?:/|$))'>
601 <patternmatcher patterns='(?:ignored/file(?:/|$))'>
602 602 f ignored/file ignored/file exact
603 603
604 604 Test listfile and listfile0
605 605
606 606 $ $PYTHON -c "open('listfile0', 'wb').write(b'fenugreek\0new\0')"
607 607 $ hg debugwalk -v -I 'listfile0:listfile0'
608 608 * matcher:
609 609 <includematcher includes='(?:fenugreek(?:/|$)|new(?:/|$))'>
610 610 f fenugreek fenugreek
611 611 f new new
612 612 $ $PYTHON -c "open('listfile', 'wb').write(b'fenugreek\nnew\r\nmammals/skunk\n')"
613 613 $ hg debugwalk -v -I 'listfile:listfile'
614 614 * matcher:
615 <includematcher includes='(?:fenugreek(?:/|$)|new(?:/|$)|mammals\\/skunk(?:/|$))'>
615 <includematcher includes='(?:fenugreek(?:/|$)|new(?:/|$)|mammals/skunk(?:/|$))'>
616 616 f fenugreek fenugreek
617 617 f mammals/skunk mammals/skunk
618 618 f new new
619 619
620 620 $ cd ..
621 621 $ hg debugwalk -v -R t t/mammals/skunk
622 622 * matcher:
623 <patternmatcher patterns='(?:mammals\\/skunk(?:/|$))'>
623 <patternmatcher patterns='(?:mammals/skunk(?:/|$))'>
624 624 f mammals/skunk t/mammals/skunk exact
625 625 $ mkdir t2
626 626 $ cd t2
627 627 $ hg debugwalk -v -R ../t ../t/mammals/skunk
628 628 * matcher:
629 <patternmatcher patterns='(?:mammals\\/skunk(?:/|$))'>
629 <patternmatcher patterns='(?:mammals/skunk(?:/|$))'>
630 630 f mammals/skunk ../t/mammals/skunk exact
631 631 $ hg debugwalk -v --cwd ../t mammals/skunk
632 632 * matcher:
633 <patternmatcher patterns='(?:mammals\\/skunk(?:/|$))'>
633 <patternmatcher patterns='(?:mammals/skunk(?:/|$))'>
634 634 f mammals/skunk mammals/skunk exact
635 635
636 636 $ cd ..
637 637
638 638 Test split patterns on overflow
639 639
640 640 $ cd t
641 641 $ echo fennel > overflow.list
642 642 $ cat >> printnum.py <<EOF
643 643 > from __future__ import print_function
644 644 > for i in range(20000 // 100):
645 645 > print('x' * 100)
646 646 > EOF
647 647 $ $PYTHON printnum.py >> overflow.list
648 648 $ echo fenugreek >> overflow.list
649 649 $ hg debugwalk 'listfile:overflow.list' 2>&1 | egrep -v '^xxx'
650 650 f fennel fennel exact
651 651 f fenugreek fenugreek exact
652 652 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now