archive: change "matchfn" argument to a real matcher...
Martin von Zweigbergk
r40443:3d76a8e6 default
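This commit renames the "matchfn" keyword argument of archival.archive (and
the corresponding parameter of wrappers such as largefiles' overridearchive)
to "match", since the value passed is a real matcher object rather than a
bare matching function. A minimal sketch of the call-site change, based only
on the hunks below (snapshot_files is a hypothetical helper used for
illustration)::

    from mercurial import archival, scmutil

    def snapshot_files(repo, base, node, files, listsubrepos):
        # Previously the keyword was spelled "matchfn" even though a matcher
        # object was being passed; after this change the keyword is "match".
        archival.archive(repo, base, node, 'files',
                         match=scmutil.matchfiles(repo, files),
                         subrepos=listsubrepos)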
@@ -1,435 +1,435 b''
1 1 # extdiff.py - external diff program support for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''command to allow external programs to compare revisions
9 9
10 10 The extdiff Mercurial extension allows you to use external programs
11 11 to compare revisions, or revision with working directory. The external
12 12 diff programs are called with a configurable set of options and two
13 13 non-option arguments: paths to directories containing snapshots of
14 14 files to compare.
15 15
16 16 If there is more than one file being compared and the "child" revision
17 17 is the working directory, any modifications made in the external diff
18 18 program will be copied back to the working directory from the temporary
19 19 directory.
20 20
21 21 The extdiff extension also allows you to configure new diff commands, so
22 22 you do not need to type :hg:`extdiff -p kdiff3` always. ::
23 23
24 24 [extdiff]
25 25 # add new command that runs GNU diff(1) in 'context diff' mode
26 26 cdiff = gdiff -Nprc5
27 27 ## or the old way:
28 28 #cmd.cdiff = gdiff
29 29 #opts.cdiff = -Nprc5
30 30
31 31 # add new command called meld, runs meld (no need to name twice). If
32 32 # the meld executable is not available, the meld tool in [merge-tools]
33 33 # will be used, if available
34 34 meld =
35 35
36 36 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
37 37 # (see http://www.vim.org/scripts/script.php?script_id=102). Non-English
38 38 # users should be sure to put "let g:DirDiffDynamicDiffText = 1" in
39 39 # your .vimrc
40 40 vimdiff = gvim -f "+next" \\
41 41 "+execute 'DirDiff' fnameescape(argv(0)) fnameescape(argv(1))"
42 42
43 43 Tool arguments can include variables that are expanded at runtime::
44 44
45 45 $parent1, $plabel1 - filename, descriptive label of first parent
46 46 $child, $clabel - filename, descriptive label of child revision
47 47 $parent2, $plabel2 - filename, descriptive label of second parent
48 48 $root - repository root
49 49 $parent is an alias for $parent1.
50 50
51 51 The extdiff extension will look in your [diff-tools] and [merge-tools]
52 52 sections for diff tool arguments, when none are specified in [extdiff].
53 53
54 54 ::
55 55
56 56 [extdiff]
57 57 kdiff3 =
58 58
59 59 [diff-tools]
60 60 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
61 61
62 62 You can use -I/-X and a list of file or directory names as with the
63 63 normal :hg:`diff` command. The extdiff extension makes snapshots of
64 64 only the needed files, so running the external diff program will be
65 65 pretty fast (at least faster than having to compare the entire tree).
66 66 '''
67 67
68 68 from __future__ import absolute_import
69 69
70 70 import os
71 71 import re
72 72 import shutil
73 73 import stat
74 74
75 75 from mercurial.i18n import _
76 76 from mercurial.node import (
77 77 nullid,
78 78 short,
79 79 )
80 80 from mercurial import (
81 81 archival,
82 82 cmdutil,
83 83 error,
84 84 filemerge,
85 85 formatter,
86 86 pycompat,
87 87 registrar,
88 88 scmutil,
89 89 util,
90 90 )
91 91 from mercurial.utils import (
92 92 procutil,
93 93 stringutil,
94 94 )
95 95
96 96 cmdtable = {}
97 97 command = registrar.command(cmdtable)
98 98
99 99 configtable = {}
100 100 configitem = registrar.configitem(configtable)
101 101
102 102 configitem('extdiff', br'opts\..*',
103 103 default='',
104 104 generic=True,
105 105 )
106 106
107 107 configitem('diff-tools', br'.*\.diffargs$',
108 108 default=None,
109 109 generic=True,
110 110 )
111 111
112 112 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
113 113 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
114 114 # be specifying the version(s) of Mercurial they are tested with, or
115 115 # leave the attribute unspecified.
116 116 testedwith = 'ships-with-hg-core'
117 117
118 118 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
119 119 '''snapshot files as of some revision
120 120 if not using snapshot, -I/-X does not work and recursive diff
121 121 in tools like kdiff3 and meld displays too many files.'''
122 122 dirname = os.path.basename(repo.root)
123 123 if dirname == "":
124 124 dirname = "root"
125 125 if node is not None:
126 126 dirname = '%s.%s' % (dirname, short(node))
127 127 base = os.path.join(tmproot, dirname)
128 128 os.mkdir(base)
129 129 fnsandstat = []
130 130
131 131 if node is not None:
132 132 ui.note(_('making snapshot of %d files from rev %s\n') %
133 133 (len(files), short(node)))
134 134 else:
135 135 ui.note(_('making snapshot of %d files from working directory\n') %
136 136 (len(files)))
137 137
138 138 if files:
139 139 repo.ui.setconfig("ui", "archivemeta", False)
140 140
141 141 archival.archive(repo, base, node, 'files',
142 matchfn=scmutil.matchfiles(repo, files),
142 match=scmutil.matchfiles(repo, files),
143 143 subrepos=listsubrepos)
144 144
145 145 for fn in sorted(files):
146 146 wfn = util.pconvert(fn)
147 147 ui.note(' %s\n' % wfn)
148 148
149 149 if node is None:
150 150 dest = os.path.join(base, wfn)
151 151
152 152 fnsandstat.append((dest, repo.wjoin(fn), os.lstat(dest)))
153 153 return dirname, fnsandstat
154 154
155 155 def dodiff(ui, repo, cmdline, pats, opts):
156 156 '''Do the actual diff:
157 157
158 158 - copy to a temp structure if diffing 2 internal revisions
159 159 - copy to a temp structure if diffing working revision with
160 160 another one and more than 1 file is changed
161 161 - just invoke the diff for a single file in the working dir
162 162 '''
163 163
164 164 revs = opts.get('rev')
165 165 change = opts.get('change')
166 166 do3way = '$parent2' in cmdline
167 167
168 168 if revs and change:
169 169 msg = _('cannot specify --rev and --change at the same time')
170 170 raise error.Abort(msg)
171 171 elif change:
172 172 ctx2 = scmutil.revsingle(repo, change, None)
173 173 ctx1a, ctx1b = ctx2.p1(), ctx2.p2()
174 174 else:
175 175 ctx1a, ctx2 = scmutil.revpair(repo, revs)
176 176 if not revs:
177 177 ctx1b = repo[None].p2()
178 178 else:
179 179 ctx1b = repo[nullid]
180 180
181 181 node1a = ctx1a.node()
182 182 node1b = ctx1b.node()
183 183 node2 = ctx2.node()
184 184
185 185 # Disable 3-way merge if there is only one parent
186 186 if do3way:
187 187 if node1b == nullid:
188 188 do3way = False
189 189
190 190 subrepos=opts.get('subrepos')
191 191
192 192 matcher = scmutil.match(repo[node2], pats, opts)
193 193
194 194 if opts.get('patch'):
195 195 if subrepos:
196 196 raise error.Abort(_('--patch cannot be used with --subrepos'))
197 197 if node2 is None:
198 198 raise error.Abort(_('--patch requires two revisions'))
199 199 else:
200 200 mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher,
201 201 listsubrepos=subrepos)[:3])
202 202 if do3way:
203 203 mod_b, add_b, rem_b = map(set,
204 204 repo.status(node1b, node2, matcher,
205 205 listsubrepos=subrepos)[:3])
206 206 else:
207 207 mod_b, add_b, rem_b = set(), set(), set()
208 208 modadd = mod_a | add_a | mod_b | add_b
209 209 common = modadd | rem_a | rem_b
210 210 if not common:
211 211 return 0
212 212
213 213 tmproot = pycompat.mkdtemp(prefix='extdiff.')
214 214 try:
215 215 if not opts.get('patch'):
216 216 # Always make a copy of node1a (and node1b, if applicable)
217 217 dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
218 218 dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot,
219 219 subrepos)[0]
220 220 rev1a = '@%d' % repo[node1a].rev()
221 221 if do3way:
222 222 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
223 223 dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot,
224 224 subrepos)[0]
225 225 rev1b = '@%d' % repo[node1b].rev()
226 226 else:
227 227 dir1b = None
228 228 rev1b = ''
229 229
230 230 fnsandstat = []
231 231
232 232 # If node2 is not the wc or there is >1 change, copy it
233 233 dir2root = ''
234 234 rev2 = ''
235 235 if node2:
236 236 dir2 = snapshot(ui, repo, modadd, node2, tmproot, subrepos)[0]
237 237 rev2 = '@%d' % repo[node2].rev()
238 238 elif len(common) > 1:
239 239 #we only actually need to get the files to copy back to
240 240 #the working dir in this case (because the other cases
241 241 #are: diffing 2 revisions or single file -- in which case
242 242 #the file is already directly passed to the diff tool).
243 243 dir2, fnsandstat = snapshot(ui, repo, modadd, None, tmproot,
244 244 subrepos)
245 245 else:
246 246 # This lets the diff tool open the changed file directly
247 247 dir2 = ''
248 248 dir2root = repo.root
249 249
250 250 label1a = rev1a
251 251 label1b = rev1b
252 252 label2 = rev2
253 253
254 254 # If only one change, diff the files instead of the directories
255 255 # Handle bogus modifies correctly by checking if the files exist
256 256 if len(common) == 1:
257 257 common_file = util.localpath(common.pop())
258 258 dir1a = os.path.join(tmproot, dir1a, common_file)
259 259 label1a = common_file + rev1a
260 260 if not os.path.isfile(dir1a):
261 261 dir1a = os.devnull
262 262 if do3way:
263 263 dir1b = os.path.join(tmproot, dir1b, common_file)
264 264 label1b = common_file + rev1b
265 265 if not os.path.isfile(dir1b):
266 266 dir1b = os.devnull
267 267 dir2 = os.path.join(dir2root, dir2, common_file)
268 268 label2 = common_file + rev2
269 269 else:
270 270 template = 'hg-%h.patch'
271 271 with formatter.nullformatter(ui, 'extdiff', {}) as fm:
272 272 cmdutil.export(repo, [repo[node1a].rev(), repo[node2].rev()],
273 273 fm,
274 274 fntemplate=repo.vfs.reljoin(tmproot, template),
275 275 match=matcher)
276 276 label1a = cmdutil.makefilename(repo[node1a], template)
277 277 label2 = cmdutil.makefilename(repo[node2], template)
278 278 dir1a = repo.vfs.reljoin(tmproot, label1a)
279 279 dir2 = repo.vfs.reljoin(tmproot, label2)
280 280 dir1b = None
281 281 label1b = None
282 282 fnsandstat = []
283 283
284 284 # Function to quote file/dir names in the argument string.
285 285 # When not operating in 3-way mode, an empty string is
286 286 # returned for parent2
287 287 replace = {'parent': dir1a, 'parent1': dir1a, 'parent2': dir1b,
288 288 'plabel1': label1a, 'plabel2': label1b,
289 289 'clabel': label2, 'child': dir2,
290 290 'root': repo.root}
291 291 def quote(match):
292 292 pre = match.group(2)
293 293 key = match.group(3)
294 294 if not do3way and key == 'parent2':
295 295 return pre
296 296 return pre + procutil.shellquote(replace[key])
297 297
298 298 # Match parent2 first, so 'parent1?' will match both parent1 and parent
299 299 regex = (br'''(['"]?)([^\s'"$]*)'''
300 300 br'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)\1')
301 301 if not do3way and not re.search(regex, cmdline):
302 302 cmdline += ' $parent1 $child'
303 303 cmdline = re.sub(regex, quote, cmdline)
304 304
305 305 ui.debug('running %r in %s\n' % (pycompat.bytestr(cmdline), tmproot))
306 306 ui.system(cmdline, cwd=tmproot, blockedtag='extdiff')
307 307
308 308 for copy_fn, working_fn, st in fnsandstat:
309 309 cpstat = os.lstat(copy_fn)
310 310 # Some tools copy the file and attributes, so mtime may not detect
311 311 # all changes. A size check will detect more cases, but not all.
312 312 # The only certain way to detect every case is to diff all files,
313 313 # which could be expensive.
314 314 # copyfile() carries over the permission, so the mode check could
315 315 # be in an 'elif' branch, but it is kept separate for the case where
316 316 # the file has changed without affecting mtime or size.
317 317 if (cpstat[stat.ST_MTIME] != st[stat.ST_MTIME]
318 318 or cpstat.st_size != st.st_size
319 319 or (cpstat.st_mode & 0o100) != (st.st_mode & 0o100)):
320 320 ui.debug('file changed while diffing. '
321 321 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
322 322 util.copyfile(copy_fn, working_fn)
323 323
324 324 return 1
325 325 finally:
326 326 ui.note(_('cleaning up temp directory\n'))
327 327 shutil.rmtree(tmproot)
328 328
329 329 extdiffopts = [
330 330 ('o', 'option', [],
331 331 _('pass option to comparison program'), _('OPT')),
332 332 ('r', 'rev', [], _('revision'), _('REV')),
333 333 ('c', 'change', '', _('change made by revision'), _('REV')),
334 334 ('', 'patch', None, _('compare patches for two revisions'))
335 335 ] + cmdutil.walkopts + cmdutil.subrepoopts
336 336
337 337 @command('extdiff',
338 338 [('p', 'program', '', _('comparison program to run'), _('CMD')),
339 339 ] + extdiffopts,
340 340 _('hg extdiff [OPT]... [FILE]...'),
341 341 helpcategory=command.CATEGORY_FILE_CONTENTS,
342 342 inferrepo=True)
343 343 def extdiff(ui, repo, *pats, **opts):
344 344 '''use external program to diff repository (or selected files)
345 345
346 346 Show differences between revisions for the specified files, using
347 347 an external program. The default program used is diff, with
348 348 default options "-Npru".
349 349
350 350 To select a different program, use the -p/--program option. The
351 351 program will be passed the names of two directories to compare. To
352 352 pass additional options to the program, use -o/--option. These
353 353 will be passed before the names of the directories to compare.
354 354
355 355 When two revision arguments are given, then changes are shown
356 356 between those revisions. If only one revision is specified then
357 357 that revision is compared to the working directory, and, when no
358 358 revisions are specified, the working directory files are compared
359 359 to its parent.'''
360 360 opts = pycompat.byteskwargs(opts)
361 361 program = opts.get('program')
362 362 option = opts.get('option')
363 363 if not program:
364 364 program = 'diff'
365 365 option = option or ['-Npru']
366 366 cmdline = ' '.join(map(procutil.shellquote, [program] + option))
367 367 return dodiff(ui, repo, cmdline, pats, opts)
368 368
369 369 class savedcmd(object):
370 370 """use external program to diff repository (or selected files)
371 371
372 372 Show differences between revisions for the specified files, using
373 373 the following program::
374 374
375 375 %(path)s
376 376
377 377 When two revision arguments are given, then changes are shown
378 378 between those revisions. If only one revision is specified then
379 379 that revision is compared to the working directory, and, when no
380 380 revisions are specified, the working directory files are compared
381 381 to its parent.
382 382 """
383 383
384 384 def __init__(self, path, cmdline):
385 385 # We can't pass non-ASCII through docstrings (and path is
386 386 # in an unknown encoding anyway)
387 387 docpath = stringutil.escapestr(path)
388 388 self.__doc__ %= {r'path': pycompat.sysstr(stringutil.uirepr(docpath))}
389 389 self._cmdline = cmdline
390 390
391 391 def __call__(self, ui, repo, *pats, **opts):
392 392 opts = pycompat.byteskwargs(opts)
393 393 options = ' '.join(map(procutil.shellquote, opts['option']))
394 394 if options:
395 395 options = ' ' + options
396 396 return dodiff(ui, repo, self._cmdline + options, pats, opts)
397 397
398 398 def uisetup(ui):
399 399 for cmd, path in ui.configitems('extdiff'):
400 400 path = util.expandpath(path)
401 401 if cmd.startswith('cmd.'):
402 402 cmd = cmd[4:]
403 403 if not path:
404 404 path = procutil.findexe(cmd)
405 405 if path is None:
406 406 path = filemerge.findexternaltool(ui, cmd) or cmd
407 407 diffopts = ui.config('extdiff', 'opts.' + cmd)
408 408 cmdline = procutil.shellquote(path)
409 409 if diffopts:
410 410 cmdline += ' ' + diffopts
411 411 elif cmd.startswith('opts.'):
412 412 continue
413 413 else:
414 414 if path:
415 415 # case "cmd = path opts"
416 416 cmdline = path
417 417 diffopts = len(pycompat.shlexsplit(cmdline)) > 1
418 418 else:
419 419 # case "cmd ="
420 420 path = procutil.findexe(cmd)
421 421 if path is None:
422 422 path = filemerge.findexternaltool(ui, cmd) or cmd
423 423 cmdline = procutil.shellquote(path)
424 424 diffopts = False
425 425 # look for diff arguments in [diff-tools] then [merge-tools]
426 426 if not diffopts:
427 427 args = ui.config('diff-tools', cmd+'.diffargs') or \
428 428 ui.config('merge-tools', cmd+'.diffargs')
429 429 if args:
430 430 cmdline += ' ' + args
431 431 command(cmd, extdiffopts[:], _('hg %s [OPTION]... [FILE]...') % cmd,
432 432 inferrepo=True)(savedcmd(path, cmdline))
433 433
434 434 # tell hggettext to extract docstrings from these functions:
435 435 i18nfunctions = [savedcmd]
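The rename also flows through to code that wraps archival.archive, such as the
largefiles override in the next file: the wrapper's "matchfn" parameter becomes
"match" and is forwarded under the new name. A hedged sketch of how a wrapping
extension adapts, assuming the standard extensions.wrapfunction mechanism (the
wrapper name and uisetup body are illustrative only)::

    from mercurial import archival, extensions

    def archivewrapper(orig, repo, dest, node, kind, decode=True, match=None,
                       prefix='', mtime=None, subrepos=None):
        # Forward the renamed "match" argument, mirroring the overridearchive
        # change in the next file.
        return orig(repo, dest, node, kind, decode, match, prefix, mtime,
                    subrepos)

    def uisetup(ui):
        extensions.wrapfunction(archival, 'archive', archivewrapper)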
@@ -1,1484 +1,1484 b''
1 1 # Copyright 2009-2010 Gregory P. Ward
2 2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 3 # Copyright 2010-2011 Fog Creek Software
4 4 # Copyright 2010-2011 Unity Technologies
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 '''Overridden Mercurial commands and functions for the largefiles extension'''
10 10 from __future__ import absolute_import
11 11
12 12 import copy
13 13 import os
14 14
15 15 from mercurial.i18n import _
16 16
17 17 from mercurial import (
18 18 archival,
19 19 cmdutil,
20 20 error,
21 21 hg,
22 22 logcmdutil,
23 23 match as matchmod,
24 24 pathutil,
25 25 pycompat,
26 26 registrar,
27 27 scmutil,
28 28 smartset,
29 29 util,
30 30 )
31 31
32 32 from . import (
33 33 lfcommands,
34 34 lfutil,
35 35 storefactory,
36 36 )
37 37
38 38 # -- Utility functions: commonly/repeatedly needed functionality ---------------
39 39
40 40 def composelargefilematcher(match, manifest):
41 41 '''create a matcher that matches only the largefiles in the original
42 42 matcher'''
43 43 m = copy.copy(match)
44 44 lfile = lambda f: lfutil.standin(f) in manifest
45 45 m._files = [lf for lf in m._files if lfile(lf)]
46 46 m._fileset = set(m._files)
47 47 m.always = lambda: False
48 48 origmatchfn = m.matchfn
49 49 m.matchfn = lambda f: lfile(f) and origmatchfn(f)
50 50 return m
51 51
52 52 def composenormalfilematcher(match, manifest, exclude=None):
53 53 excluded = set()
54 54 if exclude is not None:
55 55 excluded.update(exclude)
56 56
57 57 m = copy.copy(match)
58 58 notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
59 59 manifest or f in excluded)
60 60 m._files = [lf for lf in m._files if notlfile(lf)]
61 61 m._fileset = set(m._files)
62 62 m.always = lambda: False
63 63 origmatchfn = m.matchfn
64 64 m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
65 65 return m
66 66
67 67 def installnormalfilesmatchfn(manifest):
68 68 '''installmatchfn with a matchfn that ignores all largefiles'''
69 69 def overridematch(ctx, pats=(), opts=None, globbed=False,
70 70 default='relpath', badfn=None):
71 71 if opts is None:
72 72 opts = {}
73 73 match = oldmatch(ctx, pats, opts, globbed, default, badfn=badfn)
74 74 return composenormalfilematcher(match, manifest)
75 75 oldmatch = installmatchfn(overridematch)
76 76
77 77 def installmatchfn(f):
78 78 '''monkey patch the scmutil module with a custom match function.
79 79 Warning: it is monkey patching the _module_ at runtime! Not thread safe!'''
80 80 oldmatch = scmutil.match
81 81 setattr(f, 'oldmatch', oldmatch)
82 82 scmutil.match = f
83 83 return oldmatch
84 84
85 85 def restorematchfn():
86 86 '''restores scmutil.match to what it was before installmatchfn
87 87 was called. no-op if scmutil.match is its original function.
88 88
89 89 Note that n calls to installmatchfn will require n calls to
90 90 restore the original matchfn.'''
91 91 scmutil.match = getattr(scmutil.match, 'oldmatch')
92 92
93 93 def installmatchandpatsfn(f):
94 94 oldmatchandpats = scmutil.matchandpats
95 95 setattr(f, 'oldmatchandpats', oldmatchandpats)
96 96 scmutil.matchandpats = f
97 97 return oldmatchandpats
98 98
99 99 def restorematchandpatsfn():
100 100 '''restores scmutil.matchandpats to what it was before
101 101 installmatchandpatsfn was called. No-op if scmutil.matchandpats
102 102 is its original function.
103 103
104 104 Note that n calls to installmatchandpatsfn will require n calls
105 105 to restore the original matchfn.'''
106 106 scmutil.matchandpats = getattr(scmutil.matchandpats, 'oldmatchandpats',
107 107 scmutil.matchandpats)
108 108
109 109 def addlargefiles(ui, repo, isaddremove, matcher, **opts):
110 110 large = opts.get(r'large')
111 111 lfsize = lfutil.getminsize(
112 112 ui, lfutil.islfilesrepo(repo), opts.get(r'lfsize'))
113 113
114 114 lfmatcher = None
115 115 if lfutil.islfilesrepo(repo):
116 116 lfpats = ui.configlist(lfutil.longname, 'patterns')
117 117 if lfpats:
118 118 lfmatcher = matchmod.match(repo.root, '', list(lfpats))
119 119
120 120 lfnames = []
121 121 m = matcher
122 122
123 123 wctx = repo[None]
124 124 for f in wctx.walk(matchmod.badmatch(m, lambda x, y: None)):
125 125 exact = m.exact(f)
126 126 lfile = lfutil.standin(f) in wctx
127 127 nfile = f in wctx
128 128 exists = lfile or nfile
129 129
130 130 # addremove in core gets fancy with the name, add doesn't
131 131 if isaddremove:
132 132 name = m.uipath(f)
133 133 else:
134 134 name = m.rel(f)
135 135
136 136 # Don't warn the user when they attempt to add a normal tracked file.
137 137 # The normal add code will do that for us.
138 138 if exact and exists:
139 139 if lfile:
140 140 ui.warn(_('%s already a largefile\n') % name)
141 141 continue
142 142
143 143 if (exact or not exists) and not lfutil.isstandin(f):
144 144 # In case the file was removed previously, but not committed
145 145 # (issue3507)
146 146 if not repo.wvfs.exists(f):
147 147 continue
148 148
149 149 abovemin = (lfsize and
150 150 repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024)
151 151 if large or abovemin or (lfmatcher and lfmatcher(f)):
152 152 lfnames.append(f)
153 153 if ui.verbose or not exact:
154 154 ui.status(_('adding %s as a largefile\n') % name)
155 155
156 156 bad = []
157 157
158 158 # Need to lock, otherwise there could be a race condition between
159 159 # when standins are created and added to the repo.
160 160 with repo.wlock():
161 161 if not opts.get(r'dry_run'):
162 162 standins = []
163 163 lfdirstate = lfutil.openlfdirstate(ui, repo)
164 164 for f in lfnames:
165 165 standinname = lfutil.standin(f)
166 166 lfutil.writestandin(repo, standinname, hash='',
167 167 executable=lfutil.getexecutable(repo.wjoin(f)))
168 168 standins.append(standinname)
169 169 if lfdirstate[f] == 'r':
170 170 lfdirstate.normallookup(f)
171 171 else:
172 172 lfdirstate.add(f)
173 173 lfdirstate.write()
174 174 bad += [lfutil.splitstandin(f)
175 175 for f in repo[None].add(standins)
176 176 if f in m.files()]
177 177
178 178 added = [f for f in lfnames if f not in bad]
179 179 return added, bad
180 180
181 181 def removelargefiles(ui, repo, isaddremove, matcher, dryrun, **opts):
182 182 after = opts.get(r'after')
183 183 m = composelargefilematcher(matcher, repo[None].manifest())
184 184 try:
185 185 repo.lfstatus = True
186 186 s = repo.status(match=m, clean=not isaddremove)
187 187 finally:
188 188 repo.lfstatus = False
189 189 manifest = repo[None].manifest()
190 190 modified, added, deleted, clean = [[f for f in list
191 191 if lfutil.standin(f) in manifest]
192 192 for list in (s.modified, s.added,
193 193 s.deleted, s.clean)]
194 194
195 195 def warn(files, msg):
196 196 for f in files:
197 197 ui.warn(msg % m.rel(f))
198 198 return int(len(files) > 0)
199 199
200 200 result = 0
201 201
202 202 if after:
203 203 remove = deleted
204 204 result = warn(modified + added + clean,
205 205 _('not removing %s: file still exists\n'))
206 206 else:
207 207 remove = deleted + clean
208 208 result = warn(modified, _('not removing %s: file is modified (use -f'
209 209 ' to force removal)\n'))
210 210 result = warn(added, _('not removing %s: file has been marked for add'
211 211 ' (use forget to undo)\n')) or result
212 212
213 213 # Need to lock because standin files are deleted then removed from the
214 214 # repository and we could race in-between.
215 215 with repo.wlock():
216 216 lfdirstate = lfutil.openlfdirstate(ui, repo)
217 217 for f in sorted(remove):
218 218 if ui.verbose or not m.exact(f):
219 219 # addremove in core gets fancy with the name, remove doesn't
220 220 if isaddremove:
221 221 name = m.uipath(f)
222 222 else:
223 223 name = m.rel(f)
224 224 ui.status(_('removing %s\n') % name)
225 225
226 226 if not dryrun:
227 227 if not after:
228 228 repo.wvfs.unlinkpath(f, ignoremissing=True)
229 229
230 230 if dryrun:
231 231 return result
232 232
233 233 remove = [lfutil.standin(f) for f in remove]
234 234 # If this is being called by addremove, let the original addremove
235 235 # function handle this.
236 236 if not isaddremove:
237 237 for f in remove:
238 238 repo.wvfs.unlinkpath(f, ignoremissing=True)
239 239 repo[None].forget(remove)
240 240
241 241 for f in remove:
242 242 lfutil.synclfdirstate(repo, lfdirstate, lfutil.splitstandin(f),
243 243 False)
244 244
245 245 lfdirstate.write()
246 246
247 247 return result
248 248
249 249 # For overriding mercurial.hgweb.webcommands so that largefiles will
250 250 # appear at their right place in the manifests.
251 251 def decodepath(orig, path):
252 252 return lfutil.splitstandin(path) or path
253 253
254 254 # -- Wrappers: modify existing commands --------------------------------
255 255
256 256 def overrideadd(orig, ui, repo, *pats, **opts):
257 257 if opts.get(r'normal') and opts.get(r'large'):
258 258 raise error.Abort(_('--normal cannot be used with --large'))
259 259 return orig(ui, repo, *pats, **opts)
260 260
261 261 def cmdutiladd(orig, ui, repo, matcher, prefix, explicitonly, **opts):
262 262 # The --normal flag short circuits this override
263 263 if opts.get(r'normal'):
264 264 return orig(ui, repo, matcher, prefix, explicitonly, **opts)
265 265
266 266 ladded, lbad = addlargefiles(ui, repo, False, matcher, **opts)
267 267 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(),
268 268 ladded)
269 269 bad = orig(ui, repo, normalmatcher, prefix, explicitonly, **opts)
270 270
271 271 bad.extend(f for f in lbad)
272 272 return bad
273 273
274 274 def cmdutilremove(orig, ui, repo, matcher, prefix, after, force, subrepos,
275 275 dryrun):
276 276 normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
277 277 result = orig(ui, repo, normalmatcher, prefix, after, force, subrepos,
278 278 dryrun)
279 279 return removelargefiles(ui, repo, False, matcher, dryrun, after=after,
280 280 force=force) or result
281 281
282 282 def overridestatusfn(orig, repo, rev2, **opts):
283 283 try:
284 284 repo._repo.lfstatus = True
285 285 return orig(repo, rev2, **opts)
286 286 finally:
287 287 repo._repo.lfstatus = False
288 288
289 289 def overridestatus(orig, ui, repo, *pats, **opts):
290 290 try:
291 291 repo.lfstatus = True
292 292 return orig(ui, repo, *pats, **opts)
293 293 finally:
294 294 repo.lfstatus = False
295 295
296 296 def overridedirty(orig, repo, ignoreupdate=False, missing=False):
297 297 try:
298 298 repo._repo.lfstatus = True
299 299 return orig(repo, ignoreupdate=ignoreupdate, missing=missing)
300 300 finally:
301 301 repo._repo.lfstatus = False
302 302
303 303 def overridelog(orig, ui, repo, *pats, **opts):
304 304 def overridematchandpats(ctx, pats=(), opts=None, globbed=False,
305 305 default='relpath', badfn=None):
306 306 """Matcher that merges root directory with .hglf, suitable for log.
307 307 It is still possible to match .hglf directly.
308 308 For any listed files run log on the standin too.
309 309 matchfn tries both the given filename and with .hglf stripped.
310 310 """
311 311 if opts is None:
312 312 opts = {}
313 313 matchandpats = oldmatchandpats(ctx, pats, opts, globbed, default,
314 314 badfn=badfn)
315 315 m, p = copy.copy(matchandpats)
316 316
317 317 if m.always():
318 318 # We want to match everything anyway, so there's no benefit trying
319 319 # to add standins.
320 320 return matchandpats
321 321
322 322 pats = set(p)
323 323
324 324 def fixpats(pat, tostandin=lfutil.standin):
325 325 if pat.startswith('set:'):
326 326 return pat
327 327
328 328 kindpat = matchmod._patsplit(pat, None)
329 329
330 330 if kindpat[0] is not None:
331 331 return kindpat[0] + ':' + tostandin(kindpat[1])
332 332 return tostandin(kindpat[1])
333 333
334 334 if m._cwd:
335 335 hglf = lfutil.shortname
336 336 back = util.pconvert(m.rel(hglf)[:-len(hglf)])
337 337
338 338 def tostandin(f):
339 339 # The file may already be a standin, so truncate the back
340 340 # prefix and test before mangling it. This avoids turning
341 341 # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
342 342 if f.startswith(back) and lfutil.splitstandin(f[len(back):]):
343 343 return f
344 344
345 345 # An absolute path is from outside the repo, so truncate the
346 346 # path to the root before building the standin. Otherwise cwd
347 347 # is somewhere in the repo, relative to root, and needs to be
348 348 # prepended before building the standin.
349 349 if os.path.isabs(m._cwd):
350 350 f = f[len(back):]
351 351 else:
352 352 f = m._cwd + '/' + f
353 353 return back + lfutil.standin(f)
354 354 else:
355 355 def tostandin(f):
356 356 if lfutil.isstandin(f):
357 357 return f
358 358 return lfutil.standin(f)
359 359 pats.update(fixpats(f, tostandin) for f in p)
360 360
361 361 for i in range(0, len(m._files)):
362 362 # Don't add '.hglf' to m.files, since that is already covered by '.'
363 363 if m._files[i] == '.':
364 364 continue
365 365 standin = lfutil.standin(m._files[i])
366 366 # If the "standin" is a directory, append instead of replace to
367 367 # support naming a directory on the command line with only
368 368 # largefiles. The original directory is kept to support normal
369 369 # files.
370 370 if standin in ctx:
371 371 m._files[i] = standin
372 372 elif m._files[i] not in ctx and repo.wvfs.isdir(standin):
373 373 m._files.append(standin)
374 374
375 375 m._fileset = set(m._files)
376 376 m.always = lambda: False
377 377 origmatchfn = m.matchfn
378 378 def lfmatchfn(f):
379 379 lf = lfutil.splitstandin(f)
380 380 if lf is not None and origmatchfn(lf):
381 381 return True
382 382 r = origmatchfn(f)
383 383 return r
384 384 m.matchfn = lfmatchfn
385 385
386 386 ui.debug('updated patterns: %s\n' % ', '.join(sorted(pats)))
387 387 return m, pats
388 388
389 389 # For hg log --patch, the match object is used in two different senses:
390 390 # (1) to determine what revisions should be printed out, and
391 391 # (2) to determine what files to print out diffs for.
392 392 # The magic matchandpats override should be used for case (1) but not for
393 393 # case (2).
394 394 def overridemakefilematcher(repo, pats, opts, badfn=None):
395 395 wctx = repo[None]
396 396 match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
397 397 return lambda ctx: match
398 398
399 399 oldmatchandpats = installmatchandpatsfn(overridematchandpats)
400 400 oldmakefilematcher = logcmdutil._makenofollowfilematcher
401 401 setattr(logcmdutil, '_makenofollowfilematcher', overridemakefilematcher)
402 402
403 403 try:
404 404 return orig(ui, repo, *pats, **opts)
405 405 finally:
406 406 restorematchandpatsfn()
407 407 setattr(logcmdutil, '_makenofollowfilematcher', oldmakefilematcher)
408 408
409 409 def overrideverify(orig, ui, repo, *pats, **opts):
410 410 large = opts.pop(r'large', False)
411 411 all = opts.pop(r'lfa', False)
412 412 contents = opts.pop(r'lfc', False)
413 413
414 414 result = orig(ui, repo, *pats, **opts)
415 415 if large or all or contents:
416 416 result = result or lfcommands.verifylfiles(ui, repo, all, contents)
417 417 return result
418 418
419 419 def overridedebugstate(orig, ui, repo, *pats, **opts):
420 420 large = opts.pop(r'large', False)
421 421 if large:
422 422 class fakerepo(object):
423 423 dirstate = lfutil.openlfdirstate(ui, repo)
424 424 orig(ui, fakerepo, *pats, **opts)
425 425 else:
426 426 orig(ui, repo, *pats, **opts)
427 427
428 428 # Before starting the manifest merge, merge.updates will call
429 429 # _checkunknownfile to check if there are any files in the merged-in
430 430 # changeset that collide with unknown files in the working copy.
431 431 #
432 432 # The largefiles are seen as unknown, so this prevents us from merging
433 433 # in a file 'foo' if we already have a largefile with the same name.
434 434 #
435 435 # The overridden function filters the unknown files by removing any
436 436 # largefiles. This makes the merge proceed and we can then handle this
437 437 # case further in the overridden calculateupdates function below.
438 438 def overridecheckunknownfile(origfn, repo, wctx, mctx, f, f2=None):
439 439 if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
440 440 return False
441 441 return origfn(repo, wctx, mctx, f, f2)
442 442
443 443 # The manifest merge handles conflicts on the manifest level. We want
444 444 # to handle changes in largefile-ness of files at this level too.
445 445 #
446 446 # The strategy is to run the original calculateupdates and then process
447 447 # the action list it outputs. There are two cases we need to deal with:
448 448 #
449 449 # 1. Normal file in p1, largefile in p2. Here the largefile is
450 450 # detected via its standin file, which will enter the working copy
451 451 # with a "get" action. It is not "merge" since the standin is all
452 452 # Mercurial is concerned with at this level -- the link to the
453 453 # existing normal file is not relevant here.
454 454 #
455 455 # 2. Largefile in p1, normal file in p2. Here we get a "merge" action
456 456 # since the largefile will be present in the working copy and
457 457 # different from the normal file in p2. Mercurial therefore
458 458 # triggers a merge action.
459 459 #
460 460 # In both cases, we prompt the user and emit new actions to either
461 461 # remove the standin (if the normal file was kept) or to remove the
462 462 # normal file and get the standin (if the largefile was kept). The
463 463 # default prompt answer is to use the largefile version since it was
464 464 # presumably changed on purpose.
465 465 #
466 466 # Finally, the merge.applyupdates function will then take care of
467 467 # writing the files into the working copy and lfcommands.updatelfiles
468 468 # will update the largefiles.
469 469 def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
470 470 acceptremote, *args, **kwargs):
471 471 overwrite = force and not branchmerge
472 472 actions, diverge, renamedelete = origfn(
473 473 repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs)
474 474
475 475 if overwrite:
476 476 return actions, diverge, renamedelete
477 477
478 478 # Collect the largefiles affected by the actions computed above.
479 479 lfiles = set()
480 480 for f in actions:
481 481 splitstandin = lfutil.splitstandin(f)
482 482 if splitstandin in p1:
483 483 lfiles.add(splitstandin)
484 484 elif lfutil.standin(f) in p1:
485 485 lfiles.add(f)
486 486
487 487 for lfile in sorted(lfiles):
488 488 standin = lfutil.standin(lfile)
489 489 (lm, largs, lmsg) = actions.get(lfile, (None, None, None))
490 490 (sm, sargs, smsg) = actions.get(standin, (None, None, None))
491 491 if sm in ('g', 'dc') and lm != 'r':
492 492 if sm == 'dc':
493 493 f1, f2, fa, move, anc = sargs
494 494 sargs = (p2[f2].flags(), False)
495 495 # Case 1: normal file in the working copy, largefile in
496 496 # the second parent
497 497 usermsg = _('remote turned local normal file %s into a largefile\n'
498 498 'use (l)argefile or keep (n)ormal file?'
499 499 '$$ &Largefile $$ &Normal file') % lfile
500 500 if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
501 501 actions[lfile] = ('r', None, 'replaced by standin')
502 502 actions[standin] = ('g', sargs, 'replaces standin')
503 503 else: # keep local normal file
504 504 actions[lfile] = ('k', None, 'replaces standin')
505 505 if branchmerge:
506 506 actions[standin] = ('k', None, 'replaced by non-standin')
507 507 else:
508 508 actions[standin] = ('r', None, 'replaced by non-standin')
509 509 elif lm in ('g', 'dc') and sm != 'r':
510 510 if lm == 'dc':
511 511 f1, f2, fa, move, anc = largs
512 512 largs = (p2[f2].flags(), False)
513 513 # Case 2: largefile in the working copy, normal file in
514 514 # the second parent
515 515 usermsg = _('remote turned local largefile %s into a normal file\n'
516 516 'keep (l)argefile or use (n)ormal file?'
517 517 '$$ &Largefile $$ &Normal file') % lfile
518 518 if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
519 519 if branchmerge:
520 520 # largefile can be restored from standin safely
521 521 actions[lfile] = ('k', None, 'replaced by standin')
522 522 actions[standin] = ('k', None, 'replaces standin')
523 523 else:
524 524 # "lfile" should be marked as "removed" without
525 525 # removal of itself
526 526 actions[lfile] = ('lfmr', None,
527 527 'forget non-standin largefile')
528 528
529 529 # linear-merge should treat this largefile as 're-added'
530 530 actions[standin] = ('a', None, 'keep standin')
531 531 else: # pick remote normal file
532 532 actions[lfile] = ('g', largs, 'replaces standin')
533 533 actions[standin] = ('r', None, 'replaced by non-standin')
534 534
535 535 return actions, diverge, renamedelete
536 536
537 537 def mergerecordupdates(orig, repo, actions, branchmerge):
538 538 if 'lfmr' in actions:
539 539 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
540 540 for lfile, args, msg in actions['lfmr']:
541 541 # this should be executed before 'orig', to execute 'remove'
542 542 # before all other actions
543 543 repo.dirstate.remove(lfile)
544 544 # make sure lfile doesn't get synclfdirstate'd as normal
545 545 lfdirstate.add(lfile)
546 546 lfdirstate.write()
547 547
548 548 return orig(repo, actions, branchmerge)
549 549
550 550 # Override filemerge to prompt the user about how they wish to merge
551 551 # largefiles. This will handle identical edits without prompting the user.
552 552 def overridefilemerge(origfn, premerge, repo, wctx, mynode, orig, fcd, fco, fca,
553 553 labels=None):
554 554 if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent():
555 555 return origfn(premerge, repo, wctx, mynode, orig, fcd, fco, fca,
556 556 labels=labels)
557 557
558 558 ahash = lfutil.readasstandin(fca).lower()
559 559 dhash = lfutil.readasstandin(fcd).lower()
560 560 ohash = lfutil.readasstandin(fco).lower()
561 561 if (ohash != ahash and
562 562 ohash != dhash and
563 563 (dhash == ahash or
564 564 repo.ui.promptchoice(
565 565 _('largefile %s has a merge conflict\nancestor was %s\n'
566 566 'keep (l)ocal %s or\ntake (o)ther %s?'
567 567 '$$ &Local $$ &Other') %
568 568 (lfutil.splitstandin(orig), ahash, dhash, ohash),
569 569 0) == 1)):
570 570 repo.wwrite(fcd.path(), fco.data(), fco.flags())
571 571 return True, 0, False
572 572
573 573 def copiespathcopies(orig, ctx1, ctx2, match=None):
574 574 copies = orig(ctx1, ctx2, match=match)
575 575 updated = {}
576 576
577 577 for k, v in copies.iteritems():
578 578 updated[lfutil.splitstandin(k) or k] = lfutil.splitstandin(v) or v
579 579
580 580 return updated
581 581
582 582 # Copy first changes the matchers to match standins instead of
583 583 # largefiles. Then it overrides util.copyfile in that function it
584 584 # checks if the destination largefile already exists. It also keeps a
585 585 # list of copied files so that the largefiles can be copied and the
586 586 # dirstate updated.
587 587 def overridecopy(orig, ui, repo, pats, opts, rename=False):
588 588 # doesn't remove largefile on rename
589 589 if len(pats) < 2:
590 590 # this isn't legal, let the original function deal with it
591 591 return orig(ui, repo, pats, opts, rename)
592 592
593 593 # This could copy both lfiles and normal files in one command,
594 594 # but we don't want to do that. First replace their matcher to
595 595 # only match normal files and run it, then replace it to just
596 596 # match largefiles and run it again.
597 597 nonormalfiles = False
598 598 nolfiles = False
599 599 installnormalfilesmatchfn(repo[None].manifest())
600 600 try:
601 601 result = orig(ui, repo, pats, opts, rename)
602 602 except error.Abort as e:
603 603 if pycompat.bytestr(e) != _('no files to copy'):
604 604 raise e
605 605 else:
606 606 nonormalfiles = True
607 607 result = 0
608 608 finally:
609 609 restorematchfn()
610 610
611 611 # The first rename can cause our current working directory to be removed.
612 612 # In that case there is nothing left to copy/rename so just quit.
613 613 try:
614 614 repo.getcwd()
615 615 except OSError:
616 616 return result
617 617
618 618 def makestandin(relpath):
619 619 path = pathutil.canonpath(repo.root, repo.getcwd(), relpath)
620 620 return repo.wvfs.join(lfutil.standin(path))
621 621
622 622 fullpats = scmutil.expandpats(pats)
623 623 dest = fullpats[-1]
624 624
625 625 if os.path.isdir(dest):
626 626 if not os.path.isdir(makestandin(dest)):
627 627 os.makedirs(makestandin(dest))
628 628
629 629 try:
630 630 # When we call orig below it creates the standins but we don't add
631 631 # them to the dir state until later so lock during that time.
632 632 wlock = repo.wlock()
633 633
634 634 manifest = repo[None].manifest()
635 635 def overridematch(ctx, pats=(), opts=None, globbed=False,
636 636 default='relpath', badfn=None):
637 637 if opts is None:
638 638 opts = {}
639 639 newpats = []
640 640 # The patterns were previously mangled to add the standin
641 641 # directory; we need to remove that now
642 642 for pat in pats:
643 643 if matchmod.patkind(pat) is None and lfutil.shortname in pat:
644 644 newpats.append(pat.replace(lfutil.shortname, ''))
645 645 else:
646 646 newpats.append(pat)
647 647 match = oldmatch(ctx, newpats, opts, globbed, default, badfn=badfn)
648 648 m = copy.copy(match)
649 649 lfile = lambda f: lfutil.standin(f) in manifest
650 650 m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
651 651 m._fileset = set(m._files)
652 652 origmatchfn = m.matchfn
653 653 def matchfn(f):
654 654 lfile = lfutil.splitstandin(f)
655 655 return (lfile is not None and
656 656 (f in manifest) and
657 657 origmatchfn(lfile) or
658 658 None)
659 659 m.matchfn = matchfn
660 660 return m
661 661 oldmatch = installmatchfn(overridematch)
662 662 listpats = []
663 663 for pat in pats:
664 664 if matchmod.patkind(pat) is not None:
665 665 listpats.append(pat)
666 666 else:
667 667 listpats.append(makestandin(pat))
668 668
669 669 try:
670 670 origcopyfile = util.copyfile
671 671 copiedfiles = []
672 672 def overridecopyfile(src, dest, *args, **kwargs):
673 673 if (lfutil.shortname in src and
674 674 dest.startswith(repo.wjoin(lfutil.shortname))):
675 675 destlfile = dest.replace(lfutil.shortname, '')
676 676 if not opts['force'] and os.path.exists(destlfile):
677 677 raise IOError('',
678 678 _('destination largefile already exists'))
679 679 copiedfiles.append((src, dest))
680 680 origcopyfile(src, dest, *args, **kwargs)
681 681
682 682 util.copyfile = overridecopyfile
683 683 result += orig(ui, repo, listpats, opts, rename)
684 684 finally:
685 685 util.copyfile = origcopyfile
686 686
687 687 lfdirstate = lfutil.openlfdirstate(ui, repo)
688 688 for (src, dest) in copiedfiles:
689 689 if (lfutil.shortname in src and
690 690 dest.startswith(repo.wjoin(lfutil.shortname))):
691 691 srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
692 692 destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
693 693 destlfiledir = repo.wvfs.dirname(repo.wjoin(destlfile)) or '.'
694 694 if not os.path.isdir(destlfiledir):
695 695 os.makedirs(destlfiledir)
696 696 if rename:
697 697 os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
698 698
699 699 # The file is gone, but this deletes any empty parent
700 700 # directories as a side-effect.
701 701 repo.wvfs.unlinkpath(srclfile, ignoremissing=True)
702 702 lfdirstate.remove(srclfile)
703 703 else:
704 704 util.copyfile(repo.wjoin(srclfile),
705 705 repo.wjoin(destlfile))
706 706
707 707 lfdirstate.add(destlfile)
708 708 lfdirstate.write()
709 709 except error.Abort as e:
710 710 if pycompat.bytestr(e) != _('no files to copy'):
711 711 raise e
712 712 else:
713 713 nolfiles = True
714 714 finally:
715 715 restorematchfn()
716 716 wlock.release()
717 717
718 718 if nolfiles and nonormalfiles:
719 719 raise error.Abort(_('no files to copy'))
720 720
721 721 return result
722 722
723 723 # When the user calls revert, we have to be careful to not revert any
724 724 # changes to other largefiles accidentally. This means we have to keep
725 725 # track of the largefiles that are being reverted so we only pull down
726 726 # the necessary largefiles.
727 727 #
728 728 # Standins are only updated (to match the hash of largefiles) before
729 729 # commits. Update the standins then run the original revert, changing
730 730 # the matcher to hit standins instead of largefiles. Based on the
731 731 # resulting standins update the largefiles.
732 732 def overriderevert(orig, ui, repo, ctx, parents, *pats, **opts):
733 733 # Because we put the standins in a bad state (by updating them)
734 734 # and then return them to a correct state we need to lock to
735 735 # prevent others from changing them in their incorrect state.
736 736 with repo.wlock():
737 737 lfdirstate = lfutil.openlfdirstate(ui, repo)
738 738 s = lfutil.lfdirstatestatus(lfdirstate, repo)
739 739 lfdirstate.write()
740 740 for lfile in s.modified:
741 741 lfutil.updatestandin(repo, lfile, lfutil.standin(lfile))
742 742 for lfile in s.deleted:
743 743 fstandin = lfutil.standin(lfile)
744 744 if (repo.wvfs.exists(fstandin)):
745 745 repo.wvfs.unlink(fstandin)
746 746
747 747 oldstandins = lfutil.getstandinsstate(repo)
748 748
749 749 def overridematch(mctx, pats=(), opts=None, globbed=False,
750 750 default='relpath', badfn=None):
751 751 if opts is None:
752 752 opts = {}
753 753 match = oldmatch(mctx, pats, opts, globbed, default, badfn=badfn)
754 754 m = copy.copy(match)
755 755
756 756 # revert supports recursing into subrepos, and though largefiles
757 757 # currently doesn't work correctly in that case, this match is
758 758 # called, so the lfdirstate above may not be the correct one for
759 759 # this invocation of match.
760 760 lfdirstate = lfutil.openlfdirstate(mctx.repo().ui, mctx.repo(),
761 761 False)
762 762
763 763 wctx = repo[None]
764 764 matchfiles = []
765 765 for f in m._files:
766 766 standin = lfutil.standin(f)
767 767 if standin in ctx or standin in mctx:
768 768 matchfiles.append(standin)
769 769 elif standin in wctx or lfdirstate[f] == 'r':
770 770 continue
771 771 else:
772 772 matchfiles.append(f)
773 773 m._files = matchfiles
774 774 m._fileset = set(m._files)
775 775 origmatchfn = m.matchfn
776 776 def matchfn(f):
777 777 lfile = lfutil.splitstandin(f)
778 778 if lfile is not None:
779 779 return (origmatchfn(lfile) and
780 780 (f in ctx or f in mctx))
781 781 return origmatchfn(f)
782 782 m.matchfn = matchfn
783 783 return m
784 784 oldmatch = installmatchfn(overridematch)
785 785 try:
786 786 orig(ui, repo, ctx, parents, *pats, **opts)
787 787 finally:
788 788 restorematchfn()
789 789
790 790 newstandins = lfutil.getstandinsstate(repo)
791 791 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
792 792 # lfdirstate should be 'normallookup'-ed for updated files,
793 793 # because reverting doesn't touch dirstate for 'normal' files
794 794 # when target revision is explicitly specified: in such case,
795 795 # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
796 796 # of target (standin) file.
797 797 lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
798 798 normallookup=True)
799 799
800 800 # after pulling changesets, we need to take some extra care to get
801 801 # largefiles updated remotely
802 802 def overridepull(orig, ui, repo, source=None, **opts):
803 803 revsprepull = len(repo)
804 804 if not source:
805 805 source = 'default'
806 806 repo.lfpullsource = source
807 807 result = orig(ui, repo, source, **opts)
808 808 revspostpull = len(repo)
809 809 lfrevs = opts.get(r'lfrev', [])
810 810 if opts.get(r'all_largefiles'):
811 811 lfrevs.append('pulled()')
812 812 if lfrevs and revspostpull > revsprepull:
813 813 numcached = 0
814 814 repo.firstpulled = revsprepull # for pulled() revset expression
815 815 try:
816 816 for rev in scmutil.revrange(repo, lfrevs):
817 817 ui.note(_('pulling largefiles for revision %d\n') % rev)
818 818 (cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
819 819 numcached += len(cached)
820 820 finally:
821 821 del repo.firstpulled
822 822 ui.status(_("%d largefiles cached\n") % numcached)
823 823 return result
824 824
825 825 def overridepush(orig, ui, repo, *args, **kwargs):
826 826 """Override push command and store --lfrev parameters in opargs"""
827 827 lfrevs = kwargs.pop(r'lfrev', None)
828 828 if lfrevs:
829 829 opargs = kwargs.setdefault(r'opargs', {})
830 830 opargs['lfrevs'] = scmutil.revrange(repo, lfrevs)
831 831 return orig(ui, repo, *args, **kwargs)
832 832
833 833 def exchangepushoperation(orig, *args, **kwargs):
834 834 """Override pushoperation constructor and store lfrevs parameter"""
835 835 lfrevs = kwargs.pop(r'lfrevs', None)
836 836 pushop = orig(*args, **kwargs)
837 837 pushop.lfrevs = lfrevs
838 838 return pushop
839 839
840 840 revsetpredicate = registrar.revsetpredicate()
841 841
842 842 @revsetpredicate('pulled()')
843 843 def pulledrevsetsymbol(repo, subset, x):
844 844 """Changesets that just has been pulled.
845 845
846 846 Only available with largefiles from pull --lfrev expressions.
847 847
848 848 .. container:: verbose
849 849
850 850 Some examples:
851 851
852 852 - pull largefiles for all new changesets::
853 853
854 854 hg pull --lfrev "pulled()"
855 855
856 856 - pull largefiles for all new branch heads::
857 857
858 858 hg pull --lfrev "head(pulled()) and not closed()"
859 859
860 860 """
861 861
862 862 try:
863 863 firstpulled = repo.firstpulled
864 864 except AttributeError:
865 865 raise error.Abort(_("pulled() only available in --lfrev"))
866 866 return smartset.baseset([r for r in subset if r >= firstpulled])
867 867
868 868 def overrideclone(orig, ui, source, dest=None, **opts):
869 869 d = dest
870 870 if d is None:
871 871 d = hg.defaultdest(source)
872 872 if opts.get(r'all_largefiles') and not hg.islocal(d):
873 873 raise error.Abort(_(
874 874 '--all-largefiles is incompatible with non-local destination %s') %
875 875 d)
876 876
877 877 return orig(ui, source, dest, **opts)
878 878
879 879 def hgclone(orig, ui, opts, *args, **kwargs):
880 880 result = orig(ui, opts, *args, **kwargs)
881 881
882 882 if result is not None:
883 883 sourcerepo, destrepo = result
884 884 repo = destrepo.local()
885 885
886 886 # When cloning to a remote repo (like through SSH), no repo is available
887 887 # from the peer. Therefore the largefiles can't be downloaded and the
888 888 # hgrc can't be updated.
889 889 if not repo:
890 890 return result
891 891
892 892 # Caching is implicitly limited to 'rev' option, since the dest repo was
893 893 # truncated at that point. The user may expect a download count with
894 894 # this option, so attempt whether or not this is a largefile repo.
895 895 if opts.get('all_largefiles'):
896 896 success, missing = lfcommands.downloadlfiles(ui, repo, None)
897 897
898 898 if missing != 0:
899 899 return None
900 900
901 901 return result
902 902
903 903 def overriderebase(orig, ui, repo, **opts):
904 904 if not util.safehasattr(repo, '_largefilesenabled'):
905 905 return orig(ui, repo, **opts)
906 906
907 907 resuming = opts.get(r'continue')
908 908 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
909 909 repo._lfstatuswriters.append(lambda *msg, **opts: None)
910 910 try:
911 911 return orig(ui, repo, **opts)
912 912 finally:
913 913 repo._lfstatuswriters.pop()
914 914 repo._lfcommithooks.pop()
915 915
916 916 def overridearchivecmd(orig, ui, repo, dest, **opts):
917 917 repo.unfiltered().lfstatus = True
918 918
919 919 try:
920 920 return orig(ui, repo.unfiltered(), dest, **opts)
921 921 finally:
922 922 repo.unfiltered().lfstatus = False
923 923
924 924 def hgwebarchive(orig, web):
925 925 web.repo.lfstatus = True
926 926
927 927 try:
928 928 return orig(web)
929 929 finally:
930 930 web.repo.lfstatus = False
931 931
932 def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
932 def overridearchive(orig, repo, dest, node, kind, decode=True, match=None,
933 933 prefix='', mtime=None, subrepos=None):
934 934 # For some reason setting repo.lfstatus in hgwebarchive only changes the
935 935 # unfiltered repo's attr, so check that as well.
936 936 if not repo.lfstatus and not repo.unfiltered().lfstatus:
937 return orig(repo, dest, node, kind, decode, matchfn, prefix, mtime,
937 return orig(repo, dest, node, kind, decode, match, prefix, mtime,
938 938 subrepos)
939 939
940 940 # No need to lock because we are only reading history and
941 941 # largefile caches, neither of which are modified.
942 942 if node is not None:
943 943 lfcommands.cachelfiles(repo.ui, repo, node)
944 944
945 945 if kind not in archival.archivers:
946 946 raise error.Abort(_("unknown archive type '%s'") % kind)
947 947
948 948 ctx = repo[node]
949 949
950 950 if kind == 'files':
951 951 if prefix:
952 952 raise error.Abort(
953 953 _('cannot give prefix when archiving to files'))
954 954 else:
955 955 prefix = archival.tidyprefix(dest, kind, prefix)
956 956
957 957 def write(name, mode, islink, getdata):
958 if matchfn and not matchfn(name):
958 if match and not match(name):
959 959 return
960 960 data = getdata()
961 961 if decode:
962 962 data = repo.wwritedata(name, data)
963 963 archiver.addfile(prefix + name, mode, islink, data)
964 964
965 965 archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
966 966
967 967 if repo.ui.configbool("ui", "archivemeta"):
968 968 write('.hg_archival.txt', 0o644, False,
969 969 lambda: archival.buildmetadata(ctx))
970 970
971 971 for f in ctx:
972 972 ff = ctx.flags(f)
973 973 getdata = ctx[f].data
974 974 lfile = lfutil.splitstandin(f)
975 975 if lfile is not None:
976 976 if node is not None:
977 977 path = lfutil.findfile(repo, getdata().strip())
978 978
979 979 if path is None:
980 980 raise error.Abort(
981 981 _('largefile %s not found in repo store or system cache')
982 982 % lfile)
983 983 else:
984 984 path = lfile
985 985
986 986 f = lfile
987 987
988 988 getdata = lambda: util.readfile(path)
989 989 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
990 990
991 991 if subrepos:
992 992 for subpath in sorted(ctx.substate):
993 993 sub = ctx.workingsub(subpath)
994 submatch = matchmod.subdirmatcher(subpath, matchfn)
994 submatch = matchmod.subdirmatcher(subpath, match)
995 995 sub._repo.lfstatus = True
996 996 sub.archive(archiver, prefix, submatch)
997 997
998 998 archiver.done()
999 999
1000 1000 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None, decode=True):
1001 1001 lfenabled = util.safehasattr(repo._repo, '_largefilesenabled')
1002 1002 if not lfenabled or not repo._repo.lfstatus:
1003 1003 return orig(repo, archiver, prefix, match, decode)
1004 1004
1005 1005 repo._get(repo._state + ('hg',))
1006 1006 rev = repo._state[1]
1007 1007 ctx = repo._repo[rev]
1008 1008
1009 1009 if ctx.node() is not None:
1010 1010 lfcommands.cachelfiles(repo.ui, repo._repo, ctx.node())
1011 1011
1012 1012 def write(name, mode, islink, getdata):
1013 1013 # At this point, the standin has been replaced with the largefile name,
1014 1014 # so the normal matcher works here without the lfutil variants.
1015 1015 if match and not match(f):
1016 1016 return
1017 1017 data = getdata()
1018 1018 if decode:
1019 1019 data = repo._repo.wwritedata(name, data)
1020 1020
1021 1021 archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
1022 1022
1023 1023 for f in ctx:
1024 1024 ff = ctx.flags(f)
1025 1025 getdata = ctx[f].data
1026 1026 lfile = lfutil.splitstandin(f)
1027 1027 if lfile is not None:
1028 1028 if ctx.node() is not None:
1029 1029 path = lfutil.findfile(repo._repo, getdata().strip())
1030 1030
1031 1031 if path is None:
1032 1032 raise error.Abort(
1033 1033 _('largefile %s not found in repo store or system cache')
1034 1034 % lfile)
1035 1035 else:
1036 1036 path = lfile
1037 1037
1038 1038 f = lfile
1039 1039
1040 1040 getdata = lambda: util.readfile(os.path.join(prefix, path))
1041 1041
1042 1042 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, getdata)
1043 1043
1044 1044 for subpath in sorted(ctx.substate):
1045 1045 sub = ctx.workingsub(subpath)
1046 1046 submatch = matchmod.subdirmatcher(subpath, match)
1047 1047 sub._repo.lfstatus = True
1048 1048 sub.archive(archiver, prefix + repo._path + '/', submatch, decode)
1049 1049
1050 1050 # If a largefile is modified, the change is not reflected in its
1051 1051 # standin until a commit. cmdutil.bailifchanged() raises an exception
1052 1052 # if the repo has uncommitted changes. Wrap it to also check if
1053 1053 # largefiles were changed. This is used by bisect, backout and fetch.
1054 1054 def overridebailifchanged(orig, repo, *args, **kwargs):
1055 1055 orig(repo, *args, **kwargs)
1056 1056 repo.lfstatus = True
1057 1057 s = repo.status()
1058 1058 repo.lfstatus = False
1059 1059 if s.modified or s.added or s.removed or s.deleted:
1060 1060 raise error.Abort(_('uncommitted changes'))
1061 1061
1062 1062 def postcommitstatus(orig, repo, *args, **kwargs):
1063 1063 repo.lfstatus = True
1064 1064 try:
1065 1065 return orig(repo, *args, **kwargs)
1066 1066 finally:
1067 1067 repo.lfstatus = False
1068 1068
1069 1069 def cmdutilforget(orig, ui, repo, match, prefix, explicitonly, dryrun,
1070 1070 interactive):
1071 1071 normalmatcher = composenormalfilematcher(match, repo[None].manifest())
1072 1072 bad, forgot = orig(ui, repo, normalmatcher, prefix, explicitonly, dryrun,
1073 1073 interactive)
1074 1074 m = composelargefilematcher(match, repo[None].manifest())
1075 1075
1076 1076 try:
1077 1077 repo.lfstatus = True
1078 1078 s = repo.status(match=m, clean=True)
1079 1079 finally:
1080 1080 repo.lfstatus = False
1081 1081 manifest = repo[None].manifest()
1082 1082 forget = sorted(s.modified + s.added + s.deleted + s.clean)
1083 1083 forget = [f for f in forget if lfutil.standin(f) in manifest]
1084 1084
1085 1085 for f in forget:
1086 1086 fstandin = lfutil.standin(f)
1087 1087 if fstandin not in repo.dirstate and not repo.wvfs.isdir(fstandin):
1088 1088 ui.warn(_('not removing %s: file is already untracked\n')
1089 1089 % m.rel(f))
1090 1090 bad.append(f)
1091 1091
1092 1092 for f in forget:
1093 1093 if ui.verbose or not m.exact(f):
1094 1094 ui.status(_('removing %s\n') % m.rel(f))
1095 1095
1096 1096 # Need to lock because standin files are deleted then removed from the
1097 1097 # repository and we could race in-between.
1098 1098 with repo.wlock():
1099 1099 lfdirstate = lfutil.openlfdirstate(ui, repo)
1100 1100 for f in forget:
1101 1101 if lfdirstate[f] == 'a':
1102 1102 lfdirstate.drop(f)
1103 1103 else:
1104 1104 lfdirstate.remove(f)
1105 1105 lfdirstate.write()
1106 1106 standins = [lfutil.standin(f) for f in forget]
1107 1107 for f in standins:
1108 1108 repo.wvfs.unlinkpath(f, ignoremissing=True)
1109 1109 rejected = repo[None].forget(standins)
1110 1110
1111 1111 bad.extend(f for f in rejected if f in m.files())
1112 1112 forgot.extend(f for f in forget if f not in rejected)
1113 1113 return bad, forgot
1114 1114
1115 1115 def _getoutgoings(repo, other, missing, addfunc):
1116 1116 """get pairs of filename and largefile hash in outgoing revisions
1117 1117 in 'missing'.
1118 1118
1119 1119 largefiles already existing on 'other' repository are ignored.
1120 1120
1121 1121 'addfunc' is invoked with each unique pair of filename and
1122 1122 largefile hash value.
1123 1123 """
1124 1124 knowns = set()
1125 1125 lfhashes = set()
1126 1126 def dedup(fn, lfhash):
1127 1127 k = (fn, lfhash)
1128 1128 if k not in knowns:
1129 1129 knowns.add(k)
1130 1130 lfhashes.add(lfhash)
1131 1131 lfutil.getlfilestoupload(repo, missing, dedup)
1132 1132 if lfhashes:
1133 1133 lfexists = storefactory.openstore(repo, other).exists(lfhashes)
1134 1134 for fn, lfhash in knowns:
1135 1135 if not lfexists[lfhash]: # lfhash doesn't exist on "other"
1136 1136 addfunc(fn, lfhash)
1137 1137
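# Editorial illustration (not part of the original source): a caller collects
# the (filename, hash) pairs through the 'addfunc' callback, e.g.
#
#   pairs = []
#   _getoutgoings(repo, other, outgoing.missing,
#                 lambda fn, lfhash: pairs.append((fn, lfhash)))
#
# outgoinghook() and summaryremotehook() below follow this pattern.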
1138 1138 def outgoinghook(ui, repo, other, opts, missing):
1139 1139 if opts.pop('large', None):
1140 1140 lfhashes = set()
1141 1141 if ui.debugflag:
1142 1142 toupload = {}
1143 1143 def addfunc(fn, lfhash):
1144 1144 if fn not in toupload:
1145 1145 toupload[fn] = []
1146 1146 toupload[fn].append(lfhash)
1147 1147 lfhashes.add(lfhash)
1148 1148 def showhashes(fn):
1149 1149 for lfhash in sorted(toupload[fn]):
1150 1150 ui.debug(' %s\n' % (lfhash))
1151 1151 else:
1152 1152 toupload = set()
1153 1153 def addfunc(fn, lfhash):
1154 1154 toupload.add(fn)
1155 1155 lfhashes.add(lfhash)
1156 1156 def showhashes(fn):
1157 1157 pass
1158 1158 _getoutgoings(repo, other, missing, addfunc)
1159 1159
1160 1160 if not toupload:
1161 1161 ui.status(_('largefiles: no files to upload\n'))
1162 1162 else:
1163 1163 ui.status(_('largefiles to upload (%d entities):\n')
1164 1164 % (len(lfhashes)))
1165 1165 for file in sorted(toupload):
1166 1166 ui.status(lfutil.splitstandin(file) + '\n')
1167 1167 showhashes(file)
1168 1168 ui.status('\n')
1169 1169
1170 1170 def summaryremotehook(ui, repo, opts, changes):
1171 1171 largeopt = opts.get('large', False)
1172 1172 if changes is None:
1173 1173 if largeopt:
1174 1174 return (False, True) # only outgoing check is needed
1175 1175 else:
1176 1176 return (False, False)
1177 1177 elif largeopt:
1178 1178 url, branch, peer, outgoing = changes[1]
1179 1179 if peer is None:
1180 1180 # i18n: column positioning for "hg summary"
1181 1181 ui.status(_('largefiles: (no remote repo)\n'))
1182 1182 return
1183 1183
1184 1184 toupload = set()
1185 1185 lfhashes = set()
1186 1186 def addfunc(fn, lfhash):
1187 1187 toupload.add(fn)
1188 1188 lfhashes.add(lfhash)
1189 1189 _getoutgoings(repo, peer, outgoing.missing, addfunc)
1190 1190
1191 1191 if not toupload:
1192 1192 # i18n: column positioning for "hg summary"
1193 1193 ui.status(_('largefiles: (no files to upload)\n'))
1194 1194 else:
1195 1195 # i18n: column positioning for "hg summary"
1196 1196 ui.status(_('largefiles: %d entities for %d files to upload\n')
1197 1197 % (len(lfhashes), len(toupload)))
1198 1198
1199 1199 def overridesummary(orig, ui, repo, *pats, **opts):
1200 1200 try:
1201 1201 repo.lfstatus = True
1202 1202 orig(ui, repo, *pats, **opts)
1203 1203 finally:
1204 1204 repo.lfstatus = False
1205 1205
1206 1206 def scmutiladdremove(orig, repo, matcher, prefix, opts=None):
1207 1207 if opts is None:
1208 1208 opts = {}
1209 1209 if not lfutil.islfilesrepo(repo):
1210 1210 return orig(repo, matcher, prefix, opts)
1211 1211 # Get the list of missing largefiles so we can remove them
1212 1212 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1213 1213 unsure, s = lfdirstate.status(matchmod.always(repo.root, repo.getcwd()),
1214 1214 subrepos=[], ignored=False, clean=False,
1215 1215 unknown=False)
1216 1216
1217 1217 # Call into the normal remove code, but we want the removal of the standin
1218 1218 # to be handled by the original addremove. Monkey patching here makes sure
1219 1219 # we don't remove the standin in the largefiles code, preventing a very
1220 1220 # confused state later.
1221 1221 if s.deleted:
1222 1222 m = copy.copy(matcher)
1223 1223
1224 1224 # The m._files and m._map attributes are not changed to the deleted list
1225 1225 # because that affects the m.exact() test, which in turn governs whether
1226 1226 # or not the file name is printed, and how. Simply limit the original
1227 1227 # matches to those in the deleted status list.
1228 1228 matchfn = m.matchfn
1229 1229 m.matchfn = lambda f: f in s.deleted and matchfn(f)
1230 1230
1231 1231 removelargefiles(repo.ui, repo, True, m, opts.get('dry_run'),
1232 1232 **pycompat.strkwargs(opts))
1233 1233 # Call into the normal add code, and any files that *should* be added as
1234 1234 # largefiles will be
1235 1235 added, bad = addlargefiles(repo.ui, repo, True, matcher,
1236 1236 **pycompat.strkwargs(opts))
1237 1237 # Now that we've handled largefiles, hand off to the original addremove
1238 1238 # function to take care of the rest. Make sure it doesn't do anything with
1239 1239 # largefiles by passing a matcher that will ignore them.
1240 1240 matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
1241 1241 return orig(repo, matcher, prefix, opts)
1242 1242
1243 1243 # Calling purge with --all will cause the largefiles to be deleted.
1244 1244 # Override repo.status to prevent this from happening.
1245 1245 def overridepurge(orig, ui, repo, *dirs, **opts):
1246 1246 # XXX Monkey patching a repoview will not work. The assigned attribute will
1247 1247 # be set on the unfiltered repo, but we will only look up attributes in the
1248 1248 # unfiltered repo if the lookup in the repoview object itself fails. As the
1249 1249 # monkey patched method exists on the repoview class the lookup will not
1250 1250 # fail. As a result, the original version will shadow the monkey patched
1251 1251 # one, defeating the monkey patch.
1252 1252 #
1253 1253 # As a workaround we use an unfiltered repo here. We should do something
1254 1254 # cleaner instead.
1255 1255 repo = repo.unfiltered()
1256 1256 oldstatus = repo.status
1257 1257 def overridestatus(node1='.', node2=None, match=None, ignored=False,
1258 1258 clean=False, unknown=False, listsubrepos=False):
1259 1259 r = oldstatus(node1, node2, match, ignored, clean, unknown,
1260 1260 listsubrepos)
1261 1261 lfdirstate = lfutil.openlfdirstate(ui, repo)
1262 1262 unknown = [f for f in r.unknown if lfdirstate[f] == '?']
1263 1263 ignored = [f for f in r.ignored if lfdirstate[f] == '?']
1264 1264 return scmutil.status(r.modified, r.added, r.removed, r.deleted,
1265 1265 unknown, ignored, r.clean)
1266 1266 repo.status = overridestatus
1267 1267 orig(ui, repo, *dirs, **opts)
1268 1268 repo.status = oldstatus
1269 1269
1270 1270 def overriderollback(orig, ui, repo, **opts):
1271 1271 with repo.wlock():
1272 1272 before = repo.dirstate.parents()
1273 1273 orphans = set(f for f in repo.dirstate
1274 1274 if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
1275 1275 result = orig(ui, repo, **opts)
1276 1276 after = repo.dirstate.parents()
1277 1277 if before == after:
1278 1278 return result # no need to restore standins
1279 1279
1280 1280 pctx = repo['.']
1281 1281 for f in repo.dirstate:
1282 1282 if lfutil.isstandin(f):
1283 1283 orphans.discard(f)
1284 1284 if repo.dirstate[f] == 'r':
1285 1285 repo.wvfs.unlinkpath(f, ignoremissing=True)
1286 1286 elif f in pctx:
1287 1287 fctx = pctx[f]
1288 1288 repo.wwrite(f, fctx.data(), fctx.flags())
1289 1289 else:
1290 1290 # content of standin is not so important in 'a',
1291 1291 # 'm' or 'n' (coming from the 2nd parent) cases
1292 1292 lfutil.writestandin(repo, f, '', False)
1293 1293 for standin in orphans:
1294 1294 repo.wvfs.unlinkpath(standin, ignoremissing=True)
1295 1295
1296 1296 lfdirstate = lfutil.openlfdirstate(ui, repo)
1297 1297 orphans = set(lfdirstate)
1298 1298 lfiles = lfutil.listlfiles(repo)
1299 1299 for file in lfiles:
1300 1300 lfutil.synclfdirstate(repo, lfdirstate, file, True)
1301 1301 orphans.discard(file)
1302 1302 for lfile in orphans:
1303 1303 lfdirstate.drop(lfile)
1304 1304 lfdirstate.write()
1305 1305 return result
1306 1306
1307 1307 def overridetransplant(orig, ui, repo, *revs, **opts):
1308 1308 resuming = opts.get(r'continue')
1309 1309 repo._lfcommithooks.append(lfutil.automatedcommithook(resuming))
1310 1310 repo._lfstatuswriters.append(lambda *msg, **opts: None)
1311 1311 try:
1312 1312 result = orig(ui, repo, *revs, **opts)
1313 1313 finally:
1314 1314 repo._lfstatuswriters.pop()
1315 1315 repo._lfcommithooks.pop()
1316 1316 return result
1317 1317
1318 1318 def overridecat(orig, ui, repo, file1, *pats, **opts):
1319 1319 opts = pycompat.byteskwargs(opts)
1320 1320 ctx = scmutil.revsingle(repo, opts.get('rev'))
1321 1321 err = 1
1322 1322 notbad = set()
1323 1323 m = scmutil.match(ctx, (file1,) + pats, opts)
1324 1324 origmatchfn = m.matchfn
1325 1325 def lfmatchfn(f):
1326 1326 if origmatchfn(f):
1327 1327 return True
1328 1328 lf = lfutil.splitstandin(f)
1329 1329 if lf is None:
1330 1330 return False
1331 1331 notbad.add(lf)
1332 1332 return origmatchfn(lf)
1333 1333 m.matchfn = lfmatchfn
1334 1334 origbadfn = m.bad
1335 1335 def lfbadfn(f, msg):
1336 1336 if not f in notbad:
1337 1337 origbadfn(f, msg)
1338 1338 m.bad = lfbadfn
1339 1339
1340 1340 origvisitdirfn = m.visitdir
1341 1341 def lfvisitdirfn(dir):
1342 1342 if dir == lfutil.shortname:
1343 1343 return True
1344 1344 ret = origvisitdirfn(dir)
1345 1345 if ret:
1346 1346 return ret
1347 1347 lf = lfutil.splitstandin(dir)
1348 1348 if lf is None:
1349 1349 return False
1350 1350 return origvisitdirfn(lf)
1351 1351 m.visitdir = lfvisitdirfn
1352 1352
1353 1353 for f in ctx.walk(m):
1354 1354 with cmdutil.makefileobj(ctx, opts.get('output'), pathname=f) as fp:
1355 1355 lf = lfutil.splitstandin(f)
1356 1356 if lf is None or origmatchfn(f):
1357 1357 # duplicating unreachable code from commands.cat
1358 1358 data = ctx[f].data()
1359 1359 if opts.get('decode'):
1360 1360 data = repo.wwritedata(f, data)
1361 1361 fp.write(data)
1362 1362 else:
1363 1363 hash = lfutil.readasstandin(ctx[f])
1364 1364 if not lfutil.inusercache(repo.ui, hash):
1365 1365 store = storefactory.openstore(repo)
1366 1366 success, missing = store.get([(lf, hash)])
1367 1367 if len(success) != 1:
1368 1368 raise error.Abort(
1369 1369 _('largefile %s is not in cache and could not be '
1370 1370 'downloaded') % lf)
1371 1371 path = lfutil.usercachepath(repo.ui, hash)
1372 1372 with open(path, "rb") as fpin:
1373 1373 for chunk in util.filechunkiter(fpin):
1374 1374 fp.write(chunk)
1375 1375 err = 0
1376 1376 return err
1377 1377
1378 1378 def mergeupdate(orig, repo, node, branchmerge, force,
1379 1379 *args, **kwargs):
1380 1380 matcher = kwargs.get(r'matcher', None)
1381 1381 # note if this is a partial update
1382 1382 partial = matcher and not matcher.always()
1383 1383 with repo.wlock():
1384 1384 # branch | | |
1385 1385 # merge | force | partial | action
1386 1386 # -------+-------+---------+--------------
1387 1387 # x | x | x | linear-merge
1388 1388 # o | x | x | branch-merge
1389 1389 # x | o | x | overwrite (as clean update)
1390 1390 # o | o | x | force-branch-merge (*1)
1391 1391 # x | x | o | (*)
1392 1392 # o | x | o | (*)
1393 1393 # x | o | o | overwrite (as revert)
1394 1394 # o | o | o | (*)
1395 1395 #
1396 1396 # (*) don't care
1397 1397 # (*1) deprecated, but used internally (e.g: "rebase --collapse")
1398 1398
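# Editorial illustration (not part of the original source): in user-level
# terms the first three rows of the table roughly correspond to
#
#   plain "hg update REV"      -> linear-merge
#   "hg merge REV"             -> branch-merge
#   "hg update --clean REV"    -> overwrite (as clean update)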
1399 1399 lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
1400 1400 unsure, s = lfdirstate.status(matchmod.always(repo.root,
1401 1401 repo.getcwd()),
1402 1402 subrepos=[], ignored=False,
1403 1403 clean=True, unknown=False)
1404 1404 oldclean = set(s.clean)
1405 1405 pctx = repo['.']
1406 1406 dctx = repo[node]
1407 1407 for lfile in unsure + s.modified:
1408 1408 lfileabs = repo.wvfs.join(lfile)
1409 1409 if not repo.wvfs.exists(lfileabs):
1410 1410 continue
1411 1411 lfhash = lfutil.hashfile(lfileabs)
1412 1412 standin = lfutil.standin(lfile)
1413 1413 lfutil.writestandin(repo, standin, lfhash,
1414 1414 lfutil.getexecutable(lfileabs))
1415 1415 if (standin in pctx and
1416 1416 lfhash == lfutil.readasstandin(pctx[standin])):
1417 1417 oldclean.add(lfile)
1418 1418 for lfile in s.added:
1419 1419 fstandin = lfutil.standin(lfile)
1420 1420 if fstandin not in dctx:
1421 1421 # in this case, content of standin file is meaningless
1422 1422 # (in dctx, lfile is unknown, or normal file)
1423 1423 continue
1424 1424 lfutil.updatestandin(repo, lfile, fstandin)
1425 1425 # mark all clean largefiles as dirty, just in case the update gets
1426 1426 # interrupted before largefiles and lfdirstate are synchronized
1427 1427 for lfile in oldclean:
1428 1428 lfdirstate.normallookup(lfile)
1429 1429 lfdirstate.write()
1430 1430
1431 1431 oldstandins = lfutil.getstandinsstate(repo)
1432 1432 # Make sure the merge runs on disk, not in-memory. largefiles is not a
1433 1433 # good candidate for in-memory merge (large files, custom dirstate,
1434 1434 # matcher usage).
1435 1435 kwargs[r'wc'] = repo[None]
1436 1436 result = orig(repo, node, branchmerge, force, *args, **kwargs)
1437 1437
1438 1438 newstandins = lfutil.getstandinsstate(repo)
1439 1439 filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
1440 1440
1441 1441 # to avoid leaving all largefiles as dirty and thus rehashing them, mark
1442 1442 # all the ones that didn't change as clean
1443 1443 for lfile in oldclean.difference(filelist):
1444 1444 lfdirstate.normal(lfile)
1445 1445 lfdirstate.write()
1446 1446
1447 1447 if branchmerge or force or partial:
1448 1448 filelist.extend(s.deleted + s.removed)
1449 1449
1450 1450 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1451 1451 normallookup=partial)
1452 1452
1453 1453 return result
1454 1454
1455 1455 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
1456 1456 result = orig(repo, files, *args, **kwargs)
1457 1457
1458 1458 filelist = []
1459 1459 for f in files:
1460 1460 lf = lfutil.splitstandin(f)
1461 1461 if lf is not None:
1462 1462 filelist.append(lf)
1463 1463 if filelist:
1464 1464 lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
1465 1465 printmessage=False, normallookup=True)
1466 1466
1467 1467 return result
1468 1468
1469 1469 def upgraderequirements(orig, repo):
1470 1470 reqs = orig(repo)
1471 1471 if 'largefiles' in repo.requirements:
1472 1472 reqs.add('largefiles')
1473 1473 return reqs
1474 1474
1475 1475 _lfscheme = 'largefile://'
1476 1476 def openlargefile(orig, ui, url_, data=None):
1477 1477 if url_.startswith(_lfscheme):
1478 1478 if data:
1479 1479 msg = "cannot use data on a 'largefile://' url"
1480 1480 raise error.ProgrammingError(msg)
1481 1481 lfid = url_[len(_lfscheme):]
1482 1482 return storefactory.getlfile(ui, lfid)
1483 1483 else:
1484 1484 return orig(ui, url_, data=data)
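# Editorial illustration (not part of the original source; names below are
# illustrative): with this wrapper installed, the wrapped URL opener serves
# 'largefile://<sha>' URLs straight from the configured largefile store, e.g.
#
#   fileobj = opener(ui, 'largefile://' + lfhash)
#   data = fileobj.read()
#
# Supplying a 'data' payload together with such a URL raises ProgrammingError,
# since the scheme is read-only.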
@@ -1,349 +1,349 b''
1 1 # archival.py - revision archival for mercurial
2 2 #
3 3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import gzip
11 11 import os
12 12 import struct
13 13 import tarfile
14 14 import time
15 15 import zipfile
16 16 import zlib
17 17
18 18 from .i18n import _
19 19 from .node import (
20 20 nullrev,
21 21 )
22 22
23 23 from . import (
24 24 error,
25 25 formatter,
26 26 match as matchmod,
27 27 pycompat,
28 28 scmutil,
29 29 util,
30 30 vfs as vfsmod,
31 31 )
32 32 stringio = util.stringio
33 33
34 34 # from unzip source code:
35 35 _UNX_IFREG = 0x8000
36 36 _UNX_IFLNK = 0xa000
37 37
38 38 def tidyprefix(dest, kind, prefix):
39 39 '''choose prefix to use for names in archive. make sure prefix is
40 40 safe for consumers.'''
41 41
42 42 if prefix:
43 43 prefix = util.normpath(prefix)
44 44 else:
45 45 if not isinstance(dest, bytes):
46 46 raise ValueError('dest must be string if no prefix')
47 47 prefix = os.path.basename(dest)
48 48 lower = prefix.lower()
49 49 for sfx in exts.get(kind, []):
50 50 if lower.endswith(sfx):
51 51 prefix = prefix[:-len(sfx)]
52 52 break
53 53 lpfx = os.path.normpath(util.localpath(prefix))
54 54 prefix = util.pconvert(lpfx)
55 55 if not prefix.endswith('/'):
56 56 prefix += '/'
57 57 # Drop the leading '.' path component if present, so Windows can read the
58 58 # zip files (issue4634)
59 59 if prefix.startswith('./'):
60 60 prefix = prefix[2:]
61 61 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
62 62 raise error.Abort(_('archive prefix contains illegal components'))
63 63 return prefix
64 64
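# Editorial illustration (not part of the original source): rough examples of
# how tidyprefix() derives the archive member prefix, given the 'exts' table
# defined below:
#
#   tidyprefix('release-1.0.tar.gz', 'tgz', '')   -> 'release-1.0/'
#   tidyprefix('out.zip', 'zip', 'proj/src')      -> 'proj/src/'
#   tidyprefix('out.zip', 'zip', '../escape')     -> raises error.Abort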
65 65 exts = {
66 66 'tar': ['.tar'],
67 67 'tbz2': ['.tbz2', '.tar.bz2'],
68 68 'tgz': ['.tgz', '.tar.gz'],
69 69 'zip': ['.zip'],
70 70 }
71 71
72 72 def guesskind(dest):
73 73 for kind, extensions in exts.iteritems():
74 74 if any(dest.endswith(ext) for ext in extensions):
75 75 return kind
76 76 return None
77 77
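# Editorial illustration (not part of the original source): given the 'exts'
# table above, guesskind() maps destination names to archive kinds like so:
#
#   guesskind('release-1.0.tar.gz')  -> 'tgz'
#   guesskind('release-1.0.tbz2')    -> 'tbz2'
#   guesskind('snapshot.zip')        -> 'zip'
#   guesskind('notes.txt')           -> None   (no matching extension)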
78 78 def _rootctx(repo):
79 79 # repo[0] may be hidden
80 80 for rev in repo:
81 81 return repo[rev]
82 82 return repo[nullrev]
83 83
84 84 # {tags} on ctx includes local tags and 'tip', with no current way to limit
85 85 # that to global tags. Therefore, use {latesttag} as a substitute when
86 86 # the distance is 0, since that will be the list of global tags on ctx.
87 87 _defaultmetatemplate = br'''
88 88 repo: {root}
89 89 node: {ifcontains(rev, revset("wdir()"), "{p1node}{dirty}", "{node}")}
90 90 branch: {branch|utf8}
91 91 {ifeq(latesttagdistance, 0, join(latesttag % "tag: {tag}", "\n"),
92 92 separate("\n",
93 93 join(latesttag % "latesttag: {tag}", "\n"),
94 94 "latesttagdistance: {latesttagdistance}",
95 95 "changessincelatesttag: {changessincelatesttag}"))}
96 96 '''[1:] # drop leading '\n'
97 97
98 98 def buildmetadata(ctx):
99 99 '''build content of .hg_archival.txt'''
100 100 repo = ctx.repo()
101 101
102 102 opts = {
103 103 'template': repo.ui.config('experimental', 'archivemetatemplate',
104 104 _defaultmetatemplate)
105 105 }
106 106
107 107 out = util.stringio()
108 108
109 109 fm = formatter.formatter(repo.ui, out, 'archive', opts)
110 110 fm.startitem()
111 111 fm.context(ctx=ctx)
112 112 fm.data(root=_rootctx(repo).hex())
113 113
114 114 if ctx.rev() is None:
115 115 dirty = ''
116 116 if ctx.dirty(missing=True):
117 117 dirty = '+'
118 118 fm.data(dirty=dirty)
119 119 fm.end()
120 120
121 121 return out.getvalue()
122 122
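# Editorial illustration (not part of the original source): with the default
# template above, archiving an untagged changeset yields a .hg_archival.txt
# roughly of the form
#
#   repo: <hex node of the root changeset>
#   node: <hex node of the archived changeset (p1 hex plus '+' when archiving
#          a dirty working directory)>
#   branch: default
#   latesttag: <most recent global tag>
#   latesttagdistance: <revisions since that tag>
#   changessincelatesttag: <changesets since that tag>
#
# while a changeset that itself carries a tag gets "tag: <name>" lines in
# place of the three latesttag fields.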
123 123 class tarit(object):
124 124 '''write archive to tar file or stream. can write uncompressed,
125 125 or compressed with gzip or bzip2.'''
126 126
127 127 class GzipFileWithTime(gzip.GzipFile):
128 128
129 129 def __init__(self, *args, **kw):
130 130 timestamp = None
131 131 if r'timestamp' in kw:
132 132 timestamp = kw.pop(r'timestamp')
133 133 if timestamp is None:
134 134 self.timestamp = time.time()
135 135 else:
136 136 self.timestamp = timestamp
137 137 gzip.GzipFile.__init__(self, *args, **kw)
138 138
139 139 def _write_gzip_header(self):
140 140 self.fileobj.write('\037\213') # magic header
141 141 self.fileobj.write('\010') # compression method
142 142 fname = self.name
143 143 if fname and fname.endswith('.gz'):
144 144 fname = fname[:-3]
145 145 flags = 0
146 146 if fname:
147 147 flags = gzip.FNAME
148 148 self.fileobj.write(pycompat.bytechr(flags))
149 149 gzip.write32u(self.fileobj, int(self.timestamp))
150 150 self.fileobj.write('\002')
151 151 self.fileobj.write('\377')
152 152 if fname:
153 153 self.fileobj.write(fname + '\000')
154 154
155 155 def __init__(self, dest, mtime, kind=''):
156 156 self.mtime = mtime
157 157 self.fileobj = None
158 158
159 159 def taropen(mode, name='', fileobj=None):
160 160 if kind == 'gz':
161 161 mode = mode[0:1]
162 162 if not fileobj:
163 163 fileobj = open(name, mode + 'b')
164 164 gzfileobj = self.GzipFileWithTime(name,
165 165 pycompat.sysstr(mode + 'b'),
166 166 zlib.Z_BEST_COMPRESSION,
167 167 fileobj, timestamp=mtime)
168 168 self.fileobj = gzfileobj
169 169 return tarfile.TarFile.taropen(
170 170 name, pycompat.sysstr(mode), gzfileobj)
171 171 else:
172 172 return tarfile.open(
173 173 name, pycompat.sysstr(mode + kind), fileobj)
174 174
175 175 if isinstance(dest, bytes):
176 176 self.z = taropen('w:', name=dest)
177 177 else:
178 178 self.z = taropen('w|', fileobj=dest)
179 179
180 180 def addfile(self, name, mode, islink, data):
181 181 name = pycompat.fsdecode(name)
182 182 i = tarfile.TarInfo(name)
183 183 i.mtime = self.mtime
184 184 i.size = len(data)
185 185 if islink:
186 186 i.type = tarfile.SYMTYPE
187 187 i.mode = 0o777
188 188 i.linkname = pycompat.fsdecode(data)
189 189 data = None
190 190 i.size = 0
191 191 else:
192 192 i.mode = mode
193 193 data = stringio(data)
194 194 self.z.addfile(i, data)
195 195
196 196 def done(self):
197 197 self.z.close()
198 198 if self.fileobj:
199 199 self.fileobj.close()
200 200
201 201 class zipit(object):
202 202 '''write archive to zip file or stream. can write uncompressed,
203 203 or compressed with deflate.'''
204 204
205 205 def __init__(self, dest, mtime, compress=True):
206 206 if isinstance(dest, bytes):
207 207 dest = pycompat.fsdecode(dest)
208 208 self.z = zipfile.ZipFile(dest, r'w',
209 209 compress and zipfile.ZIP_DEFLATED or
210 210 zipfile.ZIP_STORED)
211 211
212 212 # Python's zipfile module emits deprecation warnings if we try
213 213 # to store files with a date before 1980.
214 214 epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
215 215 if mtime < epoch:
216 216 mtime = epoch
217 217
218 218 self.mtime = mtime
219 219 self.date_time = time.gmtime(mtime)[:6]
220 220
221 221 def addfile(self, name, mode, islink, data):
222 222 i = zipfile.ZipInfo(pycompat.fsdecode(name), self.date_time)
223 223 i.compress_type = self.z.compression
224 224 # unzip will not honor unix file modes unless file creator is
225 225 # set to unix (id 3).
226 226 i.create_system = 3
227 227 ftype = _UNX_IFREG
228 228 if islink:
229 229 mode = 0o777
230 230 ftype = _UNX_IFLNK
231 231 i.external_attr = (mode | ftype) << 16
232 232 # add "extended-timestamp" extra block, because zip archives
233 233 # without this will be extracted with unexpected timestamp,
234 234 # if TZ is not configured as GMT
235 235 i.extra += struct.pack('<hhBl',
236 236 0x5455, # block type: "extended-timestamp"
237 237 1 + 4, # size of this block
238 238 1, # "modification time is present"
239 239 int(self.mtime)) # last modification (UTC)
240 240 self.z.writestr(i, data)
241 241
242 242 def done(self):
243 243 self.z.close()
244 244
245 245 class fileit(object):
246 246 '''write archive as files in directory.'''
247 247
248 248 def __init__(self, name, mtime):
249 249 self.basedir = name
250 250 self.opener = vfsmod.vfs(self.basedir)
251 251 self.mtime = mtime
252 252
253 253 def addfile(self, name, mode, islink, data):
254 254 if islink:
255 255 self.opener.symlink(data, name)
256 256 return
257 257 f = self.opener(name, "w", atomictemp=False)
258 258 f.write(data)
259 259 f.close()
260 260 destfile = os.path.join(self.basedir, name)
261 261 os.chmod(destfile, mode)
262 262 if self.mtime is not None:
263 263 os.utime(destfile, (self.mtime, self.mtime))
264 264
265 265 def done(self):
266 266 pass
267 267
268 268 archivers = {
269 269 'files': fileit,
270 270 'tar': tarit,
271 271 'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'),
272 272 'tgz': lambda name, mtime: tarit(name, mtime, 'gz'),
273 273 'uzip': lambda name, mtime: zipit(name, mtime, False),
274 274 'zip': zipit,
275 275 }
276 276
277 def archive(repo, dest, node, kind, decode=True, matchfn=None,
277 def archive(repo, dest, node, kind, decode=True, match=None,
278 278 prefix='', mtime=None, subrepos=False):
279 279 '''create archive of repo as it was at node.
280 280
281 281 dest can be name of directory, name of archive file, or file
282 282 object to write archive to.
283 283
284 284 kind is type of archive to create.
285 285
286 286 decode tells whether to put files through decode filters from
287 287 hgrc.
288 288
289 matchfn is function to filter names of files to write to archive.
289 match is a matcher to filter names of files to write to archive.
290 290
291 291 prefix is name of path to put before every archive member.
292 292
293 293 mtime is the modified time, in seconds, or None to use the changeset time.
294 294
295 295 subrepos tells whether to include subrepos.
296 296 '''
297 297
298 298 if kind == 'files':
299 299 if prefix:
300 300 raise error.Abort(_('cannot give prefix when archiving to files'))
301 301 else:
302 302 prefix = tidyprefix(dest, kind, prefix)
303 303
304 304 def write(name, mode, islink, getdata):
305 305 data = getdata()
306 306 if decode:
307 307 data = repo.wwritedata(name, data)
308 308 archiver.addfile(prefix + name, mode, islink, data)
309 309
310 310 if kind not in archivers:
311 311 raise error.Abort(_("unknown archive type '%s'") % kind)
312 312
313 313 ctx = repo[node]
314 314 archiver = archivers[kind](dest, mtime or ctx.date()[0])
315 315
316 316 if repo.ui.configbool("ui", "archivemeta"):
317 317 name = '.hg_archival.txt'
318 if not matchfn or matchfn(name):
318 if not match or match(name):
319 319 write(name, 0o644, False, lambda: buildmetadata(ctx))
320 320
321 if matchfn:
322 files = [f for f in ctx.manifest().keys() if matchfn(f)]
321 if match:
322 files = [f for f in ctx.manifest().keys() if match(f)]
323 323 else:
324 324 files = ctx.manifest().keys()
325 325 total = len(files)
326 326 if total:
327 327 files.sort()
328 328 scmutil.prefetchfiles(repo, [ctx.rev()],
329 329 scmutil.matchfiles(repo, files))
330 330 progress = scmutil.progress(repo.ui, _('archiving'), unit=_('files'),
331 331 total=total)
332 332 progress.update(0)
333 333 for f in files:
334 334 ff = ctx.flags(f)
335 335 write(f, 'x' in ff and 0o755 or 0o644, 'l' in ff, ctx[f].data)
336 336 progress.increment(item=f)
337 337 progress.complete()
338 338
339 339 if subrepos:
340 340 for subpath in sorted(ctx.substate):
341 341 sub = ctx.workingsub(subpath)
342 submatch = matchmod.subdirmatcher(subpath, matchfn)
342 submatch = matchmod.subdirmatcher(subpath, match)
343 343 total += sub.archive(archiver, prefix, submatch, decode)
344 344
345 345 if total == 0:
346 346 raise error.Abort(_('no files match the archive pattern'))
347 347
348 348 archiver.done()
349 349 return total
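# Editorial usage sketch (illustrative only, not part of this changeset):
# after this change, callers hand archive() a matcher object rather than a
# bare matching function, for example:
#
#   from mercurial import archival, scmutil
#
#   def make_tarball(repo, dest):
#       ctx = repo['tip']
#       # restrict the archive to files under src/
#       m = scmutil.match(ctx, ['path:src'], {})
#       archival.archive(repo, dest, ctx.node(), 'tgz',
#                        decode=True, match=m, prefix='project-tip/')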
@@ -1,1485 +1,1484 b''
1 1 #
2 2 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import copy
11 11 import mimetypes
12 12 import os
13 13 import re
14 14
15 15 from ..i18n import _
16 16 from ..node import hex, short
17 17
18 18 from .common import (
19 19 ErrorResponse,
20 20 HTTP_FORBIDDEN,
21 21 HTTP_NOT_FOUND,
22 22 get_contact,
23 23 paritygen,
24 24 staticfile,
25 25 )
26 26
27 27 from .. import (
28 28 archival,
29 29 dagop,
30 30 encoding,
31 31 error,
32 32 graphmod,
33 33 pycompat,
34 34 revset,
35 35 revsetlang,
36 36 scmutil,
37 37 smartset,
38 38 templater,
39 39 templateutil,
40 40 )
41 41
42 42 from ..utils import (
43 43 stringutil,
44 44 )
45 45
46 46 from . import (
47 47 webutil,
48 48 )
49 49
50 50 __all__ = []
51 51 commands = {}
52 52
53 53 class webcommand(object):
54 54 """Decorator used to register a web command handler.
55 55
56 56 The decorator takes as its positional arguments the name/path the
57 57 command should be accessible under.
58 58
59 59 When called, functions receive as arguments a ``requestcontext``,
60 60 ``wsgirequest``, and a templater instance for generating output.
61 61 The functions should populate the ``rctx.res`` object with details
62 62 about the HTTP response.
63 63
64 64 The function returns a generator to be consumed by the WSGI application.
65 65 For most commands, this should be the result from
66 66 ``web.res.sendresponse()``. Many commands will call ``web.sendtemplate()``
67 67 to render a template.
68 68
69 69 Usage:
70 70
71 71 @webcommand('mycommand')
72 72 def mycommand(web):
73 73 pass
74 74 """
75 75
76 76 def __init__(self, name):
77 77 self.name = name
78 78
79 79 def __call__(self, func):
80 80 __all__.append(self.name)
81 81 commands[self.name] = func
82 82 return func
83 83
84 84 @webcommand('log')
85 85 def log(web):
86 86 """
87 87 /log[/{revision}[/{path}]]
88 88 --------------------------
89 89
90 90 Show repository or file history.
91 91
92 92 For URLs of the form ``/log/{revision}``, a list of changesets starting at
93 93 the specified changeset identifier is shown. If ``{revision}`` is not
94 94 defined, the default is ``tip``. This form is equivalent to the
95 95 ``changelog`` handler.
96 96
97 97 For URLs of the form ``/log/{revision}/{file}``, the history for a specific
98 98 file will be shown. This form is equivalent to the ``filelog`` handler.
99 99 """
100 100
101 101 if web.req.qsparams.get('file'):
102 102 return filelog(web)
103 103 else:
104 104 return changelog(web)
105 105
106 106 @webcommand('rawfile')
107 107 def rawfile(web):
108 108 guessmime = web.configbool('web', 'guessmime')
109 109
110 110 path = webutil.cleanpath(web.repo, web.req.qsparams.get('file', ''))
111 111 if not path:
112 112 return manifest(web)
113 113
114 114 try:
115 115 fctx = webutil.filectx(web.repo, web.req)
116 116 except error.LookupError as inst:
117 117 try:
118 118 return manifest(web)
119 119 except ErrorResponse:
120 120 raise inst
121 121
122 122 path = fctx.path()
123 123 text = fctx.data()
124 124 mt = 'application/binary'
125 125 if guessmime:
126 126 mt = mimetypes.guess_type(pycompat.fsdecode(path))[0]
127 127 if mt is None:
128 128 if stringutil.binary(text):
129 129 mt = 'application/binary'
130 130 else:
131 131 mt = 'text/plain'
132 132 else:
133 133 mt = pycompat.sysbytes(mt)
134 134
135 135 if mt.startswith('text/'):
136 136 mt += '; charset="%s"' % encoding.encoding
137 137
138 138 web.res.headers['Content-Type'] = mt
139 139 filename = (path.rpartition('/')[-1]
140 140 .replace('\\', '\\\\').replace('"', '\\"'))
141 141 web.res.headers['Content-Disposition'] = 'inline; filename="%s"' % filename
142 142 web.res.setbodybytes(text)
143 143 return web.res.sendresponse()
144 144
145 145 def _filerevision(web, fctx):
146 146 f = fctx.path()
147 147 text = fctx.data()
148 148 parity = paritygen(web.stripecount)
149 149 ishead = fctx.filenode() in fctx.filelog().heads()
150 150
151 151 if stringutil.binary(text):
152 152 mt = pycompat.sysbytes(
153 153 mimetypes.guess_type(pycompat.fsdecode(f))[0]
154 154 or r'application/octet-stream')
155 155 text = '(binary:%s)' % mt
156 156
157 157 def lines(context):
158 158 for lineno, t in enumerate(text.splitlines(True)):
159 159 yield {"line": t,
160 160 "lineid": "l%d" % (lineno + 1),
161 161 "linenumber": "% 6d" % (lineno + 1),
162 162 "parity": next(parity)}
163 163
164 164 return web.sendtemplate(
165 165 'filerevision',
166 166 file=f,
167 167 path=webutil.up(f),
168 168 text=templateutil.mappinggenerator(lines),
169 169 symrev=webutil.symrevorshortnode(web.req, fctx),
170 170 rename=webutil.renamelink(fctx),
171 171 permissions=fctx.manifest().flags(f),
172 172 ishead=int(ishead),
173 173 **pycompat.strkwargs(webutil.commonentry(web.repo, fctx)))
174 174
175 175 @webcommand('file')
176 176 def file(web):
177 177 """
178 178 /file/{revision}[/{path}]
179 179 -------------------------
180 180
181 181 Show information about a directory or file in the repository.
182 182
183 183 Info about the ``path`` given as a URL parameter will be rendered.
184 184
185 185 If ``path`` is a directory, information about the entries in that
186 186 directory will be rendered. This form is equivalent to the ``manifest``
187 187 handler.
188 188
189 189 If ``path`` is a file, information about that file will be shown via
190 190 the ``filerevision`` template.
191 191
192 192 If ``path`` is not defined, information about the root directory will
193 193 be rendered.
194 194 """
195 195 if web.req.qsparams.get('style') == 'raw':
196 196 return rawfile(web)
197 197
198 198 path = webutil.cleanpath(web.repo, web.req.qsparams.get('file', ''))
199 199 if not path:
200 200 return manifest(web)
201 201 try:
202 202 return _filerevision(web, webutil.filectx(web.repo, web.req))
203 203 except error.LookupError as inst:
204 204 try:
205 205 return manifest(web)
206 206 except ErrorResponse:
207 207 raise inst
208 208
209 209 def _search(web):
210 210 MODE_REVISION = 'rev'
211 211 MODE_KEYWORD = 'keyword'
212 212 MODE_REVSET = 'revset'
213 213
214 214 def revsearch(ctx):
215 215 yield ctx
216 216
217 217 def keywordsearch(query):
218 218 lower = encoding.lower
219 219 qw = lower(query).split()
220 220
221 221 def revgen():
222 222 cl = web.repo.changelog
223 223 for i in pycompat.xrange(len(web.repo) - 1, 0, -100):
224 224 l = []
225 225 for j in cl.revs(max(0, i - 99), i):
226 226 ctx = web.repo[j]
227 227 l.append(ctx)
228 228 l.reverse()
229 229 for e in l:
230 230 yield e
231 231
232 232 for ctx in revgen():
233 233 miss = 0
234 234 for q in qw:
235 235 if not (q in lower(ctx.user()) or
236 236 q in lower(ctx.description()) or
237 237 q in lower(" ".join(ctx.files()))):
238 238 miss = 1
239 239 break
240 240 if miss:
241 241 continue
242 242
243 243 yield ctx
244 244
245 245 def revsetsearch(revs):
246 246 for r in revs:
247 247 yield web.repo[r]
248 248
249 249 searchfuncs = {
250 250 MODE_REVISION: (revsearch, 'exact revision search'),
251 251 MODE_KEYWORD: (keywordsearch, 'literal keyword search'),
252 252 MODE_REVSET: (revsetsearch, 'revset expression search'),
253 253 }
254 254
255 255 def getsearchmode(query):
256 256 try:
257 257 ctx = scmutil.revsymbol(web.repo, query)
258 258 except (error.RepoError, error.LookupError):
259 259 # query is not an exact revision pointer, need to
260 260 # decide if it's a revset expression or keywords
261 261 pass
262 262 else:
263 263 return MODE_REVISION, ctx
264 264
265 265 revdef = 'reverse(%s)' % query
266 266 try:
267 267 tree = revsetlang.parse(revdef)
268 268 except error.ParseError:
269 269 # can't parse to a revset tree
270 270 return MODE_KEYWORD, query
271 271
272 272 if revsetlang.depth(tree) <= 2:
273 273 # no revset syntax used
274 274 return MODE_KEYWORD, query
275 275
276 276 if any((token, (value or '')[:3]) == ('string', 're:')
277 277 for token, value, pos in revsetlang.tokenize(revdef)):
278 278 return MODE_KEYWORD, query
279 279
280 280 funcsused = revsetlang.funcsused(tree)
281 281 if not funcsused.issubset(revset.safesymbols):
282 282 return MODE_KEYWORD, query
283 283
284 284 try:
285 285 mfunc = revset.match(web.repo.ui, revdef,
286 286 lookup=revset.lookupfn(web.repo))
287 287 revs = mfunc(web.repo)
288 288 return MODE_REVSET, revs
289 289 # ParseError: wrongly placed tokens, wrongs arguments, etc
290 290 # RepoLookupError: no such revision, e.g. in 'revision:'
291 291 # Abort: bookmark/tag not exists
292 292 # LookupError: ambiguous identifier, e.g. in '(bc)' on a large repo
293 293 except (error.ParseError, error.RepoLookupError, error.Abort,
294 294 LookupError):
295 295 return MODE_KEYWORD, query
296 296
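# Editorial illustration (not part of the original source): typical query
# classification by getsearchmode(), assuming an ordinary repository:
#
#   'tip', a changeset hash, a bookmark -> MODE_REVISION (revsymbol resolves it)
#   'branch(default) and user(alice)'   -> MODE_REVSET   (safe revset functions)
#   'fix encoding crash'                -> MODE_KEYWORD  (neither of the above)
#   'keyword("re:foo.*")'               -> MODE_KEYWORD  ('re:' strings are
#                                          always forced to keyword search)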
297 297 def changelist(context):
298 298 count = 0
299 299
300 300 for ctx in searchfunc[0](funcarg):
301 301 count += 1
302 302 n = scmutil.binnode(ctx)
303 303 showtags = webutil.showtag(web.repo, 'changelogtag', n)
304 304 files = webutil.listfilediffs(ctx.files(), n, web.maxfiles)
305 305
306 306 lm = webutil.commonentry(web.repo, ctx)
307 307 lm.update({
308 308 'parity': next(parity),
309 309 'changelogtag': showtags,
310 310 'files': files,
311 311 })
312 312 yield lm
313 313
314 314 if count >= revcount:
315 315 break
316 316
317 317 query = web.req.qsparams['rev']
318 318 revcount = web.maxchanges
319 319 if 'revcount' in web.req.qsparams:
320 320 try:
321 321 revcount = int(web.req.qsparams.get('revcount', revcount))
322 322 revcount = max(revcount, 1)
323 323 web.tmpl.defaults['sessionvars']['revcount'] = revcount
324 324 except ValueError:
325 325 pass
326 326
327 327 lessvars = copy.copy(web.tmpl.defaults['sessionvars'])
328 328 lessvars['revcount'] = max(revcount // 2, 1)
329 329 lessvars['rev'] = query
330 330 morevars = copy.copy(web.tmpl.defaults['sessionvars'])
331 331 morevars['revcount'] = revcount * 2
332 332 morevars['rev'] = query
333 333
334 334 mode, funcarg = getsearchmode(query)
335 335
336 336 if 'forcekw' in web.req.qsparams:
337 337 showforcekw = ''
338 338 showunforcekw = searchfuncs[mode][1]
339 339 mode = MODE_KEYWORD
340 340 funcarg = query
341 341 else:
342 342 if mode != MODE_KEYWORD:
343 343 showforcekw = searchfuncs[MODE_KEYWORD][1]
344 344 else:
345 345 showforcekw = ''
346 346 showunforcekw = ''
347 347
348 348 searchfunc = searchfuncs[mode]
349 349
350 350 tip = web.repo['tip']
351 351 parity = paritygen(web.stripecount)
352 352
353 353 return web.sendtemplate(
354 354 'search',
355 355 query=query,
356 356 node=tip.hex(),
357 357 symrev='tip',
358 358 entries=templateutil.mappinggenerator(changelist, name='searchentry'),
359 359 archives=web.archivelist('tip'),
360 360 morevars=morevars,
361 361 lessvars=lessvars,
362 362 modedesc=searchfunc[1],
363 363 showforcekw=showforcekw,
364 364 showunforcekw=showunforcekw)
365 365
366 366 @webcommand('changelog')
367 367 def changelog(web, shortlog=False):
368 368 """
369 369 /changelog[/{revision}]
370 370 -----------------------
371 371
372 372 Show information about multiple changesets.
373 373
374 374 If the optional ``revision`` URL argument is absent, information about
375 375 all changesets starting at ``tip`` will be rendered. If the ``revision``
376 376 argument is present, changesets will be shown starting from the specified
377 377 revision.
378 378
379 379 If ``revision`` is absent, the ``rev`` query string argument may be
380 380 defined. This will perform a search for changesets.
381 381
382 382 The argument for ``rev`` can be a single revision, a revision set,
383 383 or a literal keyword to search for in changeset data (equivalent to
384 384 :hg:`log -k`).
385 385
386 386 The ``revcount`` query string argument defines the maximum numbers of
387 387 changesets to render.
388 388
389 389 For non-searches, the ``changelog`` template will be rendered.
390 390 """
391 391
392 392 query = ''
393 393 if 'node' in web.req.qsparams:
394 394 ctx = webutil.changectx(web.repo, web.req)
395 395 symrev = webutil.symrevorshortnode(web.req, ctx)
396 396 elif 'rev' in web.req.qsparams:
397 397 return _search(web)
398 398 else:
399 399 ctx = web.repo['tip']
400 400 symrev = 'tip'
401 401
402 402 def changelist(maxcount):
403 403 revs = []
404 404 if pos != -1:
405 405 revs = web.repo.changelog.revs(pos, 0)
406 406
407 407 for entry in webutil.changelistentries(web, revs, maxcount, parity):
408 408 yield entry
409 409
410 410 if shortlog:
411 411 revcount = web.maxshortchanges
412 412 else:
413 413 revcount = web.maxchanges
414 414
415 415 if 'revcount' in web.req.qsparams:
416 416 try:
417 417 revcount = int(web.req.qsparams.get('revcount', revcount))
418 418 revcount = max(revcount, 1)
419 419 web.tmpl.defaults['sessionvars']['revcount'] = revcount
420 420 except ValueError:
421 421 pass
422 422
423 423 lessvars = copy.copy(web.tmpl.defaults['sessionvars'])
424 424 lessvars['revcount'] = max(revcount // 2, 1)
425 425 morevars = copy.copy(web.tmpl.defaults['sessionvars'])
426 426 morevars['revcount'] = revcount * 2
427 427
428 428 count = len(web.repo)
429 429 pos = ctx.rev()
430 430 parity = paritygen(web.stripecount)
431 431
432 432 changenav = webutil.revnav(web.repo).gen(pos, revcount, count)
433 433
434 434 entries = list(changelist(revcount + 1))
435 435 latestentry = entries[:1]
436 436 if len(entries) > revcount:
437 437 nextentry = entries[-1:]
438 438 entries = entries[:-1]
439 439 else:
440 440 nextentry = []
441 441
442 442 return web.sendtemplate(
443 443 'shortlog' if shortlog else 'changelog',
444 444 changenav=changenav,
445 445 node=ctx.hex(),
446 446 rev=pos,
447 447 symrev=symrev,
448 448 changesets=count,
449 449 entries=templateutil.mappinglist(entries),
450 450 latestentry=templateutil.mappinglist(latestentry),
451 451 nextentry=templateutil.mappinglist(nextentry),
452 452 archives=web.archivelist('tip'),
453 453 revcount=revcount,
454 454 morevars=morevars,
455 455 lessvars=lessvars,
456 456 query=query)
457 457
458 458 @webcommand('shortlog')
459 459 def shortlog(web):
460 460 """
461 461 /shortlog
462 462 ---------
463 463
464 464 Show basic information about a set of changesets.
465 465
466 466 This accepts the same parameters as the ``changelog`` handler. The only
467 467 difference is the ``shortlog`` template will be rendered instead of the
468 468 ``changelog`` template.
469 469 """
470 470 return changelog(web, shortlog=True)
471 471
472 472 @webcommand('changeset')
473 473 def changeset(web):
474 474 """
475 475 /changeset[/{revision}]
476 476 -----------------------
477 477
478 478 Show information about a single changeset.
479 479
480 480 A URL path argument is the changeset identifier to show. See ``hg help
481 481 revisions`` for possible values. If not defined, the ``tip`` changeset
482 482 will be shown.
483 483
484 484 The ``changeset`` template is rendered. Contents of the ``changesettag``,
485 485 ``changesetbookmark``, ``filenodelink``, ``filenolink``, and the many
486 486 templates related to diffs may all be used to produce the output.
487 487 """
488 488 ctx = webutil.changectx(web.repo, web.req)
489 489
490 490 return web.sendtemplate(
491 491 'changeset',
492 492 **webutil.changesetentry(web, ctx))
493 493
494 494 rev = webcommand('rev')(changeset)
495 495
496 496 def decodepath(path):
497 497 """Hook for mapping a path in the repository to a path in the
498 498 working copy.
499 499
500 500 Extensions (e.g., largefiles) can override this to remap files in
501 501 the virtual file system presented by the manifest command below."""
502 502 return path
503 503
504 504 @webcommand('manifest')
505 505 def manifest(web):
506 506 """
507 507 /manifest[/{revision}[/{path}]]
508 508 -------------------------------
509 509
510 510 Show information about a directory.
511 511
512 512 If the URL path arguments are omitted, information about the root
513 513 directory for the ``tip`` changeset will be shown.
514 514
515 515 Because this handler can only show information for directories, it
516 516 is recommended to use the ``file`` handler instead, as it can handle both
517 517 directories and files.
518 518
519 519 The ``manifest`` template will be rendered for this handler.
520 520 """
521 521 if 'node' in web.req.qsparams:
522 522 ctx = webutil.changectx(web.repo, web.req)
523 523 symrev = webutil.symrevorshortnode(web.req, ctx)
524 524 else:
525 525 ctx = web.repo['tip']
526 526 symrev = 'tip'
527 527 path = webutil.cleanpath(web.repo, web.req.qsparams.get('file', ''))
528 528 mf = ctx.manifest()
529 529 node = scmutil.binnode(ctx)
530 530
531 531 files = {}
532 532 dirs = {}
533 533 parity = paritygen(web.stripecount)
534 534
535 535 if path and path[-1:] != "/":
536 536 path += "/"
537 537 l = len(path)
538 538 abspath = "/" + path
539 539
540 540 for full, n in mf.iteritems():
541 541 # the virtual path (working copy path) used for the full
542 542 # (repository) path
543 543 f = decodepath(full)
544 544
545 545 if f[:l] != path:
546 546 continue
547 547 remain = f[l:]
548 548 elements = remain.split('/')
549 549 if len(elements) == 1:
550 550 files[remain] = full
551 551 else:
552 552 h = dirs # need to retain ref to dirs (root)
553 553 for elem in elements[0:-1]:
554 554 if elem not in h:
555 555 h[elem] = {}
556 556 h = h[elem]
557 557 if len(h) > 1:
558 558 break
559 559 h[None] = None # denotes files present
560 560
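# Editorial illustration (not part of the original source): for path 'src/'
# and manifest entries 'README', 'src/a.py' and 'src/lib/util.py', the loop
# above produces
#
#   files = {'a.py': 'src/a.py'}
#   dirs  = {'lib': {None: None}}   # 'lib' contains entries further down
#
# 'README' is skipped because it does not start with the requested path.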
561 561 if mf and not files and not dirs:
562 562 raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)
563 563
564 564 def filelist(context):
565 565 for f in sorted(files):
566 566 full = files[f]
567 567
568 568 fctx = ctx.filectx(full)
569 569 yield {"file": full,
570 570 "parity": next(parity),
571 571 "basename": f,
572 572 "date": fctx.date(),
573 573 "size": fctx.size(),
574 574 "permissions": mf.flags(full)}
575 575
576 576 def dirlist(context):
577 577 for d in sorted(dirs):
578 578
579 579 emptydirs = []
580 580 h = dirs[d]
581 581 while isinstance(h, dict) and len(h) == 1:
582 582 k, v = next(iter(h.items()))
583 583 if v:
584 584 emptydirs.append(k)
585 585 h = v
586 586
587 587 path = "%s%s" % (abspath, d)
588 588 yield {"parity": next(parity),
589 589 "path": path,
590 590 "emptydirs": "/".join(emptydirs),
591 591 "basename": d}
592 592
593 593 return web.sendtemplate(
594 594 'manifest',
595 595 symrev=symrev,
596 596 path=abspath,
597 597 up=webutil.up(abspath),
598 598 upparity=next(parity),
599 599 fentries=templateutil.mappinggenerator(filelist),
600 600 dentries=templateutil.mappinggenerator(dirlist),
601 601 archives=web.archivelist(hex(node)),
602 602 **pycompat.strkwargs(webutil.commonentry(web.repo, ctx)))
603 603
604 604 @webcommand('tags')
605 605 def tags(web):
606 606 """
607 607 /tags
608 608 -----
609 609
610 610 Show information about tags.
611 611
612 612 No arguments are accepted.
613 613
614 614 The ``tags`` template is rendered.
615 615 """
616 616 i = list(reversed(web.repo.tagslist()))
617 617 parity = paritygen(web.stripecount)
618 618
619 619 def entries(context, notip, latestonly):
620 620 t = i
621 621 if notip:
622 622 t = [(k, n) for k, n in i if k != "tip"]
623 623 if latestonly:
624 624 t = t[:1]
625 625 for k, n in t:
626 626 yield {"parity": next(parity),
627 627 "tag": k,
628 628 "date": web.repo[n].date(),
629 629 "node": hex(n)}
630 630
631 631 return web.sendtemplate(
632 632 'tags',
633 633 node=hex(web.repo.changelog.tip()),
634 634 entries=templateutil.mappinggenerator(entries, args=(False, False)),
635 635 entriesnotip=templateutil.mappinggenerator(entries,
636 636 args=(True, False)),
637 637 latestentry=templateutil.mappinggenerator(entries, args=(True, True)))
638 638
639 639 @webcommand('bookmarks')
640 640 def bookmarks(web):
641 641 """
642 642 /bookmarks
643 643 ----------
644 644
645 645 Show information about bookmarks.
646 646
647 647 No arguments are accepted.
648 648
649 649 The ``bookmarks`` template is rendered.
650 650 """
651 651 i = [b for b in web.repo._bookmarks.items() if b[1] in web.repo]
652 652 sortkey = lambda b: (web.repo[b[1]].rev(), b[0])
653 653 i = sorted(i, key=sortkey, reverse=True)
654 654 parity = paritygen(web.stripecount)
655 655
656 656 def entries(context, latestonly):
657 657 t = i
658 658 if latestonly:
659 659 t = i[:1]
660 660 for k, n in t:
661 661 yield {"parity": next(parity),
662 662 "bookmark": k,
663 663 "date": web.repo[n].date(),
664 664 "node": hex(n)}
665 665
666 666 if i:
667 667 latestrev = i[0][1]
668 668 else:
669 669 latestrev = -1
670 670 lastdate = web.repo[latestrev].date()
671 671
672 672 return web.sendtemplate(
673 673 'bookmarks',
674 674 node=hex(web.repo.changelog.tip()),
675 675 lastchange=templateutil.mappinglist([{'date': lastdate}]),
676 676 entries=templateutil.mappinggenerator(entries, args=(False,)),
677 677 latestentry=templateutil.mappinggenerator(entries, args=(True,)))
678 678
679 679 @webcommand('branches')
680 680 def branches(web):
681 681 """
682 682 /branches
683 683 ---------
684 684
685 685 Show information about branches.
686 686
687 687 All known branches are contained in the output, even closed branches.
688 688
689 689 No arguments are accepted.
690 690
691 691 The ``branches`` template is rendered.
692 692 """
693 693 entries = webutil.branchentries(web.repo, web.stripecount)
694 694 latestentry = webutil.branchentries(web.repo, web.stripecount, 1)
695 695
696 696 return web.sendtemplate(
697 697 'branches',
698 698 node=hex(web.repo.changelog.tip()),
699 699 entries=entries,
700 700 latestentry=latestentry)
701 701
702 702 @webcommand('summary')
703 703 def summary(web):
704 704 """
705 705 /summary
706 706 --------
707 707
708 708 Show a summary of repository state.
709 709
710 710 Information about the latest changesets, bookmarks, tags, and branches
711 711 is captured by this handler.
712 712
713 713 The ``summary`` template is rendered.
714 714 """
715 715 i = reversed(web.repo.tagslist())
716 716
717 717 def tagentries(context):
718 718 parity = paritygen(web.stripecount)
719 719 count = 0
720 720 for k, n in i:
721 721 if k == "tip": # skip tip
722 722 continue
723 723
724 724 count += 1
725 725 if count > 10: # limit to 10 tags
726 726 break
727 727
728 728 yield {
729 729 'parity': next(parity),
730 730 'tag': k,
731 731 'node': hex(n),
732 732 'date': web.repo[n].date(),
733 733 }
734 734
735 735 def bookmarks(context):
736 736 parity = paritygen(web.stripecount)
737 737 marks = [b for b in web.repo._bookmarks.items() if b[1] in web.repo]
738 738 sortkey = lambda b: (web.repo[b[1]].rev(), b[0])
739 739 marks = sorted(marks, key=sortkey, reverse=True)
740 740 for k, n in marks[:10]: # limit to 10 bookmarks
741 741 yield {'parity': next(parity),
742 742 'bookmark': k,
743 743 'date': web.repo[n].date(),
744 744 'node': hex(n)}
745 745
746 746 def changelist(context):
747 747 parity = paritygen(web.stripecount, offset=start - end)
748 748 l = [] # build a list in forward order for efficiency
749 749 revs = []
750 750 if start < end:
751 751 revs = web.repo.changelog.revs(start, end - 1)
752 752 for i in revs:
753 753 ctx = web.repo[i]
754 754 lm = webutil.commonentry(web.repo, ctx)
755 755 lm['parity'] = next(parity)
756 756 l.append(lm)
757 757
758 758 for entry in reversed(l):
759 759 yield entry
760 760
761 761 tip = web.repo['tip']
762 762 count = len(web.repo)
763 763 start = max(0, count - web.maxchanges)
764 764 end = min(count, start + web.maxchanges)
765 765
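# Editorial illustration (not part of the original source): with, say, 250
# changesets and web.maxchanges == 60, start = max(0, 250 - 60) = 190 and
# end = min(250, 190 + 60) = 250, so changelist() walks revisions 190..249
# and yields them newest-first for the shortlog section.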
766 766 desc = web.config("web", "description")
767 767 if not desc:
768 768 desc = 'unknown'
769 769 labels = web.configlist('web', 'labels')
770 770
771 771 return web.sendtemplate(
772 772 'summary',
773 773 desc=desc,
774 774 owner=get_contact(web.config) or 'unknown',
775 775 lastchange=tip.date(),
776 776 tags=templateutil.mappinggenerator(tagentries, name='tagentry'),
777 777 bookmarks=templateutil.mappinggenerator(bookmarks),
778 778 branches=webutil.branchentries(web.repo, web.stripecount, 10),
779 779 shortlog=templateutil.mappinggenerator(changelist,
780 780 name='shortlogentry'),
781 781 node=tip.hex(),
782 782 symrev='tip',
783 783 archives=web.archivelist('tip'),
784 784 labels=templateutil.hybridlist(labels, name='label'))
785 785
786 786 @webcommand('filediff')
787 787 def filediff(web):
788 788 """
789 789 /diff/{revision}/{path}
790 790 -----------------------
791 791
792 792 Show how a file changed in a particular commit.
793 793
794 794 The ``filediff`` template is rendered.
795 795
796 796 This handler is registered under both the ``/diff`` and ``/filediff``
797 797 paths. ``/diff`` is used in modern code.
798 798 """
799 799 fctx, ctx = None, None
800 800 try:
801 801 fctx = webutil.filectx(web.repo, web.req)
802 802 except LookupError:
803 803 ctx = webutil.changectx(web.repo, web.req)
804 804 path = webutil.cleanpath(web.repo, web.req.qsparams['file'])
805 805 if path not in ctx.files():
806 806 raise
807 807
808 808 if fctx is not None:
809 809 path = fctx.path()
810 810 ctx = fctx.changectx()
811 811 basectx = ctx.p1()
812 812
813 813 style = web.config('web', 'style')
814 814 if 'style' in web.req.qsparams:
815 815 style = web.req.qsparams['style']
816 816
817 817 diffs = webutil.diffs(web, ctx, basectx, [path], style)
818 818 if fctx is not None:
819 819 rename = webutil.renamelink(fctx)
820 820 ctx = fctx
821 821 else:
822 822 rename = templateutil.mappinglist([])
823 823 ctx = ctx
824 824
825 825 return web.sendtemplate(
826 826 'filediff',
827 827 file=path,
828 828 symrev=webutil.symrevorshortnode(web.req, ctx),
829 829 rename=rename,
830 830 diff=diffs,
831 831 **pycompat.strkwargs(webutil.commonentry(web.repo, ctx)))
832 832
833 833 diff = webcommand('diff')(filediff)
834 834
835 835 @webcommand('comparison')
836 836 def comparison(web):
837 837 """
838 838 /comparison/{revision}/{path}
839 839 -----------------------------
840 840
841 841 Show a comparison between the old and new versions of a file from changes
842 842 made on a particular revision.
843 843
844 844 This is similar to the ``diff`` handler. However, this form features
845 845 a split or side-by-side diff rather than a unified diff.
846 846
847 847 The ``context`` query string argument can be used to control the lines of
848 848 context in the diff.
849 849
850 850 The ``filecomparison`` template is rendered.
851 851 """
852 852 ctx = webutil.changectx(web.repo, web.req)
853 853 if 'file' not in web.req.qsparams:
854 854 raise ErrorResponse(HTTP_NOT_FOUND, 'file not given')
855 855 path = webutil.cleanpath(web.repo, web.req.qsparams['file'])
856 856
857 857 parsecontext = lambda v: v == 'full' and -1 or int(v)
858 858 if 'context' in web.req.qsparams:
859 859 context = parsecontext(web.req.qsparams['context'])
860 860 else:
861 861 context = parsecontext(web.config('web', 'comparisoncontext', '5'))
862 862
863 863 def filelines(f):
864 864 if f.isbinary():
865 865 mt = pycompat.sysbytes(
866 866 mimetypes.guess_type(pycompat.fsdecode(f.path()))[0]
867 867 or r'application/octet-stream')
868 868 return [_('(binary file %s, hash: %s)') % (mt, hex(f.filenode()))]
869 869 return f.data().splitlines()
870 870
871 871 fctx = None
872 872 parent = ctx.p1()
873 873 leftrev = parent.rev()
874 874 leftnode = parent.node()
875 875 rightrev = ctx.rev()
876 876 rightnode = scmutil.binnode(ctx)
877 877 if path in ctx:
878 878 fctx = ctx[path]
879 879 rightlines = filelines(fctx)
880 880 if path not in parent:
881 881 leftlines = ()
882 882 else:
883 883 pfctx = parent[path]
884 884 leftlines = filelines(pfctx)
885 885 else:
886 886 rightlines = ()
887 887 pfctx = ctx.parents()[0][path]
888 888 leftlines = filelines(pfctx)
889 889
890 890 comparison = webutil.compare(context, leftlines, rightlines)
891 891 if fctx is not None:
892 892 rename = webutil.renamelink(fctx)
893 893 ctx = fctx
894 894 else:
895 895 rename = templateutil.mappinglist([])
896 896 ctx = ctx
897 897
898 898 return web.sendtemplate(
899 899 'filecomparison',
900 900 file=path,
901 901 symrev=webutil.symrevorshortnode(web.req, ctx),
902 902 rename=rename,
903 903 leftrev=leftrev,
904 904 leftnode=hex(leftnode),
905 905 rightrev=rightrev,
906 906 rightnode=hex(rightnode),
907 907 comparison=comparison,
908 908 **pycompat.strkwargs(webutil.commonentry(web.repo, ctx)))
909 909
910 910 @webcommand('annotate')
911 911 def annotate(web):
912 912 """
913 913 /annotate/{revision}/{path}
914 914 ---------------------------
915 915
916 916 Show changeset information for each line in a file.
917 917
918 918 The ``ignorews``, ``ignorewsamount``, ``ignorewseol``, and
919 919 ``ignoreblanklines`` query string arguments have the same meaning as
920 920 their ``[annotate]`` config equivalents. The values are interpreted
921 921 using the hgrc boolean parsing logic, e.g. ``0`` and ``false`` are
922 922 false while ``1`` and ``true`` are true. If not defined, the server
923 923 default settings are used.
924 924
925 925 The ``fileannotate`` template is rendered.
926 926 """
927 927 fctx = webutil.filectx(web.repo, web.req)
928 928 f = fctx.path()
929 929 parity = paritygen(web.stripecount)
930 930 ishead = fctx.filenode() in fctx.filelog().heads()
931 931
932 932 # parents() is called once per line and several lines likely belong to
933 933 # the same revision, so it is worth caching.
934 934 # TODO there are still redundant operations within basefilectx.parents()
935 935 # and from the fctx.annotate() call itself that could be cached.
936 936 parentscache = {}
937 937 def parents(context, f):
938 938 rev = f.rev()
939 939 if rev not in parentscache:
940 940 parentscache[rev] = []
941 941 for p in f.parents():
942 942 entry = {
943 943 'node': p.hex(),
944 944 'rev': p.rev(),
945 945 }
946 946 parentscache[rev].append(entry)
947 947
948 948 for p in parentscache[rev]:
949 949 yield p
950 950
951 951 def annotate(context):
952 952 if fctx.isbinary():
953 953 mt = pycompat.sysbytes(
954 954 mimetypes.guess_type(pycompat.fsdecode(fctx.path()))[0]
955 955 or r'application/octet-stream')
956 956 lines = [dagop.annotateline(fctx=fctx.filectx(fctx.filerev()),
957 957 lineno=1, text='(binary:%s)' % mt)]
958 958 else:
959 959 lines = webutil.annotate(web.req, fctx, web.repo.ui)
960 960
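# Consecutive lines coming from the same revision form a "block":
# blockhead marks the first line of each block and blockparity alternates
# per block (presumably so the template can style alternating blocks).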
961 961 previousrev = None
962 962 blockparitygen = paritygen(1)
963 963 for lineno, aline in enumerate(lines):
964 964 f = aline.fctx
965 965 rev = f.rev()
966 966 if rev != previousrev:
967 967 blockhead = True
968 968 blockparity = next(blockparitygen)
969 969 else:
970 970 blockhead = None
971 971 previousrev = rev
972 972 yield {"parity": next(parity),
973 973 "node": f.hex(),
974 974 "rev": rev,
975 975 "author": f.user(),
976 976 "parents": templateutil.mappinggenerator(parents, args=(f,)),
977 977 "desc": f.description(),
978 978 "extra": f.extra(),
979 979 "file": f.path(),
980 980 "blockhead": blockhead,
981 981 "blockparity": blockparity,
982 982 "targetline": aline.lineno,
983 983 "line": aline.text,
984 984 "lineno": lineno + 1,
985 985 "lineid": "l%d" % (lineno + 1),
986 986 "linenumber": "% 6d" % (lineno + 1),
987 987 "revdate": f.date()}
988 988
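# Resolve the whitespace-related diff options for annotate and flatten the
# resulting object into a plain dict (keys from diffopts.defaults) so the
# template can read the individual flags.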
989 989 diffopts = webutil.difffeatureopts(web.req, web.repo.ui, 'annotate')
990 990 diffopts = {k: getattr(diffopts, k) for k in diffopts.defaults}
991 991
992 992 return web.sendtemplate(
993 993 'fileannotate',
994 994 file=f,
995 995 annotate=templateutil.mappinggenerator(annotate),
996 996 path=webutil.up(f),
997 997 symrev=webutil.symrevorshortnode(web.req, fctx),
998 998 rename=webutil.renamelink(fctx),
999 999 permissions=fctx.manifest().flags(f),
1000 1000 ishead=int(ishead),
1001 1001 diffopts=templateutil.hybriddict(diffopts),
1002 1002 **pycompat.strkwargs(webutil.commonentry(web.repo, fctx)))
1003 1003
1004 1004 @webcommand('filelog')
1005 1005 def filelog(web):
1006 1006 """
1007 1007 /filelog/{revision}/{path}
1008 1008 --------------------------
1009 1009
1010 1010 Show information about the history of a file in the repository.
1011 1011
1012 1012 The ``revcount`` query string argument can be defined to control the
1013 1013 maximum number of entries to show.
1014 1014
1015 1015 The ``filelog`` template will be rendered.
1016 1016 """
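# Illustrative request (hypothetical file name):
#   GET /filelog/tip/setup.py?revcount=30&patch=1
# 'patch' adds an inline diff per entry; 'linerange' and 'descend' (handled
# further down) restrict the log to a block of lines and optionally follow
# its descendants instead of its ancestors.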
1017 1017
1018 1018 try:
1019 1019 fctx = webutil.filectx(web.repo, web.req)
1020 1020 f = fctx.path()
1021 1021 fl = fctx.filelog()
1022 1022 except error.LookupError:
1023 1023 f = webutil.cleanpath(web.repo, web.req.qsparams['file'])
1024 1024 fl = web.repo.file(f)
1025 1025 numrevs = len(fl)
1026 1026 if not numrevs: # file doesn't exist at all
1027 1027 raise
1028 1028 rev = webutil.changectx(web.repo, web.req).rev()
1029 1029 first = fl.linkrev(0)
1030 1030 if rev < first: # current rev is from before file existed
1031 1031 raise
1032 1032 frev = numrevs - 1
1033 1033 while fl.linkrev(frev) > rev:
1034 1034 frev -= 1
1035 1035 fctx = web.repo.filectx(f, fl.linkrev(frev))
1036 1036
1037 1037 revcount = web.maxshortchanges
1038 1038 if 'revcount' in web.req.qsparams:
1039 1039 try:
1040 1040 revcount = int(web.req.qsparams.get('revcount', revcount))
1041 1041 revcount = max(revcount, 1)
1042 1042 web.tmpl.defaults['sessionvars']['revcount'] = revcount
1043 1043 except ValueError:
1044 1044 pass
1045 1045
1046 1046 lrange = webutil.linerange(web.req)
1047 1047
1048 1048 lessvars = copy.copy(web.tmpl.defaults['sessionvars'])
1049 1049 lessvars['revcount'] = max(revcount // 2, 1)
1050 1050 morevars = copy.copy(web.tmpl.defaults['sessionvars'])
1051 1051 morevars['revcount'] = revcount * 2
1052 1052
1053 1053 patch = 'patch' in web.req.qsparams
1054 1054 if patch:
1055 1055 lessvars['patch'] = morevars['patch'] = web.req.qsparams['patch']
1056 1056 descend = 'descend' in web.req.qsparams
1057 1057 if descend:
1058 1058 lessvars['descend'] = morevars['descend'] = web.req.qsparams['descend']
1059 1059
1060 1060 count = fctx.filerev() + 1
1061 1061 start = max(0, count - revcount) # first rev on this page
1062 1062 end = min(count, start + revcount) # last rev on this page
1063 1063 parity = paritygen(web.stripecount, offset=start - end)
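# e.g. a file with 25 revisions (filerev() == 24) and revcount=10 gives
# count=25, start=15, end=25: the ten newest file revisions on this page.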
1064 1064
1065 1065 repo = web.repo
1066 1066 filelog = fctx.filelog()
1067 1067 revs = [filerev for filerev in filelog.revs(start, end - 1)
1068 1068 if filelog.linkrev(filerev) in repo]
1069 1069 entries = []
1070 1070
1071 1071 diffstyle = web.config('web', 'style')
1072 1072 if 'style' in web.req.qsparams:
1073 1073 diffstyle = web.req.qsparams['style']
1074 1074
1075 1075 def diff(fctx, linerange=None):
1076 1076 ctx = fctx.changectx()
1077 1077 basectx = ctx.p1()
1078 1078 path = fctx.path()
1079 1079 return webutil.diffs(web, ctx, basectx, [path], diffstyle,
1080 1080 linerange=linerange,
1081 1081 lineidprefix='%s-' % ctx.hex()[:12])
1082 1082
1083 1083 linerange = None
1084 1084 if lrange is not None:
1085 1085 linerange = webutil.formatlinerange(*lrange)
1086 1086 # deactivate numeric nav links when linerange is specified as this
1087 1087 # would require a dedicated "revnav" class
1088 1088 nav = templateutil.mappinglist([])
1089 1089 if descend:
1090 1090 it = dagop.blockdescendants(fctx, *lrange)
1091 1091 else:
1092 1092 it = dagop.blockancestors(fctx, *lrange)
1093 1093 for i, (c, lr) in enumerate(it, 1):
1094 1094 diffs = None
1095 1095 if patch:
1096 1096 diffs = diff(c, linerange=lr)
1098 1098 # follow renames across filtered (not in range) revisions
1098 1098 path = c.path()
1099 1099 lm = webutil.commonentry(repo, c)
1100 1100 lm.update({
1101 1101 'parity': next(parity),
1102 1102 'filerev': c.rev(),
1103 1103 'file': path,
1104 1104 'diff': diffs,
1105 1105 'linerange': webutil.formatlinerange(*lr),
1106 1106 'rename': templateutil.mappinglist([]),
1107 1107 })
1108 1108 entries.append(lm)
1109 1109 if i == revcount:
1110 1110 break
1111 1111 lessvars['linerange'] = webutil.formatlinerange(*lrange)
1112 1112 morevars['linerange'] = lessvars['linerange']
1113 1113 else:
1114 1114 for i in revs:
1115 1115 iterfctx = fctx.filectx(i)
1116 1116 diffs = None
1117 1117 if patch:
1118 1118 diffs = diff(iterfctx)
1119 1119 lm = webutil.commonentry(repo, iterfctx)
1120 1120 lm.update({
1121 1121 'parity': next(parity),
1122 1122 'filerev': i,
1123 1123 'file': f,
1124 1124 'diff': diffs,
1125 1125 'rename': webutil.renamelink(iterfctx),
1126 1126 })
1127 1127 entries.append(lm)
1128 1128 entries.reverse()
1129 1129 revnav = webutil.filerevnav(web.repo, fctx.path())
1130 1130 nav = revnav.gen(end - 1, revcount, count)
1131 1131
1132 1132 latestentry = entries[:1]
1133 1133
1134 1134 return web.sendtemplate(
1135 1135 'filelog',
1136 1136 file=f,
1137 1137 nav=nav,
1138 1138 symrev=webutil.symrevorshortnode(web.req, fctx),
1139 1139 entries=templateutil.mappinglist(entries),
1140 1140 descend=descend,
1141 1141 patch=patch,
1142 1142 latestentry=templateutil.mappinglist(latestentry),
1143 1143 linerange=linerange,
1144 1144 revcount=revcount,
1145 1145 morevars=morevars,
1146 1146 lessvars=lessvars,
1147 1147 **pycompat.strkwargs(webutil.commonentry(web.repo, fctx)))
1148 1148
1149 1149 @webcommand('archive')
1150 1150 def archive(web):
1151 1151 """
1152 1152 /archive/{revision}.{format}[/{path}]
1153 1153 -------------------------------------
1154 1154
1155 1155 Obtain an archive of repository content.
1156 1156
1157 1157 The content and type of the archive is defined by a URL path parameter.
1158 1158 ``format`` is the file extension of the archive type to be generated,
1159 1159 e.g. ``zip`` or ``tar.bz2``. Not all archive types may be allowed by your
1160 1160 server configuration.
1161 1161
1162 1162 The optional ``path`` URL parameter controls the content included in the
1163 1163 archive. If omitted, every file in the specified revision is present in the
1164 1164 archive. If given, only the specified file or the contents of the specified
1165 1165 directory will be included in the archive.
1166 1166
1167 1167 No template is used for this handler. Raw, binary content is generated.
1168 1168 """
1169 1169
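# Illustrative requests (availability depends on web.allow-archive):
#   GET /archive/tip.tar.gz       -> the whole tree at tip
#   GET /archive/tip.zip/docs     -> only the (hypothetical) docs directory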
1170 1170 type_ = web.req.qsparams.get('type')
1171 1171 allowed = web.configlist("web", "allow-archive")
1172 1172 key = web.req.qsparams['node']
1173 1173
1174 1174 if type_ not in webutil.archivespecs:
1175 1175 msg = 'Unsupported archive type: %s' % stringutil.pprint(type_)
1176 1176 raise ErrorResponse(HTTP_NOT_FOUND, msg)
1177 1177
1178 1178 if not ((type_ in allowed or
1179 1179 web.configbool("web", "allow" + type_))):
1180 1180 msg = 'Archive type not allowed: %s' % type_
1181 1181 raise ErrorResponse(HTTP_FORBIDDEN, msg)
1182 1182
1183 1183 reponame = re.sub(br"\W+", "-", os.path.basename(web.reponame))
1184 1184 cnode = web.repo.lookup(key)
1185 1185 arch_version = key
1186 1186 if cnode == key or key == 'tip':
1187 1187 arch_version = short(cnode)
1188 1188 name = "%s-%s" % (reponame, arch_version)
1189 1189
1190 1190 ctx = webutil.changectx(web.repo, web.req)
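# Build a real matcher for the optional 'file' parameter; it is passed
# straight to archival.archive() below to limit what ends up in the archive.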
1191 1191 pats = []
1192 1192 match = scmutil.match(ctx, [])
1193 1193 file = web.req.qsparams.get('file')
1194 1194 if file:
1195 1195 pats = ['path:' + file]
1196 1196 match = scmutil.match(ctx, pats, default='path')
1197 1197 if pats:
1198 1198 files = [f for f in ctx.manifest().keys() if match(f)]
1199 1199 if not files:
1200 1200 raise ErrorResponse(HTTP_NOT_FOUND,
1201 1201 'file(s) not found: %s' % file)
1202 1202
1203 1203 mimetype, artype, extension, encoding = webutil.archivespecs[type_]
1204 1204
1205 1205 web.res.headers['Content-Type'] = mimetype
1206 1206 web.res.headers['Content-Disposition'] = 'attachment; filename=%s%s' % (
1207 1207 name, extension)
1208 1208
1209 1209 if encoding:
1210 1210 web.res.headers['Content-Encoding'] = encoding
1211 1211
1212 1212 web.res.setbodywillwrite()
1213 1213 if list(web.res.sendresponse()):
1214 1214 raise error.ProgrammingError('sendresponse() should not emit data '
1215 1215 'if writing later')
1216 1216
1217 1217 bodyfh = web.res.getbodyfile()
1218 1218
1219 archival.archive(web.repo, bodyfh, cnode, artype, prefix=name,
1220 matchfn=match,
1219 archival.archive(web.repo, bodyfh, cnode, artype, prefix=name, match=match,
1221 1220 subrepos=web.configbool("web", "archivesubrepos"))
1222 1221
1223 1222 return []
1224 1223
1225 1224 @webcommand('static')
1226 1225 def static(web):
1227 1226 fname = web.req.qsparams['file']
1228 1227 # a repo owner may set web.static in .hg/hgrc to get any file
1229 1228 # readable by the user running the CGI script
1230 1229 static = web.config("web", "static", untrusted=False)
1231 1230 if not static:
1232 1231 tp = web.templatepath or templater.templatepaths()
1233 1232 if isinstance(tp, str):
1234 1233 tp = [tp]
1235 1234 static = [os.path.join(p, 'static') for p in tp]
1236 1235
1237 1236 staticfile(static, fname, web.res)
1238 1237 return web.res.sendresponse()
1239 1238
1240 1239 @webcommand('graph')
1241 1240 def graph(web):
1242 1241 """
1243 1242 /graph[/{revision}]
1244 1243 -------------------
1245 1244
1246 1245 Show information about the graphical topology of the repository.
1247 1246
1248 1247 Information rendered by this handler can be used to create visual
1249 1248 representations of repository topology.
1250 1249
1251 1250 The ``revision`` URL parameter controls the starting changeset. If it's
1252 1251 absent, the default is ``tip``.
1253 1252
1254 1253 The ``revcount`` query string argument can define the number of changesets
1255 1254 to show information for.
1256 1255
1257 1256 The ``graphtop`` query string argument can specify the starting changeset
1258 1257 for producing the ``jsdata`` variable that is used for rendering the graph in
1259 1258 JavaScript. By default it has the same value as ``revision``.
1260 1259
1261 1260 This handler will render the ``graph`` template.
1262 1261 """
1263 1262
1264 1263 if 'node' in web.req.qsparams:
1265 1264 ctx = webutil.changectx(web.repo, web.req)
1266 1265 symrev = webutil.symrevorshortnode(web.req, ctx)
1267 1266 else:
1268 1267 ctx = web.repo['tip']
1269 1268 symrev = 'tip'
1270 1269 rev = ctx.rev()
1271 1270
1272 1271 bg_height = 39
1273 1272 revcount = web.maxshortchanges
1274 1273 if 'revcount' in web.req.qsparams:
1275 1274 try:
1276 1275 revcount = int(web.req.qsparams.get('revcount', revcount))
1277 1276 revcount = max(revcount, 1)
1278 1277 web.tmpl.defaults['sessionvars']['revcount'] = revcount
1279 1278 except ValueError:
1280 1279 pass
1281 1280
1282 1281 lessvars = copy.copy(web.tmpl.defaults['sessionvars'])
1283 1282 lessvars['revcount'] = max(revcount // 2, 1)
1284 1283 morevars = copy.copy(web.tmpl.defaults['sessionvars'])
1285 1284 morevars['revcount'] = revcount * 2
1286 1285
1287 1286 graphtop = web.req.qsparams.get('graphtop', ctx.hex())
1288 1287 graphvars = copy.copy(web.tmpl.defaults['sessionvars'])
1289 1288 graphvars['graphtop'] = graphtop
1290 1289
1291 1290 count = len(web.repo)
1292 1291 pos = rev
1293 1292
1294 1293 uprev = min(max(0, count - 1), rev + revcount)
1295 1294 downrev = max(0, rev - revcount)
1296 1295 changenav = webutil.revnav(web.repo).gen(pos, revcount, count)
1297 1296
1298 1297 tree = []
1299 1298 nextentry = []
1300 1299 lastrev = 0
1301 1300 if pos != -1:
1302 1301 allrevs = web.repo.changelog.revs(pos, 0)
1303 1302 revs = []
1304 1303 for i in allrevs:
1305 1304 revs.append(i)
1306 1305 if len(revs) >= revcount + 1:
1307 1306 break
1308 1307
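# One more rev than requested was collected above; the extra one only
# seeds the "next page" entry and is dropped before the graph is drawn.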
1309 1308 if len(revs) > revcount:
1310 1309 nextentry = [webutil.commonentry(web.repo, web.repo[revs[-1]])]
1311 1310 revs = revs[:-1]
1312 1311
1313 1312 lastrev = revs[-1]
1314 1313
1315 1314 # We have to feed a baseset to dagwalker as it is expecting a smartset
1316 1315 # object. This does not have a big impact on hgweb performance itself
1317 1316 # since hgweb graphing code is not itself lazy yet.
1318 1317 dag = graphmod.dagwalker(web.repo, smartset.baseset(revs))
1319 1318 # As we said one line above... not lazy.
1320 1319 tree = list(item for item in graphmod.colored(dag, web.repo)
1321 1320 if item[1] == graphmod.CHANGESET)
1322 1321
1323 1322 def fulltree():
1324 1323 pos = web.repo[graphtop].rev()
1325 1324 tree = []
1326 1325 if pos != -1:
1327 1326 revs = web.repo.changelog.revs(pos, lastrev)
1328 1327 dag = graphmod.dagwalker(web.repo, smartset.baseset(revs))
1329 1328 tree = list(item for item in graphmod.colored(dag, web.repo)
1330 1329 if item[1] == graphmod.CHANGESET)
1331 1330 return tree
1332 1331
1333 1332 def jsdata(context):
1334 1333 for (id, type, ctx, vtx, edges) in fulltree():
1335 1334 yield {'node': pycompat.bytestr(ctx),
1336 1335 'graphnode': webutil.getgraphnode(web.repo, ctx),
1337 1336 'vertex': vtx,
1338 1337 'edges': edges}
1339 1338
1340 1339 def nodes(context):
1341 1340 parity = paritygen(web.stripecount)
1342 1341 for row, (id, type, ctx, vtx, edges) in enumerate(tree):
1343 1342 entry = webutil.commonentry(web.repo, ctx)
1344 1343 edgedata = [{'col': edge[0],
1345 1344 'nextcol': edge[1],
1346 1345 'color': (edge[2] - 1) % 6 + 1,
1347 1346 'width': edge[3],
1348 1347 'bcolor': edge[4]}
1349 1348 for edge in edges]
1350 1349
1351 1350 entry.update({'col': vtx[0],
1352 1351 'color': (vtx[1] - 1) % 6 + 1,
1353 1352 'parity': next(parity),
1354 1353 'edges': templateutil.mappinglist(edgedata),
1355 1354 'row': row,
1356 1355 'nextrow': row + 1})
1357 1356
1358 1357 yield entry
1359 1358
1360 1359 rows = len(tree)
1361 1360
1362 1361 return web.sendtemplate(
1363 1362 'graph',
1364 1363 rev=rev,
1365 1364 symrev=symrev,
1366 1365 revcount=revcount,
1367 1366 uprev=uprev,
1368 1367 lessvars=lessvars,
1369 1368 morevars=morevars,
1370 1369 downrev=downrev,
1371 1370 graphvars=graphvars,
1372 1371 rows=rows,
1373 1372 bg_height=bg_height,
1374 1373 changesets=count,
1375 1374 nextentry=templateutil.mappinglist(nextentry),
1376 1375 jsdata=templateutil.mappinggenerator(jsdata),
1377 1376 nodes=templateutil.mappinggenerator(nodes),
1378 1377 node=ctx.hex(),
1379 1378 archives=web.archivelist('tip'),
1380 1379 changenav=changenav)
1381 1380
1382 1381 def _getdoc(e):
1383 1382 doc = e[0].__doc__
1384 1383 if doc:
1385 1384 doc = _(doc).partition('\n')[0]
1386 1385 else:
1387 1386 doc = _('(no help text available)')
1388 1387 return doc
1389 1388
1390 1389 @webcommand('help')
1391 1390 def help(web):
1392 1391 """
1393 1392 /help[/{topic}]
1394 1393 ---------------
1395 1394
1396 1395 Render help documentation.
1397 1396
1398 1397 This web command is roughly equivalent to :hg:`help`. If a ``topic``
1399 1398 is defined, that help topic will be rendered. If not, an index of
1400 1399 available help topics will be rendered.
1401 1400
1402 1401 The ``help`` template will be rendered when requesting help for a topic.
1403 1402 ``helptopics`` will be rendered for the index of help topics.
1404 1403 """
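# Illustrative requests: GET /help for the topic index, GET /help/revisions
# for a single topic, and GET /help/internals.changegroups for a dotted
# sub-topic (assuming stock Mercurial's "internals" topic; the dotted form
# is split apart further down).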
1405 1404 from .. import commands, help as helpmod # avoid cycle
1406 1405
1407 1406 topicname = web.req.qsparams.get('node')
1408 1407 if not topicname:
1409 1408 def topics(context):
1410 1409 for h in helpmod.helptable:
1411 1410 entries, summary, _doc = h[0:3]
1412 1411 yield {'topic': entries[0], 'summary': summary}
1413 1412
1414 1413 early, other = [], []
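# Command table keys look like 'name|alias1|alias2'; primary() keeps only
# the name before the first '|'.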
1415 1414 primary = lambda s: s.partition('|')[0]
1416 1415 for c, e in commands.table.iteritems():
1417 1416 doc = _getdoc(e)
1418 1417 if 'DEPRECATED' in doc or c.startswith('debug'):
1419 1418 continue
1420 1419 cmd = primary(c)
1421 1420 if getattr(e[0], 'helpbasic', False):
1422 1421 early.append((cmd, doc))
1423 1422 else:
1424 1423 other.append((cmd, doc))
1425 1424
1426 1425 early.sort()
1427 1426 other.sort()
1428 1427
1429 1428 def earlycommands(context):
1430 1429 for c, doc in early:
1431 1430 yield {'topic': c, 'summary': doc}
1432 1431
1433 1432 def othercommands(context):
1434 1433 for c, doc in other:
1435 1434 yield {'topic': c, 'summary': doc}
1436 1435
1437 1436 return web.sendtemplate(
1438 1437 'helptopics',
1439 1438 topics=templateutil.mappinggenerator(topics),
1440 1439 earlycommands=templateutil.mappinggenerator(earlycommands),
1441 1440 othercommands=templateutil.mappinggenerator(othercommands),
1442 1441 title='Index')
1443 1442
1444 1443 # Render an index of sub-topics.
1445 1444 if topicname in helpmod.subtopics:
1446 1445 topics = []
1447 1446 for entries, summary, _doc in helpmod.subtopics[topicname]:
1448 1447 topics.append({
1449 1448 'topic': '%s.%s' % (topicname, entries[0]),
1450 1449 'basename': entries[0],
1451 1450 'summary': summary,
1452 1451 })
1453 1452
1454 1453 return web.sendtemplate(
1455 1454 'helptopics',
1456 1455 topics=templateutil.mappinglist(topics),
1457 1456 title=topicname,
1458 1457 subindex=True)
1459 1458
1460 1459 u = webutil.wsgiui.load()
1461 1460 u.verbose = True
1462 1461
1463 1462 # Render a page from a sub-topic.
1464 1463 if '.' in topicname:
1465 1464 # TODO implement support for rendering sections, like
1466 1465 # `hg help` works.
1467 1466 topic, subtopic = topicname.split('.', 1)
1468 1467 if topic not in helpmod.subtopics:
1469 1468 raise ErrorResponse(HTTP_NOT_FOUND)
1470 1469 else:
1471 1470 topic = topicname
1472 1471 subtopic = None
1473 1472
1474 1473 try:
1475 1474 doc = helpmod.help_(u, commands, topic, subtopic=subtopic)
1476 1475 except error.Abort:
1477 1476 raise ErrorResponse(HTTP_NOT_FOUND)
1478 1477
1479 1478 return web.sendtemplate(
1480 1479 'help',
1481 1480 topic=topicname,
1482 1481 doc=doc)
1483 1482
1484 1483 # tell hggettext to extract docstrings from these functions:
1485 1484 i18nfunctions = commands.values()