##// END OF EJS Templates
cleanup: remove unnecessary list constructor calls around list comprehensions
Manuel Jacob -
r52263:0d414fb8 default
parent child Browse files
Show More
@@ -1,1269 +1,1269 b''
1 # githelp.py - Try to map Git commands to Mercurial equivalents.
1 # githelp.py - Try to map Git commands to Mercurial equivalents.
2 #
2 #
3 # Copyright 2013 Facebook, Inc.
3 # Copyright 2013 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """try mapping git commands to Mercurial commands
7 """try mapping git commands to Mercurial commands
8
8
9 Tries to map a given git command to a Mercurial command:
9 Tries to map a given git command to a Mercurial command:
10
10
11 $ hg githelp -- git checkout master
11 $ hg githelp -- git checkout master
12 hg update master
12 hg update master
13
13
14 If an unknown command or parameter combination is detected, an error is
14 If an unknown command or parameter combination is detected, an error is
15 produced.
15 produced.
16 """
16 """
17
17
18
18
19 import getopt
19 import getopt
20 import re
20 import re
21
21
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23 from mercurial import (
23 from mercurial import (
24 encoding,
24 encoding,
25 error,
25 error,
26 fancyopts,
26 fancyopts,
27 pycompat,
27 pycompat,
28 registrar,
28 registrar,
29 scmutil,
29 scmutil,
30 )
30 )
31 from mercurial.utils import procutil
31 from mercurial.utils import procutil
32
32
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

# Command-registration boilerplate: `command` is a decorator that records
# each decorated function in `cmdtable`, which Mercurial's dispatcher reads.
cmdtable = {}
command = registrar.command(cmdtable)
41
41
42
42
def convert(s):
    """Translate a git revision spelling into its Mercurial equivalent.

    Strips a leading b"origin/", rewrites HEAD -> "." and a trailing
    "~" -> "~1"; anything else passes through unchanged.
    """
    prefix = b"origin/"
    if s.startswith(prefix):
        return s[len(prefix):]
    if b'HEAD' not in s:
        return s
    # HEAD is "." in Mercurial; a bare trailing "~" means "~1" there.
    return re.sub(b'~$', b'~1', s.replace(b'HEAD', b'.'))
51
51
52
52
@command(
    b'githelp|git',
    [],
    _(b'hg githelp'),
    helpcategory=command.CATEGORY_HELP,
    helpbasic=True,
)
def githelp(ui, repo, *args, **kwargs):
    """suggests the Mercurial equivalent of the given git command

    Usage: hg githelp -- <git command>
    """

    # Nothing after "--", or just a literal "git": nothing to translate.
    if len(args) == 0 or (len(args) == 1 and args[0] == b'git'):
        raise error.Abort(
            _(b'missing git command - usage: hg githelp -- <git command>')
        )

    # Tolerate users typing "hg githelp -- git checkout ...".
    if args[0] == b'git':
        args = args[1:]

    cmd = args[0]
    # idiom fix: "cmd not in" instead of "not cmd in"
    if cmd not in gitcommands:
        raise error.Abort(_(b"error: unknown git command %s") % cmd)

    ui.pager(b'githelp')
    # Hand the remaining arguments to the per-command translator.
    args = args[1:]
    return gitcommands[cmd](ui, repo, *args, **kwargs)
81
81
82
82
def parseoptions(ui, cmdoptions, args):
    """Parse git-style arguments against ``cmdoptions``, tolerating unknowns.

    Returns ``(args, opts)`` where both positional arguments and bytes
    option values have been run through :func:`convert`.  Unknown options
    are warned about and dropped; an unknown option packed together with
    other short flags (e.g. ``-xy``) aborts with a hint, as does a flag
    missing its required argument.
    """
    cmdoptions = list(cmdoptions)
    opts = {}
    args = list(args)
    while True:
        try:
            args = fancyopts.fancyopts(list(args), cmdoptions, opts, True)
            break
        except getopt.GetoptError as ex:
            if "requires argument" in ex.msg:
                raise
            # Work out how the offending flag was spelled on the command
            # line so it can be removed and parsing retried.
            if ('--' + ex.opt) in ex.msg:
                flag = b'--' + pycompat.bytestr(ex.opt)
            elif ('-' + ex.opt) in ex.msg:
                flag = b'-' + pycompat.bytestr(ex.opt)
            else:
                raise error.Abort(
                    _(b"unknown option %s") % pycompat.bytestr(ex.opt)
                )
            try:
                args.remove(flag)
            except Exception:
                # The flag was bundled with other short options, so it
                # cannot be removed in isolation.
                msg = _(b"unknown option '%s' packed with other options")
                hint = _(b"please try passing the option as its own flag: -%s")
                raise error.Abort(
                    msg % pycompat.bytestr(ex.opt),
                    hint=hint % pycompat.bytestr(ex.opt),
                )

            ui.warn(_(b"ignoring unknown option %s\n") % flag)

    args = [convert(x) for x in args]
    # cleanup: dict comprehension instead of dict() around a list
    # comprehension (same style as the args line above).
    opts = {
        k: convert(v) if isinstance(v, bytes) else v
        for k, v in opts.items()
    }

    return args, opts
123
123
124
124
class Command:
    """Builder for a suggested ``hg ...`` command line.

    Accumulates positional arguments and (repeatable) options, and
    renders the final bytestring via ``__bytes__``.
    """

    def __init__(self, name):
        # `name` may itself carry flags, e.g. b'commit --interactive'.
        self.name = name
        self.args = []
        self.opts = {}

    def __bytes__(self):
        cmd = b"hg " + self.name
        if self.opts:
            # Sort for deterministic output regardless of insertion order.
            for k, values in sorted(self.opts.items()):
                for v in values:
                    if v:
                        if isinstance(v, int):
                            fmt = b' %s %d'
                        else:
                            fmt = b' %s %s'

                        cmd += fmt % (k, v)
                    else:
                        # Valueless (boolean) flag.
                        cmd += b" %s" % (k,)
        if self.args:
            cmd += b" "
            cmd += b" ".join(self.args)
        return cmd

    __str__ = encoding.strmethod(__bytes__)

    def append(self, value):
        # Add a single positional argument.
        self.args.append(value)

    def extend(self, values):
        # Add several positional arguments.
        self.args.extend(values)

    def __setitem__(self, key, value):
        # Options are repeatable: cmd[b'-r'] = v appends, never overwrites.
        values = self.opts.setdefault(key, [])
        values.append(value)

    def __and__(self, other):
        # cmd1 & cmd2 renders as "cmd1 && cmd2".
        return AndCommand(self, other)
164
164
165
165
class AndCommand:
    """Two suggested commands joined with a shell ``&&``."""

    def __init__(self, left, right):
        self.left = left
        self.right = right

    def __bytes__(self):
        # bytes %s formatting (PEP 461) calls __bytes__ on each operand,
        # so both Command and nested AndCommand operands render correctly.
        return b"%s && %s" % (self.left, self.right)

    # Fix: only a bytes-returning __str__ was defined, so bytes(cmd)
    # raised TypeError for combined commands even though every caller
    # renders suggestions with bytes(cmd).  Keep the historical name as
    # an alias for backward compatibility.
    __str__ = __bytes__

    def __and__(self, other):
        return AndCommand(self, other)
176
176
177
177
def add(ui, repo, *args, **kwargs):
    """Suggest the hg equivalent of ``git add``."""
    cmdoptions = [
        (b'A', b'all', None, b''),
        (b'p', b'patch', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    if opts.get(b'patch'):
        ui.status(
            _(
                b"note: Mercurial will commit when complete, "
                b"as there is no staging area in Mercurial\n\n"
            )
        )
        cmd = Command(b'commit --interactive')
    else:
        cmd = Command(b"add")

    if opts.get(b'all'):
        ui.status(
            _(
                b"note: use hg addremove to remove files that have "
                b"been deleted\n\n"
            )
        )
    else:
        cmd.extend(args)

    ui.status(bytes(cmd), b"\n")
207
207
208
208
def am(ui, repo, *args, **kwargs):
    """Suggest the hg equivalent of ``git am``: plain ``hg import``."""
    parseoptions(ui, [], args)
    ui.status(bytes(Command(b'import')), b"\n")
214
214
215
215
def apply(ui, repo, *args, **kwargs):
    """Suggest the hg equivalent of ``git apply``."""
    cmdoptions = [
        (b'p', b'p', int, b''),
        (b'', b'directory', b'', b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    importcmd = Command(b'import --no-commit')
    strip = opts.get(b'p')
    if strip:
        importcmd[b'-p'] = strip
    prefix = opts.get(b'directory')
    if prefix:
        importcmd[b'--prefix'] = prefix
    importcmd.extend(args)

    ui.status(bytes(importcmd), b"\n")
231
231
232
232
def bisect(ui, repo, *args, **kwargs):
    """``git bisect`` has no single-command mapping; point at the hg help."""
    ui.status(_(b"see 'hg help bisect' for how to use bisect\n\n"))
235
235
236
236
def blame(ui, repo, *args, **kwargs):
    """Suggest the hg equivalent of ``git blame``: ``hg annotate -udl``."""
    args, opts = parseoptions(ui, [], args)
    annotate = Command(b'annotate -udl')
    annotate.extend([convert(path) for path in args])
    ui.status(bytes(annotate), b"\n")
243
243
244
244
def branch(ui, repo, *args, **kwargs):
    """Suggest hg bookmark/strip equivalents of ``git branch``.

    -d/-D map to ``hg strip -B`` (delete), -m/-M to ``hg bookmark -m``
    (rename); the upstream-tracking options have no hg equivalent.
    """
    cmdoptions = [
        (b'', b'set-upstream', None, b''),
        (b'', b'set-upstream-to', b'', b''),
        (b'd', b'delete', None, b''),
        (b'D', b'delete', None, b''),
        (b'm', b'move', None, b''),
        (b'M', b'move', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b"bookmark")

    if opts.get(b'set_upstream') or opts.get(b'set_upstream_to'):
        ui.status(_(b"Mercurial has no concept of upstream branches\n"))
        return
    elif opts.get(b'delete'):
        cmd = Command(b"strip")
        for branch in args:
            cmd[b'-B'] = branch
        # NOTE(review): this is a for/else — with no `break` in the loop
        # the else clause always runs, adding a valueless -B even when
        # branches were named.  Looks intentional only for the empty-args
        # case; confirm against test-githelp.t before changing.
        else:
            cmd[b'-B'] = None
    elif opts.get(b'move'):
        if len(args) > 0:
            if len(args) > 1:
                old = args.pop(0)
            else:
                # shell command to output the active bookmark for the active
                # revision
                old = b'`hg log -T"{activebookmark}" -r .`'
        else:
            raise error.Abort(_(b'missing newbranch argument'))
        new = args[0]
        cmd[b'-m'] = old
        cmd.append(new)
    else:
        # Creation: optional second argument is the revision to bookmark.
        if len(args) > 1:
            cmd[b'-r'] = args[1]
            cmd.append(args[0])
        elif len(args) == 1:
            cmd.append(args[0])
    ui.status((bytes(cmd)), b"\n")
287
287
288
288
def ispath(repo, string):
    """
    The first argument to git checkout can either be a revision or a path. Let's
    generally assume it's a revision, unless it's obviously a path. There are
    too many ways to spell revisions in git for us to reasonably catch all of
    them, so let's be conservative.
    """
    if scmutil.isrevsymbol(repo, string):
        # A definite revision wins outright, even over a same-named file.
        return False

    cwd = repo.getcwd()
    repopath = string if cwd == b'' else cwd + b'/' + string

    # Present in the working directory?
    if repo.wvfs.exists(repopath):
        return True

    # Otherwise: tracked file or directory in the parent revision?
    manifest = repo[b'.'].manifest()
    return (repopath in manifest) or manifest.hasdir(repopath)
316
316
317
317
def checkout(ui, repo, *args, **kwargs):
    """Suggest hg update/revert/bookmark equivalents of ``git checkout``."""
    cmdoptions = [
        (b'b', b'branch', b'', b''),
        (b'B', b'branch', b'', b''),
        (b'f', b'force', None, b''),
        (b'p', b'patch', None, b''),
    ]
    paths = []
    # Everything after a literal "--" is a path, never a revision.
    if b'--' in args:
        sepindex = args.index(b'--')
        paths.extend(args[sepindex + 1 :])
        args = args[:sepindex]

    args, opts = parseoptions(ui, cmdoptions, args)

    rev = None
    # First positional argument is a revision unless it names a file.
    if args and ispath(repo, args[0]):
        paths = args + paths
    elif args:
        rev = args[0]
        paths = args[1:] + paths

    cmd = Command(b'update')

    if opts.get(b'force'):
        if paths or rev:
            cmd[b'-C'] = None

    if opts.get(b'patch'):
        cmd = Command(b'revert')
        cmd[b'-i'] = None

    if opts.get(b'branch'):
        if len(args) == 0:
            # -b with no start point: just create the bookmark.
            cmd = Command(b'bookmark')
            cmd.append(opts.get(b'branch'))
        else:
            # -b with a start point: update there, then bookmark.
            cmd.append(args[0])
            bookcmd = Command(b'bookmark')
            bookcmd.append(opts.get(b'branch'))
            cmd = cmd & bookcmd
    # if there is any path argument supplied, use revert instead of update
    elif len(paths) > 0:
        ui.status(_(b"note: use --no-backup to avoid creating .orig files\n\n"))
        cmd = Command(b'revert')
        if opts.get(b'patch'):
            cmd[b'-i'] = None
        if rev:
            cmd[b'-r'] = rev
        cmd.extend(paths)
    elif rev:
        if opts.get(b'patch'):
            cmd[b'-r'] = rev
        else:
            cmd.append(rev)
    elif opts.get(b'force'):
        cmd = Command(b'revert')
        cmd[b'--all'] = None
    else:
        raise error.Abort(_(b"a commit must be specified"))

    ui.status((bytes(cmd)), b"\n")
380
380
381
381
def cherrypick(ui, repo, *args, **kwargs):
    """Suggest the hg equivalent of ``git cherry-pick``: ``hg graft``."""
    cmdoptions = [
        (b'', b'continue', None, b''),
        (b'', b'abort', None, b''),
        (b'e', b'edit', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    graft = Command(b'graft')

    if opts.get(b'edit'):
        graft[b'--edit'] = None
    if opts.get(b'continue'):
        graft[b'--continue'] = None
    elif opts.get(b'abort'):
        # graft has no --abort; say so and emit nothing.
        ui.status(_(b"note: hg graft does not have --abort\n\n"))
        return
    else:
        graft.extend(args)

    ui.status(bytes(graft), b"\n")
403
403
404
404
def clean(ui, repo, *args, **kwargs):
    """Suggest the hg equivalent of ``git clean``: ``hg purge``."""
    cmdoptions = [
        (b'd', b'd', None, b''),
        (b'f', b'force', None, b''),
        (b'x', b'x', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    purge = Command(b'purge')
    # git clean -x also removes ignored files; purge spells that --all.
    if opts.get(b'x'):
        purge[b'--all'] = None
    purge.extend(args)

    ui.status(bytes(purge), b"\n")
419
419
420
420
def clone(ui, repo, *args, **kwargs):
    """Suggest the hg equivalent of ``git clone``."""
    cmdoptions = [
        (b'', b'bare', None, b''),
        (b'n', b'no-checkout', None, b''),
        (b'b', b'branch', b'', b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    if len(args) == 0:
        raise error.Abort(_(b"a repository to clone must be specified"))

    cmd = Command(b'clone')
    cmd.append(args[0])
    # Optional second argument: destination directory.
    if len(args) > 1:
        cmd.append(args[1])

    if opts.get(b'bare'):
        # Closest hg analogue to a bare clone: no working copy checkout.
        cmd[b'-U'] = None
        ui.status(
            _(
                b"note: Mercurial does not have bare clones. "
                b"-U will clone the repo without checking out a commit\n\n"
            )
        )
    elif opts.get(b'no_checkout'):
        cmd[b'-U'] = None

    if opts.get(b'branch'):
        # No per-branch clone; clone everything then update to the branch.
        cocmd = Command(b"update")
        cocmd.append(opts.get(b'branch'))
        cmd = cmd & cocmd

    ui.status((bytes(cmd)), b"\n")
454
454
455
455
def commit(ui, repo, *args, **kwargs):
    """Suggest the hg equivalent of ``git commit``."""
    cmdoptions = [
        (b'a', b'all', None, b''),
        (b'm', b'message', b'', b''),
        (b'p', b'patch', None, b''),
        (b'C', b'reuse-message', b'', b''),
        (b'F', b'file', b'', b''),
        (b'', b'author', b'', b''),
        (b'', b'date', b'', b''),
        (b'', b'amend', None, b''),
        (b'', b'no-edit', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b'commit')
    if opts.get(b'patch'):
        # git commit -p: interactive hunk selection.
        cmd = Command(b'commit --interactive')

    if opts.get(b'amend'):
        if opts.get(b'no_edit'):
            cmd = Command(b'amend')
        else:
            cmd[b'--amend'] = None

    if opts.get(b'reuse_message'):
        cmd[b'-M'] = opts.get(b'reuse_message')

    if opts.get(b'message'):
        # Quote the message for copy-paste into a shell.
        cmd[b'-m'] = b"'%s'" % (opts.get(b'message'),)

    if opts.get(b'all'):
        ui.status(
            _(
                b"note: Mercurial doesn't have a staging area, "
                b"so there is no --all. -A will add and remove files "
                b"for you though.\n\n"
            )
        )

    if opts.get(b'file'):
        cmd[b'-l'] = opts.get(b'file')

    if opts.get(b'author'):
        cmd[b'-u'] = opts.get(b'author')

    if opts.get(b'date'):
        cmd[b'-d'] = opts.get(b'date')

    cmd.extend(args)

    ui.status((bytes(cmd)), b"\n")
507
507
508
508
def deprecated(ui, repo, *args, **kwargs):
    """Shared handler for git commands deprecated by the git project."""
    ui.warn(
        _(
            b'this command has been deprecated in the git project, '
            b'thus isn\'t supported by this tool\n\n'
        )
    )
516
516
517
517
def diff(ui, repo, *args, **kwargs):
    """Suggest the hg equivalent of ``git diff``."""
    cmdoptions = [
        (b'a', b'all', None, b''),
        (b'', b'cached', None, b''),
        (b'R', b'reverse', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b'diff')

    if opts.get(b'cached'):
        ui.status(
            _(
                b'note: Mercurial has no concept of a staging area, '
                b'so --cached does nothing\n\n'
            )
        )

    if opts.get(b'reverse'):
        cmd[b'--reverse'] = None

    # Classify each remaining argument: anything repo.revs() accepts is a
    # revision (-> -r), everything else is a path.  Iterate over a copy
    # because args is drained as we go.
    for a in list(args):
        args.remove(a)
        try:
            repo.revs(a)
            cmd[b'-r'] = a
        except Exception:
            cmd.append(a)

    ui.status((bytes(cmd)), b"\n")
548
548
549
549
def difftool(ui, repo, *args, **kwargs):
    """``git difftool`` maps to the extdiff extension; explain how to enable it."""
    ui.status(
        _(
            b'Mercurial does not enable external difftool by default. You '
            b'need to enable the extdiff extension in your .hgrc file by adding\n'
            b'extdiff =\n'
            b'to the [extensions] section and then running\n\n'
            b'hg extdiff -p <program>\n\n'
            b'See \'hg help extdiff\' and \'hg help -e extdiff\' for more '
            b'information.\n'
        )
    )
562
562
563
563
def fetch(ui, repo, *args, **kwargs):
    """Suggest the hg equivalent of ``git fetch``: ``hg pull``."""
    cmdoptions = [
        (b'', b'all', None, b''),
        (b'f', b'force', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    pull = Command(b'pull')

    if args:
        pull.append(args[0])
        refspecs = args[1:]
        if refspecs:
            ui.status(
                _(
                    b"note: Mercurial doesn't have refspecs. "
                    b"-r can be used to specify which commits you want to "
                    b"pull. -B can be used to specify which bookmark you "
                    b"want to pull.\n\n"
                )
            )
            # Known bookmarks pull via -B; anything else is a revision.
            for ref in refspecs:
                if ref in repo._bookmarks:
                    pull[b'-B'] = ref
                else:
                    pull[b'-r'] = ref

    ui.status(bytes(pull), b"\n")
591
591
592
592
def grep(ui, repo, *args, **kwargs):
    """Suggest the hg equivalent of ``git grep``."""
    args, opts = parseoptions(ui, [], args)

    grepcmd = Command(b'grep')
    # For basic usage the CLIs agree: pattern first, then paths.
    grepcmd.extend(args)

    ui.status(bytes(grepcmd), b"\n")
604
604
605
605
def init(ui, repo, *args, **kwargs):
    """Suggest the hg equivalent of ``git init``."""
    args, opts = parseoptions(ui, [], args)

    initcmd = Command(b'init')
    # Optional target directory.
    if args:
        initcmd.append(args[0])

    ui.status(bytes(initcmd), b"\n")
616
616
617
617
def log(ui, repo, *args, **kwargs):
    """Map ``git log`` invocations onto ``hg log`` (or ``hg grep --diff``).

    Prints the suggested Mercurial command on *ui*; returns nothing.
    -G/-S (pickaxe) searches are routed to ``hg grep --diff`` instead.
    """
    cmdoptions = [
        (b'', b'follow', None, b''),
        (b'', b'decorate', None, b''),
        (b'n', b'number', b'', b''),
        (b'1', b'1', None, b''),
        (b'', b'pretty', b'', b''),
        (b'', b'format', b'', b''),
        (b'', b'oneline', None, b''),
        (b'', b'stat', None, b''),
        (b'', b'graph', None, b''),
        (b'p', b'patch', None, b''),
        (b'G', b'grep-diff', b'', b''),
        (b'S', b'pickaxe-regex', b'', b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)
    grep_pat = opts.get(b'grep_diff') or opts.get(b'pickaxe_regex')
    if grep_pat:
        # git log -G/-S searches diffs; the hg equivalent is grep --diff.
        cmd = Command(b'grep')
        cmd[b'--diff'] = grep_pat
        ui.status(b'%s\n' % bytes(cmd))
        return

    ui.status(
        _(
            b'note: -v prints the entire commit message like Git does. To '
            b'print just the first line, drop the -v.\n\n'
        )
    )
    ui.status(
        _(
            b"note: see hg help revset for information on how to filter "
            b"log output\n\n"
        )
    )

    cmd = Command(b'log')
    cmd[b'-v'] = None

    if opts.get(b'number'):
        cmd[b'-l'] = opts.get(b'number')
    if opts.get(b'1'):
        cmd[b'-l'] = b'1'
    if opts.get(b'stat'):
        cmd[b'--stat'] = None
    if opts.get(b'graph'):
        cmd[b'-G'] = None
    if opts.get(b'patch'):
        cmd[b'-p'] = None

    if opts.get(b'pretty') or opts.get(b'format') or opts.get(b'oneline'):
        # 'fmt' rather than 'format' to avoid shadowing the builtin.
        fmt = opts.get(b'format', b'')
        if b'format:' in fmt:
            ui.status(
                _(
                    b"note: --format format:??? equates to Mercurial's "
                    b"--template. See hg help templates for more info.\n\n"
                )
            )
            cmd[b'--template'] = b'???'
        else:
            ui.status(
                _(
                    b"note: --pretty/format/oneline equate to Mercurial's "
                    b"--style or --template. See hg help templates for "
                    b"more info.\n\n"
                )
            )
            cmd[b'--style'] = b'???'

    if len(args) > 0:
        if b'..' in args[0]:
            # git's A..B range becomes the revset 'A::B'.
            since, until = args[0].split(b'..')
            cmd[b'-r'] = b"'%s::%s'" % (since, until)
            del args[0]
        cmd.extend(args)

    ui.status((bytes(cmd)), b"\n")
696
696
697
697
def lsfiles(ui, repo, *args, **kwargs):
    """Map ``git ls-files`` onto ``hg status`` or ``hg files``."""
    cmdoptions = [
        (b'c', b'cached', None, b''),
        (b'd', b'deleted', None, b''),
        (b'm', b'modified', None, b''),
        (b'o', b'others', None, b''),
        (b'i', b'ignored', None, b''),
        (b's', b'stage', None, b''),
        (b'z', b'_zero', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    # Filtering options only exist on 'hg status'; plain listings use
    # 'hg files'.
    statusflags = (
        (b'deleted', b'-d'),
        (b'modified', b'-m'),
        (b'others', b'-o'),
        (b'ignored', b'-i'),
    )
    if any(opts.get(name) for name, _unused in statusflags):
        cmd = Command(b'status')
        for name, flag in statusflags:
            if opts.get(name):
                cmd[flag] = None
    else:
        cmd = Command(b'files')
    if opts.get(b'stage'):
        ui.status(
            _(
                b"note: Mercurial doesn't have a staging area, ignoring "
                b"--stage\n"
            )
        )
    if opts.get(b'_zero'):
        cmd[b'-0'] = None
    cmd.append(b'.')
    for include in args:
        cmd[b'-I'] = procutil.shellquote(include)

    ui.status(bytes(cmd), b"\n")
741
741
742
742
def merge(ui, repo, *args, **kwargs):
    """Map ``git merge`` onto ``hg merge``."""
    args, opts = parseoptions(ui, [], args)

    cmd = Command(b'merge')
    if args:
        # git merges the last named ref; hg takes it as the sole argument.
        cmd.append(args[-1])

    ui.status(bytes(cmd), b"\n")
753
753
754
754
def mergebase(ui, repo, *args, **kwargs):
    """Map ``git merge-base A B`` onto an ``hg log`` over ``ancestor()``."""
    args, opts = parseoptions(ui, [], args)

    # Fall back to placeholder revisions when not given exactly two.
    left, right = (args[0], args[1]) if len(args) == 2 else (b'A', b'B')

    cmd = Command(
        b"log -T '{node}\\n' -r 'ancestor(%s,%s)'" % (left, right)
    )

    ui.status(
        _(b'note: ancestors() is part of the revset language\n'),
        _(b"(learn more about revsets with 'hg help revsets')\n\n"),
    )
    ui.status(bytes(cmd), b"\n")
771
771
772
772
def mergetool(ui, repo, *args, **kwargs):
    """Map ``git mergetool`` onto ``hg resolve``."""
    args, opts = parseoptions(ui, [], args)

    cmd = Command(b"resolve")
    if not args:
        # No paths given: resolve everything, as git mergetool would.
        cmd[b'--all'] = None
    else:
        cmd.extend(args)
    ui.status(bytes(cmd), b"\n")
783
783
784
784
def mv(ui, repo, *args, **kwargs):
    """Map ``git mv`` onto ``hg mv``."""
    cmdoptions = [
        (b'f', b'force', None, b''),
        (b'n', b'dry-run', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b'mv')
    cmd.extend(args)

    # Both flags translate one-to-one.
    for optname, flag in ((b'force', b'-f'), (b'dry_run', b'-n')):
        if opts.get(optname):
            cmd[flag] = None

    ui.status(bytes(cmd), b"\n")
801
801
802
802
def pull(ui, repo, *args, **kwargs):
    """Map ``git pull`` onto ``hg pull --rebase``."""
    cmdoptions = [
        (b'', b'all', None, b''),
        (b'f', b'force', None, b''),
        (b'r', b'rebase', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b'pull')
    cmd[b'--rebase'] = None

    if args:
        cmd.append(args[0])
        refspecs = args[1:]
        if refspecs:
            ui.status(
                _(
                    b"note: Mercurial doesn't have refspecs. "
                    b"-r can be used to specify which commits you want to "
                    b"pull. -B can be used to specify which bookmark you "
                    b"want to pull.\n\n"
                )
            )
            # Known bookmarks become -B; anything else is a revision.
            for ref in refspecs:
                cmd[b'-B' if ref in repo._bookmarks else b'-r'] = ref

    ui.status(bytes(cmd), b"\n")
832
832
833
833
def push(ui, repo, *args, **kwargs):
    """Map ``git push`` onto ``hg push``."""
    cmdoptions = [
        (b'', b'all', None, b''),
        (b'f', b'force', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b'push')

    if args:
        cmd.append(args[0])
        refspecs = args[1:]
        if refspecs:
            ui.status(
                _(
                    b"note: Mercurial doesn't have refspecs. "
                    b"-r can be used to specify which commits you want "
                    b"to push. -B can be used to specify which bookmark "
                    b"you want to push.\n\n"
                )
            )
            # Known bookmarks become -B; anything else is a revision.
            for ref in refspecs:
                cmd[b'-B' if ref in repo._bookmarks else b'-r'] = ref

    if opts.get(b'force'):
        cmd[b'-f'] = None

    ui.status(bytes(cmd), b"\n")
864
864
865
865
def rebase(ui, repo, *args, **kwargs):
    """Map ``git rebase`` onto ``hg rebase`` (or ``hg histedit`` for -i)."""
    cmdoptions = [
        (b'', b'all', None, b''),
        (b'i', b'interactive', None, b''),
        (b'', b'onto', b'', b''),
        (b'', b'abort', None, b''),
        (b'', b'continue', None, b''),
        (b'', b'skip', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    if opts.get(b'interactive'):
        # Interactive rebase maps to histedit, which needs no target.
        ui.status(
            _(
                b"note: hg histedit does not perform a rebase. "
                b"It just edits history.\n\n"
            )
        )
        cmd = Command(b'histedit')
        if args:
            ui.status(
                _(
                    b"also note: 'hg histedit' will automatically detect"
                    b" your stack, so no second argument is necessary\n\n"
                )
            )
        ui.status(bytes(cmd), b"\n")
        return

    if opts.get(b'skip'):
        # Drop the conflicted changes before continuing.
        ui.status(bytes(Command(b'revert --all -r .')), b"\n")

    cmd = Command(b'rebase')

    if opts.get(b'continue') or opts.get(b'skip'):
        cmd[b'--continue'] = None
    if opts.get(b'abort'):
        cmd[b'--abort'] = None

    if opts.get(b'onto'):
        ui.status(
            _(
                b"note: if you're trying to lift a commit off one branch, "
                b"try hg rebase -d <destination commit> -s <commit to be "
                b"lifted>\n\n"
            )
        )
        cmd[b'-d'] = convert(opts.get(b'onto'))
        if len(args) < 2:
            raise error.Abort(_(b"expected format: git rebase --onto X Y Z"))
        cmd[b'-s'] = b"'::%s - ::%s'" % (convert(args[1]), convert(args[0]))
    elif len(args) == 1:
        cmd[b'-d'] = convert(args[0])
    elif len(args) == 2:
        cmd[b'-d'] = convert(args[0])
        cmd[b'-b'] = convert(args[1])

    ui.status(bytes(cmd), b"\n")
926
926
927
927
def reflog(ui, repo, *args, **kwargs):
    """Map ``git reflog`` onto the ``hg journal`` extension command."""
    cmdoptions = [
        (b'', b'all', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b'journal')
    if opts.get(b'all'):
        cmd[b'--all'] = None
    if args:
        cmd.append(args[0])

    ui.status(bytes(cmd), b"\n\n")
    ui.status(
        _(
            b"note: in hg commits can be deleted from repo but we always"
            b" have backups\n"
        )
    )
947
947
948
948
def reset(ui, repo, *args, **kwargs):
    """Map ``git reset`` onto ``hg update`` (``--hard`` adds ``--clean``)."""
    cmdoptions = [
        (b'', b'soft', None, b''),
        (b'', b'hard', None, b''),
        (b'', b'mixed', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    # Default to the working directory parent when no commit is given.
    commit = convert(args[0] if args else b'.')

    if opts.get(b'mixed'):
        ui.status(
            _(
                b'note: --mixed has no meaning since Mercurial has no '
                b'staging area\n\n'
            )
        )
    if opts.get(b'soft'):
        ui.status(
            _(
                b'note: --soft has no meaning since Mercurial has no '
                b'staging area\n\n'
            )
        )

    cmd = Command(b'update')
    if opts.get(b'hard'):
        cmd.append(b'--clean')
    cmd.append(commit)

    ui.status(bytes(cmd), b"\n")
982
982
983
983
def revert(ui, repo, *args, **kwargs):
    """Map ``git revert`` onto ``hg backout``."""
    args, opts = parseoptions(ui, [], args)

    if len(args) > 1:
        # hg backout only takes a single revision.
        ui.status(
            _(
                b"note: hg backout doesn't support multiple commits at "
                b"once\n\n"
            )
        )

    cmd = Command(b'backout')
    if args:
        cmd.append(args[0])

    ui.status(bytes(cmd), b"\n")
1001
1001
1002
1002
def revparse(ui, repo, *args, **kwargs):
    """Map ``git rev-parse --show-cdup/--show-toplevel`` onto ``hg root``."""
    cmdoptions = [
        (b'', b'show-cdup', None, b''),
        (b'', b'show-toplevel', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    # Anything other than the two root-style queries has no equivalent.
    if not (opts.get(b'show_cdup') or opts.get(b'show_toplevel')):
        ui.status(_(b"note: see hg help revset for how to refer to commits\n"))
        return

    cmd = Command(b'root')
    if opts.get(b'show_cdup'):
        ui.status(_(b"note: hg root prints the root of the repository\n\n"))
    ui.status(bytes(cmd), b"\n")
1017
1017
1018
1018
def rm(ui, repo, *args, **kwargs):
    """Map ``git rm`` onto ``hg rm``."""
    cmdoptions = [
        (b'f', b'force', None, b''),
        (b'n', b'dry-run', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b'rm')
    cmd.extend(args)

    # Both flags translate one-to-one.
    for optname, flag in ((b'force', b'-f'), (b'dry_run', b'-n')):
        if opts.get(optname):
            cmd[flag] = None

    ui.status(bytes(cmd), b"\n")
1035
1035
1036
1036
def show(ui, repo, *args, **kwargs):
    """Map ``git show`` onto ``hg export``/``hg cat``/``hg status``."""
    cmdoptions = [
        (b'', b'name-status', None, b''),
        (b'', b'pretty', b'', b''),
        (b'U', b'unified', int, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    if opts.get(b'name_status'):
        if opts.get(b'pretty') == b'format:':
            cmd = Command(b'status')
            cmd[b'--change'] = b'.'
        else:
            cmd = Command(b'log')
            cmd.append(b'--style status')
            cmd.append(b'-r .')
    elif args:
        # A path argument means "show file contents"; anything else is
        # treated as a revision to export.
        cmd = Command(b'cat') if ispath(repo, args[0]) else Command(b'export')
        cmd.extend(args)
        if opts.get(b'unified'):
            cmd.append(b'--config diff.unified=%d' % (opts[b'unified'],))
    elif opts.get(b'unified'):
        cmd = Command(b'export')
        cmd.append(b'--config diff.unified=%d' % (opts[b'unified'],))
    else:
        cmd = Command(b'export')

    ui.status(bytes(cmd), b"\n")
1068
1068
1069
1069
def stash(ui, repo, *args, **kwargs):
    """Map ``git stash`` subcommands onto ``hg shelve``/``hg unshelve``."""
    cmdoptions = [
        (b'p', b'patch', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b'shelve')
    action = args[0] if args else None

    if action == b'list':
        cmd[b'-l'] = None
        if opts.get(b'patch'):
            cmd[b'-p'] = None
    elif action == b'show':
        # Default to a diffstat unless a full patch was requested.
        cmd[b'-p' if opts.get(b'patch') else b'--stat'] = None
        if len(args) > 1:
            cmd.append(args[1])
    elif action == b'clear':
        cmd[b'--cleanup'] = None
    elif action == b'drop':
        cmd[b'-d'] = None
        cmd.append(args[1] if len(args) > 1 else b'<shelve name>')
    elif action in (b'pop', b'apply'):
        cmd = Command(b'unshelve')
        if len(args) > 1:
            cmd.append(args[1])
        if action == b'apply':
            # 'apply' keeps the shelf around; 'pop' deletes it.
            cmd[b'--keep'] = None
    elif action in (b'branch', b'create'):
        ui.status(
            _(
                b"note: Mercurial doesn't have equivalents to the "
                b"git stash branch or create actions\n\n"
            )
        )
        return
    else:
        # Plain 'git stash' or 'git stash save [name]'.
        if args:
            if args[0] != b'save':
                cmd[b'--name'] = args[0]
            elif len(args) > 1:
                cmd[b'--name'] = args[1]

    ui.status(bytes(cmd), b"\n")
1120
1120
1121
1121
def status(ui, repo, *args, **kwargs):
    """Map ``git status`` onto ``hg status``."""
    cmdoptions = [
        (b'', b'ignored', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    cmd = Command(b'status')
    cmd.extend(args)
    if opts.get(b'ignored'):
        cmd[b'-i'] = None

    ui.status(bytes(cmd), b"\n")
1135
1135
1136
1136
def svn(ui, repo, *args, **kwargs):
    """Dispatch a ``git svn`` subcommand to its handler in gitsvncommands."""
    if not args:
        raise error.Abort(_(b'missing svn command'))
    svncmd, rest = args[0], args[1:]
    if svncmd not in gitsvncommands:
        raise error.Abort(_(b'unknown git svn command "%s"') % svncmd)

    return gitsvncommands[svncmd](ui, repo, *rest, **kwargs)
1146
1146
1147
1147
def svndcommit(ui, repo, *args, **kwargs):
    """Map ``git svn dcommit`` onto ``hg push``."""
    parseoptions(ui, [], args)

    ui.status(bytes(Command(b'push')), b"\n")
1155
1155
1156
1156
def svnfetch(ui, repo, *args, **kwargs):
    """Map ``git svn fetch`` onto ``hg pull default-push``."""
    parseoptions(ui, [], args)

    pullcmd = Command(b'pull')
    pullcmd.append(b'default-push')

    ui.status(bytes(pullcmd), b"\n")
1165
1165
1166
1166
def svnfindrev(ui, repo, *args, **kwargs):
    """Map ``git svn find-rev`` onto ``hg log -r REV``."""
    args, opts = parseoptions(ui, [], args)

    if not args:
        raise error.Abort(_(b'missing find-rev argument'))

    logcmd = Command(b'log')
    logcmd[b'-r'] = args[0]

    ui.status(bytes(logcmd), b"\n")
1178
1178
1179
1179
def svnrebase(ui, repo, *args, **kwargs):
    """Map ``git svn rebase`` onto ``hg pull default-push && hg rebase tip``."""
    cmdoptions = [
        (b'l', b'local', None, b''),
    ]
    parseoptions(ui, cmdoptions, args)

    fetchstep = Command(b'pull')
    fetchstep.append(b'default-push')
    rebasestep = Command(b'rebase')
    rebasestep.append(b'tip')

    # '&' chains the two suggested commands into one line.
    ui.status(bytes(fetchstep & rebasestep), b"\n")
1194
1194
1195
1195
def tag(ui, repo, *args, **kwargs):
    """Map ``git tag`` onto ``hg tag`` (or ``hg tags`` for listing)."""
    cmdoptions = [
        (b'f', b'force', None, b''),
        (b'l', b'list', None, b''),
        (b'd', b'delete', None, b''),
    ]
    args, opts = parseoptions(ui, cmdoptions, args)

    if opts.get(b'list'):
        cmd = Command(b'tags')
    else:
        cmd = Command(b'tag')

        if not args:
            raise error.Abort(_(b'missing tag argument'))

        tagname, rest = args[0], args[1:]
        cmd.append(tagname)
        if rest:
            cmd[b'-r'] = rest[0]

        # Both flags translate one-to-one.
        for optname, flag in ((b'delete', b'--remove'), (b'force', b'-f')):
            if opts.get(optname):
                cmd[flag] = None

    ui.status(bytes(cmd), b"\n")
1223
1223
1224
1224
# Dispatch table: git subcommand name -> handler that prints the closest
# Mercurial equivalent. Order is alphabetical; do not rely on it for
# dispatch semantics.
gitcommands = {
    b'add': add,
    b'am': am,
    b'apply': apply,
    b'bisect': bisect,
    b'blame': blame,
    b'branch': branch,
    b'checkout': checkout,
    b'cherry-pick': cherrypick,
    b'clean': clean,
    b'clone': clone,
    b'commit': commit,
    b'diff': diff,
    b'difftool': difftool,
    b'fetch': fetch,
    b'grep': grep,
    b'init': init,
    b'log': log,
    b'ls-files': lsfiles,
    b'merge': merge,
    b'merge-base': mergebase,
    b'mergetool': mergetool,
    b'mv': mv,
    b'pull': pull,
    b'push': push,
    b'rebase': rebase,
    b'reflog': reflog,
    b'reset': reset,
    b'revert': revert,
    b'rev-parse': revparse,
    b'rm': rm,
    b'show': show,
    b'stash': stash,
    b'status': status,
    b'svn': svn,
    b'tag': tag,
    b'whatchanged': deprecated,
}
1263
1263
# Dispatch table for 'git svn SUBCOMMAND', consulted by svn() above.
gitsvncommands = {
    b'dcommit': svndcommit,
    b'fetch': svnfetch,
    b'find-rev': svnfindrev,
    b'rebase': svnrebase,
}
@@ -1,1259 +1,1259 b''
1 # __init__.py - remotefilelog extension
1 # __init__.py - remotefilelog extension
2 #
2 #
3 # Copyright 2013 Facebook, Inc.
3 # Copyright 2013 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """remotefilelog causes Mercurial to lazilly fetch file contents (EXPERIMENTAL)
7 """remotefilelog causes Mercurial to lazilly fetch file contents (EXPERIMENTAL)
8
8
9 This extension is HIGHLY EXPERIMENTAL. There are NO BACKWARDS COMPATIBILITY
9 This extension is HIGHLY EXPERIMENTAL. There are NO BACKWARDS COMPATIBILITY
10 GUARANTEES. This means that repositories created with this extension may
10 GUARANTEES. This means that repositories created with this extension may
11 only be usable with the exact version of this extension/Mercurial that was
11 only be usable with the exact version of this extension/Mercurial that was
12 used. The extension attempts to enforce this in order to prevent repository
12 used. The extension attempts to enforce this in order to prevent repository
13 corruption.
13 corruption.
14
14
15 remotefilelog works by fetching file contents lazily and storing them
15 remotefilelog works by fetching file contents lazily and storing them
16 in a cache on the client rather than in revlogs. This allows enormous
16 in a cache on the client rather than in revlogs. This allows enormous
17 histories to be transferred only partially, making them easier to
17 histories to be transferred only partially, making them easier to
18 operate on.
18 operate on.
19
19
20 Configs:
20 Configs:
21
21
22 ``packs.maxchainlen`` specifies the maximum delta chain length in pack files
22 ``packs.maxchainlen`` specifies the maximum delta chain length in pack files
23
23
24 ``packs.maxpacksize`` specifies the maximum pack file size
24 ``packs.maxpacksize`` specifies the maximum pack file size
25
25
26 ``packs.maxpackfilecount`` specifies the maximum number of packs in the
26 ``packs.maxpackfilecount`` specifies the maximum number of packs in the
27 shared cache (trees only for now)
27 shared cache (trees only for now)
28
28
29 ``remotefilelog.backgroundprefetch`` runs prefetch in background when True
29 ``remotefilelog.backgroundprefetch`` runs prefetch in background when True
30
30
31 ``remotefilelog.bgprefetchrevs`` specifies revisions to fetch on commit and
31 ``remotefilelog.bgprefetchrevs`` specifies revisions to fetch on commit and
32 update, and on other commands that use them. Different from pullprefetch.
32 update, and on other commands that use them. Different from pullprefetch.
33
33
34 ``remotefilelog.gcrepack`` does garbage collection during repack when True
34 ``remotefilelog.gcrepack`` does garbage collection during repack when True
35
35
36 ``remotefilelog.nodettl`` specifies maximum TTL of a node in seconds before
36 ``remotefilelog.nodettl`` specifies maximum TTL of a node in seconds before
37 it is garbage collected
37 it is garbage collected
38
38
39 ``remotefilelog.repackonhggc`` runs repack on hg gc when True
39 ``remotefilelog.repackonhggc`` runs repack on hg gc when True
40
40
41 ``remotefilelog.prefetchdays`` specifies the maximum age of a commit in
41 ``remotefilelog.prefetchdays`` specifies the maximum age of a commit in
42 days after which it is no longer prefetched.
42 days after which it is no longer prefetched.
43
43
44 ``remotefilelog.prefetchdelay`` specifies delay between background
44 ``remotefilelog.prefetchdelay`` specifies delay between background
45 prefetches in seconds after operations that change the working copy parent
45 prefetches in seconds after operations that change the working copy parent
46
46
47 ``remotefilelog.data.gencountlimit`` constraints the minimum number of data
47 ``remotefilelog.data.gencountlimit`` constraints the minimum number of data
48 pack files required to be considered part of a generation. In particular,
48 pack files required to be considered part of a generation. In particular,
49 minimum number of packs files > gencountlimit.
49 minimum number of packs files > gencountlimit.
50
50
51 ``remotefilelog.data.generations`` list for specifying the lower bound of
51 ``remotefilelog.data.generations`` list for specifying the lower bound of
52 each generation of the data pack files. For example, list ['100MB','1MB']
52 each generation of the data pack files. For example, list ['100MB','1MB']
53 or ['1MB', '100MB'] will lead to three generations: [0, 1MB), [
53 or ['1MB', '100MB'] will lead to three generations: [0, 1MB), [
54 1MB, 100MB) and [100MB, infinity).
54 1MB, 100MB) and [100MB, infinity).
55
55
56 ``remotefilelog.data.maxrepackpacks`` the maximum number of pack files to
56 ``remotefilelog.data.maxrepackpacks`` the maximum number of pack files to
57 include in an incremental data repack.
57 include in an incremental data repack.
58
58
59 ``remotefilelog.data.repackmaxpacksize`` the maximum size of a pack file for
59 ``remotefilelog.data.repackmaxpacksize`` the maximum size of a pack file for
60 it to be considered for an incremental data repack.
60 it to be considered for an incremental data repack.
61
61
62 ``remotefilelog.data.repacksizelimit`` the maximum total size of pack files
62 ``remotefilelog.data.repacksizelimit`` the maximum total size of pack files
63 to include in an incremental data repack.
63 to include in an incremental data repack.
64
64
65 ``remotefilelog.history.gencountlimit`` constraints the minimum number of
65 ``remotefilelog.history.gencountlimit`` constraints the minimum number of
66 history pack files required to be considered part of a generation. In
66 history pack files required to be considered part of a generation. In
67 particular, minimum number of packs files > gencountlimit.
67 particular, minimum number of packs files > gencountlimit.
68
68
69 ``remotefilelog.history.generations`` list for specifying the lower bound of
69 ``remotefilelog.history.generations`` list for specifying the lower bound of
70 each generation of the history pack files. For example, list [
70 each generation of the history pack files. For example, list [
71 '100MB', '1MB'] or ['1MB', '100MB'] will lead to three generations: [
71 '100MB', '1MB'] or ['1MB', '100MB'] will lead to three generations: [
72 0, 1MB), [1MB, 100MB) and [100MB, infinity).
72 0, 1MB), [1MB, 100MB) and [100MB, infinity).
73
73
74 ``remotefilelog.history.maxrepackpacks`` the maximum number of pack files to
74 ``remotefilelog.history.maxrepackpacks`` the maximum number of pack files to
75 include in an incremental history repack.
75 include in an incremental history repack.
76
76
77 ``remotefilelog.history.repackmaxpacksize`` the maximum size of a pack file
77 ``remotefilelog.history.repackmaxpacksize`` the maximum size of a pack file
78 for it to be considered for an incremental history repack.
78 for it to be considered for an incremental history repack.
79
79
80 ``remotefilelog.history.repacksizelimit`` the maximum total size of pack
80 ``remotefilelog.history.repacksizelimit`` the maximum total size of pack
81 files to include in an incremental history repack.
81 files to include in an incremental history repack.
82
82
83 ``remotefilelog.backgroundrepack`` automatically consolidate packs in the
83 ``remotefilelog.backgroundrepack`` automatically consolidate packs in the
84 background
84 background
85
85
86 ``remotefilelog.cachepath`` path to cache
86 ``remotefilelog.cachepath`` path to cache
87
87
88 ``remotefilelog.cachegroup`` if set, make cache directory sgid to this
88 ``remotefilelog.cachegroup`` if set, make cache directory sgid to this
89 group
89 group
90
90
91 ``remotefilelog.cacheprocess`` binary to invoke for fetching file data
91 ``remotefilelog.cacheprocess`` binary to invoke for fetching file data
92
92
93 ``remotefilelog.debug`` turn on remotefilelog-specific debug output
93 ``remotefilelog.debug`` turn on remotefilelog-specific debug output
94
94
95 ``remotefilelog.excludepattern`` pattern of files to exclude from pulls
95 ``remotefilelog.excludepattern`` pattern of files to exclude from pulls
96
96
97 ``remotefilelog.includepattern`` pattern of files to include in pulls
97 ``remotefilelog.includepattern`` pattern of files to include in pulls
98
98
99 ``remotefilelog.fetchwarning``: message to print when too many
99 ``remotefilelog.fetchwarning``: message to print when too many
100 single-file fetches occur
100 single-file fetches occur
101
101
102 ``remotefilelog.getfilesstep`` number of files to request in a single RPC
102 ``remotefilelog.getfilesstep`` number of files to request in a single RPC
103
103
104 ``remotefilelog.getfilestype`` if set to 'threaded' use threads to fetch
104 ``remotefilelog.getfilestype`` if set to 'threaded' use threads to fetch
105 files, otherwise use optimistic fetching
105 files, otherwise use optimistic fetching
106
106
107 ``remotefilelog.pullprefetch`` revset for selecting files that should be
107 ``remotefilelog.pullprefetch`` revset for selecting files that should be
108 eagerly downloaded rather than lazily
108 eagerly downloaded rather than lazily
109
109
110 ``remotefilelog.reponame`` name of the repo. If set, used to partition
110 ``remotefilelog.reponame`` name of the repo. If set, used to partition
111 data from other repos in a shared store.
111 data from other repos in a shared store.
112
112
113 ``remotefilelog.server`` if true, enable server-side functionality
113 ``remotefilelog.server`` if true, enable server-side functionality
114
114
115 ``remotefilelog.servercachepath`` path for caching blobs on the server
115 ``remotefilelog.servercachepath`` path for caching blobs on the server
116
116
117 ``remotefilelog.serverexpiration`` number of days to keep cached server
117 ``remotefilelog.serverexpiration`` number of days to keep cached server
118 blobs
118 blobs
119
119
120 ``remotefilelog.validatecache`` if set, check cache entries for corruption
120 ``remotefilelog.validatecache`` if set, check cache entries for corruption
121 before returning blobs
121 before returning blobs
122
122
123 ``remotefilelog.validatecachelog`` if set, check cache entries for
123 ``remotefilelog.validatecachelog`` if set, check cache entries for
124 corruption before returning metadata
124 corruption before returning metadata
125
125
126 """
126 """
127
127
128 import os
128 import os
129 import time
129 import time
130 import traceback
130 import traceback
131
131
132 from mercurial.node import (
132 from mercurial.node import (
133 hex,
133 hex,
134 wdirrev,
134 wdirrev,
135 )
135 )
136 from mercurial.i18n import _
136 from mercurial.i18n import _
137 from mercurial.pycompat import open
137 from mercurial.pycompat import open
138 from mercurial import (
138 from mercurial import (
139 changegroup,
139 changegroup,
140 changelog,
140 changelog,
141 commands,
141 commands,
142 configitems,
142 configitems,
143 context,
143 context,
144 copies,
144 copies,
145 debugcommands as hgdebugcommands,
145 debugcommands as hgdebugcommands,
146 dispatch,
146 dispatch,
147 error,
147 error,
148 exchange,
148 exchange,
149 extensions,
149 extensions,
150 hg,
150 hg,
151 localrepo,
151 localrepo,
152 match as matchmod,
152 match as matchmod,
153 merge,
153 merge,
154 mergestate as mergestatemod,
154 mergestate as mergestatemod,
155 patch,
155 patch,
156 pycompat,
156 pycompat,
157 registrar,
157 registrar,
158 repair,
158 repair,
159 repoview,
159 repoview,
160 revset,
160 revset,
161 scmutil,
161 scmutil,
162 smartset,
162 smartset,
163 streamclone,
163 streamclone,
164 util,
164 util,
165 )
165 )
166 from . import (
166 from . import (
167 constants,
167 constants,
168 debugcommands,
168 debugcommands,
169 fileserverclient,
169 fileserverclient,
170 remotefilectx,
170 remotefilectx,
171 remotefilelog,
171 remotefilelog,
172 remotefilelogserver,
172 remotefilelogserver,
173 repack as repackmod,
173 repack as repackmod,
174 shallowbundle,
174 shallowbundle,
175 shallowrepo,
175 shallowrepo,
176 shallowstore,
176 shallowstore,
177 shallowutil,
177 shallowutil,
178 shallowverifier,
178 shallowverifier,
179 )
179 )
180
180
181 # ensures debug commands are registered
181 # ensures debug commands are registered
182 hgdebugcommands.command
182 hgdebugcommands.command
183
183
184 cmdtable = {}
184 cmdtable = {}
185 command = registrar.command(cmdtable)
185 command = registrar.command(cmdtable)
186
186
187 configtable = {}
187 configtable = {}
188 configitem = registrar.configitem(configtable)
188 configitem = registrar.configitem(configtable)
189
189
190 configitem(b'remotefilelog', b'debug', default=False)
190 configitem(b'remotefilelog', b'debug', default=False)
191
191
192 configitem(b'remotefilelog', b'reponame', default=b'')
192 configitem(b'remotefilelog', b'reponame', default=b'')
193 configitem(b'remotefilelog', b'cachepath', default=None)
193 configitem(b'remotefilelog', b'cachepath', default=None)
194 configitem(b'remotefilelog', b'cachegroup', default=None)
194 configitem(b'remotefilelog', b'cachegroup', default=None)
195 configitem(b'remotefilelog', b'cacheprocess', default=None)
195 configitem(b'remotefilelog', b'cacheprocess', default=None)
196 configitem(b'remotefilelog', b'cacheprocess.includepath', default=None)
196 configitem(b'remotefilelog', b'cacheprocess.includepath', default=None)
197 configitem(b"remotefilelog", b"cachelimit", default=b"1000 GB")
197 configitem(b"remotefilelog", b"cachelimit", default=b"1000 GB")
198
198
199 configitem(
199 configitem(
200 b'remotefilelog',
200 b'remotefilelog',
201 b'fallbackpath',
201 b'fallbackpath',
202 default=configitems.dynamicdefault,
202 default=configitems.dynamicdefault,
203 alias=[(b'remotefilelog', b'fallbackrepo')],
203 alias=[(b'remotefilelog', b'fallbackrepo')],
204 )
204 )
205
205
206 configitem(b'remotefilelog', b'validatecachelog', default=None)
206 configitem(b'remotefilelog', b'validatecachelog', default=None)
207 configitem(b'remotefilelog', b'validatecache', default=b'on')
207 configitem(b'remotefilelog', b'validatecache', default=b'on')
208 configitem(b'remotefilelog', b'server', default=None)
208 configitem(b'remotefilelog', b'server', default=None)
209 configitem(b'remotefilelog', b'servercachepath', default=None)
209 configitem(b'remotefilelog', b'servercachepath', default=None)
210 configitem(b"remotefilelog", b"serverexpiration", default=30)
210 configitem(b"remotefilelog", b"serverexpiration", default=30)
211 configitem(b'remotefilelog', b'backgroundrepack', default=False)
211 configitem(b'remotefilelog', b'backgroundrepack', default=False)
212 configitem(b'remotefilelog', b'bgprefetchrevs', default=None)
212 configitem(b'remotefilelog', b'bgprefetchrevs', default=None)
213 configitem(b'remotefilelog', b'pullprefetch', default=None)
213 configitem(b'remotefilelog', b'pullprefetch', default=None)
214 configitem(b'remotefilelog', b'backgroundprefetch', default=False)
214 configitem(b'remotefilelog', b'backgroundprefetch', default=False)
215 configitem(b'remotefilelog', b'prefetchdelay', default=120)
215 configitem(b'remotefilelog', b'prefetchdelay', default=120)
216 configitem(b'remotefilelog', b'prefetchdays', default=14)
216 configitem(b'remotefilelog', b'prefetchdays', default=14)
217 # Other values include 'local' or 'none'. Any unrecognized value is 'all'.
217 # Other values include 'local' or 'none'. Any unrecognized value is 'all'.
218 configitem(b'remotefilelog', b'strip.includefiles', default='all')
218 configitem(b'remotefilelog', b'strip.includefiles', default='all')
219
219
220 configitem(b'remotefilelog', b'getfilesstep', default=10000)
220 configitem(b'remotefilelog', b'getfilesstep', default=10000)
221 configitem(b'remotefilelog', b'getfilestype', default=b'optimistic')
221 configitem(b'remotefilelog', b'getfilestype', default=b'optimistic')
222 configitem(b'remotefilelog', b'batchsize', configitems.dynamicdefault)
222 configitem(b'remotefilelog', b'batchsize', configitems.dynamicdefault)
223 configitem(b'remotefilelog', b'fetchwarning', default=b'')
223 configitem(b'remotefilelog', b'fetchwarning', default=b'')
224
224
225 configitem(b'remotefilelog', b'includepattern', default=None)
225 configitem(b'remotefilelog', b'includepattern', default=None)
226 configitem(b'remotefilelog', b'excludepattern', default=None)
226 configitem(b'remotefilelog', b'excludepattern', default=None)
227
227
228 configitem(b'remotefilelog', b'gcrepack', default=False)
228 configitem(b'remotefilelog', b'gcrepack', default=False)
229 configitem(b'remotefilelog', b'repackonhggc', default=False)
229 configitem(b'remotefilelog', b'repackonhggc', default=False)
230 configitem(b'repack', b'chainorphansbysize', default=True, experimental=True)
230 configitem(b'repack', b'chainorphansbysize', default=True, experimental=True)
231
231
232 configitem(b'packs', b'maxpacksize', default=0)
232 configitem(b'packs', b'maxpacksize', default=0)
233 configitem(b'packs', b'maxchainlen', default=1000)
233 configitem(b'packs', b'maxchainlen', default=1000)
234
234
235 configitem(b'devel', b'remotefilelog.bg-wait', default=False)
235 configitem(b'devel', b'remotefilelog.bg-wait', default=False)
236
236
237 # default TTL limit is 30 days
237 # default TTL limit is 30 days
238 _defaultlimit = 60 * 60 * 24 * 30
238 _defaultlimit = 60 * 60 * 24 * 30
239 configitem(b'remotefilelog', b'nodettl', default=_defaultlimit)
239 configitem(b'remotefilelog', b'nodettl', default=_defaultlimit)
240
240
241 configitem(b'remotefilelog', b'data.gencountlimit', default=2),
241 configitem(b'remotefilelog', b'data.gencountlimit', default=2),
242 configitem(
242 configitem(
243 b'remotefilelog', b'data.generations', default=[b'1GB', b'100MB', b'1MB']
243 b'remotefilelog', b'data.generations', default=[b'1GB', b'100MB', b'1MB']
244 )
244 )
245 configitem(b'remotefilelog', b'data.maxrepackpacks', default=50)
245 configitem(b'remotefilelog', b'data.maxrepackpacks', default=50)
246 configitem(b'remotefilelog', b'data.repackmaxpacksize', default=b'4GB')
246 configitem(b'remotefilelog', b'data.repackmaxpacksize', default=b'4GB')
247 configitem(b'remotefilelog', b'data.repacksizelimit', default=b'100MB')
247 configitem(b'remotefilelog', b'data.repacksizelimit', default=b'100MB')
248
248
249 configitem(b'remotefilelog', b'history.gencountlimit', default=2),
249 configitem(b'remotefilelog', b'history.gencountlimit', default=2),
250 configitem(b'remotefilelog', b'history.generations', default=[b'100MB'])
250 configitem(b'remotefilelog', b'history.generations', default=[b'100MB'])
251 configitem(b'remotefilelog', b'history.maxrepackpacks', default=50)
251 configitem(b'remotefilelog', b'history.maxrepackpacks', default=50)
252 configitem(b'remotefilelog', b'history.repackmaxpacksize', default=b'400MB')
252 configitem(b'remotefilelog', b'history.repackmaxpacksize', default=b'400MB')
253 configitem(b'remotefilelog', b'history.repacksizelimit', default=b'100MB')
253 configitem(b'remotefilelog', b'history.repacksizelimit', default=b'100MB')
254
254
255 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
255 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
256 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
256 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
257 # be specifying the version(s) of Mercurial they are tested with, or
257 # be specifying the version(s) of Mercurial they are tested with, or
258 # leave the attribute unspecified.
258 # leave the attribute unspecified.
259 testedwith = b'ships-with-hg-core'
259 testedwith = b'ships-with-hg-core'
260
260
261 repoclass = localrepo.localrepository
261 repoclass = localrepo.localrepository
262 repoclass._basesupported.add(constants.SHALLOWREPO_REQUIREMENT)
262 repoclass._basesupported.add(constants.SHALLOWREPO_REQUIREMENT)
263
263
264 isenabled = shallowutil.isenabled
264 isenabled = shallowutil.isenabled
265
265
266
266
267 def uisetup(ui):
267 def uisetup(ui):
268 """Wraps user facing Mercurial commands to swap them out with shallow
268 """Wraps user facing Mercurial commands to swap them out with shallow
269 versions.
269 versions.
270 """
270 """
271 hg.wirepeersetupfuncs.append(fileserverclient.peersetup)
271 hg.wirepeersetupfuncs.append(fileserverclient.peersetup)
272
272
273 entry = extensions.wrapcommand(commands.table, b'clone', cloneshallow)
273 entry = extensions.wrapcommand(commands.table, b'clone', cloneshallow)
274 entry[1].append(
274 entry[1].append(
275 (
275 (
276 b'',
276 b'',
277 b'shallow',
277 b'shallow',
278 None,
278 None,
279 _(b"create a shallow clone which uses remote file history"),
279 _(b"create a shallow clone which uses remote file history"),
280 )
280 )
281 )
281 )
282
282
283 extensions.wrapcommand(
283 extensions.wrapcommand(
284 commands.table, b'debugindex', debugcommands.debugindex
284 commands.table, b'debugindex', debugcommands.debugindex
285 )
285 )
286 extensions.wrapcommand(
286 extensions.wrapcommand(
287 commands.table, b'debugindexdot', debugcommands.debugindexdot
287 commands.table, b'debugindexdot', debugcommands.debugindexdot
288 )
288 )
289 extensions.wrapcommand(commands.table, b'log', log)
289 extensions.wrapcommand(commands.table, b'log', log)
290 extensions.wrapcommand(commands.table, b'pull', pull)
290 extensions.wrapcommand(commands.table, b'pull', pull)
291
291
292 # Prevent 'hg manifest --all'
292 # Prevent 'hg manifest --all'
293 def _manifest(orig, ui, repo, *args, **opts):
293 def _manifest(orig, ui, repo, *args, **opts):
294 if isenabled(repo) and opts.get('all'):
294 if isenabled(repo) and opts.get('all'):
295 raise error.Abort(_(b"--all is not supported in a shallow repo"))
295 raise error.Abort(_(b"--all is not supported in a shallow repo"))
296
296
297 return orig(ui, repo, *args, **opts)
297 return orig(ui, repo, *args, **opts)
298
298
299 extensions.wrapcommand(commands.table, b"manifest", _manifest)
299 extensions.wrapcommand(commands.table, b"manifest", _manifest)
300
300
301 # Wrap remotefilelog with lfs code
301 # Wrap remotefilelog with lfs code
302 def _lfsloaded(loaded=False):
302 def _lfsloaded(loaded=False):
303 lfsmod = None
303 lfsmod = None
304 try:
304 try:
305 lfsmod = extensions.find(b'lfs')
305 lfsmod = extensions.find(b'lfs')
306 except KeyError:
306 except KeyError:
307 pass
307 pass
308 if lfsmod:
308 if lfsmod:
309 lfsmod.wrapfilelog(remotefilelog.remotefilelog)
309 lfsmod.wrapfilelog(remotefilelog.remotefilelog)
310 fileserverclient._lfsmod = lfsmod
310 fileserverclient._lfsmod = lfsmod
311
311
312 extensions.afterloaded(b'lfs', _lfsloaded)
312 extensions.afterloaded(b'lfs', _lfsloaded)
313
313
314 # debugdata needs remotefilelog.len to work
314 # debugdata needs remotefilelog.len to work
315 extensions.wrapcommand(commands.table, b'debugdata', debugdatashallow)
315 extensions.wrapcommand(commands.table, b'debugdata', debugdatashallow)
316
316
317 changegroup.cgpacker = shallowbundle.shallowcg1packer
317 changegroup.cgpacker = shallowbundle.shallowcg1packer
318
318
319 extensions.wrapfunction(
319 extensions.wrapfunction(
320 changegroup, '_addchangegroupfiles', shallowbundle.addchangegroupfiles
320 changegroup, '_addchangegroupfiles', shallowbundle.addchangegroupfiles
321 )
321 )
322 extensions.wrapfunction(
322 extensions.wrapfunction(
323 changegroup, 'makechangegroup', shallowbundle.makechangegroup
323 changegroup, 'makechangegroup', shallowbundle.makechangegroup
324 )
324 )
325 extensions.wrapfunction(localrepo, 'makestore', storewrapper)
325 extensions.wrapfunction(localrepo, 'makestore', storewrapper)
326 extensions.wrapfunction(exchange, 'pull', exchangepull)
326 extensions.wrapfunction(exchange, 'pull', exchangepull)
327 extensions.wrapfunction(merge, 'applyupdates', applyupdates)
327 extensions.wrapfunction(merge, 'applyupdates', applyupdates)
328 extensions.wrapfunction(merge, '_checkunknownfiles', checkunknownfiles)
328 extensions.wrapfunction(merge, '_checkunknownfiles', checkunknownfiles)
329 extensions.wrapfunction(context.workingctx, '_checklookup', checklookup)
329 extensions.wrapfunction(context.workingctx, '_checklookup', checklookup)
330 extensions.wrapfunction(scmutil, '_findrenames', findrenames)
330 extensions.wrapfunction(scmutil, '_findrenames', findrenames)
331 extensions.wrapfunction(
331 extensions.wrapfunction(
332 copies, '_computeforwardmissing', computeforwardmissing
332 copies, '_computeforwardmissing', computeforwardmissing
333 )
333 )
334 extensions.wrapfunction(dispatch, 'runcommand', runcommand)
334 extensions.wrapfunction(dispatch, 'runcommand', runcommand)
335 extensions.wrapfunction(repair, '_collectbrokencsets', _collectbrokencsets)
335 extensions.wrapfunction(repair, '_collectbrokencsets', _collectbrokencsets)
336 extensions.wrapfunction(context.changectx, 'filectx', filectx)
336 extensions.wrapfunction(context.changectx, 'filectx', filectx)
337 extensions.wrapfunction(context.workingctx, 'filectx', workingfilectx)
337 extensions.wrapfunction(context.workingctx, 'filectx', workingfilectx)
338 extensions.wrapfunction(patch, 'trydiff', trydiff)
338 extensions.wrapfunction(patch, 'trydiff', trydiff)
339 extensions.wrapfunction(hg, 'verify', _verify)
339 extensions.wrapfunction(hg, 'verify', _verify)
340 scmutil.fileprefetchhooks.add(b'remotefilelog', _fileprefetchhook)
340 scmutil.fileprefetchhooks.add(b'remotefilelog', _fileprefetchhook)
341
341
342 # disappointing hacks below
342 # disappointing hacks below
343 extensions.wrapfunction(scmutil, 'getrenamedfn', getrenamedfn)
343 extensions.wrapfunction(scmutil, 'getrenamedfn', getrenamedfn)
344 extensions.wrapfunction(revset, 'filelog', filelogrevset)
344 extensions.wrapfunction(revset, 'filelog', filelogrevset)
345 revset.symbols[b'filelog'] = revset.filelog
345 revset.symbols[b'filelog'] = revset.filelog
346
346
347
347
348 def cloneshallow(orig, ui, repo, *args, **opts):
348 def cloneshallow(orig, ui, repo, *args, **opts):
349 if opts.get('shallow'):
349 if opts.get('shallow'):
350 repos = []
350 repos = []
351
351
352 def pull_shallow(orig, self, *args, **kwargs):
352 def pull_shallow(orig, self, *args, **kwargs):
353 if not isenabled(self):
353 if not isenabled(self):
354 repos.append(self.unfiltered())
354 repos.append(self.unfiltered())
355 # set up the client hooks so the post-clone update works
355 # set up the client hooks so the post-clone update works
356 setupclient(self.ui, self.unfiltered())
356 setupclient(self.ui, self.unfiltered())
357
357
358 # setupclient fixed the class on the repo itself
358 # setupclient fixed the class on the repo itself
359 # but we also need to fix it on the repoview
359 # but we also need to fix it on the repoview
360 if isinstance(self, repoview.repoview):
360 if isinstance(self, repoview.repoview):
361 self.__class__.__bases__ = (
361 self.__class__.__bases__ = (
362 self.__class__.__bases__[0],
362 self.__class__.__bases__[0],
363 self.unfiltered().__class__,
363 self.unfiltered().__class__,
364 )
364 )
365 self.requirements.add(constants.SHALLOWREPO_REQUIREMENT)
365 self.requirements.add(constants.SHALLOWREPO_REQUIREMENT)
366 with self.lock():
366 with self.lock():
367 # acquire store lock before writing requirements as some
367 # acquire store lock before writing requirements as some
368 # requirements might be written to .hg/store/requires
368 # requirements might be written to .hg/store/requires
369 scmutil.writereporequirements(self)
369 scmutil.writereporequirements(self)
370
370
371 # Since setupclient hadn't been called, exchange.pull was not
371 # Since setupclient hadn't been called, exchange.pull was not
372 # wrapped. So we need to manually invoke our version of it.
372 # wrapped. So we need to manually invoke our version of it.
373 return exchangepull(orig, self, *args, **kwargs)
373 return exchangepull(orig, self, *args, **kwargs)
374 else:
374 else:
375 return orig(self, *args, **kwargs)
375 return orig(self, *args, **kwargs)
376
376
377 extensions.wrapfunction(exchange, 'pull', pull_shallow)
377 extensions.wrapfunction(exchange, 'pull', pull_shallow)
378
378
379 # Wrap the stream logic to add requirements and to pass include/exclude
379 # Wrap the stream logic to add requirements and to pass include/exclude
380 # patterns around.
380 # patterns around.
381 def setup_streamout(repo, remote):
381 def setup_streamout(repo, remote):
382 # Replace remote.stream_out with a version that sends file
382 # Replace remote.stream_out with a version that sends file
383 # patterns.
383 # patterns.
384 def stream_out_shallow(orig):
384 def stream_out_shallow(orig):
385 caps = remote.capabilities()
385 caps = remote.capabilities()
386 if constants.NETWORK_CAP_LEGACY_SSH_GETFILES in caps:
386 if constants.NETWORK_CAP_LEGACY_SSH_GETFILES in caps:
387 opts = {}
387 opts = {}
388 if repo.includepattern:
388 if repo.includepattern:
389 opts['includepattern'] = b'\0'.join(repo.includepattern)
389 opts['includepattern'] = b'\0'.join(repo.includepattern)
390 if repo.excludepattern:
390 if repo.excludepattern:
391 opts['excludepattern'] = b'\0'.join(repo.excludepattern)
391 opts['excludepattern'] = b'\0'.join(repo.excludepattern)
392 return remote._callstream(b'stream_out_shallow', **opts)
392 return remote._callstream(b'stream_out_shallow', **opts)
393 else:
393 else:
394 return orig()
394 return orig()
395
395
396 extensions.wrapfunction(remote, 'stream_out', stream_out_shallow)
396 extensions.wrapfunction(remote, 'stream_out', stream_out_shallow)
397
397
398 def stream_wrap(orig, op):
398 def stream_wrap(orig, op):
399 setup_streamout(op.repo, op.remote)
399 setup_streamout(op.repo, op.remote)
400 return orig(op)
400 return orig(op)
401
401
402 extensions.wrapfunction(
402 extensions.wrapfunction(
403 streamclone, 'maybeperformlegacystreamclone', stream_wrap
403 streamclone, 'maybeperformlegacystreamclone', stream_wrap
404 )
404 )
405
405
406 def canperformstreamclone(orig, pullop, bundle2=False):
406 def canperformstreamclone(orig, pullop, bundle2=False):
407 # remotefilelog is currently incompatible with the
407 # remotefilelog is currently incompatible with the
408 # bundle2 flavor of streamclones, so force us to use
408 # bundle2 flavor of streamclones, so force us to use
409 # v1 instead.
409 # v1 instead.
410 if b'v2' in pullop.remotebundle2caps.get(b'stream', []):
410 if b'v2' in pullop.remotebundle2caps.get(b'stream', []):
411 pullop.remotebundle2caps[b'stream'] = []
411 pullop.remotebundle2caps[b'stream'] = []
412 if bundle2:
412 if bundle2:
413 return False, None
413 return False, None
414 supported, requirements = orig(pullop, bundle2=bundle2)
414 supported, requirements = orig(pullop, bundle2=bundle2)
415 if requirements is not None:
415 if requirements is not None:
416 requirements.add(constants.SHALLOWREPO_REQUIREMENT)
416 requirements.add(constants.SHALLOWREPO_REQUIREMENT)
417 return supported, requirements
417 return supported, requirements
418
418
419 extensions.wrapfunction(
419 extensions.wrapfunction(
420 streamclone, 'canperformstreamclone', canperformstreamclone
420 streamclone, 'canperformstreamclone', canperformstreamclone
421 )
421 )
422
422
423 try:
423 try:
424 orig(ui, repo, *args, **opts)
424 orig(ui, repo, *args, **opts)
425 finally:
425 finally:
426 if opts.get('shallow'):
426 if opts.get('shallow'):
427 for r in repos:
427 for r in repos:
428 if hasattr(r, 'fileservice'):
428 if hasattr(r, 'fileservice'):
429 r.fileservice.close()
429 r.fileservice.close()
430
430
431
431
432 def debugdatashallow(orig, *args, **kwds):
432 def debugdatashallow(orig, *args, **kwds):
433 oldlen = remotefilelog.remotefilelog.__len__
433 oldlen = remotefilelog.remotefilelog.__len__
434 try:
434 try:
435 remotefilelog.remotefilelog.__len__ = lambda x: 1
435 remotefilelog.remotefilelog.__len__ = lambda x: 1
436 return orig(*args, **kwds)
436 return orig(*args, **kwds)
437 finally:
437 finally:
438 remotefilelog.remotefilelog.__len__ = oldlen
438 remotefilelog.remotefilelog.__len__ = oldlen
439
439
440
440
441 def reposetup(ui, repo):
441 def reposetup(ui, repo):
442 if not repo.local():
442 if not repo.local():
443 return
443 return
444
444
445 # put here intentionally bc doesnt work in uisetup
445 # put here intentionally bc doesnt work in uisetup
446 ui.setconfig(b'hooks', b'update.prefetch', wcpprefetch)
446 ui.setconfig(b'hooks', b'update.prefetch', wcpprefetch)
447 ui.setconfig(b'hooks', b'commit.prefetch', wcpprefetch)
447 ui.setconfig(b'hooks', b'commit.prefetch', wcpprefetch)
448
448
449 isserverenabled = ui.configbool(b'remotefilelog', b'server')
449 isserverenabled = ui.configbool(b'remotefilelog', b'server')
450 isshallowclient = isenabled(repo)
450 isshallowclient = isenabled(repo)
451
451
452 if isserverenabled and isshallowclient:
452 if isserverenabled and isshallowclient:
453 raise RuntimeError(b"Cannot be both a server and shallow client.")
453 raise RuntimeError(b"Cannot be both a server and shallow client.")
454
454
455 if isshallowclient:
455 if isshallowclient:
456 setupclient(ui, repo)
456 setupclient(ui, repo)
457
457
458 if isserverenabled:
458 if isserverenabled:
459 remotefilelogserver.setupserver(ui, repo)
459 remotefilelogserver.setupserver(ui, repo)
460
460
461
461
462 def setupclient(ui, repo):
462 def setupclient(ui, repo):
463 if not isinstance(repo, localrepo.localrepository):
463 if not isinstance(repo, localrepo.localrepository):
464 return
464 return
465
465
466 # Even clients get the server setup since they need to have the
466 # Even clients get the server setup since they need to have the
467 # wireprotocol endpoints registered.
467 # wireprotocol endpoints registered.
468 remotefilelogserver.onetimesetup(ui)
468 remotefilelogserver.onetimesetup(ui)
469 onetimeclientsetup(ui)
469 onetimeclientsetup(ui)
470
470
471 shallowrepo.wraprepo(repo)
471 shallowrepo.wraprepo(repo)
472 repo.store = shallowstore.wrapstore(repo.store)
472 repo.store = shallowstore.wrapstore(repo.store)
473
473
474
474
475 def storewrapper(orig, requirements, path, vfstype):
475 def storewrapper(orig, requirements, path, vfstype):
476 s = orig(requirements, path, vfstype)
476 s = orig(requirements, path, vfstype)
477 if constants.SHALLOWREPO_REQUIREMENT in requirements:
477 if constants.SHALLOWREPO_REQUIREMENT in requirements:
478 s = shallowstore.wrapstore(s)
478 s = shallowstore.wrapstore(s)
479
479
480 return s
480 return s
481
481
482
482
483 # prefetch files before update
483 # prefetch files before update
484 def applyupdates(
484 def applyupdates(
485 orig, repo, mresult, wctx, mctx, overwrite, wantfiledata, **opts
485 orig, repo, mresult, wctx, mctx, overwrite, wantfiledata, **opts
486 ):
486 ):
487 if isenabled(repo):
487 if isenabled(repo):
488 manifest = mctx.manifest()
488 manifest = mctx.manifest()
489 files = []
489 files = []
490 for f, args, msg in mresult.getactions([mergestatemod.ACTION_GET]):
490 for f, args, msg in mresult.getactions([mergestatemod.ACTION_GET]):
491 files.append((f, hex(manifest[f])))
491 files.append((f, hex(manifest[f])))
492 # batch fetch the needed files from the server
492 # batch fetch the needed files from the server
493 repo.fileservice.prefetch(files)
493 repo.fileservice.prefetch(files)
494 return orig(repo, mresult, wctx, mctx, overwrite, wantfiledata, **opts)
494 return orig(repo, mresult, wctx, mctx, overwrite, wantfiledata, **opts)
495
495
496
496
497 # Prefetch merge checkunknownfiles
497 # Prefetch merge checkunknownfiles
498 def checkunknownfiles(orig, repo, wctx, mctx, force, mresult, *args, **kwargs):
498 def checkunknownfiles(orig, repo, wctx, mctx, force, mresult, *args, **kwargs):
499 if isenabled(repo):
499 if isenabled(repo):
500 files = []
500 files = []
501 sparsematch = repo.maybesparsematch(mctx.rev())
501 sparsematch = repo.maybesparsematch(mctx.rev())
502 for f, (m, actionargs, msg) in mresult.filemap():
502 for f, (m, actionargs, msg) in mresult.filemap():
503 if sparsematch and not sparsematch(f):
503 if sparsematch and not sparsematch(f):
504 continue
504 continue
505 if m in (
505 if m in (
506 mergestatemod.ACTION_CREATED,
506 mergestatemod.ACTION_CREATED,
507 mergestatemod.ACTION_DELETED_CHANGED,
507 mergestatemod.ACTION_DELETED_CHANGED,
508 mergestatemod.ACTION_CREATED_MERGE,
508 mergestatemod.ACTION_CREATED_MERGE,
509 ):
509 ):
510 files.append((f, hex(mctx.filenode(f))))
510 files.append((f, hex(mctx.filenode(f))))
511 elif m == mergestatemod.ACTION_LOCAL_DIR_RENAME_GET:
511 elif m == mergestatemod.ACTION_LOCAL_DIR_RENAME_GET:
512 f2 = actionargs[0]
512 f2 = actionargs[0]
513 files.append((f2, hex(mctx.filenode(f2))))
513 files.append((f2, hex(mctx.filenode(f2))))
514 # batch fetch the needed files from the server
514 # batch fetch the needed files from the server
515 repo.fileservice.prefetch(files)
515 repo.fileservice.prefetch(files)
516 return orig(repo, wctx, mctx, force, mresult, *args, **kwargs)
516 return orig(repo, wctx, mctx, force, mresult, *args, **kwargs)
517
517
518
518
519 # Prefetch files before status attempts to look at their size and contents
519 # Prefetch files before status attempts to look at their size and contents
520 def checklookup(orig, self, files, mtime_boundary):
520 def checklookup(orig, self, files, mtime_boundary):
521 repo = self._repo
521 repo = self._repo
522 if isenabled(repo):
522 if isenabled(repo):
523 prefetchfiles = []
523 prefetchfiles = []
524 for parent in self._parents:
524 for parent in self._parents:
525 for f in files:
525 for f in files:
526 if f in parent:
526 if f in parent:
527 prefetchfiles.append((f, hex(parent.filenode(f))))
527 prefetchfiles.append((f, hex(parent.filenode(f))))
528 # batch fetch the needed files from the server
528 # batch fetch the needed files from the server
529 repo.fileservice.prefetch(prefetchfiles)
529 repo.fileservice.prefetch(prefetchfiles)
530 return orig(self, files, mtime_boundary)
530 return orig(self, files, mtime_boundary)
531
531
532
532
533 # Prefetch the logic that compares added and removed files for renames
533 # Prefetch the logic that compares added and removed files for renames
534 def findrenames(orig, repo, matcher, added, removed, *args, **kwargs):
534 def findrenames(orig, repo, matcher, added, removed, *args, **kwargs):
535 if isenabled(repo):
535 if isenabled(repo):
536 files = []
536 files = []
537 pmf = repo[b'.'].manifest()
537 pmf = repo[b'.'].manifest()
538 for f in removed:
538 for f in removed:
539 if f in pmf:
539 if f in pmf:
540 files.append((f, hex(pmf[f])))
540 files.append((f, hex(pmf[f])))
541 # batch fetch the needed files from the server
541 # batch fetch the needed files from the server
542 repo.fileservice.prefetch(files)
542 repo.fileservice.prefetch(files)
543 return orig(repo, matcher, added, removed, *args, **kwargs)
543 return orig(repo, matcher, added, removed, *args, **kwargs)
544
544
545
545
546 # prefetch files before pathcopies check
546 # prefetch files before pathcopies check
547 def computeforwardmissing(orig, a, b, match=None):
547 def computeforwardmissing(orig, a, b, match=None):
548 missing = orig(a, b, match=match)
548 missing = orig(a, b, match=match)
549 repo = a._repo
549 repo = a._repo
550 if isenabled(repo):
550 if isenabled(repo):
551 mb = b.manifest()
551 mb = b.manifest()
552
552
553 files = []
553 files = []
554 sparsematch = repo.maybesparsematch(b.rev())
554 sparsematch = repo.maybesparsematch(b.rev())
555 if sparsematch:
555 if sparsematch:
556 sparsemissing = set()
556 sparsemissing = set()
557 for f in missing:
557 for f in missing:
558 if sparsematch(f):
558 if sparsematch(f):
559 files.append((f, hex(mb[f])))
559 files.append((f, hex(mb[f])))
560 sparsemissing.add(f)
560 sparsemissing.add(f)
561 missing = sparsemissing
561 missing = sparsemissing
562
562
563 # batch fetch the needed files from the server
563 # batch fetch the needed files from the server
564 repo.fileservice.prefetch(files)
564 repo.fileservice.prefetch(files)
565 return missing
565 return missing
566
566
567
567
568 # close cache miss server connection after the command has finished
568 # close cache miss server connection after the command has finished
569 def runcommand(orig, lui, repo, *args, **kwargs):
569 def runcommand(orig, lui, repo, *args, **kwargs):
570 fileservice = None
570 fileservice = None
571 # repo can be None when running in chg:
571 # repo can be None when running in chg:
572 # - at startup, reposetup was called because serve is not norepo
572 # - at startup, reposetup was called because serve is not norepo
573 # - a norepo command like "help" is called
573 # - a norepo command like "help" is called
574 if repo and isenabled(repo):
574 if repo and isenabled(repo):
575 fileservice = repo.fileservice
575 fileservice = repo.fileservice
576 try:
576 try:
577 return orig(lui, repo, *args, **kwargs)
577 return orig(lui, repo, *args, **kwargs)
578 finally:
578 finally:
579 if fileservice:
579 if fileservice:
580 fileservice.close()
580 fileservice.close()
581
581
582
582
583 # prevent strip from stripping remotefilelogs
583 # prevent strip from stripping remotefilelogs
584 def _collectbrokencsets(orig, repo, files, striprev):
584 def _collectbrokencsets(orig, repo, files, striprev):
585 if isenabled(repo):
585 if isenabled(repo):
586 files = list([f for f in files if not repo.shallowmatch(f)])
586 files = [f for f in files if not repo.shallowmatch(f)]
587 return orig(repo, files, striprev)
587 return orig(repo, files, striprev)
588
588
589
589
590 # changectx wrappers
590 # changectx wrappers
591 def filectx(orig, self, path, fileid=None, filelog=None):
591 def filectx(orig, self, path, fileid=None, filelog=None):
592 if fileid is None:
592 if fileid is None:
593 fileid = self.filenode(path)
593 fileid = self.filenode(path)
594 if isenabled(self._repo) and self._repo.shallowmatch(path):
594 if isenabled(self._repo) and self._repo.shallowmatch(path):
595 return remotefilectx.remotefilectx(
595 return remotefilectx.remotefilectx(
596 self._repo, path, fileid=fileid, changectx=self, filelog=filelog
596 self._repo, path, fileid=fileid, changectx=self, filelog=filelog
597 )
597 )
598 return orig(self, path, fileid=fileid, filelog=filelog)
598 return orig(self, path, fileid=fileid, filelog=filelog)
599
599
600
600
601 def workingfilectx(orig, self, path, filelog=None):
601 def workingfilectx(orig, self, path, filelog=None):
602 if isenabled(self._repo) and self._repo.shallowmatch(path):
602 if isenabled(self._repo) and self._repo.shallowmatch(path):
603 return remotefilectx.remoteworkingfilectx(
603 return remotefilectx.remoteworkingfilectx(
604 self._repo, path, workingctx=self, filelog=filelog
604 self._repo, path, workingctx=self, filelog=filelog
605 )
605 )
606 return orig(self, path, filelog=filelog)
606 return orig(self, path, filelog=filelog)
607
607
608
608
609 # prefetch required revisions before a diff
609 # prefetch required revisions before a diff
610 def trydiff(
610 def trydiff(
611 orig,
611 orig,
612 repo,
612 repo,
613 revs,
613 revs,
614 ctx1,
614 ctx1,
615 ctx2,
615 ctx2,
616 modified,
616 modified,
617 added,
617 added,
618 removed,
618 removed,
619 copy,
619 copy,
620 getfilectx,
620 getfilectx,
621 *args,
621 *args,
622 **kwargs
622 **kwargs
623 ):
623 ):
624 if isenabled(repo):
624 if isenabled(repo):
625 prefetch = []
625 prefetch = []
626 mf1 = ctx1.manifest()
626 mf1 = ctx1.manifest()
627 for fname in modified + added + removed:
627 for fname in modified + added + removed:
628 if fname in mf1:
628 if fname in mf1:
629 fnode = getfilectx(fname, ctx1).filenode()
629 fnode = getfilectx(fname, ctx1).filenode()
630 # fnode can be None if it's a edited working ctx file
630 # fnode can be None if it's a edited working ctx file
631 if fnode:
631 if fnode:
632 prefetch.append((fname, hex(fnode)))
632 prefetch.append((fname, hex(fnode)))
633 if fname not in removed:
633 if fname not in removed:
634 fnode = getfilectx(fname, ctx2).filenode()
634 fnode = getfilectx(fname, ctx2).filenode()
635 if fnode:
635 if fnode:
636 prefetch.append((fname, hex(fnode)))
636 prefetch.append((fname, hex(fnode)))
637
637
638 repo.fileservice.prefetch(prefetch)
638 repo.fileservice.prefetch(prefetch)
639
639
640 return orig(
640 return orig(
641 repo,
641 repo,
642 revs,
642 revs,
643 ctx1,
643 ctx1,
644 ctx2,
644 ctx2,
645 modified,
645 modified,
646 added,
646 added,
647 removed,
647 removed,
648 copy,
648 copy,
649 getfilectx,
649 getfilectx,
650 *args,
650 *args,
651 **kwargs
651 **kwargs
652 )
652 )
653
653
654
654
655 # Prevent verify from processing files
655 # Prevent verify from processing files
656 # a stub for mercurial.hg.verify()
656 # a stub for mercurial.hg.verify()
657 def _verify(orig, repo, level=None):
657 def _verify(orig, repo, level=None):
658 lock = repo.lock()
658 lock = repo.lock()
659 try:
659 try:
660 return shallowverifier.shallowverifier(repo).verify()
660 return shallowverifier.shallowverifier(repo).verify()
661 finally:
661 finally:
662 lock.release()
662 lock.release()
663
663
664
664
665 clientonetime = False
665 clientonetime = False
666
666
667
667
668 def onetimeclientsetup(ui):
668 def onetimeclientsetup(ui):
669 global clientonetime
669 global clientonetime
670 if clientonetime:
670 if clientonetime:
671 return
671 return
672 clientonetime = True
672 clientonetime = True
673
673
674 # Don't commit filelogs until we know the commit hash, since the hash
674 # Don't commit filelogs until we know the commit hash, since the hash
675 # is present in the filelog blob.
675 # is present in the filelog blob.
676 # This violates Mercurial's filelog->manifest->changelog write order,
676 # This violates Mercurial's filelog->manifest->changelog write order,
677 # but is generally fine for client repos.
677 # but is generally fine for client repos.
678 pendingfilecommits = []
678 pendingfilecommits = []
679
679
680 def addrawrevision(
680 def addrawrevision(
681 orig,
681 orig,
682 self,
682 self,
683 rawtext,
683 rawtext,
684 transaction,
684 transaction,
685 link,
685 link,
686 p1,
686 p1,
687 p2,
687 p2,
688 node,
688 node,
689 flags,
689 flags,
690 cachedelta=None,
690 cachedelta=None,
691 _metatuple=None,
691 _metatuple=None,
692 ):
692 ):
693 if isinstance(link, int):
693 if isinstance(link, int):
694 pendingfilecommits.append(
694 pendingfilecommits.append(
695 (
695 (
696 self,
696 self,
697 rawtext,
697 rawtext,
698 transaction,
698 transaction,
699 link,
699 link,
700 p1,
700 p1,
701 p2,
701 p2,
702 node,
702 node,
703 flags,
703 flags,
704 cachedelta,
704 cachedelta,
705 _metatuple,
705 _metatuple,
706 )
706 )
707 )
707 )
708 return node
708 return node
709 else:
709 else:
710 return orig(
710 return orig(
711 self,
711 self,
712 rawtext,
712 rawtext,
713 transaction,
713 transaction,
714 link,
714 link,
715 p1,
715 p1,
716 p2,
716 p2,
717 node,
717 node,
718 flags,
718 flags,
719 cachedelta,
719 cachedelta,
720 _metatuple=_metatuple,
720 _metatuple=_metatuple,
721 )
721 )
722
722
723 extensions.wrapfunction(
723 extensions.wrapfunction(
724 remotefilelog.remotefilelog, 'addrawrevision', addrawrevision
724 remotefilelog.remotefilelog, 'addrawrevision', addrawrevision
725 )
725 )
726
726
727 def changelogadd(orig, self, *args, **kwargs):
727 def changelogadd(orig, self, *args, **kwargs):
728 oldlen = len(self)
728 oldlen = len(self)
729 node = orig(self, *args, **kwargs)
729 node = orig(self, *args, **kwargs)
730 newlen = len(self)
730 newlen = len(self)
731 if oldlen != newlen:
731 if oldlen != newlen:
732 for oldargs in pendingfilecommits:
732 for oldargs in pendingfilecommits:
733 log, rt, tr, link, p1, p2, n, fl, c, m = oldargs
733 log, rt, tr, link, p1, p2, n, fl, c, m = oldargs
734 linknode = self.node(link)
734 linknode = self.node(link)
735 if linknode == node:
735 if linknode == node:
736 log.addrawrevision(rt, tr, linknode, p1, p2, n, fl, c, m)
736 log.addrawrevision(rt, tr, linknode, p1, p2, n, fl, c, m)
737 else:
737 else:
738 raise error.ProgrammingError(
738 raise error.ProgrammingError(
739 b'pending multiple integer revisions are not supported'
739 b'pending multiple integer revisions are not supported'
740 )
740 )
741 else:
741 else:
742 # "link" is actually wrong here (it is set to len(changelog))
742 # "link" is actually wrong here (it is set to len(changelog))
743 # if changelog remains unchanged, skip writing file revisions
743 # if changelog remains unchanged, skip writing file revisions
744 # but still do a sanity check about pending multiple revisions
744 # but still do a sanity check about pending multiple revisions
745 if len({x[3] for x in pendingfilecommits}) > 1:
745 if len({x[3] for x in pendingfilecommits}) > 1:
746 raise error.ProgrammingError(
746 raise error.ProgrammingError(
747 b'pending multiple integer revisions are not supported'
747 b'pending multiple integer revisions are not supported'
748 )
748 )
749 del pendingfilecommits[:]
749 del pendingfilecommits[:]
750 return node
750 return node
751
751
752 extensions.wrapfunction(changelog.changelog, 'add', changelogadd)
752 extensions.wrapfunction(changelog.changelog, 'add', changelogadd)
753
753
754
754
755 def getrenamedfn(orig, repo, endrev=None):
755 def getrenamedfn(orig, repo, endrev=None):
756 if not isenabled(repo) or copies.usechangesetcentricalgo(repo):
756 if not isenabled(repo) or copies.usechangesetcentricalgo(repo):
757 return orig(repo, endrev)
757 return orig(repo, endrev)
758
758
759 rcache = {}
759 rcache = {}
760
760
761 def getrenamed(fn, rev):
761 def getrenamed(fn, rev):
762 """looks up all renames for a file (up to endrev) the first
762 """looks up all renames for a file (up to endrev) the first
763 time the file is given. It indexes on the changerev and only
763 time the file is given. It indexes on the changerev and only
764 parses the manifest if linkrev != changerev.
764 parses the manifest if linkrev != changerev.
765 Returns rename info for fn at changerev rev."""
765 Returns rename info for fn at changerev rev."""
766 if rev in rcache.setdefault(fn, {}):
766 if rev in rcache.setdefault(fn, {}):
767 return rcache[fn][rev]
767 return rcache[fn][rev]
768
768
769 try:
769 try:
770 fctx = repo[rev].filectx(fn)
770 fctx = repo[rev].filectx(fn)
771 for ancestor in fctx.ancestors():
771 for ancestor in fctx.ancestors():
772 if ancestor.path() == fn:
772 if ancestor.path() == fn:
773 renamed = ancestor.renamed()
773 renamed = ancestor.renamed()
774 rcache[fn][ancestor.rev()] = renamed and renamed[0]
774 rcache[fn][ancestor.rev()] = renamed and renamed[0]
775
775
776 renamed = fctx.renamed()
776 renamed = fctx.renamed()
777 return renamed and renamed[0]
777 return renamed and renamed[0]
778 except error.LookupError:
778 except error.LookupError:
779 return None
779 return None
780
780
781 return getrenamed
781 return getrenamed
782
782
783
783
784 def filelogrevset(orig, repo, subset, x):
784 def filelogrevset(orig, repo, subset, x):
785 """``filelog(pattern)``
785 """``filelog(pattern)``
786 Changesets connected to the specified filelog.
786 Changesets connected to the specified filelog.
787
787
788 For performance reasons, ``filelog()`` does not show every changeset
788 For performance reasons, ``filelog()`` does not show every changeset
789 that affects the requested file(s). See :hg:`help log` for details. For
789 that affects the requested file(s). See :hg:`help log` for details. For
790 a slower, more accurate result, use ``file()``.
790 a slower, more accurate result, use ``file()``.
791 """
791 """
792
792
793 if not isenabled(repo):
793 if not isenabled(repo):
794 return orig(repo, subset, x)
794 return orig(repo, subset, x)
795
795
796 # i18n: "filelog" is a keyword
796 # i18n: "filelog" is a keyword
797 pat = revset.getstring(x, _(b"filelog requires a pattern"))
797 pat = revset.getstring(x, _(b"filelog requires a pattern"))
798 m = matchmod.match(
798 m = matchmod.match(
799 repo.root, repo.getcwd(), [pat], default=b'relpath', ctx=repo[None]
799 repo.root, repo.getcwd(), [pat], default=b'relpath', ctx=repo[None]
800 )
800 )
801 s = set()
801 s = set()
802
802
803 if not matchmod.patkind(pat):
803 if not matchmod.patkind(pat):
804 # slow
804 # slow
805 for r in subset:
805 for r in subset:
806 ctx = repo[r]
806 ctx = repo[r]
807 cfiles = ctx.files()
807 cfiles = ctx.files()
808 for f in m.files():
808 for f in m.files():
809 if f in cfiles:
809 if f in cfiles:
810 s.add(ctx.rev())
810 s.add(ctx.rev())
811 break
811 break
812 else:
812 else:
813 # partial
813 # partial
814 files = (f for f in repo[None] if m(f))
814 files = (f for f in repo[None] if m(f))
815 for f in files:
815 for f in files:
816 fctx = repo[None].filectx(f)
816 fctx = repo[None].filectx(f)
817 s.add(fctx.linkrev())
817 s.add(fctx.linkrev())
818 for actx in fctx.ancestors():
818 for actx in fctx.ancestors():
819 s.add(actx.linkrev())
819 s.add(actx.linkrev())
820
820
821 return smartset.baseset([r for r in subset if r in s])
821 return smartset.baseset([r for r in subset if r in s])
822
822
823
823
824 @command(b'gc', [], _(b'hg gc [REPO...]'), norepo=True)
824 @command(b'gc', [], _(b'hg gc [REPO...]'), norepo=True)
825 def gc(ui, *args, **opts):
825 def gc(ui, *args, **opts):
826 """garbage collect the client and server filelog caches"""
826 """garbage collect the client and server filelog caches"""
827 cachepaths = set()
827 cachepaths = set()
828
828
829 # get the system client cache
829 # get the system client cache
830 systemcache = shallowutil.getcachepath(ui, allowempty=True)
830 systemcache = shallowutil.getcachepath(ui, allowempty=True)
831 if systemcache:
831 if systemcache:
832 cachepaths.add(systemcache)
832 cachepaths.add(systemcache)
833
833
834 # get repo client and server cache
834 # get repo client and server cache
835 repopaths = []
835 repopaths = []
836 pwd = ui.environ.get(b'PWD')
836 pwd = ui.environ.get(b'PWD')
837 if pwd:
837 if pwd:
838 repopaths.append(pwd)
838 repopaths.append(pwd)
839
839
840 repopaths.extend(args)
840 repopaths.extend(args)
841 repos = []
841 repos = []
842 for repopath in repopaths:
842 for repopath in repopaths:
843 try:
843 try:
844 repo = hg.peer(ui, {}, repopath)
844 repo = hg.peer(ui, {}, repopath)
845 repos.append(repo)
845 repos.append(repo)
846
846
847 repocache = shallowutil.getcachepath(repo.ui, allowempty=True)
847 repocache = shallowutil.getcachepath(repo.ui, allowempty=True)
848 if repocache:
848 if repocache:
849 cachepaths.add(repocache)
849 cachepaths.add(repocache)
850 except error.RepoError:
850 except error.RepoError:
851 pass
851 pass
852
852
853 # gc client cache
853 # gc client cache
854 for cachepath in cachepaths:
854 for cachepath in cachepaths:
855 gcclient(ui, cachepath)
855 gcclient(ui, cachepath)
856
856
857 # gc server cache
857 # gc server cache
858 for repo in repos:
858 for repo in repos:
859 remotefilelogserver.gcserver(ui, repo._repo)
859 remotefilelogserver.gcserver(ui, repo._repo)
860
860
861
861
862 def gcclient(ui, cachepath):
862 def gcclient(ui, cachepath):
863 # get list of repos that use this cache
863 # get list of repos that use this cache
864 repospath = os.path.join(cachepath, b'repos')
864 repospath = os.path.join(cachepath, b'repos')
865 if not os.path.exists(repospath):
865 if not os.path.exists(repospath):
866 ui.warn(_(b"no known cache at %s\n") % cachepath)
866 ui.warn(_(b"no known cache at %s\n") % cachepath)
867 return
867 return
868
868
869 reposfile = open(repospath, b'rb')
869 reposfile = open(repospath, b'rb')
870 repos = {r[:-1] for r in reposfile.readlines()}
870 repos = {r[:-1] for r in reposfile.readlines()}
871 reposfile.close()
871 reposfile.close()
872
872
873 # build list of useful files
873 # build list of useful files
874 validrepos = []
874 validrepos = []
875 keepkeys = set()
875 keepkeys = set()
876
876
877 sharedcache = None
877 sharedcache = None
878 filesrepacked = False
878 filesrepacked = False
879
879
880 count = 0
880 count = 0
881 progress = ui.makeprogress(
881 progress = ui.makeprogress(
882 _(b"analyzing repositories"), unit=b"repos", total=len(repos)
882 _(b"analyzing repositories"), unit=b"repos", total=len(repos)
883 )
883 )
884 for path in repos:
884 for path in repos:
885 progress.update(count)
885 progress.update(count)
886 count += 1
886 count += 1
887 try:
887 try:
888 path = util.expandpath(os.path.normpath(path))
888 path = util.expandpath(os.path.normpath(path))
889 except TypeError as e:
889 except TypeError as e:
890 ui.warn(_(b"warning: malformed path: %r:%s\n") % (path, e))
890 ui.warn(_(b"warning: malformed path: %r:%s\n") % (path, e))
891 traceback.print_exc()
891 traceback.print_exc()
892 continue
892 continue
893 try:
893 try:
894 peer = hg.peer(ui, {}, path)
894 peer = hg.peer(ui, {}, path)
895 repo = peer._repo
895 repo = peer._repo
896 except error.RepoError:
896 except error.RepoError:
897 continue
897 continue
898
898
899 validrepos.append(path)
899 validrepos.append(path)
900
900
901 # Protect against any repo or config changes that have happened since
901 # Protect against any repo or config changes that have happened since
902 # this repo was added to the repos file. We'd rather this loop succeed
902 # this repo was added to the repos file. We'd rather this loop succeed
903 # and too much be deleted, than the loop fail and nothing gets deleted.
903 # and too much be deleted, than the loop fail and nothing gets deleted.
904 if not isenabled(repo):
904 if not isenabled(repo):
905 continue
905 continue
906
906
907 if not hasattr(repo, 'name'):
907 if not hasattr(repo, 'name'):
908 ui.warn(
908 ui.warn(
909 _(b"repo %s is a misconfigured remotefilelog repo\n") % path
909 _(b"repo %s is a misconfigured remotefilelog repo\n") % path
910 )
910 )
911 continue
911 continue
912
912
913 # If garbage collection on repack and repack on hg gc are enabled
913 # If garbage collection on repack and repack on hg gc are enabled
914 # then loose files are repacked and garbage collected.
914 # then loose files are repacked and garbage collected.
915 # Otherwise regular garbage collection is performed.
915 # Otherwise regular garbage collection is performed.
916 repackonhggc = repo.ui.configbool(b'remotefilelog', b'repackonhggc')
916 repackonhggc = repo.ui.configbool(b'remotefilelog', b'repackonhggc')
917 gcrepack = repo.ui.configbool(b'remotefilelog', b'gcrepack')
917 gcrepack = repo.ui.configbool(b'remotefilelog', b'gcrepack')
918 if repackonhggc and gcrepack:
918 if repackonhggc and gcrepack:
919 try:
919 try:
920 repackmod.incrementalrepack(repo)
920 repackmod.incrementalrepack(repo)
921 filesrepacked = True
921 filesrepacked = True
922 continue
922 continue
923 except (IOError, repackmod.RepackAlreadyRunning):
923 except (IOError, repackmod.RepackAlreadyRunning):
924 # If repack cannot be performed due to not enough disk space
924 # If repack cannot be performed due to not enough disk space
925 # continue doing garbage collection of loose files w/o repack
925 # continue doing garbage collection of loose files w/o repack
926 pass
926 pass
927
927
928 reponame = repo.name
928 reponame = repo.name
929 if not sharedcache:
929 if not sharedcache:
930 sharedcache = repo.sharedstore
930 sharedcache = repo.sharedstore
931
931
932 # Compute a keepset which is not garbage collected
932 # Compute a keepset which is not garbage collected
933 def keyfn(fname, fnode):
933 def keyfn(fname, fnode):
934 return fileserverclient.getcachekey(reponame, fname, hex(fnode))
934 return fileserverclient.getcachekey(reponame, fname, hex(fnode))
935
935
936 keepkeys = repackmod.keepset(repo, keyfn=keyfn, lastkeepkeys=keepkeys)
936 keepkeys = repackmod.keepset(repo, keyfn=keyfn, lastkeepkeys=keepkeys)
937
937
938 progress.complete()
938 progress.complete()
939
939
940 # write list of valid repos back
940 # write list of valid repos back
941 oldumask = os.umask(0o002)
941 oldumask = os.umask(0o002)
942 try:
942 try:
943 reposfile = open(repospath, b'wb')
943 reposfile = open(repospath, b'wb')
944 reposfile.writelines([(b"%s\n" % r) for r in validrepos])
944 reposfile.writelines([(b"%s\n" % r) for r in validrepos])
945 reposfile.close()
945 reposfile.close()
946 finally:
946 finally:
947 os.umask(oldumask)
947 os.umask(oldumask)
948
948
949 # prune cache
949 # prune cache
950 if sharedcache is not None:
950 if sharedcache is not None:
951 sharedcache.gc(keepkeys)
951 sharedcache.gc(keepkeys)
952 elif not filesrepacked:
952 elif not filesrepacked:
953 ui.warn(_(b"warning: no valid repos in repofile\n"))
953 ui.warn(_(b"warning: no valid repos in repofile\n"))
954
954
955
955
956 def log(orig, ui, repo, *pats, **opts):
956 def log(orig, ui, repo, *pats, **opts):
957 if not isenabled(repo):
957 if not isenabled(repo):
958 return orig(ui, repo, *pats, **opts)
958 return orig(ui, repo, *pats, **opts)
959
959
960 follow = opts.get('follow')
960 follow = opts.get('follow')
961 revs = opts.get('rev')
961 revs = opts.get('rev')
962 if pats:
962 if pats:
963 # Force slowpath for non-follow patterns and follows that start from
963 # Force slowpath for non-follow patterns and follows that start from
964 # non-working-copy-parent revs.
964 # non-working-copy-parent revs.
965 if not follow or revs:
965 if not follow or revs:
966 # This forces the slowpath
966 # This forces the slowpath
967 opts['removed'] = True
967 opts['removed'] = True
968
968
969 # If this is a non-follow log without any revs specified, recommend that
969 # If this is a non-follow log without any revs specified, recommend that
970 # the user add -f to speed it up.
970 # the user add -f to speed it up.
971 if not follow and not revs:
971 if not follow and not revs:
972 match = scmutil.match(repo[b'.'], pats, pycompat.byteskwargs(opts))
972 match = scmutil.match(repo[b'.'], pats, pycompat.byteskwargs(opts))
973 isfile = not match.anypats()
973 isfile = not match.anypats()
974 if isfile:
974 if isfile:
975 for file in match.files():
975 for file in match.files():
976 if not os.path.isfile(repo.wjoin(file)):
976 if not os.path.isfile(repo.wjoin(file)):
977 isfile = False
977 isfile = False
978 break
978 break
979
979
980 if isfile:
980 if isfile:
981 ui.warn(
981 ui.warn(
982 _(
982 _(
983 b"warning: file log can be slow on large repos - "
983 b"warning: file log can be slow on large repos - "
984 + b"use -f to speed it up\n"
984 + b"use -f to speed it up\n"
985 )
985 )
986 )
986 )
987
987
988 return orig(ui, repo, *pats, **opts)
988 return orig(ui, repo, *pats, **opts)
989
989
990
990
991 def revdatelimit(ui, revset):
991 def revdatelimit(ui, revset):
992 """Update revset so that only changesets no older than 'prefetchdays' days
992 """Update revset so that only changesets no older than 'prefetchdays' days
993 are included. The default value is set to 14 days. If 'prefetchdays' is set
993 are included. The default value is set to 14 days. If 'prefetchdays' is set
994 to zero or negative value then date restriction is not applied.
994 to zero or negative value then date restriction is not applied.
995 """
995 """
996 days = ui.configint(b'remotefilelog', b'prefetchdays')
996 days = ui.configint(b'remotefilelog', b'prefetchdays')
997 if days > 0:
997 if days > 0:
998 revset = b'(%s) & date(-%s)' % (revset, days)
998 revset = b'(%s) & date(-%s)' % (revset, days)
999 return revset
999 return revset
1000
1000
1001
1001
def readytofetch(repo):
    """Check that enough time has passed since the last background prefetch.
    This only relates to prefetches after operations that change the working
    copy parent. Default delay between background prefetches is 2 minutes.

    Returns True when a new background prefetch may start now (and stamps
    the marker file so subsequent callers measure from this moment).
    """
    timeout = repo.ui.configint(b'remotefilelog', b'prefetchdelay')
    # 'lastprefetch' is an empty marker file whose mtime records when the
    # last background prefetch was allowed to start.
    fname = repo.vfs.join(b'lastprefetch')

    ready = False
    # opening in append mode also creates the marker file if it is missing
    with open(fname, b'a'):
        # the with construct above is used to avoid race conditions
        modtime = os.path.getmtime(fname)
        if (time.time() - modtime) > timeout:
            # stamp the file so the next caller measures from now
            os.utime(fname, None)
            ready = True

    return ready
1019
1019
1020
1020
def wcpprefetch(ui, repo, **kwargs):
    """Prefetches in background revisions specified by bgprefetchrevs revset.
    Does background repack if backgroundrepack flag is set in config.
    """
    shallow = isenabled(repo)
    bgprefetchrevs = ui.config(b'remotefilelog', b'bgprefetchrevs')
    isready = readytofetch(repo)

    # nothing to do unless this is a shallow repo with a configured revset
    # and the rate limiter allows another prefetch
    if not (shallow and bgprefetchrevs and isready):
        return

    bgrepack = repo.ui.configbool(b'remotefilelog', b'backgroundrepack')
    # update a revset with a date limit
    bgprefetchrevs = revdatelimit(ui, bgprefetchrevs)

    def anon(unused_success):
        # guard so the prefetch is started at most once per repo object
        if hasattr(repo, 'ranprefetch') and repo.ranprefetch:
            return
        repo.ranprefetch = True
        repo.backgroundprefetch(bgprefetchrevs, repack=bgrepack)

    # deferred via _afterlock so it runs once the current locked
    # operation has completed
    repo._afterlock(anon)
1043
1043
1044
1044
def pull(orig, ui, repo, *pats, **opts):
    """Wrapper around the pull command: after the pull, optionally prefetch
    file contents (in the foreground or background) and/or start a
    background repack, as configured in the [remotefilelog] section.
    """
    result = orig(ui, repo, *pats, **opts)

    if isenabled(repo):
        # prefetch if it's configured
        prefetchrevset = ui.config(b'remotefilelog', b'pullprefetch')
        bgrepack = repo.ui.configbool(b'remotefilelog', b'backgroundrepack')
        bgprefetch = repo.ui.configbool(b'remotefilelog', b'backgroundprefetch')

        if prefetchrevset:
            ui.status(_(b"prefetching file contents\n"))
            revs = scmutil.revrange(repo, [prefetchrevset])
            base = repo[b'.'].rev()
            if bgprefetch:
                # the repack flag is forwarded to the background process
                repo.backgroundprefetch(prefetchrevset, repack=bgrepack)
            else:
                repo.prefetch(revs, base=base)
                if bgrepack:
                    repackmod.backgroundrepack(repo, incremental=True)
        elif bgrepack:
            # no prefetch requested, but a repack still is
            repackmod.backgroundrepack(repo, incremental=True)

    return result
1068
1068
1069
1069
def exchangepull(orig, repo, remote, *args, **kwargs):
    """Wrapper for exchange pull that makes sure remotefilelog's bundle2
    capability is advertised to the peer during the pull.
    """
    # Hook into the callstream/getbundle to insert bundle capabilities
    # during a pull.
    def localgetbundle(
        orig, source, heads=None, common=None, bundlecaps=None, **kwargs
    ):
        # ensure our bundle2 capability is part of the getbundle request
        if not bundlecaps:
            bundlecaps = set()
        bundlecaps.add(constants.BUNDLE2_CAPABLITY)
        return orig(
            source, heads=heads, common=common, bundlecaps=bundlecaps, **kwargs
        )

    if hasattr(remote, '_callstream'):
        # wire peer: record the local repo on the peer object — presumably
        # consumed by the _callstream override elsewhere in this extension
        remote._localrepo = repo
    elif hasattr(remote, 'getbundle'):
        # local peer: wrap getbundle directly
        extensions.wrapfunction(remote, 'getbundle', localgetbundle)

    return orig(repo, remote, *args, **kwargs)
1089
1089
1090
1090
def _fileprefetchhook(repo, revmatches):
    """Batch-prefetch the files selected by ``(rev, match)`` pairs.

    Collects (path, hex filenode) pairs for every matched file present in
    each revision's manifest and hands them to the file service in a single
    request. When sparse is active, files outside the sparse config are
    skipped.
    """
    if isenabled(repo):
        allfiles = []
        for rev, match in revmatches:
            # skip the working-directory pseudo-revision
            if rev == wdirrev or rev is None:
                continue
            ctx = repo[rev]
            mf = ctx.manifest()
            sparsematch = repo.maybesparsematch(ctx.rev())
            for path in ctx.walk(match):
                if (not sparsematch or sparsematch(path)) and path in mf:
                    allfiles.append((path, hex(mf[path])))
        repo.fileservice.prefetch(allfiles)
1104
1104
1105
1105
@command(
    b'debugremotefilelog',
    [
        (b'd', b'decompress', None, _(b'decompress the filelog first')),
    ],
    _(b'hg debugremotefilelog <path>'),
    norepo=True,
)
def debugremotefilelog(ui, path, **opts):
    # thin wrapper: all work happens in the debugcommands module
    return debugcommands.debugremotefilelog(ui, path, **opts)
1116
1116
1117
1117
@command(
    b'verifyremotefilelog',
    [
        (b'd', b'decompress', None, _(b'decompress the filelogs first')),
    ],
    _(b'hg verifyremotefilelogs <directory>'),
    norepo=True,
)
def verifyremotefilelog(ui, path, **opts):
    # thin wrapper: all work happens in the debugcommands module
    return debugcommands.verifyremotefilelog(ui, path, **opts)
1128
1128
1129
1129
@command(
    b'debugdatapack',
    [
        (b'', b'long', None, _(b'print the long hashes')),
        (b'', b'node', b'', _(b'dump the contents of node'), b'NODE'),
    ],
    _(b'hg debugdatapack <paths>'),
    norepo=True,
)
def debugdatapack(ui, *paths, **opts):
    # thin wrapper: all work happens in the debugcommands module
    return debugcommands.debugdatapack(ui, *paths, **opts)
1141
1141
1142
1142
@command(b'debughistorypack', [], _(b'hg debughistorypack <path>'), norepo=True)
def debughistorypack(ui, path, **opts):
    # thin wrapper; note **opts is accepted but not forwarded
    return debugcommands.debughistorypack(ui, path)
1146
1146
1147
1147
@command(b'debugkeepset', [], _(b'hg debugkeepset'))
def debugkeepset(ui, repo, **opts):
    # The command is used to measure keepset computation time
    def keyfn(fname, fnode):
        # cache key for a (filename, filenode) pair in this repo's store
        return fileserverclient.getcachekey(repo.name, fname, hex(fnode))

    repackmod.keepset(repo, keyfn)
    return
1156
1156
1157
1157
@command(b'debugwaitonrepack', [], _(b'hg debugwaitonrepack'))
def debugwaitonrepack(ui, repo, **opts):
    # wait for a background repack to finish (see debugcommands)
    return debugcommands.debugwaitonrepack(repo)
1161
1161
1162
1162
@command(b'debugwaitonprefetch', [], _(b'hg debugwaitonprefetch'))
def debugwaitonprefetch(ui, repo, **opts):
    # wait for a background prefetch to finish (see debugcommands)
    return debugcommands.debugwaitonprefetch(repo)
1166
1166
1167
1167
def resolveprefetchopts(ui, opts):
    """Fill in default 'rev' and 'base' values for prefetch options.

    When no revisions were requested, a default revset is assembled from
    the working copy parent, draft changesets and the configured
    'pullprefetch' / 'bgprefetchrevs' revsets, then limited by the
    'prefetchdays' window. Mutates and returns ``opts``.
    """
    if not opts.get(b'rev'):
        parts = [b'.', b'draft()']
        for configname in (b'pullprefetch', b'bgprefetchrevs'):
            configured = ui.config(b'remotefilelog', configname, None)
            if configured:
                parts.append(b'(%s)' % configured)
        combined = b'+'.join(parts)

        # restrict the default revset to recent changesets only
        combined = revdatelimit(ui, combined)

        opts[b'rev'] = [combined]

    if not opts.get(b'base'):
        opts[b'base'] = None

    return opts
1189
1189
1190
1190
@command(
    b'prefetch',
    [
        (b'r', b'rev', [], _(b'prefetch the specified revisions'), _(b'REV')),
        (b'', b'repack', False, _(b'run repack after prefetch')),
        (b'b', b'base', b'', _(b"rev that is assumed to already be local")),
    ]
    + commands.walkopts,
    _(b'hg prefetch [OPTIONS] [FILE...]'),
    helpcategory=command.CATEGORY_MAINTENANCE,
)
def prefetch(ui, repo, *pats, **opts):
    """prefetch file revisions from the server

    Prefetches file revisions for the specified revs and stores them in the
    local remotefilelog cache. If no rev is specified, the default rev is
    used which is the union of dot, draft, pullprefetch and bgprefetchrev.
    File names or patterns can be used to limit which files are downloaded.

    Return 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    if not isenabled(repo):
        raise error.Abort(_(b"repo is not shallow"))

    # fill in default rev/base values when not given on the command line
    opts = resolveprefetchopts(ui, opts)
    revs = scmutil.revrange(repo, opts.get(b'rev'))
    repo.prefetch(revs, opts.get(b'base'), pats, opts)

    # Run repack in background
    if opts.get(b'repack'):
        repackmod.backgroundrepack(repo, incremental=True)
1223
1223
1224
1224
@command(
    b'repack',
    [
        (b'', b'background', None, _(b'run in a background process'), None),
        (b'', b'incremental', None, _(b'do an incremental repack'), None),
        (
            b'',
            b'packsonly',
            None,
            _(b'only repack packs (skip loose objects)'),
            None,
        ),
    ],
    _(b'hg repack [OPTIONS]'),
)
def repack_(ui, repo, *pats, **opts):
    # Repack the remotefilelog store, either inline or — with --background —
    # in a separately spawned process.
    if opts.get('background'):
        repackmod.backgroundrepack(
            repo,
            incremental=opts.get('incremental'),
            packsonly=opts.get('packsonly', False),
        )
        return

    options = {b'packsonly': opts.get('packsonly')}

    try:
        if opts.get('incremental'):
            repackmod.incrementalrepack(repo, options=options)
        else:
            repackmod.fullrepack(repo, options=options)
    except repackmod.RepackAlreadyRunning as ex:
        # Don't propagate the exception if the repack is already in
        # progress, since we want the command to exit 0.
        repo.ui.warn(b'%s\n' % ex)
@@ -1,446 +1,446 b''
1 # remotefilelogserver.py - server logic for a remotefilelog server
1 # remotefilelogserver.py - server logic for a remotefilelog server
2 #
2 #
3 # Copyright 2013 Facebook, Inc.
3 # Copyright 2013 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import os
8 import os
9 import stat
9 import stat
10 import time
10 import time
11 import zlib
11 import zlib
12
12
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14 from mercurial.node import bin, hex
14 from mercurial.node import bin, hex
15 from mercurial.pycompat import open
15 from mercurial.pycompat import open
16 from mercurial import (
16 from mercurial import (
17 changegroup,
17 changegroup,
18 changelog,
18 changelog,
19 context,
19 context,
20 error,
20 error,
21 extensions,
21 extensions,
22 match,
22 match,
23 scmutil,
23 scmutil,
24 store,
24 store,
25 streamclone,
25 streamclone,
26 util,
26 util,
27 wireprotoserver,
27 wireprotoserver,
28 wireprototypes,
28 wireprototypes,
29 wireprotov1server,
29 wireprotov1server,
30 )
30 )
31 from . import (
31 from . import (
32 constants,
32 constants,
33 shallowutil,
33 shallowutil,
34 )
34 )
35
35
36 _sshv1server = wireprotoserver.sshv1protocolhandler
36 _sshv1server = wireprotoserver.sshv1protocolhandler
37
37
38
38
def setupserver(ui, repo):
    """Sets up a normal Mercurial repo so it can serve files to shallow repos."""
    onetimesetup(ui)

    # don't send files to shallow clients during pulls
    def generatefiles(
        orig, self, changedfiles, linknodes, commonrevs, source, *args, **kwargs
    ):
        caps = self._bundlecaps or []
        if constants.BUNDLE2_CAPABLITY in caps:
            # only send files that don't match the specified patterns
            includepattern = None
            excludepattern = None
            # the shallow patterns are transported as bundle capabilities,
            # NUL-separated within each capability value
            for cap in self._bundlecaps or []:
                if cap.startswith(b"includepattern="):
                    includepattern = cap[len(b"includepattern=") :].split(b'\0')
                elif cap.startswith(b"excludepattern="):
                    excludepattern = cap[len(b"excludepattern=") :].split(b'\0')

            m = match.always()
            if includepattern or excludepattern:
                m = match.match(
                    repo.root, b'', None, includepattern, excludepattern
                )

            # strip files matching the shallow pattern from the changegroup
            changedfiles = [f for f in changedfiles if not m(f)]
        return orig(
            self, changedfiles, linknodes, commonrevs, source, *args, **kwargs
        )

    extensions.wrapfunction(
        changegroup.cgpacker, 'generatefiles', generatefiles
    )
72
72
73
73
# module-level guard: ensures onetimesetup() runs its wireprotocol
# configuration only once per process
onetime = False
75
75
76
76
def onetimesetup(ui):
    """Configures the wireprotocol for both clients and servers.

    Registers the x_rfl_* wire commands, installs the shallow variant of
    'stream_out', and wraps several core functions so a server can talk to
    shallow clients. Runs at most once per process (see the module-level
    ``onetime`` flag).
    """
    global onetime
    if onetime:
        return
    onetime = True

    # support file content requests
    wireprotov1server.wireprotocommand(
        b'x_rfl_getflogheads', b'path', permission=b'pull'
    )(getflogheads)
    wireprotov1server.wireprotocommand(
        b'x_rfl_getfiles', b'', permission=b'pull'
    )(getfiles)
    wireprotov1server.wireprotocommand(
        b'x_rfl_getfile', b'file node', permission=b'pull'
    )(getfile)

    # mutable state shared between stream_out_shallow and the
    # _walkstreamfiles wrapper below (set for the duration of one clone)
    class streamstate:
        match = None
        shallowremote = False
        noflatmf = False

    state = streamstate()

    def stream_out_shallow(repo, proto, other):
        # stream_out variant for shallow clients: honors include/exclude
        # patterns supplied by the client in 'other'
        includepattern = None
        excludepattern = None
        raw = other.get(b'includepattern')
        if raw:
            includepattern = raw.split(b'\0')
        raw = other.get(b'excludepattern')
        if raw:
            excludepattern = raw.split(b'\0')

        # save and restore the shared state around the streaming call
        oldshallow = state.shallowremote
        oldmatch = state.match
        oldnoflatmf = state.noflatmf
        try:
            state.shallowremote = True
            state.match = match.always()
            state.noflatmf = other.get(b'noflatmanifest') == b'True'
            if includepattern or excludepattern:
                state.match = match.match(
                    repo.root, b'', None, includepattern, excludepattern
                )
            streamres = wireprotov1server.stream(repo, proto)

            # Force the first value to execute, so the file list is computed
            # within the try/finally scope
            first = next(streamres.gen)
            second = next(streamres.gen)

            def gen():
                yield first
                yield second
                for value in streamres.gen:
                    yield value

            return wireprototypes.streamres(gen())
        finally:
            state.shallowremote = oldshallow
            state.match = oldmatch
            state.noflatmf = oldnoflatmf

    wireprotov1server.commands[b'stream_out_shallow'] = (
        stream_out_shallow,
        b'*',
    )

    # don't clone filelogs to shallow clients
    def _walkstreamfiles(
        orig, repo, matcher=None, phase=False, obsolescence=False
    ):
        if state.shallowremote:
            # if we are shallow ourselves, stream our local commits
            if shallowutil.isenabled(repo):
                striplen = len(repo.store.path) + 1
                readdir = repo.store.rawvfs.readdir
                visit = [os.path.join(repo.store.path, b'data')]
                # walk the store's data directory, yielding everything that
                # is not a revlog (.i/.d) file
                while visit:
                    p = visit.pop()
                    for f, kind, st in readdir(p, stat=True):
                        fp = p + b'/' + f
                        if kind == stat.S_IFREG:
                            if not fp.endswith(b'.i') and not fp.endswith(
                                b'.d'
                            ):
                                n = util.pconvert(fp[striplen:])
                                d = store.decodedir(n)
                                yield store.SimpleStoreEntry(
                                    entry_path=d,
                                    is_volatile=False,
                                    file_size=st.st_size,
                                )

                        if kind == stat.S_IFDIR:
                            visit.append(fp)

            if scmutil.istreemanifest(repo):
                # tree manifests are streamed even to shallow clients
                for entry in repo.store.data_entries():
                    if not entry.is_revlog:
                        continue
                    if entry.is_manifestlog:
                        yield entry

            # Return .d and .i files that do not match the shallow pattern
            match = state.match
            if match and not match.always():
                for entry in repo.store.data_entries():
                    if not entry.is_revlog:
                        continue
                    if not state.match(entry.target_id):
                        yield entry

            for x in repo.store.top_entries():
                # skip the flat manifest when the client asked for none
                if state.noflatmf and x[1][:11] == b'00manifest.':
                    continue
                yield x

        elif shallowutil.isenabled(repo):
            # don't allow cloning from a shallow repo to a full repo
            # since it would require fetching every version of every
            # file in order to create the revlogs.
            raise error.Abort(
                _(b"Cannot clone from a shallow repo to a full repo.")
            )
        else:
            for x in orig(
                repo, matcher, phase=phase, obsolescence=obsolescence
            ):
                yield x

    extensions.wrapfunction(streamclone, '_walkstreamfiles', _walkstreamfiles)

    # expose remotefilelog capabilities
    def _capabilities(orig, repo, proto):
        caps = orig(repo, proto)
        if shallowutil.isenabled(repo) or ui.configbool(
            b'remotefilelog', b'server'
        ):
            if isinstance(proto, _sshv1server):
                # legacy getfiles method which only works over ssh
                caps.append(constants.NETWORK_CAP_LEGACY_SSH_GETFILES)
            caps.append(b'x_rfl_getflogheads')
            caps.append(b'x_rfl_getfile')
        return caps

    extensions.wrapfunction(wireprotov1server, '_capabilities', _capabilities)

    def _adjustlinkrev(orig, self, *args, **kwargs):
        # When generating file blobs, taking the real path is too slow on large
        # repos, so force it to just return the linkrev directly.
        repo = self._repo
        if hasattr(repo, 'forcelinkrev') and repo.forcelinkrev:
            return self._filelog.linkrev(self._filelog.rev(self._filenode))
        return orig(self, *args, **kwargs)

    extensions.wrapfunction(
        context.basefilectx, '_adjustlinkrev', _adjustlinkrev
    )

    def _iscmd(orig, cmd):
        # hide x_rfl_getfiles from the generic command dispatcher
        if cmd == b'x_rfl_getfiles':
            return False
        return orig(cmd)

    extensions.wrapfunction(wireprotoserver, 'iscmd', _iscmd)
245
245
246
246
def _loadfileblob(repo, cachepath, path, node):
    """Return the zlib-compressed file blob for ``path``@``node``.

    Serves from the on-disk server cache when the entry exists and is
    non-empty; otherwise builds the blob via createfileblob() and writes it
    to the cache (best-effort: write failures are ignored).
    """
    filecachepath = os.path.join(cachepath, path, hex(node))
    if not os.path.exists(filecachepath) or os.path.getsize(filecachepath) == 0:
        filectx = repo.filectx(path, fileid=node)
        if filectx.node() == repo.nullid:
            # lookup failed: the in-memory changelog may be stale —
            # reload it and retry once
            repo.changelog = changelog.changelog(repo.svfs)
            filectx = repo.filectx(path, fileid=node)

        text = createfileblob(filectx)
        # TODO configurable compression engines
        text = zlib.compress(text)

        # everything should be user & group read/writable
        oldumask = os.umask(0o002)
        try:
            dirname = os.path.dirname(filecachepath)
            if not os.path.exists(dirname):
                try:
                    os.makedirs(dirname)
                except FileExistsError:
                    # another process created it concurrently
                    pass

            f = None
            try:
                f = util.atomictempfile(filecachepath, b"wb")
                f.write(text)
            except (IOError, OSError):
                # Don't abort if the user only has permission to read,
                # and not write.
                pass
            finally:
                if f:
                    f.close()
        finally:
            os.umask(oldumask)
    else:
        with open(filecachepath, b"rb") as f:
            text = f.read()
    return text
286
286
287
287
def getflogheads(repo, proto, path):
    """A server api for requesting a filelog's heads.

    The reply lists the hex node of each head, newline separated; the
    null node is excluded.
    """
    nullid = repo.nullid
    hexheads = [hex(h) for h in repo.file(path).heads() if h != nullid]
    return b'\n'.join(hexheads)
293
293
294
294
295 def getfile(repo, proto, file, node):
295 def getfile(repo, proto, file, node):
296 """A server api for requesting a particular version of a file. Can be used
296 """A server api for requesting a particular version of a file. Can be used
297 in batches to request many files at once. The return protocol is:
297 in batches to request many files at once. The return protocol is:
298 <errorcode>\0<data/errormsg> where <errorcode> is 0 for success or
298 <errorcode>\0<data/errormsg> where <errorcode> is 0 for success or
299 non-zero for an error.
299 non-zero for an error.
300
300
301 data is a compressed blob with revlog flag and ancestors information. See
301 data is a compressed blob with revlog flag and ancestors information. See
302 createfileblob for its content.
302 createfileblob for its content.
303 """
303 """
304 if shallowutil.isenabled(repo):
304 if shallowutil.isenabled(repo):
305 return b'1\0' + _(b'cannot fetch remote files from shallow repo')
305 return b'1\0' + _(b'cannot fetch remote files from shallow repo')
306 cachepath = repo.ui.config(b"remotefilelog", b"servercachepath")
306 cachepath = repo.ui.config(b"remotefilelog", b"servercachepath")
307 if not cachepath:
307 if not cachepath:
308 cachepath = os.path.join(repo.path, b"remotefilelogcache")
308 cachepath = os.path.join(repo.path, b"remotefilelogcache")
309 node = bin(node.strip())
309 node = bin(node.strip())
310 if node == repo.nullid:
310 if node == repo.nullid:
311 return b'0\0'
311 return b'0\0'
312 return b'0\0' + _loadfileblob(repo, cachepath, file, node)
312 return b'0\0' + _loadfileblob(repo, cachepath, file, node)
313
313
314
314
315 def getfiles(repo, proto):
315 def getfiles(repo, proto):
316 """A server api for requesting particular versions of particular files."""
316 """A server api for requesting particular versions of particular files."""
317 if shallowutil.isenabled(repo):
317 if shallowutil.isenabled(repo):
318 raise error.Abort(_(b'cannot fetch remote files from shallow repo'))
318 raise error.Abort(_(b'cannot fetch remote files from shallow repo'))
319 if not isinstance(proto, _sshv1server):
319 if not isinstance(proto, _sshv1server):
320 raise error.Abort(_(b'cannot fetch remote files over non-ssh protocol'))
320 raise error.Abort(_(b'cannot fetch remote files over non-ssh protocol'))
321
321
322 def streamer():
322 def streamer():
323 fin = proto._fin
323 fin = proto._fin
324
324
325 cachepath = repo.ui.config(b"remotefilelog", b"servercachepath")
325 cachepath = repo.ui.config(b"remotefilelog", b"servercachepath")
326 if not cachepath:
326 if not cachepath:
327 cachepath = os.path.join(repo.path, b"remotefilelogcache")
327 cachepath = os.path.join(repo.path, b"remotefilelogcache")
328
328
329 while True:
329 while True:
330 request = fin.readline()[:-1]
330 request = fin.readline()[:-1]
331 if not request:
331 if not request:
332 break
332 break
333
333
334 node = bin(request[:40])
334 node = bin(request[:40])
335 if node == repo.nullid:
335 if node == repo.nullid:
336 yield b'0\n'
336 yield b'0\n'
337 continue
337 continue
338
338
339 path = request[40:]
339 path = request[40:]
340
340
341 text = _loadfileblob(repo, cachepath, path, node)
341 text = _loadfileblob(repo, cachepath, path, node)
342
342
343 yield b'%d\n%s' % (len(text), text)
343 yield b'%d\n%s' % (len(text), text)
344
344
345 # it would be better to only flush after processing a whole batch
345 # it would be better to only flush after processing a whole batch
346 # but currently we don't know if there are more requests coming
346 # but currently we don't know if there are more requests coming
347 proto._fout.flush()
347 proto._fout.flush()
348
348
349 return wireprototypes.streamres(streamer())
349 return wireprototypes.streamres(streamer())
350
350
351
351
352 def createfileblob(filectx):
352 def createfileblob(filectx):
353 """
353 """
354 format:
354 format:
355 v0:
355 v0:
356 str(len(rawtext)) + '\0' + rawtext + ancestortext
356 str(len(rawtext)) + '\0' + rawtext + ancestortext
357 v1:
357 v1:
358 'v1' + '\n' + metalist + '\0' + rawtext + ancestortext
358 'v1' + '\n' + metalist + '\0' + rawtext + ancestortext
359 metalist := metalist + '\n' + meta | meta
359 metalist := metalist + '\n' + meta | meta
360 meta := sizemeta | flagmeta
360 meta := sizemeta | flagmeta
361 sizemeta := METAKEYSIZE + str(len(rawtext))
361 sizemeta := METAKEYSIZE + str(len(rawtext))
362 flagmeta := METAKEYFLAG + str(flag)
362 flagmeta := METAKEYFLAG + str(flag)
363
363
364 note: sizemeta must exist. METAKEYFLAG and METAKEYSIZE must have a
364 note: sizemeta must exist. METAKEYFLAG and METAKEYSIZE must have a
365 length of 1.
365 length of 1.
366 """
366 """
367 flog = filectx.filelog()
367 flog = filectx.filelog()
368 frev = filectx.filerev()
368 frev = filectx.filerev()
369 revlogflags = flog._revlog.flags(frev)
369 revlogflags = flog._revlog.flags(frev)
370 if revlogflags == 0:
370 if revlogflags == 0:
371 # normal files
371 # normal files
372 text = filectx.data()
372 text = filectx.data()
373 else:
373 else:
374 # lfs, read raw revision data
374 # lfs, read raw revision data
375 text = flog.rawdata(frev)
375 text = flog.rawdata(frev)
376
376
377 repo = filectx._repo
377 repo = filectx._repo
378
378
379 ancestors = [filectx]
379 ancestors = [filectx]
380
380
381 try:
381 try:
382 repo.forcelinkrev = True
382 repo.forcelinkrev = True
383 ancestors.extend([f for f in filectx.ancestors()])
383 ancestors.extend([f for f in filectx.ancestors()])
384
384
385 ancestortext = b""
385 ancestortext = b""
386 for ancestorctx in ancestors:
386 for ancestorctx in ancestors:
387 parents = ancestorctx.parents()
387 parents = ancestorctx.parents()
388 p1 = repo.nullid
388 p1 = repo.nullid
389 p2 = repo.nullid
389 p2 = repo.nullid
390 if len(parents) > 0:
390 if len(parents) > 0:
391 p1 = parents[0].filenode()
391 p1 = parents[0].filenode()
392 if len(parents) > 1:
392 if len(parents) > 1:
393 p2 = parents[1].filenode()
393 p2 = parents[1].filenode()
394
394
395 copyname = b""
395 copyname = b""
396 rename = ancestorctx.renamed()
396 rename = ancestorctx.renamed()
397 if rename:
397 if rename:
398 copyname = rename[0]
398 copyname = rename[0]
399 linknode = ancestorctx.node()
399 linknode = ancestorctx.node()
400 ancestortext += b"%s%s%s%s%s\0" % (
400 ancestortext += b"%s%s%s%s%s\0" % (
401 ancestorctx.filenode(),
401 ancestorctx.filenode(),
402 p1,
402 p1,
403 p2,
403 p2,
404 linknode,
404 linknode,
405 copyname,
405 copyname,
406 )
406 )
407 finally:
407 finally:
408 repo.forcelinkrev = False
408 repo.forcelinkrev = False
409
409
410 header = shallowutil.buildfileblobheader(len(text), revlogflags)
410 header = shallowutil.buildfileblobheader(len(text), revlogflags)
411
411
412 return b"%s\0%s%s" % (header, text, ancestortext)
412 return b"%s\0%s%s" % (header, text, ancestortext)
413
413
414
414
415 def gcserver(ui, repo):
415 def gcserver(ui, repo):
416 if not repo.ui.configbool(b"remotefilelog", b"server"):
416 if not repo.ui.configbool(b"remotefilelog", b"server"):
417 return
417 return
418
418
419 neededfiles = set()
419 neededfiles = set()
420 heads = repo.revs(b"heads(tip~25000:) - null")
420 heads = repo.revs(b"heads(tip~25000:) - null")
421
421
422 cachepath = repo.vfs.join(b"remotefilelogcache")
422 cachepath = repo.vfs.join(b"remotefilelogcache")
423 for head in heads:
423 for head in heads:
424 mf = repo[head].manifest()
424 mf = repo[head].manifest()
425 for filename, filenode in mf.items():
425 for filename, filenode in mf.items():
426 filecachepath = os.path.join(cachepath, filename, hex(filenode))
426 filecachepath = os.path.join(cachepath, filename, hex(filenode))
427 neededfiles.add(filecachepath)
427 neededfiles.add(filecachepath)
428
428
429 # delete unneeded older files
429 # delete unneeded older files
430 days = repo.ui.configint(b"remotefilelog", b"serverexpiration")
430 days = repo.ui.configint(b"remotefilelog", b"serverexpiration")
431 expiration = time.time() - (days * 24 * 60 * 60)
431 expiration = time.time() - (days * 24 * 60 * 60)
432
432
433 progress = ui.makeprogress(_(b"removing old server cache"), unit=b"files")
433 progress = ui.makeprogress(_(b"removing old server cache"), unit=b"files")
434 progress.update(0)
434 progress.update(0)
435 for root, dirs, files in os.walk(cachepath):
435 for root, dirs, files in os.walk(cachepath):
436 for file in files:
436 for file in files:
437 filepath = os.path.join(root, file)
437 filepath = os.path.join(root, file)
438 progress.increment()
438 progress.increment()
439 if filepath in neededfiles:
439 if filepath in neededfiles:
440 continue
440 continue
441
441
442 stat = os.stat(filepath)
442 stat = os.stat(filepath)
443 if stat.st_mtime < expiration:
443 if stat.st_mtime < expiration:
444 os.remove(filepath)
444 os.remove(filepath)
445
445
446 progress.complete()
446 progress.complete()
@@ -1,1161 +1,1159 b''
1 import distutils.version
1 import distutils.version
2 import os
2 import os
3 import re
3 import re
4 import socket
4 import socket
5 import stat
5 import stat
6 import subprocess
6 import subprocess
7 import sys
7 import sys
8 import tempfile
8 import tempfile
9
9
10 tempprefix = 'hg-hghave-'
10 tempprefix = 'hg-hghave-'
11
11
12 checks = {
12 checks = {
13 "true": (lambda: True, "yak shaving"),
13 "true": (lambda: True, "yak shaving"),
14 "false": (lambda: False, "nail clipper"),
14 "false": (lambda: False, "nail clipper"),
15 "known-bad-output": (lambda: True, "use for currently known bad output"),
15 "known-bad-output": (lambda: True, "use for currently known bad output"),
16 "missing-correct-output": (lambda: False, "use for missing good output"),
16 "missing-correct-output": (lambda: False, "use for missing good output"),
17 }
17 }
18
18
19 try:
19 try:
20 import msvcrt
20 import msvcrt
21
21
22 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
22 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
23 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
23 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
24 except ImportError:
24 except ImportError:
25 pass
25 pass
26
26
27 stdout = getattr(sys.stdout, 'buffer', sys.stdout)
27 stdout = getattr(sys.stdout, 'buffer', sys.stdout)
28 stderr = getattr(sys.stderr, 'buffer', sys.stderr)
28 stderr = getattr(sys.stderr, 'buffer', sys.stderr)
29
29
30
30
31 def _sys2bytes(p):
31 def _sys2bytes(p):
32 if p is None:
32 if p is None:
33 return p
33 return p
34 return p.encode('utf-8')
34 return p.encode('utf-8')
35
35
36
36
37 def _bytes2sys(p):
37 def _bytes2sys(p):
38 if p is None:
38 if p is None:
39 return p
39 return p
40 return p.decode('utf-8')
40 return p.decode('utf-8')
41
41
42
42
43 def check(name, desc):
43 def check(name, desc):
44 """Registers a check function for a feature."""
44 """Registers a check function for a feature."""
45
45
46 def decorator(func):
46 def decorator(func):
47 checks[name] = (func, desc)
47 checks[name] = (func, desc)
48 return func
48 return func
49
49
50 return decorator
50 return decorator
51
51
52
52
53 def checkvers(name, desc, vers):
53 def checkvers(name, desc, vers):
54 """Registers a check function for each of a series of versions.
54 """Registers a check function for each of a series of versions.
55
55
56 vers can be a list or an iterator.
56 vers can be a list or an iterator.
57
57
58 Produces a series of feature checks that have the form <name><vers> without
58 Produces a series of feature checks that have the form <name><vers> without
59 any punctuation (even if there's punctuation in 'vers'; i.e. this produces
59 any punctuation (even if there's punctuation in 'vers'; i.e. this produces
60 'py38', not 'py3.8' or 'py-38')."""
60 'py38', not 'py3.8' or 'py-38')."""
61
61
62 def decorator(func):
62 def decorator(func):
63 def funcv(v):
63 def funcv(v):
64 def f():
64 def f():
65 return func(v)
65 return func(v)
66
66
67 return f
67 return f
68
68
69 for v in vers:
69 for v in vers:
70 v = str(v)
70 v = str(v)
71 f = funcv(v)
71 f = funcv(v)
72 checks['%s%s' % (name, v.replace('.', ''))] = (f, desc % v)
72 checks['%s%s' % (name, v.replace('.', ''))] = (f, desc % v)
73 return func
73 return func
74
74
75 return decorator
75 return decorator
76
76
77
77
78 def checkfeatures(features):
78 def checkfeatures(features):
79 result = {
79 result = {
80 'error': [],
80 'error': [],
81 'missing': [],
81 'missing': [],
82 'skipped': [],
82 'skipped': [],
83 }
83 }
84
84
85 for feature in features:
85 for feature in features:
86 negate = feature.startswith('no-')
86 negate = feature.startswith('no-')
87 if negate:
87 if negate:
88 feature = feature[3:]
88 feature = feature[3:]
89
89
90 if feature not in checks:
90 if feature not in checks:
91 result['missing'].append(feature)
91 result['missing'].append(feature)
92 continue
92 continue
93
93
94 check, desc = checks[feature]
94 check, desc = checks[feature]
95 try:
95 try:
96 available = check()
96 available = check()
97 except Exception as e:
97 except Exception as e:
98 result['error'].append('hghave check %s failed: %r' % (feature, e))
98 result['error'].append('hghave check %s failed: %r' % (feature, e))
99 continue
99 continue
100
100
101 if not negate and not available:
101 if not negate and not available:
102 result['skipped'].append('missing feature: %s' % desc)
102 result['skipped'].append('missing feature: %s' % desc)
103 elif negate and available:
103 elif negate and available:
104 result['skipped'].append('system supports %s' % desc)
104 result['skipped'].append('system supports %s' % desc)
105
105
106 return result
106 return result
107
107
108
108
109 def require(features):
109 def require(features):
110 """Require that features are available, exiting if not."""
110 """Require that features are available, exiting if not."""
111 result = checkfeatures(features)
111 result = checkfeatures(features)
112
112
113 for missing in result['missing']:
113 for missing in result['missing']:
114 stderr.write(
114 stderr.write(
115 ('skipped: unknown feature: %s\n' % missing).encode('utf-8')
115 ('skipped: unknown feature: %s\n' % missing).encode('utf-8')
116 )
116 )
117 for msg in result['skipped']:
117 for msg in result['skipped']:
118 stderr.write(('skipped: %s\n' % msg).encode('utf-8'))
118 stderr.write(('skipped: %s\n' % msg).encode('utf-8'))
119 for msg in result['error']:
119 for msg in result['error']:
120 stderr.write(('%s\n' % msg).encode('utf-8'))
120 stderr.write(('%s\n' % msg).encode('utf-8'))
121
121
122 if result['missing']:
122 if result['missing']:
123 sys.exit(2)
123 sys.exit(2)
124
124
125 if result['skipped'] or result['error']:
125 if result['skipped'] or result['error']:
126 sys.exit(1)
126 sys.exit(1)
127
127
128
128
129 def matchoutput(cmd, regexp, ignorestatus=False):
129 def matchoutput(cmd, regexp, ignorestatus=False):
130 """Return the match object if cmd executes successfully and its output
130 """Return the match object if cmd executes successfully and its output
131 is matched by the supplied regular expression.
131 is matched by the supplied regular expression.
132 """
132 """
133
133
134 # Tests on Windows have to fake USERPROFILE to point to the test area so
134 # Tests on Windows have to fake USERPROFILE to point to the test area so
135 # that `~` is properly expanded on py3.8+. However, some tools like black
135 # that `~` is properly expanded on py3.8+. However, some tools like black
136 # make calls that need the real USERPROFILE in order to run `foo --version`.
136 # make calls that need the real USERPROFILE in order to run `foo --version`.
137 env = os.environ
137 env = os.environ
138 if os.name == 'nt':
138 if os.name == 'nt':
139 env = os.environ.copy()
139 env = os.environ.copy()
140 env['USERPROFILE'] = env['REALUSERPROFILE']
140 env['USERPROFILE'] = env['REALUSERPROFILE']
141
141
142 r = re.compile(regexp)
142 r = re.compile(regexp)
143 p = subprocess.Popen(
143 p = subprocess.Popen(
144 cmd,
144 cmd,
145 shell=True,
145 shell=True,
146 stdout=subprocess.PIPE,
146 stdout=subprocess.PIPE,
147 stderr=subprocess.STDOUT,
147 stderr=subprocess.STDOUT,
148 env=env,
148 env=env,
149 )
149 )
150 s = p.communicate()[0]
150 s = p.communicate()[0]
151 ret = p.returncode
151 ret = p.returncode
152 return (ignorestatus or not ret) and r.search(s)
152 return (ignorestatus or not ret) and r.search(s)
153
153
154
154
155 @check("baz", "GNU Arch baz client")
155 @check("baz", "GNU Arch baz client")
156 def has_baz():
156 def has_baz():
157 return matchoutput('baz --version 2>&1', br'baz Bazaar version')
157 return matchoutput('baz --version 2>&1', br'baz Bazaar version')
158
158
159
159
160 @check("bzr", "Breezy library and executable version >= 3.1")
160 @check("bzr", "Breezy library and executable version >= 3.1")
161 def has_bzr():
161 def has_bzr():
162 try:
162 try:
163 # Test the Breezy python lib
163 # Test the Breezy python lib
164 import breezy
164 import breezy
165 import breezy.bzr.bzrdir
165 import breezy.bzr.bzrdir
166 import breezy.errors
166 import breezy.errors
167 import breezy.revision
167 import breezy.revision
168 import breezy.revisionspec
168 import breezy.revisionspec
169
169
170 breezy.revisionspec.RevisionSpec
170 breezy.revisionspec.RevisionSpec
171 if breezy.__doc__ is None or breezy.version_info[:2] < (3, 1):
171 if breezy.__doc__ is None or breezy.version_info[:2] < (3, 1):
172 return False
172 return False
173 except (AttributeError, ImportError):
173 except (AttributeError, ImportError):
174 return False
174 return False
175 # Test the executable
175 # Test the executable
176 return matchoutput('brz --version 2>&1', br'Breezy \(brz\) ')
176 return matchoutput('brz --version 2>&1', br'Breezy \(brz\) ')
177
177
178
178
179 @check("chg", "running with chg")
179 @check("chg", "running with chg")
180 def has_chg():
180 def has_chg():
181 return 'CHG_INSTALLED_AS_HG' in os.environ
181 return 'CHG_INSTALLED_AS_HG' in os.environ
182
182
183
183
184 @check("rhg", "running with rhg as 'hg'")
184 @check("rhg", "running with rhg as 'hg'")
185 def has_rhg():
185 def has_rhg():
186 return 'RHG_INSTALLED_AS_HG' in os.environ
186 return 'RHG_INSTALLED_AS_HG' in os.environ
187
187
188
188
189 @check("pyoxidizer", "running with pyoxidizer build as 'hg'")
189 @check("pyoxidizer", "running with pyoxidizer build as 'hg'")
190 def has_pyoxidizer():
190 def has_pyoxidizer():
191 return 'PYOXIDIZED_INSTALLED_AS_HG' in os.environ
191 return 'PYOXIDIZED_INSTALLED_AS_HG' in os.environ
192
192
193
193
194 @check(
194 @check(
195 "pyoxidizer-in-memory",
195 "pyoxidizer-in-memory",
196 "running with pyoxidizer build as 'hg' with embedded resources",
196 "running with pyoxidizer build as 'hg' with embedded resources",
197 )
197 )
198 def has_pyoxidizer_mem():
198 def has_pyoxidizer_mem():
199 return 'PYOXIDIZED_IN_MEMORY_RSRC' in os.environ
199 return 'PYOXIDIZED_IN_MEMORY_RSRC' in os.environ
200
200
201
201
202 @check(
202 @check(
203 "pyoxidizer-in-filesystem",
203 "pyoxidizer-in-filesystem",
204 "running with pyoxidizer build as 'hg' with external resources",
204 "running with pyoxidizer build as 'hg' with external resources",
205 )
205 )
206 def has_pyoxidizer_fs():
206 def has_pyoxidizer_fs():
207 return 'PYOXIDIZED_FILESYSTEM_RSRC' in os.environ
207 return 'PYOXIDIZED_FILESYSTEM_RSRC' in os.environ
208
208
209
209
210 @check("cvs", "cvs client/server")
210 @check("cvs", "cvs client/server")
211 def has_cvs():
211 def has_cvs():
212 re = br'Concurrent Versions System.*?server'
212 re = br'Concurrent Versions System.*?server'
213 return matchoutput('cvs --version 2>&1', re) and not has_msys()
213 return matchoutput('cvs --version 2>&1', re) and not has_msys()
214
214
215
215
216 @check("cvs112", "cvs client/server 1.12.* (not cvsnt)")
216 @check("cvs112", "cvs client/server 1.12.* (not cvsnt)")
217 def has_cvs112():
217 def has_cvs112():
218 re = br'Concurrent Versions System \(CVS\) 1.12.*?server'
218 re = br'Concurrent Versions System \(CVS\) 1.12.*?server'
219 return matchoutput('cvs --version 2>&1', re) and not has_msys()
219 return matchoutput('cvs --version 2>&1', re) and not has_msys()
220
220
221
221
222 @check("cvsnt", "cvsnt client/server")
222 @check("cvsnt", "cvsnt client/server")
223 def has_cvsnt():
223 def has_cvsnt():
224 re = br'Concurrent Versions System \(CVSNT\) (\d+).(\d+).*\(client/server\)'
224 re = br'Concurrent Versions System \(CVSNT\) (\d+).(\d+).*\(client/server\)'
225 return matchoutput('cvsnt --version 2>&1', re)
225 return matchoutput('cvsnt --version 2>&1', re)
226
226
227
227
228 @check("darcs", "darcs client")
228 @check("darcs", "darcs client")
229 def has_darcs():
229 def has_darcs():
230 return matchoutput('darcs --version', br'\b2\.([2-9]|\d{2})', True)
230 return matchoutput('darcs --version', br'\b2\.([2-9]|\d{2})', True)
231
231
232
232
233 @check("mtn", "monotone client (>= 1.0)")
233 @check("mtn", "monotone client (>= 1.0)")
234 def has_mtn():
234 def has_mtn():
235 return matchoutput('mtn --version', br'monotone', True) and not matchoutput(
235 return matchoutput('mtn --version', br'monotone', True) and not matchoutput(
236 'mtn --version', br'monotone 0\.', True
236 'mtn --version', br'monotone 0\.', True
237 )
237 )
238
238
239
239
240 @check("eol-in-paths", "end-of-lines in paths")
240 @check("eol-in-paths", "end-of-lines in paths")
241 def has_eol_in_paths():
241 def has_eol_in_paths():
242 try:
242 try:
243 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix, suffix='\n\r')
243 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix, suffix='\n\r')
244 os.close(fd)
244 os.close(fd)
245 os.remove(path)
245 os.remove(path)
246 return True
246 return True
247 except (IOError, OSError):
247 except (IOError, OSError):
248 return False
248 return False
249
249
250
250
251 @check("execbit", "executable bit")
251 @check("execbit", "executable bit")
252 def has_executablebit():
252 def has_executablebit():
253 try:
253 try:
254 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
254 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
255 fh, fn = tempfile.mkstemp(dir='.', prefix=tempprefix)
255 fh, fn = tempfile.mkstemp(dir='.', prefix=tempprefix)
256 try:
256 try:
257 os.close(fh)
257 os.close(fh)
258 m = os.stat(fn).st_mode & 0o777
258 m = os.stat(fn).st_mode & 0o777
259 new_file_has_exec = m & EXECFLAGS
259 new_file_has_exec = m & EXECFLAGS
260 os.chmod(fn, m ^ EXECFLAGS)
260 os.chmod(fn, m ^ EXECFLAGS)
261 exec_flags_cannot_flip = (os.stat(fn).st_mode & 0o777) == m
261 exec_flags_cannot_flip = (os.stat(fn).st_mode & 0o777) == m
262 finally:
262 finally:
263 os.unlink(fn)
263 os.unlink(fn)
264 except (IOError, OSError):
264 except (IOError, OSError):
265 # we don't care, the user probably won't be able to commit anyway
265 # we don't care, the user probably won't be able to commit anyway
266 return False
266 return False
267 return not (new_file_has_exec or exec_flags_cannot_flip)
267 return not (new_file_has_exec or exec_flags_cannot_flip)
268
268
269
269
270 @check("suidbit", "setuid and setgid bit")
270 @check("suidbit", "setuid and setgid bit")
271 def has_suidbit():
271 def has_suidbit():
272 if (
272 if (
273 getattr(os, "statvfs", None) is None
273 getattr(os, "statvfs", None) is None
274 or getattr(os, "ST_NOSUID", None) is None
274 or getattr(os, "ST_NOSUID", None) is None
275 ):
275 ):
276 return False
276 return False
277 return bool(os.statvfs('.').f_flag & os.ST_NOSUID)
277 return bool(os.statvfs('.').f_flag & os.ST_NOSUID)
278
278
279
279
280 @check("icasefs", "case insensitive file system")
280 @check("icasefs", "case insensitive file system")
281 def has_icasefs():
281 def has_icasefs():
282 # Stolen from mercurial.util
282 # Stolen from mercurial.util
283 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix)
283 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix)
284 os.close(fd)
284 os.close(fd)
285 try:
285 try:
286 s1 = os.stat(path)
286 s1 = os.stat(path)
287 d, b = os.path.split(path)
287 d, b = os.path.split(path)
288 p2 = os.path.join(d, b.upper())
288 p2 = os.path.join(d, b.upper())
289 if path == p2:
289 if path == p2:
290 p2 = os.path.join(d, b.lower())
290 p2 = os.path.join(d, b.lower())
291 try:
291 try:
292 s2 = os.stat(p2)
292 s2 = os.stat(p2)
293 return s2 == s1
293 return s2 == s1
294 except OSError:
294 except OSError:
295 return False
295 return False
296 finally:
296 finally:
297 os.remove(path)
297 os.remove(path)
298
298
299
299
300 @check("fifo", "named pipes")
300 @check("fifo", "named pipes")
301 def has_fifo():
301 def has_fifo():
302 if getattr(os, "mkfifo", None) is None:
302 if getattr(os, "mkfifo", None) is None:
303 return False
303 return False
304 name = tempfile.mktemp(dir='.', prefix=tempprefix)
304 name = tempfile.mktemp(dir='.', prefix=tempprefix)
305 try:
305 try:
306 os.mkfifo(name)
306 os.mkfifo(name)
307 os.unlink(name)
307 os.unlink(name)
308 return True
308 return True
309 except OSError:
309 except OSError:
310 return False
310 return False
311
311
312
312
313 @check("killdaemons", 'killdaemons.py support')
313 @check("killdaemons", 'killdaemons.py support')
314 def has_killdaemons():
314 def has_killdaemons():
315 return True
315 return True
316
316
317
317
318 @check("cacheable", "cacheable filesystem")
318 @check("cacheable", "cacheable filesystem")
319 def has_cacheable_fs():
319 def has_cacheable_fs():
320 from mercurial import util
320 from mercurial import util
321
321
322 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix)
322 fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix)
323 os.close(fd)
323 os.close(fd)
324 try:
324 try:
325 return util.cachestat(_sys2bytes(path)).cacheable()
325 return util.cachestat(_sys2bytes(path)).cacheable()
326 finally:
326 finally:
327 os.remove(path)
327 os.remove(path)
328
328
329
329
330 @check("lsprof", "python lsprof module")
330 @check("lsprof", "python lsprof module")
331 def has_lsprof():
331 def has_lsprof():
332 try:
332 try:
333 import _lsprof
333 import _lsprof
334
334
335 _lsprof.Profiler # silence unused import warning
335 _lsprof.Profiler # silence unused import warning
336 return True
336 return True
337 except ImportError:
337 except ImportError:
338 return False
338 return False
339
339
340
340
341 def _gethgversion():
341 def _gethgversion():
342 m = matchoutput('hg --version --quiet 2>&1', br'(\d+)\.(\d+)')
342 m = matchoutput('hg --version --quiet 2>&1', br'(\d+)\.(\d+)')
343 if not m:
343 if not m:
344 return (0, 0)
344 return (0, 0)
345 return (int(m.group(1)), int(m.group(2)))
345 return (int(m.group(1)), int(m.group(2)))
346
346
347
347
348 _hgversion = None
348 _hgversion = None
349
349
350
350
351 def gethgversion():
351 def gethgversion():
352 global _hgversion
352 global _hgversion
353 if _hgversion is None:
353 if _hgversion is None:
354 _hgversion = _gethgversion()
354 _hgversion = _gethgversion()
355 return _hgversion
355 return _hgversion
356
356
357
357
358 @checkvers(
358 @checkvers("hg", "Mercurial >= %s", [(1.0 * x) / 10 for x in range(9, 99)])
359 "hg", "Mercurial >= %s", list([(1.0 * x) / 10 for x in range(9, 99)])
360 )
361 def has_hg_range(v):
359 def has_hg_range(v):
362 major, minor = v.split('.')[0:2]
360 major, minor = v.split('.')[0:2]
363 return gethgversion() >= (int(major), int(minor))
361 return gethgversion() >= (int(major), int(minor))
364
362
365
363
366 @check("rust", "Using the Rust extensions")
364 @check("rust", "Using the Rust extensions")
367 def has_rust():
365 def has_rust():
368 """Check is the mercurial currently running is using some rust code"""
366 """Check is the mercurial currently running is using some rust code"""
369 cmd = 'hg debuginstall --quiet 2>&1'
367 cmd = 'hg debuginstall --quiet 2>&1'
370 match = br'checking module policy \(([^)]+)\)'
368 match = br'checking module policy \(([^)]+)\)'
371 policy = matchoutput(cmd, match)
369 policy = matchoutput(cmd, match)
372 if not policy:
370 if not policy:
373 return False
371 return False
374 return b'rust' in policy.group(1)
372 return b'rust' in policy.group(1)
375
373
376
374
377 @check("hg08", "Mercurial >= 0.8")
375 @check("hg08", "Mercurial >= 0.8")
378 def has_hg08():
376 def has_hg08():
379 if checks["hg09"][0]():
377 if checks["hg09"][0]():
380 return True
378 return True
381 return matchoutput('hg help annotate 2>&1', '--date')
379 return matchoutput('hg help annotate 2>&1', '--date')
382
380
383
381
384 @check("hg07", "Mercurial >= 0.7")
382 @check("hg07", "Mercurial >= 0.7")
385 def has_hg07():
383 def has_hg07():
386 if checks["hg08"][0]():
384 if checks["hg08"][0]():
387 return True
385 return True
388 return matchoutput('hg --version --quiet 2>&1', 'Mercurial Distributed SCM')
386 return matchoutput('hg --version --quiet 2>&1', 'Mercurial Distributed SCM')
389
387
390
388
391 @check("hg06", "Mercurial >= 0.6")
389 @check("hg06", "Mercurial >= 0.6")
392 def has_hg06():
390 def has_hg06():
393 if checks["hg07"][0]():
391 if checks["hg07"][0]():
394 return True
392 return True
395 return matchoutput('hg --version --quiet 2>&1', 'Mercurial version')
393 return matchoutput('hg --version --quiet 2>&1', 'Mercurial version')
396
394
397
395
398 @check("gettext", "GNU Gettext (msgfmt)")
396 @check("gettext", "GNU Gettext (msgfmt)")
399 def has_gettext():
397 def has_gettext():
400 return matchoutput('msgfmt --version', br'GNU gettext-tools')
398 return matchoutput('msgfmt --version', br'GNU gettext-tools')
401
399
402
400
403 @check("git", "git command line client")
401 @check("git", "git command line client")
404 def has_git():
402 def has_git():
405 return matchoutput('git --version 2>&1', br'^git version')
403 return matchoutput('git --version 2>&1', br'^git version')
406
404
407
405
408 def getgitversion():
406 def getgitversion():
409 m = matchoutput('git --version 2>&1', br'git version (\d+)\.(\d+)')
407 m = matchoutput('git --version 2>&1', br'git version (\d+)\.(\d+)')
410 if not m:
408 if not m:
411 return (0, 0)
409 return (0, 0)
412 return (int(m.group(1)), int(m.group(2)))
410 return (int(m.group(1)), int(m.group(2)))
413
411
414
412
415 @check("pygit2", "pygit2 Python library")
413 @check("pygit2", "pygit2 Python library")
416 def has_pygit2():
414 def has_pygit2():
417 try:
415 try:
418 import pygit2
416 import pygit2
419
417
420 pygit2.Oid # silence unused import
418 pygit2.Oid # silence unused import
421 return True
419 return True
422 except ImportError:
420 except ImportError:
423 return False
421 return False
424
422
425
423
426 # https://github.com/git-lfs/lfs-test-server
424 # https://github.com/git-lfs/lfs-test-server
427 @check("lfs-test-server", "git-lfs test server")
425 @check("lfs-test-server", "git-lfs test server")
428 def has_lfsserver():
426 def has_lfsserver():
429 exe = 'lfs-test-server'
427 exe = 'lfs-test-server'
430 if has_windows():
428 if has_windows():
431 exe = 'lfs-test-server.exe'
429 exe = 'lfs-test-server.exe'
432 return any(
430 return any(
433 os.access(os.path.join(path, exe), os.X_OK)
431 os.access(os.path.join(path, exe), os.X_OK)
434 for path in os.environ["PATH"].split(os.pathsep)
432 for path in os.environ["PATH"].split(os.pathsep)
435 )
433 )
436
434
437
435
438 @checkvers("git", "git client (with ext::sh support) version >= %s", (1.9,))
436 @checkvers("git", "git client (with ext::sh support) version >= %s", (1.9,))
439 def has_git_range(v):
437 def has_git_range(v):
440 major, minor = v.split('.')[0:2]
438 major, minor = v.split('.')[0:2]
441 return getgitversion() >= (int(major), int(minor))
439 return getgitversion() >= (int(major), int(minor))
442
440
443
441
@check("docutils", "Docutils text processing library")
def has_docutils():
    """Report whether the docutils library is importable."""
    try:
        import docutils.core
    except ImportError:
        return False
    docutils.core.publish_cmdline  # reference so the import is not "unused"
    return True
453
451
454
452
def getsvnversion():
    """Return the svn client version as (major, minor).

    Returns (0, 0) when svn is missing or its version output is unparsable.
    """
    m = matchoutput('svn --version --quiet 2>&1', br'^(\d+)\.(\d+)')
    if m:
        return (int(m.group(1)), int(m.group(2)))
    return (0, 0)
460
458
461
459
@checkvers("svn", "subversion client and admin tools >= %s", (1.3, 1.5))
def has_svn_range(v):
    """True if the installed svn is at least version ``v`` ('major.minor')."""
    wanted = tuple(int(part) for part in v.split('.')[:2])
    return getsvnversion() >= wanted
466
464
467
465
@check("svn", "subversion client and admin tools")
def has_svn():
    """True when both the 'svn' and 'svnadmin' binaries respond."""
    client_ok = matchoutput('svn --version 2>&1', br'^svn, version')
    return client_ok and matchoutput(
        'svnadmin --version 2>&1', br'^svnadmin, version'
    )
473
471
474
472
@check("svn-bindings", "subversion python bindings")
def has_svn_bindings():
    """True when the subversion SWIG bindings (>= 1.4) are importable."""
    try:
        import svn.core
    except ImportError:
        return False
    bound = (svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR)
    return bound >= (1, 4)
486
484
487
485
@check("p4", "Perforce server and client")
def has_p4():
    """True when both the p4 client and the p4d server binaries respond."""
    client_ok = matchoutput('p4 -V', br'Rev\. P4/')
    return client_ok and matchoutput('p4d -V', br'Rev\. P4D/')
493
491
494
492
@check("symlink", "symbolic links")
def has_symlink():
    """True if a symbolic link can be created in the current directory."""
    # mercurial.windows.checklink() is a hard 'no' at the moment
    if os.name == 'nt' or getattr(os, "symlink", None) is None:
        return False
    probe = tempfile.mktemp(dir='.', prefix=tempprefix)
    try:
        os.symlink(".", probe)
        os.unlink(probe)
        return True
    except (OSError, AttributeError):
        return False
507
505
508
506
@check("hardlink", "hardlinks")
def has_hardlink():
    """True if hard links can be created on the current filesystem."""
    from mercurial import util

    fh, src = tempfile.mkstemp(dir='.', prefix=tempprefix)
    os.close(fh)
    dst = tempfile.mktemp(dir='.', prefix=tempprefix)
    try:
        try:
            util.oslink(_sys2bytes(src), _sys2bytes(dst))
            os.unlink(dst)
            return True
        except OSError:
            return False
    finally:
        os.unlink(src)
524
522
525
523
@check("hardlink-whitelisted", "hardlinks on whitelisted filesystems")
def has_hardlink_whitelisted():
    """True when '.' is on a filesystem whitelisted for hardlink use."""
    from mercurial import util

    try:
        # Membership test cannot raise OSError; only getfstype() can.
        return util.getfstype(b'.') in util._hardlinkfswhitelist
    except OSError:
        return False
535
533
536
534
@check("rmcwd", "can remove current working directory")
def has_rmcwd():
    """True if the OS allows removing the process's current directory."""
    origin = os.getcwd()
    scratch = tempfile.mkdtemp(dir='.', prefix=tempprefix)
    try:
        os.chdir(scratch)
        # On Linux, 'rmdir .' isn't allowed, but the other names are okay.
        # On Solaris and Windows, the cwd can't be removed by any names.
        os.rmdir(os.getcwd())
        return True
    except OSError:
        return False
    finally:
        os.chdir(origin)
        # clean up temp dir on platforms where cwd can't be removed
        try:
            os.rmdir(scratch)
        except OSError:
            pass
556
554
557
555
@check("tla", "GNU Arch tla client")
def has_tla():
    """True if the GNU Arch 'tla' client is installed."""
    banner = br'The GNU Arch Revision'
    return matchoutput('tla --version 2>&1', banner)
561
559
562
560
@check("gpg", "gpg client")
def has_gpg():
    """True if any GnuPG client is installed."""
    banner = br'GnuPG'
    return matchoutput('gpg --version 2>&1', banner)
566
564
567
565
@check("gpg2", "gpg client v2")
def has_gpg2():
    """True if a GnuPG 2.x client is installed."""
    banner = br'GnuPG[^0-9]+2\.'
    return matchoutput('gpg --version 2>&1', banner)
571
569
572
570
@check("gpg21", "gpg client v2.1+")
def has_gpg21():
    """True if a GnuPG 2.1-or-newer client is installed (excludes 2.0)."""
    banner = br'GnuPG[^0-9]+2\.(?!0)'
    return matchoutput('gpg --version 2>&1', banner)
576
574
577
575
@check("unix-permissions", "unix-style permissions")
def has_unix_permissions():
    """True if the filesystem honors POSIX permission bits.

    Creates a file under several umasks and checks that the resulting
    mode is exactly what POSIX semantics predict.
    """
    d = tempfile.mkdtemp(dir='.', prefix=tempprefix)
    # Save the process umask so it can be restored; the previous version
    # left the last tested umask (0o22) in effect after returning.
    oldumask = os.umask(0o22)
    try:
        fname = os.path.join(d, 'foo')
        for umask in (0o77, 0o07, 0o22):
            os.umask(umask)
            open(fname, 'w').close()
            mode = os.stat(fname).st_mode
            os.unlink(fname)
            if mode & 0o777 != ~umask & 0o666:
                return False
        return True
    finally:
        os.umask(oldumask)
        os.rmdir(d)
594
592
595
593
@check("unix-socket", "AF_UNIX socket family")
def has_unix_socket():
    """True when the socket module exposes the AF_UNIX family."""
    return hasattr(socket, 'AF_UNIX')
599
597
600
598
@check("root", "root permissions")
def has_root():
    """True when running with an effective uid of 0 (POSIX only)."""
    geteuid = getattr(os, 'geteuid', None)
    return geteuid and geteuid() == 0
604
602
605
603
@check("pyflakes", "Pyflakes python linter")
def has_pyflakes():
    """True if the pyflakes linter module is importable."""
    try:
        import pyflakes
    except ImportError:
        return False
    pyflakes.__version__  # touch the module so the import is not "unused"
    return True
616
614
617
615
@check("pylint", "Pylint python linter")
def has_pylint():
    """True if pylint responds to --help."""
    usage = br"[Uu]sage:[ ]+pylint"
    return matchoutput("pylint --help", usage, True)
621
619
622
620
@check("clang-format", "clang-format C code formatter (>= 11)")
def has_clang_format():
    """True for clang-format >= 11 (the style changed between 10.x and 11.x)."""
    m = matchoutput('clang-format --version', br'clang-format version (\d+)')
    if m:
        # style changed somewhere between 10.x and 11.x
        return int(m.group(1)) >= 11
    # Assist Googler contributors, they have a centrally-maintained version of
    # clang-format that is generally very fresh, but unlike most builds (both
    # official and unofficial), it does *not* include a version number.
    return matchoutput(
        'clang-format --version', br'clang-format .*google3-trunk \([0-9a-f]+\)'
    )
635
633
636
634
@check("jshint", "JSHint static code analysis tool")
def has_jshint():
    """True if the jshint tool is installed."""
    banner = br"jshint v"
    return matchoutput("jshint --version 2>&1", banner)
640
638
641
639
@check("pygments", "Pygments source highlighting library")
def has_pygments():
    """True if the Pygments highlighting library is importable."""
    try:
        import pygments
    except ImportError:
        return False
    pygments.highlight  # touch the module so the import is not "unused"
    return True
651
649
652
650
def getpygmentsversion():
    """Return the Pygments version as (major, minor), or (0, 0) if absent."""
    try:
        import pygments
    except ImportError:
        return (0, 0)
    parts = pygments.__version__.split(".")
    return (int(parts[0]), int(parts[1]))
663
661
664
662
@checkvers("pygments", "Pygments version >= %s", (2.5, 2.11, 2.14))
def has_pygments_range(v):
    """True if installed Pygments is at least version ``v`` ('major.minor')."""
    wanted = tuple(int(part) for part in v.split('.')[:2])
    return getpygmentsversion() >= wanted
669
667
670
668
@check("outer-repo", "outer repo")
def has_outer_repo():
    """True unless 'hg root' reports that no repository was found."""
    # failing for other reasons than 'no repo' imply that there is a repo
    no_repo = matchoutput('hg root 2>&1', br'abort: no repository found', True)
    return not no_repo
675
673
676
674
@check("ssl", "ssl module available")
def has_ssl():
    """True if the ssl module is importable."""
    try:
        import ssl
    except ImportError:
        return False
    ssl.CERT_NONE  # touch the module so the import is not "unused"
    return True
686
684
687
685
@check("defaultcacertsloaded", "detected presence of loaded system CA certs")
def has_defaultcacertsloaded():
    """True if a default SSL context ends up with at least one CA cert."""
    import ssl
    from mercurial import sslutil, ui as uimod

    ui = uimod.ui.load()
    cafile = sslutil._defaultcacerts(ui)
    ctx = ssl.create_default_context()
    if cafile:
        ctx.load_verify_locations(cafile=cafile)
    else:
        ctx.load_default_certs()

    return len(ctx.get_ca_certs()) > 0
702
700
703
701
@check("tls1.2", "TLS 1.2 protocol support")
def has_tls1_2():
    """True when Mercurial's sslutil reports TLS 1.2 support."""
    from mercurial import sslutil

    protocols = sslutil.supportedprotocols
    return b'tls1.2' in protocols
709
707
710
708
@check("windows", "Windows")
def has_windows():
    """True on native Windows builds of Python."""
    # os.name is 'nt' on every native Windows Python.
    return os.name == 'nt'
714
712
715
713
@check("system-sh", "system() uses sh")
def has_system_sh():
    """True when os.system() runs commands through a POSIX shell."""
    # Everywhere except native Windows, system() goes through sh.
    return os.name != 'nt'
719
717
720
718
@check("serve", "platform and python can manage 'hg serve -d'")
def has_serve():
    """Always true: every supported platform can run 'hg serve -d'."""
    return True
724
722
725
723
@check("setprocname", "whether osutil.setprocname is available or not")
def has_setprocname():
    """True when procutil exposes setprocname (compiled osutil present)."""
    from mercurial.utils import procutil

    return getattr(procutil, 'setprocname', None) is not None
735
733
736
734
@check("test-repo", "running tests from repository")
def has_test_repo():
    """True when the test directory lives inside a Mercurial checkout."""
    testdir = os.environ["TESTDIR"]
    hgdir = os.path.join(testdir, "..", ".hg")
    return os.path.isdir(hgdir)
741
739
742
740
@check("network-io", "whether tests are allowed to access 3rd party services")
def has_network_io():
    """True when HGTESTS_ALLOW_NETIO is set to '1'."""
    flag = os.environ.get("HGTESTS_ALLOW_NETIO")
    return flag == "1"
747
745
748
746
@check("curses", "terminfo compiler and curses module")
def has_curses():
    """True when curses works and (outside Windows) a terminfo compiler exists."""
    try:
        import curses

        curses.COLOR_BLUE

        # Windows doesn't have a `tic` executable, but the windows_curses
        # package is sufficient to run the tests without it.
        return True if os.name == 'nt' else has_tic()

    except (ImportError, AttributeError):
        return False
765
763
766
764
@check("tic", "terminfo compiler")
def has_tic():
    """True if an executable 'tic' (terminfo compiler) is on $PATH."""
    probe = 'test -x "`which tic`"'
    return matchoutput(probe, br'')
770
768
771
769
@check("xz", "xz compression utility")
def has_xz():
    """True if an executable 'xz' is found via an MSYS-compatible shell."""
    # When Windows invokes a subprocess in shell mode, it uses `cmd.exe`, which
    # only knows `where`, not `which`. So invoke MSYS shell explicitly.
    probe = "sh -c 'test -x \"`which xz`\"'"
    return matchoutput(probe, b'')
777
775
778
776
@check("msys", "Windows with MSYS")
def has_msys():
    """Truthy when running under an MSYS environment (MSYSTEM is set)."""
    return os.environ.get('MSYSTEM')
782
780
783
781
@check("aix", "AIX")
def has_aix():
    """True when running on AIX."""
    return sys.platform.startswith("aix")
787
785
788
786
@check("osx", "OS X")
def has_osx():
    """True when running on macOS."""
    return sys.platform == 'darwin'
792
790
793
791
@check("osxpackaging", "OS X packaging tools")
def has_osxpackaging():
    """True when pkgbuild, productbuild, lsbom and xar are all available."""
    specs = (
        ('pkgbuild', br'Usage: pkgbuild '),
        ('productbuild', br'Usage: productbuild '),
        ('lsbom', br'Usage: lsbom'),
        ('xar --help', br'Usage: xar'),
    )
    try:
        for cmd, expected in specs:
            if not matchoutput(cmd, expected, ignorestatus=1):
                return False
        return True
    except ImportError:
        return False
807
805
808
806
@check('linuxormacos', 'Linux or MacOS')
def has_linuxormacos():
    """True on Linux or macOS."""
    # This isn't a perfect test for MacOS. But it is sufficient for our needs.
    return sys.platform.startswith(('linux', 'darwin'))
813
811
814
812
@check("docker", "docker support")
def has_docker():
    """True when a usable docker client is available (Linux only)."""
    pat = br'A self-sufficient runtime for'
    if not matchoutput('docker --help', pat):
        return False
    if 'linux' not in sys.platform:
        # TODO: in theory we should be able to test docker-based
        # package creation on non-linux using boot2docker, but in
        # practice that requires extra coordination to make sure
        # $TESTTEMP is going to be visible at the same path to the
        # boot2docker VM. If we figure out how to verify that, we
        # can use the following instead of just saying False:
        # return 'DOCKER_HOST' in os.environ
        return False
    return True
831
829
832
830
@check("debhelper", "debian packaging tools")
def has_debhelper():
    """True when the full Debian packaging toolchain is installed."""
    # Some versions of dpkg say `dpkg', some say 'dpkg' (` vs ' on the first
    # quote), so just accept anything in that spot.
    have_dpkg = matchoutput(
        'dpkg --version', br"Debian .dpkg' package management program"
    )
    have_dh = matchoutput(
        'dh --help', br'dh is a part of debhelper.', ignorestatus=True
    )
    have_dh_py2 = matchoutput(
        'dh_python2 --help', br'other supported Python versions'
    )
    # debuild comes from the 'devscripts' package, though you might want
    # the 'build-debs' package instead, which has a dependency on devscripts.
    have_debuild = matchoutput(
        'debuild --help', br'to run debian/rules with given parameter'
    )
    return all([have_dpkg, have_dh, have_dh_py2, have_debuild])
852
850
853
851
@check(
    "debdeps", "debian build dependencies (run dpkg-checkbuilddeps in contrib/)"
)
def has_debdeps():
    """True when dpkg-checkbuilddeps accepts our debian/control file."""
    # just check exit status (ignoring output)
    control = '%s/../contrib/packaging/debian/control' % os.environ['TESTDIR']
    return matchoutput('dpkg-checkbuilddeps %s' % control, br'')
861
859
862
860
@check("demandimport", "demandimport enabled")
def has_demandimport():
    """True when demandimport is active."""
    # chg disables demandimport intentionally for performance wins.
    if has_chg():
        return False
    return os.environ.get('HGDEMANDIMPORT') != 'disable'
867
865
868
866
# Add "py36", "py37", ... as possible feature checks. Note that there's no
# punctuation here.
@checkvers("py", "Python >= %s", (3.6, 3.7, 3.8, 3.9, 3.10, 3.11))
def has_python_range(v):
    """True when the running Python is at least version ``v`` ('major.minor')."""
    wanted = tuple(int(part) for part in v.split('.')[:2])
    running = (sys.version_info.major, sys.version_info.minor)
    return running >= wanted
877
875
878
876
@check("py3", "running with Python 3.x")
def has_py3():
    """True when running under a Python 3 interpreter."""
    return sys.version_info[0] == 3
882
880
883
881
@check("py3exe", "a Python 3.x interpreter is available")
def has_python3exe():
    """True when a standalone Python 3.6-3.11 interpreter can be invoked."""
    py = 'python3'
    if os.name == 'nt':
        py = 'py -3'
    # Escape the '.' so 'Python 3.x' is matched literally; the previous
    # unescaped dot matched any character after the '3'.
    return matchoutput('%s -V' % py, br'^Python 3\.(6|7|8|9|10|11)')
890
888
891
889
@check("pure", "running with pure Python code")
def has_pure():
    """True when the test run forces the pure-Python module policy."""
    return (
        os.environ.get("HGMODULEPOLICY") == "py"
        or os.environ.get("HGTEST_RUN_TESTS_PURE") == "--pure"
    )
900
898
901
899
@check("slow", "allow slow tests (use --allow-slow-tests)")
def has_slow():
    """True when slow tests were explicitly allowed."""
    flag = os.environ.get('HGTEST_SLOW')
    return flag == 'slow'
905
903
906
904
@check("hypothesis", "Hypothesis automated test generation")
def has_hypothesis():
    """True if the hypothesis property-testing library is importable."""
    try:
        import hypothesis
    except ImportError:
        return False
    hypothesis.given  # touch the module so the import is not "unused"
    return True
916
914
917
915
@check("unziplinks", "unzip(1) understands and extracts symlinks")
def unzip_understands_symlinks():
    """True for Info-ZIP unzip, which can extract symlinks."""
    banner = br'Info-ZIP'
    return matchoutput('unzip --help', banner)
921
919
922
920
@check("zstd", "zstd Python module available")
def has_zstd():
    """True when Mercurial's bundled zstd module is importable."""
    try:
        import mercurial.zstd

        mercurial.zstd.__version__
    except ImportError:
        return False
    return True
932
930
933
931
@check("devfull", "/dev/full special file")
def has_dev_full():
    """True if the /dev/full special device exists."""
    device = '/dev/full'
    return os.path.exists(device)
937
935
938
936
@check("ensurepip", "ensurepip module")
def has_ensurepip():
    """True if the ensurepip module is importable."""
    try:
        import ensurepip
    except ImportError:
        return False
    ensurepip.bootstrap  # touch the module so the import is not "unused"
    return True
948
946
949
947
@check("virtualenv", "virtualenv support")
def has_virtualenv():
    """True for virtualenv >= 2.x (ancient 1.x releases are ignored)."""
    try:
        import virtualenv

        # --no-site-package became the default in 1.7 (Nov 2011), and the
        # argument was removed in 20.0 (Feb 2020). Rather than make the
        # script complicated, just ignore ancient versions.
        major = int(virtualenv.__version__.split('.')[0])
        return major > 1
    except (AttributeError, ImportError, IndexError):
        return False
961
959
962
960
@check("fsmonitor", "running tests with fsmonitor")
def has_fsmonitor():
    """True when the fsmonitor test mode is enabled via the environment."""
    flag = 'HGFSMONITOR_TESTS'
    return flag in os.environ
966
964
967
965
@check("fuzzywuzzy", "Fuzzy string matching library")
def has_fuzzywuzzy():
    """True if the fuzzywuzzy fuzzy-matching library is importable."""
    try:
        import fuzzywuzzy
    except ImportError:
        return False
    fuzzywuzzy.__version__  # touch the module so the import is not "unused"
    return True
977
975
978
976
@check("clang-libfuzzer", "clang new enough to include libfuzzer")
def has_clang_libfuzzer():
    """True for clang >= 6, the first release bundling libfuzzer."""
    mat = matchoutput('clang --version', br'clang version (\d)')
    if not mat:
        return False
    # libfuzzer is new in clang 6
    return int(mat.group(1)) > 5
986
984
987
985
@check("clang-6.0", "clang 6.0 with version suffix (libfuzzer included)")
def has_clang60():
    """True if a 'clang-6.0' suffixed binary is installed."""
    banner = br'clang version 6\.'
    return matchoutput('clang-6.0 --version', banner)
991
989
992
990
@check("xdiff", "xdiff algorithm")
def has_xdiff():
    """True when the compiled bdiff module provides xdiffblocks."""
    try:
        from mercurial import policy

        mod = policy.importmod('bdiff')
        expected = [(0, 0, 0, 0)]
        return mod.xdiffblocks(b'', b'') == expected
    except (ImportError, AttributeError):
        return False
1002
1000
1003
1001
@check('extraextensions', 'whether tests are running with extra extensions')
def has_extraextensions():
    """True when extra test extensions are enabled via the environment."""
    flag = 'HGTESTEXTRAEXTENSIONS'
    return flag in os.environ
1007
1005
1008
1006
def getrepofeatures():
    """Obtain set of repository features in use.

    HGREPOFEATURES can be used to define or remove features. It contains
    a space-delimited list of feature strings. Strings beginning with ``-``
    mean to remove.
    """
    # Default list provided by core.
    features = {
        'bundlerepo',
        'revlogstore',
        'fncache',
    }

    # Features that imply other features.
    implies = {
        'simplestore': ['-revlogstore', '-bundlerepo', '-fncache'],
    }

    def _apply(token):
        # A '-' prefix removes the feature; any other token adds it.
        if token.startswith('-'):
            features.discard(token[1:])
        else:
            features.add(token)

    for override in os.environ.get('HGREPOFEATURES', '').split(' '):
        if not override:
            continue
        _apply(override)
        for implied in implies.get(override, []):
            _apply(implied)

    return features
1046
1044
1047
1045
@check('reporevlogstore', 'repository using the default revlog store')
def has_reporevlogstore():
    """True when the repo under test uses the default revlog storage."""
    active = getrepofeatures()
    return 'revlogstore' in active
@check('reposimplestore', 'repository using simple storage extension')
def has_reposimplestore():
    """True when the repo under test uses the simple storage extension."""
    active = getrepofeatures()
    return 'simplestore' in active
@check('repobundlerepo', 'whether we can open bundle files as repos')
def has_repobundlerepo():
    """True when bundle files can be opened as repositories."""
    active = getrepofeatures()
    return 'bundlerepo' in active
@check('repofncache', 'repository has an fncache')
def has_repofncache():
    """True when the repo under test maintains an fncache."""
    active = getrepofeatures()
    return 'fncache' in active
@check('dirstate-v2', 'using the v2 format of .hg/dirstate')
def has_dirstate_v2():
    """True when new repos would use the v2 dirstate format."""
    # Keep this logic in sync with `newreporequirements()` in `mercurial/localrepo.py`
    cmd = 'hg config format.use-dirstate-v2'
    truthy = b'(?i)1|yes|true|on|always'
    return matchoutput(cmd, truthy)
@check('sqlite', 'sqlite3 module and matching cli is available')
def has_sqlite():
    """True when both the sqlite3 module and a usable sqlite3 CLI exist."""
    try:
        import sqlite3

        libversion = sqlite3.sqlite_version_info
    except ImportError:
        return False

    # WITH clause not supported
    if libversion < (3, 8, 3):
        return False

    # Also require a sqlite3 command-line binary of the same major version.
    return matchoutput('sqlite3 -version', br'^3\.\d+')
@check('vcr', 'vcr http mocking library (pytest-vcr)')
def has_vcr():
    """True when the vcr HTTP-mocking library is importable and sane."""
    try:
        import vcr

        # Touch an expected attribute to weed out unrelated ``vcr`` modules.
        vcr.VCR
        return True
    except (ImportError, AttributeError):
        return False
@check('emacs', 'GNU Emacs')
def has_emacs():
    """True when a sufficiently recent GNU Emacs is on PATH."""
    # Our emacs lisp uses `with-eval-after-load` which is new in emacs
    # 24.4, so we allow emacs 24.4, 24.5, and 25+ (24.5 was the last
    # 24 release)
    wanted = b'GNU Emacs 2(4.4|4.5|5|6|7|8|9)'
    return matchoutput('emacs --version', wanted)
@check('black', 'the black formatter for python (>= 20.8b1)')
def has_black():
    """True when the black formatter is available at >= 20.8b1."""
    m = matchoutput('black --version', b'black, (?:version )?([0-9a-b.]+)')
    strictversion = distutils.version.StrictVersion
    # ``m`` may be falsy (no black found); short-circuit mirrors that.
    return m and strictversion(_bytes2sys(m.group(1))) >= strictversion(
        '20.8b1'
    )
@check('pytype', 'the pytype type checker')
def has_pytype():
    """True when the pytype checker is available at >= 2019.10.17."""
    m = matchoutput('pytype --version', b'[0-9a-b.]+')
    strictversion = distutils.version.StrictVersion
    # ``m`` may be falsy (no pytype found); short-circuit mirrors that.
    return m and strictversion(_bytes2sys(m.group(0))) >= strictversion(
        '2019.10.17'
    )
@check("rustfmt", "rustfmt tool at version nightly-2021-11-02")
def has_rustfmt():
    """True when the pinned nightly rustfmt toolchain is installed."""
    # We use Nightly's rustfmt due to current unstable config options.
    cmd = '`rustup which --toolchain nightly-2021-11-02 rustfmt` --version'
    return matchoutput(cmd, b'rustfmt')
@check("cargo", "cargo tool")
def has_cargo():
    """True when rustup provides a working cargo binary."""
    cmd = '`rustup which cargo` --version'
    return matchoutput(cmd, b'cargo')
@check("lzma", "python lzma module")
def has_lzma():
    """True when the C accelerator for the lzma module is importable."""
    try:
        import _lzma
    except ImportError:
        return False
    # Touch an expected constant to make sure the module is functional.
    _lzma.FORMAT_XZ
    return True
@check("bash", "bash shell")
def has_bash():
    """True when a bash shell is available and can run a command."""
    probe = "bash -c 'echo hi'"
    return matchoutput(probe, b'^hi$')
@check("bigendian", "big-endian CPU")
def has_bigendian():
    """True when the host CPU is big-endian."""
    return 'big' == sys.byteorder
General Comments 0
You need to be logged in to leave comments. Login now