##// END OF EJS Templates
merge default into stable for 4.9 release
Augie Fackler -
r41326:593718ff merge 4.9rc0 stable
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -0,0 +1,64 b''
#!/bin/bash
#
# produces two repositories with different common and missing subsets
#
# $ discovery-helper.sh REPO NBHEADS DEPTH
#
# The Goal is to produce two repositories with some common part and some
# exclusive part on each side. Provide a source repository REPO, it will
# produce two repositories REPO-left and REPO-right.
#
# Each repository will be missing some revisions exclusive to NBHEADS of the
# repo topological heads. These heads and revisions exclusive to them (up to
# DEPTH depth) are stripped.
#
# The "left" repository will use the NBHEADS first heads (sorted by
# description). The "right" use the last NBHEADS one.
#
# To find out how many topological heads a repo has, use:
#
#     $ hg heads -t -T '{rev}\n' | wc -l
#
# Example:
#
#  The `pypy-2018-09-01` repository has 192 heads. To produce two repositories
#  with 92 common heads and ~50 exclusive heads on each side.
#
#     $ ./discovery-helper.sh pypy-2018-08-01 50 10

set -euo pipefail

if [ $# -lt 3 ]; then
     echo "usage: `basename $0` REPO NBHEADS DEPTH"
     exit 64
fi

repo="$1"
shift

nbheads="$1"
shift

depth="$1"
shift

leftrepo="${repo}-left"
rightrepo="${repo}-right"

# revsets selecting the two (overlapping) head subsets
left="first(sort(heads(all()), 'desc'), $nbheads)"
right="last(sort(heads(all()), 'desc'), $nbheads)"

# revisions reachable within $depth of the selected heads, but exclusive
# to them (i.e. not reachable from any other head) -- these get stripped
leftsubset="ancestors($left, $depth) and only($left, heads(all() - $left))"
rightsubset="ancestors($right, $depth) and only($right, heads(all() - $right))"

# note: print the repository path, not the revset expression
echo '### building left repository:' "${leftrepo}"
echo '# cloning'
hg clone --noupdate "${repo}" "${leftrepo}"
echo '# stripping' '"'${leftsubset}'"'
hg -R "${leftrepo}" --config extensions.strip= strip --rev "$leftsubset" --no-backup

echo '### building right repository:' "${rightrepo}"
echo '# cloning'
hg clone --noupdate "${repo}" "${rightrepo}"
echo '# stripping:' '"'${rightsubset}'"'
hg -R "${rightrepo}" --config extensions.strip= strip --rev "$rightsubset" --no-backup
@@ -0,0 +1,48 b''
1 #include <Python.h>
2 #include <assert.h>
3 #include <stdlib.h>
4 #include <unistd.h>
5
6 #include <string>
7
8 #include "pyutil.h"
9
10 extern "C" {
11
12 static PyCodeObject *code;
13
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
15 {
16 contrib::initpy(*argv[0]);
17 code = (PyCodeObject *)Py_CompileString(R"py(
18 from parsers import parse_dirstate
19 try:
20 dmap = {}
21 copymap = {}
22 p = parse_dirstate(dmap, copymap, data)
23 except Exception as e:
24 pass
25 # uncomment this print if you're editing this Python code
26 # to debug failures.
27 # print e
28 )py",
29 "fuzzer", Py_file_input);
30 return 0;
31 }
32
33 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
34 {
35 PyObject *text =
36 PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size);
37 PyObject *locals = PyDict_New();
38 PyDict_SetItemString(locals, "data", text);
39 PyObject *res = PyEval_EvalCode(code, contrib::pyglobals(), locals);
40 if (!res) {
41 PyErr_Print();
42 }
43 Py_XDECREF(res);
44 Py_DECREF(locals);
45 Py_DECREF(text);
46 return 0; // Non-zero return values are reserved for future use.
47 }
48 }
@@ -0,0 +1,18 b''
1 from __future__ import absolute_import, print_function
2
3 import argparse
4 import os
5 import zipfile
6
7 ap = argparse.ArgumentParser()
8 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
9 args = ap.parse_args()
10
11 reporoot = os.path.normpath(os.path.join(os.path.dirname(__file__),
12 '..', '..'))
13 dirstate = os.path.join(reporoot, '.hg', 'dirstate')
14
15 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
16 if os.path.exists(dirstate):
17 with open(dirstate) as f:
18 zf.writestr("dirstate", f.read())
@@ -0,0 +1,60 b''
1 #include <Python.h>
2 #include <assert.h>
3 #include <stdlib.h>
4 #include <unistd.h>
5
6 #include <string>
7
8 #include "pyutil.h"
9
10 extern "C" {
11
12 static PyCodeObject *code;
13
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
15 {
16 contrib::initpy(*argv[0]);
17 code = (PyCodeObject *)Py_CompileString(R"py(
18 from parsers import fm1readmarkers
19 def maybeint(s, default):
20 try:
21 return int(s)
22 except ValueError:
23 return default
24 try:
25 parts = data.split('\0', 2)
26 if len(parts) == 3:
27 offset, stop, data = parts
28 elif len(parts) == 2:
29 stop, data = parts
30 offset = 0
31 else:
32 offset = stop = 0
33 offset, stop = maybeint(offset, 0), maybeint(stop, len(data))
34 fm1readmarkers(data, offset, stop)
35 except Exception as e:
36 pass
37 # uncomment this print if you're editing this Python code
38 # to debug failures.
39 # print e
40 )py",
41 "fuzzer", Py_file_input);
42 return 0;
43 }
44
45 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
46 {
47 PyObject *text =
48 PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size);
49 PyObject *locals = PyDict_New();
50 PyDict_SetItemString(locals, "data", text);
51 PyObject *res = PyEval_EvalCode(code, contrib::pyglobals(), locals);
52 if (!res) {
53 PyErr_Print();
54 }
55 Py_XDECREF(res);
56 Py_DECREF(locals);
57 Py_DECREF(text);
58 return 0; // Non-zero return values are reserved for future use.
59 }
60 }
@@ -0,0 +1,36 b''
# Writes a one-entry seed-corpus zip (path given on the command line)
# containing a small captured obsstore blob for the fm1readmarkers
# fuzzer; the first two NUL-separated fields are the offset/stop
# arguments the fuzzer's Python snippet parses out of the input.
from __future__ import absolute_import, print_function

import argparse
import zipfile

ap = argparse.ArgumentParser()
ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
args = ap.parse_args()

with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
    zf.writestr(
        'smallish_obsstore',
        (
            # header: fm1readmarkers should start at offset 1, and
            # read until byte 597.
            '1\x00597\x00'
            # body of obsstore file
            '\x01\x00\x00\x00vA\xd7\x02+C\x1a<)\x01,\x00\x00\x01\x03\x03\xe6'
            '\x92\xde)x\x16\xd1Xph\xc7\xa7[\xe5\xe2\x1a\xab\x1e6e\xaf\xc2\xae'
            '\xe7\xbc\x83\xe1\x88\xa5\xda\xce>O\xbd\x04\xe9\x03\xc4o\xeb\x03'
            '\x01\t\x05\x04\x1fef18operationamenduserAugie Fackler <raf@duri'
            'n42.com>\x00\x00\x00vA\xd7\x02-\x8aD\xaf-\x01,\x00\x00\x01\x03\x03'
            '\x17*\xca\x8f\x9e}i\xe0i\xbb\xdf\x9fb\x03\xd2XG?\xd3h\x98\x89\x1a'
            '=2\xeb\xc3\xc5<\xb3\x9e\xcc\x0e;#\xee\xc3\x10ux\x03\x01\t\x05\x04'
            '\x1fef18operationamenduserAugie Fackler <raf@durin42.com>\x00\x00'
            '\x00vA\xd7\x02Mn\xd9%\xea\x01,\x00\x00\x01\x03\x03\x98\x89\x1a='
            '2\xeb\xc3\xc5<\xb3\x9e\xcc\x0e;#\xee\xc3\x10ux\xe0*\xcaT\x86Z8J'
            '\x85)\x97\xff7\xcc)\xc1\x7f\x19\x0c\x01\x03\x01\t\x05\x04\x1fef'
            '18operationamenduserAugie Fackler <raf@durin42.com>\x00\x00\x00'
            'yA\xd7\x02MtA\xbfj\x01,\x00\x00\x01\x03\x03\xe0*\xcaT\x86Z8J\x85'
            ')\x97\xff7\xcc)\xc1\x7f\x19\x0c\x01\x00\x94\x01\xa9\n\xf80\x92\xa3'
            'j\xc5X\xb1\xc9:\xd51\xb8*\xa9\x03\x01\t\x08\x04\x1fef11operatio'
            'nhistedituserAugie Fackler <raf@durin42.com>\x00\x00\x00yA\xd7\x02'
            'MtA\xd4\xe1\x01,\x00\x00\x01\x03\x03"\xa5\xcb\x86\xb6\xf4\xbaO\xa0'
            'sH\xe7?\xcb\x9b\xc2n\xcfI\x9e\x14\xf0D\xf0!\x18DN\xcd\x97\x016\xa5'
            '\xef\xa06\xcb\x884\x8a\x03\x01\t\x08\x04\x1fef14operationhisted'))
@@ -0,0 +1,49 b''
1 #include "pyutil.h"
2
3 #include <string>
4
5 namespace contrib
6 {
7
8 static char cpypath[8192] = "\0";
9
10 static PyObject *mainmod;
11 static PyObject *globals;
12
13 /* TODO: use Python 3 for this fuzzing? */
14 PyMODINIT_FUNC initparsers(void);
15
16 void initpy(const char *cselfpath)
17 {
18 const std::string subdir = "/sanpy/lib/python2.7";
19 /* HACK ALERT: we need a full Python installation built without
20 pymalloc and with ASAN, so we dump one in
21 $OUT/sanpy/lib/python2.7. This helps us wire that up. */
22 std::string selfpath(cselfpath);
23 std::string pypath;
24 auto pos = selfpath.rfind("/");
25 if (pos == std::string::npos) {
26 char wd[8192];
27 getcwd(wd, 8192);
28 pypath = std::string(wd) + subdir;
29 } else {
30 pypath = selfpath.substr(0, pos) + subdir;
31 }
32 strncpy(cpypath, pypath.c_str(), pypath.size());
33 setenv("PYTHONPATH", cpypath, 1);
34 setenv("PYTHONNOUSERSITE", "1", 1);
35 /* prevent Python from looking up users in the fuzz environment */
36 setenv("PYTHONUSERBASE", cpypath, 1);
37 Py_SetPythonHome(cpypath);
38 Py_InitializeEx(0);
39 mainmod = PyImport_AddModule("__main__");
40 globals = PyModule_GetDict(mainmod);
41 initparsers();
42 }
43
44 PyObject *pyglobals()
45 {
46 return globals;
47 }
48
49 } // namespace contrib
@@ -0,0 +1,9 b''
#include <Python.h>

namespace contrib
{

/* Boot the embedded CPython interpreter used by the fuzz targets;
 * cselfpath is argv[0], used to locate the bundled installation. */
void initpy(const char *cselfpath);
/* Globals dict of __main__, for PyEval_EvalCode calls. */
PyObject *pyglobals();

} /* namespace contrib */
@@ -0,0 +1,47 b''
1 #include <Python.h>
2 #include <assert.h>
3 #include <stdlib.h>
4 #include <unistd.h>
5
6 #include <string>
7
8 #include "pyutil.h"
9
10 extern "C" {
11
12 static PyCodeObject *code;
13
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
15 {
16 contrib::initpy(*argv[0]);
17 code = (PyCodeObject *)Py_CompileString(R"py(
18 from parsers import parse_index2
19 for inline in (True, False):
20 try:
21 index, cache = parse_index2(data, inline)
22 except Exception as e:
23 pass
24 # uncomment this print if you're editing this Python code
25 # to debug failures.
26 # print e
27 )py",
28 "fuzzer", Py_file_input);
29 return 0;
30 }
31
32 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
33 {
34 PyObject *text =
35 PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size);
36 PyObject *locals = PyDict_New();
37 PyDict_SetItemString(locals, "data", text);
38 PyObject *res = PyEval_EvalCode(code, contrib::pyglobals(), locals);
39 if (!res) {
40 PyErr_Print();
41 }
42 Py_XDECREF(res);
43 Py_DECREF(locals);
44 Py_DECREF(text);
45 return 0; // Non-zero return values are reserved for future use.
46 }
47 }
@@ -0,0 +1,28 b''
1 from __future__ import absolute_import, print_function
2
3 import argparse
4 import os
5 import zipfile
6
7 ap = argparse.ArgumentParser()
8 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
9 args = ap.parse_args()
10
11 reporoot = os.path.normpath(os.path.join(os.path.dirname(__file__),
12 '..', '..'))
13 # typically a standalone index
14 changelog = os.path.join(reporoot, '.hg', 'store', '00changelog.i')
15 # an inline revlog with only a few revisions
16 contributing = os.path.join(
17 reporoot, '.hg', 'store', 'data', 'contrib', 'fuzz', 'mpatch.cc.i')
18
19 print(changelog, os.path.exists(changelog))
20 print(contributing, os.path.exists(contributing))
21
22 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
23 if os.path.exists(changelog):
24 with open(changelog) as f:
25 zf.writestr("00changelog.i", f.read())
26 if os.path.exists(contributing):
27 with open(contributing) as f:
28 zf.writestr("contributing.i", f.read())
@@ -0,0 +1,127 b''
1 #!/usr/bin/env python
2 #
3 # Copyright 2018 Paul Morelle <Paul.Morelle@octobus.net>
4 #
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
7 #
8 # This script use the output of `hg perfrevlogwrite -T json --details` to draw
9 # various plot related to write performance in a revlog
10 #
11 # usage: perf-revlog-write-plot.py details.json
12 from __future__ import absolute_import, print_function
13 import json
14 import re
15
16 import numpy as np
17 import scipy.signal
18
19 from matplotlib import (
20 pyplot as plt,
21 ticker as mticker,
22 )
23
24
def plot(data, title=None):
    """Render two stacked plots from `hg perfrevlogwrite -T json --details`.

    data: list of dicts; only entries whose 'title' matches
    "revisions #i of N, rev R" are used, keyed by revision R, plotting
    their 'wall' time.  Top subplot: cumulative wall time.  Bottom
    subplot: per-revision wall time plus rolling mean and median over
    the last 10/100/1000 revisions.  Clicking a legend entry toggles
    the visibility of its series.  `title` becomes the window title.
    """
    items = {}
    re_title = re.compile(r'^revisions #\d+ of \d+, rev (\d+)$')
    for item in data:
        m = re_title.match(item['title'])
        if m is None:
            # skip timings that are not per-revision entries
            continue

        rev = int(m.group(1))
        items[rev] = item

    # dense array indexed by rev - min_rev: row 0 = rev, row 1 = wall time
    min_rev = min(items.keys())
    max_rev = max(items.keys())
    ary = np.empty((2, max_rev - min_rev + 1))
    for rev, item in items.items():
        ary[0][rev - min_rev] = rev
        ary[1][rev - min_rev] = item['wall']

    fig = plt.figure()
    comb_plt = fig.add_subplot(211)
    other_plt = fig.add_subplot(212)

    # cumulative wall time over revisions
    comb_plt.plot(ary[0],
                  np.cumsum(ary[1]),
                  color='red',
                  linewidth=1,
                  label='comb')

    plots = []
    p = other_plt.plot(ary[0],
                       ary[1],
                       color='red',
                       linewidth=1,
                       label='wall')
    plots.append(p)

    # (solid color, smoothed-series color) per rolling-window size
    colors = {
        10: ('green', 'xkcd:grass green'),
        100: ('blue', 'xkcd:bright blue'),
        1000: ('purple', 'xkcd:dark pink'),
    }
    for n, color in colors.items():
        # rolling mean over the last n revisions ('valid' => shorter
        # output, hence the ary[0][n - 1:] x-axis below)
        avg_n = np.convolve(ary[1], np.full(n, 1. / n), 'valid')
        p = other_plt.plot(ary[0][n - 1:],
                           avg_n,
                           color=color[0],
                           linewidth=1,
                           label='avg time last %d' % n)
        plots.append(p)

        # rolling median (medfilt needs an odd kernel, hence n + 1)
        med_n = scipy.signal.medfilt(ary[1], n + 1)
        p = other_plt.plot(ary[0],
                           med_n,
                           color=color[1],
                           linewidth=1,
                           label='median time last %d' % n)
        plots.append(p)

    # plain revision numbers on the x axis (no scientific notation/offset)
    formatter = mticker.ScalarFormatter()
    formatter.set_scientific(False)
    formatter.set_useOffset(False)

    comb_plt.grid()
    comb_plt.xaxis.set_major_formatter(formatter)
    comb_plt.legend()

    other_plt.grid()
    other_plt.xaxis.set_major_formatter(formatter)
    leg = other_plt.legend()
    # map each legend line to its plotted series so clicks can toggle it
    # (note: the loop variable `plot` shadows this function's name here)
    leg2plot = {}
    for legline, plot in zip(leg.get_lines(), plots):
        legline.set_picker(5)
        leg2plot[legline] = plot

    def onpick(event):
        # toggle the series tied to the clicked legend entry; dim the
        # legend line while its series is hidden
        legline = event.artist
        plot = leg2plot[legline]
        visible = not plot[0].get_visible()
        for l in plot:
            l.set_visible(visible)

        if visible:
            legline.set_alpha(1.0)
        else:
            legline.set_alpha(0.2)
        fig.canvas.draw()
    if title is not None:
        fig.canvas.set_window_title(title)
    fig.canvas.mpl_connect('pick_event', onpick)

    plt.show()
116
117
if __name__ == '__main__':
    import sys

    # Read the perfrevlogwrite JSON either from a file named on the
    # command line or from stdin.
    cli_args = sys.argv[1:]
    if cli_args:
        path = cli_args[0]
        print('reading from %r' % path)
        with open(path, 'r') as fp:
            plot(json.load(fp), title=path)
    else:
        print('reading from stdin')
        plot(json.load(sys.stdin))
@@ -0,0 +1,104 b''
1 """implements bookmark-based branching (EXPERIMENTAL)
2
3 - Disables creation of new branches (config: enable-branches=False).
4 - Requires an active bookmark on commit (config: require-bookmark=True).
5 - Doesn't move the active bookmark on update, only on commit.
6 - Requires '--rev' for moving an existing bookmark.
7 - Protects special bookmarks (config: protect=@).
8
9 flow related commands
10
11 :hg book NAME: create a new bookmark
12 :hg book NAME -r REV: move bookmark to revision (fast-forward)
13 :hg up|co NAME: switch to bookmark
14 :hg push -B .: push active bookmark
15 """
16 from __future__ import absolute_import
17
18 from mercurial.i18n import _
19 from mercurial import (
20 bookmarks,
21 commands,
22 error,
23 extensions,
24 registrar,
25 )
26
# Extension name: used as the config section and in error hints.
MY_NAME = 'bookflow'

configtable = {}
configitem = registrar.configitem(configtable)

# Config knobs, all under the [bookflow] section (defaults shown):
configitem(MY_NAME, 'protect', ['@'])           # bookmarks that refuse commits
configitem(MY_NAME, 'require-bookmark', True)   # commit needs an active bookmark
configitem(MY_NAME, 'enable-branches', False)   # allow `hg branch` to create branches

cmdtable = {}
command = registrar.command(cmdtable)
38
def commit_hook(ui, repo, **kwargs):
    """Abort unless the commit satisfies bookflow's bookmark rules.

    With an active bookmark: it must not be protected, and the working
    directory must sit at it.  Without one: abort when
    [bookflow] require-bookmark is set.  Returns 0 on success.
    """
    active = repo._bookmarks.active
    if not active:
        if ui.configbool(MY_NAME, 'require-bookmark', True):
            raise error.Abort(_('cannot commit without an active bookmark'))
        return 0
    if active in ui.configlist(MY_NAME, 'protect'):
        raise error.Abort(
            _('cannot commit, bookmark %s is protected') % active)
    if not cwd_at_bookmark(repo, active):
        raise error.Abort(
            _('cannot commit, working directory out of sync with active bookmark'),
            hint=_("run 'hg up %s'") % active)
    return 0
52
def bookmarks_update(orig, repo, parents, node):
    """Let the active bookmark move on commit, but never on update.

    Two parents means we were called during a commit; delegate to the
    wrapped implementation.  Otherwise (update) report no movement.
    """
    during_commit = len(parents) == 2
    if not during_commit:
        # update: keep the active bookmark pinned where it is
        return False
    return orig(repo, parents, node)
60
def bookmarks_addbookmarks(
        orig, repo, tr, names, rev=None, force=False, inactive=False):
    """Refuse to re-create an existing bookmark unless --rev was given."""
    if not rev:
        existing = repo._bookmarks
        for name in names:
            if name in existing:
                raise error.Abort(_(
                    "bookmark %s already exists, to move use the --rev option"
                ) % name)
    return orig(repo, tr, names, rev, force, inactive)
71
def commands_commit(orig, ui, repo, *args, **opts):
    # Enforce bookflow's rules first (commit_hook raises error.Abort on
    # violation), then hand off to the wrapped commit command.
    commit_hook(ui, repo)
    return orig(ui, repo, *args, **opts)
75
def commands_pull(orig, ui, repo, *args, **opts):
    """Pull, then warn when the working directory trails the active bookmark."""
    rc = orig(ui, repo, *args, **opts)
    active = repo._bookmarks.active
    out_of_sync = active and not cwd_at_bookmark(repo, active)
    if out_of_sync:
        ui.warn(_(
            "working directory out of sync with active bookmark, run "
            "'hg up %s'"
        ) % active)
    return rc
85
def commands_branch(orig, ui, repo, label=None, **opts):
    """Block creation of named branches; point users at bookmarks instead.

    Listing (`no label`), cleaning (--clean) and moving changesets
    (--rev) still pass through to the wrapped command.
    """
    creating = label and not opts.get(r'clean') and not opts.get(r'rev')
    if creating:
        raise error.Abort(
            _("creating named branches is disabled and you should use bookmarks"),
            hint="see 'hg help bookflow'")
    return orig(ui, repo, label, **opts)
92
def cwd_at_bookmark(repo, mark):
    """Return True when the working directory parent is exactly at `mark`."""
    return repo.lookup('.') == repo._bookmarks[mark]
97
def uisetup(ui):
    # Suppress bookmark movement during `hg update` (only commits with
    # two parents reach the original implementation).
    extensions.wrapfunction(bookmarks, 'update', bookmarks_update)
    # Require --rev when moving an existing bookmark.
    extensions.wrapfunction(bookmarks, 'addbookmarks', bookmarks_addbookmarks)
    # Enforce the commit rules and the post-pull sync warning.
    extensions.wrapcommand(commands.table, 'commit', commands_commit)
    extensions.wrapcommand(commands.table, 'pull', commands_pull)
    if not ui.configbool(MY_NAME, 'enable-branches'):
        # named-branch creation is disabled unless explicitly enabled
        extensions.wrapcommand(commands.table, 'branch', commands_branch)
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100755
NO CONTENT: new file 100755
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100755
NO CONTENT: new file 100755
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100755
NO CONTENT: new file 100755
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: new file 100644
NO CONTENT: new file 100644
The requested commit or file is too big and content was truncated. Show full diff
@@ -1,244 +1,248 b''
1 # If you want to change PREFIX, do not just edit it below. The changed
1 # If you want to change PREFIX, do not just edit it below. The changed
2 # value wont get passed on to recursive make calls. You should instead
2 # value wont get passed on to recursive make calls. You should instead
3 # override the variable on the command like:
3 # override the variable on the command like:
4 #
4 #
5 # % make PREFIX=/opt/ install
5 # % make PREFIX=/opt/ install
6
6
7 export PREFIX=/usr/local
7 export PREFIX=/usr/local
8 PYTHON=python
8 PYTHON=python
9 $(eval HGROOT := $(shell pwd))
9 $(eval HGROOT := $(shell pwd))
10 HGPYTHONS ?= $(HGROOT)/build/pythons
10 HGPYTHONS ?= $(HGROOT)/build/pythons
11 PURE=
11 PURE=
12 PYFILESCMD=find mercurial hgext doc -name '*.py'
12 PYFILESCMD=find mercurial hgext doc -name '*.py'
13 PYFILES:=$(shell $(PYFILESCMD))
13 PYFILES:=$(shell $(PYFILESCMD))
14 DOCFILES=mercurial/help/*.txt
14 DOCFILES=mercurial/help/*.txt
15 export LANGUAGE=C
15 export LANGUAGE=C
16 export LC_ALL=C
16 export LC_ALL=C
17 TESTFLAGS ?= $(shell echo $$HGTESTFLAGS)
17 TESTFLAGS ?= $(shell echo $$HGTESTFLAGS)
18 OSXVERSIONFLAGS ?= $(shell echo $$OSXVERSIONFLAGS)
18 OSXVERSIONFLAGS ?= $(shell echo $$OSXVERSIONFLAGS)
19
19
20 # Set this to e.g. "mingw32" to use a non-default compiler.
20 # Set this to e.g. "mingw32" to use a non-default compiler.
21 COMPILER=
21 COMPILER=
22
22
23 COMPILERFLAG_tmp_ =
23 COMPILERFLAG_tmp_ =
24 COMPILERFLAG_tmp_${COMPILER} ?= -c $(COMPILER)
24 COMPILERFLAG_tmp_${COMPILER} ?= -c $(COMPILER)
25 COMPILERFLAG=${COMPILERFLAG_tmp_${COMPILER}}
25 COMPILERFLAG=${COMPILERFLAG_tmp_${COMPILER}}
26
26
27 help:
27 help:
28 @echo 'Commonly used make targets:'
28 @echo 'Commonly used make targets:'
29 @echo ' all - build program and documentation'
29 @echo ' all - build program and documentation'
30 @echo ' install - install program and man pages to $$PREFIX ($(PREFIX))'
30 @echo ' install - install program and man pages to $$PREFIX ($(PREFIX))'
31 @echo ' install-home - install with setup.py install --home=$$HOME ($(HOME))'
31 @echo ' install-home - install with setup.py install --home=$$HOME ($(HOME))'
32 @echo ' local - build for inplace usage'
32 @echo ' local - build for inplace usage'
33 @echo ' tests - run all tests in the automatic test suite'
33 @echo ' tests - run all tests in the automatic test suite'
34 @echo ' test-foo - run only specified tests (e.g. test-merge1.t)'
34 @echo ' test-foo - run only specified tests (e.g. test-merge1.t)'
35 @echo ' dist - run all tests and create a source tarball in dist/'
35 @echo ' dist - run all tests and create a source tarball in dist/'
36 @echo ' clean - remove files created by other targets'
36 @echo ' clean - remove files created by other targets'
37 @echo ' (except installed files or dist source tarball)'
37 @echo ' (except installed files or dist source tarball)'
38 @echo ' update-pot - update i18n/hg.pot'
38 @echo ' update-pot - update i18n/hg.pot'
39 @echo
39 @echo
40 @echo 'Example for a system-wide installation under /usr/local:'
40 @echo 'Example for a system-wide installation under /usr/local:'
41 @echo ' make all && su -c "make install" && hg version'
41 @echo ' make all && su -c "make install" && hg version'
42 @echo
42 @echo
43 @echo 'Example for a local installation (usable in this directory):'
43 @echo 'Example for a local installation (usable in this directory):'
44 @echo ' make local && ./hg version'
44 @echo ' make local && ./hg version'
45
45
46 all: build doc
46 all: build doc
47
47
48 local:
48 local:
49 $(PYTHON) setup.py $(PURE) \
49 $(PYTHON) setup.py $(PURE) \
50 build_py -c -d . \
50 build_py -c -d . \
51 build_ext $(COMPILERFLAG) -i \
51 build_ext $(COMPILERFLAG) -i \
52 build_hgexe $(COMPILERFLAG) -i \
52 build_hgexe $(COMPILERFLAG) -i \
53 build_mo
53 build_mo
54 env HGRCPATH= $(PYTHON) hg version
54 env HGRCPATH= $(PYTHON) hg version
55
55
56 build:
56 build:
57 $(PYTHON) setup.py $(PURE) build $(COMPILERFLAG)
57 $(PYTHON) setup.py $(PURE) build $(COMPILERFLAG)
58
58
59 wheel:
59 wheel:
60 FORCE_SETUPTOOLS=1 $(PYTHON) setup.py $(PURE) bdist_wheel $(COMPILERFLAG)
60 FORCE_SETUPTOOLS=1 $(PYTHON) setup.py $(PURE) bdist_wheel $(COMPILERFLAG)
61
61
62 doc:
62 doc:
63 $(MAKE) -C doc
63 $(MAKE) -C doc
64
64
65 cleanbutpackages:
65 cleanbutpackages:
66 -$(PYTHON) setup.py clean --all # ignore errors from this command
66 -$(PYTHON) setup.py clean --all # ignore errors from this command
67 find contrib doc hgext hgext3rd i18n mercurial tests hgdemandimport \
67 find contrib doc hgext hgext3rd i18n mercurial tests hgdemandimport \
68 \( -name '*.py[cdo]' -o -name '*.so' \) -exec rm -f '{}' ';'
68 \( -name '*.py[cdo]' -o -name '*.so' \) -exec rm -f '{}' ';'
69 rm -f MANIFEST MANIFEST.in hgext/__index__.py tests/*.err
69 rm -f MANIFEST MANIFEST.in hgext/__index__.py tests/*.err
70 rm -f mercurial/__modulepolicy__.py
70 rm -f mercurial/__modulepolicy__.py
71 if test -d .hg; then rm -f mercurial/__version__.py; fi
71 if test -d .hg; then rm -f mercurial/__version__.py; fi
72 rm -rf build mercurial/locale
72 rm -rf build mercurial/locale
73 $(MAKE) -C doc clean
73 $(MAKE) -C doc clean
74 $(MAKE) -C contrib/chg distclean
74 $(MAKE) -C contrib/chg distclean
75 rm -rf rust/target
76 rm -f mercurial/rustext.so
75
77
76 clean: cleanbutpackages
78 clean: cleanbutpackages
77 rm -rf packages
79 rm -rf packages
78
80
79 install: install-bin install-doc
81 install: install-bin install-doc
80
82
81 install-bin: build
83 install-bin: build
82 $(PYTHON) setup.py $(PURE) install --root="$(DESTDIR)/" --prefix="$(PREFIX)" --force
84 $(PYTHON) setup.py $(PURE) install --root="$(DESTDIR)/" --prefix="$(PREFIX)" --force
83
85
84 install-doc: doc
86 install-doc: doc
85 cd doc && $(MAKE) $(MFLAGS) install
87 cd doc && $(MAKE) $(MFLAGS) install
86
88
87 install-home: install-home-bin install-home-doc
89 install-home: install-home-bin install-home-doc
88
90
89 install-home-bin: build
91 install-home-bin: build
90 $(PYTHON) setup.py $(PURE) install --home="$(HOME)" --prefix="" --force
92 $(PYTHON) setup.py $(PURE) install --home="$(HOME)" --prefix="" --force
91
93
92 install-home-doc: doc
94 install-home-doc: doc
93 cd doc && $(MAKE) $(MFLAGS) PREFIX="$(HOME)" install
95 cd doc && $(MAKE) $(MFLAGS) PREFIX="$(HOME)" install
94
96
95 MANIFEST-doc:
97 MANIFEST-doc:
96 $(MAKE) -C doc MANIFEST
98 $(MAKE) -C doc MANIFEST
97
99
98 MANIFEST.in: MANIFEST-doc
100 MANIFEST.in: MANIFEST-doc
99 hg manifest | sed -e 's/^/include /' > MANIFEST.in
101 hg manifest | sed -e 's/^/include /' > MANIFEST.in
100 echo include mercurial/__version__.py >> MANIFEST.in
102 echo include mercurial/__version__.py >> MANIFEST.in
101 sed -e 's/^/include /' < doc/MANIFEST >> MANIFEST.in
103 sed -e 's/^/include /' < doc/MANIFEST >> MANIFEST.in
102
104
103 dist: tests dist-notests
105 dist: tests dist-notests
104
106
105 dist-notests: doc MANIFEST.in
107 dist-notests: doc MANIFEST.in
106 TAR_OPTIONS="--owner=root --group=root --mode=u+w,go-w,a+rX-s" $(PYTHON) setup.py -q sdist
108 TAR_OPTIONS="--owner=root --group=root --mode=u+w,go-w,a+rX-s" $(PYTHON) setup.py -q sdist
107
109
108 check: tests
110 check: tests
109
111
110 tests:
112 tests:
111 cd tests && $(PYTHON) run-tests.py $(TESTFLAGS)
113 cd tests && $(PYTHON) run-tests.py $(TESTFLAGS)
112
114
113 test-%:
115 test-%:
114 cd tests && $(PYTHON) run-tests.py $(TESTFLAGS) $@
116 cd tests && $(PYTHON) run-tests.py $(TESTFLAGS) $@
115
117
116 testpy-%:
118 testpy-%:
117 @echo Looking for Python $* in $(HGPYTHONS)
119 @echo Looking for Python $* in $(HGPYTHONS)
118 [ -e $(HGPYTHONS)/$*/bin/python ] || ( \
120 [ -e $(HGPYTHONS)/$*/bin/python ] || ( \
119 cd $$(mktemp --directory --tmpdir) && \
121 cd $$(mktemp --directory --tmpdir) && \
120 $(MAKE) -f $(HGROOT)/contrib/Makefile.python PYTHONVER=$* PREFIX=$(HGPYTHONS)/$* python )
122 $(MAKE) -f $(HGROOT)/contrib/Makefile.python PYTHONVER=$* PREFIX=$(HGPYTHONS)/$* python )
121 cd tests && $(HGPYTHONS)/$*/bin/python run-tests.py $(TESTFLAGS)
123 cd tests && $(HGPYTHONS)/$*/bin/python run-tests.py $(TESTFLAGS)
122
124
123 check-code:
125 check-code:
124 hg manifest | xargs python contrib/check-code.py
126 hg manifest | xargs python contrib/check-code.py
125
127
126 format-c:
128 format-c:
127 clang-format --style file -i \
129 clang-format --style file -i \
128 `hg files 'set:(**.c or **.cc or **.h) and not "listfile:contrib/clang-format-ignorelist"'`
130 `hg files 'set:(**.c or **.cc or **.h) and not "listfile:contrib/clang-format-ignorelist"'`
129
131
130 update-pot: i18n/hg.pot
132 update-pot: i18n/hg.pot
131
133
132 i18n/hg.pot: $(PYFILES) $(DOCFILES) i18n/posplit i18n/hggettext
134 i18n/hg.pot: $(PYFILES) $(DOCFILES) i18n/posplit i18n/hggettext
133 $(PYTHON) i18n/hggettext mercurial/commands.py \
135 $(PYTHON) i18n/hggettext mercurial/commands.py \
134 hgext/*.py hgext/*/__init__.py \
136 hgext/*.py hgext/*/__init__.py \
135 mercurial/fileset.py mercurial/revset.py \
137 mercurial/fileset.py mercurial/revset.py \
136 mercurial/templatefilters.py \
138 mercurial/templatefilters.py \
137 mercurial/templatefuncs.py \
139 mercurial/templatefuncs.py \
138 mercurial/templatekw.py \
140 mercurial/templatekw.py \
139 mercurial/filemerge.py \
141 mercurial/filemerge.py \
140 mercurial/hgweb/webcommands.py \
142 mercurial/hgweb/webcommands.py \
141 mercurial/util.py \
143 mercurial/util.py \
142 $(DOCFILES) > i18n/hg.pot.tmp
144 $(DOCFILES) > i18n/hg.pot.tmp
143 # All strings marked for translation in Mercurial contain
145 # All strings marked for translation in Mercurial contain
144 # ASCII characters only. But some files contain string
146 # ASCII characters only. But some files contain string
145 # literals like this '\037\213'. xgettext thinks it has to
147 # literals like this '\037\213'. xgettext thinks it has to
146 # parse them even though they are not marked for translation.
148 # parse them even though they are not marked for translation.
147 # Extracting with an explicit encoding of ISO-8859-1 will make
149 # Extracting with an explicit encoding of ISO-8859-1 will make
148 # xgettext "parse" and ignore them.
150 # xgettext "parse" and ignore them.
149 $(PYFILESCMD) | xargs \
151 $(PYFILESCMD) | xargs \
150 xgettext --package-name "Mercurial" \
152 xgettext --package-name "Mercurial" \
151 --msgid-bugs-address "<mercurial-devel@mercurial-scm.org>" \
153 --msgid-bugs-address "<mercurial-devel@mercurial-scm.org>" \
152 --copyright-holder "Matt Mackall <mpm@selenic.com> and others" \
154 --copyright-holder "Matt Mackall <mpm@selenic.com> and others" \
153 --from-code ISO-8859-1 --join --sort-by-file --add-comments=i18n: \
155 --from-code ISO-8859-1 --join --sort-by-file --add-comments=i18n: \
154 -d hg -p i18n -o hg.pot.tmp
156 -d hg -p i18n -o hg.pot.tmp
155 $(PYTHON) i18n/posplit i18n/hg.pot.tmp
157 $(PYTHON) i18n/posplit i18n/hg.pot.tmp
156 # The target file is not created before the last step. So it never is in
158 # The target file is not created before the last step. So it never is in
157 # an intermediate state.
159 # an intermediate state.
158 mv -f i18n/hg.pot.tmp i18n/hg.pot
160 mv -f i18n/hg.pot.tmp i18n/hg.pot
159
161
160 %.po: i18n/hg.pot
162 %.po: i18n/hg.pot
161 # work on a temporary copy for never having a half completed target
163 # work on a temporary copy for never having a half completed target
162 cp $@ $@.tmp
164 cp $@ $@.tmp
163 msgmerge --no-location --update $@.tmp $^
165 msgmerge --no-location --update $@.tmp $^
164 mv -f $@.tmp $@
166 mv -f $@.tmp $@
165
167
166 # Packaging targets
168 # Packaging targets
167
169
168 packaging_targets := \
170 packaging_targets := \
169 centos5 \
171 centos5 \
170 centos6 \
172 centos6 \
171 centos7 \
173 centos7 \
172 deb \
174 deb \
173 docker-centos5 \
175 docker-centos5 \
174 docker-centos6 \
176 docker-centos6 \
175 docker-centos7 \
177 docker-centos7 \
176 docker-debian-jessie \
178 docker-debian-jessie \
177 docker-debian-stretch \
179 docker-debian-stretch \
178 docker-fedora20 \
180 docker-fedora20 \
179 docker-fedora21 \
181 docker-fedora21 \
180 docker-fedora28 \
182 docker-fedora28 \
183 docker-fedora29 \
181 docker-ubuntu-trusty \
184 docker-ubuntu-trusty \
182 docker-ubuntu-trusty-ppa \
185 docker-ubuntu-trusty-ppa \
183 docker-ubuntu-xenial \
186 docker-ubuntu-xenial \
184 docker-ubuntu-xenial-ppa \
187 docker-ubuntu-xenial-ppa \
185 docker-ubuntu-artful \
188 docker-ubuntu-artful \
186 docker-ubuntu-artful-ppa \
189 docker-ubuntu-artful-ppa \
187 docker-ubuntu-bionic \
190 docker-ubuntu-bionic \
188 docker-ubuntu-bionic-ppa \
191 docker-ubuntu-bionic-ppa \
189 fedora20 \
192 fedora20 \
190 fedora21 \
193 fedora21 \
191 fedora28 \
194 fedora28 \
195 fedora29 \
192 linux-wheels \
196 linux-wheels \
193 linux-wheels-x86_64 \
197 linux-wheels-x86_64 \
194 linux-wheels-i686 \
198 linux-wheels-i686 \
195 ppa
199 ppa
196
200
197 # Forward packaging targets for convenience.
201 # Forward packaging targets for convenience.
198 $(packaging_targets):
202 $(packaging_targets):
199 $(MAKE) -C contrib/packaging $@
203 $(MAKE) -C contrib/packaging $@
200
204
201 osx:
205 osx:
202 rm -rf build/mercurial
206 rm -rf build/mercurial
203 /usr/bin/python2.7 setup.py install --optimize=1 \
207 /usr/bin/python2.7 setup.py install --optimize=1 \
204 --root=build/mercurial/ --prefix=/usr/local/ \
208 --root=build/mercurial/ --prefix=/usr/local/ \
205 --install-lib=/Library/Python/2.7/site-packages/
209 --install-lib=/Library/Python/2.7/site-packages/
206 make -C doc all install DESTDIR="$(PWD)/build/mercurial/"
210 make -C doc all install DESTDIR="$(PWD)/build/mercurial/"
207 # Place a bogon .DS_Store file in the target dir so we can be
211 # Place a bogon .DS_Store file in the target dir so we can be
208 # sure it doesn't get included in the final package.
212 # sure it doesn't get included in the final package.
209 touch build/mercurial/.DS_Store
213 touch build/mercurial/.DS_Store
210 # install zsh completions - this location appears to be
214 # install zsh completions - this location appears to be
211 # searched by default as of macOS Sierra.
215 # searched by default as of macOS Sierra.
212 install -d build/mercurial/usr/local/share/zsh/site-functions/
216 install -d build/mercurial/usr/local/share/zsh/site-functions/
213 install -m 0644 contrib/zsh_completion build/mercurial/usr/local/share/zsh/site-functions/_hg
217 install -m 0644 contrib/zsh_completion build/mercurial/usr/local/share/zsh/site-functions/_hg
214 # install bash completions - there doesn't appear to be a
218 # install bash completions - there doesn't appear to be a
215 # place that's searched by default for bash, so we'll follow
219 # place that's searched by default for bash, so we'll follow
216 # the lead of Apple's git install and just put it in a
220 # the lead of Apple's git install and just put it in a
217 # location of our own.
221 # location of our own.
218 install -d build/mercurial/usr/local/hg/contrib/
222 install -d build/mercurial/usr/local/hg/contrib/
219 install -m 0644 contrib/bash_completion build/mercurial/usr/local/hg/contrib/hg-completion.bash
223 install -m 0644 contrib/bash_completion build/mercurial/usr/local/hg/contrib/hg-completion.bash
220 make -C contrib/chg \
224 make -C contrib/chg \
221 HGPATH=/usr/local/bin/hg \
225 HGPATH=/usr/local/bin/hg \
222 PYTHON=/usr/bin/python2.7 \
226 PYTHON=/usr/bin/python2.7 \
223 HGEXTDIR=/Library/Python/2.7/site-packages/hgext \
227 HGEXTDIR=/Library/Python/2.7/site-packages/hgext \
224 DESTDIR=../../build/mercurial \
228 DESTDIR=../../build/mercurial \
225 PREFIX=/usr/local \
229 PREFIX=/usr/local \
226 clean install
230 clean install
227 mkdir -p $${OUTPUTDIR:-dist}
231 mkdir -p $${OUTPUTDIR:-dist}
228 HGVER=$$(python contrib/genosxversion.py $(OSXVERSIONFLAGS) build/mercurial/Library/Python/2.7/site-packages/mercurial/__version__.py) && \
232 HGVER=$$(python contrib/genosxversion.py $(OSXVERSIONFLAGS) build/mercurial/Library/Python/2.7/site-packages/mercurial/__version__.py) && \
229 OSXVER=$$(sw_vers -productVersion | cut -d. -f1,2) && \
233 OSXVER=$$(sw_vers -productVersion | cut -d. -f1,2) && \
230 pkgbuild --filter \\.DS_Store --root build/mercurial/ \
234 pkgbuild --filter \\.DS_Store --root build/mercurial/ \
231 --identifier org.mercurial-scm.mercurial \
235 --identifier org.mercurial-scm.mercurial \
232 --version "$${HGVER}" \
236 --version "$${HGVER}" \
233 build/mercurial.pkg && \
237 build/mercurial.pkg && \
234 productbuild --distribution contrib/packaging/macosx/distribution.xml \
238 productbuild --distribution contrib/packaging/macosx/distribution.xml \
235 --package-path build/ \
239 --package-path build/ \
236 --version "$${HGVER}" \
240 --version "$${HGVER}" \
237 --resources contrib/packaging/macosx/ \
241 --resources contrib/packaging/macosx/ \
238 "$${OUTPUTDIR:-dist/}"/Mercurial-"$${HGVER}"-macosx"$${OSXVER}".pkg
242 "$${OUTPUTDIR:-dist/}"/Mercurial-"$${HGVER}"-macosx"$${OSXVER}".pkg
239
243
240 .PHONY: help all local build doc cleanbutpackages clean install install-bin \
244 .PHONY: help all local build doc cleanbutpackages clean install install-bin \
241 install-doc install-home install-home-bin install-home-doc \
245 install-doc install-home install-home-bin install-home-doc \
242 dist dist-notests check tests check-code format-c update-pot \
246 dist dist-notests check tests check-code format-c update-pot \
243 $(packaging_targets) \
247 $(packaging_targets) \
244 osx
248 osx
@@ -1,141 +1,156 b''
1 # All revsets ever used with revsetbenchmarks.py script
1 # All revsets ever used with revsetbenchmarks.py script
2 #
2 #
3 # The goal of this file is to gather all revsets ever used for benchmarking
3 # The goal of this file is to gather all revsets ever used for benchmarking
4 # revset's performance. It should be used to gather revsets that test a
4 # revset's performance. It should be used to gather revsets that test a
5 # specific usecase or a specific implementation of revset predicates.
5 # specific usecase or a specific implementation of revset predicates.
6 # If you are working on the smartset implementation itself, check
6 # If you are working on the smartset implementation itself, check
7 # 'base-revsets.txt'.
7 # 'base-revsets.txt'.
8 #
8 #
9 # Please update this file with any revsets you use for benchmarking a change so
9 # Please update this file with any revsets you use for benchmarking a change so
10 # that future contributors can easily find and retest it when doing further
10 # that future contributors can easily find and retest it when doing further
11 # modification. Feel free to highlight interesting variants if needed.
11 # modification. Feel free to highlight interesting variants if needed.
12
12
13
13
14 ## Revset from this section are all extracted from changelog when this file was
14 ## Revset from this section are all extracted from changelog when this file was
15 # created. Feel free to dig and improve documentation.
15 # created. Feel free to dig and improve documentation.
16
16
17 # Used in revision da05fe01170b
17 # Used in revision da05fe01170b
18 (20000::) - (20000)
18 (20000::) - (20000)
19 # Used in revision 95af98616aa7
19 # Used in revision 95af98616aa7
20 parents(20000)
20 parents(20000)
21 # Used in revision 186fd06283b4
21 # Used in revision 186fd06283b4
22 (_intlist('20000\x0020001')) and merge()
22 (_intlist('20000\x0020001')) and merge()
23 # Used in revision 911f5a6579d1
23 # Used in revision 911f5a6579d1
24 p1(20000)
24 p1(20000)
25 p2(10000)
25 p2(10000)
26 # Used in revision b6dc3b79bb25
26 # Used in revision b6dc3b79bb25
27 0::
27 0::
28 # Used in revision faf4f63533ff
28 # Used in revision faf4f63533ff
29 bookmark()
29 bookmark()
30 # Used in revision 22ba2c0825da
30 # Used in revision 22ba2c0825da
31 tip~25
31 tip~25
32 # Used in revision 0cf46b8298fe
32 # Used in revision 0cf46b8298fe
33 bisect(range)
33 bisect(range)
34 # Used in revision 5b65429721d5
34 # Used in revision 5b65429721d5
35 divergent()
35 divergent()
36 # Used in revision 6261b9c549a2
36 # Used in revision 6261b9c549a2
37 file(COPYING)
37 file(COPYING)
38 # Used in revision 44f471102f3a
38 # Used in revision 44f471102f3a
39 follow(COPYING)
39 follow(COPYING)
40 # Used in revision 8040a44aab1c
40 # Used in revision 8040a44aab1c
41 origin(tip)
41 origin(tip)
42 # Used in revision bbf4f3dfd700
42 # Used in revision bbf4f3dfd700
43 rev(25)
43 rev(25)
44 # Used in revision a428db9ab61d
44 # Used in revision a428db9ab61d
45 p1()
45 p1()
46 # Used in revision c1546d7400ef
46 # Used in revision c1546d7400ef
47 min(0::)
47 min(0::)
48 # Used in revision 546fa6576815
48 # Used in revision 546fa6576815
49 author(lmoscovicz) or author(mpm)
49 author(lmoscovicz) or author(mpm)
50 author(mpm) or author(lmoscovicz)
50 author(mpm) or author(lmoscovicz)
51 # Used in revision 9bfe68357c01
51 # Used in revision 9bfe68357c01
52 public() and id("d82e2223f132")
52 public() and id("d82e2223f132")
53 # Used in revision ba89f7b542c9
53 # Used in revision ba89f7b542c9
54 rev(25)
54 rev(25)
55 # Used in revision eb763217152a
55 # Used in revision eb763217152a
56 rev(210000)
56 rev(210000)
57 # Used in revision 69524a05a7fa
57 # Used in revision 69524a05a7fa
58 10:100
58 10:100
59 parents(10):parents(100)
59 parents(10):parents(100)
60 # Used in revision 6f1b8b3f12fd
60 # Used in revision 6f1b8b3f12fd
61 100~5
61 100~5
62 parents(100)~5
62 parents(100)~5
63 (100~5)~5
63 (100~5)~5
64 # Used in revision 7a42e5d4c418
64 # Used in revision 7a42e5d4c418
65 children(tip~100)
65 children(tip~100)
66 # Used in revision 7e8737e6ab08
66 # Used in revision 7e8737e6ab08
67 100^1
67 100^1
68 parents(100)^1
68 parents(100)^1
69 (100^1)^1
69 (100^1)^1
70 # Used in revision 30e0dcd7c5ff
70 # Used in revision 30e0dcd7c5ff
71 matching(100)
71 matching(100)
72 matching(parents(100))
72 matching(parents(100))
73 # Used in revision aafeaba22826
73 # Used in revision aafeaba22826
74 0|1|2|3|4|5|6|7|8|9
74 0|1|2|3|4|5|6|7|8|9
75 # Used in revision 33c7a94d4dd0
75 # Used in revision 33c7a94d4dd0
76 tip:0
76 tip:0
77 # Used in revision 7d369fae098e
77 # Used in revision 7d369fae098e
78 (0:100000)
78 (0:100000)
79 # Used in revision b333ca94403d
79 # Used in revision b333ca94403d
80 0 + 1 + 2 + ... + 200
80 0 + 1 + 2 + ... + 200
81 0 + 1 + 2 + ... + 1000
81 0 + 1 + 2 + ... + 1000
82 sort(0 + 1 + 2 + ... + 200)
82 sort(0 + 1 + 2 + ... + 200)
83 sort(0 + 1 + 2 + ... + 1000)
83 sort(0 + 1 + 2 + ... + 1000)
84 # Used in revision 7fbef7932af9
84 # Used in revision 7fbef7932af9
85 first(0 + 1 + 2 + ... + 1000)
85 first(0 + 1 + 2 + ... + 1000)
86 # Used in revision ceaf04bb14ff
86 # Used in revision ceaf04bb14ff
87 0:1000
87 0:1000
88 # Used in revision 262e6ad93885
88 # Used in revision 262e6ad93885
89 not public()
89 not public()
90 (tip~1000::) - public()
90 (tip~1000::) - public()
91 not public() and branch("default")
91 not public() and branch("default")
92 # Used in revision 15412bba5a68
92 # Used in revision 15412bba5a68
93 0::tip
93 0::tip
94
94
95 ## all the revsets from this section have been taken from the former central file
95 ## all the revsets from this section have been taken from the former central file
96 # for revset's benchmarking, they are undocumented for this reason.
96 # for revset's benchmarking, they are undocumented for this reason.
97 all()
97 all()
98 draft()
98 draft()
99 ::tip
99 ::tip
100 draft() and ::tip
100 draft() and ::tip
101 ::tip and draft()
101 ::tip and draft()
102 author(lmoscovicz)
102 author(lmoscovicz)
103 author(mpm)
103 author(mpm)
104 ::p1(p1(tip))::
104 ::p1(p1(tip))::
105 public()
105 public()
106 :10000 and public()
106 :10000 and public()
107 :10000 and draft()
107 :10000 and draft()
108 (not public() - obsolete())
108 (not public() - obsolete())
109
109
110 # The one below is used by rebase
110 # The one below is used by rebase
111 (children(ancestor(tip~5, tip)) and ::(tip~5))::
111 (children(ancestor(tip~5, tip)) and ::(tip~5))::
112
112
113 # those two `roots(...)` inputs are close to what phase movement use.
113 # those two `roots(...)` inputs are close to what phase movement use.
114 roots((tip~100::) - (tip~100::tip))
114 roots((tip~100::) - (tip~100::tip))
115 roots((0::) - (0::tip))
115 roots((0::) - (0::tip))
116
116
117 # more roots testing
117 # more roots testing
118 roots(tip~100:)
118 roots(tip~100:)
119 roots(:42)
119 roots(:42)
120 roots(not public())
120 roots(not public())
121 roots((0:tip)::)
121 roots((0:tip)::)
122 roots(0::tip)
122 roots(0::tip)
123 42:68 and roots(42:tip)
123 42:68 and roots(42:tip)
124 # Used in revision f140d6207cca
124 # Used in revision f140d6207cca
125 roots(0:tip)
125 roots(0:tip)
126 # test disjoint set with multiple roots
126 # test disjoint set with multiple roots
127 roots((:42) + (tip~42:))
127 roots((:42) + (tip~42:))
128
128
129 # Testing the behavior of "head()" in various situations
129 # Testing the behavior of "head()" in various situations
130 head()
130 head()
131 head() - public()
131 head() - public()
132 draft() and head()
132 draft() and head()
133 head() and author("mpm")
133 head() and author("mpm")
134
134
135 # testing the mutable phases set
135 # testing the mutable phases set
136 draft()
136 draft()
137 secret()
137 secret()
138
138
139 # test finding common ancestors
139 # test finding common ancestors
140 heads(commonancestors(last(head(), 2)))
140 heads(commonancestors(last(head(), 2)))
141 heads(commonancestors(head()))
141 heads(commonancestors(head()))
142
143 # more heads testing
144 heads(all())
145 heads(-10000:-1)
146 (-5000:-1000) and heads(-10000:-1)
147 heads(matching(tip, "author"))
148 heads(matching(tip, "author")) and -10000:-1
149 (-10000:-1) and heads(matching(tip, "author"))
150 # more roots testing
151 roots(all())
152 roots(-10000:-1)
153 (-5000:-1000) and roots(-10000:-1)
154 roots(matching(tip, "author"))
155 roots(matching(tip, "author")) and -10000:-1
156 (-10000:-1) and roots(matching(tip, "author"))
@@ -1,90 +1,98 b''
1 #!/usr/bin/env python3
1 #!/usr/bin/env python3
2 #
2 #
3 # Copyright 2018 Google LLC.
3 # Copyright 2018 Google LLC.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """Tool read primitive events from a pipe to produce a catapult trace.
7 """Tool read primitive events from a pipe to produce a catapult trace.
8
8
9 Usage:
10 Terminal 1: $ catapipe.py /tmp/mypipe /tmp/trace.json
11 Terminal 2: $ HGCATAPULTSERVERPIPE=/tmp/mypipe hg root
12 <ctrl-c catapipe.py in Terminal 1>
13 $ catapult/tracing/bin/trace2html /tmp/trace.json # produce /tmp/trace.html
14 <open trace.html in your browser of choice; the WASD keys are very useful>
15 (catapult is located at https://github.com/catapult-project/catapult)
16
9 For now the event stream supports
17 For now the event stream supports
10
18
11 START $SESSIONID ...
19 START $SESSIONID ...
12
20
13 and
21 and
14
22
15 END $SESSIONID ...
23 END $SESSIONID ...
16
24
17 events. Everything after the SESSIONID (which must not contain spaces)
25 events. Everything after the SESSIONID (which must not contain spaces)
18 is used as a label for the event. Events are timestamped as of when
26 is used as a label for the event. Events are timestamped as of when
19 they arrive in this process and are then used to produce catapult
27 they arrive in this process and are then used to produce catapult
20 traces that can be loaded in Chrome's about:tracing utility. It's
28 traces that can be loaded in Chrome's about:tracing utility. It's
21 important that the event stream *into* this process stay simple,
29 important that the event stream *into* this process stay simple,
22 because we have to emit it from the shell scripts produced by
30 because we have to emit it from the shell scripts produced by
23 run-tests.py.
31 run-tests.py.
24
32
25 Typically you'll want to place the path to the named pipe in the
33 Typically you'll want to place the path to the named pipe in the
26 HGCATAPULTSERVERPIPE environment variable, which both run-tests and hg
34 HGCATAPULTSERVERPIPE environment variable, which both run-tests and hg
27 understand.
35 understand. To trace *only* run-tests, use HGTESTCATAPULTSERVERPIPE instead.
28 """
36 """
29 from __future__ import absolute_import, print_function
37 from __future__ import absolute_import, print_function
30
38
31 import argparse
39 import argparse
32 import json
40 import json
33 import os
41 import os
34 import timeit
42 import timeit
35
43
36 _TYPEMAP = {
44 _TYPEMAP = {
37 'START': 'B',
45 'START': 'B',
38 'END': 'E',
46 'END': 'E',
39 }
47 }
40
48
41 _threadmap = {}
49 _threadmap = {}
42
50
43 # Timeit already contains the whole logic about which timer to use based on
51 # Timeit already contains the whole logic about which timer to use based on
44 # Python version and OS
52 # Python version and OS
45 timer = timeit.default_timer
53 timer = timeit.default_timer
46
54
47 def main():
55 def main():
48 parser = argparse.ArgumentParser()
56 parser = argparse.ArgumentParser()
49 parser.add_argument('pipe', type=str, nargs=1,
57 parser.add_argument('pipe', type=str, nargs=1,
50 help='Path of named pipe to create and listen on.')
58 help='Path of named pipe to create and listen on.')
51 parser.add_argument('output', default='trace.json', type=str, nargs='?',
59 parser.add_argument('output', default='trace.json', type=str, nargs='?',
52 help='Path of json file to create where the traces '
60 help='Path of json file to create where the traces '
53 'will be stored.')
61 'will be stored.')
54 parser.add_argument('--debug', default=False, action='store_true',
62 parser.add_argument('--debug', default=False, action='store_true',
55 help='Print useful debug messages')
63 help='Print useful debug messages')
56 args = parser.parse_args()
64 args = parser.parse_args()
57 fn = args.pipe[0]
65 fn = args.pipe[0]
58 os.mkfifo(fn)
66 os.mkfifo(fn)
59 try:
67 try:
60 with open(fn) as f, open(args.output, 'w') as out:
68 with open(fn) as f, open(args.output, 'w') as out:
61 out.write('[\n')
69 out.write('[\n')
62 start = timer()
70 start = timer()
63 while True:
71 while True:
64 ev = f.readline().strip()
72 ev = f.readline().strip()
65 if not ev:
73 if not ev:
66 continue
74 continue
67 now = timer()
75 now = timer()
68 if args.debug:
76 if args.debug:
69 print(ev)
77 print(ev)
70 verb, session, label = ev.split(' ', 2)
78 verb, session, label = ev.split(' ', 2)
71 if session not in _threadmap:
79 if session not in _threadmap:
72 _threadmap[session] = len(_threadmap)
80 _threadmap[session] = len(_threadmap)
73 pid = _threadmap[session]
81 pid = _threadmap[session]
74 ts_micros = (now - start) * 1000000
82 ts_micros = (now - start) * 1000000
75 out.write(json.dumps(
83 out.write(json.dumps(
76 {
84 {
77 "name": label,
85 "name": label,
78 "cat": "misc",
86 "cat": "misc",
79 "ph": _TYPEMAP[verb],
87 "ph": _TYPEMAP[verb],
80 "ts": ts_micros,
88 "ts": ts_micros,
81 "pid": pid,
89 "pid": pid,
82 "tid": 1,
90 "tid": 1,
83 "args": {}
91 "args": {}
84 }))
92 }))
85 out.write(',\n')
93 out.write(',\n')
86 finally:
94 finally:
87 os.unlink(fn)
95 os.unlink(fn)
88
96
89 if __name__ == '__main__':
97 if __name__ == '__main__':
90 main()
98 main()
@@ -1,109 +1,109 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # Copyright 2014 Matt Mackall <mpm@selenic.com>
3 # Copyright 2014 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # A tool/hook to run basic sanity checks on commits/patches for
5 # A tool/hook to run basic sanity checks on commits/patches for
6 # submission to Mercurial. Install by adding the following to your
6 # submission to Mercurial. Install by adding the following to your
7 # .hg/hgrc:
7 # .hg/hgrc:
8 #
8 #
9 # [hooks]
9 # [hooks]
10 # pretxncommit = contrib/check-commit
10 # pretxncommit = contrib/check-commit
11 #
11 #
12 # The hook can be temporarily bypassed with:
12 # The hook can be temporarily bypassed with:
13 #
13 #
14 # $ BYPASS= hg commit
14 # $ BYPASS= hg commit
15 #
15 #
16 # See also: https://mercurial-scm.org/wiki/ContributingChanges
16 # See also: https://mercurial-scm.org/wiki/ContributingChanges
17
17
18 from __future__ import absolute_import, print_function
18 from __future__ import absolute_import, print_function
19
19
20 import os
20 import os
21 import re
21 import re
22 import sys
22 import sys
23
23
24 commitheader = r"^(?:# [^\n]*\n)*"
24 commitheader = r"^(?:# [^\n]*\n)*"
25 afterheader = commitheader + r"(?!#)"
25 afterheader = commitheader + r"(?!#)"
26 beforepatch = afterheader + r"(?!\n(?!@@))"
26 beforepatch = afterheader + r"(?!\n(?!@@))"
27
27
28 errors = [
28 errors = [
29 (beforepatch + r".*[(]bc[)]", "(BC) needs to be uppercase"),
29 (beforepatch + r".*[(]bc[)]", "(BC) needs to be uppercase"),
30 (beforepatch + r".*[(]issue \d\d\d",
30 (beforepatch + r".*[(]issue \d\d\d",
31 "no space allowed between issue and number"),
31 "no space allowed between issue and number"),
32 (beforepatch + r".*[(]bug(\d|\s)", "use (issueDDDD) instead of bug"),
32 (beforepatch + r".*[(]bug(\d|\s)", "use (issueDDDD) instead of bug"),
33 (commitheader + r"# User [^@\n]+\n", "username is not an email address"),
33 (commitheader + r"# User [^@\n]+\n", "username is not an email address"),
34 (commitheader + r"(?!merge with )[^#]\S+[^:] ",
34 (commitheader + r"(?!merge with )[^#]\S+[^:] ",
35 "summary line doesn't start with 'topic: '"),
35 "summary line doesn't start with 'topic: '"),
36 (afterheader + r"[A-Z][a-z]\S+", "don't capitalize summary lines"),
36 (afterheader + r"[A-Z][a-z]\S+", "don't capitalize summary lines"),
37 (afterheader + r"[^\n]*: *[A-Z][a-z]\S+", "don't capitalize summary lines"),
37 (afterheader + r"^\S+: *[A-Z][a-z]\S+", "don't capitalize summary lines"),
38 (afterheader + r"\S*[^A-Za-z0-9-_]\S*: ",
38 (afterheader + r"\S*[^A-Za-z0-9-_]\S*: ",
39 "summary keyword should be most user-relevant one-word command or topic"),
39 "summary keyword should be most user-relevant one-word command or topic"),
40 (afterheader + r".*\.\s*\n", "don't add trailing period on summary line"),
40 (afterheader + r".*\.\s*\n", "don't add trailing period on summary line"),
41 (afterheader + r".{79,}", "summary line too long (limit is 78)"),
41 (afterheader + r".{79,}", "summary line too long (limit is 78)"),
42 # Forbid "_" in function name.
42 # Forbid "_" in function name.
43 #
43 #
44 # We skip the check for cffi related functions. They use names mapping the
44 # We skip the check for cffi related functions. They use names mapping the
45 # name of the C function. C function names may contain "_".
45 # name of the C function. C function names may contain "_".
46 (r"\n\+[ \t]+def (?!cffi)[a-z]+_[a-z]",
46 (r"\n\+[ \t]+def (?!cffi)[a-z]+_[a-z]",
47 "adds a function with foo_bar naming"),
47 "adds a function with foo_bar naming"),
48 ]
48 ]
49
49
50 word = re.compile('\S')
50 word = re.compile('\S')
51 def nonempty(first, second):
51 def nonempty(first, second):
52 if word.search(first):
52 if word.search(first):
53 return first
53 return first
54 return second
54 return second
55
55
56 def checkcommit(commit, node=None):
56 def checkcommit(commit, node=None):
57 exitcode = 0
57 exitcode = 0
58 printed = node is None
58 printed = node is None
59 hits = []
59 hits = []
60 signtag = (afterheader +
60 signtag = (afterheader +
61 r'Added (tag [^ ]+|signature) for changeset [a-f0-9]{12}')
61 r'Added (tag [^ ]+|signature) for changeset [a-f0-9]{12}')
62 if re.search(signtag, commit):
62 if re.search(signtag, commit):
63 return 0
63 return 0
64 for exp, msg in errors:
64 for exp, msg in errors:
65 for m in re.finditer(exp, commit):
65 for m in re.finditer(exp, commit):
66 end = m.end()
66 end = m.end()
67 trailing = re.search(r'(\\n)+$', exp)
67 trailing = re.search(r'(\\n)+$', exp)
68 if trailing:
68 if trailing:
69 end -= len(trailing.group()) / 2
69 end -= len(trailing.group()) / 2
70 hits.append((end, exp, msg))
70 hits.append((end, exp, msg))
71 if hits:
71 if hits:
72 hits.sort()
72 hits.sort()
73 pos = 0
73 pos = 0
74 last = ''
74 last = ''
75 for n, l in enumerate(commit.splitlines(True)):
75 for n, l in enumerate(commit.splitlines(True)):
76 pos += len(l)
76 pos += len(l)
77 while len(hits):
77 while len(hits):
78 end, exp, msg = hits[0]
78 end, exp, msg = hits[0]
79 if pos < end:
79 if pos < end:
80 break
80 break
81 if not printed:
81 if not printed:
82 printed = True
82 printed = True
83 print("node: %s" % node)
83 print("node: %s" % node)
84 print("%d: %s" % (n, msg))
84 print("%d: %s" % (n, msg))
85 print(" %s" % nonempty(l, last)[:-1])
85 print(" %s" % nonempty(l, last)[:-1])
86 if "BYPASS" not in os.environ:
86 if "BYPASS" not in os.environ:
87 exitcode = 1
87 exitcode = 1
88 del hits[0]
88 del hits[0]
89 last = nonempty(l, last)
89 last = nonempty(l, last)
90
90
91 return exitcode
91 return exitcode
92
92
93 def readcommit(node):
93 def readcommit(node):
94 return os.popen("hg export %s" % node).read()
94 return os.popen("hg export %s" % node).read()
95
95
96 if __name__ == "__main__":
96 if __name__ == "__main__":
97 exitcode = 0
97 exitcode = 0
98 node = os.environ.get("HG_NODE")
98 node = os.environ.get("HG_NODE")
99
99
100 if node:
100 if node:
101 commit = readcommit(node)
101 commit = readcommit(node)
102 exitcode = checkcommit(commit)
102 exitcode = checkcommit(commit)
103 elif sys.argv[1:]:
103 elif sys.argv[1:]:
104 for node in sys.argv[1:]:
104 for node in sys.argv[1:]:
105 exitcode |= checkcommit(readcommit(node), node)
105 exitcode |= checkcommit(readcommit(node), node)
106 else:
106 else:
107 commit = sys.stdin.read()
107 commit = sys.stdin.read()
108 exitcode = checkcommit(commit)
108 exitcode = checkcommit(commit)
109 sys.exit(exitcode)
109 sys.exit(exitcode)
@@ -1,95 +1,94 b''
1 # Files that just need to be migrated to the formatter.
1 # Files that just need to be migrated to the formatter.
2 # Do not add new files here!
2 # Do not add new files here!
3 mercurial/cext/dirs.c
3 mercurial/cext/dirs.c
4 mercurial/cext/manifest.c
4 mercurial/cext/manifest.c
5 mercurial/cext/osutil.c
5 mercurial/cext/osutil.c
6 mercurial/cext/revlog.c
7 # Vendored code that we should never format:
6 # Vendored code that we should never format:
8 contrib/python-zstandard/c-ext/bufferutil.c
7 contrib/python-zstandard/c-ext/bufferutil.c
9 contrib/python-zstandard/c-ext/compressionchunker.c
8 contrib/python-zstandard/c-ext/compressionchunker.c
10 contrib/python-zstandard/c-ext/compressiondict.c
9 contrib/python-zstandard/c-ext/compressiondict.c
11 contrib/python-zstandard/c-ext/compressionparams.c
10 contrib/python-zstandard/c-ext/compressionparams.c
12 contrib/python-zstandard/c-ext/compressionreader.c
11 contrib/python-zstandard/c-ext/compressionreader.c
13 contrib/python-zstandard/c-ext/compressionwriter.c
12 contrib/python-zstandard/c-ext/compressionwriter.c
14 contrib/python-zstandard/c-ext/compressobj.c
13 contrib/python-zstandard/c-ext/compressobj.c
15 contrib/python-zstandard/c-ext/compressor.c
14 contrib/python-zstandard/c-ext/compressor.c
16 contrib/python-zstandard/c-ext/compressoriterator.c
15 contrib/python-zstandard/c-ext/compressoriterator.c
17 contrib/python-zstandard/c-ext/constants.c
16 contrib/python-zstandard/c-ext/constants.c
18 contrib/python-zstandard/c-ext/decompressionreader.c
17 contrib/python-zstandard/c-ext/decompressionreader.c
19 contrib/python-zstandard/c-ext/decompressionwriter.c
18 contrib/python-zstandard/c-ext/decompressionwriter.c
20 contrib/python-zstandard/c-ext/decompressobj.c
19 contrib/python-zstandard/c-ext/decompressobj.c
21 contrib/python-zstandard/c-ext/decompressor.c
20 contrib/python-zstandard/c-ext/decompressor.c
22 contrib/python-zstandard/c-ext/decompressoriterator.c
21 contrib/python-zstandard/c-ext/decompressoriterator.c
23 contrib/python-zstandard/c-ext/frameparams.c
22 contrib/python-zstandard/c-ext/frameparams.c
24 contrib/python-zstandard/c-ext/python-zstandard.h
23 contrib/python-zstandard/c-ext/python-zstandard.h
25 contrib/python-zstandard/zstd.c
24 contrib/python-zstandard/zstd.c
26 contrib/python-zstandard/zstd/common/bitstream.h
25 contrib/python-zstandard/zstd/common/bitstream.h
27 contrib/python-zstandard/zstd/common/compiler.h
26 contrib/python-zstandard/zstd/common/compiler.h
28 contrib/python-zstandard/zstd/common/cpu.h
27 contrib/python-zstandard/zstd/common/cpu.h
29 contrib/python-zstandard/zstd/common/debug.c
28 contrib/python-zstandard/zstd/common/debug.c
30 contrib/python-zstandard/zstd/common/debug.h
29 contrib/python-zstandard/zstd/common/debug.h
31 contrib/python-zstandard/zstd/common/entropy_common.c
30 contrib/python-zstandard/zstd/common/entropy_common.c
32 contrib/python-zstandard/zstd/common/error_private.c
31 contrib/python-zstandard/zstd/common/error_private.c
33 contrib/python-zstandard/zstd/common/error_private.h
32 contrib/python-zstandard/zstd/common/error_private.h
34 contrib/python-zstandard/zstd/common/fse_decompress.c
33 contrib/python-zstandard/zstd/common/fse_decompress.c
35 contrib/python-zstandard/zstd/common/fse.h
34 contrib/python-zstandard/zstd/common/fse.h
36 contrib/python-zstandard/zstd/common/huf.h
35 contrib/python-zstandard/zstd/common/huf.h
37 contrib/python-zstandard/zstd/common/mem.h
36 contrib/python-zstandard/zstd/common/mem.h
38 contrib/python-zstandard/zstd/common/pool.c
37 contrib/python-zstandard/zstd/common/pool.c
39 contrib/python-zstandard/zstd/common/pool.h
38 contrib/python-zstandard/zstd/common/pool.h
40 contrib/python-zstandard/zstd/common/threading.c
39 contrib/python-zstandard/zstd/common/threading.c
41 contrib/python-zstandard/zstd/common/threading.h
40 contrib/python-zstandard/zstd/common/threading.h
42 contrib/python-zstandard/zstd/common/xxhash.c
41 contrib/python-zstandard/zstd/common/xxhash.c
43 contrib/python-zstandard/zstd/common/xxhash.h
42 contrib/python-zstandard/zstd/common/xxhash.h
44 contrib/python-zstandard/zstd/common/zstd_common.c
43 contrib/python-zstandard/zstd/common/zstd_common.c
45 contrib/python-zstandard/zstd/common/zstd_errors.h
44 contrib/python-zstandard/zstd/common/zstd_errors.h
46 contrib/python-zstandard/zstd/common/zstd_internal.h
45 contrib/python-zstandard/zstd/common/zstd_internal.h
47 contrib/python-zstandard/zstd/compress/fse_compress.c
46 contrib/python-zstandard/zstd/compress/fse_compress.c
48 contrib/python-zstandard/zstd/compress/hist.c
47 contrib/python-zstandard/zstd/compress/hist.c
49 contrib/python-zstandard/zstd/compress/hist.h
48 contrib/python-zstandard/zstd/compress/hist.h
50 contrib/python-zstandard/zstd/compress/huf_compress.c
49 contrib/python-zstandard/zstd/compress/huf_compress.c
51 contrib/python-zstandard/zstd/compress/zstd_compress.c
50 contrib/python-zstandard/zstd/compress/zstd_compress.c
52 contrib/python-zstandard/zstd/compress/zstd_compress_internal.h
51 contrib/python-zstandard/zstd/compress/zstd_compress_internal.h
53 contrib/python-zstandard/zstd/compress/zstd_double_fast.c
52 contrib/python-zstandard/zstd/compress/zstd_double_fast.c
54 contrib/python-zstandard/zstd/compress/zstd_double_fast.h
53 contrib/python-zstandard/zstd/compress/zstd_double_fast.h
55 contrib/python-zstandard/zstd/compress/zstd_fast.c
54 contrib/python-zstandard/zstd/compress/zstd_fast.c
56 contrib/python-zstandard/zstd/compress/zstd_fast.h
55 contrib/python-zstandard/zstd/compress/zstd_fast.h
57 contrib/python-zstandard/zstd/compress/zstd_lazy.c
56 contrib/python-zstandard/zstd/compress/zstd_lazy.c
58 contrib/python-zstandard/zstd/compress/zstd_lazy.h
57 contrib/python-zstandard/zstd/compress/zstd_lazy.h
59 contrib/python-zstandard/zstd/compress/zstd_ldm.c
58 contrib/python-zstandard/zstd/compress/zstd_ldm.c
60 contrib/python-zstandard/zstd/compress/zstd_ldm.h
59 contrib/python-zstandard/zstd/compress/zstd_ldm.h
61 contrib/python-zstandard/zstd/compress/zstdmt_compress.c
60 contrib/python-zstandard/zstd/compress/zstdmt_compress.c
62 contrib/python-zstandard/zstd/compress/zstdmt_compress.h
61 contrib/python-zstandard/zstd/compress/zstdmt_compress.h
63 contrib/python-zstandard/zstd/compress/zstd_opt.c
62 contrib/python-zstandard/zstd/compress/zstd_opt.c
64 contrib/python-zstandard/zstd/compress/zstd_opt.h
63 contrib/python-zstandard/zstd/compress/zstd_opt.h
65 contrib/python-zstandard/zstd/decompress/huf_decompress.c
64 contrib/python-zstandard/zstd/decompress/huf_decompress.c
66 contrib/python-zstandard/zstd/decompress/zstd_decompress.c
65 contrib/python-zstandard/zstd/decompress/zstd_decompress.c
67 contrib/python-zstandard/zstd/deprecated/zbuff_common.c
66 contrib/python-zstandard/zstd/deprecated/zbuff_common.c
68 contrib/python-zstandard/zstd/deprecated/zbuff_compress.c
67 contrib/python-zstandard/zstd/deprecated/zbuff_compress.c
69 contrib/python-zstandard/zstd/deprecated/zbuff_decompress.c
68 contrib/python-zstandard/zstd/deprecated/zbuff_decompress.c
70 contrib/python-zstandard/zstd/deprecated/zbuff.h
69 contrib/python-zstandard/zstd/deprecated/zbuff.h
71 contrib/python-zstandard/zstd/dictBuilder/cover.c
70 contrib/python-zstandard/zstd/dictBuilder/cover.c
72 contrib/python-zstandard/zstd/dictBuilder/cover.h
71 contrib/python-zstandard/zstd/dictBuilder/cover.h
73 contrib/python-zstandard/zstd/dictBuilder/divsufsort.c
72 contrib/python-zstandard/zstd/dictBuilder/divsufsort.c
74 contrib/python-zstandard/zstd/dictBuilder/divsufsort.h
73 contrib/python-zstandard/zstd/dictBuilder/divsufsort.h
75 contrib/python-zstandard/zstd/dictBuilder/fastcover.c
74 contrib/python-zstandard/zstd/dictBuilder/fastcover.c
76 contrib/python-zstandard/zstd/dictBuilder/zdict.c
75 contrib/python-zstandard/zstd/dictBuilder/zdict.c
77 contrib/python-zstandard/zstd/dictBuilder/zdict.h
76 contrib/python-zstandard/zstd/dictBuilder/zdict.h
78 contrib/python-zstandard/zstd/zstd.h
77 contrib/python-zstandard/zstd/zstd.h
79 hgext/fsmonitor/pywatchman/bser.c
78 hgext/fsmonitor/pywatchman/bser.c
80 mercurial/thirdparty/xdiff/xdiff.h
79 mercurial/thirdparty/xdiff/xdiff.h
81 mercurial/thirdparty/xdiff/xdiffi.c
80 mercurial/thirdparty/xdiff/xdiffi.c
82 mercurial/thirdparty/xdiff/xdiffi.h
81 mercurial/thirdparty/xdiff/xdiffi.h
83 mercurial/thirdparty/xdiff/xemit.c
82 mercurial/thirdparty/xdiff/xemit.c
84 mercurial/thirdparty/xdiff/xemit.h
83 mercurial/thirdparty/xdiff/xemit.h
85 mercurial/thirdparty/xdiff/xhistogram.c
84 mercurial/thirdparty/xdiff/xhistogram.c
86 mercurial/thirdparty/xdiff/xinclude.h
85 mercurial/thirdparty/xdiff/xinclude.h
87 mercurial/thirdparty/xdiff/xmacros.h
86 mercurial/thirdparty/xdiff/xmacros.h
88 mercurial/thirdparty/xdiff/xmerge.c
87 mercurial/thirdparty/xdiff/xmerge.c
89 mercurial/thirdparty/xdiff/xpatience.c
88 mercurial/thirdparty/xdiff/xpatience.c
90 mercurial/thirdparty/xdiff/xprepare.c
89 mercurial/thirdparty/xdiff/xprepare.c
91 mercurial/thirdparty/xdiff/xprepare.h
90 mercurial/thirdparty/xdiff/xprepare.h
92 mercurial/thirdparty/xdiff/xtypes.h
91 mercurial/thirdparty/xdiff/xtypes.h
93 mercurial/thirdparty/xdiff/xutils.c
92 mercurial/thirdparty/xdiff/xutils.c
94 mercurial/thirdparty/xdiff/xutils.h
93 mercurial/thirdparty/xdiff/xutils.h
95 mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c
94 mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c
@@ -1,128 +1,160 b''
1 CC = clang
1 CC = clang
2 CXX = clang++
2 CXX = clang++
3
3
4 all: bdiff mpatch xdiff
4 all: bdiff mpatch xdiff
5
5
6 fuzzutil.o: fuzzutil.cc fuzzutil.h
6 fuzzutil.o: fuzzutil.cc fuzzutil.h
7 $(CXX) $(CXXFLAGS) -g -O1 -fsanitize=fuzzer-no-link,address \
7 $(CXX) $(CXXFLAGS) -g -O1 \
8 -std=c++17 \
8 -std=c++17 \
9 -I../../mercurial -c -o fuzzutil.o fuzzutil.cc
9 -I../../mercurial -c -o fuzzutil.o fuzzutil.cc
10
10
11 fuzzutil-oss-fuzz.o: fuzzutil.cc fuzzutil.h
11 fuzzutil-oss-fuzz.o: fuzzutil.cc fuzzutil.h
12 $(CXX) $(CXXFLAGS) -std=c++17 \
12 $(CXX) $(CXXFLAGS) -std=c++17 \
13 -I../../mercurial -c -o fuzzutil-oss-fuzz.o fuzzutil.cc
13 -I../../mercurial -c -o fuzzutil-oss-fuzz.o fuzzutil.cc
14
14
15 pyutil.o: pyutil.cc pyutil.h
16 $(CXX) $(CXXFLAGS) -g -O1 \
17 `$$OUT/sanpy/bin/python-config --cflags` \
18 -I../../mercurial -c -o pyutil.o pyutil.cc
19
15 bdiff.o: ../../mercurial/bdiff.c
20 bdiff.o: ../../mercurial/bdiff.c
16 $(CC) $(CFLAGS) -fsanitize=fuzzer-no-link,address -c -o bdiff.o \
21 $(CC) $(CFLAGS) -fsanitize=fuzzer-no-link,address -c -o bdiff.o \
17 ../../mercurial/bdiff.c
22 ../../mercurial/bdiff.c
18
23
19 bdiff: bdiff.cc bdiff.o fuzzutil.o
24 bdiff: bdiff.cc bdiff.o fuzzutil.o
20 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
25 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
21 -std=c++17 \
26 -std=c++17 \
22 -I../../mercurial bdiff.cc bdiff.o fuzzutil.o -o bdiff
27 -I../../mercurial bdiff.cc bdiff.o fuzzutil.o -o bdiff
23
28
24 bdiff-oss-fuzz.o: ../../mercurial/bdiff.c
29 bdiff-oss-fuzz.o: ../../mercurial/bdiff.c
25 $(CC) $(CFLAGS) -c -o bdiff-oss-fuzz.o ../../mercurial/bdiff.c
30 $(CC) $(CFLAGS) -c -o bdiff-oss-fuzz.o ../../mercurial/bdiff.c
26
31
27 bdiff_fuzzer: bdiff.cc bdiff-oss-fuzz.o fuzzutil-oss-fuzz.o
32 bdiff_fuzzer: bdiff.cc bdiff-oss-fuzz.o fuzzutil-oss-fuzz.o
28 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial bdiff.cc \
33 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial bdiff.cc \
29 bdiff-oss-fuzz.o fuzzutil-oss-fuzz.o -lFuzzingEngine -o \
34 bdiff-oss-fuzz.o fuzzutil-oss-fuzz.o -lFuzzingEngine -o \
30 $$OUT/bdiff_fuzzer
35 $$OUT/bdiff_fuzzer
31
36
32 mpatch.o: ../../mercurial/mpatch.c
37 mpatch.o: ../../mercurial/mpatch.c
33 $(CC) -g -O1 -fsanitize=fuzzer-no-link,address -c -o mpatch.o \
38 $(CC) -g -O1 -fsanitize=fuzzer-no-link,address -c -o mpatch.o \
34 ../../mercurial/mpatch.c
39 ../../mercurial/mpatch.c
35
40
36 mpatch: CXXFLAGS += -std=c++17
41 mpatch: CXXFLAGS += -std=c++17
37 mpatch: mpatch.cc mpatch.o fuzzutil.o
42 mpatch: mpatch.cc mpatch.o fuzzutil.o
38 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
43 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
39 -I../../mercurial mpatch.cc mpatch.o fuzzutil.o -o mpatch
44 -I../../mercurial mpatch.cc mpatch.o fuzzutil.o -o mpatch
40
45
41 mpatch-oss-fuzz.o: ../../mercurial/mpatch.c
46 mpatch-oss-fuzz.o: ../../mercurial/mpatch.c
42 $(CC) $(CFLAGS) -c -o mpatch-oss-fuzz.o ../../mercurial/mpatch.c
47 $(CC) $(CFLAGS) -c -o mpatch-oss-fuzz.o ../../mercurial/mpatch.c
43
48
44 mpatch_fuzzer: mpatch.cc mpatch-oss-fuzz.o fuzzutil-oss-fuzz.o
49 mpatch_fuzzer: mpatch.cc mpatch-oss-fuzz.o fuzzutil-oss-fuzz.o
45 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial mpatch.cc \
50 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial mpatch.cc \
46 mpatch-oss-fuzz.o fuzzutil-oss-fuzz.o -lFuzzingEngine -o \
51 mpatch-oss-fuzz.o fuzzutil-oss-fuzz.o -lFuzzingEngine -o \
47 $$OUT/mpatch_fuzzer
52 $$OUT/mpatch_fuzzer
48
53
49 mpatch_corpus.zip:
54 mpatch_corpus.zip:
50 python mpatch_corpus.py $$OUT/mpatch_fuzzer_seed_corpus.zip
55 python mpatch_corpus.py $$OUT/mpatch_fuzzer_seed_corpus.zip
51
56
52 x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
57 x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
53 $(CC) -g -O1 -fsanitize=fuzzer-no-link,address -c \
58 $(CC) -g -O1 -fsanitize=fuzzer-no-link,address -c \
54 -o $@ \
59 -o $@ \
55 $<
60 $<
56
61
57 xdiff: CXXFLAGS += -std=c++17
62 xdiff: CXXFLAGS += -std=c++17
58 xdiff: xdiff.cc xdiffi.o xprepare.o xutils.o fuzzutil.o
63 xdiff: xdiff.cc xdiffi.o xprepare.o xutils.o fuzzutil.o
59 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
64 $(CXX) $(CXXFLAGS) -DHG_FUZZER_INCLUDE_MAIN=1 -g -O1 -fsanitize=fuzzer-no-link,address \
60 -I../../mercurial xdiff.cc \
65 -I../../mercurial xdiff.cc \
61 xdiffi.o xprepare.o xutils.o fuzzutil.o -o xdiff
66 xdiffi.o xprepare.o xutils.o fuzzutil.o -o xdiff
62
67
63 fuzz-x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
68 fuzz-x%.o: ../../mercurial/thirdparty/xdiff/x%.c ../../mercurial/thirdparty/xdiff/*.h
64 $(CC) $(CFLAGS) -c \
69 $(CC) $(CFLAGS) -c \
65 -o $@ \
70 -o $@ \
66 $<
71 $<
67
72
68 xdiff_fuzzer: xdiff.cc fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o fuzzutil-oss-fuzz.o
73 xdiff_fuzzer: xdiff.cc fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o fuzzutil-oss-fuzz.o
69 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial xdiff.cc \
74 $(CXX) $(CXXFLAGS) -std=c++17 -I../../mercurial xdiff.cc \
70 fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o fuzzutil-oss-fuzz.o \
75 fuzz-xdiffi.o fuzz-xprepare.o fuzz-xutils.o fuzzutil-oss-fuzz.o \
71 -lFuzzingEngine -o $$OUT/xdiff_fuzzer
76 -lFuzzingEngine -o $$OUT/xdiff_fuzzer
72
77
73 # TODO use the $OUT env var instead of hardcoding /out
78 manifest.o: ../../mercurial/cext/manifest.c
74 /out/sanpy/bin/python:
75 cd /Python-2.7.15/ && ./configure --without-pymalloc --prefix=$$OUT/sanpy CFLAGS='-O1 -fno-omit-frame-pointer -g -fwrapv -fstack-protector-strong' LDFLAGS=-lasan && ASAN_OPTIONS=detect_leaks=0 make && make install
76
77 sanpy: /out/sanpy/bin/python
78
79 manifest.o: sanpy ../../mercurial/cext/manifest.c
80 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
79 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
81 -I../../mercurial \
80 -I../../mercurial \
82 -c -o manifest.o ../../mercurial/cext/manifest.c
81 -c -o manifest.o ../../mercurial/cext/manifest.c
83
82
84 charencode.o: sanpy ../../mercurial/cext/charencode.c
83 charencode.o: ../../mercurial/cext/charencode.c
85 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
84 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
86 -I../../mercurial \
85 -I../../mercurial \
87 -c -o charencode.o ../../mercurial/cext/charencode.c
86 -c -o charencode.o ../../mercurial/cext/charencode.c
88
87
89 parsers.o: sanpy ../../mercurial/cext/parsers.c
88 parsers.o: ../../mercurial/cext/parsers.c
90 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
89 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
91 -I../../mercurial \
90 -I../../mercurial \
92 -c -o parsers.o ../../mercurial/cext/parsers.c
91 -c -o parsers.o ../../mercurial/cext/parsers.c
93
92
94 dirs.o: sanpy ../../mercurial/cext/dirs.c
93 dirs.o: ../../mercurial/cext/dirs.c
95 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
94 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
96 -I../../mercurial \
95 -I../../mercurial \
97 -c -o dirs.o ../../mercurial/cext/dirs.c
96 -c -o dirs.o ../../mercurial/cext/dirs.c
98
97
99 pathencode.o: sanpy ../../mercurial/cext/pathencode.c
98 pathencode.o: ../../mercurial/cext/pathencode.c
100 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
99 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
101 -I../../mercurial \
100 -I../../mercurial \
102 -c -o pathencode.o ../../mercurial/cext/pathencode.c
101 -c -o pathencode.o ../../mercurial/cext/pathencode.c
103
102
104 revlog.o: sanpy ../../mercurial/cext/revlog.c
103 revlog.o: ../../mercurial/cext/revlog.c
105 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
104 $(CC) $(CFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
106 -I../../mercurial \
105 -I../../mercurial \
107 -c -o revlog.o ../../mercurial/cext/revlog.c
106 -c -o revlog.o ../../mercurial/cext/revlog.c
108
107
109 manifest_fuzzer: sanpy manifest.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o
108 manifest_fuzzer: manifest.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
110 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
109 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
111 -Wno-register -Wno-macro-redefined \
110 -Wno-register -Wno-macro-redefined \
112 -I../../mercurial manifest.cc \
111 -I../../mercurial manifest.cc \
113 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o \
112 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
114 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
113 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
115 -o $$OUT/manifest_fuzzer
114 -o $$OUT/manifest_fuzzer
116
115
117 manifest_corpus.zip:
116 manifest_corpus.zip:
118 python manifest_corpus.py $$OUT/manifest_fuzzer_seed_corpus.zip
117 python manifest_corpus.py $$OUT/manifest_fuzzer_seed_corpus.zip
119
118
119 revlog_fuzzer: revlog.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
120 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
121 -Wno-register -Wno-macro-redefined \
122 -I../../mercurial revlog.cc \
123 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
124 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
125 -o $$OUT/revlog_fuzzer
126
127 revlog_corpus.zip:
128 python revlog_corpus.py $$OUT/revlog_fuzzer_seed_corpus.zip
129
130 dirstate_fuzzer: dirstate.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
131 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
132 -Wno-register -Wno-macro-redefined \
133 -I../../mercurial dirstate.cc \
134 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
135 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
136 -o $$OUT/dirstate_fuzzer
137
138 dirstate_corpus.zip:
139 python dirstate_corpus.py $$OUT/dirstate_fuzzer_seed_corpus.zip
140
141 fm1readmarkers_fuzzer: fm1readmarkers.cc manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o
142 $(CXX) $(CXXFLAGS) `$$OUT/sanpy/bin/python-config --cflags` \
143 -Wno-register -Wno-macro-redefined \
144 -I../../mercurial fm1readmarkers.cc \
145 manifest.o charencode.o parsers.o dirs.o pathencode.o revlog.o pyutil.o \
146 -lFuzzingEngine `$$OUT/sanpy/bin/python-config --ldflags` \
147 -o $$OUT/fm1readmarkers_fuzzer
148
149 fm1readmarkers_corpus.zip:
150 python fm1readmarkers_corpus.py $$OUT/fm1readmarkers_fuzzer_seed_corpus.zip
151
120 clean:
152 clean:
121 $(RM) *.o *_fuzzer \
153 $(RM) *.o *_fuzzer \
122 bdiff \
154 bdiff \
123 mpatch \
155 mpatch \
124 xdiff
156 xdiff
125
157
126 oss-fuzz: bdiff_fuzzer mpatch_fuzzer mpatch_corpus.zip xdiff_fuzzer manifest_fuzzer manifest_corpus.zip
158 oss-fuzz: bdiff_fuzzer mpatch_fuzzer mpatch_corpus.zip xdiff_fuzzer manifest_fuzzer manifest_corpus.zip revlog_fuzzer revlog_corpus.zip dirstate_fuzzer dirstate_corpus.zip fm1readmarkers_fuzzer fm1readmarkers_corpus.zip
127
159
128 .PHONY: all clean oss-fuzz sanpy
160 .PHONY: all clean oss-fuzz
@@ -1,83 +1,55 b''
1 #include <Python.h>
1 #include <Python.h>
2 #include <assert.h>
2 #include <assert.h>
3 #include <stdlib.h>
3 #include <stdlib.h>
4 #include <unistd.h>
4 #include <unistd.h>
5
5
6 #include "pyutil.h"
7
6 #include <string>
8 #include <string>
7
9
8 extern "C" {
10 extern "C" {
9
11
10 /* TODO: use Python 3 for this fuzzing? */
11 PyMODINIT_FUNC initparsers(void);
12
13 static char cpypath[8192] = "\0";
14
15 static PyCodeObject *code;
12 static PyCodeObject *code;
16 static PyObject *mainmod;
17 static PyObject *globals;
18
13
19 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
14 extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv)
20 {
15 {
21 const std::string subdir = "/sanpy/lib/python2.7";
16 contrib::initpy(*argv[0]);
22 /* HACK ALERT: we need a full Python installation built without
23 pymalloc and with ASAN, so we dump one in
24 $OUT/sanpy/lib/python2.7. This helps us wire that up. */
25 std::string selfpath(*argv[0]);
26 std::string pypath;
27 auto pos = selfpath.rfind("/");
28 if (pos == std::string::npos) {
29 char wd[8192];
30 getcwd(wd, 8192);
31 pypath = std::string(wd) + subdir;
32 } else {
33 pypath = selfpath.substr(0, pos) + subdir;
34 }
35 strncpy(cpypath, pypath.c_str(), pypath.size());
36 setenv("PYTHONPATH", cpypath, 1);
37 setenv("PYTHONNOUSERSITE", "1", 1);
38 /* prevent Python from looking up users in the fuzz environment */
39 setenv("PYTHONUSERBASE", cpypath, 1);
40 Py_SetPythonHome(cpypath);
41 Py_InitializeEx(0);
42 initparsers();
43 code = (PyCodeObject *)Py_CompileString(R"py(
17 code = (PyCodeObject *)Py_CompileString(R"py(
44 from parsers import lazymanifest
18 from parsers import lazymanifest
45 try:
19 try:
46 lm = lazymanifest(mdata)
20 lm = lazymanifest(mdata)
47 # iterate the whole thing, which causes the code to fully parse
21 # iterate the whole thing, which causes the code to fully parse
48 # every line in the manifest
22 # every line in the manifest
49 list(lm.iterentries())
23 list(lm.iterentries())
50 lm[b'xyzzy'] = (b'\0' * 20, 'x')
24 lm[b'xyzzy'] = (b'\0' * 20, 'x')
51 # do an insert, text should change
25 # do an insert, text should change
52 assert lm.text() != mdata, "insert should change text and didn't: %r %r" % (lm.text(), mdata)
26 assert lm.text() != mdata, "insert should change text and didn't: %r %r" % (lm.text(), mdata)
53 del lm[b'xyzzy']
27 del lm[b'xyzzy']
54 # should be back to the same
28 # should be back to the same
55 assert lm.text() == mdata, "delete should have restored text but didn't: %r %r" % (lm.text(), mdata)
29 assert lm.text() == mdata, "delete should have restored text but didn't: %r %r" % (lm.text(), mdata)
56 except Exception as e:
30 except Exception as e:
57 pass
31 pass
58 # uncomment this print if you're editing this Python code
32 # uncomment this print if you're editing this Python code
59 # to debug failures.
33 # to debug failures.
60 # print e
34 # print e
61 )py",
35 )py",
62 "fuzzer", Py_file_input);
36 "fuzzer", Py_file_input);
63 mainmod = PyImport_AddModule("__main__");
64 globals = PyModule_GetDict(mainmod);
65 return 0;
37 return 0;
66 }
38 }
67
39
68 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
40 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
69 {
41 {
70 PyObject *mtext =
42 PyObject *mtext =
71 PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size);
43 PyBytes_FromStringAndSize((const char *)Data, (Py_ssize_t)Size);
72 PyObject *locals = PyDict_New();
44 PyObject *locals = PyDict_New();
73 PyDict_SetItemString(locals, "mdata", mtext);
45 PyDict_SetItemString(locals, "mdata", mtext);
74 PyObject *res = PyEval_EvalCode(code, globals, locals);
46 PyObject *res = PyEval_EvalCode(code, contrib::pyglobals(), locals);
75 if (!res) {
47 if (!res) {
76 PyErr_Print();
48 PyErr_Print();
77 }
49 }
78 Py_XDECREF(res);
50 Py_XDECREF(res);
79 Py_DECREF(locals);
51 Py_DECREF(locals);
80 Py_DECREF(mtext);
52 Py_DECREF(mtext);
81 return 0; // Non-zero return values are reserved for future use.
53 return 0; // Non-zero return values are reserved for future use.
82 }
54 }
83 }
55 }
@@ -1,58 +1,63 b''
1 /*
1 /*
2 * xdiff.cc - fuzzer harness for thirdparty/xdiff
2 * xdiff.cc - fuzzer harness for thirdparty/xdiff
3 *
3 *
4 * Copyright 2018, Google Inc.
4 * Copyright 2018, Google Inc.
5 *
5 *
6 * This software may be used and distributed according to the terms of
6 * This software may be used and distributed according to the terms of
7 * the GNU General Public License, incorporated herein by reference.
7 * the GNU General Public License, incorporated herein by reference.
8 */
8 */
9 #include "thirdparty/xdiff/xdiff.h"
9 #include "thirdparty/xdiff/xdiff.h"
10 #include <inttypes.h>
10 #include <inttypes.h>
11 #include <stdlib.h>
11 #include <stdlib.h>
12
12
13 #include "fuzzutil.h"
13 #include "fuzzutil.h"
14
14
15 extern "C" {
15 extern "C" {
16
16
17 int hunk_consumer(long a1, long a2, long b1, long b2, void *priv)
17 int hunk_consumer(long a1, long a2, long b1, long b2, void *priv)
18 {
18 {
19 // TODO: probably also test returning -1 from this when things break?
19 // TODO: probably also test returning -1 from this when things break?
20 return 0;
20 return 0;
21 }
21 }
22
22
23 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
23 int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size)
24 {
24 {
25 // Don't allow fuzzer inputs larger than 100k, since we'll just bog
26 // down and not accomplish much.
27 if (Size > 100000) {
28 return 0;
29 }
25 auto maybe_inputs = SplitInputs(Data, Size);
30 auto maybe_inputs = SplitInputs(Data, Size);
26 if (!maybe_inputs) {
31 if (!maybe_inputs) {
27 return 0;
32 return 0;
28 }
33 }
29 auto inputs = std::move(maybe_inputs.value());
34 auto inputs = std::move(maybe_inputs.value());
30 mmfile_t a, b;
35 mmfile_t a, b;
31
36
32 a.ptr = inputs.left.get();
37 a.ptr = inputs.left.get();
33 a.size = inputs.left_size;
38 a.size = inputs.left_size;
34 b.ptr = inputs.right.get();
39 b.ptr = inputs.right.get();
35 b.size = inputs.right_size;
40 b.size = inputs.right_size;
36 xpparam_t xpp = {
41 xpparam_t xpp = {
37 XDF_INDENT_HEURISTIC, /* flags */
42 XDF_INDENT_HEURISTIC, /* flags */
38 };
43 };
39 xdemitconf_t xecfg = {
44 xdemitconf_t xecfg = {
40 XDL_EMIT_BDIFFHUNK, /* flags */
45 XDL_EMIT_BDIFFHUNK, /* flags */
41 hunk_consumer, /* hunk_consume_func */
46 hunk_consumer, /* hunk_consume_func */
42 };
47 };
43 xdemitcb_t ecb = {
48 xdemitcb_t ecb = {
44 NULL, /* priv */
49 NULL, /* priv */
45 };
50 };
46 xdl_diff(&a, &b, &xpp, &xecfg, &ecb);
51 xdl_diff(&a, &b, &xpp, &xecfg, &ecb);
47 return 0; // Non-zero return values are reserved for future use.
52 return 0; // Non-zero return values are reserved for future use.
48 }
53 }
49
54
50 #ifdef HG_FUZZER_INCLUDE_MAIN
55 #ifdef HG_FUZZER_INCLUDE_MAIN
51 int main(int argc, char **argv)
56 int main(int argc, char **argv)
52 {
57 {
53 const char data[] = "asdf";
58 const char data[] = "asdf";
54 return LLVMFuzzerTestOneInput((const uint8_t *)data, 4);
59 return LLVMFuzzerTestOneInput((const uint8_t *)data, 4);
55 }
60 }
56 #endif
61 #endif
57
62
58 } // extern "C"
63 } // extern "C"
@@ -1,134 +1,147 b''
1 # A minimal client for Mercurial's command server
1 # A minimal client for Mercurial's command server
2
2
3 from __future__ import absolute_import, print_function
3 from __future__ import absolute_import, print_function
4
4
5 import io
5 import io
6 import os
6 import os
7 import re
7 import re
8 import signal
8 import signal
9 import socket
9 import socket
10 import struct
10 import struct
11 import subprocess
11 import subprocess
12 import sys
12 import sys
13 import time
13 import time
14
14
15 if sys.version_info[0] >= 3:
15 if sys.version_info[0] >= 3:
16 stdout = sys.stdout.buffer
16 stdout = sys.stdout.buffer
17 stderr = sys.stderr.buffer
17 stderr = sys.stderr.buffer
18 stringio = io.BytesIO
18 stringio = io.BytesIO
19 def bprint(*args):
19 def bprint(*args):
20 # remove b'' as well for ease of test migration
20 # remove b'' as well for ease of test migration
21 pargs = [re.sub(br'''\bb(['"])''', br'\1', b'%s' % a) for a in args]
21 pargs = [re.sub(br'''\bb(['"])''', br'\1', b'%s' % a) for a in args]
22 stdout.write(b' '.join(pargs) + b'\n')
22 stdout.write(b' '.join(pargs) + b'\n')
23 else:
23 else:
24 import cStringIO
24 import cStringIO
25 stdout = sys.stdout
25 stdout = sys.stdout
26 stderr = sys.stderr
26 stderr = sys.stderr
27 stringio = cStringIO.StringIO
27 stringio = cStringIO.StringIO
28 bprint = print
28 bprint = print
29
29
30 def connectpipe(path=None):
30 def connectpipe(path=None, extraargs=()):
31 cmdline = [b'hg', b'serve', b'--cmdserver', b'pipe']
31 cmdline = [b'hg', b'serve', b'--cmdserver', b'pipe']
32 if path:
32 if path:
33 cmdline += [b'-R', path]
33 cmdline += [b'-R', path]
34 cmdline.extend(extraargs)
34
35
35 server = subprocess.Popen(cmdline, stdin=subprocess.PIPE,
36 def tonative(cmdline):
37 if os.name != r'nt':
38 return cmdline
39 return [arg.decode("utf-8") for arg in cmdline]
40
41 server = subprocess.Popen(tonative(cmdline), stdin=subprocess.PIPE,
36 stdout=subprocess.PIPE)
42 stdout=subprocess.PIPE)
37
43
38 return server
44 return server
39
45
40 class unixconnection(object):
46 class unixconnection(object):
41 def __init__(self, sockpath):
47 def __init__(self, sockpath):
42 self.sock = sock = socket.socket(socket.AF_UNIX)
48 self.sock = sock = socket.socket(socket.AF_UNIX)
43 sock.connect(sockpath)
49 sock.connect(sockpath)
44 self.stdin = sock.makefile('wb')
50 self.stdin = sock.makefile('wb')
45 self.stdout = sock.makefile('rb')
51 self.stdout = sock.makefile('rb')
46
52
47 def wait(self):
53 def wait(self):
48 self.stdin.close()
54 self.stdin.close()
49 self.stdout.close()
55 self.stdout.close()
50 self.sock.close()
56 self.sock.close()
51
57
52 class unixserver(object):
58 class unixserver(object):
53 def __init__(self, sockpath, logpath=None, repopath=None):
59 def __init__(self, sockpath, logpath=None, repopath=None):
54 self.sockpath = sockpath
60 self.sockpath = sockpath
55 cmdline = [b'hg', b'serve', b'--cmdserver', b'unix', b'-a', sockpath]
61 cmdline = [b'hg', b'serve', b'--cmdserver', b'unix', b'-a', sockpath]
56 if repopath:
62 if repopath:
57 cmdline += [b'-R', repopath]
63 cmdline += [b'-R', repopath]
58 if logpath:
64 if logpath:
59 stdout = open(logpath, 'a')
65 stdout = open(logpath, 'a')
60 stderr = subprocess.STDOUT
66 stderr = subprocess.STDOUT
61 else:
67 else:
62 stdout = stderr = None
68 stdout = stderr = None
63 self.server = subprocess.Popen(cmdline, stdout=stdout, stderr=stderr)
69 self.server = subprocess.Popen(cmdline, stdout=stdout, stderr=stderr)
64 # wait for listen()
70 # wait for listen()
65 while self.server.poll() is None:
71 while self.server.poll() is None:
66 if os.path.exists(sockpath):
72 if os.path.exists(sockpath):
67 break
73 break
68 time.sleep(0.1)
74 time.sleep(0.1)
69
75
70 def connect(self):
76 def connect(self):
71 return unixconnection(self.sockpath)
77 return unixconnection(self.sockpath)
72
78
73 def shutdown(self):
79 def shutdown(self):
74 os.kill(self.server.pid, signal.SIGTERM)
80 os.kill(self.server.pid, signal.SIGTERM)
75 self.server.wait()
81 self.server.wait()
76
82
77 def writeblock(server, data):
83 def writeblock(server, data):
78 server.stdin.write(struct.pack(b'>I', len(data)))
84 server.stdin.write(struct.pack(b'>I', len(data)))
79 server.stdin.write(data)
85 server.stdin.write(data)
80 server.stdin.flush()
86 server.stdin.flush()
81
87
82 def readchannel(server):
88 def readchannel(server):
83 data = server.stdout.read(5)
89 data = server.stdout.read(5)
84 if not data:
90 if not data:
85 raise EOFError
91 raise EOFError
86 channel, length = struct.unpack('>cI', data)
92 channel, length = struct.unpack('>cI', data)
87 if channel in b'IL':
93 if channel in b'IL':
88 return channel, length
94 return channel, length
89 else:
95 else:
90 return channel, server.stdout.read(length)
96 return channel, server.stdout.read(length)
91
97
92 def sep(text):
98 def sep(text):
93 return text.replace(b'\\', b'/')
99 return text.replace(b'\\', b'/')
94
100
95 def runcommand(server, args, output=stdout, error=stderr, input=None,
101 def runcommand(server, args, output=stdout, error=stderr, input=None,
96 outfilter=lambda x: x):
102 outfilter=lambda x: x):
97 bprint(b'*** runcommand', b' '.join(args))
103 bprint(b'*** runcommand', b' '.join(args))
98 stdout.flush()
104 stdout.flush()
99 server.stdin.write(b'runcommand\n')
105 server.stdin.write(b'runcommand\n')
100 writeblock(server, b'\0'.join(args))
106 writeblock(server, b'\0'.join(args))
101
107
102 if not input:
108 if not input:
103 input = stringio()
109 input = stringio()
104
110
105 while True:
111 while True:
106 ch, data = readchannel(server)
112 ch, data = readchannel(server)
107 if ch == b'o':
113 if ch == b'o':
108 output.write(outfilter(data))
114 output.write(outfilter(data))
109 output.flush()
115 output.flush()
110 elif ch == b'e':
116 elif ch == b'e':
111 error.write(data)
117 error.write(data)
112 error.flush()
118 error.flush()
113 elif ch == b'I':
119 elif ch == b'I':
114 writeblock(server, input.read(data))
120 writeblock(server, input.read(data))
115 elif ch == b'L':
121 elif ch == b'L':
116 writeblock(server, input.readline(data))
122 writeblock(server, input.readline(data))
123 elif ch == b'm':
124 bprint(b"message: %r" % data)
117 elif ch == b'r':
125 elif ch == b'r':
118 ret, = struct.unpack('>i', data)
126 ret, = struct.unpack('>i', data)
119 if ret != 0:
127 if ret != 0:
120 bprint(b' [%d]' % ret)
128 bprint(b' [%d]' % ret)
121 return ret
129 return ret
122 else:
130 else:
123 bprint(b"unexpected channel %c: %r" % (ch, data))
131 bprint(b"unexpected channel %c: %r" % (ch, data))
124 if ch.isupper():
132 if ch.isupper():
125 return
133 return
126
134
127 def check(func, connect=connectpipe):
135 def check(func, connect=connectpipe):
128 stdout.flush()
136 stdout.flush()
129 server = connect()
137 server = connect()
130 try:
138 try:
131 return func(server)
139 return func(server)
132 finally:
140 finally:
133 server.stdin.close()
141 server.stdin.close()
134 server.wait()
142 server.wait()
143
144 def checkwith(connect=connectpipe, **kwargs):
145 def wrap(func):
146 return check(func, lambda: connect(**kwargs))
147 return wrap
@@ -1,745 +1,747 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2
2
3 from __future__ import absolute_import, print_function
3 from __future__ import absolute_import, print_function
4
4
5 import ast
5 import ast
6 import collections
6 import collections
7 import os
7 import os
8 import sys
8 import sys
9
9
10 # Import a minimal set of stdlib modules needed for list_stdlib_modules()
10 # Import a minimal set of stdlib modules needed for list_stdlib_modules()
11 # to work when run from a virtualenv. The modules were chosen empirically
11 # to work when run from a virtualenv. The modules were chosen empirically
12 # so that the return value matches the return value without virtualenv.
12 # so that the return value matches the return value without virtualenv.
13 if True: # disable lexical sorting checks
13 if True: # disable lexical sorting checks
14 try:
14 try:
15 import BaseHTTPServer as basehttpserver
15 import BaseHTTPServer as basehttpserver
16 except ImportError:
16 except ImportError:
17 basehttpserver = None
17 basehttpserver = None
18 import zlib
18 import zlib
19
19
20 import testparseutil
20 import testparseutil
21
21
22 # Whitelist of modules that symbols can be directly imported from.
22 # Whitelist of modules that symbols can be directly imported from.
23 allowsymbolimports = (
23 allowsymbolimports = (
24 '__future__',
24 '__future__',
25 'bzrlib',
25 'bzrlib',
26 'hgclient',
26 'hgclient',
27 'mercurial',
27 'mercurial',
28 'mercurial.hgweb.common',
28 'mercurial.hgweb.common',
29 'mercurial.hgweb.request',
29 'mercurial.hgweb.request',
30 'mercurial.i18n',
30 'mercurial.i18n',
31 'mercurial.node',
31 'mercurial.node',
32 # for revlog to re-export constant to extensions
32 # for revlog to re-export constant to extensions
33 'mercurial.revlogutils.constants',
33 'mercurial.revlogutils.constants',
34 # for cffi modules to re-export pure functions
34 # for cffi modules to re-export pure functions
35 'mercurial.pure.base85',
35 'mercurial.pure.base85',
36 'mercurial.pure.bdiff',
36 'mercurial.pure.bdiff',
37 'mercurial.pure.mpatch',
37 'mercurial.pure.mpatch',
38 'mercurial.pure.osutil',
38 'mercurial.pure.osutil',
39 'mercurial.pure.parsers',
39 'mercurial.pure.parsers',
40 # third-party imports should be directly imported
40 # third-party imports should be directly imported
41 'mercurial.thirdparty',
41 'mercurial.thirdparty',
42 'mercurial.thirdparty.attr',
42 'mercurial.thirdparty.attr',
43 'mercurial.thirdparty.cbor',
44 'mercurial.thirdparty.cbor.cbor2',
45 'mercurial.thirdparty.zope',
43 'mercurial.thirdparty.zope',
46 'mercurial.thirdparty.zope.interface',
44 'mercurial.thirdparty.zope.interface',
47 )
45 )
48
46
49 # Whitelist of symbols that can be directly imported.
47 # Whitelist of symbols that can be directly imported.
50 directsymbols = (
48 directsymbols = (
51 'demandimport',
49 'demandimport',
52 )
50 )
53
51
54 # Modules that must be aliased because they are commonly confused with
52 # Modules that must be aliased because they are commonly confused with
55 # common variables and can create aliasing and readability issues.
53 # common variables and can create aliasing and readability issues.
56 requirealias = {
54 requirealias = {
57 'ui': 'uimod',
55 'ui': 'uimod',
58 }
56 }
59
57
60 def usingabsolute(root):
58 def usingabsolute(root):
61 """Whether absolute imports are being used."""
59 """Whether absolute imports are being used."""
62 if sys.version_info[0] >= 3:
60 if sys.version_info[0] >= 3:
63 return True
61 return True
64
62
65 for node in ast.walk(root):
63 for node in ast.walk(root):
66 if isinstance(node, ast.ImportFrom):
64 if isinstance(node, ast.ImportFrom):
67 if node.module == '__future__':
65 if node.module == '__future__':
68 for n in node.names:
66 for n in node.names:
69 if n.name == 'absolute_import':
67 if n.name == 'absolute_import':
70 return True
68 return True
71
69
72 return False
70 return False
73
71
74 def walklocal(root):
72 def walklocal(root):
75 """Recursively yield all descendant nodes but not in a different scope"""
73 """Recursively yield all descendant nodes but not in a different scope"""
76 todo = collections.deque(ast.iter_child_nodes(root))
74 todo = collections.deque(ast.iter_child_nodes(root))
77 yield root, False
75 yield root, False
78 while todo:
76 while todo:
79 node = todo.popleft()
77 node = todo.popleft()
80 newscope = isinstance(node, ast.FunctionDef)
78 newscope = isinstance(node, ast.FunctionDef)
81 if not newscope:
79 if not newscope:
82 todo.extend(ast.iter_child_nodes(node))
80 todo.extend(ast.iter_child_nodes(node))
83 yield node, newscope
81 yield node, newscope
84
82
85 def dotted_name_of_path(path):
83 def dotted_name_of_path(path):
86 """Given a relative path to a source file, return its dotted module name.
84 """Given a relative path to a source file, return its dotted module name.
87
85
88 >>> dotted_name_of_path('mercurial/error.py')
86 >>> dotted_name_of_path('mercurial/error.py')
89 'mercurial.error'
87 'mercurial.error'
90 >>> dotted_name_of_path('zlibmodule.so')
88 >>> dotted_name_of_path('zlibmodule.so')
91 'zlib'
89 'zlib'
92 """
90 """
93 parts = path.replace(os.sep, '/').split('/')
91 parts = path.replace(os.sep, '/').split('/')
94 parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so
92 parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so
95 if parts[-1].endswith('module'):
93 if parts[-1].endswith('module'):
96 parts[-1] = parts[-1][:-6]
94 parts[-1] = parts[-1][:-6]
97 return '.'.join(parts)
95 return '.'.join(parts)
98
96
99 def fromlocalfunc(modulename, localmods):
97 def fromlocalfunc(modulename, localmods):
100 """Get a function to examine which locally defined module the
98 """Get a function to examine which locally defined module the
101 target source imports via a specified name.
99 target source imports via a specified name.
102
100
103 `modulename` is an `dotted_name_of_path()`-ed source file path,
101 `modulename` is an `dotted_name_of_path()`-ed source file path,
104 which may have `.__init__` at the end of it, of the target source.
102 which may have `.__init__` at the end of it, of the target source.
105
103
106 `localmods` is a set of absolute `dotted_name_of_path()`-ed source file
104 `localmods` is a set of absolute `dotted_name_of_path()`-ed source file
107 paths of locally defined (= Mercurial specific) modules.
105 paths of locally defined (= Mercurial specific) modules.
108
106
109 This function assumes that module names not existing in
107 This function assumes that module names not existing in
110 `localmods` are from the Python standard library.
108 `localmods` are from the Python standard library.
111
109
112 This function returns the function, which takes `name` argument,
110 This function returns the function, which takes `name` argument,
113 and returns `(absname, dottedpath, hassubmod)` tuple if `name`
111 and returns `(absname, dottedpath, hassubmod)` tuple if `name`
114 matches against locally defined module. Otherwise, it returns
112 matches against locally defined module. Otherwise, it returns
115 False.
113 False.
116
114
117 It is assumed that `name` doesn't have `.__init__`.
115 It is assumed that `name` doesn't have `.__init__`.
118
116
119 `absname` is an absolute module name of specified `name`
117 `absname` is an absolute module name of specified `name`
120 (e.g. "hgext.convert"). This can be used to compose prefix for sub
118 (e.g. "hgext.convert"). This can be used to compose prefix for sub
121 modules or so.
119 modules or so.
122
120
123 `dottedpath` is a `dotted_name_of_path()`-ed source file path
121 `dottedpath` is a `dotted_name_of_path()`-ed source file path
124 (e.g. "hgext.convert.__init__") of `name`. This is used to look
122 (e.g. "hgext.convert.__init__") of `name`. This is used to look
125 module up in `localmods` again.
123 module up in `localmods` again.
126
124
127 `hassubmod` is whether it may have sub modules under it (for
125 `hassubmod` is whether it may have sub modules under it (for
128 convenient, even though this is also equivalent to "absname !=
126 convenient, even though this is also equivalent to "absname !=
129 dottednpath")
127 dottednpath")
130
128
131 >>> localmods = {'foo.__init__', 'foo.foo1',
129 >>> localmods = {'foo.__init__', 'foo.foo1',
132 ... 'foo.bar.__init__', 'foo.bar.bar1',
130 ... 'foo.bar.__init__', 'foo.bar.bar1',
133 ... 'baz.__init__', 'baz.baz1'}
131 ... 'baz.__init__', 'baz.baz1'}
134 >>> fromlocal = fromlocalfunc('foo.xxx', localmods)
132 >>> fromlocal = fromlocalfunc('foo.xxx', localmods)
135 >>> # relative
133 >>> # relative
136 >>> fromlocal('foo1')
134 >>> fromlocal('foo1')
137 ('foo.foo1', 'foo.foo1', False)
135 ('foo.foo1', 'foo.foo1', False)
138 >>> fromlocal('bar')
136 >>> fromlocal('bar')
139 ('foo.bar', 'foo.bar.__init__', True)
137 ('foo.bar', 'foo.bar.__init__', True)
140 >>> fromlocal('bar.bar1')
138 >>> fromlocal('bar.bar1')
141 ('foo.bar.bar1', 'foo.bar.bar1', False)
139 ('foo.bar.bar1', 'foo.bar.bar1', False)
142 >>> # absolute
140 >>> # absolute
143 >>> fromlocal('baz')
141 >>> fromlocal('baz')
144 ('baz', 'baz.__init__', True)
142 ('baz', 'baz.__init__', True)
145 >>> fromlocal('baz.baz1')
143 >>> fromlocal('baz.baz1')
146 ('baz.baz1', 'baz.baz1', False)
144 ('baz.baz1', 'baz.baz1', False)
147 >>> # unknown = maybe standard library
145 >>> # unknown = maybe standard library
148 >>> fromlocal('os')
146 >>> fromlocal('os')
149 False
147 False
150 >>> fromlocal(None, 1)
148 >>> fromlocal(None, 1)
151 ('foo', 'foo.__init__', True)
149 ('foo', 'foo.__init__', True)
152 >>> fromlocal('foo1', 1)
150 >>> fromlocal('foo1', 1)
153 ('foo.foo1', 'foo.foo1', False)
151 ('foo.foo1', 'foo.foo1', False)
154 >>> fromlocal2 = fromlocalfunc('foo.xxx.yyy', localmods)
152 >>> fromlocal2 = fromlocalfunc('foo.xxx.yyy', localmods)
155 >>> fromlocal2(None, 2)
153 >>> fromlocal2(None, 2)
156 ('foo', 'foo.__init__', True)
154 ('foo', 'foo.__init__', True)
157 >>> fromlocal2('bar2', 1)
155 >>> fromlocal2('bar2', 1)
158 False
156 False
159 >>> fromlocal2('bar', 2)
157 >>> fromlocal2('bar', 2)
160 ('foo.bar', 'foo.bar.__init__', True)
158 ('foo.bar', 'foo.bar.__init__', True)
161 """
159 """
162 if not isinstance(modulename, str):
160 if not isinstance(modulename, str):
163 modulename = modulename.decode('ascii')
161 modulename = modulename.decode('ascii')
164 prefix = '.'.join(modulename.split('.')[:-1])
162 prefix = '.'.join(modulename.split('.')[:-1])
165 if prefix:
163 if prefix:
166 prefix += '.'
164 prefix += '.'
167 def fromlocal(name, level=0):
165 def fromlocal(name, level=0):
168 # name is false value when relative imports are used.
166 # name is false value when relative imports are used.
169 if not name:
167 if not name:
170 # If relative imports are used, level must not be absolute.
168 # If relative imports are used, level must not be absolute.
171 assert level > 0
169 assert level > 0
172 candidates = ['.'.join(modulename.split('.')[:-level])]
170 candidates = ['.'.join(modulename.split('.')[:-level])]
173 else:
171 else:
174 if not level:
172 if not level:
175 # Check relative name first.
173 # Check relative name first.
176 candidates = [prefix + name, name]
174 candidates = [prefix + name, name]
177 else:
175 else:
178 candidates = ['.'.join(modulename.split('.')[:-level]) +
176 candidates = ['.'.join(modulename.split('.')[:-level]) +
179 '.' + name]
177 '.' + name]
180
178
181 for n in candidates:
179 for n in candidates:
182 if n in localmods:
180 if n in localmods:
183 return (n, n, False)
181 return (n, n, False)
184 dottedpath = n + '.__init__'
182 dottedpath = n + '.__init__'
185 if dottedpath in localmods:
183 if dottedpath in localmods:
186 return (n, dottedpath, True)
184 return (n, dottedpath, True)
187 return False
185 return False
188 return fromlocal
186 return fromlocal
189
187
190 def populateextmods(localmods):
188 def populateextmods(localmods):
191 """Populate C extension modules based on pure modules"""
189 """Populate C extension modules based on pure modules"""
192 newlocalmods = set(localmods)
190 newlocalmods = set(localmods)
193 for n in localmods:
191 for n in localmods:
194 if n.startswith('mercurial.pure.'):
192 if n.startswith('mercurial.pure.'):
195 m = n[len('mercurial.pure.'):]
193 m = n[len('mercurial.pure.'):]
196 newlocalmods.add('mercurial.cext.' + m)
194 newlocalmods.add('mercurial.cext.' + m)
197 newlocalmods.add('mercurial.cffi._' + m)
195 newlocalmods.add('mercurial.cffi._' + m)
198 return newlocalmods
196 return newlocalmods
199
197
200 def list_stdlib_modules():
198 def list_stdlib_modules():
201 """List the modules present in the stdlib.
199 """List the modules present in the stdlib.
202
200
203 >>> py3 = sys.version_info[0] >= 3
201 >>> py3 = sys.version_info[0] >= 3
204 >>> mods = set(list_stdlib_modules())
202 >>> mods = set(list_stdlib_modules())
205 >>> 'BaseHTTPServer' in mods or py3
203 >>> 'BaseHTTPServer' in mods or py3
206 True
204 True
207
205
208 os.path isn't really a module, so it's missing:
206 os.path isn't really a module, so it's missing:
209
207
210 >>> 'os.path' in mods
208 >>> 'os.path' in mods
211 False
209 False
212
210
213 sys requires special treatment, because it's baked into the
211 sys requires special treatment, because it's baked into the
214 interpreter, but it should still appear:
212 interpreter, but it should still appear:
215
213
216 >>> 'sys' in mods
214 >>> 'sys' in mods
217 True
215 True
218
216
219 >>> 'collections' in mods
217 >>> 'collections' in mods
220 True
218 True
221
219
222 >>> 'cStringIO' in mods or py3
220 >>> 'cStringIO' in mods or py3
223 True
221 True
224
222
225 >>> 'cffi' in mods
223 >>> 'cffi' in mods
226 True
224 True
227 """
225 """
228 for m in sys.builtin_module_names:
226 for m in sys.builtin_module_names:
229 yield m
227 yield m
230 # These modules only exist on windows, but we should always
228 # These modules only exist on windows, but we should always
231 # consider them stdlib.
229 # consider them stdlib.
232 for m in ['msvcrt', '_winreg']:
230 for m in ['msvcrt', '_winreg']:
233 yield m
231 yield m
234 yield '__builtin__'
232 yield '__builtin__'
235 yield 'builtins' # python3 only
233 yield 'builtins' # python3 only
236 yield 'importlib.abc' # python3 only
234 yield 'importlib.abc' # python3 only
237 yield 'importlib.machinery' # python3 only
235 yield 'importlib.machinery' # python3 only
238 yield 'importlib.util' # python3 only
236 yield 'importlib.util' # python3 only
239 for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only
237 for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only
240 yield m
238 yield m
241 for m in 'cPickle', 'datetime': # in Python (not C) on PyPy
239 for m in 'cPickle', 'datetime': # in Python (not C) on PyPy
242 yield m
240 yield m
243 for m in ['cffi']:
241 for m in ['cffi']:
244 yield m
242 yield m
245 stdlib_prefixes = {sys.prefix, sys.exec_prefix}
243 stdlib_prefixes = {sys.prefix, sys.exec_prefix}
246 # We need to supplement the list of prefixes for the search to work
244 # We need to supplement the list of prefixes for the search to work
247 # when run from within a virtualenv.
245 # when run from within a virtualenv.
248 for mod in (basehttpserver, zlib):
246 for mod in (basehttpserver, zlib):
249 if mod is None:
247 if mod is None:
250 continue
248 continue
251 try:
249 try:
252 # Not all module objects have a __file__ attribute.
250 # Not all module objects have a __file__ attribute.
253 filename = mod.__file__
251 filename = mod.__file__
254 except AttributeError:
252 except AttributeError:
255 continue
253 continue
256 dirname = os.path.dirname(filename)
254 dirname = os.path.dirname(filename)
257 for prefix in stdlib_prefixes:
255 for prefix in stdlib_prefixes:
258 if dirname.startswith(prefix):
256 if dirname.startswith(prefix):
259 # Then this directory is redundant.
257 # Then this directory is redundant.
260 break
258 break
261 else:
259 else:
262 stdlib_prefixes.add(dirname)
260 stdlib_prefixes.add(dirname)
261 sourceroot = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
263 for libpath in sys.path:
262 for libpath in sys.path:
264 # We want to walk everything in sys.path that starts with
263 # We want to walk everything in sys.path that starts with something in
265 # something in stdlib_prefixes.
264 # stdlib_prefixes, but not directories from the hg sources.
266 if not any(libpath.startswith(p) for p in stdlib_prefixes):
265 if (os.path.abspath(libpath).startswith(sourceroot)
266 or not any(libpath.startswith(p) for p in stdlib_prefixes)):
267 continue
267 continue
268 for top, dirs, files in os.walk(libpath):
268 for top, dirs, files in os.walk(libpath):
269 for i, d in reversed(list(enumerate(dirs))):
269 for i, d in reversed(list(enumerate(dirs))):
270 if (not os.path.exists(os.path.join(top, d, '__init__.py'))
270 if (not os.path.exists(os.path.join(top, d, '__init__.py'))
271 or top == libpath and d in ('hgdemandimport', 'hgext',
271 or top == libpath and d in ('hgdemandimport', 'hgext',
272 'mercurial')):
272 'mercurial')):
273 del dirs[i]
273 del dirs[i]
274 for name in files:
274 for name in files:
275 if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')):
275 if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')):
276 continue
276 continue
277 if name.startswith('__init__.py'):
277 if name.startswith('__init__.py'):
278 full_path = top
278 full_path = top
279 else:
279 else:
280 full_path = os.path.join(top, name)
280 full_path = os.path.join(top, name)
281 rel_path = full_path[len(libpath) + 1:]
281 rel_path = full_path[len(libpath) + 1:]
282 mod = dotted_name_of_path(rel_path)
282 mod = dotted_name_of_path(rel_path)
283 yield mod
283 yield mod
284
284
285 stdlib_modules = set(list_stdlib_modules())
285 stdlib_modules = set(list_stdlib_modules())
286
286
287 def imported_modules(source, modulename, f, localmods, ignore_nested=False):
287 def imported_modules(source, modulename, f, localmods, ignore_nested=False):
288 """Given the source of a file as a string, yield the names
288 """Given the source of a file as a string, yield the names
289 imported by that file.
289 imported by that file.
290
290
291 Args:
291 Args:
292 source: The python source to examine as a string.
292 source: The python source to examine as a string.
293 modulename: of specified python source (may have `__init__`)
293 modulename: of specified python source (may have `__init__`)
294 localmods: set of locally defined module names (may have `__init__`)
294 localmods: set of locally defined module names (may have `__init__`)
295 ignore_nested: If true, import statements that do not start in
295 ignore_nested: If true, import statements that do not start in
296 column zero will be ignored.
296 column zero will be ignored.
297
297
298 Returns:
298 Returns:
299 A list of absolute module names imported by the given source.
299 A list of absolute module names imported by the given source.
300
300
301 >>> f = 'foo/xxx.py'
301 >>> f = 'foo/xxx.py'
302 >>> modulename = 'foo.xxx'
302 >>> modulename = 'foo.xxx'
303 >>> localmods = {'foo.__init__': True,
303 >>> localmods = {'foo.__init__': True,
304 ... 'foo.foo1': True, 'foo.foo2': True,
304 ... 'foo.foo1': True, 'foo.foo2': True,
305 ... 'foo.bar.__init__': True, 'foo.bar.bar1': True,
305 ... 'foo.bar.__init__': True, 'foo.bar.bar1': True,
306 ... 'baz.__init__': True, 'baz.baz1': True }
306 ... 'baz.__init__': True, 'baz.baz1': True }
307 >>> # standard library (= not locally defined ones)
307 >>> # standard library (= not locally defined ones)
308 >>> sorted(imported_modules(
308 >>> sorted(imported_modules(
309 ... 'from stdlib1 import foo, bar; import stdlib2',
309 ... 'from stdlib1 import foo, bar; import stdlib2',
310 ... modulename, f, localmods))
310 ... modulename, f, localmods))
311 []
311 []
312 >>> # relative importing
312 >>> # relative importing
313 >>> sorted(imported_modules(
313 >>> sorted(imported_modules(
314 ... 'import foo1; from bar import bar1',
314 ... 'import foo1; from bar import bar1',
315 ... modulename, f, localmods))
315 ... modulename, f, localmods))
316 ['foo.bar.bar1', 'foo.foo1']
316 ['foo.bar.bar1', 'foo.foo1']
317 >>> sorted(imported_modules(
317 >>> sorted(imported_modules(
318 ... 'from bar.bar1 import name1, name2, name3',
318 ... 'from bar.bar1 import name1, name2, name3',
319 ... modulename, f, localmods))
319 ... modulename, f, localmods))
320 ['foo.bar.bar1']
320 ['foo.bar.bar1']
321 >>> # absolute importing
321 >>> # absolute importing
322 >>> sorted(imported_modules(
322 >>> sorted(imported_modules(
323 ... 'from baz import baz1, name1',
323 ... 'from baz import baz1, name1',
324 ... modulename, f, localmods))
324 ... modulename, f, localmods))
325 ['baz.__init__', 'baz.baz1']
325 ['baz.__init__', 'baz.baz1']
326 >>> # mixed importing, even though it shouldn't be recommended
326 >>> # mixed importing, even though it shouldn't be recommended
327 >>> sorted(imported_modules(
327 >>> sorted(imported_modules(
328 ... 'import stdlib, foo1, baz',
328 ... 'import stdlib, foo1, baz',
329 ... modulename, f, localmods))
329 ... modulename, f, localmods))
330 ['baz.__init__', 'foo.foo1']
330 ['baz.__init__', 'foo.foo1']
331 >>> # ignore_nested
331 >>> # ignore_nested
332 >>> sorted(imported_modules(
332 >>> sorted(imported_modules(
333 ... '''import foo
333 ... '''import foo
334 ... def wat():
334 ... def wat():
335 ... import bar
335 ... import bar
336 ... ''', modulename, f, localmods))
336 ... ''', modulename, f, localmods))
337 ['foo.__init__', 'foo.bar.__init__']
337 ['foo.__init__', 'foo.bar.__init__']
338 >>> sorted(imported_modules(
338 >>> sorted(imported_modules(
339 ... '''import foo
339 ... '''import foo
340 ... def wat():
340 ... def wat():
341 ... import bar
341 ... import bar
342 ... ''', modulename, f, localmods, ignore_nested=True))
342 ... ''', modulename, f, localmods, ignore_nested=True))
343 ['foo.__init__']
343 ['foo.__init__']
344 """
344 """
345 fromlocal = fromlocalfunc(modulename, localmods)
345 fromlocal = fromlocalfunc(modulename, localmods)
346 for node in ast.walk(ast.parse(source, f)):
346 for node in ast.walk(ast.parse(source, f)):
347 if ignore_nested and getattr(node, 'col_offset', 0) > 0:
347 if ignore_nested and getattr(node, 'col_offset', 0) > 0:
348 continue
348 continue
349 if isinstance(node, ast.Import):
349 if isinstance(node, ast.Import):
350 for n in node.names:
350 for n in node.names:
351 found = fromlocal(n.name)
351 found = fromlocal(n.name)
352 if not found:
352 if not found:
353 # this should import standard library
353 # this should import standard library
354 continue
354 continue
355 yield found[1]
355 yield found[1]
356 elif isinstance(node, ast.ImportFrom):
356 elif isinstance(node, ast.ImportFrom):
357 found = fromlocal(node.module, node.level)
357 found = fromlocal(node.module, node.level)
358 if not found:
358 if not found:
359 # this should import standard library
359 # this should import standard library
360 continue
360 continue
361
361
362 absname, dottedpath, hassubmod = found
362 absname, dottedpath, hassubmod = found
363 if not hassubmod:
363 if not hassubmod:
364 # "dottedpath" is not a package; must be imported
364 # "dottedpath" is not a package; must be imported
365 yield dottedpath
365 yield dottedpath
366 # examination of "node.names" should be redundant
366 # examination of "node.names" should be redundant
367 # e.g.: from mercurial.node import nullid, nullrev
367 # e.g.: from mercurial.node import nullid, nullrev
368 continue
368 continue
369
369
370 modnotfound = False
370 modnotfound = False
371 prefix = absname + '.'
371 prefix = absname + '.'
372 for n in node.names:
372 for n in node.names:
373 found = fromlocal(prefix + n.name)
373 found = fromlocal(prefix + n.name)
374 if not found:
374 if not found:
375 # this should be a function or a property of "node.module"
375 # this should be a function or a property of "node.module"
376 modnotfound = True
376 modnotfound = True
377 continue
377 continue
378 yield found[1]
378 yield found[1]
379 if modnotfound:
379 if modnotfound:
380 # "dottedpath" is a package, but imported because of non-module
380 # "dottedpath" is a package, but imported because of non-module
381 # lookup
381 # lookup
382 yield dottedpath
382 yield dottedpath
383
383
384 def verify_import_convention(module, source, localmods):
384 def verify_import_convention(module, source, localmods):
385 """Verify imports match our established coding convention.
385 """Verify imports match our established coding convention.
386
386
387 We have 2 conventions: legacy and modern. The modern convention is in
387 We have 2 conventions: legacy and modern. The modern convention is in
388 effect when using absolute imports.
388 effect when using absolute imports.
389
389
390 The legacy convention only looks for mixed imports. The modern convention
390 The legacy convention only looks for mixed imports. The modern convention
391 is much more thorough.
391 is much more thorough.
392 """
392 """
393 root = ast.parse(source)
393 root = ast.parse(source)
394 absolute = usingabsolute(root)
394 absolute = usingabsolute(root)
395
395
396 if absolute:
396 if absolute:
397 return verify_modern_convention(module, root, localmods)
397 return verify_modern_convention(module, root, localmods)
398 else:
398 else:
399 return verify_stdlib_on_own_line(root)
399 return verify_stdlib_on_own_line(root)
400
400
401 def verify_modern_convention(module, root, localmods, root_col_offset=0):
401 def verify_modern_convention(module, root, localmods, root_col_offset=0):
402 """Verify a file conforms to the modern import convention rules.
402 """Verify a file conforms to the modern import convention rules.
403
403
404 The rules of the modern convention are:
404 The rules of the modern convention are:
405
405
406 * Ordering is stdlib followed by local imports. Each group is lexically
406 * Ordering is stdlib followed by local imports. Each group is lexically
407 sorted.
407 sorted.
408 * Importing multiple modules via "import X, Y" is not allowed: use
408 * Importing multiple modules via "import X, Y" is not allowed: use
409 separate import statements.
409 separate import statements.
410 * Importing multiple modules via "from X import ..." is allowed if using
410 * Importing multiple modules via "from X import ..." is allowed if using
411 parenthesis and one entry per line.
411 parenthesis and one entry per line.
412 * Only 1 relative import statement per import level ("from .", "from ..")
412 * Only 1 relative import statement per import level ("from .", "from ..")
413 is allowed.
413 is allowed.
414 * Relative imports from higher levels must occur before lower levels. e.g.
414 * Relative imports from higher levels must occur before lower levels. e.g.
415 "from .." must be before "from .".
415 "from .." must be before "from .".
416 * Imports from peer packages should use relative import (e.g. do not
416 * Imports from peer packages should use relative import (e.g. do not
417 "import mercurial.foo" from a "mercurial.*" module).
417 "import mercurial.foo" from a "mercurial.*" module).
418 * Symbols can only be imported from specific modules (see
418 * Symbols can only be imported from specific modules (see
419 `allowsymbolimports`). For other modules, first import the module then
419 `allowsymbolimports`). For other modules, first import the module then
420 assign the symbol to a module-level variable. In addition, these imports
420 assign the symbol to a module-level variable. In addition, these imports
421 must be performed before other local imports. This rule only
421 must be performed before other local imports. This rule only
422 applies to import statements outside of any blocks.
422 applies to import statements outside of any blocks.
423 * Relative imports from the standard library are not allowed, unless that
423 * Relative imports from the standard library are not allowed, unless that
424 library is also a local module.
424 library is also a local module.
425 * Certain modules must be aliased to alternate names to avoid aliasing
425 * Certain modules must be aliased to alternate names to avoid aliasing
426 and readability problems. See `requirealias`.
426 and readability problems. See `requirealias`.
427 """
427 """
428 if not isinstance(module, str):
428 if not isinstance(module, str):
429 module = module.decode('ascii')
429 module = module.decode('ascii')
430 topmodule = module.split('.')[0]
430 topmodule = module.split('.')[0]
431 fromlocal = fromlocalfunc(module, localmods)
431 fromlocal = fromlocalfunc(module, localmods)
432
432
433 # Whether a local/non-stdlib import has been performed.
433 # Whether a local/non-stdlib import has been performed.
434 seenlocal = None
434 seenlocal = None
435 # Whether a local/non-stdlib, non-symbol import has been seen.
435 # Whether a local/non-stdlib, non-symbol import has been seen.
436 seennonsymbollocal = False
436 seennonsymbollocal = False
437 # The last name to be imported (for sorting).
437 # The last name to be imported (for sorting).
438 lastname = None
438 lastname = None
439 laststdlib = None
439 laststdlib = None
440 # Relative import levels encountered so far.
440 # Relative import levels encountered so far.
441 seenlevels = set()
441 seenlevels = set()
442
442
443 for node, newscope in walklocal(root):
443 for node, newscope in walklocal(root):
444 def msg(fmt, *args):
444 def msg(fmt, *args):
445 return (fmt % args, node.lineno)
445 return (fmt % args, node.lineno)
446 if newscope:
446 if newscope:
447 # Check for local imports in function
447 # Check for local imports in function
448 for r in verify_modern_convention(module, node, localmods,
448 for r in verify_modern_convention(module, node, localmods,
449 node.col_offset + 4):
449 node.col_offset + 4):
450 yield r
450 yield r
451 elif isinstance(node, ast.Import):
451 elif isinstance(node, ast.Import):
452 # Disallow "import foo, bar" and require separate imports
452 # Disallow "import foo, bar" and require separate imports
453 # for each module.
453 # for each module.
454 if len(node.names) > 1:
454 if len(node.names) > 1:
455 yield msg('multiple imported names: %s',
455 yield msg('multiple imported names: %s',
456 ', '.join(n.name for n in node.names))
456 ', '.join(n.name for n in node.names))
457
457
458 name = node.names[0].name
458 name = node.names[0].name
459 asname = node.names[0].asname
459 asname = node.names[0].asname
460
460
461 stdlib = name in stdlib_modules
461 stdlib = name in stdlib_modules
462
462
463 # Ignore sorting rules on imports inside blocks.
463 # Ignore sorting rules on imports inside blocks.
464 if node.col_offset == root_col_offset:
464 if node.col_offset == root_col_offset:
465 if lastname and name < lastname and laststdlib == stdlib:
465 if lastname and name < lastname and laststdlib == stdlib:
466 yield msg('imports not lexically sorted: %s < %s',
466 yield msg('imports not lexically sorted: %s < %s',
467 name, lastname)
467 name, lastname)
468
468
469 lastname = name
469 lastname = name
470 laststdlib = stdlib
470 laststdlib = stdlib
471
471
472 # stdlib imports should be before local imports.
472 # stdlib imports should be before local imports.
473 if stdlib and seenlocal and node.col_offset == root_col_offset:
473 if stdlib and seenlocal and node.col_offset == root_col_offset:
474 yield msg('stdlib import "%s" follows local import: %s',
474 yield msg('stdlib import "%s" follows local import: %s',
475 name, seenlocal)
475 name, seenlocal)
476
476
477 if not stdlib:
477 if not stdlib:
478 seenlocal = name
478 seenlocal = name
479
479
480 # Import of sibling modules should use relative imports.
480 # Import of sibling modules should use relative imports.
481 topname = name.split('.')[0]
481 topname = name.split('.')[0]
482 if topname == topmodule:
482 if topname == topmodule:
483 yield msg('import should be relative: %s', name)
483 yield msg('import should be relative: %s', name)
484
484
485 if name in requirealias and asname != requirealias[name]:
485 if name in requirealias and asname != requirealias[name]:
486 yield msg('%s module must be "as" aliased to %s',
486 yield msg('%s module must be "as" aliased to %s',
487 name, requirealias[name])
487 name, requirealias[name])
488
488
489 elif isinstance(node, ast.ImportFrom):
489 elif isinstance(node, ast.ImportFrom):
490 # Resolve the full imported module name.
490 # Resolve the full imported module name.
491 if node.level > 0:
491 if node.level > 0:
492 fullname = '.'.join(module.split('.')[:-node.level])
492 fullname = '.'.join(module.split('.')[:-node.level])
493 if node.module:
493 if node.module:
494 fullname += '.%s' % node.module
494 fullname += '.%s' % node.module
495 else:
495 else:
496 assert node.module
496 assert node.module
497 fullname = node.module
497 fullname = node.module
498
498
499 topname = fullname.split('.')[0]
499 topname = fullname.split('.')[0]
500 if topname == topmodule:
500 if topname == topmodule:
501 yield msg('import should be relative: %s', fullname)
501 yield msg('import should be relative: %s', fullname)
502
502
503 # __future__ is special since it needs to come first and use
503 # __future__ is special since it needs to come first and use
504 # symbol import.
504 # symbol import.
505 if fullname != '__future__':
505 if fullname != '__future__':
506 if not fullname or (
506 if not fullname or (
507 fullname in stdlib_modules
507 fullname in stdlib_modules
508 and fullname not in localmods
508 and fullname not in localmods
509 and fullname + '.__init__' not in localmods):
509 and fullname + '.__init__' not in localmods):
510 yield msg('relative import of stdlib module')
510 yield msg('relative import of stdlib module')
511 else:
511 else:
512 seenlocal = fullname
512 seenlocal = fullname
513
513
514 # Direct symbol import is only allowed from certain modules and
514 # Direct symbol import is only allowed from certain modules and
515 # must occur before non-symbol imports.
515 # must occur before non-symbol imports.
516 found = fromlocal(node.module, node.level)
516 found = fromlocal(node.module, node.level)
517 if found and found[2]: # node.module is a package
517 if found and found[2]: # node.module is a package
518 prefix = found[0] + '.'
518 prefix = found[0] + '.'
519 symbols = (n.name for n in node.names
519 symbols = (n.name for n in node.names
520 if not fromlocal(prefix + n.name))
520 if not fromlocal(prefix + n.name))
521 else:
521 else:
522 symbols = (n.name for n in node.names)
522 symbols = (n.name for n in node.names)
523 symbols = [sym for sym in symbols if sym not in directsymbols]
523 symbols = [sym for sym in symbols if sym not in directsymbols]
524 if node.module and node.col_offset == root_col_offset:
524 if node.module and node.col_offset == root_col_offset:
525 if symbols and fullname not in allowsymbolimports:
525 if symbols and fullname not in allowsymbolimports:
526 yield msg('direct symbol import %s from %s',
526 yield msg('direct symbol import %s from %s',
527 ', '.join(symbols), fullname)
527 ', '.join(symbols), fullname)
528
528
529 if symbols and seennonsymbollocal:
529 if symbols and seennonsymbollocal:
530 yield msg('symbol import follows non-symbol import: %s',
530 yield msg('symbol import follows non-symbol import: %s',
531 fullname)
531 fullname)
532 if not symbols and fullname not in stdlib_modules:
532 if not symbols and fullname not in stdlib_modules:
533 seennonsymbollocal = True
533 seennonsymbollocal = True
534
534
535 if not node.module:
535 if not node.module:
536 assert node.level
536 assert node.level
537
537
538 # Only allow 1 group per level.
538 # Only allow 1 group per level.
539 if (node.level in seenlevels
539 if (node.level in seenlevels
540 and node.col_offset == root_col_offset):
540 and node.col_offset == root_col_offset):
541 yield msg('multiple "from %s import" statements',
541 yield msg('multiple "from %s import" statements',
542 '.' * node.level)
542 '.' * node.level)
543
543
544 # Higher-level groups come before lower-level groups.
544 # Higher-level groups come before lower-level groups.
545 if any(node.level > l for l in seenlevels):
545 if any(node.level > l for l in seenlevels):
546 yield msg('higher-level import should come first: %s',
546 yield msg('higher-level import should come first: %s',
547 fullname)
547 fullname)
548
548
549 seenlevels.add(node.level)
549 seenlevels.add(node.level)
550
550
551 # Entries in "from .X import ( ... )" lists must be lexically
551 # Entries in "from .X import ( ... )" lists must be lexically
552 # sorted.
552 # sorted.
553 lastentryname = None
553 lastentryname = None
554
554
555 for n in node.names:
555 for n in node.names:
556 if lastentryname and n.name < lastentryname:
556 if lastentryname and n.name < lastentryname:
557 yield msg('imports from %s not lexically sorted: %s < %s',
557 yield msg('imports from %s not lexically sorted: %s < %s',
558 fullname, n.name, lastentryname)
558 fullname, n.name, lastentryname)
559
559
560 lastentryname = n.name
560 lastentryname = n.name
561
561
562 if n.name in requirealias and n.asname != requirealias[n.name]:
562 if n.name in requirealias and n.asname != requirealias[n.name]:
563 yield msg('%s from %s must be "as" aliased to %s',
563 yield msg('%s from %s must be "as" aliased to %s',
564 n.name, fullname, requirealias[n.name])
564 n.name, fullname, requirealias[n.name])
565
565
566 def verify_stdlib_on_own_line(root):
566 def verify_stdlib_on_own_line(root):
567 """Given some python source, verify that stdlib imports are done
567 """Given some python source, verify that stdlib imports are done
568 in separate statements from relative local module imports.
568 in separate statements from relative local module imports.
569
569
570 >>> list(verify_stdlib_on_own_line(ast.parse('import sys, foo')))
570 >>> list(verify_stdlib_on_own_line(ast.parse('import sys, foo')))
571 [('mixed imports\\n stdlib: sys\\n relative: foo', 1)]
571 [('mixed imports\\n stdlib: sys\\n relative: foo', 1)]
572 >>> list(verify_stdlib_on_own_line(ast.parse('import sys, os')))
572 >>> list(verify_stdlib_on_own_line(ast.parse('import sys, os')))
573 []
573 []
574 >>> list(verify_stdlib_on_own_line(ast.parse('import foo, bar')))
574 >>> list(verify_stdlib_on_own_line(ast.parse('import foo, bar')))
575 []
575 []
576 """
576 """
577 for node in ast.walk(root):
577 for node in ast.walk(root):
578 if isinstance(node, ast.Import):
578 if isinstance(node, ast.Import):
579 from_stdlib = {False: [], True: []}
579 from_stdlib = {False: [], True: []}
580 for n in node.names:
580 for n in node.names:
581 from_stdlib[n.name in stdlib_modules].append(n.name)
581 from_stdlib[n.name in stdlib_modules].append(n.name)
582 if from_stdlib[True] and from_stdlib[False]:
582 if from_stdlib[True] and from_stdlib[False]:
583 yield ('mixed imports\n stdlib: %s\n relative: %s' %
583 yield ('mixed imports\n stdlib: %s\n relative: %s' %
584 (', '.join(sorted(from_stdlib[True])),
584 (', '.join(sorted(from_stdlib[True])),
585 ', '.join(sorted(from_stdlib[False]))), node.lineno)
585 ', '.join(sorted(from_stdlib[False]))), node.lineno)
586
586
587 class CircularImport(Exception):
587 class CircularImport(Exception):
588 pass
588 pass
589
589
590 def checkmod(mod, imports):
590 def checkmod(mod, imports):
591 shortest = {}
591 shortest = {}
592 visit = [[mod]]
592 visit = [[mod]]
593 while visit:
593 while visit:
594 path = visit.pop(0)
594 path = visit.pop(0)
595 for i in sorted(imports.get(path[-1], [])):
595 for i in sorted(imports.get(path[-1], [])):
596 if len(path) < shortest.get(i, 1000):
596 if len(path) < shortest.get(i, 1000):
597 shortest[i] = len(path)
597 shortest[i] = len(path)
598 if i in path:
598 if i in path:
599 if i == path[0]:
599 if i == path[0]:
600 raise CircularImport(path)
600 raise CircularImport(path)
601 continue
601 continue
602 visit.append(path + [i])
602 visit.append(path + [i])
603
603
604 def rotatecycle(cycle):
604 def rotatecycle(cycle):
605 """arrange a cycle so that the lexicographically first module listed first
605 """arrange a cycle so that the lexicographically first module listed first
606
606
607 >>> rotatecycle(['foo', 'bar'])
607 >>> rotatecycle(['foo', 'bar'])
608 ['bar', 'foo', 'bar']
608 ['bar', 'foo', 'bar']
609 """
609 """
610 lowest = min(cycle)
610 lowest = min(cycle)
611 idx = cycle.index(lowest)
611 idx = cycle.index(lowest)
612 return cycle[idx:] + cycle[:idx] + [lowest]
612 return cycle[idx:] + cycle[:idx] + [lowest]
613
613
614 def find_cycles(imports):
614 def find_cycles(imports):
615 """Find cycles in an already-loaded import graph.
615 """Find cycles in an already-loaded import graph.
616
616
617 All module names recorded in `imports` should be absolute one.
617 All module names recorded in `imports` should be absolute one.
618
618
619 >>> from __future__ import print_function
619 >>> from __future__ import print_function
620 >>> imports = {'top.foo': ['top.bar', 'os.path', 'top.qux'],
620 >>> imports = {'top.foo': ['top.bar', 'os.path', 'top.qux'],
621 ... 'top.bar': ['top.baz', 'sys'],
621 ... 'top.bar': ['top.baz', 'sys'],
622 ... 'top.baz': ['top.foo'],
622 ... 'top.baz': ['top.foo'],
623 ... 'top.qux': ['top.foo']}
623 ... 'top.qux': ['top.foo']}
624 >>> print('\\n'.join(sorted(find_cycles(imports))))
624 >>> print('\\n'.join(sorted(find_cycles(imports))))
625 top.bar -> top.baz -> top.foo -> top.bar
625 top.bar -> top.baz -> top.foo -> top.bar
626 top.foo -> top.qux -> top.foo
626 top.foo -> top.qux -> top.foo
627 """
627 """
628 cycles = set()
628 cycles = set()
629 for mod in sorted(imports.keys()):
629 for mod in sorted(imports.keys()):
630 try:
630 try:
631 checkmod(mod, imports)
631 checkmod(mod, imports)
632 except CircularImport as e:
632 except CircularImport as e:
633 cycle = e.args[0]
633 cycle = e.args[0]
634 cycles.add(" -> ".join(rotatecycle(cycle)))
634 cycles.add(" -> ".join(rotatecycle(cycle)))
635 return cycles
635 return cycles
636
636
637 def _cycle_sortkey(c):
637 def _cycle_sortkey(c):
638 return len(c), c
638 return len(c), c
639
639
640 def embedded(f, modname, src):
640 def embedded(f, modname, src):
641 """Extract embedded python code
641 """Extract embedded python code
642
642
643 >>> def _forcestr(thing):
643 >>> def _forcestr(thing):
644 ... if not isinstance(thing, str):
644 ... if not isinstance(thing, str):
645 ... return thing.decode('ascii')
645 ... return thing.decode('ascii')
646 ... return thing
646 ... return thing
647 >>> def test(fn, lines):
647 >>> def test(fn, lines):
648 ... for s, m, f, l in embedded(fn, b"example", lines):
648 ... for s, m, f, l in embedded(fn, b"example", lines):
649 ... print("%s %s %d" % (_forcestr(m), _forcestr(f), l))
649 ... print("%s %s %d" % (_forcestr(m), _forcestr(f), l))
650 ... print(repr(_forcestr(s)))
650 ... print(repr(_forcestr(s)))
651 >>> lines = [
651 >>> lines = [
652 ... b'comment',
652 ... b'comment',
653 ... b' >>> from __future__ import print_function',
653 ... b' >>> from __future__ import print_function',
654 ... b" >>> ' multiline",
654 ... b" >>> ' multiline",
655 ... b" ... string'",
655 ... b" ... string'",
656 ... b' ',
656 ... b' ',
657 ... b'comment',
657 ... b'comment',
658 ... b' $ cat > foo.py <<EOF',
658 ... b' $ cat > foo.py <<EOF',
659 ... b' > from __future__ import print_function',
659 ... b' > from __future__ import print_function',
660 ... b' > EOF',
660 ... b' > EOF',
661 ... ]
661 ... ]
662 >>> test(b"example.t", lines)
662 >>> test(b"example.t", lines)
663 example[2] doctest.py 1
663 example[2] doctest.py 1
664 "from __future__ import print_function\\n' multiline\\nstring'\\n\\n"
664 "from __future__ import print_function\\n' multiline\\nstring'\\n\\n"
665 example[8] foo.py 7
665 example[8] foo.py 7
666 'from __future__ import print_function\\n'
666 'from __future__ import print_function\\n'
667 """
667 """
668 errors = []
668 errors = []
669 for name, starts, ends, code in testparseutil.pyembedded(f, src, errors):
669 for name, starts, ends, code in testparseutil.pyembedded(f, src, errors):
670 if not name:
670 if not name:
671 # use 'doctest.py', in order to make already existing
671 # use 'doctest.py', in order to make already existing
672 # doctest above pass instantly
672 # doctest above pass instantly
673 name = 'doctest.py'
673 name = 'doctest.py'
674 # "starts" is "line number" (1-origin), but embedded() is
674 # "starts" is "line number" (1-origin), but embedded() is
675 # expected to return "line offset" (0-origin). Therefore, this
675 # expected to return "line offset" (0-origin). Therefore, this
676 # yields "starts - 1".
676 # yields "starts - 1".
677 if not isinstance(modname, str):
678 modname = modname.decode('utf8')
677 yield code, "%s[%d]" % (modname, starts), name, starts - 1
679 yield code, "%s[%d]" % (modname, starts), name, starts - 1
678
680
679 def sources(f, modname):
681 def sources(f, modname):
680 """Yields possibly multiple sources from a filepath
682 """Yields possibly multiple sources from a filepath
681
683
682 input: filepath, modulename
684 input: filepath, modulename
683 yields: script(string), modulename, filepath, linenumber
685 yields: script(string), modulename, filepath, linenumber
684
686
685 For embedded scripts, the modulename and filepath will be different
687 For embedded scripts, the modulename and filepath will be different
686 from the function arguments. linenumber is an offset relative to
688 from the function arguments. linenumber is an offset relative to
687 the input file.
689 the input file.
688 """
690 """
689 py = False
691 py = False
690 if not f.endswith('.t'):
692 if not f.endswith('.t'):
691 with open(f, 'rb') as src:
693 with open(f, 'rb') as src:
692 yield src.read(), modname, f, 0
694 yield src.read(), modname, f, 0
693 py = True
695 py = True
694 if py or f.endswith('.t'):
696 if py or f.endswith('.t'):
695 with open(f, 'rb') as src:
697 with open(f, 'rb') as src:
696 for script, modname, t, line in embedded(f, modname, src):
698 for script, modname, t, line in embedded(f, modname, src):
697 yield script, modname, t, line
699 yield script, modname.encode('utf8'), t, line
698
700
699 def main(argv):
701 def main(argv):
700 if len(argv) < 2 or (argv[1] == '-' and len(argv) > 2):
702 if len(argv) < 2 or (argv[1] == '-' and len(argv) > 2):
701 print('Usage: %s {-|file [file] [file] ...}')
703 print('Usage: %s {-|file [file] [file] ...}')
702 return 1
704 return 1
703 if argv[1] == '-':
705 if argv[1] == '-':
704 argv = argv[:1]
706 argv = argv[:1]
705 argv.extend(l.rstrip() for l in sys.stdin.readlines())
707 argv.extend(l.rstrip() for l in sys.stdin.readlines())
706 localmodpaths = {}
708 localmodpaths = {}
707 used_imports = {}
709 used_imports = {}
708 any_errors = False
710 any_errors = False
709 for source_path in argv[1:]:
711 for source_path in argv[1:]:
710 modname = dotted_name_of_path(source_path)
712 modname = dotted_name_of_path(source_path)
711 localmodpaths[modname] = source_path
713 localmodpaths[modname] = source_path
712 localmods = populateextmods(localmodpaths)
714 localmods = populateextmods(localmodpaths)
713 for localmodname, source_path in sorted(localmodpaths.items()):
715 for localmodname, source_path in sorted(localmodpaths.items()):
714 if not isinstance(localmodname, bytes):
716 if not isinstance(localmodname, bytes):
715 # This is only safe because all hg's files are ascii
717 # This is only safe because all hg's files are ascii
716 localmodname = localmodname.encode('ascii')
718 localmodname = localmodname.encode('ascii')
717 for src, modname, name, line in sources(source_path, localmodname):
719 for src, modname, name, line in sources(source_path, localmodname):
718 try:
720 try:
719 used_imports[modname] = sorted(
721 used_imports[modname] = sorted(
720 imported_modules(src, modname, name, localmods,
722 imported_modules(src, modname, name, localmods,
721 ignore_nested=True))
723 ignore_nested=True))
722 for error, lineno in verify_import_convention(modname, src,
724 for error, lineno in verify_import_convention(modname, src,
723 localmods):
725 localmods):
724 any_errors = True
726 any_errors = True
725 print('%s:%d: %s' % (source_path, lineno + line, error))
727 print('%s:%d: %s' % (source_path, lineno + line, error))
726 except SyntaxError as e:
728 except SyntaxError as e:
727 print('%s:%d: SyntaxError: %s' %
729 print('%s:%d: SyntaxError: %s' %
728 (source_path, e.lineno + line, e))
730 (source_path, e.lineno + line, e))
729 cycles = find_cycles(used_imports)
731 cycles = find_cycles(used_imports)
730 if cycles:
732 if cycles:
731 firstmods = set()
733 firstmods = set()
732 for c in sorted(cycles, key=_cycle_sortkey):
734 for c in sorted(cycles, key=_cycle_sortkey):
733 first = c.split()[0]
735 first = c.split()[0]
734 # As a rough cut, ignore any cycle that starts with the
736 # As a rough cut, ignore any cycle that starts with the
735 # same module as some other cycle. Otherwise we see lots
737 # same module as some other cycle. Otherwise we see lots
736 # of cycles that are effectively duplicates.
738 # of cycles that are effectively duplicates.
737 if first in firstmods:
739 if first in firstmods:
738 continue
740 continue
739 print('Import cycle:', c)
741 print('Import cycle:', c)
740 firstmods.add(first)
742 firstmods.add(first)
741 any_errors = True
743 any_errors = True
742 return any_errors != 0
744 return any_errors != 0
743
745
744 if __name__ == '__main__':
746 if __name__ == '__main__':
745 sys.exit(int(main(sys.argv)))
747 sys.exit(int(main(sys.argv)))
@@ -1,144 +1,145 b''
1 $(eval HGROOT := $(shell cd ../..; pwd))
1 $(eval HGROOT := $(shell cd ../..; pwd))
2
2
3 DEBIAN_CODENAMES := \
3 DEBIAN_CODENAMES := \
4 jessie \
4 jessie \
5 stretch \
5 stretch \
6 buster
6 buster
7
7
8 UBUNTU_CODENAMES := \
8 UBUNTU_CODENAMES := \
9 trusty \
9 trusty \
10 xenial \
10 xenial \
11 artful \
11 artful \
12 bionic \
12 bionic \
13
13
14 FEDORA_RELEASES := \
14 FEDORA_RELEASES := \
15 20 \
15 20 \
16 21 \
16 21 \
17 28
17 28 \
18 29
18
19
19 CENTOS_RELEASES := \
20 CENTOS_RELEASES := \
20 5 \
21 5 \
21 6 \
22 6 \
22 7
23 7
23
24
24 # Build a Python for these CentOS releases.
25 # Build a Python for these CentOS releases.
25 CENTOS_WITH_PYTHON_RELEASES := 5 6
26 CENTOS_WITH_PYTHON_RELEASES := 5 6
26
27
27 help:
28 help:
28 @echo 'Packaging Make Targets'
29 @echo 'Packaging Make Targets'
29 @echo ''
30 @echo ''
30 @echo 'docker-centos{$(strip $(CENTOS_RELEASES))}'
31 @echo 'docker-centos{$(strip $(CENTOS_RELEASES))}'
31 @echo ' Build an RPM for a specific CentOS version using Docker.'
32 @echo ' Build an RPM for a specific CentOS version using Docker.'
32 @echo ''
33 @echo ''
33 @echo 'docker-debian-{$(strip $(DEBIAN_CODENAMES))}'
34 @echo 'docker-debian-{$(strip $(DEBIAN_CODENAMES))}'
34 @echo ' Build Debian packages specific to a Debian distro using Docker.'
35 @echo ' Build Debian packages specific to a Debian distro using Docker.'
35 @echo ''
36 @echo ''
36 @echo 'docker-fedora{$(strip $(FEDORA_RELEASES))}'
37 @echo 'docker-fedora{$(strip $(FEDORA_RELEASES))}'
37 @echo ' Build an RPM for a specific Fedora version using Docker.'
38 @echo ' Build an RPM for a specific Fedora version using Docker.'
38 @echo ''
39 @echo ''
39 @echo 'docker-ubuntu-{$(strip $(UBUNTU_CODENAMES))}'
40 @echo 'docker-ubuntu-{$(strip $(UBUNTU_CODENAMES))}'
40 @echo ' Build Debian package specific to an Ubuntu distro using Docker.'
41 @echo ' Build Debian package specific to an Ubuntu distro using Docker.'
41 @echo ''
42 @echo ''
42 @echo 'docker-ubuntu-{$(strip $(UBUNTU_CODENAMES))}-ppa'
43 @echo 'docker-ubuntu-{$(strip $(UBUNTU_CODENAMES))}-ppa'
43 @echo ' Build a source-only Debian package specific to an Ubuntu distro'
44 @echo ' Build a source-only Debian package specific to an Ubuntu distro'
44 @echo ' using Docker.'
45 @echo ' using Docker.'
45 @echo ''
46 @echo ''
46 @echo 'linux-wheels'
47 @echo 'linux-wheels'
47 @echo ' Build Linux manylinux wheels using Docker.'
48 @echo ' Build Linux manylinux wheels using Docker.'
48 @echo ''
49 @echo ''
49 @echo 'linux-wheels-{x86_64, i686}'
50 @echo 'linux-wheels-{x86_64, i686}'
50 @echo ' Build Linux manylinux wheels for a specific architecture using Docker'
51 @echo ' Build Linux manylinux wheels for a specific architecture using Docker'
51 @echo ''
52 @echo ''
52 @echo 'deb'
53 @echo 'deb'
53 @echo ' Build a Debian package locally targeting the current system'
54 @echo ' Build a Debian package locally targeting the current system'
54 @echo ''
55 @echo ''
55 @echo 'ppa'
56 @echo 'ppa'
56 @echo ' Build a Debian source package locally targeting the current system'
57 @echo ' Build a Debian source package locally targeting the current system'
57 @echo ''
58 @echo ''
58 @echo 'centos{$(strip $(CENTOS_RELEASES))}'
59 @echo 'centos{$(strip $(CENTOS_RELEASES))}'
59 @echo ' Build an RPM for a specific CentOS version locally'
60 @echo ' Build an RPM for a specific CentOS version locally'
60 @echo ''
61 @echo ''
61 @echo 'fedora{$(strip $(FEDORA_RELEASES))}'
62 @echo 'fedora{$(strip $(FEDORA_RELEASES))}'
62 @echo ' Build an RPM for a specific Fedora version locally'
63 @echo ' Build an RPM for a specific Fedora version locally'
63
64
64 .PHONY: help
65 .PHONY: help
65
66
66 .PHONY: deb
67 .PHONY: deb
67 deb:
68 deb:
68 ./builddeb
69 ./builddeb
69
70
70 .PHONY: ppa
71 .PHONY: ppa
71 ppa:
72 ppa:
72 ./builddeb --source-only
73 ./builddeb --source-only
73
74
74 # Debian targets.
75 # Debian targets.
75 define debian_targets =
76 define debian_targets =
76 .PHONY: docker-debian-$(1)
77 .PHONY: docker-debian-$(1)
77 docker-debian-$(1):
78 docker-debian-$(1):
78 ./dockerdeb debian $(1)
79 ./dockerdeb debian $(1)
79
80
80 endef
81 endef
81
82
82 $(foreach codename,$(DEBIAN_CODENAMES),$(eval $(call debian_targets,$(codename))))
83 $(foreach codename,$(DEBIAN_CODENAMES),$(eval $(call debian_targets,$(codename))))
83
84
84 # Ubuntu targets.
85 # Ubuntu targets.
85 define ubuntu_targets =
86 define ubuntu_targets =
86 .PHONY: docker-ubuntu-$(1)
87 .PHONY: docker-ubuntu-$(1)
87 docker-ubuntu-$(1):
88 docker-ubuntu-$(1):
88 ./dockerdeb ubuntu $(1)
89 ./dockerdeb ubuntu $(1)
89
90
90 .PHONY: docker-ubuntu-$(1)-ppa
91 .PHONY: docker-ubuntu-$(1)-ppa
91 docker-ubuntu-$(1)-ppa:
92 docker-ubuntu-$(1)-ppa:
92 ./dockerdeb ubuntu $(1) --source-only
93 ./dockerdeb ubuntu $(1) --source-only
93
94
94 endef
95 endef
95
96
96 $(foreach codename,$(UBUNTU_CODENAMES),$(eval $(call ubuntu_targets,$(codename))))
97 $(foreach codename,$(UBUNTU_CODENAMES),$(eval $(call ubuntu_targets,$(codename))))
97
98
98 # Fedora targets.
99 # Fedora targets.
99 define fedora_targets
100 define fedora_targets
100 .PHONY: fedora$(1)
101 .PHONY: fedora$(1)
101 fedora$(1):
102 fedora$(1):
102 mkdir -p $$(HGROOT)/packages/fedora$(1)
103 mkdir -p $$(HGROOT)/packages/fedora$(1)
103 ./buildrpm
104 ./buildrpm
104 cp $$(HGROOT)/contrib/packaging/rpmbuild/RPMS/*/* $$(HGROOT)/packages/fedora$(1)
105 cp $$(HGROOT)/contrib/packaging/rpmbuild/RPMS/*/* $$(HGROOT)/packages/fedora$(1)
105 cp $$(HGROOT)/contrib/packaging/rpmbuild/SRPMS/* $$(HGROOT)/packages/fedora$(1)
106 cp $$(HGROOT)/contrib/packaging/rpmbuild/SRPMS/* $$(HGROOT)/packages/fedora$(1)
106 rm -rf $(HGROOT)/rpmbuild
107 rm -rf $(HGROOT)/rpmbuild
107
108
108 .PHONY: docker-fedora$(1)
109 .PHONY: docker-fedora$(1)
109 docker-fedora$(1):
110 docker-fedora$(1):
110 mkdir -p $$(HGROOT)/packages/fedora$(1)
111 mkdir -p $$(HGROOT)/packages/fedora$(1)
111 ./dockerrpm fedora$(1)
112 ./dockerrpm fedora$(1)
112
113
113 endef
114 endef
114
115
115 $(foreach release,$(FEDORA_RELEASES),$(eval $(call fedora_targets,$(release))))
116 $(foreach release,$(FEDORA_RELEASES),$(eval $(call fedora_targets,$(release))))
116
117
117 # CentOS targets.
118 # CentOS targets.
118 define centos_targets
119 define centos_targets
119 .PHONY: centos$(1)
120 .PHONY: centos$(1)
120 centos$(1):
121 centos$(1):
121 mkdir -p $$(HGROOT)/packages/centos$(1)
122 mkdir -p $$(HGROOT)/packages/centos$(1)
122 ./buildrpm $$(if $$(filter $(1),$$(CENTOS_WITH_PYTHON_RELEASES)),--withpython)
123 ./buildrpm $$(if $$(filter $(1),$$(CENTOS_WITH_PYTHON_RELEASES)),--withpython)
123 cp $$(HGROOT)/contrib/packaging/rpmbuild/RPMS/*/* $$(HGROOT)/packages/centos$(1)
124 cp $$(HGROOT)/contrib/packaging/rpmbuild/RPMS/*/* $$(HGROOT)/packages/centos$(1)
124 cp $$(HGROOT)/contrib/packaging/rpmbuild/SRPMS/* $$(HGROOT)/packages/centos$(1)
125 cp $$(HGROOT)/contrib/packaging/rpmbuild/SRPMS/* $$(HGROOT)/packages/centos$(1)
125
126
126 .PHONY: docker-centos$(1)
127 .PHONY: docker-centos$(1)
127 docker-centos$(1):
128 docker-centos$(1):
128 mkdir -p $$(HGROOT)/packages/centos$(1)
129 mkdir -p $$(HGROOT)/packages/centos$(1)
129 ./dockerrpm centos$(1) $$(if $$(filter $(1),$$(CENTOS_WITH_PYTHON_RELEASES)),--withpython)
130 ./dockerrpm centos$(1) $$(if $$(filter $(1),$$(CENTOS_WITH_PYTHON_RELEASES)),--withpython)
130
131
131 endef
132 endef
132
133
133 $(foreach release,$(CENTOS_RELEASES),$(eval $(call centos_targets,$(release))))
134 $(foreach release,$(CENTOS_RELEASES),$(eval $(call centos_targets,$(release))))
134
135
135 .PHONY: linux-wheels
136 .PHONY: linux-wheels
136 linux-wheels: linux-wheels-x86_64 linux-wheels-i686
137 linux-wheels: linux-wheels-x86_64 linux-wheels-i686
137
138
138 .PHONY: linux-wheels-x86_64
139 .PHONY: linux-wheels-x86_64
139 linux-wheels-x86_64:
140 linux-wheels-x86_64:
140 docker run -e "HGTEST_JOBS=$(shell nproc)" --rm -ti -v `pwd`/../..:/src quay.io/pypa/manylinux1_x86_64 /src/contrib/packaging/build-linux-wheels.sh
141 docker run -e "HGTEST_JOBS=$(shell nproc)" --rm -ti -v `pwd`/../..:/src quay.io/pypa/manylinux1_x86_64 /src/contrib/packaging/build-linux-wheels.sh
141
142
142 .PHONY: linux-wheels-i686
143 .PHONY: linux-wheels-i686
143 linux-wheels-i686:
144 linux-wheels-i686:
144 docker run -e "HGTEST_JOBS=$(shell nproc)" --rm -ti -v `pwd`/../..:/src quay.io/pypa/manylinux1_i686 linux32 /src/contrib/packaging/build-linux-wheels.sh
145 docker run -e "HGTEST_JOBS=$(shell nproc)" --rm -ti -v `pwd`/../..:/src quay.io/pypa/manylinux1_i686 linux32 /src/contrib/packaging/build-linux-wheels.sh
@@ -1,27 +1,27 b''
1 FROM centos:centos5
1 FROM centos:centos5
2
2
3 RUN groupadd -g 1000 build && \
3 RUN groupadd -g %GID% build && \
4 useradd -u 1000 -g 1000 -s /bin/bash -d /build -m build
4 useradd -u %UID% -g %GID% -s /bin/bash -d /build -m build
5
5
6 RUN \
6 RUN \
7 sed -i 's/^mirrorlist/#mirrorlist/' /etc/yum.repos.d/*.repo && \
7 sed -i 's/^mirrorlist/#mirrorlist/' /etc/yum.repos.d/*.repo && \
8 sed -i 's/^#\(baseurl=\)http:\/\/mirror.centos.org\/centos/\1http:\/\/vault.centos.org/' /etc/yum.repos.d/*.repo && \
8 sed -i 's/^#\(baseurl=\)http:\/\/mirror.centos.org\/centos/\1http:\/\/vault.centos.org/' /etc/yum.repos.d/*.repo && \
9 sed -i 's/\$releasever/5.11/' /etc/yum.repos.d/*.repo
9 sed -i 's/\$releasever/5.11/' /etc/yum.repos.d/*.repo
10
10
11 RUN yum install -y \
11 RUN yum install -y \
12 gcc \
12 gcc \
13 gettext \
13 gettext \
14 make \
14 make \
15 python-devel \
15 python-devel \
16 python-docutils \
16 python-docutils \
17 rpm-build \
17 rpm-build \
18 tar
18 tar
19
19
20 # For creating repo meta data
20 # For creating repo meta data
21 RUN yum install -y \
21 RUN yum install -y \
22 bzip2-devel \
22 bzip2-devel \
23 createrepo \
23 createrepo \
24 ncurses-devel \
24 ncurses-devel \
25 openssl-devel \
25 openssl-devel \
26 readline-devel \
26 readline-devel \
27 zlib-devel
27 zlib-devel
@@ -1,24 +1,24 b''
1 FROM centos:centos6
1 FROM centos:centos6
2
2
3 RUN groupadd -g 1000 build && \
3 RUN groupadd -g %GID% build && \
4 useradd -u 1000 -g 1000 -s /bin/bash -d /build -m build
4 useradd -u %UID% -g %GID% -s /bin/bash -d /build -m build
5
5
6 RUN yum install -y \
6 RUN yum install -y \
7 gcc \
7 gcc \
8 gettext \
8 gettext \
9 make \
9 make \
10 python-devel \
10 python-devel \
11 python-docutils \
11 python-docutils \
12 rpm-build \
12 rpm-build \
13 tar
13 tar
14
14
15 # For creating repo meta data
15 # For creating repo meta data
16 RUN yum install -y createrepo
16 RUN yum install -y createrepo
17
17
18 # For python
18 # For python
19 RUN yum install -y \
19 RUN yum install -y \
20 bzip2-devel \
20 bzip2-devel \
21 ncurses-devel \
21 ncurses-devel \
22 openssl-devel \
22 openssl-devel \
23 readline-devel \
23 readline-devel \
24 zlib-devel
24 zlib-devel
@@ -1,16 +1,16 b''
1 FROM centos:centos7
1 FROM centos:centos7
2
2
3 RUN groupadd -g 1000 build && \
3 RUN groupadd -g %GID% build && \
4 useradd -u 1000 -g 1000 -s /bin/bash -d /build -m build
4 useradd -u %UID% -g %GID% -s /bin/bash -d /build -m build
5
5
6 RUN yum install -y \
6 RUN yum install -y \
7 gcc \
7 gcc \
8 gettext \
8 gettext \
9 make \
9 make \
10 python-devel \
10 python-devel \
11 python-docutils \
11 python-docutils \
12 rpm-build \
12 rpm-build \
13 tar
13 tar
14
14
15 # For creating repo meta data
15 # For creating repo meta data
16 RUN yum install -y createrepo
16 RUN yum install -y createrepo
@@ -1,15 +1,15 b''
1 FROM fedora:28
1 FROM fedora:29
2
2
3 RUN groupadd -g 1000 build && \
3 RUN groupadd -g 1000 build && \
4 useradd -u 1000 -g 1000 -s /bin/bash -d /build -m build
4 useradd -u 1000 -g 1000 -s /bin/bash -d /build -m build
5
5
6 RUN dnf install -y \
6 RUN dnf install -y \
7 gcc \
7 gcc \
8 gettext \
8 gettext \
9 make \
9 make \
10 python-devel \
10 python-devel \
11 python-docutils \
11 python-docutils \
12 rpm-build
12 rpm-build
13
13
14 # For creating repo meta data
14 # For creating repo meta data
15 RUN dnf install -y createrepo
15 RUN dnf install -y createrepo
@@ -1,39 +1,47 b''
1 #!/bin/bash -e
1 #!/bin/bash -e
2
2
3 BUILDDIR=$(dirname $0)
3 BUILDDIR=$(dirname $0)
4 export ROOTDIR=$(cd $BUILDDIR/../..; pwd)
4 export ROOTDIR=$(cd $BUILDDIR/../..; pwd)
5
5
6 PLATFORM="$1"
6 PLATFORM="$1"
7 shift # extra params are passed to buildrpm
7 shift # extra params are passed to buildrpm
8
8
9 DOCKER=$($BUILDDIR/hg-docker docker-path)
9 DOCKER=$($BUILDDIR/hg-docker docker-path)
10
10
11 CONTAINER=hg-docker-$PLATFORM
11 CONTAINER=hg-docker-$PLATFORM
12
12
13 $BUILDDIR/hg-docker build $BUILDDIR/docker/$PLATFORM $CONTAINER
13 if [[ -z "${HG_DOCKER_OWN_USER}" ]]; then
14 DOCKERUID=1000
15 DOCKERGID=1000
16 else
17 DOCKERUID=$(id -u)
18 DOCKERGID=$(id -g)
19 fi
20
21 $BUILDDIR/hg-docker build --build-arg UID=$DOCKERUID --build-arg GID=$DOCKERGID $BUILDDIR/docker/$PLATFORM $CONTAINER
14
22
15 RPMBUILDDIR=$ROOTDIR/packages/$PLATFORM
23 RPMBUILDDIR=$ROOTDIR/packages/$PLATFORM
16 $ROOTDIR/contrib/packaging/buildrpm --rpmbuilddir $RPMBUILDDIR --prepare $*
24 $ROOTDIR/contrib/packaging/buildrpm --rpmbuilddir $RPMBUILDDIR --prepare $*
17
25
18 DSHARED=/mnt/shared
26 DSHARED=/mnt/shared
19 DBUILDUSER=build
27 DBUILDUSER=build
20
28
21 $DOCKER run -e http_proxy -e https_proxy -u $DBUILDUSER --rm -v $RPMBUILDDIR:$DSHARED $CONTAINER \
29 $DOCKER run -e http_proxy -e https_proxy -u $DBUILDUSER --rm -v $RPMBUILDDIR:$DSHARED $CONTAINER \
22 rpmbuild --define "_topdir $DSHARED" -ba $DSHARED/SPECS/mercurial.spec --clean
30 rpmbuild --define "_topdir $DSHARED" -ba $DSHARED/SPECS/mercurial.spec --clean
23
31
24 $DOCKER run -e http_proxy -e https_proxy -u $DBUILDUSER --rm -v $RPMBUILDDIR:$DSHARED $CONTAINER \
32 $DOCKER run -e http_proxy -e https_proxy -u $DBUILDUSER --rm -v $RPMBUILDDIR:$DSHARED $CONTAINER \
25 createrepo $DSHARED
33 createrepo $DSHARED
26
34
27 cat << EOF > $RPMBUILDDIR/mercurial.repo
35 cat << EOF > $RPMBUILDDIR/mercurial.repo
28 # Place this file in /etc/yum.repos.d/mercurial.repo
36 # Place this file in /etc/yum.repos.d/mercurial.repo
29 [mercurial]
37 [mercurial]
30 name=Mercurial packages for $PLATFORM
38 name=Mercurial packages for $PLATFORM
31 # baseurl=file://$RPMBUILDDIR/
39 # baseurl=file://$RPMBUILDDIR/
32 baseurl=http://hg.example.com/build/$PLATFORM/
40 baseurl=http://hg.example.com/build/$PLATFORM/
33 skip_if_unavailable=True
41 skip_if_unavailable=True
34 gpgcheck=0
42 gpgcheck=0
35 enabled=1
43 enabled=1
36 EOF
44 EOF
37
45
38 echo
46 echo
39 echo "Build complete - results can be found in $RPMBUILDDIR"
47 echo "Build complete - results can be found in $RPMBUILDDIR"
@@ -1,111 +1,116 b''
1 #!/usr/bin/env python3
1 #!/usr/bin/env python3
2 #
2 #
3 # Copyright 2018 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2018 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import argparse
8 import argparse
9 import pathlib
9 import pathlib
10 import shutil
10 import shutil
11 import subprocess
11 import subprocess
12 import sys
12 import sys
13
13
14 def get_docker() -> str:
14 def get_docker() -> str:
15 docker = shutil.which('docker.io') or shutil.which('docker')
15 docker = shutil.which('docker.io') or shutil.which('docker')
16 if not docker:
16 if not docker:
17 print('could not find docker executable')
17 print('could not find docker executable')
18 return 1
18 return 1
19
19
20 try:
20 try:
21 out = subprocess.check_output([docker, '-h'], stderr=subprocess.STDOUT)
21 out = subprocess.check_output([docker, '-h'], stderr=subprocess.STDOUT)
22
22
23 if b'Jansens' in out:
23 if b'Jansens' in out:
24 print('%s is the Docking System Tray; try installing docker.io' %
24 print('%s is the Docking System Tray; try installing docker.io' %
25 docker)
25 docker)
26 sys.exit(1)
26 sys.exit(1)
27 except subprocess.CalledProcessError as e:
27 except subprocess.CalledProcessError as e:
28 print('error calling `%s -h`: %s' % (docker, e.output))
28 print('error calling `%s -h`: %s' % (docker, e.output))
29 sys.exit(1)
29 sys.exit(1)
30
30
31 out = subprocess.check_output([docker, 'version'],
31 out = subprocess.check_output([docker, 'version'],
32 stderr=subprocess.STDOUT)
32 stderr=subprocess.STDOUT)
33
33
34 lines = out.splitlines()
34 lines = out.splitlines()
35 if not any(l.startswith((b'Client:', b'Client version:')) for l in lines):
35 if not any(l.startswith((b'Client:', b'Client version:')) for l in lines):
36 print('`%s version` does not look like Docker' % docker)
36 print('`%s version` does not look like Docker' % docker)
37 sys.exit(1)
37 sys.exit(1)
38
38
39 if not any(l.startswith((b'Server:', b'Server version:')) for l in lines):
39 if not any(l.startswith((b'Server:', b'Server version:')) for l in lines):
40 print('`%s version` does not look like Docker' % docker)
40 print('`%s version` does not look like Docker' % docker)
41 sys.exit(1)
41 sys.exit(1)
42
42
43 return docker
43 return docker
44
44
45 def get_dockerfile(path: pathlib.Path, args: list) -> bytes:
45 def get_dockerfile(path: pathlib.Path, args: list) -> bytes:
46 with path.open('rb') as fh:
46 with path.open('rb') as fh:
47 df = fh.read()
47 df = fh.read()
48
48
49 for k, v in args:
49 for k, v in args:
50 df = df.replace(b'%%%s%%' % k, v)
50 df = df.replace(bytes('%%%s%%' % k.decode(), 'utf-8'), v)
51
51
52 return df
52 return df
53
53
54 def build_docker_image(dockerfile: pathlib.Path, params: list, tag: str):
54 def build_docker_image(dockerfile: pathlib.Path, params: list, tag: str):
55 """Build a Docker image from a templatized Dockerfile."""
55 """Build a Docker image from a templatized Dockerfile."""
56 docker = get_docker()
56 docker = get_docker()
57
57
58 dockerfile_path = pathlib.Path(dockerfile)
58 dockerfile_path = pathlib.Path(dockerfile)
59
59
60 dockerfile = get_dockerfile(dockerfile_path, params)
60 dockerfile = get_dockerfile(dockerfile_path, params)
61
61
62 print('building Dockerfile:')
62 print('building Dockerfile:')
63 print(dockerfile.decode('utf-8', 'replace'))
63 print(dockerfile.decode('utf-8', 'replace'))
64
64
65 args = [
65 args = [
66 docker,
66 docker,
67 'build',
67 'build',
68 '--build-arg', 'http_proxy',
68 '--build-arg', 'http_proxy',
69 '--build-arg', 'https_proxy',
69 '--build-arg', 'https_proxy',
70 '--tag', tag,
70 '--tag', tag,
71 '-',
71 '-',
72 ]
72 ]
73
73
74 print('executing: %r' % args)
74 print('executing: %r' % args)
75 subprocess.run(args, input=dockerfile, check=True)
75 p = subprocess.Popen(args, stdin=subprocess.PIPE)
76 p.communicate(input=dockerfile)
77 if p.returncode:
78 raise subprocess.CalledProcessException(
79 p.returncode, 'failed to build docker image: %s %s' \
80 % (p.stdout, p.stderr))
76
81
77 def command_build(args):
82 def command_build(args):
78 build_args = []
83 build_args = []
79 for arg in args.build_arg:
84 for arg in args.build_arg:
80 k, v = arg.split('=', 1)
85 k, v = arg.split('=', 1)
81 build_args.append((k.encode('utf-8'), v.encode('utf-8')))
86 build_args.append((k.encode('utf-8'), v.encode('utf-8')))
82
87
83 build_docker_image(pathlib.Path(args.dockerfile),
88 build_docker_image(pathlib.Path(args.dockerfile),
84 build_args,
89 build_args,
85 args.tag)
90 args.tag)
86
91
87 def command_docker(args):
92 def command_docker(args):
88 print(get_docker())
93 print(get_docker())
89
94
90 def main() -> int:
95 def main() -> int:
91 parser = argparse.ArgumentParser()
96 parser = argparse.ArgumentParser()
92
97
93 subparsers = parser.add_subparsers(title='subcommands')
98 subparsers = parser.add_subparsers(title='subcommands')
94
99
95 build = subparsers.add_parser('build', help='Build a Docker image')
100 build = subparsers.add_parser('build', help='Build a Docker image')
96 build.set_defaults(func=command_build)
101 build.set_defaults(func=command_build)
97 build.add_argument('--build-arg', action='append', default=[],
102 build.add_argument('--build-arg', action='append', default=[],
98 help='Substitution to perform in Dockerfile; '
103 help='Substitution to perform in Dockerfile; '
99 'format: key=value')
104 'format: key=value')
100 build.add_argument('dockerfile', help='path to Dockerfile to use')
105 build.add_argument('dockerfile', help='path to Dockerfile to use')
101 build.add_argument('tag', help='Tag to apply to created image')
106 build.add_argument('tag', help='Tag to apply to created image')
102
107
103 docker = subparsers.add_parser('docker-path', help='Resolve path to Docker')
108 docker = subparsers.add_parser('docker-path', help='Resolve path to Docker')
104 docker.set_defaults(func=command_docker)
109 docker.set_defaults(func=command_docker)
105
110
106 args = parser.parse_args()
111 args = parser.parse_args()
107
112
108 return args.func(args)
113 return args.func(args)
109
114
110 if __name__ == '__main__':
115 if __name__ == '__main__':
111 sys.exit(main())
116 sys.exit(main())
This diff has been collapsed as it changes many lines, (665 lines changed) Show them Hide them
@@ -1,2126 +1,2675 b''
1 # perf.py - performance test routines
1 # perf.py - performance test routines
2 '''helper extension to measure performance'''
2 '''helper extension to measure performance'''
3
3
4 # "historical portability" policy of perf.py:
4 # "historical portability" policy of perf.py:
5 #
5 #
6 # We have to do:
6 # We have to do:
7 # - make perf.py "loadable" with as wide Mercurial version as possible
7 # - make perf.py "loadable" with as wide Mercurial version as possible
8 # This doesn't mean that perf commands work correctly with that Mercurial.
8 # This doesn't mean that perf commands work correctly with that Mercurial.
9 # BTW, perf.py itself has been available since 1.1 (or eb240755386d).
9 # BTW, perf.py itself has been available since 1.1 (or eb240755386d).
10 # - make historical perf command work correctly with as wide Mercurial
10 # - make historical perf command work correctly with as wide Mercurial
11 # version as possible
11 # version as possible
12 #
12 #
13 # We have to do, if possible with reasonable cost:
13 # We have to do, if possible with reasonable cost:
14 # - make recent perf command for historical feature work correctly
14 # - make recent perf command for historical feature work correctly
15 # with early Mercurial
15 # with early Mercurial
16 #
16 #
17 # We don't have to do:
17 # We don't have to do:
18 # - make perf command for recent feature work correctly with early
18 # - make perf command for recent feature work correctly with early
19 # Mercurial
19 # Mercurial
20
20
21 from __future__ import absolute_import
21 from __future__ import absolute_import
22 import contextlib
22 import contextlib
23 import functools
23 import functools
24 import gc
24 import gc
25 import os
25 import os
26 import random
26 import random
27 import shutil
27 import struct
28 import struct
28 import sys
29 import sys
30 import tempfile
29 import threading
31 import threading
30 import time
32 import time
31 from mercurial import (
33 from mercurial import (
32 changegroup,
34 changegroup,
33 cmdutil,
35 cmdutil,
34 commands,
36 commands,
35 copies,
37 copies,
36 error,
38 error,
37 extensions,
39 extensions,
40 hg,
38 mdiff,
41 mdiff,
39 merge,
42 merge,
40 revlog,
43 revlog,
41 util,
44 util,
42 )
45 )
43
46
44 # for "historical portability":
47 # for "historical portability":
45 # try to import modules separately (in dict order), and ignore
48 # try to import modules separately (in dict order), and ignore
46 # failure, because these aren't available with early Mercurial
49 # failure, because these aren't available with early Mercurial
47 try:
50 try:
48 from mercurial import branchmap # since 2.5 (or bcee63733aad)
51 from mercurial import branchmap # since 2.5 (or bcee63733aad)
49 except ImportError:
52 except ImportError:
50 pass
53 pass
51 try:
54 try:
52 from mercurial import obsolete # since 2.3 (or ad0d6c2b3279)
55 from mercurial import obsolete # since 2.3 (or ad0d6c2b3279)
53 except ImportError:
56 except ImportError:
54 pass
57 pass
55 try:
58 try:
56 from mercurial import registrar # since 3.7 (or 37d50250b696)
59 from mercurial import registrar # since 3.7 (or 37d50250b696)
57 dir(registrar) # forcibly load it
60 dir(registrar) # forcibly load it
58 except ImportError:
61 except ImportError:
59 registrar = None
62 registrar = None
60 try:
63 try:
61 from mercurial import repoview # since 2.5 (or 3a6ddacb7198)
64 from mercurial import repoview # since 2.5 (or 3a6ddacb7198)
62 except ImportError:
65 except ImportError:
63 pass
66 pass
64 try:
67 try:
65 from mercurial import scmutil # since 1.9 (or 8b252e826c68)
68 from mercurial import scmutil # since 1.9 (or 8b252e826c68)
66 except ImportError:
69 except ImportError:
67 pass
70 pass
71 try:
72 from mercurial import setdiscovery # since 1.9 (or cb98fed52495)
73 except ImportError:
74 pass
75
68
76
69 def identity(a):
77 def identity(a):
70 return a
78 return a
71
79
72 try:
80 try:
73 from mercurial import pycompat
81 from mercurial import pycompat
74 getargspec = pycompat.getargspec # added to module after 4.5
82 getargspec = pycompat.getargspec # added to module after 4.5
75 _byteskwargs = pycompat.byteskwargs # since 4.1 (or fbc3f73dc802)
83 _byteskwargs = pycompat.byteskwargs # since 4.1 (or fbc3f73dc802)
76 _sysstr = pycompat.sysstr # since 4.0 (or 2219f4f82ede)
84 _sysstr = pycompat.sysstr # since 4.0 (or 2219f4f82ede)
77 _xrange = pycompat.xrange # since 4.8 (or 7eba8f83129b)
85 _xrange = pycompat.xrange # since 4.8 (or 7eba8f83129b)
78 fsencode = pycompat.fsencode # since 3.9 (or f4a5e0e86a7e)
86 fsencode = pycompat.fsencode # since 3.9 (or f4a5e0e86a7e)
79 if pycompat.ispy3:
87 if pycompat.ispy3:
80 _maxint = sys.maxsize # per py3 docs for replacing maxint
88 _maxint = sys.maxsize # per py3 docs for replacing maxint
81 else:
89 else:
82 _maxint = sys.maxint
90 _maxint = sys.maxint
83 except (ImportError, AttributeError):
91 except (ImportError, AttributeError):
84 import inspect
92 import inspect
85 getargspec = inspect.getargspec
93 getargspec = inspect.getargspec
86 _byteskwargs = identity
94 _byteskwargs = identity
87 fsencode = identity # no py3 support
95 fsencode = identity # no py3 support
88 _maxint = sys.maxint # no py3 support
96 _maxint = sys.maxint # no py3 support
89 _sysstr = lambda x: x # no py3 support
97 _sysstr = lambda x: x # no py3 support
90 _xrange = xrange
98 _xrange = xrange
91
99
92 try:
100 try:
93 # 4.7+
101 # 4.7+
94 queue = pycompat.queue.Queue
102 queue = pycompat.queue.Queue
95 except (AttributeError, ImportError):
103 except (AttributeError, ImportError):
96 # <4.7.
104 # <4.7.
97 try:
105 try:
98 queue = pycompat.queue
106 queue = pycompat.queue
99 except (AttributeError, ImportError):
107 except (AttributeError, ImportError):
100 queue = util.queue
108 queue = util.queue
101
109
102 try:
110 try:
103 from mercurial import logcmdutil
111 from mercurial import logcmdutil
104 makelogtemplater = logcmdutil.maketemplater
112 makelogtemplater = logcmdutil.maketemplater
105 except (AttributeError, ImportError):
113 except (AttributeError, ImportError):
106 try:
114 try:
107 makelogtemplater = cmdutil.makelogtemplater
115 makelogtemplater = cmdutil.makelogtemplater
108 except (AttributeError, ImportError):
116 except (AttributeError, ImportError):
109 makelogtemplater = None
117 makelogtemplater = None
110
118
111 # for "historical portability":
119 # for "historical portability":
112 # define util.safehasattr forcibly, because util.safehasattr has been
120 # define util.safehasattr forcibly, because util.safehasattr has been
113 # available since 1.9.3 (or 94b200a11cf7)
121 # available since 1.9.3 (or 94b200a11cf7)
114 _undefined = object()
122 _undefined = object()
115 def safehasattr(thing, attr):
123 def safehasattr(thing, attr):
116 return getattr(thing, _sysstr(attr), _undefined) is not _undefined
124 return getattr(thing, _sysstr(attr), _undefined) is not _undefined
117 setattr(util, 'safehasattr', safehasattr)
125 setattr(util, 'safehasattr', safehasattr)
118
126
119 # for "historical portability":
127 # for "historical portability":
120 # define util.timer forcibly, because util.timer has been available
128 # define util.timer forcibly, because util.timer has been available
121 # since ae5d60bb70c9
129 # since ae5d60bb70c9
122 if safehasattr(time, 'perf_counter'):
130 if safehasattr(time, 'perf_counter'):
123 util.timer = time.perf_counter
131 util.timer = time.perf_counter
124 elif os.name == b'nt':
132 elif os.name == b'nt':
125 util.timer = time.clock
133 util.timer = time.clock
126 else:
134 else:
127 util.timer = time.time
135 util.timer = time.time
128
136
129 # for "historical portability":
137 # for "historical portability":
130 # use locally defined empty option list, if formatteropts isn't
138 # use locally defined empty option list, if formatteropts isn't
131 # available, because commands.formatteropts has been available since
139 # available, because commands.formatteropts has been available since
132 # 3.2 (or 7a7eed5176a4), even though formatting itself has been
140 # 3.2 (or 7a7eed5176a4), even though formatting itself has been
133 # available since 2.2 (or ae5f92e154d3)
141 # available since 2.2 (or ae5f92e154d3)
134 formatteropts = getattr(cmdutil, "formatteropts",
142 formatteropts = getattr(cmdutil, "formatteropts",
135 getattr(commands, "formatteropts", []))
143 getattr(commands, "formatteropts", []))
136
144
137 # for "historical portability":
145 # for "historical portability":
138 # use locally defined option list, if debugrevlogopts isn't available,
146 # use locally defined option list, if debugrevlogopts isn't available,
139 # because commands.debugrevlogopts has been available since 3.7 (or
147 # because commands.debugrevlogopts has been available since 3.7 (or
140 # 5606f7d0d063), even though cmdutil.openrevlog() has been available
148 # 5606f7d0d063), even though cmdutil.openrevlog() has been available
141 # since 1.9 (or a79fea6b3e77).
149 # since 1.9 (or a79fea6b3e77).
142 revlogopts = getattr(cmdutil, "debugrevlogopts",
150 revlogopts = getattr(cmdutil, "debugrevlogopts",
143 getattr(commands, "debugrevlogopts", [
151 getattr(commands, "debugrevlogopts", [
144 (b'c', b'changelog', False, (b'open changelog')),
152 (b'c', b'changelog', False, (b'open changelog')),
145 (b'm', b'manifest', False, (b'open manifest')),
153 (b'm', b'manifest', False, (b'open manifest')),
146 (b'', b'dir', False, (b'open directory manifest')),
154 (b'', b'dir', False, (b'open directory manifest')),
147 ]))
155 ]))
148
156
149 cmdtable = {}
157 cmdtable = {}
150
158
151 # for "historical portability":
159 # for "historical portability":
152 # define parsealiases locally, because cmdutil.parsealiases has been
160 # define parsealiases locally, because cmdutil.parsealiases has been
153 # available since 1.5 (or 6252852b4332)
161 # available since 1.5 (or 6252852b4332)
154 def parsealiases(cmd):
162 def parsealiases(cmd):
155 return cmd.split(b"|")
163 return cmd.split(b"|")
156
164
157 if safehasattr(registrar, 'command'):
165 if safehasattr(registrar, 'command'):
158 command = registrar.command(cmdtable)
166 command = registrar.command(cmdtable)
159 elif safehasattr(cmdutil, 'command'):
167 elif safehasattr(cmdutil, 'command'):
160 command = cmdutil.command(cmdtable)
168 command = cmdutil.command(cmdtable)
161 if b'norepo' not in getargspec(command).args:
169 if b'norepo' not in getargspec(command).args:
162 # for "historical portability":
170 # for "historical portability":
163 # wrap original cmdutil.command, because "norepo" option has
171 # wrap original cmdutil.command, because "norepo" option has
164 # been available since 3.1 (or 75a96326cecb)
172 # been available since 3.1 (or 75a96326cecb)
165 _command = command
173 _command = command
166 def command(name, options=(), synopsis=None, norepo=False):
174 def command(name, options=(), synopsis=None, norepo=False):
167 if norepo:
175 if norepo:
168 commands.norepo += b' %s' % b' '.join(parsealiases(name))
176 commands.norepo += b' %s' % b' '.join(parsealiases(name))
169 return _command(name, list(options), synopsis)
177 return _command(name, list(options), synopsis)
170 else:
178 else:
171 # for "historical portability":
179 # for "historical portability":
172 # define "@command" annotation locally, because cmdutil.command
180 # define "@command" annotation locally, because cmdutil.command
173 # has been available since 1.9 (or 2daa5179e73f)
181 # has been available since 1.9 (or 2daa5179e73f)
174 def command(name, options=(), synopsis=None, norepo=False):
182 def command(name, options=(), synopsis=None, norepo=False):
175 def decorator(func):
183 def decorator(func):
176 if synopsis:
184 if synopsis:
177 cmdtable[name] = func, list(options), synopsis
185 cmdtable[name] = func, list(options), synopsis
178 else:
186 else:
179 cmdtable[name] = func, list(options)
187 cmdtable[name] = func, list(options)
180 if norepo:
188 if norepo:
181 commands.norepo += b' %s' % b' '.join(parsealiases(name))
189 commands.norepo += b' %s' % b' '.join(parsealiases(name))
182 return func
190 return func
183 return decorator
191 return decorator
184
192
185 try:
193 try:
186 import mercurial.registrar
194 import mercurial.registrar
187 import mercurial.configitems
195 import mercurial.configitems
188 configtable = {}
196 configtable = {}
189 configitem = mercurial.registrar.configitem(configtable)
197 configitem = mercurial.registrar.configitem(configtable)
190 configitem(b'perf', b'presleep',
198 configitem(b'perf', b'presleep',
191 default=mercurial.configitems.dynamicdefault,
199 default=mercurial.configitems.dynamicdefault,
192 )
200 )
193 configitem(b'perf', b'stub',
201 configitem(b'perf', b'stub',
194 default=mercurial.configitems.dynamicdefault,
202 default=mercurial.configitems.dynamicdefault,
195 )
203 )
196 configitem(b'perf', b'parentscount',
204 configitem(b'perf', b'parentscount',
197 default=mercurial.configitems.dynamicdefault,
205 default=mercurial.configitems.dynamicdefault,
198 )
206 )
199 configitem(b'perf', b'all-timing',
207 configitem(b'perf', b'all-timing',
200 default=mercurial.configitems.dynamicdefault,
208 default=mercurial.configitems.dynamicdefault,
201 )
209 )
202 except (ImportError, AttributeError):
210 except (ImportError, AttributeError):
203 pass
211 pass
204
212
205 def getlen(ui):
213 def getlen(ui):
206 if ui.configbool(b"perf", b"stub", False):
214 if ui.configbool(b"perf", b"stub", False):
207 return lambda x: 1
215 return lambda x: 1
208 return len
216 return len
209
217
210 def gettimer(ui, opts=None):
218 def gettimer(ui, opts=None):
211 """return a timer function and formatter: (timer, formatter)
219 """return a timer function and formatter: (timer, formatter)
212
220
213 This function exists to gather the creation of formatter in a single
221 This function exists to gather the creation of formatter in a single
214 place instead of duplicating it in all performance commands."""
222 place instead of duplicating it in all performance commands."""
215
223
216 # enforce an idle period before execution to counteract power management
224 # enforce an idle period before execution to counteract power management
217 # experimental config: perf.presleep
225 # experimental config: perf.presleep
218 time.sleep(getint(ui, b"perf", b"presleep", 1))
226 time.sleep(getint(ui, b"perf", b"presleep", 1))
219
227
220 if opts is None:
228 if opts is None:
221 opts = {}
229 opts = {}
222 # redirect all to stderr unless buffer api is in use
230 # redirect all to stderr unless buffer api is in use
223 if not ui._buffers:
231 if not ui._buffers:
224 ui = ui.copy()
232 ui = ui.copy()
225 uifout = safeattrsetter(ui, b'fout', ignoremissing=True)
233 uifout = safeattrsetter(ui, b'fout', ignoremissing=True)
226 if uifout:
234 if uifout:
227 # for "historical portability":
235 # for "historical portability":
228 # ui.fout/ferr have been available since 1.9 (or 4e1ccd4c2b6d)
236 # ui.fout/ferr have been available since 1.9 (or 4e1ccd4c2b6d)
229 uifout.set(ui.ferr)
237 uifout.set(ui.ferr)
230
238
231 # get a formatter
239 # get a formatter
232 uiformatter = getattr(ui, 'formatter', None)
240 uiformatter = getattr(ui, 'formatter', None)
233 if uiformatter:
241 if uiformatter:
234 fm = uiformatter(b'perf', opts)
242 fm = uiformatter(b'perf', opts)
235 else:
243 else:
236 # for "historical portability":
244 # for "historical portability":
237 # define formatter locally, because ui.formatter has been
245 # define formatter locally, because ui.formatter has been
238 # available since 2.2 (or ae5f92e154d3)
246 # available since 2.2 (or ae5f92e154d3)
239 from mercurial import node
247 from mercurial import node
240 class defaultformatter(object):
248 class defaultformatter(object):
241 """Minimized composition of baseformatter and plainformatter
249 """Minimized composition of baseformatter and plainformatter
242 """
250 """
243 def __init__(self, ui, topic, opts):
251 def __init__(self, ui, topic, opts):
244 self._ui = ui
252 self._ui = ui
245 if ui.debugflag:
253 if ui.debugflag:
246 self.hexfunc = node.hex
254 self.hexfunc = node.hex
247 else:
255 else:
248 self.hexfunc = node.short
256 self.hexfunc = node.short
249 def __nonzero__(self):
257 def __nonzero__(self):
250 return False
258 return False
251 __bool__ = __nonzero__
259 __bool__ = __nonzero__
252 def startitem(self):
260 def startitem(self):
253 pass
261 pass
254 def data(self, **data):
262 def data(self, **data):
255 pass
263 pass
256 def write(self, fields, deftext, *fielddata, **opts):
264 def write(self, fields, deftext, *fielddata, **opts):
257 self._ui.write(deftext % fielddata, **opts)
265 self._ui.write(deftext % fielddata, **opts)
258 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
266 def condwrite(self, cond, fields, deftext, *fielddata, **opts):
259 if cond:
267 if cond:
260 self._ui.write(deftext % fielddata, **opts)
268 self._ui.write(deftext % fielddata, **opts)
261 def plain(self, text, **opts):
269 def plain(self, text, **opts):
262 self._ui.write(text, **opts)
270 self._ui.write(text, **opts)
263 def end(self):
271 def end(self):
264 pass
272 pass
265 fm = defaultformatter(ui, b'perf', opts)
273 fm = defaultformatter(ui, b'perf', opts)
266
274
267 # stub function, runs code only once instead of in a loop
275 # stub function, runs code only once instead of in a loop
268 # experimental config: perf.stub
276 # experimental config: perf.stub
269 if ui.configbool(b"perf", b"stub", False):
277 if ui.configbool(b"perf", b"stub", False):
270 return functools.partial(stub_timer, fm), fm
278 return functools.partial(stub_timer, fm), fm
271
279
272 # experimental config: perf.all-timing
280 # experimental config: perf.all-timing
273 displayall = ui.configbool(b"perf", b"all-timing", False)
281 displayall = ui.configbool(b"perf", b"all-timing", False)
274 return functools.partial(_timer, fm, displayall=displayall), fm
282 return functools.partial(_timer, fm, displayall=displayall), fm
275
283
276 def stub_timer(fm, func, title=None):
284 def stub_timer(fm, func, setup=None, title=None):
285 if setup is not None:
286 setup()
277 func()
287 func()
278
288
279 @contextlib.contextmanager
289 @contextlib.contextmanager
280 def timeone():
290 def timeone():
281 r = []
291 r = []
282 ostart = os.times()
292 ostart = os.times()
283 cstart = util.timer()
293 cstart = util.timer()
284 yield r
294 yield r
285 cstop = util.timer()
295 cstop = util.timer()
286 ostop = os.times()
296 ostop = os.times()
287 a, b = ostart, ostop
297 a, b = ostart, ostop
288 r.append((cstop - cstart, b[0] - a[0], b[1]-a[1]))
298 r.append((cstop - cstart, b[0] - a[0], b[1]-a[1]))
289
299
290 def _timer(fm, func, title=None, displayall=False):
300 def _timer(fm, func, setup=None, title=None, displayall=False):
291 gc.collect()
301 gc.collect()
292 results = []
302 results = []
293 begin = util.timer()
303 begin = util.timer()
294 count = 0
304 count = 0
295 while True:
305 while True:
306 if setup is not None:
307 setup()
296 with timeone() as item:
308 with timeone() as item:
297 r = func()
309 r = func()
298 count += 1
310 count += 1
299 results.append(item[0])
311 results.append(item[0])
300 cstop = util.timer()
312 cstop = util.timer()
301 if cstop - begin > 3 and count >= 100:
313 if cstop - begin > 3 and count >= 100:
302 break
314 break
303 if cstop - begin > 10 and count >= 3:
315 if cstop - begin > 10 and count >= 3:
304 break
316 break
305
317
306 formatone(fm, results, title=title, result=r,
318 formatone(fm, results, title=title, result=r,
307 displayall=displayall)
319 displayall=displayall)
308
320
309 def formatone(fm, timings, title=None, result=None, displayall=False):
321 def formatone(fm, timings, title=None, result=None, displayall=False):
310
322
311 count = len(timings)
323 count = len(timings)
312
324
313 fm.startitem()
325 fm.startitem()
314
326
315 if title:
327 if title:
316 fm.write(b'title', b'! %s\n', title)
328 fm.write(b'title', b'! %s\n', title)
317 if result:
329 if result:
318 fm.write(b'result', b'! result: %s\n', result)
330 fm.write(b'result', b'! result: %s\n', result)
319 def display(role, entry):
331 def display(role, entry):
320 prefix = b''
332 prefix = b''
321 if role != b'best':
333 if role != b'best':
322 prefix = b'%s.' % role
334 prefix = b'%s.' % role
323 fm.plain(b'!')
335 fm.plain(b'!')
324 fm.write(prefix + b'wall', b' wall %f', entry[0])
336 fm.write(prefix + b'wall', b' wall %f', entry[0])
325 fm.write(prefix + b'comb', b' comb %f', entry[1] + entry[2])
337 fm.write(prefix + b'comb', b' comb %f', entry[1] + entry[2])
326 fm.write(prefix + b'user', b' user %f', entry[1])
338 fm.write(prefix + b'user', b' user %f', entry[1])
327 fm.write(prefix + b'sys', b' sys %f', entry[2])
339 fm.write(prefix + b'sys', b' sys %f', entry[2])
328 fm.write(prefix + b'count', b' (%s of %%d)' % role, count)
340 fm.write(prefix + b'count', b' (%s of %%d)' % role, count)
329 fm.plain(b'\n')
341 fm.plain(b'\n')
330 timings.sort()
342 timings.sort()
331 min_val = timings[0]
343 min_val = timings[0]
332 display(b'best', min_val)
344 display(b'best', min_val)
333 if displayall:
345 if displayall:
334 max_val = timings[-1]
346 max_val = timings[-1]
335 display(b'max', max_val)
347 display(b'max', max_val)
336 avg = tuple([sum(x) / count for x in zip(*timings)])
348 avg = tuple([sum(x) / count for x in zip(*timings)])
337 display(b'avg', avg)
349 display(b'avg', avg)
338 median = timings[len(timings) // 2]
350 median = timings[len(timings) // 2]
339 display(b'median', median)
351 display(b'median', median)
340
352
341 # utilities for historical portability
353 # utilities for historical portability
342
354
343 def getint(ui, section, name, default):
355 def getint(ui, section, name, default):
344 # for "historical portability":
356 # for "historical portability":
345 # ui.configint has been available since 1.9 (or fa2b596db182)
357 # ui.configint has been available since 1.9 (or fa2b596db182)
346 v = ui.config(section, name, None)
358 v = ui.config(section, name, None)
347 if v is None:
359 if v is None:
348 return default
360 return default
349 try:
361 try:
350 return int(v)
362 return int(v)
351 except ValueError:
363 except ValueError:
352 raise error.ConfigError((b"%s.%s is not an integer ('%s')")
364 raise error.ConfigError((b"%s.%s is not an integer ('%s')")
353 % (section, name, v))
365 % (section, name, v))
354
366
355 def safeattrsetter(obj, name, ignoremissing=False):
367 def safeattrsetter(obj, name, ignoremissing=False):
356 """Ensure that 'obj' has 'name' attribute before subsequent setattr
368 """Ensure that 'obj' has 'name' attribute before subsequent setattr
357
369
358 This function is aborted, if 'obj' doesn't have 'name' attribute
370 This function is aborted, if 'obj' doesn't have 'name' attribute
359 at runtime. This avoids overlooking removal of an attribute, which
371 at runtime. This avoids overlooking removal of an attribute, which
360 breaks assumption of performance measurement, in the future.
372 breaks assumption of performance measurement, in the future.
361
373
362 This function returns the object to (1) assign a new value, and
374 This function returns the object to (1) assign a new value, and
363 (2) restore an original value to the attribute.
375 (2) restore an original value to the attribute.
364
376
365 If 'ignoremissing' is true, missing 'name' attribute doesn't cause
377 If 'ignoremissing' is true, missing 'name' attribute doesn't cause
366 abortion, and this function returns None. This is useful to
378 abortion, and this function returns None. This is useful to
367 examine an attribute, which isn't ensured in all Mercurial
379 examine an attribute, which isn't ensured in all Mercurial
368 versions.
380 versions.
369 """
381 """
370 if not util.safehasattr(obj, name):
382 if not util.safehasattr(obj, name):
371 if ignoremissing:
383 if ignoremissing:
372 return None
384 return None
373 raise error.Abort((b"missing attribute %s of %s might break assumption"
385 raise error.Abort((b"missing attribute %s of %s might break assumption"
374 b" of performance measurement") % (name, obj))
386 b" of performance measurement") % (name, obj))
375
387
376 origvalue = getattr(obj, _sysstr(name))
388 origvalue = getattr(obj, _sysstr(name))
377 class attrutil(object):
389 class attrutil(object):
378 def set(self, newvalue):
390 def set(self, newvalue):
379 setattr(obj, _sysstr(name), newvalue)
391 setattr(obj, _sysstr(name), newvalue)
380 def restore(self):
392 def restore(self):
381 setattr(obj, _sysstr(name), origvalue)
393 setattr(obj, _sysstr(name), origvalue)
382
394
383 return attrutil()
395 return attrutil()
384
396
385 # utilities to examine each internal API changes
397 # utilities to examine each internal API changes
386
398
387 def getbranchmapsubsettable():
399 def getbranchmapsubsettable():
388 # for "historical portability":
400 # for "historical portability":
389 # subsettable is defined in:
401 # subsettable is defined in:
390 # - branchmap since 2.9 (or 175c6fd8cacc)
402 # - branchmap since 2.9 (or 175c6fd8cacc)
391 # - repoview since 2.5 (or 59a9f18d4587)
403 # - repoview since 2.5 (or 59a9f18d4587)
392 for mod in (branchmap, repoview):
404 for mod in (branchmap, repoview):
393 subsettable = getattr(mod, 'subsettable', None)
405 subsettable = getattr(mod, 'subsettable', None)
394 if subsettable:
406 if subsettable:
395 return subsettable
407 return subsettable
396
408
397 # bisecting in bcee63733aad::59a9f18d4587 can reach here (both
409 # bisecting in bcee63733aad::59a9f18d4587 can reach here (both
398 # branchmap and repoview modules exist, but subsettable attribute
410 # branchmap and repoview modules exist, but subsettable attribute
399 # doesn't)
411 # doesn't)
400 raise error.Abort((b"perfbranchmap not available with this Mercurial"),
412 raise error.Abort((b"perfbranchmap not available with this Mercurial"),
401 hint=b"use 2.5 or later")
413 hint=b"use 2.5 or later")
402
414
403 def getsvfs(repo):
415 def getsvfs(repo):
404 """Return appropriate object to access files under .hg/store
416 """Return appropriate object to access files under .hg/store
405 """
417 """
406 # for "historical portability":
418 # for "historical portability":
407 # repo.svfs has been available since 2.3 (or 7034365089bf)
419 # repo.svfs has been available since 2.3 (or 7034365089bf)
408 svfs = getattr(repo, 'svfs', None)
420 svfs = getattr(repo, 'svfs', None)
409 if svfs:
421 if svfs:
410 return svfs
422 return svfs
411 else:
423 else:
412 return getattr(repo, 'sopener')
424 return getattr(repo, 'sopener')
413
425
414 def getvfs(repo):
426 def getvfs(repo):
415 """Return appropriate object to access files under .hg
427 """Return appropriate object to access files under .hg
416 """
428 """
417 # for "historical portability":
429 # for "historical portability":
418 # repo.vfs has been available since 2.3 (or 7034365089bf)
430 # repo.vfs has been available since 2.3 (or 7034365089bf)
419 vfs = getattr(repo, 'vfs', None)
431 vfs = getattr(repo, 'vfs', None)
420 if vfs:
432 if vfs:
421 return vfs
433 return vfs
422 else:
434 else:
423 return getattr(repo, 'opener')
435 return getattr(repo, 'opener')
424
436
425 def repocleartagscachefunc(repo):
437 def repocleartagscachefunc(repo):
426 """Return the function to clear tags cache according to repo internal API
438 """Return the function to clear tags cache according to repo internal API
427 """
439 """
428 if util.safehasattr(repo, b'_tagscache'): # since 2.0 (or 9dca7653b525)
440 if util.safehasattr(repo, b'_tagscache'): # since 2.0 (or 9dca7653b525)
429 # in this case, setattr(repo, '_tagscache', None) or so isn't
441 # in this case, setattr(repo, '_tagscache', None) or so isn't
430 # correct way to clear tags cache, because existing code paths
442 # correct way to clear tags cache, because existing code paths
431 # expect _tagscache to be a structured object.
443 # expect _tagscache to be a structured object.
432 def clearcache():
444 def clearcache():
433 # _tagscache has been filteredpropertycache since 2.5 (or
445 # _tagscache has been filteredpropertycache since 2.5 (or
434 # 98c867ac1330), and delattr() can't work in such case
446 # 98c867ac1330), and delattr() can't work in such case
435 if b'_tagscache' in vars(repo):
447 if b'_tagscache' in vars(repo):
436 del repo.__dict__[b'_tagscache']
448 del repo.__dict__[b'_tagscache']
437 return clearcache
449 return clearcache
438
450
439 repotags = safeattrsetter(repo, b'_tags', ignoremissing=True)
451 repotags = safeattrsetter(repo, b'_tags', ignoremissing=True)
440 if repotags: # since 1.4 (or 5614a628d173)
452 if repotags: # since 1.4 (or 5614a628d173)
441 return lambda : repotags.set(None)
453 return lambda : repotags.set(None)
442
454
443 repotagscache = safeattrsetter(repo, b'tagscache', ignoremissing=True)
455 repotagscache = safeattrsetter(repo, b'tagscache', ignoremissing=True)
444 if repotagscache: # since 0.6 (or d7df759d0e97)
456 if repotagscache: # since 0.6 (or d7df759d0e97)
445 return lambda : repotagscache.set(None)
457 return lambda : repotagscache.set(None)
446
458
447 # Mercurial earlier than 0.6 (or d7df759d0e97) logically reaches
459 # Mercurial earlier than 0.6 (or d7df759d0e97) logically reaches
448 # this point, but it isn't so problematic, because:
460 # this point, but it isn't so problematic, because:
449 # - repo.tags of such Mercurial isn't "callable", and repo.tags()
461 # - repo.tags of such Mercurial isn't "callable", and repo.tags()
450 # in perftags() causes failure soon
462 # in perftags() causes failure soon
451 # - perf.py itself has been available since 1.1 (or eb240755386d)
463 # - perf.py itself has been available since 1.1 (or eb240755386d)
452 raise error.Abort((b"tags API of this hg command is unknown"))
464 raise error.Abort((b"tags API of this hg command is unknown"))
453
465
454 # utilities to clear cache
466 # utilities to clear cache
455
467
456 def clearfilecache(repo, attrname):
468 def clearfilecache(obj, attrname):
457 unfi = repo.unfiltered()
469 unfiltered = getattr(obj, 'unfiltered', None)
458 if attrname in vars(unfi):
470 if unfiltered is not None:
459 delattr(unfi, attrname)
471 obj = obj.unfiltered()
460 unfi._filecache.pop(attrname, None)
472 if attrname in vars(obj):
473 delattr(obj, attrname)
474 obj._filecache.pop(attrname, None)
475
476 def clearchangelog(repo):
477 if repo is not repo.unfiltered():
478 object.__setattr__(repo, r'_clcachekey', None)
479 object.__setattr__(repo, r'_clcache', None)
480 clearfilecache(repo.unfiltered(), 'changelog')
461
481
462 # perf commands
482 # perf commands
463
483
464 @command(b'perfwalk', formatteropts)
484 @command(b'perfwalk', formatteropts)
465 def perfwalk(ui, repo, *pats, **opts):
485 def perfwalk(ui, repo, *pats, **opts):
466 opts = _byteskwargs(opts)
486 opts = _byteskwargs(opts)
467 timer, fm = gettimer(ui, opts)
487 timer, fm = gettimer(ui, opts)
468 m = scmutil.match(repo[None], pats, {})
488 m = scmutil.match(repo[None], pats, {})
469 timer(lambda: len(list(repo.dirstate.walk(m, subrepos=[], unknown=True,
489 timer(lambda: len(list(repo.dirstate.walk(m, subrepos=[], unknown=True,
470 ignored=False))))
490 ignored=False))))
471 fm.end()
491 fm.end()
472
492
473 @command(b'perfannotate', formatteropts)
493 @command(b'perfannotate', formatteropts)
474 def perfannotate(ui, repo, f, **opts):
494 def perfannotate(ui, repo, f, **opts):
475 opts = _byteskwargs(opts)
495 opts = _byteskwargs(opts)
476 timer, fm = gettimer(ui, opts)
496 timer, fm = gettimer(ui, opts)
477 fc = repo[b'.'][f]
497 fc = repo[b'.'][f]
478 timer(lambda: len(fc.annotate(True)))
498 timer(lambda: len(fc.annotate(True)))
479 fm.end()
499 fm.end()
480
500
481 @command(b'perfstatus',
501 @command(b'perfstatus',
482 [(b'u', b'unknown', False,
502 [(b'u', b'unknown', False,
483 b'ask status to look for unknown files')] + formatteropts)
503 b'ask status to look for unknown files')] + formatteropts)
484 def perfstatus(ui, repo, **opts):
504 def perfstatus(ui, repo, **opts):
485 opts = _byteskwargs(opts)
505 opts = _byteskwargs(opts)
486 #m = match.always(repo.root, repo.getcwd())
506 #m = match.always(repo.root, repo.getcwd())
487 #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
507 #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
488 # False))))
508 # False))))
489 timer, fm = gettimer(ui, opts)
509 timer, fm = gettimer(ui, opts)
490 timer(lambda: sum(map(len, repo.status(unknown=opts[b'unknown']))))
510 timer(lambda: sum(map(len, repo.status(unknown=opts[b'unknown']))))
491 fm.end()
511 fm.end()
492
512
493 @command(b'perfaddremove', formatteropts)
513 @command(b'perfaddremove', formatteropts)
494 def perfaddremove(ui, repo, **opts):
514 def perfaddremove(ui, repo, **opts):
495 opts = _byteskwargs(opts)
515 opts = _byteskwargs(opts)
496 timer, fm = gettimer(ui, opts)
516 timer, fm = gettimer(ui, opts)
497 try:
517 try:
498 oldquiet = repo.ui.quiet
518 oldquiet = repo.ui.quiet
499 repo.ui.quiet = True
519 repo.ui.quiet = True
500 matcher = scmutil.match(repo[None])
520 matcher = scmutil.match(repo[None])
501 opts[b'dry_run'] = True
521 opts[b'dry_run'] = True
502 timer(lambda: scmutil.addremove(repo, matcher, b"", opts))
522 timer(lambda: scmutil.addremove(repo, matcher, b"", opts))
503 finally:
523 finally:
504 repo.ui.quiet = oldquiet
524 repo.ui.quiet = oldquiet
505 fm.end()
525 fm.end()
506
526
507 def clearcaches(cl):
527 def clearcaches(cl):
508 # behave somewhat consistently across internal API changes
528 # behave somewhat consistently across internal API changes
509 if util.safehasattr(cl, b'clearcaches'):
529 if util.safehasattr(cl, b'clearcaches'):
510 cl.clearcaches()
530 cl.clearcaches()
511 elif util.safehasattr(cl, b'_nodecache'):
531 elif util.safehasattr(cl, b'_nodecache'):
512 from mercurial.node import nullid, nullrev
532 from mercurial.node import nullid, nullrev
513 cl._nodecache = {nullid: nullrev}
533 cl._nodecache = {nullid: nullrev}
514 cl._nodepos = None
534 cl._nodepos = None
515
535
516 @command(b'perfheads', formatteropts)
536 @command(b'perfheads', formatteropts)
517 def perfheads(ui, repo, **opts):
537 def perfheads(ui, repo, **opts):
518 opts = _byteskwargs(opts)
538 opts = _byteskwargs(opts)
519 timer, fm = gettimer(ui, opts)
539 timer, fm = gettimer(ui, opts)
520 cl = repo.changelog
540 cl = repo.changelog
521 def d():
541 def d():
522 len(cl.headrevs())
542 len(cl.headrevs())
523 clearcaches(cl)
543 clearcaches(cl)
524 timer(d)
544 timer(d)
525 fm.end()
545 fm.end()
526
546
527 @command(b'perftags', formatteropts)
547 @command(b'perftags', formatteropts+
548 [
549 (b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
550 ])
528 def perftags(ui, repo, **opts):
551 def perftags(ui, repo, **opts):
529 import mercurial.changelog
530 import mercurial.manifest
531
532 opts = _byteskwargs(opts)
552 opts = _byteskwargs(opts)
533 timer, fm = gettimer(ui, opts)
553 timer, fm = gettimer(ui, opts)
534 svfs = getsvfs(repo)
535 repocleartagscache = repocleartagscachefunc(repo)
554 repocleartagscache = repocleartagscachefunc(repo)
555 clearrevlogs = opts[b'clear_revlogs']
556 def s():
557 if clearrevlogs:
558 clearchangelog(repo)
559 clearfilecache(repo.unfiltered(), 'manifest')
560 repocleartagscache()
536 def t():
561 def t():
537 repo.changelog = mercurial.changelog.changelog(svfs)
538 rootmanifest = mercurial.manifest.manifestrevlog(svfs)
539 repo.manifestlog = mercurial.manifest.manifestlog(svfs, repo,
540 rootmanifest)
541 repocleartagscache()
542 return len(repo.tags())
562 return len(repo.tags())
543 timer(t)
563 timer(t, setup=s)
544 fm.end()
564 fm.end()
545
565
546 @command(b'perfancestors', formatteropts)
566 @command(b'perfancestors', formatteropts)
547 def perfancestors(ui, repo, **opts):
567 def perfancestors(ui, repo, **opts):
548 opts = _byteskwargs(opts)
568 opts = _byteskwargs(opts)
549 timer, fm = gettimer(ui, opts)
569 timer, fm = gettimer(ui, opts)
550 heads = repo.changelog.headrevs()
570 heads = repo.changelog.headrevs()
551 def d():
571 def d():
552 for a in repo.changelog.ancestors(heads):
572 for a in repo.changelog.ancestors(heads):
553 pass
573 pass
554 timer(d)
574 timer(d)
555 fm.end()
575 fm.end()
556
576
557 @command(b'perfancestorset', formatteropts)
577 @command(b'perfancestorset', formatteropts)
558 def perfancestorset(ui, repo, revset, **opts):
578 def perfancestorset(ui, repo, revset, **opts):
559 opts = _byteskwargs(opts)
579 opts = _byteskwargs(opts)
560 timer, fm = gettimer(ui, opts)
580 timer, fm = gettimer(ui, opts)
561 revs = repo.revs(revset)
581 revs = repo.revs(revset)
562 heads = repo.changelog.headrevs()
582 heads = repo.changelog.headrevs()
563 def d():
583 def d():
564 s = repo.changelog.ancestors(heads)
584 s = repo.changelog.ancestors(heads)
565 for rev in revs:
585 for rev in revs:
566 rev in s
586 rev in s
567 timer(d)
587 timer(d)
568 fm.end()
588 fm.end()
569
589
570 @command(b'perfbookmarks', formatteropts)
590 @command(b'perfdiscovery', formatteropts, b'PATH')
591 def perfdiscovery(ui, repo, path, **opts):
592 """benchmark discovery between local repo and the peer at given path
593 """
594 repos = [repo, None]
595 timer, fm = gettimer(ui, opts)
596 path = ui.expandpath(path)
597
598 def s():
599 repos[1] = hg.peer(ui, opts, path)
600 def d():
601 setdiscovery.findcommonheads(ui, *repos)
602 timer(d, setup=s)
603 fm.end()
604
605 @command(b'perfbookmarks', formatteropts +
606 [
607 (b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
608 ])
571 def perfbookmarks(ui, repo, **opts):
609 def perfbookmarks(ui, repo, **opts):
572 """benchmark parsing bookmarks from disk to memory"""
610 """benchmark parsing bookmarks from disk to memory"""
573 opts = _byteskwargs(opts)
611 opts = _byteskwargs(opts)
574 timer, fm = gettimer(ui, opts)
612 timer, fm = gettimer(ui, opts)
575 def d():
613
614 clearrevlogs = opts[b'clear_revlogs']
615 def s():
616 if clearrevlogs:
617 clearchangelog(repo)
576 clearfilecache(repo, b'_bookmarks')
618 clearfilecache(repo, b'_bookmarks')
619 def d():
577 repo._bookmarks
620 repo._bookmarks
578 timer(d)
621 timer(d, setup=s)
579 fm.end()
622 fm.end()
580
623
581 @command(b'perfbundleread', formatteropts, b'BUNDLE')
624 @command(b'perfbundleread', formatteropts, b'BUNDLE')
582 def perfbundleread(ui, repo, bundlepath, **opts):
625 def perfbundleread(ui, repo, bundlepath, **opts):
583 """Benchmark reading of bundle files.
626 """Benchmark reading of bundle files.
584
627
585 This command is meant to isolate the I/O part of bundle reading as
628 This command is meant to isolate the I/O part of bundle reading as
586 much as possible.
629 much as possible.
587 """
630 """
588 from mercurial import (
631 from mercurial import (
589 bundle2,
632 bundle2,
590 exchange,
633 exchange,
591 streamclone,
634 streamclone,
592 )
635 )
593
636
594 opts = _byteskwargs(opts)
637 opts = _byteskwargs(opts)
595
638
596 def makebench(fn):
639 def makebench(fn):
597 def run():
640 def run():
598 with open(bundlepath, b'rb') as fh:
641 with open(bundlepath, b'rb') as fh:
599 bundle = exchange.readbundle(ui, fh, bundlepath)
642 bundle = exchange.readbundle(ui, fh, bundlepath)
600 fn(bundle)
643 fn(bundle)
601
644
602 return run
645 return run
603
646
604 def makereadnbytes(size):
647 def makereadnbytes(size):
605 def run():
648 def run():
606 with open(bundlepath, b'rb') as fh:
649 with open(bundlepath, b'rb') as fh:
607 bundle = exchange.readbundle(ui, fh, bundlepath)
650 bundle = exchange.readbundle(ui, fh, bundlepath)
608 while bundle.read(size):
651 while bundle.read(size):
609 pass
652 pass
610
653
611 return run
654 return run
612
655
613 def makestdioread(size):
656 def makestdioread(size):
614 def run():
657 def run():
615 with open(bundlepath, b'rb') as fh:
658 with open(bundlepath, b'rb') as fh:
616 while fh.read(size):
659 while fh.read(size):
617 pass
660 pass
618
661
619 return run
662 return run
620
663
621 # bundle1
664 # bundle1
622
665
623 def deltaiter(bundle):
666 def deltaiter(bundle):
624 for delta in bundle.deltaiter():
667 for delta in bundle.deltaiter():
625 pass
668 pass
626
669
627 def iterchunks(bundle):
670 def iterchunks(bundle):
628 for chunk in bundle.getchunks():
671 for chunk in bundle.getchunks():
629 pass
672 pass
630
673
631 # bundle2
674 # bundle2
632
675
633 def forwardchunks(bundle):
676 def forwardchunks(bundle):
634 for chunk in bundle._forwardchunks():
677 for chunk in bundle._forwardchunks():
635 pass
678 pass
636
679
637 def iterparts(bundle):
680 def iterparts(bundle):
638 for part in bundle.iterparts():
681 for part in bundle.iterparts():
639 pass
682 pass
640
683
641 def iterpartsseekable(bundle):
684 def iterpartsseekable(bundle):
642 for part in bundle.iterparts(seekable=True):
685 for part in bundle.iterparts(seekable=True):
643 pass
686 pass
644
687
645 def seek(bundle):
688 def seek(bundle):
646 for part in bundle.iterparts(seekable=True):
689 for part in bundle.iterparts(seekable=True):
647 part.seek(0, os.SEEK_END)
690 part.seek(0, os.SEEK_END)
648
691
649 def makepartreadnbytes(size):
692 def makepartreadnbytes(size):
650 def run():
693 def run():
651 with open(bundlepath, b'rb') as fh:
694 with open(bundlepath, b'rb') as fh:
652 bundle = exchange.readbundle(ui, fh, bundlepath)
695 bundle = exchange.readbundle(ui, fh, bundlepath)
653 for part in bundle.iterparts():
696 for part in bundle.iterparts():
654 while part.read(size):
697 while part.read(size):
655 pass
698 pass
656
699
657 return run
700 return run
658
701
659 benches = [
702 benches = [
660 (makestdioread(8192), b'read(8k)'),
703 (makestdioread(8192), b'read(8k)'),
661 (makestdioread(16384), b'read(16k)'),
704 (makestdioread(16384), b'read(16k)'),
662 (makestdioread(32768), b'read(32k)'),
705 (makestdioread(32768), b'read(32k)'),
663 (makestdioread(131072), b'read(128k)'),
706 (makestdioread(131072), b'read(128k)'),
664 ]
707 ]
665
708
666 with open(bundlepath, b'rb') as fh:
709 with open(bundlepath, b'rb') as fh:
667 bundle = exchange.readbundle(ui, fh, bundlepath)
710 bundle = exchange.readbundle(ui, fh, bundlepath)
668
711
669 if isinstance(bundle, changegroup.cg1unpacker):
712 if isinstance(bundle, changegroup.cg1unpacker):
670 benches.extend([
713 benches.extend([
671 (makebench(deltaiter), b'cg1 deltaiter()'),
714 (makebench(deltaiter), b'cg1 deltaiter()'),
672 (makebench(iterchunks), b'cg1 getchunks()'),
715 (makebench(iterchunks), b'cg1 getchunks()'),
673 (makereadnbytes(8192), b'cg1 read(8k)'),
716 (makereadnbytes(8192), b'cg1 read(8k)'),
674 (makereadnbytes(16384), b'cg1 read(16k)'),
717 (makereadnbytes(16384), b'cg1 read(16k)'),
675 (makereadnbytes(32768), b'cg1 read(32k)'),
718 (makereadnbytes(32768), b'cg1 read(32k)'),
676 (makereadnbytes(131072), b'cg1 read(128k)'),
719 (makereadnbytes(131072), b'cg1 read(128k)'),
677 ])
720 ])
678 elif isinstance(bundle, bundle2.unbundle20):
721 elif isinstance(bundle, bundle2.unbundle20):
679 benches.extend([
722 benches.extend([
680 (makebench(forwardchunks), b'bundle2 forwardchunks()'),
723 (makebench(forwardchunks), b'bundle2 forwardchunks()'),
681 (makebench(iterparts), b'bundle2 iterparts()'),
724 (makebench(iterparts), b'bundle2 iterparts()'),
682 (makebench(iterpartsseekable), b'bundle2 iterparts() seekable'),
725 (makebench(iterpartsseekable), b'bundle2 iterparts() seekable'),
683 (makebench(seek), b'bundle2 part seek()'),
726 (makebench(seek), b'bundle2 part seek()'),
684 (makepartreadnbytes(8192), b'bundle2 part read(8k)'),
727 (makepartreadnbytes(8192), b'bundle2 part read(8k)'),
685 (makepartreadnbytes(16384), b'bundle2 part read(16k)'),
728 (makepartreadnbytes(16384), b'bundle2 part read(16k)'),
686 (makepartreadnbytes(32768), b'bundle2 part read(32k)'),
729 (makepartreadnbytes(32768), b'bundle2 part read(32k)'),
687 (makepartreadnbytes(131072), b'bundle2 part read(128k)'),
730 (makepartreadnbytes(131072), b'bundle2 part read(128k)'),
688 ])
731 ])
689 elif isinstance(bundle, streamclone.streamcloneapplier):
732 elif isinstance(bundle, streamclone.streamcloneapplier):
690 raise error.Abort(b'stream clone bundles not supported')
733 raise error.Abort(b'stream clone bundles not supported')
691 else:
734 else:
692 raise error.Abort(b'unhandled bundle type: %s' % type(bundle))
735 raise error.Abort(b'unhandled bundle type: %s' % type(bundle))
693
736
694 for fn, title in benches:
737 for fn, title in benches:
695 timer, fm = gettimer(ui, opts)
738 timer, fm = gettimer(ui, opts)
696 timer(fn, title=title)
739 timer(fn, title=title)
697 fm.end()
740 fm.end()
698
741
699 @command(b'perfchangegroupchangelog', formatteropts +
742 @command(b'perfchangegroupchangelog', formatteropts +
700 [(b'', b'version', b'02', b'changegroup version'),
743 [(b'', b'cgversion', b'02', b'changegroup version'),
701 (b'r', b'rev', b'', b'revisions to add to changegroup')])
744 (b'r', b'rev', b'', b'revisions to add to changegroup')])
702 def perfchangegroupchangelog(ui, repo, version=b'02', rev=None, **opts):
745 def perfchangegroupchangelog(ui, repo, cgversion=b'02', rev=None, **opts):
703 """Benchmark producing a changelog group for a changegroup.
746 """Benchmark producing a changelog group for a changegroup.
704
747
705 This measures the time spent processing the changelog during a
748 This measures the time spent processing the changelog during a
706 bundle operation. This occurs during `hg bundle` and on a server
749 bundle operation. This occurs during `hg bundle` and on a server
707 processing a `getbundle` wire protocol request (handles clones
750 processing a `getbundle` wire protocol request (handles clones
708 and pull requests).
751 and pull requests).
709
752
710 By default, all revisions are added to the changegroup.
753 By default, all revisions are added to the changegroup.
711 """
754 """
712 opts = _byteskwargs(opts)
755 opts = _byteskwargs(opts)
713 cl = repo.changelog
756 cl = repo.changelog
714 nodes = [cl.lookup(r) for r in repo.revs(rev or b'all()')]
757 nodes = [cl.lookup(r) for r in repo.revs(rev or b'all()')]
715 bundler = changegroup.getbundler(version, repo)
758 bundler = changegroup.getbundler(cgversion, repo)
716
759
717 def d():
760 def d():
718 state, chunks = bundler._generatechangelog(cl, nodes)
761 state, chunks = bundler._generatechangelog(cl, nodes)
719 for chunk in chunks:
762 for chunk in chunks:
720 pass
763 pass
721
764
722 timer, fm = gettimer(ui, opts)
765 timer, fm = gettimer(ui, opts)
723
766
724 # Terminal printing can interfere with timing. So disable it.
767 # Terminal printing can interfere with timing. So disable it.
725 with ui.configoverride({(b'progress', b'disable'): True}):
768 with ui.configoverride({(b'progress', b'disable'): True}):
726 timer(d)
769 timer(d)
727
770
728 fm.end()
771 fm.end()
729
772
730 @command(b'perfdirs', formatteropts)
773 @command(b'perfdirs', formatteropts)
731 def perfdirs(ui, repo, **opts):
774 def perfdirs(ui, repo, **opts):
732 opts = _byteskwargs(opts)
775 opts = _byteskwargs(opts)
733 timer, fm = gettimer(ui, opts)
776 timer, fm = gettimer(ui, opts)
734 dirstate = repo.dirstate
777 dirstate = repo.dirstate
735 b'a' in dirstate
778 b'a' in dirstate
736 def d():
779 def d():
737 dirstate.hasdir(b'a')
780 dirstate.hasdir(b'a')
738 del dirstate._map._dirs
781 del dirstate._map._dirs
739 timer(d)
782 timer(d)
740 fm.end()
783 fm.end()
741
784
742 @command(b'perfdirstate', formatteropts)
785 @command(b'perfdirstate', formatteropts)
743 def perfdirstate(ui, repo, **opts):
786 def perfdirstate(ui, repo, **opts):
744 opts = _byteskwargs(opts)
787 opts = _byteskwargs(opts)
745 timer, fm = gettimer(ui, opts)
788 timer, fm = gettimer(ui, opts)
746 b"a" in repo.dirstate
789 b"a" in repo.dirstate
747 def d():
790 def d():
748 repo.dirstate.invalidate()
791 repo.dirstate.invalidate()
749 b"a" in repo.dirstate
792 b"a" in repo.dirstate
750 timer(d)
793 timer(d)
751 fm.end()
794 fm.end()
752
795
753 @command(b'perfdirstatedirs', formatteropts)
796 @command(b'perfdirstatedirs', formatteropts)
754 def perfdirstatedirs(ui, repo, **opts):
797 def perfdirstatedirs(ui, repo, **opts):
755 opts = _byteskwargs(opts)
798 opts = _byteskwargs(opts)
756 timer, fm = gettimer(ui, opts)
799 timer, fm = gettimer(ui, opts)
757 b"a" in repo.dirstate
800 b"a" in repo.dirstate
758 def d():
801 def d():
759 repo.dirstate.hasdir(b"a")
802 repo.dirstate.hasdir(b"a")
760 del repo.dirstate._map._dirs
803 del repo.dirstate._map._dirs
761 timer(d)
804 timer(d)
762 fm.end()
805 fm.end()
763
806
764 @command(b'perfdirstatefoldmap', formatteropts)
807 @command(b'perfdirstatefoldmap', formatteropts)
765 def perfdirstatefoldmap(ui, repo, **opts):
808 def perfdirstatefoldmap(ui, repo, **opts):
766 opts = _byteskwargs(opts)
809 opts = _byteskwargs(opts)
767 timer, fm = gettimer(ui, opts)
810 timer, fm = gettimer(ui, opts)
768 dirstate = repo.dirstate
811 dirstate = repo.dirstate
769 b'a' in dirstate
812 b'a' in dirstate
770 def d():
813 def d():
771 dirstate._map.filefoldmap.get(b'a')
814 dirstate._map.filefoldmap.get(b'a')
772 del dirstate._map.filefoldmap
815 del dirstate._map.filefoldmap
773 timer(d)
816 timer(d)
774 fm.end()
817 fm.end()
775
818
776 @command(b'perfdirfoldmap', formatteropts)
819 @command(b'perfdirfoldmap', formatteropts)
777 def perfdirfoldmap(ui, repo, **opts):
820 def perfdirfoldmap(ui, repo, **opts):
778 opts = _byteskwargs(opts)
821 opts = _byteskwargs(opts)
779 timer, fm = gettimer(ui, opts)
822 timer, fm = gettimer(ui, opts)
780 dirstate = repo.dirstate
823 dirstate = repo.dirstate
781 b'a' in dirstate
824 b'a' in dirstate
782 def d():
825 def d():
783 dirstate._map.dirfoldmap.get(b'a')
826 dirstate._map.dirfoldmap.get(b'a')
784 del dirstate._map.dirfoldmap
827 del dirstate._map.dirfoldmap
785 del dirstate._map._dirs
828 del dirstate._map._dirs
786 timer(d)
829 timer(d)
787 fm.end()
830 fm.end()
788
831
789 @command(b'perfdirstatewrite', formatteropts)
832 @command(b'perfdirstatewrite', formatteropts)
790 def perfdirstatewrite(ui, repo, **opts):
833 def perfdirstatewrite(ui, repo, **opts):
791 opts = _byteskwargs(opts)
834 opts = _byteskwargs(opts)
792 timer, fm = gettimer(ui, opts)
835 timer, fm = gettimer(ui, opts)
793 ds = repo.dirstate
836 ds = repo.dirstate
794 b"a" in ds
837 b"a" in ds
795 def d():
838 def d():
796 ds._dirty = True
839 ds._dirty = True
797 ds.write(repo.currenttransaction())
840 ds.write(repo.currenttransaction())
798 timer(d)
841 timer(d)
799 fm.end()
842 fm.end()
800
843
801 @command(b'perfmergecalculate',
844 @command(b'perfmergecalculate',
802 [(b'r', b'rev', b'.', b'rev to merge against')] + formatteropts)
845 [(b'r', b'rev', b'.', b'rev to merge against')] + formatteropts)
803 def perfmergecalculate(ui, repo, rev, **opts):
846 def perfmergecalculate(ui, repo, rev, **opts):
804 opts = _byteskwargs(opts)
847 opts = _byteskwargs(opts)
805 timer, fm = gettimer(ui, opts)
848 timer, fm = gettimer(ui, opts)
806 wctx = repo[None]
849 wctx = repo[None]
807 rctx = scmutil.revsingle(repo, rev, rev)
850 rctx = scmutil.revsingle(repo, rev, rev)
808 ancestor = wctx.ancestor(rctx)
851 ancestor = wctx.ancestor(rctx)
809 # we don't want working dir files to be stat'd in the benchmark, so prime
852 # we don't want working dir files to be stat'd in the benchmark, so prime
810 # that cache
853 # that cache
811 wctx.dirty()
854 wctx.dirty()
812 def d():
855 def d():
813 # acceptremote is True because we don't want prompts in the middle of
856 # acceptremote is True because we don't want prompts in the middle of
814 # our benchmark
857 # our benchmark
815 merge.calculateupdates(repo, wctx, rctx, [ancestor], False, False,
858 merge.calculateupdates(repo, wctx, rctx, [ancestor], False, False,
816 acceptremote=True, followcopies=True)
859 acceptremote=True, followcopies=True)
817 timer(d)
860 timer(d)
818 fm.end()
861 fm.end()
819
862
820 @command(b'perfpathcopies', [], b"REV REV")
863 @command(b'perfpathcopies', [], b"REV REV")
821 def perfpathcopies(ui, repo, rev1, rev2, **opts):
864 def perfpathcopies(ui, repo, rev1, rev2, **opts):
865 """benchmark the copy tracing logic"""
822 opts = _byteskwargs(opts)
866 opts = _byteskwargs(opts)
823 timer, fm = gettimer(ui, opts)
867 timer, fm = gettimer(ui, opts)
824 ctx1 = scmutil.revsingle(repo, rev1, rev1)
868 ctx1 = scmutil.revsingle(repo, rev1, rev1)
825 ctx2 = scmutil.revsingle(repo, rev2, rev2)
869 ctx2 = scmutil.revsingle(repo, rev2, rev2)
826 def d():
870 def d():
827 copies.pathcopies(ctx1, ctx2)
871 copies.pathcopies(ctx1, ctx2)
828 timer(d)
872 timer(d)
829 fm.end()
873 fm.end()
830
874
831 @command(b'perfphases',
875 @command(b'perfphases',
832 [(b'', b'full', False, b'include file reading time too'),
876 [(b'', b'full', False, b'include file reading time too'),
833 ], b"")
877 ], b"")
834 def perfphases(ui, repo, **opts):
878 def perfphases(ui, repo, **opts):
835 """benchmark phasesets computation"""
879 """benchmark phasesets computation"""
836 opts = _byteskwargs(opts)
880 opts = _byteskwargs(opts)
837 timer, fm = gettimer(ui, opts)
881 timer, fm = gettimer(ui, opts)
838 _phases = repo._phasecache
882 _phases = repo._phasecache
839 full = opts.get(b'full')
883 full = opts.get(b'full')
840 def d():
884 def d():
841 phases = _phases
885 phases = _phases
842 if full:
886 if full:
843 clearfilecache(repo, b'_phasecache')
887 clearfilecache(repo, b'_phasecache')
844 phases = repo._phasecache
888 phases = repo._phasecache
845 phases.invalidate()
889 phases.invalidate()
846 phases.loadphaserevs(repo)
890 phases.loadphaserevs(repo)
847 timer(d)
891 timer(d)
848 fm.end()
892 fm.end()
849
893
850 @command(b'perfphasesremote',
894 @command(b'perfphasesremote',
851 [], b"[DEST]")
895 [], b"[DEST]")
852 def perfphasesremote(ui, repo, dest=None, **opts):
896 def perfphasesremote(ui, repo, dest=None, **opts):
853 """benchmark time needed to analyse phases of the remote server"""
897 """benchmark time needed to analyse phases of the remote server"""
854 from mercurial.node import (
898 from mercurial.node import (
855 bin,
899 bin,
856 )
900 )
857 from mercurial import (
901 from mercurial import (
858 exchange,
902 exchange,
859 hg,
903 hg,
860 phases,
904 phases,
861 )
905 )
862 opts = _byteskwargs(opts)
906 opts = _byteskwargs(opts)
863 timer, fm = gettimer(ui, opts)
907 timer, fm = gettimer(ui, opts)
864
908
865 path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
909 path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
866 if not path:
910 if not path:
867 raise error.Abort((b'default repository not configured!'),
911 raise error.Abort((b'default repository not configured!'),
868 hint=(b"see 'hg help config.paths'"))
912 hint=(b"see 'hg help config.paths'"))
869 dest = path.pushloc or path.loc
913 dest = path.pushloc or path.loc
870 branches = (path.branch, opts.get(b'branch') or [])
914 branches = (path.branch, opts.get(b'branch') or [])
871 ui.status((b'analysing phase of %s\n') % util.hidepassword(dest))
915 ui.status((b'analysing phase of %s\n') % util.hidepassword(dest))
872 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get(b'rev'))
916 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get(b'rev'))
873 other = hg.peer(repo, opts, dest)
917 other = hg.peer(repo, opts, dest)
874
918
875 # easier to perform discovery through the operation
919 # easier to perform discovery through the operation
876 op = exchange.pushoperation(repo, other)
920 op = exchange.pushoperation(repo, other)
877 exchange._pushdiscoverychangeset(op)
921 exchange._pushdiscoverychangeset(op)
878
922
879 remotesubset = op.fallbackheads
923 remotesubset = op.fallbackheads
880
924
881 with other.commandexecutor() as e:
925 with other.commandexecutor() as e:
882 remotephases = e.callcommand(b'listkeys',
926 remotephases = e.callcommand(b'listkeys',
883 {b'namespace': b'phases'}).result()
927 {b'namespace': b'phases'}).result()
884 del other
928 del other
885 publishing = remotephases.get(b'publishing', False)
929 publishing = remotephases.get(b'publishing', False)
886 if publishing:
930 if publishing:
887 ui.status((b'publishing: yes\n'))
931 ui.status((b'publishing: yes\n'))
888 else:
932 else:
889 ui.status((b'publishing: no\n'))
933 ui.status((b'publishing: no\n'))
890
934
891 nodemap = repo.changelog.nodemap
935 nodemap = repo.changelog.nodemap
892 nonpublishroots = 0
936 nonpublishroots = 0
893 for nhex, phase in remotephases.iteritems():
937 for nhex, phase in remotephases.iteritems():
894 if nhex == b'publishing': # ignore data related to publish option
938 if nhex == b'publishing': # ignore data related to publish option
895 continue
939 continue
896 node = bin(nhex)
940 node = bin(nhex)
897 if node in nodemap and int(phase):
941 if node in nodemap and int(phase):
898 nonpublishroots += 1
942 nonpublishroots += 1
899 ui.status((b'number of roots: %d\n') % len(remotephases))
943 ui.status((b'number of roots: %d\n') % len(remotephases))
900 ui.status((b'number of known non public roots: %d\n') % nonpublishroots)
944 ui.status((b'number of known non public roots: %d\n') % nonpublishroots)
901 def d():
945 def d():
902 phases.remotephasessummary(repo,
946 phases.remotephasessummary(repo,
903 remotesubset,
947 remotesubset,
904 remotephases)
948 remotephases)
905 timer(d)
949 timer(d)
906 fm.end()
950 fm.end()
907
951
908 @command(b'perfmanifest',[
952 @command(b'perfmanifest',[
909 (b'm', b'manifest-rev', False, b'Look up a manifest node revision'),
953 (b'm', b'manifest-rev', False, b'Look up a manifest node revision'),
910 (b'', b'clear-disk', False, b'clear on-disk caches too'),
954 (b'', b'clear-disk', False, b'clear on-disk caches too'),
911 ] + formatteropts, b'REV|NODE')
955 ] + formatteropts, b'REV|NODE')
912 def perfmanifest(ui, repo, rev, manifest_rev=False, clear_disk=False, **opts):
956 def perfmanifest(ui, repo, rev, manifest_rev=False, clear_disk=False, **opts):
913 """benchmark the time to read a manifest from disk and return a usable
957 """benchmark the time to read a manifest from disk and return a usable
914 dict-like object
958 dict-like object
915
959
916 Manifest caches are cleared before retrieval."""
960 Manifest caches are cleared before retrieval."""
917 opts = _byteskwargs(opts)
961 opts = _byteskwargs(opts)
918 timer, fm = gettimer(ui, opts)
962 timer, fm = gettimer(ui, opts)
919 if not manifest_rev:
963 if not manifest_rev:
920 ctx = scmutil.revsingle(repo, rev, rev)
964 ctx = scmutil.revsingle(repo, rev, rev)
921 t = ctx.manifestnode()
965 t = ctx.manifestnode()
922 else:
966 else:
923 from mercurial.node import bin
967 from mercurial.node import bin
924
968
925 if len(rev) == 40:
969 if len(rev) == 40:
926 t = bin(rev)
970 t = bin(rev)
927 else:
971 else:
928 try:
972 try:
929 rev = int(rev)
973 rev = int(rev)
930
974
931 if util.safehasattr(repo.manifestlog, b'getstorage'):
975 if util.safehasattr(repo.manifestlog, b'getstorage'):
932 t = repo.manifestlog.getstorage(b'').node(rev)
976 t = repo.manifestlog.getstorage(b'').node(rev)
933 else:
977 else:
934 t = repo.manifestlog._revlog.lookup(rev)
978 t = repo.manifestlog._revlog.lookup(rev)
935 except ValueError:
979 except ValueError:
936 raise error.Abort(b'manifest revision must be integer or full '
980 raise error.Abort(b'manifest revision must be integer or full '
937 b'node')
981 b'node')
938 def d():
982 def d():
939 repo.manifestlog.clearcaches(clear_persisted_data=clear_disk)
983 repo.manifestlog.clearcaches(clear_persisted_data=clear_disk)
940 repo.manifestlog[t].read()
984 repo.manifestlog[t].read()
941 timer(d)
985 timer(d)
942 fm.end()
986 fm.end()
943
987
944 @command(b'perfchangeset', formatteropts)
988 @command(b'perfchangeset', formatteropts)
945 def perfchangeset(ui, repo, rev, **opts):
989 def perfchangeset(ui, repo, rev, **opts):
946 opts = _byteskwargs(opts)
990 opts = _byteskwargs(opts)
947 timer, fm = gettimer(ui, opts)
991 timer, fm = gettimer(ui, opts)
948 n = scmutil.revsingle(repo, rev).node()
992 n = scmutil.revsingle(repo, rev).node()
949 def d():
993 def d():
950 repo.changelog.read(n)
994 repo.changelog.read(n)
951 #repo.changelog._cache = None
995 #repo.changelog._cache = None
952 timer(d)
996 timer(d)
953 fm.end()
997 fm.end()
954
998
955 @command(b'perfindex', formatteropts)
999 @command(b'perfignore', formatteropts)
1000 def perfignore(ui, repo, **opts):
1001 """benchmark operation related to computing ignore"""
1002 opts = _byteskwargs(opts)
1003 timer, fm = gettimer(ui, opts)
1004 dirstate = repo.dirstate
1005
1006 def setupone():
1007 dirstate.invalidate()
1008 clearfilecache(dirstate, b'_ignore')
1009
1010 def runone():
1011 dirstate._ignore
1012
1013 timer(runone, setup=setupone, title=b"load")
1014 fm.end()
1015
1016 @command(b'perfindex', [
1017 (b'', b'rev', b'', b'revision to be looked up (default tip)'),
1018 ] + formatteropts)
956 def perfindex(ui, repo, **opts):
1019 def perfindex(ui, repo, **opts):
957 import mercurial.revlog
1020 import mercurial.revlog
958 opts = _byteskwargs(opts)
1021 opts = _byteskwargs(opts)
959 timer, fm = gettimer(ui, opts)
1022 timer, fm = gettimer(ui, opts)
960 mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
1023 mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
961 n = repo[b"tip"].node()
1024 if opts[b'rev'] is None:
962 svfs = getsvfs(repo)
1025 n = repo[b"tip"].node()
1026 else:
1027 rev = scmutil.revsingle(repo, opts[b'rev'])
1028 n = repo[rev].node()
1029
1030 unfi = repo.unfiltered()
1031 # find the filecache func directly
1032 # This avoid polluting the benchmark with the filecache logic
1033 makecl = unfi.__class__.changelog.func
1034 def setup():
1035 # probably not necessary, but for good measure
1036 clearchangelog(unfi)
963 def d():
1037 def d():
964 cl = mercurial.revlog.revlog(svfs, b"00changelog.i")
1038 cl = makecl(unfi)
965 cl.rev(n)
1039 cl.rev(n)
966 timer(d)
1040 timer(d, setup=setup)
967 fm.end()
1041 fm.end()
968
1042
969 @command(b'perfstartup', formatteropts)
1043 @command(b'perfstartup', formatteropts)
970 def perfstartup(ui, repo, **opts):
1044 def perfstartup(ui, repo, **opts):
971 opts = _byteskwargs(opts)
1045 opts = _byteskwargs(opts)
972 timer, fm = gettimer(ui, opts)
1046 timer, fm = gettimer(ui, opts)
973 def d():
1047 def d():
974 if os.name != r'nt':
1048 if os.name != r'nt':
975 os.system(b"HGRCPATH= %s version -q > /dev/null" %
1049 os.system(b"HGRCPATH= %s version -q > /dev/null" %
976 fsencode(sys.argv[0]))
1050 fsencode(sys.argv[0]))
977 else:
1051 else:
978 os.environ[r'HGRCPATH'] = r' '
1052 os.environ[r'HGRCPATH'] = r' '
979 os.system(r"%s version -q > NUL" % sys.argv[0])
1053 os.system(r"%s version -q > NUL" % sys.argv[0])
980 timer(d)
1054 timer(d)
981 fm.end()
1055 fm.end()
982
1056
983 @command(b'perfparents', formatteropts)
1057 @command(b'perfparents', formatteropts)
984 def perfparents(ui, repo, **opts):
1058 def perfparents(ui, repo, **opts):
985 opts = _byteskwargs(opts)
1059 opts = _byteskwargs(opts)
986 timer, fm = gettimer(ui, opts)
1060 timer, fm = gettimer(ui, opts)
987 # control the number of commits perfparents iterates over
1061 # control the number of commits perfparents iterates over
988 # experimental config: perf.parentscount
1062 # experimental config: perf.parentscount
989 count = getint(ui, b"perf", b"parentscount", 1000)
1063 count = getint(ui, b"perf", b"parentscount", 1000)
990 if len(repo.changelog) < count:
1064 if len(repo.changelog) < count:
991 raise error.Abort(b"repo needs %d commits for this test" % count)
1065 raise error.Abort(b"repo needs %d commits for this test" % count)
992 repo = repo.unfiltered()
1066 repo = repo.unfiltered()
993 nl = [repo.changelog.node(i) for i in _xrange(count)]
1067 nl = [repo.changelog.node(i) for i in _xrange(count)]
994 def d():
1068 def d():
995 for n in nl:
1069 for n in nl:
996 repo.changelog.parents(n)
1070 repo.changelog.parents(n)
997 timer(d)
1071 timer(d)
998 fm.end()
1072 fm.end()
999
1073
1000 @command(b'perfctxfiles', formatteropts)
1074 @command(b'perfctxfiles', formatteropts)
1001 def perfctxfiles(ui, repo, x, **opts):
1075 def perfctxfiles(ui, repo, x, **opts):
1002 opts = _byteskwargs(opts)
1076 opts = _byteskwargs(opts)
1003 x = int(x)
1077 x = int(x)
1004 timer, fm = gettimer(ui, opts)
1078 timer, fm = gettimer(ui, opts)
1005 def d():
1079 def d():
1006 len(repo[x].files())
1080 len(repo[x].files())
1007 timer(d)
1081 timer(d)
1008 fm.end()
1082 fm.end()
1009
1083
1010 @command(b'perfrawfiles', formatteropts)
1084 @command(b'perfrawfiles', formatteropts)
1011 def perfrawfiles(ui, repo, x, **opts):
1085 def perfrawfiles(ui, repo, x, **opts):
1012 opts = _byteskwargs(opts)
1086 opts = _byteskwargs(opts)
1013 x = int(x)
1087 x = int(x)
1014 timer, fm = gettimer(ui, opts)
1088 timer, fm = gettimer(ui, opts)
1015 cl = repo.changelog
1089 cl = repo.changelog
1016 def d():
1090 def d():
1017 len(cl.read(x)[3])
1091 len(cl.read(x)[3])
1018 timer(d)
1092 timer(d)
1019 fm.end()
1093 fm.end()
1020
1094
1021 @command(b'perflookup', formatteropts)
1095 @command(b'perflookup', formatteropts)
1022 def perflookup(ui, repo, rev, **opts):
1096 def perflookup(ui, repo, rev, **opts):
1023 opts = _byteskwargs(opts)
1097 opts = _byteskwargs(opts)
1024 timer, fm = gettimer(ui, opts)
1098 timer, fm = gettimer(ui, opts)
1025 timer(lambda: len(repo.lookup(rev)))
1099 timer(lambda: len(repo.lookup(rev)))
1026 fm.end()
1100 fm.end()
1027
1101
1028 @command(b'perflinelogedits',
1102 @command(b'perflinelogedits',
1029 [(b'n', b'edits', 10000, b'number of edits'),
1103 [(b'n', b'edits', 10000, b'number of edits'),
1030 (b'', b'max-hunk-lines', 10, b'max lines in a hunk'),
1104 (b'', b'max-hunk-lines', 10, b'max lines in a hunk'),
1031 ], norepo=True)
1105 ], norepo=True)
1032 def perflinelogedits(ui, **opts):
1106 def perflinelogedits(ui, **opts):
1033 from mercurial import linelog
1107 from mercurial import linelog
1034
1108
1035 opts = _byteskwargs(opts)
1109 opts = _byteskwargs(opts)
1036
1110
1037 edits = opts[b'edits']
1111 edits = opts[b'edits']
1038 maxhunklines = opts[b'max_hunk_lines']
1112 maxhunklines = opts[b'max_hunk_lines']
1039
1113
1040 maxb1 = 100000
1114 maxb1 = 100000
1041 random.seed(0)
1115 random.seed(0)
1042 randint = random.randint
1116 randint = random.randint
1043 currentlines = 0
1117 currentlines = 0
1044 arglist = []
1118 arglist = []
1045 for rev in _xrange(edits):
1119 for rev in _xrange(edits):
1046 a1 = randint(0, currentlines)
1120 a1 = randint(0, currentlines)
1047 a2 = randint(a1, min(currentlines, a1 + maxhunklines))
1121 a2 = randint(a1, min(currentlines, a1 + maxhunklines))
1048 b1 = randint(0, maxb1)
1122 b1 = randint(0, maxb1)
1049 b2 = randint(b1, b1 + maxhunklines)
1123 b2 = randint(b1, b1 + maxhunklines)
1050 currentlines += (b2 - b1) - (a2 - a1)
1124 currentlines += (b2 - b1) - (a2 - a1)
1051 arglist.append((rev, a1, a2, b1, b2))
1125 arglist.append((rev, a1, a2, b1, b2))
1052
1126
1053 def d():
1127 def d():
1054 ll = linelog.linelog()
1128 ll = linelog.linelog()
1055 for args in arglist:
1129 for args in arglist:
1056 ll.replacelines(*args)
1130 ll.replacelines(*args)
1057
1131
1058 timer, fm = gettimer(ui, opts)
1132 timer, fm = gettimer(ui, opts)
1059 timer(d)
1133 timer(d)
1060 fm.end()
1134 fm.end()
1061
1135
1062 @command(b'perfrevrange', formatteropts)
1136 @command(b'perfrevrange', formatteropts)
1063 def perfrevrange(ui, repo, *specs, **opts):
1137 def perfrevrange(ui, repo, *specs, **opts):
1064 opts = _byteskwargs(opts)
1138 opts = _byteskwargs(opts)
1065 timer, fm = gettimer(ui, opts)
1139 timer, fm = gettimer(ui, opts)
1066 revrange = scmutil.revrange
1140 revrange = scmutil.revrange
1067 timer(lambda: len(revrange(repo, specs)))
1141 timer(lambda: len(revrange(repo, specs)))
1068 fm.end()
1142 fm.end()
1069
1143
1070 @command(b'perfnodelookup', formatteropts)
1144 @command(b'perfnodelookup', formatteropts)
1071 def perfnodelookup(ui, repo, rev, **opts):
1145 def perfnodelookup(ui, repo, rev, **opts):
1072 opts = _byteskwargs(opts)
1146 opts = _byteskwargs(opts)
1073 timer, fm = gettimer(ui, opts)
1147 timer, fm = gettimer(ui, opts)
1074 import mercurial.revlog
1148 import mercurial.revlog
1075 mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
1149 mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
1076 n = scmutil.revsingle(repo, rev).node()
1150 n = scmutil.revsingle(repo, rev).node()
1077 cl = mercurial.revlog.revlog(getsvfs(repo), b"00changelog.i")
1151 cl = mercurial.revlog.revlog(getsvfs(repo), b"00changelog.i")
1078 def d():
1152 def d():
1079 cl.rev(n)
1153 cl.rev(n)
1080 clearcaches(cl)
1154 clearcaches(cl)
1081 timer(d)
1155 timer(d)
1082 fm.end()
1156 fm.end()
1083
1157
1084 @command(b'perflog',
1158 @command(b'perflog',
1085 [(b'', b'rename', False, b'ask log to follow renames')
1159 [(b'', b'rename', False, b'ask log to follow renames')
1086 ] + formatteropts)
1160 ] + formatteropts)
1087 def perflog(ui, repo, rev=None, **opts):
1161 def perflog(ui, repo, rev=None, **opts):
1088 opts = _byteskwargs(opts)
1162 opts = _byteskwargs(opts)
1089 if rev is None:
1163 if rev is None:
1090 rev=[]
1164 rev=[]
1091 timer, fm = gettimer(ui, opts)
1165 timer, fm = gettimer(ui, opts)
1092 ui.pushbuffer()
1166 ui.pushbuffer()
1093 timer(lambda: commands.log(ui, repo, rev=rev, date=b'', user=b'',
1167 timer(lambda: commands.log(ui, repo, rev=rev, date=b'', user=b'',
1094 copies=opts.get(b'rename')))
1168 copies=opts.get(b'rename')))
1095 ui.popbuffer()
1169 ui.popbuffer()
1096 fm.end()
1170 fm.end()
1097
1171
1098 @command(b'perfmoonwalk', formatteropts)
1172 @command(b'perfmoonwalk', formatteropts)
1099 def perfmoonwalk(ui, repo, **opts):
1173 def perfmoonwalk(ui, repo, **opts):
1100 """benchmark walking the changelog backwards
1174 """benchmark walking the changelog backwards
1101
1175
1102 This also loads the changelog data for each revision in the changelog.
1176 This also loads the changelog data for each revision in the changelog.
1103 """
1177 """
1104 opts = _byteskwargs(opts)
1178 opts = _byteskwargs(opts)
1105 timer, fm = gettimer(ui, opts)
1179 timer, fm = gettimer(ui, opts)
1106 def moonwalk():
1180 def moonwalk():
1107 for i in repo.changelog.revs(start=(len(repo) - 1), stop=-1):
1181 for i in repo.changelog.revs(start=(len(repo) - 1), stop=-1):
1108 ctx = repo[i]
1182 ctx = repo[i]
1109 ctx.branch() # read changelog data (in addition to the index)
1183 ctx.branch() # read changelog data (in addition to the index)
1110 timer(moonwalk)
1184 timer(moonwalk)
1111 fm.end()
1185 fm.end()
1112
1186
1113 @command(b'perftemplating',
1187 @command(b'perftemplating',
1114 [(b'r', b'rev', [], b'revisions to run the template on'),
1188 [(b'r', b'rev', [], b'revisions to run the template on'),
1115 ] + formatteropts)
1189 ] + formatteropts)
1116 def perftemplating(ui, repo, testedtemplate=None, **opts):
1190 def perftemplating(ui, repo, testedtemplate=None, **opts):
1117 """test the rendering time of a given template"""
1191 """test the rendering time of a given template"""
1118 if makelogtemplater is None:
1192 if makelogtemplater is None:
1119 raise error.Abort((b"perftemplating not available with this Mercurial"),
1193 raise error.Abort((b"perftemplating not available with this Mercurial"),
1120 hint=b"use 4.3 or later")
1194 hint=b"use 4.3 or later")
1121
1195
1122 opts = _byteskwargs(opts)
1196 opts = _byteskwargs(opts)
1123
1197
1124 nullui = ui.copy()
1198 nullui = ui.copy()
1125 nullui.fout = open(os.devnull, r'wb')
1199 nullui.fout = open(os.devnull, r'wb')
1126 nullui.disablepager()
1200 nullui.disablepager()
1127 revs = opts.get(b'rev')
1201 revs = opts.get(b'rev')
1128 if not revs:
1202 if not revs:
1129 revs = [b'all()']
1203 revs = [b'all()']
1130 revs = list(scmutil.revrange(repo, revs))
1204 revs = list(scmutil.revrange(repo, revs))
1131
1205
1132 defaulttemplate = (b'{date|shortdate} [{rev}:{node|short}]'
1206 defaulttemplate = (b'{date|shortdate} [{rev}:{node|short}]'
1133 b' {author|person}: {desc|firstline}\n')
1207 b' {author|person}: {desc|firstline}\n')
1134 if testedtemplate is None:
1208 if testedtemplate is None:
1135 testedtemplate = defaulttemplate
1209 testedtemplate = defaulttemplate
1136 displayer = makelogtemplater(nullui, repo, testedtemplate)
1210 displayer = makelogtemplater(nullui, repo, testedtemplate)
1137 def format():
1211 def format():
1138 for r in revs:
1212 for r in revs:
1139 ctx = repo[r]
1213 ctx = repo[r]
1140 displayer.show(ctx)
1214 displayer.show(ctx)
1141 displayer.flush(ctx)
1215 displayer.flush(ctx)
1142
1216
1143 timer, fm = gettimer(ui, opts)
1217 timer, fm = gettimer(ui, opts)
1144 timer(format)
1218 timer(format)
1145 fm.end()
1219 fm.end()
1146
1220
1221 @command(b'perfhelper-pathcopies', formatteropts +
1222 [
1223 (b'r', b'revs', [], b'restrict search to these revisions'),
1224 (b'', b'timing', False, b'provides extra data (costly)'),
1225 ])
1226 def perfhelperpathcopies(ui, repo, revs=[], **opts):
1227 """find statistic about potential parameters for the `perftracecopies`
1228
1229 This command find source-destination pair relevant for copytracing testing.
1230 It report value for some of the parameters that impact copy tracing time.
1231
1232 If `--timing` is set, rename detection is run and the associated timing
1233 will be reported. The extra details comes at the cost of a slower command
1234 execution.
1235
1236 Since the rename detection is only run once, other factors might easily
1237 affect the precision of the timing. However it should give a good
1238 approximation of which revision pairs are very costly.
1239 """
1240 opts = _byteskwargs(opts)
1241 fm = ui.formatter(b'perf', opts)
1242 dotiming = opts[b'timing']
1243
1244 if dotiming:
1245 header = '%12s %12s %12s %12s %12s %12s\n'
1246 output = ("%(source)12s %(destination)12s "
1247 "%(nbrevs)12d %(nbmissingfiles)12d "
1248 "%(nbrenamedfiles)12d %(time)18.5f\n")
1249 header_names = ("source", "destination", "nb-revs", "nb-files",
1250 "nb-renames", "time")
1251 fm.plain(header % header_names)
1252 else:
1253 header = '%12s %12s %12s %12s\n'
1254 output = ("%(source)12s %(destination)12s "
1255 "%(nbrevs)12d %(nbmissingfiles)12d\n")
1256 fm.plain(header % ("source", "destination", "nb-revs", "nb-files"))
1257
1258 if not revs:
1259 revs = ['all()']
1260 revs = scmutil.revrange(repo, revs)
1261
1262 roi = repo.revs('merge() and %ld', revs)
1263 for r in roi:
1264 ctx = repo[r]
1265 p1 = ctx.p1().rev()
1266 p2 = ctx.p2().rev()
1267 bases = repo.changelog._commonancestorsheads(p1, p2)
1268 for p in (p1, p2):
1269 for b in bases:
1270 base = repo[b]
1271 parent = repo[p]
1272 missing = copies._computeforwardmissing(base, parent)
1273 if not missing:
1274 continue
1275 data = {
1276 b'source': base.hex(),
1277 b'destination': parent.hex(),
1278 b'nbrevs': len(repo.revs('%d::%d', b, p)),
1279 b'nbmissingfiles': len(missing),
1280 }
1281 if dotiming:
1282 begin = util.timer()
1283 renames = copies.pathcopies(base, parent)
1284 end = util.timer()
1285 # not very stable timing since we did only one run
1286 data['time'] = end - begin
1287 data['nbrenamedfiles'] = len(renames)
1288 fm.startitem()
1289 fm.data(**data)
1290 out = data.copy()
1291 out['source'] = fm.hexfunc(base.node())
1292 out['destination'] = fm.hexfunc(parent.node())
1293 fm.plain(output % out)
1294
1295 fm.end()
1296
1147 @command(b'perfcca', formatteropts)
1297 @command(b'perfcca', formatteropts)
1148 def perfcca(ui, repo, **opts):
1298 def perfcca(ui, repo, **opts):
1149 opts = _byteskwargs(opts)
1299 opts = _byteskwargs(opts)
1150 timer, fm = gettimer(ui, opts)
1300 timer, fm = gettimer(ui, opts)
1151 timer(lambda: scmutil.casecollisionauditor(ui, False, repo.dirstate))
1301 timer(lambda: scmutil.casecollisionauditor(ui, False, repo.dirstate))
1152 fm.end()
1302 fm.end()
1153
1303
1154 @command(b'perffncacheload', formatteropts)
1304 @command(b'perffncacheload', formatteropts)
1155 def perffncacheload(ui, repo, **opts):
1305 def perffncacheload(ui, repo, **opts):
1156 opts = _byteskwargs(opts)
1306 opts = _byteskwargs(opts)
1157 timer, fm = gettimer(ui, opts)
1307 timer, fm = gettimer(ui, opts)
1158 s = repo.store
1308 s = repo.store
1159 def d():
1309 def d():
1160 s.fncache._load()
1310 s.fncache._load()
1161 timer(d)
1311 timer(d)
1162 fm.end()
1312 fm.end()
1163
1313
1164 @command(b'perffncachewrite', formatteropts)
1314 @command(b'perffncachewrite', formatteropts)
1165 def perffncachewrite(ui, repo, **opts):
1315 def perffncachewrite(ui, repo, **opts):
1166 opts = _byteskwargs(opts)
1316 opts = _byteskwargs(opts)
1167 timer, fm = gettimer(ui, opts)
1317 timer, fm = gettimer(ui, opts)
1168 s = repo.store
1318 s = repo.store
1169 lock = repo.lock()
1319 lock = repo.lock()
1170 s.fncache._load()
1320 s.fncache._load()
1171 tr = repo.transaction(b'perffncachewrite')
1321 tr = repo.transaction(b'perffncachewrite')
1172 tr.addbackup(b'fncache')
1322 tr.addbackup(b'fncache')
1173 def d():
1323 def d():
1174 s.fncache._dirty = True
1324 s.fncache._dirty = True
1175 s.fncache.write(tr)
1325 s.fncache.write(tr)
1176 timer(d)
1326 timer(d)
1177 tr.close()
1327 tr.close()
1178 lock.release()
1328 lock.release()
1179 fm.end()
1329 fm.end()
1180
1330
1181 @command(b'perffncacheencode', formatteropts)
1331 @command(b'perffncacheencode', formatteropts)
1182 def perffncacheencode(ui, repo, **opts):
1332 def perffncacheencode(ui, repo, **opts):
1183 opts = _byteskwargs(opts)
1333 opts = _byteskwargs(opts)
1184 timer, fm = gettimer(ui, opts)
1334 timer, fm = gettimer(ui, opts)
1185 s = repo.store
1335 s = repo.store
1186 s.fncache._load()
1336 s.fncache._load()
1187 def d():
1337 def d():
1188 for p in s.fncache.entries:
1338 for p in s.fncache.entries:
1189 s.encode(p)
1339 s.encode(p)
1190 timer(d)
1340 timer(d)
1191 fm.end()
1341 fm.end()
1192
1342
1193 def _bdiffworker(q, blocks, xdiff, ready, done):
1343 def _bdiffworker(q, blocks, xdiff, ready, done):
1194 while not done.is_set():
1344 while not done.is_set():
1195 pair = q.get()
1345 pair = q.get()
1196 while pair is not None:
1346 while pair is not None:
1197 if xdiff:
1347 if xdiff:
1198 mdiff.bdiff.xdiffblocks(*pair)
1348 mdiff.bdiff.xdiffblocks(*pair)
1199 elif blocks:
1349 elif blocks:
1200 mdiff.bdiff.blocks(*pair)
1350 mdiff.bdiff.blocks(*pair)
1201 else:
1351 else:
1202 mdiff.textdiff(*pair)
1352 mdiff.textdiff(*pair)
1203 q.task_done()
1353 q.task_done()
1204 pair = q.get()
1354 pair = q.get()
1205 q.task_done() # for the None one
1355 q.task_done() # for the None one
1206 with ready:
1356 with ready:
1207 ready.wait()
1357 ready.wait()
1208
1358
1209 def _manifestrevision(repo, mnode):
1359 def _manifestrevision(repo, mnode):
1210 ml = repo.manifestlog
1360 ml = repo.manifestlog
1211
1361
1212 if util.safehasattr(ml, b'getstorage'):
1362 if util.safehasattr(ml, b'getstorage'):
1213 store = ml.getstorage(b'')
1363 store = ml.getstorage(b'')
1214 else:
1364 else:
1215 store = ml._revlog
1365 store = ml._revlog
1216
1366
1217 return store.revision(mnode)
1367 return store.revision(mnode)
1218
1368
1219 @command(b'perfbdiff', revlogopts + formatteropts + [
1369 @command(b'perfbdiff', revlogopts + formatteropts + [
1220 (b'', b'count', 1, b'number of revisions to test (when using --startrev)'),
1370 (b'', b'count', 1, b'number of revisions to test (when using --startrev)'),
1221 (b'', b'alldata', False, b'test bdiffs for all associated revisions'),
1371 (b'', b'alldata', False, b'test bdiffs for all associated revisions'),
1222 (b'', b'threads', 0, b'number of thread to use (disable with 0)'),
1372 (b'', b'threads', 0, b'number of thread to use (disable with 0)'),
1223 (b'', b'blocks', False, b'test computing diffs into blocks'),
1373 (b'', b'blocks', False, b'test computing diffs into blocks'),
1224 (b'', b'xdiff', False, b'use xdiff algorithm'),
1374 (b'', b'xdiff', False, b'use xdiff algorithm'),
1225 ],
1375 ],
1226
1376
1227 b'-c|-m|FILE REV')
1377 b'-c|-m|FILE REV')
1228 def perfbdiff(ui, repo, file_, rev=None, count=None, threads=0, **opts):
1378 def perfbdiff(ui, repo, file_, rev=None, count=None, threads=0, **opts):
1229 """benchmark a bdiff between revisions
1379 """benchmark a bdiff between revisions
1230
1380
1231 By default, benchmark a bdiff between its delta parent and itself.
1381 By default, benchmark a bdiff between its delta parent and itself.
1232
1382
1233 With ``--count``, benchmark bdiffs between delta parents and self for N
1383 With ``--count``, benchmark bdiffs between delta parents and self for N
1234 revisions starting at the specified revision.
1384 revisions starting at the specified revision.
1235
1385
1236 With ``--alldata``, assume the requested revision is a changeset and
1386 With ``--alldata``, assume the requested revision is a changeset and
1237 measure bdiffs for all changes related to that changeset (manifest
1387 measure bdiffs for all changes related to that changeset (manifest
1238 and filelogs).
1388 and filelogs).
1239 """
1389 """
1240 opts = _byteskwargs(opts)
1390 opts = _byteskwargs(opts)
1241
1391
1242 if opts[b'xdiff'] and not opts[b'blocks']:
1392 if opts[b'xdiff'] and not opts[b'blocks']:
1243 raise error.CommandError(b'perfbdiff', b'--xdiff requires --blocks')
1393 raise error.CommandError(b'perfbdiff', b'--xdiff requires --blocks')
1244
1394
1245 if opts[b'alldata']:
1395 if opts[b'alldata']:
1246 opts[b'changelog'] = True
1396 opts[b'changelog'] = True
1247
1397
1248 if opts.get(b'changelog') or opts.get(b'manifest'):
1398 if opts.get(b'changelog') or opts.get(b'manifest'):
1249 file_, rev = None, file_
1399 file_, rev = None, file_
1250 elif rev is None:
1400 elif rev is None:
1251 raise error.CommandError(b'perfbdiff', b'invalid arguments')
1401 raise error.CommandError(b'perfbdiff', b'invalid arguments')
1252
1402
1253 blocks = opts[b'blocks']
1403 blocks = opts[b'blocks']
1254 xdiff = opts[b'xdiff']
1404 xdiff = opts[b'xdiff']
1255 textpairs = []
1405 textpairs = []
1256
1406
1257 r = cmdutil.openrevlog(repo, b'perfbdiff', file_, opts)
1407 r = cmdutil.openrevlog(repo, b'perfbdiff', file_, opts)
1258
1408
1259 startrev = r.rev(r.lookup(rev))
1409 startrev = r.rev(r.lookup(rev))
1260 for rev in range(startrev, min(startrev + count, len(r) - 1)):
1410 for rev in range(startrev, min(startrev + count, len(r) - 1)):
1261 if opts[b'alldata']:
1411 if opts[b'alldata']:
1262 # Load revisions associated with changeset.
1412 # Load revisions associated with changeset.
1263 ctx = repo[rev]
1413 ctx = repo[rev]
1264 mtext = _manifestrevision(repo, ctx.manifestnode())
1414 mtext = _manifestrevision(repo, ctx.manifestnode())
1265 for pctx in ctx.parents():
1415 for pctx in ctx.parents():
1266 pman = _manifestrevision(repo, pctx.manifestnode())
1416 pman = _manifestrevision(repo, pctx.manifestnode())
1267 textpairs.append((pman, mtext))
1417 textpairs.append((pman, mtext))
1268
1418
1269 # Load filelog revisions by iterating manifest delta.
1419 # Load filelog revisions by iterating manifest delta.
1270 man = ctx.manifest()
1420 man = ctx.manifest()
1271 pman = ctx.p1().manifest()
1421 pman = ctx.p1().manifest()
1272 for filename, change in pman.diff(man).items():
1422 for filename, change in pman.diff(man).items():
1273 fctx = repo.file(filename)
1423 fctx = repo.file(filename)
1274 f1 = fctx.revision(change[0][0] or -1)
1424 f1 = fctx.revision(change[0][0] or -1)
1275 f2 = fctx.revision(change[1][0] or -1)
1425 f2 = fctx.revision(change[1][0] or -1)
1276 textpairs.append((f1, f2))
1426 textpairs.append((f1, f2))
1277 else:
1427 else:
1278 dp = r.deltaparent(rev)
1428 dp = r.deltaparent(rev)
1279 textpairs.append((r.revision(dp), r.revision(rev)))
1429 textpairs.append((r.revision(dp), r.revision(rev)))
1280
1430
1281 withthreads = threads > 0
1431 withthreads = threads > 0
1282 if not withthreads:
1432 if not withthreads:
1283 def d():
1433 def d():
1284 for pair in textpairs:
1434 for pair in textpairs:
1285 if xdiff:
1435 if xdiff:
1286 mdiff.bdiff.xdiffblocks(*pair)
1436 mdiff.bdiff.xdiffblocks(*pair)
1287 elif blocks:
1437 elif blocks:
1288 mdiff.bdiff.blocks(*pair)
1438 mdiff.bdiff.blocks(*pair)
1289 else:
1439 else:
1290 mdiff.textdiff(*pair)
1440 mdiff.textdiff(*pair)
1291 else:
1441 else:
1292 q = queue()
1442 q = queue()
1293 for i in _xrange(threads):
1443 for i in _xrange(threads):
1294 q.put(None)
1444 q.put(None)
1295 ready = threading.Condition()
1445 ready = threading.Condition()
1296 done = threading.Event()
1446 done = threading.Event()
1297 for i in _xrange(threads):
1447 for i in _xrange(threads):
1298 threading.Thread(target=_bdiffworker,
1448 threading.Thread(target=_bdiffworker,
1299 args=(q, blocks, xdiff, ready, done)).start()
1449 args=(q, blocks, xdiff, ready, done)).start()
1300 q.join()
1450 q.join()
1301 def d():
1451 def d():
1302 for pair in textpairs:
1452 for pair in textpairs:
1303 q.put(pair)
1453 q.put(pair)
1304 for i in _xrange(threads):
1454 for i in _xrange(threads):
1305 q.put(None)
1455 q.put(None)
1306 with ready:
1456 with ready:
1307 ready.notify_all()
1457 ready.notify_all()
1308 q.join()
1458 q.join()
1309 timer, fm = gettimer(ui, opts)
1459 timer, fm = gettimer(ui, opts)
1310 timer(d)
1460 timer(d)
1311 fm.end()
1461 fm.end()
1312
1462
1313 if withthreads:
1463 if withthreads:
1314 done.set()
1464 done.set()
1315 for i in _xrange(threads):
1465 for i in _xrange(threads):
1316 q.put(None)
1466 q.put(None)
1317 with ready:
1467 with ready:
1318 ready.notify_all()
1468 ready.notify_all()
1319
1469
1320 @command(b'perfunidiff', revlogopts + formatteropts + [
1470 @command(b'perfunidiff', revlogopts + formatteropts + [
1321 (b'', b'count', 1, b'number of revisions to test (when using --startrev)'),
1471 (b'', b'count', 1, b'number of revisions to test (when using --startrev)'),
1322 (b'', b'alldata', False, b'test unidiffs for all associated revisions'),
1472 (b'', b'alldata', False, b'test unidiffs for all associated revisions'),
1323 ], b'-c|-m|FILE REV')
1473 ], b'-c|-m|FILE REV')
1324 def perfunidiff(ui, repo, file_, rev=None, count=None, **opts):
1474 def perfunidiff(ui, repo, file_, rev=None, count=None, **opts):
1325 """benchmark a unified diff between revisions
1475 """benchmark a unified diff between revisions
1326
1476
1327 This doesn't include any copy tracing - it's just a unified diff
1477 This doesn't include any copy tracing - it's just a unified diff
1328 of the texts.
1478 of the texts.
1329
1479
1330 By default, benchmark a diff between its delta parent and itself.
1480 By default, benchmark a diff between its delta parent and itself.
1331
1481
1332 With ``--count``, benchmark diffs between delta parents and self for N
1482 With ``--count``, benchmark diffs between delta parents and self for N
1333 revisions starting at the specified revision.
1483 revisions starting at the specified revision.
1334
1484
1335 With ``--alldata``, assume the requested revision is a changeset and
1485 With ``--alldata``, assume the requested revision is a changeset and
1336 measure diffs for all changes related to that changeset (manifest
1486 measure diffs for all changes related to that changeset (manifest
1337 and filelogs).
1487 and filelogs).
1338 """
1488 """
1339 opts = _byteskwargs(opts)
1489 opts = _byteskwargs(opts)
1340 if opts[b'alldata']:
1490 if opts[b'alldata']:
1341 opts[b'changelog'] = True
1491 opts[b'changelog'] = True
1342
1492
1343 if opts.get(b'changelog') or opts.get(b'manifest'):
1493 if opts.get(b'changelog') or opts.get(b'manifest'):
1344 file_, rev = None, file_
1494 file_, rev = None, file_
1345 elif rev is None:
1495 elif rev is None:
1346 raise error.CommandError(b'perfunidiff', b'invalid arguments')
1496 raise error.CommandError(b'perfunidiff', b'invalid arguments')
1347
1497
1348 textpairs = []
1498 textpairs = []
1349
1499
1350 r = cmdutil.openrevlog(repo, b'perfunidiff', file_, opts)
1500 r = cmdutil.openrevlog(repo, b'perfunidiff', file_, opts)
1351
1501
1352 startrev = r.rev(r.lookup(rev))
1502 startrev = r.rev(r.lookup(rev))
1353 for rev in range(startrev, min(startrev + count, len(r) - 1)):
1503 for rev in range(startrev, min(startrev + count, len(r) - 1)):
1354 if opts[b'alldata']:
1504 if opts[b'alldata']:
1355 # Load revisions associated with changeset.
1505 # Load revisions associated with changeset.
1356 ctx = repo[rev]
1506 ctx = repo[rev]
1357 mtext = _manifestrevision(repo, ctx.manifestnode())
1507 mtext = _manifestrevision(repo, ctx.manifestnode())
1358 for pctx in ctx.parents():
1508 for pctx in ctx.parents():
1359 pman = _manifestrevision(repo, pctx.manifestnode())
1509 pman = _manifestrevision(repo, pctx.manifestnode())
1360 textpairs.append((pman, mtext))
1510 textpairs.append((pman, mtext))
1361
1511
1362 # Load filelog revisions by iterating manifest delta.
1512 # Load filelog revisions by iterating manifest delta.
1363 man = ctx.manifest()
1513 man = ctx.manifest()
1364 pman = ctx.p1().manifest()
1514 pman = ctx.p1().manifest()
1365 for filename, change in pman.diff(man).items():
1515 for filename, change in pman.diff(man).items():
1366 fctx = repo.file(filename)
1516 fctx = repo.file(filename)
1367 f1 = fctx.revision(change[0][0] or -1)
1517 f1 = fctx.revision(change[0][0] or -1)
1368 f2 = fctx.revision(change[1][0] or -1)
1518 f2 = fctx.revision(change[1][0] or -1)
1369 textpairs.append((f1, f2))
1519 textpairs.append((f1, f2))
1370 else:
1520 else:
1371 dp = r.deltaparent(rev)
1521 dp = r.deltaparent(rev)
1372 textpairs.append((r.revision(dp), r.revision(rev)))
1522 textpairs.append((r.revision(dp), r.revision(rev)))
1373
1523
1374 def d():
1524 def d():
1375 for left, right in textpairs:
1525 for left, right in textpairs:
1376 # The date strings don't matter, so we pass empty strings.
1526 # The date strings don't matter, so we pass empty strings.
1377 headerlines, hunks = mdiff.unidiff(
1527 headerlines, hunks = mdiff.unidiff(
1378 left, b'', right, b'', b'left', b'right', binary=False)
1528 left, b'', right, b'', b'left', b'right', binary=False)
1379 # consume iterators in roughly the way patch.py does
1529 # consume iterators in roughly the way patch.py does
1380 b'\n'.join(headerlines)
1530 b'\n'.join(headerlines)
1381 b''.join(sum((list(hlines) for hrange, hlines in hunks), []))
1531 b''.join(sum((list(hlines) for hrange, hlines in hunks), []))
1382 timer, fm = gettimer(ui, opts)
1532 timer, fm = gettimer(ui, opts)
1383 timer(d)
1533 timer(d)
1384 fm.end()
1534 fm.end()
1385
1535
1386 @command(b'perfdiffwd', formatteropts)
1536 @command(b'perfdiffwd', formatteropts)
1387 def perfdiffwd(ui, repo, **opts):
1537 def perfdiffwd(ui, repo, **opts):
1388 """Profile diff of working directory changes"""
1538 """Profile diff of working directory changes"""
1389 opts = _byteskwargs(opts)
1539 opts = _byteskwargs(opts)
1390 timer, fm = gettimer(ui, opts)
1540 timer, fm = gettimer(ui, opts)
1391 options = {
1541 options = {
1392 'w': 'ignore_all_space',
1542 'w': 'ignore_all_space',
1393 'b': 'ignore_space_change',
1543 'b': 'ignore_space_change',
1394 'B': 'ignore_blank_lines',
1544 'B': 'ignore_blank_lines',
1395 }
1545 }
1396
1546
1397 for diffopt in ('', 'w', 'b', 'B', 'wB'):
1547 for diffopt in ('', 'w', 'b', 'B', 'wB'):
1398 opts = dict((options[c], b'1') for c in diffopt)
1548 opts = dict((options[c], b'1') for c in diffopt)
1399 def d():
1549 def d():
1400 ui.pushbuffer()
1550 ui.pushbuffer()
1401 commands.diff(ui, repo, **opts)
1551 commands.diff(ui, repo, **opts)
1402 ui.popbuffer()
1552 ui.popbuffer()
1403 diffopt = diffopt.encode('ascii')
1553 diffopt = diffopt.encode('ascii')
1404 title = b'diffopts: %s' % (diffopt and (b'-' + diffopt) or b'none')
1554 title = b'diffopts: %s' % (diffopt and (b'-' + diffopt) or b'none')
1405 timer(d, title)
1555 timer(d, title=title)
1406 fm.end()
1556 fm.end()
1407
1557
1408 @command(b'perfrevlogindex', revlogopts + formatteropts,
1558 @command(b'perfrevlogindex', revlogopts + formatteropts,
1409 b'-c|-m|FILE')
1559 b'-c|-m|FILE')
1410 def perfrevlogindex(ui, repo, file_=None, **opts):
1560 def perfrevlogindex(ui, repo, file_=None, **opts):
1411 """Benchmark operations against a revlog index.
1561 """Benchmark operations against a revlog index.
1412
1562
1413 This tests constructing a revlog instance, reading index data,
1563 This tests constructing a revlog instance, reading index data,
1414 parsing index data, and performing various operations related to
1564 parsing index data, and performing various operations related to
1415 index data.
1565 index data.
1416 """
1566 """
1417
1567
1418 opts = _byteskwargs(opts)
1568 opts = _byteskwargs(opts)
1419
1569
1420 rl = cmdutil.openrevlog(repo, b'perfrevlogindex', file_, opts)
1570 rl = cmdutil.openrevlog(repo, b'perfrevlogindex', file_, opts)
1421
1571
1422 opener = getattr(rl, 'opener') # trick linter
1572 opener = getattr(rl, 'opener') # trick linter
1423 indexfile = rl.indexfile
1573 indexfile = rl.indexfile
1424 data = opener.read(indexfile)
1574 data = opener.read(indexfile)
1425
1575
1426 header = struct.unpack(b'>I', data[0:4])[0]
1576 header = struct.unpack(b'>I', data[0:4])[0]
1427 version = header & 0xFFFF
1577 version = header & 0xFFFF
1428 if version == 1:
1578 if version == 1:
1429 revlogio = revlog.revlogio()
1579 revlogio = revlog.revlogio()
1430 inline = header & (1 << 16)
1580 inline = header & (1 << 16)
1431 else:
1581 else:
1432 raise error.Abort((b'unsupported revlog version: %d') % version)
1582 raise error.Abort((b'unsupported revlog version: %d') % version)
1433
1583
1434 rllen = len(rl)
1584 rllen = len(rl)
1435
1585
1436 node0 = rl.node(0)
1586 node0 = rl.node(0)
1437 node25 = rl.node(rllen // 4)
1587 node25 = rl.node(rllen // 4)
1438 node50 = rl.node(rllen // 2)
1588 node50 = rl.node(rllen // 2)
1439 node75 = rl.node(rllen // 4 * 3)
1589 node75 = rl.node(rllen // 4 * 3)
1440 node100 = rl.node(rllen - 1)
1590 node100 = rl.node(rllen - 1)
1441
1591
1442 allrevs = range(rllen)
1592 allrevs = range(rllen)
1443 allrevsrev = list(reversed(allrevs))
1593 allrevsrev = list(reversed(allrevs))
1444 allnodes = [rl.node(rev) for rev in range(rllen)]
1594 allnodes = [rl.node(rev) for rev in range(rllen)]
1445 allnodesrev = list(reversed(allnodes))
1595 allnodesrev = list(reversed(allnodes))
1446
1596
1447 def constructor():
1597 def constructor():
1448 revlog.revlog(opener, indexfile)
1598 revlog.revlog(opener, indexfile)
1449
1599
1450 def read():
1600 def read():
1451 with opener(indexfile) as fh:
1601 with opener(indexfile) as fh:
1452 fh.read()
1602 fh.read()
1453
1603
1454 def parseindex():
1604 def parseindex():
1455 revlogio.parseindex(data, inline)
1605 revlogio.parseindex(data, inline)
1456
1606
1457 def getentry(revornode):
1607 def getentry(revornode):
1458 index = revlogio.parseindex(data, inline)[0]
1608 index = revlogio.parseindex(data, inline)[0]
1459 index[revornode]
1609 index[revornode]
1460
1610
1461 def getentries(revs, count=1):
1611 def getentries(revs, count=1):
1462 index = revlogio.parseindex(data, inline)[0]
1612 index = revlogio.parseindex(data, inline)[0]
1463
1613
1464 for i in range(count):
1614 for i in range(count):
1465 for rev in revs:
1615 for rev in revs:
1466 index[rev]
1616 index[rev]
1467
1617
1468 def resolvenode(node):
1618 def resolvenode(node):
1469 nodemap = revlogio.parseindex(data, inline)[1]
1619 nodemap = revlogio.parseindex(data, inline)[1]
1470 # This only works for the C code.
1620 # This only works for the C code.
1471 if nodemap is None:
1621 if nodemap is None:
1472 return
1622 return
1473
1623
1474 try:
1624 try:
1475 nodemap[node]
1625 nodemap[node]
1476 except error.RevlogError:
1626 except error.RevlogError:
1477 pass
1627 pass
1478
1628
1479 def resolvenodes(nodes, count=1):
1629 def resolvenodes(nodes, count=1):
1480 nodemap = revlogio.parseindex(data, inline)[1]
1630 nodemap = revlogio.parseindex(data, inline)[1]
1481 if nodemap is None:
1631 if nodemap is None:
1482 return
1632 return
1483
1633
1484 for i in range(count):
1634 for i in range(count):
1485 for node in nodes:
1635 for node in nodes:
1486 try:
1636 try:
1487 nodemap[node]
1637 nodemap[node]
1488 except error.RevlogError:
1638 except error.RevlogError:
1489 pass
1639 pass
1490
1640
1491 benches = [
1641 benches = [
1492 (constructor, b'revlog constructor'),
1642 (constructor, b'revlog constructor'),
1493 (read, b'read'),
1643 (read, b'read'),
1494 (parseindex, b'create index object'),
1644 (parseindex, b'create index object'),
1495 (lambda: getentry(0), b'retrieve index entry for rev 0'),
1645 (lambda: getentry(0), b'retrieve index entry for rev 0'),
1496 (lambda: resolvenode(b'a' * 20), b'look up missing node'),
1646 (lambda: resolvenode(b'a' * 20), b'look up missing node'),
1497 (lambda: resolvenode(node0), b'look up node at rev 0'),
1647 (lambda: resolvenode(node0), b'look up node at rev 0'),
1498 (lambda: resolvenode(node25), b'look up node at 1/4 len'),
1648 (lambda: resolvenode(node25), b'look up node at 1/4 len'),
1499 (lambda: resolvenode(node50), b'look up node at 1/2 len'),
1649 (lambda: resolvenode(node50), b'look up node at 1/2 len'),
1500 (lambda: resolvenode(node75), b'look up node at 3/4 len'),
1650 (lambda: resolvenode(node75), b'look up node at 3/4 len'),
1501 (lambda: resolvenode(node100), b'look up node at tip'),
1651 (lambda: resolvenode(node100), b'look up node at tip'),
1502 # 2x variation is to measure caching impact.
1652 # 2x variation is to measure caching impact.
1503 (lambda: resolvenodes(allnodes),
1653 (lambda: resolvenodes(allnodes),
1504 b'look up all nodes (forward)'),
1654 b'look up all nodes (forward)'),
1505 (lambda: resolvenodes(allnodes, 2),
1655 (lambda: resolvenodes(allnodes, 2),
1506 b'look up all nodes 2x (forward)'),
1656 b'look up all nodes 2x (forward)'),
1507 (lambda: resolvenodes(allnodesrev),
1657 (lambda: resolvenodes(allnodesrev),
1508 b'look up all nodes (reverse)'),
1658 b'look up all nodes (reverse)'),
1509 (lambda: resolvenodes(allnodesrev, 2),
1659 (lambda: resolvenodes(allnodesrev, 2),
1510 b'look up all nodes 2x (reverse)'),
1660 b'look up all nodes 2x (reverse)'),
1511 (lambda: getentries(allrevs),
1661 (lambda: getentries(allrevs),
1512 b'retrieve all index entries (forward)'),
1662 b'retrieve all index entries (forward)'),
1513 (lambda: getentries(allrevs, 2),
1663 (lambda: getentries(allrevs, 2),
1514 b'retrieve all index entries 2x (forward)'),
1664 b'retrieve all index entries 2x (forward)'),
1515 (lambda: getentries(allrevsrev),
1665 (lambda: getentries(allrevsrev),
1516 b'retrieve all index entries (reverse)'),
1666 b'retrieve all index entries (reverse)'),
1517 (lambda: getentries(allrevsrev, 2),
1667 (lambda: getentries(allrevsrev, 2),
1518 b'retrieve all index entries 2x (reverse)'),
1668 b'retrieve all index entries 2x (reverse)'),
1519 ]
1669 ]
1520
1670
1521 for fn, title in benches:
1671 for fn, title in benches:
1522 timer, fm = gettimer(ui, opts)
1672 timer, fm = gettimer(ui, opts)
1523 timer(fn, title=title)
1673 timer(fn, title=title)
1524 fm.end()
1674 fm.end()
1525
1675
1526 @command(b'perfrevlogrevisions', revlogopts + formatteropts +
1676 @command(b'perfrevlogrevisions', revlogopts + formatteropts +
1527 [(b'd', b'dist', 100, b'distance between the revisions'),
1677 [(b'd', b'dist', 100, b'distance between the revisions'),
1528 (b's', b'startrev', 0, b'revision to start reading at'),
1678 (b's', b'startrev', 0, b'revision to start reading at'),
1529 (b'', b'reverse', False, b'read in reverse')],
1679 (b'', b'reverse', False, b'read in reverse')],
1530 b'-c|-m|FILE')
1680 b'-c|-m|FILE')
1531 def perfrevlogrevisions(ui, repo, file_=None, startrev=0, reverse=False,
1681 def perfrevlogrevisions(ui, repo, file_=None, startrev=0, reverse=False,
1532 **opts):
1682 **opts):
1533 """Benchmark reading a series of revisions from a revlog.
1683 """Benchmark reading a series of revisions from a revlog.
1534
1684
1535 By default, we read every ``-d/--dist`` revision from 0 to tip of
1685 By default, we read every ``-d/--dist`` revision from 0 to tip of
1536 the specified revlog.
1686 the specified revlog.
1537
1687
1538 The start revision can be defined via ``-s/--startrev``.
1688 The start revision can be defined via ``-s/--startrev``.
1539 """
1689 """
1540 opts = _byteskwargs(opts)
1690 opts = _byteskwargs(opts)
1541
1691
1542 rl = cmdutil.openrevlog(repo, b'perfrevlogrevisions', file_, opts)
1692 rl = cmdutil.openrevlog(repo, b'perfrevlogrevisions', file_, opts)
1543 rllen = getlen(ui)(rl)
1693 rllen = getlen(ui)(rl)
1544
1694
1545 if startrev < 0:
1695 if startrev < 0:
1546 startrev = rllen + startrev
1696 startrev = rllen + startrev
1547
1697
1548 def d():
1698 def d():
1549 rl.clearcaches()
1699 rl.clearcaches()
1550
1700
1551 beginrev = startrev
1701 beginrev = startrev
1552 endrev = rllen
1702 endrev = rllen
1553 dist = opts[b'dist']
1703 dist = opts[b'dist']
1554
1704
1555 if reverse:
1705 if reverse:
1556 beginrev, endrev = endrev, beginrev
1706 beginrev, endrev = endrev - 1, beginrev - 1
1557 dist = -1 * dist
1707 dist = -1 * dist
1558
1708
1559 for x in _xrange(beginrev, endrev, dist):
1709 for x in _xrange(beginrev, endrev, dist):
1560 # Old revisions don't support passing int.
1710 # Old revisions don't support passing int.
1561 n = rl.node(x)
1711 n = rl.node(x)
1562 rl.revision(n)
1712 rl.revision(n)
1563
1713
1564 timer, fm = gettimer(ui, opts)
1714 timer, fm = gettimer(ui, opts)
1565 timer(d)
1715 timer(d)
1566 fm.end()
1716 fm.end()
1567
1717
1718 @command(b'perfrevlogwrite', revlogopts + formatteropts +
1719 [(b's', b'startrev', 1000, b'revision to start writing at'),
1720 (b'', b'stoprev', -1, b'last revision to write'),
1721 (b'', b'count', 3, b'last revision to write'),
1722 (b'', b'details', False, b'print timing for every revisions tested'),
1723 (b'', b'source', b'full', b'the kind of data feed in the revlog'),
1724 (b'', b'lazydeltabase', True, b'try the provided delta first'),
1725 (b'', b'clear-caches', True, b'clear revlog cache between calls'),
1726 ],
1727 b'-c|-m|FILE')
1728 def perfrevlogwrite(ui, repo, file_=None, startrev=1000, stoprev=-1, **opts):
1729 """Benchmark writing a series of revisions to a revlog.
1730
1731 Possible source values are:
1732 * `full`: add from a full text (default).
1733 * `parent-1`: add from a delta to the first parent
1734 * `parent-2`: add from a delta to the second parent if it exists
1735 (use a delta from the first parent otherwise)
1736 * `parent-smallest`: add from the smallest delta (either p1 or p2)
1737 * `storage`: add from the existing precomputed deltas
1738 """
1739 opts = _byteskwargs(opts)
1740
1741 rl = cmdutil.openrevlog(repo, b'perfrevlogwrite', file_, opts)
1742 rllen = getlen(ui)(rl)
1743 if startrev < 0:
1744 startrev = rllen + startrev
1745 if stoprev < 0:
1746 stoprev = rllen + stoprev
1747
1748 lazydeltabase = opts['lazydeltabase']
1749 source = opts['source']
1750 clearcaches = opts['clear_caches']
1751 validsource = (b'full', b'parent-1', b'parent-2', b'parent-smallest',
1752 b'storage')
1753 if source not in validsource:
1754 raise error.Abort('invalid source type: %s' % source)
1755
1756 ### actually gather results
1757 count = opts['count']
1758 if count <= 0:
1759 raise error.Abort('invalide run count: %d' % count)
1760 allresults = []
1761 for c in range(count):
1762 timing = _timeonewrite(ui, rl, source, startrev, stoprev, c + 1,
1763 lazydeltabase=lazydeltabase,
1764 clearcaches=clearcaches)
1765 allresults.append(timing)
1766
1767 ### consolidate the results in a single list
1768 results = []
1769 for idx, (rev, t) in enumerate(allresults[0]):
1770 ts = [t]
1771 for other in allresults[1:]:
1772 orev, ot = other[idx]
1773 assert orev == rev
1774 ts.append(ot)
1775 results.append((rev, ts))
1776 resultcount = len(results)
1777
1778 ### Compute and display relevant statistics
1779
1780 # get a formatter
1781 fm = ui.formatter(b'perf', opts)
1782 displayall = ui.configbool(b"perf", b"all-timing", False)
1783
1784 # print individual details if requested
1785 if opts['details']:
1786 for idx, item in enumerate(results, 1):
1787 rev, data = item
1788 title = 'revisions #%d of %d, rev %d' % (idx, resultcount, rev)
1789 formatone(fm, data, title=title, displayall=displayall)
1790
1791 # sorts results by median time
1792 results.sort(key=lambda x: sorted(x[1])[len(x[1]) // 2])
1793 # list of (name, index) to display)
1794 relevants = [
1795 ("min", 0),
1796 ("10%", resultcount * 10 // 100),
1797 ("25%", resultcount * 25 // 100),
1798 ("50%", resultcount * 70 // 100),
1799 ("75%", resultcount * 75 // 100),
1800 ("90%", resultcount * 90 // 100),
1801 ("95%", resultcount * 95 // 100),
1802 ("99%", resultcount * 99 // 100),
1803 ("99.9%", resultcount * 999 // 1000),
1804 ("99.99%", resultcount * 9999 // 10000),
1805 ("99.999%", resultcount * 99999 // 100000),
1806 ("max", -1),
1807 ]
1808 if not ui.quiet:
1809 for name, idx in relevants:
1810 data = results[idx]
1811 title = '%s of %d, rev %d' % (name, resultcount, data[0])
1812 formatone(fm, data[1], title=title, displayall=displayall)
1813
1814 # XXX summing that many float will not be very precise, we ignore this fact
1815 # for now
1816 totaltime = []
1817 for item in allresults:
1818 totaltime.append((sum(x[1][0] for x in item),
1819 sum(x[1][1] for x in item),
1820 sum(x[1][2] for x in item),)
1821 )
1822 formatone(fm, totaltime, title="total time (%d revs)" % resultcount,
1823 displayall=displayall)
1824 fm.end()
1825
1826 class _faketr(object):
1827 def add(s, x, y, z=None):
1828 return None
1829
1830 def _timeonewrite(ui, orig, source, startrev, stoprev, runidx=None,
1831 lazydeltabase=True, clearcaches=True):
1832 timings = []
1833 tr = _faketr()
1834 with _temprevlog(ui, orig, startrev) as dest:
1835 dest._lazydeltabase = lazydeltabase
1836 revs = list(orig.revs(startrev, stoprev))
1837 total = len(revs)
1838 topic = 'adding'
1839 if runidx is not None:
1840 topic += ' (run #%d)' % runidx
1841 # Support both old and new progress API
1842 if util.safehasattr(ui, 'makeprogress'):
1843 progress = ui.makeprogress(topic, unit='revs', total=total)
1844 def updateprogress(pos):
1845 progress.update(pos)
1846 def completeprogress():
1847 progress.complete()
1848 else:
1849 def updateprogress(pos):
1850 ui.progress(topic, pos, unit='revs', total=total)
1851 def completeprogress():
1852 ui.progress(topic, None, unit='revs', total=total)
1853
1854 for idx, rev in enumerate(revs):
1855 updateprogress(idx)
1856 addargs, addkwargs = _getrevisionseed(orig, rev, tr, source)
1857 if clearcaches:
1858 dest.index.clearcaches()
1859 dest.clearcaches()
1860 with timeone() as r:
1861 dest.addrawrevision(*addargs, **addkwargs)
1862 timings.append((rev, r[0]))
1863 updateprogress(total)
1864 completeprogress()
1865 return timings
1866
1867 def _getrevisionseed(orig, rev, tr, source):
1868 from mercurial.node import nullid
1869
1870 linkrev = orig.linkrev(rev)
1871 node = orig.node(rev)
1872 p1, p2 = orig.parents(node)
1873 flags = orig.flags(rev)
1874 cachedelta = None
1875 text = None
1876
1877 if source == b'full':
1878 text = orig.revision(rev)
1879 elif source == b'parent-1':
1880 baserev = orig.rev(p1)
1881 cachedelta = (baserev, orig.revdiff(p1, rev))
1882 elif source == b'parent-2':
1883 parent = p2
1884 if p2 == nullid:
1885 parent = p1
1886 baserev = orig.rev(parent)
1887 cachedelta = (baserev, orig.revdiff(parent, rev))
1888 elif source == b'parent-smallest':
1889 p1diff = orig.revdiff(p1, rev)
1890 parent = p1
1891 diff = p1diff
1892 if p2 != nullid:
1893 p2diff = orig.revdiff(p2, rev)
1894 if len(p1diff) > len(p2diff):
1895 parent = p2
1896 diff = p2diff
1897 baserev = orig.rev(parent)
1898 cachedelta = (baserev, diff)
1899 elif source == b'storage':
1900 baserev = orig.deltaparent(rev)
1901 cachedelta = (baserev, orig.revdiff(orig.node(baserev), rev))
1902
1903 return ((text, tr, linkrev, p1, p2),
1904 {'node': node, 'flags': flags, 'cachedelta': cachedelta})
1905
1906 @contextlib.contextmanager
1907 def _temprevlog(ui, orig, truncaterev):
1908 from mercurial import vfs as vfsmod
1909
1910 if orig._inline:
1911 raise error.Abort('not supporting inline revlog (yet)')
1912
1913 origindexpath = orig.opener.join(orig.indexfile)
1914 origdatapath = orig.opener.join(orig.datafile)
1915 indexname = 'revlog.i'
1916 dataname = 'revlog.d'
1917
1918 tmpdir = tempfile.mkdtemp(prefix='tmp-hgperf-')
1919 try:
1920 # copy the data file in a temporary directory
1921 ui.debug('copying data in %s\n' % tmpdir)
1922 destindexpath = os.path.join(tmpdir, 'revlog.i')
1923 destdatapath = os.path.join(tmpdir, 'revlog.d')
1924 shutil.copyfile(origindexpath, destindexpath)
1925 shutil.copyfile(origdatapath, destdatapath)
1926
1927 # remove the data we want to add again
1928 ui.debug('truncating data to be rewritten\n')
1929 with open(destindexpath, 'ab') as index:
1930 index.seek(0)
1931 index.truncate(truncaterev * orig._io.size)
1932 with open(destdatapath, 'ab') as data:
1933 data.seek(0)
1934 data.truncate(orig.start(truncaterev))
1935
1936 # instantiate a new revlog from the temporary copy
1937 ui.debug('truncating adding to be rewritten\n')
1938 vfs = vfsmod.vfs(tmpdir)
1939 vfs.options = getattr(orig.opener, 'options', None)
1940
1941 dest = revlog.revlog(vfs,
1942 indexfile=indexname,
1943 datafile=dataname)
1944 if dest._inline:
1945 raise error.Abort('not supporting inline revlog (yet)')
1946 # make sure internals are initialized
1947 dest.revision(len(dest) - 1)
1948 yield dest
1949 del dest, vfs
1950 finally:
1951 shutil.rmtree(tmpdir, True)
1952
1568 @command(b'perfrevlogchunks', revlogopts + formatteropts +
1953 @command(b'perfrevlogchunks', revlogopts + formatteropts +
1569 [(b'e', b'engines', b'', b'compression engines to use'),
1954 [(b'e', b'engines', b'', b'compression engines to use'),
1570 (b's', b'startrev', 0, b'revision to start at')],
1955 (b's', b'startrev', 0, b'revision to start at')],
1571 b'-c|-m|FILE')
1956 b'-c|-m|FILE')
1572 def perfrevlogchunks(ui, repo, file_=None, engines=None, startrev=0, **opts):
1957 def perfrevlogchunks(ui, repo, file_=None, engines=None, startrev=0, **opts):
1573 """Benchmark operations on revlog chunks.
1958 """Benchmark operations on revlog chunks.
1574
1959
1575 Logically, each revlog is a collection of fulltext revisions. However,
1960 Logically, each revlog is a collection of fulltext revisions. However,
1576 stored within each revlog are "chunks" of possibly compressed data. This
1961 stored within each revlog are "chunks" of possibly compressed data. This
1577 data needs to be read and decompressed or compressed and written.
1962 data needs to be read and decompressed or compressed and written.
1578
1963
1579 This command measures the time it takes to read+decompress and recompress
1964 This command measures the time it takes to read+decompress and recompress
1580 chunks in a revlog. It effectively isolates I/O and compression performance.
1965 chunks in a revlog. It effectively isolates I/O and compression performance.
1581 For measurements of higher-level operations like resolving revisions,
1966 For measurements of higher-level operations like resolving revisions,
1582 see ``perfrevlogrevisions`` and ``perfrevlogrevision``.
1967 see ``perfrevlogrevisions`` and ``perfrevlogrevision``.
1583 """
1968 """
1584 opts = _byteskwargs(opts)
1969 opts = _byteskwargs(opts)
1585
1970
1586 rl = cmdutil.openrevlog(repo, b'perfrevlogchunks', file_, opts)
1971 rl = cmdutil.openrevlog(repo, b'perfrevlogchunks', file_, opts)
1587
1972
1588 # _chunkraw was renamed to _getsegmentforrevs.
1973 # _chunkraw was renamed to _getsegmentforrevs.
1589 try:
1974 try:
1590 segmentforrevs = rl._getsegmentforrevs
1975 segmentforrevs = rl._getsegmentforrevs
1591 except AttributeError:
1976 except AttributeError:
1592 segmentforrevs = rl._chunkraw
1977 segmentforrevs = rl._chunkraw
1593
1978
1594 # Verify engines argument.
1979 # Verify engines argument.
1595 if engines:
1980 if engines:
1596 engines = set(e.strip() for e in engines.split(b','))
1981 engines = set(e.strip() for e in engines.split(b','))
1597 for engine in engines:
1982 for engine in engines:
1598 try:
1983 try:
1599 util.compressionengines[engine]
1984 util.compressionengines[engine]
1600 except KeyError:
1985 except KeyError:
1601 raise error.Abort(b'unknown compression engine: %s' % engine)
1986 raise error.Abort(b'unknown compression engine: %s' % engine)
1602 else:
1987 else:
1603 engines = []
1988 engines = []
1604 for e in util.compengines:
1989 for e in util.compengines:
1605 engine = util.compengines[e]
1990 engine = util.compengines[e]
1606 try:
1991 try:
1607 if engine.available():
1992 if engine.available():
1608 engine.revlogcompressor().compress(b'dummy')
1993 engine.revlogcompressor().compress(b'dummy')
1609 engines.append(e)
1994 engines.append(e)
1610 except NotImplementedError:
1995 except NotImplementedError:
1611 pass
1996 pass
1612
1997
1613 revs = list(rl.revs(startrev, len(rl) - 1))
1998 revs = list(rl.revs(startrev, len(rl) - 1))
1614
1999
1615 def rlfh(rl):
2000 def rlfh(rl):
1616 if rl._inline:
2001 if rl._inline:
1617 return getsvfs(repo)(rl.indexfile)
2002 return getsvfs(repo)(rl.indexfile)
1618 else:
2003 else:
1619 return getsvfs(repo)(rl.datafile)
2004 return getsvfs(repo)(rl.datafile)
1620
2005
1621 def doread():
2006 def doread():
1622 rl.clearcaches()
2007 rl.clearcaches()
1623 for rev in revs:
2008 for rev in revs:
1624 segmentforrevs(rev, rev)
2009 segmentforrevs(rev, rev)
1625
2010
1626 def doreadcachedfh():
2011 def doreadcachedfh():
1627 rl.clearcaches()
2012 rl.clearcaches()
1628 fh = rlfh(rl)
2013 fh = rlfh(rl)
1629 for rev in revs:
2014 for rev in revs:
1630 segmentforrevs(rev, rev, df=fh)
2015 segmentforrevs(rev, rev, df=fh)
1631
2016
1632 def doreadbatch():
2017 def doreadbatch():
1633 rl.clearcaches()
2018 rl.clearcaches()
1634 segmentforrevs(revs[0], revs[-1])
2019 segmentforrevs(revs[0], revs[-1])
1635
2020
1636 def doreadbatchcachedfh():
2021 def doreadbatchcachedfh():
1637 rl.clearcaches()
2022 rl.clearcaches()
1638 fh = rlfh(rl)
2023 fh = rlfh(rl)
1639 segmentforrevs(revs[0], revs[-1], df=fh)
2024 segmentforrevs(revs[0], revs[-1], df=fh)
1640
2025
1641 def dochunk():
2026 def dochunk():
1642 rl.clearcaches()
2027 rl.clearcaches()
1643 fh = rlfh(rl)
2028 fh = rlfh(rl)
1644 for rev in revs:
2029 for rev in revs:
1645 rl._chunk(rev, df=fh)
2030 rl._chunk(rev, df=fh)
1646
2031
1647 chunks = [None]
2032 chunks = [None]
1648
2033
1649 def dochunkbatch():
2034 def dochunkbatch():
1650 rl.clearcaches()
2035 rl.clearcaches()
1651 fh = rlfh(rl)
2036 fh = rlfh(rl)
1652 # Save chunks as a side-effect.
2037 # Save chunks as a side-effect.
1653 chunks[0] = rl._chunks(revs, df=fh)
2038 chunks[0] = rl._chunks(revs, df=fh)
1654
2039
1655 def docompress(compressor):
2040 def docompress(compressor):
1656 rl.clearcaches()
2041 rl.clearcaches()
1657
2042
1658 try:
2043 try:
1659 # Swap in the requested compression engine.
2044 # Swap in the requested compression engine.
1660 oldcompressor = rl._compressor
2045 oldcompressor = rl._compressor
1661 rl._compressor = compressor
2046 rl._compressor = compressor
1662 for chunk in chunks[0]:
2047 for chunk in chunks[0]:
1663 rl.compress(chunk)
2048 rl.compress(chunk)
1664 finally:
2049 finally:
1665 rl._compressor = oldcompressor
2050 rl._compressor = oldcompressor
1666
2051
1667 benches = [
2052 benches = [
1668 (lambda: doread(), b'read'),
2053 (lambda: doread(), b'read'),
1669 (lambda: doreadcachedfh(), b'read w/ reused fd'),
2054 (lambda: doreadcachedfh(), b'read w/ reused fd'),
1670 (lambda: doreadbatch(), b'read batch'),
2055 (lambda: doreadbatch(), b'read batch'),
1671 (lambda: doreadbatchcachedfh(), b'read batch w/ reused fd'),
2056 (lambda: doreadbatchcachedfh(), b'read batch w/ reused fd'),
1672 (lambda: dochunk(), b'chunk'),
2057 (lambda: dochunk(), b'chunk'),
1673 (lambda: dochunkbatch(), b'chunk batch'),
2058 (lambda: dochunkbatch(), b'chunk batch'),
1674 ]
2059 ]
1675
2060
1676 for engine in sorted(engines):
2061 for engine in sorted(engines):
1677 compressor = util.compengines[engine].revlogcompressor()
2062 compressor = util.compengines[engine].revlogcompressor()
1678 benches.append((functools.partial(docompress, compressor),
2063 benches.append((functools.partial(docompress, compressor),
1679 b'compress w/ %s' % engine))
2064 b'compress w/ %s' % engine))
1680
2065
1681 for fn, title in benches:
2066 for fn, title in benches:
1682 timer, fm = gettimer(ui, opts)
2067 timer, fm = gettimer(ui, opts)
1683 timer(fn, title=title)
2068 timer(fn, title=title)
1684 fm.end()
2069 fm.end()
1685
2070
1686 @command(b'perfrevlogrevision', revlogopts + formatteropts +
2071 @command(b'perfrevlogrevision', revlogopts + formatteropts +
1687 [(b'', b'cache', False, b'use caches instead of clearing')],
2072 [(b'', b'cache', False, b'use caches instead of clearing')],
1688 b'-c|-m|FILE REV')
2073 b'-c|-m|FILE REV')
1689 def perfrevlogrevision(ui, repo, file_, rev=None, cache=None, **opts):
2074 def perfrevlogrevision(ui, repo, file_, rev=None, cache=None, **opts):
1690 """Benchmark obtaining a revlog revision.
2075 """Benchmark obtaining a revlog revision.
1691
2076
1692 Obtaining a revlog revision consists of roughly the following steps:
2077 Obtaining a revlog revision consists of roughly the following steps:
1693
2078
1694 1. Compute the delta chain
2079 1. Compute the delta chain
1695 2. Obtain the raw chunks for that delta chain
2080 2. Slice the delta chain if applicable
1696 3. Decompress each raw chunk
2081 3. Obtain the raw chunks for that delta chain
1697 4. Apply binary patches to obtain fulltext
2082 4. Decompress each raw chunk
1698 5. Verify hash of fulltext
2083 5. Apply binary patches to obtain fulltext
2084 6. Verify hash of fulltext
1699
2085
1700 This command measures the time spent in each of these phases.
2086 This command measures the time spent in each of these phases.
1701 """
2087 """
1702 opts = _byteskwargs(opts)
2088 opts = _byteskwargs(opts)
1703
2089
1704 if opts.get(b'changelog') or opts.get(b'manifest'):
2090 if opts.get(b'changelog') or opts.get(b'manifest'):
1705 file_, rev = None, file_
2091 file_, rev = None, file_
1706 elif rev is None:
2092 elif rev is None:
1707 raise error.CommandError(b'perfrevlogrevision', b'invalid arguments')
2093 raise error.CommandError(b'perfrevlogrevision', b'invalid arguments')
1708
2094
1709 r = cmdutil.openrevlog(repo, b'perfrevlogrevision', file_, opts)
2095 r = cmdutil.openrevlog(repo, b'perfrevlogrevision', file_, opts)
1710
2096
1711 # _chunkraw was renamed to _getsegmentforrevs.
2097 # _chunkraw was renamed to _getsegmentforrevs.
1712 try:
2098 try:
1713 segmentforrevs = r._getsegmentforrevs
2099 segmentforrevs = r._getsegmentforrevs
1714 except AttributeError:
2100 except AttributeError:
1715 segmentforrevs = r._chunkraw
2101 segmentforrevs = r._chunkraw
1716
2102
1717 node = r.lookup(rev)
2103 node = r.lookup(rev)
1718 rev = r.rev(node)
2104 rev = r.rev(node)
1719
2105
1720 def getrawchunks(data, chain):
2106 def getrawchunks(data, chain):
1721 start = r.start
2107 start = r.start
1722 length = r.length
2108 length = r.length
1723 inline = r._inline
2109 inline = r._inline
1724 iosize = r._io.size
2110 iosize = r._io.size
1725 buffer = util.buffer
2111 buffer = util.buffer
1726 offset = start(chain[0])
1727
2112
1728 chunks = []
2113 chunks = []
1729 ladd = chunks.append
2114 ladd = chunks.append
1730
2115 for idx, item in enumerate(chain):
1731 for rev in chain:
2116 offset = start(item[0])
1732 chunkstart = start(rev)
2117 bits = data[idx]
1733 if inline:
2118 for rev in item:
1734 chunkstart += (rev + 1) * iosize
2119 chunkstart = start(rev)
1735 chunklength = length(rev)
2120 if inline:
1736 ladd(buffer(data, chunkstart - offset, chunklength))
2121 chunkstart += (rev + 1) * iosize
2122 chunklength = length(rev)
2123 ladd(buffer(bits, chunkstart - offset, chunklength))
1737
2124
1738 return chunks
2125 return chunks
1739
2126
1740 def dodeltachain(rev):
2127 def dodeltachain(rev):
1741 if not cache:
2128 if not cache:
1742 r.clearcaches()
2129 r.clearcaches()
1743 r._deltachain(rev)
2130 r._deltachain(rev)
1744
2131
1745 def doread(chain):
2132 def doread(chain):
1746 if not cache:
2133 if not cache:
1747 r.clearcaches()
2134 r.clearcaches()
1748 segmentforrevs(chain[0], chain[-1])
2135 for item in slicedchain:
2136 segmentforrevs(item[0], item[-1])
2137
2138 def doslice(r, chain, size):
2139 for s in slicechunk(r, chain, targetsize=size):
2140 pass
1749
2141
1750 def dorawchunks(data, chain):
2142 def dorawchunks(data, chain):
1751 if not cache:
2143 if not cache:
1752 r.clearcaches()
2144 r.clearcaches()
1753 getrawchunks(data, chain)
2145 getrawchunks(data, chain)
1754
2146
1755 def dodecompress(chunks):
2147 def dodecompress(chunks):
1756 decomp = r.decompress
2148 decomp = r.decompress
1757 for chunk in chunks:
2149 for chunk in chunks:
1758 decomp(chunk)
2150 decomp(chunk)
1759
2151
1760 def dopatch(text, bins):
2152 def dopatch(text, bins):
1761 if not cache:
2153 if not cache:
1762 r.clearcaches()
2154 r.clearcaches()
1763 mdiff.patches(text, bins)
2155 mdiff.patches(text, bins)
1764
2156
1765 def dohash(text):
2157 def dohash(text):
1766 if not cache:
2158 if not cache:
1767 r.clearcaches()
2159 r.clearcaches()
1768 r.checkhash(text, node, rev=rev)
2160 r.checkhash(text, node, rev=rev)
1769
2161
1770 def dorevision():
2162 def dorevision():
1771 if not cache:
2163 if not cache:
1772 r.clearcaches()
2164 r.clearcaches()
1773 r.revision(node)
2165 r.revision(node)
1774
2166
2167 try:
2168 from mercurial.revlogutils.deltas import slicechunk
2169 except ImportError:
2170 slicechunk = getattr(revlog, '_slicechunk', None)
2171
2172 size = r.length(rev)
1775 chain = r._deltachain(rev)[0]
2173 chain = r._deltachain(rev)[0]
1776 data = segmentforrevs(chain[0], chain[-1])[1]
2174 if not getattr(r, '_withsparseread', False):
1777 rawchunks = getrawchunks(data, chain)
2175 slicedchain = (chain,)
2176 else:
2177 slicedchain = tuple(slicechunk(r, chain, targetsize=size))
2178 data = [segmentforrevs(seg[0], seg[-1])[1] for seg in slicedchain]
2179 rawchunks = getrawchunks(data, slicedchain)
1778 bins = r._chunks(chain)
2180 bins = r._chunks(chain)
1779 text = bytes(bins[0])
2181 text = bytes(bins[0])
1780 bins = bins[1:]
2182 bins = bins[1:]
1781 text = mdiff.patches(text, bins)
2183 text = mdiff.patches(text, bins)
1782
2184
1783 benches = [
2185 benches = [
1784 (lambda: dorevision(), b'full'),
2186 (lambda: dorevision(), b'full'),
1785 (lambda: dodeltachain(rev), b'deltachain'),
2187 (lambda: dodeltachain(rev), b'deltachain'),
1786 (lambda: doread(chain), b'read'),
2188 (lambda: doread(chain), b'read'),
1787 (lambda: dorawchunks(data, chain), b'rawchunks'),
2189 ]
2190
2191 if getattr(r, '_withsparseread', False):
2192 slicing = (lambda: doslice(r, chain, size), b'slice-sparse-chain')
2193 benches.append(slicing)
2194
2195 benches.extend([
2196 (lambda: dorawchunks(data, slicedchain), b'rawchunks'),
1788 (lambda: dodecompress(rawchunks), b'decompress'),
2197 (lambda: dodecompress(rawchunks), b'decompress'),
1789 (lambda: dopatch(text, bins), b'patch'),
2198 (lambda: dopatch(text, bins), b'patch'),
1790 (lambda: dohash(text), b'hash'),
2199 (lambda: dohash(text), b'hash'),
1791 ]
2200 ])
1792
2201
2202 timer, fm = gettimer(ui, opts)
1793 for fn, title in benches:
2203 for fn, title in benches:
1794 timer, fm = gettimer(ui, opts)
1795 timer(fn, title=title)
2204 timer(fn, title=title)
1796 fm.end()
2205 fm.end()
1797
2206
1798 @command(b'perfrevset',
2207 @command(b'perfrevset',
1799 [(b'C', b'clear', False, b'clear volatile cache between each call.'),
2208 [(b'C', b'clear', False, b'clear volatile cache between each call.'),
1800 (b'', b'contexts', False, b'obtain changectx for each revision')]
2209 (b'', b'contexts', False, b'obtain changectx for each revision')]
1801 + formatteropts, b"REVSET")
2210 + formatteropts, b"REVSET")
1802 def perfrevset(ui, repo, expr, clear=False, contexts=False, **opts):
2211 def perfrevset(ui, repo, expr, clear=False, contexts=False, **opts):
1803 """benchmark the execution time of a revset
2212 """benchmark the execution time of a revset
1804
2213
1805 Use the --clean option if need to evaluate the impact of build volatile
2214 Use the --clean option if need to evaluate the impact of build volatile
1806 revisions set cache on the revset execution. Volatile cache hold filtered
2215 revisions set cache on the revset execution. Volatile cache hold filtered
1807 and obsolete related cache."""
2216 and obsolete related cache."""
1808 opts = _byteskwargs(opts)
2217 opts = _byteskwargs(opts)
1809
2218
1810 timer, fm = gettimer(ui, opts)
2219 timer, fm = gettimer(ui, opts)
1811 def d():
2220 def d():
1812 if clear:
2221 if clear:
1813 repo.invalidatevolatilesets()
2222 repo.invalidatevolatilesets()
1814 if contexts:
2223 if contexts:
1815 for ctx in repo.set(expr): pass
2224 for ctx in repo.set(expr): pass
1816 else:
2225 else:
1817 for r in repo.revs(expr): pass
2226 for r in repo.revs(expr): pass
1818 timer(d)
2227 timer(d)
1819 fm.end()
2228 fm.end()
1820
2229
1821 @command(b'perfvolatilesets',
2230 @command(b'perfvolatilesets',
1822 [(b'', b'clear-obsstore', False, b'drop obsstore between each call.'),
2231 [(b'', b'clear-obsstore', False, b'drop obsstore between each call.'),
1823 ] + formatteropts)
2232 ] + formatteropts)
1824 def perfvolatilesets(ui, repo, *names, **opts):
2233 def perfvolatilesets(ui, repo, *names, **opts):
1825 """benchmark the computation of various volatile set
2234 """benchmark the computation of various volatile set
1826
2235
1827 Volatile set computes element related to filtering and obsolescence."""
2236 Volatile set computes element related to filtering and obsolescence."""
1828 opts = _byteskwargs(opts)
2237 opts = _byteskwargs(opts)
1829 timer, fm = gettimer(ui, opts)
2238 timer, fm = gettimer(ui, opts)
1830 repo = repo.unfiltered()
2239 repo = repo.unfiltered()
1831
2240
1832 def getobs(name):
2241 def getobs(name):
1833 def d():
2242 def d():
1834 repo.invalidatevolatilesets()
2243 repo.invalidatevolatilesets()
1835 if opts[b'clear_obsstore']:
2244 if opts[b'clear_obsstore']:
1836 clearfilecache(repo, b'obsstore')
2245 clearfilecache(repo, b'obsstore')
1837 obsolete.getrevs(repo, name)
2246 obsolete.getrevs(repo, name)
1838 return d
2247 return d
1839
2248
1840 allobs = sorted(obsolete.cachefuncs)
2249 allobs = sorted(obsolete.cachefuncs)
1841 if names:
2250 if names:
1842 allobs = [n for n in allobs if n in names]
2251 allobs = [n for n in allobs if n in names]
1843
2252
1844 for name in allobs:
2253 for name in allobs:
1845 timer(getobs(name), title=name)
2254 timer(getobs(name), title=name)
1846
2255
1847 def getfiltered(name):
2256 def getfiltered(name):
1848 def d():
2257 def d():
1849 repo.invalidatevolatilesets()
2258 repo.invalidatevolatilesets()
1850 if opts[b'clear_obsstore']:
2259 if opts[b'clear_obsstore']:
1851 clearfilecache(repo, b'obsstore')
2260 clearfilecache(repo, b'obsstore')
1852 repoview.filterrevs(repo, name)
2261 repoview.filterrevs(repo, name)
1853 return d
2262 return d
1854
2263
1855 allfilter = sorted(repoview.filtertable)
2264 allfilter = sorted(repoview.filtertable)
1856 if names:
2265 if names:
1857 allfilter = [n for n in allfilter if n in names]
2266 allfilter = [n for n in allfilter if n in names]
1858
2267
1859 for name in allfilter:
2268 for name in allfilter:
1860 timer(getfiltered(name), title=name)
2269 timer(getfiltered(name), title=name)
1861 fm.end()
2270 fm.end()
1862
2271
1863 @command(b'perfbranchmap',
2272 @command(b'perfbranchmap',
1864 [(b'f', b'full', False,
2273 [(b'f', b'full', False,
1865 b'Includes build time of subset'),
2274 b'Includes build time of subset'),
1866 (b'', b'clear-revbranch', False,
2275 (b'', b'clear-revbranch', False,
1867 b'purge the revbranch cache between computation'),
2276 b'purge the revbranch cache between computation'),
1868 ] + formatteropts)
2277 ] + formatteropts)
1869 def perfbranchmap(ui, repo, *filternames, **opts):
2278 def perfbranchmap(ui, repo, *filternames, **opts):
1870 """benchmark the update of a branchmap
2279 """benchmark the update of a branchmap
1871
2280
1872 This benchmarks the full repo.branchmap() call with read and write disabled
2281 This benchmarks the full repo.branchmap() call with read and write disabled
1873 """
2282 """
1874 opts = _byteskwargs(opts)
2283 opts = _byteskwargs(opts)
1875 full = opts.get(b"full", False)
2284 full = opts.get(b"full", False)
1876 clear_revbranch = opts.get(b"clear_revbranch", False)
2285 clear_revbranch = opts.get(b"clear_revbranch", False)
1877 timer, fm = gettimer(ui, opts)
2286 timer, fm = gettimer(ui, opts)
1878 def getbranchmap(filtername):
2287 def getbranchmap(filtername):
1879 """generate a benchmark function for the filtername"""
2288 """generate a benchmark function for the filtername"""
1880 if filtername is None:
2289 if filtername is None:
1881 view = repo
2290 view = repo
1882 else:
2291 else:
1883 view = repo.filtered(filtername)
2292 view = repo.filtered(filtername)
1884 def d():
2293 def d():
1885 if clear_revbranch:
2294 if clear_revbranch:
1886 repo.revbranchcache()._clear()
2295 repo.revbranchcache()._clear()
1887 if full:
2296 if full:
1888 view._branchcaches.clear()
2297 view._branchcaches.clear()
1889 else:
2298 else:
1890 view._branchcaches.pop(filtername, None)
2299 view._branchcaches.pop(filtername, None)
1891 view.branchmap()
2300 view.branchmap()
1892 return d
2301 return d
1893 # add filter in smaller subset to bigger subset
2302 # add filter in smaller subset to bigger subset
1894 possiblefilters = set(repoview.filtertable)
2303 possiblefilters = set(repoview.filtertable)
1895 if filternames:
2304 if filternames:
1896 possiblefilters &= set(filternames)
2305 possiblefilters &= set(filternames)
1897 subsettable = getbranchmapsubsettable()
2306 subsettable = getbranchmapsubsettable()
1898 allfilters = []
2307 allfilters = []
1899 while possiblefilters:
2308 while possiblefilters:
1900 for name in possiblefilters:
2309 for name in possiblefilters:
1901 subset = subsettable.get(name)
2310 subset = subsettable.get(name)
1902 if subset not in possiblefilters:
2311 if subset not in possiblefilters:
1903 break
2312 break
1904 else:
2313 else:
1905 assert False, b'subset cycle %s!' % possiblefilters
2314 assert False, b'subset cycle %s!' % possiblefilters
1906 allfilters.append(name)
2315 allfilters.append(name)
1907 possiblefilters.remove(name)
2316 possiblefilters.remove(name)
1908
2317
1909 # warm the cache
2318 # warm the cache
1910 if not full:
2319 if not full:
1911 for name in allfilters:
2320 for name in allfilters:
1912 repo.filtered(name).branchmap()
2321 repo.filtered(name).branchmap()
1913 if not filternames or b'unfiltered' in filternames:
2322 if not filternames or b'unfiltered' in filternames:
1914 # add unfiltered
2323 # add unfiltered
1915 allfilters.append(None)
2324 allfilters.append(None)
1916
2325
1917 branchcacheread = safeattrsetter(branchmap, b'read')
2326 branchcacheread = safeattrsetter(branchmap, b'read')
1918 branchcachewrite = safeattrsetter(branchmap.branchcache, b'write')
2327 branchcachewrite = safeattrsetter(branchmap.branchcache, b'write')
1919 branchcacheread.set(lambda repo: None)
2328 branchcacheread.set(lambda repo: None)
1920 branchcachewrite.set(lambda bc, repo: None)
2329 branchcachewrite.set(lambda bc, repo: None)
1921 try:
2330 try:
1922 for name in allfilters:
2331 for name in allfilters:
1923 printname = name
2332 printname = name
1924 if name is None:
2333 if name is None:
1925 printname = b'unfiltered'
2334 printname = b'unfiltered'
1926 timer(getbranchmap(name), title=str(printname))
2335 timer(getbranchmap(name), title=str(printname))
1927 finally:
2336 finally:
1928 branchcacheread.restore()
2337 branchcacheread.restore()
1929 branchcachewrite.restore()
2338 branchcachewrite.restore()
1930 fm.end()
2339 fm.end()
1931
2340
2341 @command(b'perfbranchmapupdate', [
2342 (b'', b'base', [], b'subset of revision to start from'),
2343 (b'', b'target', [], b'subset of revision to end with'),
2344 (b'', b'clear-caches', False, b'clear cache between each runs')
2345 ] + formatteropts)
2346 def perfbranchmapupdate(ui, repo, base=(), target=(), **opts):
2347 """benchmark branchmap update from for <base> revs to <target> revs
2348
2349 If `--clear-caches` is passed, the following items will be reset before
2350 each update:
2351 * the changelog instance and associated indexes
2352 * the rev-branch-cache instance
2353
2354 Examples:
2355
2356 # update for the one last revision
2357 $ hg perfbranchmapupdate --base 'not tip' --target 'tip'
2358
2359 $ update for change coming with a new branch
2360 $ hg perfbranchmapupdate --base 'stable' --target 'default'
2361 """
2362 from mercurial import branchmap
2363 from mercurial import repoview
2364 opts = _byteskwargs(opts)
2365 timer, fm = gettimer(ui, opts)
2366 clearcaches = opts[b'clear_caches']
2367 unfi = repo.unfiltered()
2368 x = [None] # used to pass data between closure
2369
2370 # we use a `list` here to avoid possible side effect from smartset
2371 baserevs = list(scmutil.revrange(repo, base))
2372 targetrevs = list(scmutil.revrange(repo, target))
2373 if not baserevs:
2374 raise error.Abort(b'no revisions selected for --base')
2375 if not targetrevs:
2376 raise error.Abort(b'no revisions selected for --target')
2377
2378 # make sure the target branchmap also contains the one in the base
2379 targetrevs = list(set(baserevs) | set(targetrevs))
2380 targetrevs.sort()
2381
2382 cl = repo.changelog
2383 allbaserevs = list(cl.ancestors(baserevs, inclusive=True))
2384 allbaserevs.sort()
2385 alltargetrevs = frozenset(cl.ancestors(targetrevs, inclusive=True))
2386
2387 newrevs = list(alltargetrevs.difference(allbaserevs))
2388 newrevs.sort()
2389
2390 allrevs = frozenset(unfi.changelog.revs())
2391 basefilterrevs = frozenset(allrevs.difference(allbaserevs))
2392 targetfilterrevs = frozenset(allrevs.difference(alltargetrevs))
2393
2394 def basefilter(repo, visibilityexceptions=None):
2395 return basefilterrevs
2396
2397 def targetfilter(repo, visibilityexceptions=None):
2398 return targetfilterrevs
2399
2400 msg = b'benchmark of branchmap with %d revisions with %d new ones\n'
2401 ui.status(msg % (len(allbaserevs), len(newrevs)))
2402 if targetfilterrevs:
2403 msg = b'(%d revisions still filtered)\n'
2404 ui.status(msg % len(targetfilterrevs))
2405
2406 try:
2407 repoview.filtertable[b'__perf_branchmap_update_base'] = basefilter
2408 repoview.filtertable[b'__perf_branchmap_update_target'] = targetfilter
2409
2410 baserepo = repo.filtered(b'__perf_branchmap_update_base')
2411 targetrepo = repo.filtered(b'__perf_branchmap_update_target')
2412
2413 # try to find an existing branchmap to reuse
2414 subsettable = getbranchmapsubsettable()
2415 candidatefilter = subsettable.get(None)
2416 while candidatefilter is not None:
2417 candidatebm = repo.filtered(candidatefilter).branchmap()
2418 if candidatebm.validfor(baserepo):
2419 filtered = repoview.filterrevs(repo, candidatefilter)
2420 missing = [r for r in allbaserevs if r in filtered]
2421 base = candidatebm.copy()
2422 base.update(baserepo, missing)
2423 break
2424 candidatefilter = subsettable.get(candidatefilter)
2425 else:
2426 # no suitable subset where found
2427 base = branchmap.branchcache()
2428 base.update(baserepo, allbaserevs)
2429
2430 def setup():
2431 x[0] = base.copy()
2432 if clearcaches:
2433 unfi._revbranchcache = None
2434 clearchangelog(repo)
2435
2436 def bench():
2437 x[0].update(targetrepo, newrevs)
2438
2439 timer(bench, setup=setup)
2440 fm.end()
2441 finally:
2442 repoview.filtertable.pop(b'__perf_branchmap_update_base', None)
2443 repoview.filtertable.pop(b'__perf_branchmap_update_target', None)
2444
1932 @command(b'perfbranchmapload', [
2445 @command(b'perfbranchmapload', [
1933 (b'f', b'filter', b'', b'Specify repoview filter'),
2446 (b'f', b'filter', b'', b'Specify repoview filter'),
1934 (b'', b'list', False, b'List brachmap filter caches'),
2447 (b'', b'list', False, b'List brachmap filter caches'),
2448 (b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
2449
1935 ] + formatteropts)
2450 ] + formatteropts)
1936 def perfbranchmapread(ui, repo, filter=b'', list=False, **opts):
2451 def perfbranchmapload(ui, repo, filter=b'', list=False, **opts):
1937 """benchmark reading the branchmap"""
2452 """benchmark reading the branchmap"""
1938 opts = _byteskwargs(opts)
2453 opts = _byteskwargs(opts)
2454 clearrevlogs = opts[b'clear_revlogs']
1939
2455
1940 if list:
2456 if list:
1941 for name, kind, st in repo.cachevfs.readdir(stat=True):
2457 for name, kind, st in repo.cachevfs.readdir(stat=True):
1942 if name.startswith(b'branch2'):
2458 if name.startswith(b'branch2'):
1943 filtername = name.partition(b'-')[2] or b'unfiltered'
2459 filtername = name.partition(b'-')[2] or b'unfiltered'
1944 ui.status(b'%s - %s\n'
2460 ui.status(b'%s - %s\n'
1945 % (filtername, util.bytecount(st.st_size)))
2461 % (filtername, util.bytecount(st.st_size)))
1946 return
2462 return
1947 if filter:
2463 if not filter:
2464 filter = None
2465 subsettable = getbranchmapsubsettable()
2466 if filter is None:
2467 repo = repo.unfiltered()
2468 else:
1948 repo = repoview.repoview(repo, filter)
2469 repo = repoview.repoview(repo, filter)
1949 else:
2470
1950 repo = repo.unfiltered()
2471 repo.branchmap() # make sure we have a relevant, up to date branchmap
2472
2473 currentfilter = filter
1951 # try once without timer, the filter may not be cached
2474 # try once without timer, the filter may not be cached
1952 if branchmap.read(repo) is None:
2475 while branchmap.read(repo) is None:
1953 raise error.Abort(b'No brachmap cached for %s repo'
2476 currentfilter = subsettable.get(currentfilter)
1954 % (filter or b'unfiltered'))
2477 if currentfilter is None:
2478 raise error.Abort(b'No branchmap cached for %s repo'
2479 % (filter or b'unfiltered'))
2480 repo = repo.filtered(currentfilter)
1955 timer, fm = gettimer(ui, opts)
2481 timer, fm = gettimer(ui, opts)
1956 timer(lambda: branchmap.read(repo) and None)
2482 def setup():
2483 if clearrevlogs:
2484 clearchangelog(repo)
2485 def bench():
2486 branchmap.read(repo)
2487 timer(bench, setup=setup)
1957 fm.end()
2488 fm.end()
1958
2489
1959 @command(b'perfloadmarkers')
2490 @command(b'perfloadmarkers')
1960 def perfloadmarkers(ui, repo):
2491 def perfloadmarkers(ui, repo):
1961 """benchmark the time to parse the on-disk markers for a repo
2492 """benchmark the time to parse the on-disk markers for a repo
1962
2493
1963 Result is the number of markers in the repo."""
2494 Result is the number of markers in the repo."""
1964 timer, fm = gettimer(ui)
2495 timer, fm = gettimer(ui)
1965 svfs = getsvfs(repo)
2496 svfs = getsvfs(repo)
1966 timer(lambda: len(obsolete.obsstore(svfs)))
2497 timer(lambda: len(obsolete.obsstore(svfs)))
1967 fm.end()
2498 fm.end()
1968
2499
1969 @command(b'perflrucachedict', formatteropts +
2500 @command(b'perflrucachedict', formatteropts +
1970 [(b'', b'costlimit', 0, b'maximum total cost of items in cache'),
2501 [(b'', b'costlimit', 0, b'maximum total cost of items in cache'),
1971 (b'', b'mincost', 0, b'smallest cost of items in cache'),
2502 (b'', b'mincost', 0, b'smallest cost of items in cache'),
1972 (b'', b'maxcost', 100, b'maximum cost of items in cache'),
2503 (b'', b'maxcost', 100, b'maximum cost of items in cache'),
1973 (b'', b'size', 4, b'size of cache'),
2504 (b'', b'size', 4, b'size of cache'),
1974 (b'', b'gets', 10000, b'number of key lookups'),
2505 (b'', b'gets', 10000, b'number of key lookups'),
1975 (b'', b'sets', 10000, b'number of key sets'),
2506 (b'', b'sets', 10000, b'number of key sets'),
1976 (b'', b'mixed', 10000, b'number of mixed mode operations'),
2507 (b'', b'mixed', 10000, b'number of mixed mode operations'),
1977 (b'', b'mixedgetfreq', 50, b'frequency of get vs set ops in mixed mode')],
2508 (b'', b'mixedgetfreq', 50, b'frequency of get vs set ops in mixed mode')],
1978 norepo=True)
2509 norepo=True)
1979 def perflrucache(ui, mincost=0, maxcost=100, costlimit=0, size=4,
2510 def perflrucache(ui, mincost=0, maxcost=100, costlimit=0, size=4,
1980 gets=10000, sets=10000, mixed=10000, mixedgetfreq=50, **opts):
2511 gets=10000, sets=10000, mixed=10000, mixedgetfreq=50, **opts):
1981 opts = _byteskwargs(opts)
2512 opts = _byteskwargs(opts)
1982
2513
1983 def doinit():
2514 def doinit():
1984 for i in _xrange(10000):
2515 for i in _xrange(10000):
1985 util.lrucachedict(size)
2516 util.lrucachedict(size)
1986
2517
1987 costrange = list(range(mincost, maxcost + 1))
2518 costrange = list(range(mincost, maxcost + 1))
1988
2519
1989 values = []
2520 values = []
1990 for i in _xrange(size):
2521 for i in _xrange(size):
1991 values.append(random.randint(0, _maxint))
2522 values.append(random.randint(0, _maxint))
1992
2523
1993 # Get mode fills the cache and tests raw lookup performance with no
2524 # Get mode fills the cache and tests raw lookup performance with no
1994 # eviction.
2525 # eviction.
1995 getseq = []
2526 getseq = []
1996 for i in _xrange(gets):
2527 for i in _xrange(gets):
1997 getseq.append(random.choice(values))
2528 getseq.append(random.choice(values))
1998
2529
1999 def dogets():
2530 def dogets():
2000 d = util.lrucachedict(size)
2531 d = util.lrucachedict(size)
2001 for v in values:
2532 for v in values:
2002 d[v] = v
2533 d[v] = v
2003 for key in getseq:
2534 for key in getseq:
2004 value = d[key]
2535 value = d[key]
2005 value # silence pyflakes warning
2536 value # silence pyflakes warning
2006
2537
2007 def dogetscost():
2538 def dogetscost():
2008 d = util.lrucachedict(size, maxcost=costlimit)
2539 d = util.lrucachedict(size, maxcost=costlimit)
2009 for i, v in enumerate(values):
2540 for i, v in enumerate(values):
2010 d.insert(v, v, cost=costs[i])
2541 d.insert(v, v, cost=costs[i])
2011 for key in getseq:
2542 for key in getseq:
2012 try:
2543 try:
2013 value = d[key]
2544 value = d[key]
2014 value # silence pyflakes warning
2545 value # silence pyflakes warning
2015 except KeyError:
2546 except KeyError:
2016 pass
2547 pass
2017
2548
2018 # Set mode tests insertion speed with cache eviction.
2549 # Set mode tests insertion speed with cache eviction.
2019 setseq = []
2550 setseq = []
2020 costs = []
2551 costs = []
2021 for i in _xrange(sets):
2552 for i in _xrange(sets):
2022 setseq.append(random.randint(0, _maxint))
2553 setseq.append(random.randint(0, _maxint))
2023 costs.append(random.choice(costrange))
2554 costs.append(random.choice(costrange))
2024
2555
2025 def doinserts():
2556 def doinserts():
2026 d = util.lrucachedict(size)
2557 d = util.lrucachedict(size)
2027 for v in setseq:
2558 for v in setseq:
2028 d.insert(v, v)
2559 d.insert(v, v)
2029
2560
2030 def doinsertscost():
2561 def doinsertscost():
2031 d = util.lrucachedict(size, maxcost=costlimit)
2562 d = util.lrucachedict(size, maxcost=costlimit)
2032 for i, v in enumerate(setseq):
2563 for i, v in enumerate(setseq):
2033 d.insert(v, v, cost=costs[i])
2564 d.insert(v, v, cost=costs[i])
2034
2565
2035 def dosets():
2566 def dosets():
2036 d = util.lrucachedict(size)
2567 d = util.lrucachedict(size)
2037 for v in setseq:
2568 for v in setseq:
2038 d[v] = v
2569 d[v] = v
2039
2570
2040 # Mixed mode randomly performs gets and sets with eviction.
2571 # Mixed mode randomly performs gets and sets with eviction.
2041 mixedops = []
2572 mixedops = []
2042 for i in _xrange(mixed):
2573 for i in _xrange(mixed):
2043 r = random.randint(0, 100)
2574 r = random.randint(0, 100)
2044 if r < mixedgetfreq:
2575 if r < mixedgetfreq:
2045 op = 0
2576 op = 0
2046 else:
2577 else:
2047 op = 1
2578 op = 1
2048
2579
2049 mixedops.append((op,
2580 mixedops.append((op,
2050 random.randint(0, size * 2),
2581 random.randint(0, size * 2),
2051 random.choice(costrange)))
2582 random.choice(costrange)))
2052
2583
2053 def domixed():
2584 def domixed():
2054 d = util.lrucachedict(size)
2585 d = util.lrucachedict(size)
2055
2586
2056 for op, v, cost in mixedops:
2587 for op, v, cost in mixedops:
2057 if op == 0:
2588 if op == 0:
2058 try:
2589 try:
2059 d[v]
2590 d[v]
2060 except KeyError:
2591 except KeyError:
2061 pass
2592 pass
2062 else:
2593 else:
2063 d[v] = v
2594 d[v] = v
2064
2595
2065 def domixedcost():
2596 def domixedcost():
2066 d = util.lrucachedict(size, maxcost=costlimit)
2597 d = util.lrucachedict(size, maxcost=costlimit)
2067
2598
2068 for op, v, cost in mixedops:
2599 for op, v, cost in mixedops:
2069 if op == 0:
2600 if op == 0:
2070 try:
2601 try:
2071 d[v]
2602 d[v]
2072 except KeyError:
2603 except KeyError:
2073 pass
2604 pass
2074 else:
2605 else:
2075 d.insert(v, v, cost=cost)
2606 d.insert(v, v, cost=cost)
2076
2607
2077 benches = [
2608 benches = [
2078 (doinit, b'init'),
2609 (doinit, b'init'),
2079 ]
2610 ]
2080
2611
2081 if costlimit:
2612 if costlimit:
2082 benches.extend([
2613 benches.extend([
2083 (dogetscost, b'gets w/ cost limit'),
2614 (dogetscost, b'gets w/ cost limit'),
2084 (doinsertscost, b'inserts w/ cost limit'),
2615 (doinsertscost, b'inserts w/ cost limit'),
2085 (domixedcost, b'mixed w/ cost limit'),
2616 (domixedcost, b'mixed w/ cost limit'),
2086 ])
2617 ])
2087 else:
2618 else:
2088 benches.extend([
2619 benches.extend([
2089 (dogets, b'gets'),
2620 (dogets, b'gets'),
2090 (doinserts, b'inserts'),
2621 (doinserts, b'inserts'),
2091 (dosets, b'sets'),
2622 (dosets, b'sets'),
2092 (domixed, b'mixed')
2623 (domixed, b'mixed')
2093 ])
2624 ])
2094
2625
2095 for fn, title in benches:
2626 for fn, title in benches:
2096 timer, fm = gettimer(ui, opts)
2627 timer, fm = gettimer(ui, opts)
2097 timer(fn, title=title)
2628 timer(fn, title=title)
2098 fm.end()
2629 fm.end()
2099
2630
2100 @command(b'perfwrite', formatteropts)
2631 @command(b'perfwrite', formatteropts)
2101 def perfwrite(ui, repo, **opts):
2632 def perfwrite(ui, repo, **opts):
2102 """microbenchmark ui.write
2633 """microbenchmark ui.write
2103 """
2634 """
2104 opts = _byteskwargs(opts)
2635 opts = _byteskwargs(opts)
2105
2636
2106 timer, fm = gettimer(ui, opts)
2637 timer, fm = gettimer(ui, opts)
2107 def write():
2638 def write():
2108 for i in range(100000):
2639 for i in range(100000):
2109 ui.write((b'Testing write performance\n'))
2640 ui.write((b'Testing write performance\n'))
2110 timer(write)
2641 timer(write)
2111 fm.end()
2642 fm.end()
2112
2643
2113 def uisetup(ui):
2644 def uisetup(ui):
2114 if (util.safehasattr(cmdutil, b'openrevlog') and
2645 if (util.safehasattr(cmdutil, b'openrevlog') and
2115 not util.safehasattr(commands, b'debugrevlogopts')):
2646 not util.safehasattr(commands, b'debugrevlogopts')):
2116 # for "historical portability":
2647 # for "historical portability":
2117 # In this case, Mercurial should be 1.9 (or a79fea6b3e77) -
2648 # In this case, Mercurial should be 1.9 (or a79fea6b3e77) -
2118 # 3.7 (or 5606f7d0d063). Therefore, '--dir' option for
2649 # 3.7 (or 5606f7d0d063). Therefore, '--dir' option for
2119 # openrevlog() should cause failure, because it has been
2650 # openrevlog() should cause failure, because it has been
2120 # available since 3.5 (or 49c583ca48c4).
2651 # available since 3.5 (or 49c583ca48c4).
2121 def openrevlog(orig, repo, cmd, file_, opts):
2652 def openrevlog(orig, repo, cmd, file_, opts):
2122 if opts.get(b'dir') and not util.safehasattr(repo, b'dirlog'):
2653 if opts.get(b'dir') and not util.safehasattr(repo, b'dirlog'):
2123 raise error.Abort(b"This version doesn't support --dir option",
2654 raise error.Abort(b"This version doesn't support --dir option",
2124 hint=b"use 3.5 or later")
2655 hint=b"use 3.5 or later")
2125 return orig(repo, cmd, file_, opts)
2656 return orig(repo, cmd, file_, opts)
2126 extensions.wrapfunction(cmdutil, b'openrevlog', openrevlog)
2657 extensions.wrapfunction(cmdutil, b'openrevlog', openrevlog)
2658
2659 @command(b'perfprogress', formatteropts + [
2660 (b'', b'topic', b'topic', b'topic for progress messages'),
2661 (b'c', b'total', 1000000, b'total value we are progressing to'),
2662 ], norepo=True)
2663 def perfprogress(ui, topic=None, total=None, **opts):
2664 """printing of progress bars"""
2665 opts = _byteskwargs(opts)
2666
2667 timer, fm = gettimer(ui, opts)
2668
2669 def doprogress():
2670 with ui.makeprogress(topic, total=total) as progress:
2671 for i in pycompat.xrange(total):
2672 progress.increment()
2673
2674 timer(doprogress)
2675 fm.end()
@@ -1,642 +1,703 b''
1 test-abort-checkin.t
1 test-abort-checkin.t
2 test-absorb-edit-lines.t
2 test-absorb-filefixupstate.py
3 test-absorb-filefixupstate.py
3 test-absorb-phase.t
4 test-absorb-phase.t
4 test-absorb-rename.t
5 test-absorb-rename.t
5 test-absorb-strip.t
6 test-absorb-strip.t
6 test-absorb.t
7 test-absorb.t
7 test-add.t
8 test-add.t
8 test-addremove-similar.t
9 test-addremove-similar.t
9 test-addremove.t
10 test-addremove.t
10 test-alias.t
11 test-alias.t
11 test-amend-subrepo.t
12 test-amend-subrepo.t
12 test-amend.t
13 test-amend.t
13 test-ancestor.py
14 test-ancestor.py
14 test-annotate.py
15 test-annotate.py
15 test-annotate.t
16 test-annotate.t
16 test-archive-symlinks.t
17 test-archive-symlinks.t
17 test-archive.t
18 test-archive.t
18 test-atomictempfile.py
19 test-atomictempfile.py
19 test-audit-path.t
20 test-audit-path.t
20 test-audit-subrepo.t
21 test-audit-subrepo.t
21 test-automv.t
22 test-automv.t
22 test-backout.t
23 test-backout.t
23 test-backwards-remove.t
24 test-backwards-remove.t
24 test-bad-extension.t
25 test-bad-extension.t
25 test-bad-pull.t
26 test-bad-pull.t
26 test-basic.t
27 test-basic.t
27 test-bdiff.py
28 test-bdiff.py
28 test-bheads.t
29 test-bheads.t
29 test-bisect.t
30 test-bisect.t
30 test-bisect2.t
31 test-bisect2.t
31 test-bisect3.t
32 test-bisect3.t
32 test-blackbox.t
33 test-blackbox.t
34 test-bookflow.t
33 test-bookmarks-current.t
35 test-bookmarks-current.t
34 test-bookmarks-merge.t
36 test-bookmarks-merge.t
35 test-bookmarks-pushpull.t
37 test-bookmarks-pushpull.t
36 test-bookmarks-rebase.t
38 test-bookmarks-rebase.t
37 test-bookmarks-strip.t
39 test-bookmarks-strip.t
38 test-bookmarks.t
40 test-bookmarks.t
39 test-branch-change.t
41 test-branch-change.t
40 test-branch-option.t
42 test-branch-option.t
41 test-branch-tag-confict.t
43 test-branch-tag-confict.t
42 test-branches.t
44 test-branches.t
43 test-bundle-phases.t
45 test-bundle-phases.t
44 test-bundle-r.t
46 test-bundle-r.t
45 test-bundle-type.t
47 test-bundle-type.t
46 test-bundle-vs-outgoing.t
48 test-bundle-vs-outgoing.t
47 test-bundle.t
49 test-bundle.t
48 test-bundle2-exchange.t
50 test-bundle2-exchange.t
49 test-bundle2-format.t
51 test-bundle2-format.t
50 test-bundle2-multiple-changegroups.t
52 test-bundle2-multiple-changegroups.t
51 test-bundle2-pushback.t
53 test-bundle2-pushback.t
52 test-bundle2-remote-changegroup.t
54 test-bundle2-remote-changegroup.t
53 test-cache-abuse.t
55 test-cache-abuse.t
54 test-cappedreader.py
56 test-cappedreader.py
55 test-casecollision.t
57 test-casecollision.t
56 test-cat.t
58 test-cat.t
57 test-cbor.py
59 test-cbor.py
58 test-censor.t
60 test-censor.t
59 test-changelog-exec.t
61 test-changelog-exec.t
60 test-check-code.t
62 test-check-code.t
61 test-check-commit.t
63 test-check-commit.t
62 test-check-config.py
64 test-check-config.py
63 test-check-config.t
65 test-check-config.t
64 test-check-execute.t
66 test-check-execute.t
67 test-check-help.t
65 test-check-interfaces.py
68 test-check-interfaces.py
66 test-check-module-imports.t
69 test-check-module-imports.t
67 test-check-py3-compat.t
70 test-check-py3-compat.t
68 test-check-pyflakes.t
71 test-check-pyflakes.t
69 test-check-pylint.t
72 test-check-pylint.t
70 test-check-shbang.t
73 test-check-shbang.t
71 test-children.t
74 test-children.t
72 test-churn.t
75 test-churn.t
73 test-clone-cgi.t
76 test-clone-cgi.t
74 test-clone-pull-corruption.t
77 test-clone-pull-corruption.t
75 test-clone-r.t
78 test-clone-r.t
76 test-clone-uncompressed.t
79 test-clone-uncompressed.t
77 test-clone-update-order.t
80 test-clone-update-order.t
78 test-clone.t
81 test-clone.t
79 test-clonebundles.t
82 test-clonebundles.t
80 test-close-head.t
83 test-close-head.t
81 test-commandserver.t
84 test-commandserver.t
82 test-commit-amend.t
85 test-commit-amend.t
83 test-commit-interactive.t
86 test-commit-interactive.t
84 test-commit-multiple.t
87 test-commit-multiple.t
85 test-commit-unresolved.t
88 test-commit-unresolved.t
86 test-commit.t
89 test-commit.t
87 test-committer.t
90 test-committer.t
88 test-completion.t
91 test-completion.t
89 test-config-env.py
92 test-config-env.py
90 test-config.t
93 test-config.t
91 test-conflict.t
94 test-conflict.t
92 test-confused-revert.t
95 test-confused-revert.t
93 test-context-metadata.t
96 test-context-metadata.t
94 test-context.py
97 test-context.py
95 test-contrib-check-code.t
98 test-contrib-check-code.t
96 test-contrib-check-commit.t
99 test-contrib-check-commit.t
97 test-contrib-dumprevlog.t
100 test-contrib-dumprevlog.t
98 test-contrib-perf.t
101 test-contrib-perf.t
99 test-contrib-relnotes.t
102 test-contrib-relnotes.t
100 test-contrib-testparseutil.t
103 test-contrib-testparseutil.t
101 test-contrib.t
104 test-contrib.t
102 test-convert-authormap.t
105 test-convert-authormap.t
103 test-convert-clonebranches.t
106 test-convert-clonebranches.t
104 test-convert-cvs-branch.t
107 test-convert-cvs-branch.t
105 test-convert-cvs-detectmerge.t
108 test-convert-cvs-detectmerge.t
106 test-convert-cvs-synthetic.t
109 test-convert-cvs-synthetic.t
107 test-convert-cvs.t
110 test-convert-cvs.t
108 test-convert-cvsnt-mergepoints.t
111 test-convert-cvsnt-mergepoints.t
109 test-convert-datesort.t
112 test-convert-datesort.t
110 test-convert-filemap.t
113 test-convert-filemap.t
111 test-convert-hg-sink.t
114 test-convert-hg-sink.t
112 test-convert-hg-source.t
115 test-convert-hg-source.t
113 test-convert-hg-startrev.t
116 test-convert-hg-startrev.t
114 test-convert-splicemap.t
117 test-convert-splicemap.t
115 test-convert-tagsbranch-topology.t
118 test-convert-tagsbranch-topology.t
116 test-copy-move-merge.t
119 test-copy-move-merge.t
117 test-copy.t
120 test-copy.t
118 test-copytrace-heuristics.t
121 test-copytrace-heuristics.t
122 test-custom-filters.t
119 test-debugbuilddag.t
123 test-debugbuilddag.t
120 test-debugbundle.t
124 test-debugbundle.t
121 test-debugcommands.t
125 test-debugcommands.t
122 test-debugextensions.t
126 test-debugextensions.t
123 test-debugindexdot.t
127 test-debugindexdot.t
124 test-debugrename.t
128 test-debugrename.t
125 test-default-push.t
129 test-default-push.t
126 test-diff-antipatience.t
130 test-diff-antipatience.t
127 test-diff-binary-file.t
131 test-diff-binary-file.t
128 test-diff-change.t
132 test-diff-change.t
129 test-diff-color.t
133 test-diff-color.t
130 test-diff-copy-depth.t
134 test-diff-copy-depth.t
131 test-diff-hashes.t
135 test-diff-hashes.t
132 test-diff-ignore-whitespace.t
136 test-diff-ignore-whitespace.t
133 test-diff-indent-heuristic.t
137 test-diff-indent-heuristic.t
134 test-diff-issue2761.t
138 test-diff-issue2761.t
135 test-diff-newlines.t
139 test-diff-newlines.t
136 test-diff-reverse.t
140 test-diff-reverse.t
137 test-diff-subdir.t
141 test-diff-subdir.t
138 test-diff-unified.t
142 test-diff-unified.t
139 test-diff-upgrade.t
143 test-diff-upgrade.t
140 test-diffdir.t
144 test-diffdir.t
141 test-diffstat.t
145 test-diffstat.t
142 test-directaccess.t
146 test-directaccess.t
143 test-dirstate-backup.t
147 test-dirstate-backup.t
144 test-dirstate-nonnormalset.t
148 test-dirstate-nonnormalset.t
145 test-dirstate-race.t
149 test-dirstate-race.t
146 test-dirstate.t
150 test-dirstate.t
147 test-dispatch.py
151 test-dispatch.py
148 test-doctest.py
152 test-doctest.py
149 test-double-merge.t
153 test-double-merge.t
150 test-drawdag.t
154 test-drawdag.t
151 test-duplicateoptions.py
155 test-duplicateoptions.py
152 test-editor-filename.t
156 test-editor-filename.t
153 test-empty-dir.t
157 test-empty-dir.t
154 test-empty-file.t
158 test-empty-file.t
155 test-empty-group.t
159 test-empty-group.t
156 test-empty.t
160 test-empty.t
157 test-encode.t
161 test-encode.t
158 test-encoding-func.py
162 test-encoding-func.py
159 test-encoding-textwrap.t
163 test-encoding-textwrap.t
160 test-encoding.t
164 test-encoding.t
161 test-eol-add.t
165 test-eol-add.t
162 test-eol-clone.t
166 test-eol-clone.t
163 test-eol-hook.t
167 test-eol-hook.t
164 test-eol-patch.t
168 test-eol-patch.t
165 test-eol-tag.t
169 test-eol-tag.t
166 test-eol-update.t
170 test-eol-update.t
167 test-eol.t
171 test-eol.t
168 test-eolfilename.t
172 test-eolfilename.t
169 test-excessive-merge.t
173 test-excessive-merge.t
170 test-exchange-obsmarkers-case-A1.t
174 test-exchange-obsmarkers-case-A1.t
171 test-exchange-obsmarkers-case-A2.t
175 test-exchange-obsmarkers-case-A2.t
172 test-exchange-obsmarkers-case-A3.t
176 test-exchange-obsmarkers-case-A3.t
173 test-exchange-obsmarkers-case-A4.t
177 test-exchange-obsmarkers-case-A4.t
174 test-exchange-obsmarkers-case-A5.t
178 test-exchange-obsmarkers-case-A5.t
175 test-exchange-obsmarkers-case-A6.t
179 test-exchange-obsmarkers-case-A6.t
176 test-exchange-obsmarkers-case-A7.t
180 test-exchange-obsmarkers-case-A7.t
177 test-exchange-obsmarkers-case-B1.t
181 test-exchange-obsmarkers-case-B1.t
178 test-exchange-obsmarkers-case-B2.t
182 test-exchange-obsmarkers-case-B2.t
179 test-exchange-obsmarkers-case-B3.t
183 test-exchange-obsmarkers-case-B3.t
180 test-exchange-obsmarkers-case-B4.t
184 test-exchange-obsmarkers-case-B4.t
181 test-exchange-obsmarkers-case-B5.t
185 test-exchange-obsmarkers-case-B5.t
182 test-exchange-obsmarkers-case-B6.t
186 test-exchange-obsmarkers-case-B6.t
183 test-exchange-obsmarkers-case-B7.t
187 test-exchange-obsmarkers-case-B7.t
184 test-exchange-obsmarkers-case-C1.t
188 test-exchange-obsmarkers-case-C1.t
185 test-exchange-obsmarkers-case-C2.t
189 test-exchange-obsmarkers-case-C2.t
186 test-exchange-obsmarkers-case-C3.t
190 test-exchange-obsmarkers-case-C3.t
187 test-exchange-obsmarkers-case-C4.t
191 test-exchange-obsmarkers-case-C4.t
188 test-exchange-obsmarkers-case-D1.t
192 test-exchange-obsmarkers-case-D1.t
189 test-exchange-obsmarkers-case-D2.t
193 test-exchange-obsmarkers-case-D2.t
190 test-exchange-obsmarkers-case-D3.t
194 test-exchange-obsmarkers-case-D3.t
191 test-exchange-obsmarkers-case-D4.t
195 test-exchange-obsmarkers-case-D4.t
192 test-execute-bit.t
196 test-execute-bit.t
193 test-export.t
197 test-export.t
194 test-extdata.t
198 test-extdata.t
195 test-extdiff.t
199 test-extdiff.t
200 test-extension-timing.t
196 test-extensions-afterloaded.t
201 test-extensions-afterloaded.t
197 test-extensions-wrapfunction.py
202 test-extensions-wrapfunction.py
198 test-extra-filelog-entry.t
203 test-extra-filelog-entry.t
204 test-fastannotate-corrupt.t
205 test-fastannotate-diffopts.t
206 test-fastannotate-hg.t
207 test-fastannotate-perfhack.t
208 test-fastannotate-protocol.t
209 test-fastannotate-renames.t
210 test-fastannotate-revmap.py
211 test-fastannotate.t
199 test-fetch.t
212 test-fetch.t
200 test-filebranch.t
213 test-filebranch.t
201 test-filecache.py
214 test-filecache.py
202 test-filelog.py
215 test-filelog.py
203 test-fileset-generated.t
216 test-fileset-generated.t
204 test-fileset.t
217 test-fileset.t
205 test-fix-topology.t
218 test-fix-topology.t
206 test-fix.t
219 test-fix.t
207 test-flags.t
220 test-flags.t
208 test-fncache.t
221 test-fncache.t
222 test-gendoc-da.t
223 test-gendoc-de.t
224 test-gendoc-el.t
225 test-gendoc-fr.t
226 test-gendoc-it.t
227 test-gendoc-ja.t
228 test-gendoc-pt_BR.t
229 test-gendoc-ro.t
230 test-gendoc-ru.t
231 test-gendoc-sv.t
232 test-gendoc-zh_CN.t
233 test-gendoc-zh_TW.t
234 test-gendoc.t
209 test-generaldelta.t
235 test-generaldelta.t
210 test-getbundle.t
236 test-getbundle.t
211 test-git-export.t
237 test-git-export.t
212 test-globalopts.t
238 test-globalopts.t
213 test-glog-beautifygraph.t
239 test-glog-beautifygraph.t
214 test-glog-topological.t
240 test-glog-topological.t
215 test-glog.t
241 test-glog.t
216 test-gpg.t
242 test-gpg.t
217 test-graft.t
243 test-graft.t
218 test-grep.t
244 test-grep.t
219 test-hardlinks.t
245 test-hardlinks.t
246 test-help-hide.t
220 test-help.t
247 test-help.t
221 test-hg-parseurl.py
248 test-hg-parseurl.py
222 test-hghave.t
249 test-hghave.t
223 test-hgignore.t
250 test-hgignore.t
224 test-hgk.t
251 test-hgk.t
225 test-hgrc.t
252 test-hgrc.t
226 test-hgweb-annotate-whitespace.t
253 test-hgweb-annotate-whitespace.t
227 test-hgweb-bundle.t
254 test-hgweb-bundle.t
228 test-hgweb-csp.t
255 test-hgweb-csp.t
229 test-hgweb-descend-empties.t
256 test-hgweb-descend-empties.t
230 test-hgweb-diffs.t
257 test-hgweb-diffs.t
231 test-hgweb-empty.t
258 test-hgweb-empty.t
232 test-hgweb-filelog.t
259 test-hgweb-filelog.t
233 test-hgweb-non-interactive.t
260 test-hgweb-non-interactive.t
234 test-hgweb-raw.t
261 test-hgweb-raw.t
235 test-hgweb-removed.t
262 test-hgweb-removed.t
236 test-hgweb.t
263 test-hgweb.t
237 test-hgwebdir-paths.py
264 test-hgwebdir-paths.py
238 test-hgwebdirsym.t
265 test-hgwebdirsym.t
239 test-histedit-arguments.t
266 test-histedit-arguments.t
240 test-histedit-base.t
267 test-histedit-base.t
241 test-histedit-bookmark-motion.t
268 test-histedit-bookmark-motion.t
242 test-histedit-commute.t
269 test-histedit-commute.t
243 test-histedit-drop.t
270 test-histedit-drop.t
244 test-histedit-edit.t
271 test-histedit-edit.t
245 test-histedit-fold-non-commute.t
272 test-histedit-fold-non-commute.t
246 test-histedit-fold.t
273 test-histedit-fold.t
247 test-histedit-no-backup.t
274 test-histedit-no-backup.t
248 test-histedit-no-change.t
275 test-histedit-no-change.t
249 test-histedit-non-commute-abort.t
276 test-histedit-non-commute-abort.t
250 test-histedit-non-commute.t
277 test-histedit-non-commute.t
251 test-histedit-obsolete.t
278 test-histedit-obsolete.t
252 test-histedit-outgoing.t
279 test-histedit-outgoing.t
253 test-histedit-templates.t
280 test-histedit-templates.t
254 test-http-branchmap.t
281 test-http-branchmap.t
255 test-http-bundle1.t
282 test-http-bundle1.t
256 test-http-clone-r.t
283 test-http-clone-r.t
257 test-http-permissions.t
284 test-http-permissions.t
258 test-http.t
285 test-http.t
259 test-hybridencode.py
286 test-hybridencode.py
260 test-i18n.t
287 test-i18n.t
261 test-identify.t
288 test-identify.t
262 test-impexp-branch.t
289 test-impexp-branch.t
263 test-import-bypass.t
290 test-import-bypass.t
291 test-import-context.t
264 test-import-eol.t
292 test-import-eol.t
265 test-import-merge.t
293 test-import-merge.t
266 test-import-unknown.t
294 test-import-unknown.t
267 test-import.t
295 test-import.t
268 test-imports-checker.t
296 test-imports-checker.t
269 test-incoming-outgoing.t
297 test-incoming-outgoing.t
270 test-infinitepush-bundlestore.t
298 test-infinitepush-bundlestore.t
271 test-infinitepush-ci.t
299 test-infinitepush-ci.t
272 test-infinitepush.t
300 test-infinitepush.t
273 test-inherit-mode.t
301 test-inherit-mode.t
274 test-init.t
302 test-init.t
275 test-issue1089.t
303 test-issue1089.t
276 test-issue1102.t
304 test-issue1102.t
277 test-issue1175.t
305 test-issue1175.t
278 test-issue1306.t
306 test-issue1306.t
279 test-issue1438.t
307 test-issue1438.t
280 test-issue1502.t
308 test-issue1502.t
281 test-issue1802.t
309 test-issue1802.t
282 test-issue1877.t
310 test-issue1877.t
283 test-issue1993.t
311 test-issue1993.t
284 test-issue2137.t
312 test-issue2137.t
285 test-issue3084.t
313 test-issue3084.t
286 test-issue4074.t
314 test-issue4074.t
287 test-issue522.t
315 test-issue522.t
288 test-issue586.t
316 test-issue586.t
289 test-issue5979.t
317 test-issue5979.t
290 test-issue612.t
318 test-issue612.t
291 test-issue619.t
319 test-issue619.t
292 test-issue660.t
320 test-issue660.t
293 test-issue672.t
321 test-issue672.t
294 test-issue842.t
322 test-issue842.t
295 test-journal-exists.t
323 test-journal-exists.t
296 test-journal-share.t
324 test-journal-share.t
297 test-journal.t
325 test-journal.t
298 test-keyword.t
326 test-keyword.t
299 test-known.t
327 test-known.t
300 test-largefiles-cache.t
328 test-largefiles-cache.t
301 test-largefiles-misc.t
329 test-largefiles-misc.t
302 test-largefiles-small-disk.t
330 test-largefiles-small-disk.t
303 test-largefiles-update.t
331 test-largefiles-update.t
332 test-largefiles-wireproto.t
304 test-largefiles.t
333 test-largefiles.t
334 test-lfconvert.t
335 test-lfs-bundle.t
305 test-lfs-largefiles.t
336 test-lfs-largefiles.t
306 test-lfs-pointer.py
337 test-lfs-pointer.py
338 test-lfs.t
307 test-linelog.py
339 test-linelog.py
308 test-linerange.py
340 test-linerange.py
309 test-locate.t
341 test-locate.t
310 test-lock-badness.t
342 test-lock-badness.t
343 test-log-exthook.t
311 test-log-linerange.t
344 test-log-linerange.t
312 test-log.t
345 test-log.t
313 test-logexchange.t
346 test-logexchange.t
347 test-logtoprocess.t
314 test-lrucachedict.py
348 test-lrucachedict.py
315 test-mactext.t
349 test-mactext.t
316 test-mailmap.t
350 test-mailmap.t
317 test-manifest-merging.t
351 test-manifest-merging.t
318 test-manifest.py
352 test-manifest.py
319 test-manifest.t
353 test-manifest.t
320 test-match.py
354 test-match.py
321 test-mdiff.py
355 test-mdiff.py
322 test-merge-changedelete.t
356 test-merge-changedelete.t
323 test-merge-closedheads.t
357 test-merge-closedheads.t
324 test-merge-commit.t
358 test-merge-commit.t
325 test-merge-criss-cross.t
359 test-merge-criss-cross.t
326 test-merge-default.t
360 test-merge-default.t
327 test-merge-force.t
361 test-merge-force.t
328 test-merge-halt.t
362 test-merge-halt.t
329 test-merge-internal-tools-pattern.t
363 test-merge-internal-tools-pattern.t
330 test-merge-local.t
364 test-merge-local.t
331 test-merge-no-file-change.t
365 test-merge-no-file-change.t
332 test-merge-remove.t
366 test-merge-remove.t
333 test-merge-revert.t
367 test-merge-revert.t
334 test-merge-revert2.t
368 test-merge-revert2.t
335 test-merge-subrepos.t
369 test-merge-subrepos.t
336 test-merge-symlinks.t
370 test-merge-symlinks.t
337 test-merge-tools.t
371 test-merge-tools.t
338 test-merge-types.t
372 test-merge-types.t
339 test-merge1.t
373 test-merge1.t
340 test-merge10.t
374 test-merge10.t
341 test-merge2.t
375 test-merge2.t
342 test-merge4.t
376 test-merge4.t
343 test-merge5.t
377 test-merge5.t
344 test-merge6.t
378 test-merge6.t
345 test-merge7.t
379 test-merge7.t
346 test-merge8.t
380 test-merge8.t
347 test-merge9.t
381 test-merge9.t
348 test-minifileset.py
382 test-minifileset.py
349 test-minirst.py
383 test-minirst.py
350 test-mq-git.t
384 test-mq-git.t
351 test-mq-guards.t
385 test-mq-guards.t
352 test-mq-header-date.t
386 test-mq-header-date.t
353 test-mq-header-from.t
387 test-mq-header-from.t
354 test-mq-merge.t
388 test-mq-merge.t
355 test-mq-pull-from-bundle.t
389 test-mq-pull-from-bundle.t
356 test-mq-qclone-http.t
390 test-mq-qclone-http.t
357 test-mq-qdelete.t
391 test-mq-qdelete.t
358 test-mq-qdiff.t
392 test-mq-qdiff.t
359 test-mq-qfold.t
393 test-mq-qfold.t
360 test-mq-qgoto.t
394 test-mq-qgoto.t
361 test-mq-qimport-fail-cleanup.t
395 test-mq-qimport-fail-cleanup.t
362 test-mq-qnew.t
396 test-mq-qnew.t
363 test-mq-qpush-exact.t
397 test-mq-qpush-exact.t
364 test-mq-qpush-fail.t
398 test-mq-qpush-fail.t
365 test-mq-qqueue.t
399 test-mq-qqueue.t
366 test-mq-qrefresh-interactive.t
400 test-mq-qrefresh-interactive.t
367 test-mq-qrefresh-replace-log-message.t
401 test-mq-qrefresh-replace-log-message.t
368 test-mq-qrefresh.t
402 test-mq-qrefresh.t
369 test-mq-qrename.t
403 test-mq-qrename.t
370 test-mq-qsave.t
404 test-mq-qsave.t
371 test-mq-safety.t
405 test-mq-safety.t
372 test-mq-subrepo.t
406 test-mq-subrepo.t
373 test-mq-symlinks.t
407 test-mq-symlinks.t
374 test-mq.t
408 test-mq.t
375 test-mv-cp-st-diff.t
409 test-mv-cp-st-diff.t
376 test-narrow-acl.t
410 test-narrow-acl.t
377 test-narrow-archive.t
411 test-narrow-archive.t
378 test-narrow-clone-no-ellipsis.t
412 test-narrow-clone-no-ellipsis.t
379 test-narrow-clone-non-narrow-server.t
413 test-narrow-clone-non-narrow-server.t
380 test-narrow-clone-nonlinear.t
414 test-narrow-clone-nonlinear.t
381 test-narrow-clone-stream.t
415 test-narrow-clone-stream.t
382 test-narrow-clone.t
416 test-narrow-clone.t
383 test-narrow-commit.t
417 test-narrow-commit.t
384 test-narrow-copies.t
418 test-narrow-copies.t
385 test-narrow-debugcommands.t
419 test-narrow-debugcommands.t
386 test-narrow-debugrebuilddirstate.t
420 test-narrow-debugrebuilddirstate.t
387 test-narrow-exchange-merges.t
421 test-narrow-exchange-merges.t
388 test-narrow-exchange.t
422 test-narrow-exchange.t
389 test-narrow-expanddirstate.t
423 test-narrow-expanddirstate.t
390 test-narrow-merge.t
424 test-narrow-merge.t
391 test-narrow-patch.t
425 test-narrow-patch.t
392 test-narrow-patterns.t
426 test-narrow-patterns.t
393 test-narrow-pull.t
427 test-narrow-pull.t
394 test-narrow-rebase.t
428 test-narrow-rebase.t
395 test-narrow-shallow-merges.t
429 test-narrow-shallow-merges.t
396 test-narrow-shallow.t
430 test-narrow-shallow.t
431 test-narrow-share.t
432 test-narrow-sparse.t
397 test-narrow-strip.t
433 test-narrow-strip.t
398 test-narrow-trackedcmd.t
434 test-narrow-trackedcmd.t
399 test-narrow-update.t
435 test-narrow-update.t
400 test-narrow-widen-no-ellipsis.t
436 test-narrow-widen-no-ellipsis.t
401 test-narrow-widen.t
437 test-narrow-widen.t
402 test-narrow.t
438 test-narrow.t
403 test-nested-repo.t
439 test-nested-repo.t
404 test-newbranch.t
440 test-newbranch.t
405 test-newercgi.t
441 test-newercgi.t
406 test-nointerrupt.t
442 test-nointerrupt.t
407 test-obshistory.t
443 test-obshistory.t
408 test-obsmarker-template.t
444 test-obsmarker-template.t
409 test-obsmarkers-effectflag.t
445 test-obsmarkers-effectflag.t
410 test-obsolete-bounds-checking.t
446 test-obsolete-bounds-checking.t
411 test-obsolete-bundle-strip.t
447 test-obsolete-bundle-strip.t
412 test-obsolete-changeset-exchange.t
448 test-obsolete-changeset-exchange.t
413 test-obsolete-checkheads.t
449 test-obsolete-checkheads.t
414 test-obsolete-distributed.t
450 test-obsolete-distributed.t
415 test-obsolete-divergent.t
451 test-obsolete-divergent.t
416 test-obsolete-tag-cache.t
452 test-obsolete-tag-cache.t
417 test-obsolete.t
453 test-obsolete.t
418 test-origbackup-conflict.t
454 test-origbackup-conflict.t
419 test-pager-legacy.t
455 test-pager-legacy.t
420 test-pager.t
456 test-pager.t
421 test-parents.t
457 test-parents.t
422 test-parse-date.t
458 test-parse-date.t
423 test-parseindex2.py
459 test-parseindex2.py
424 test-patch-offset.t
460 test-patch-offset.t
425 test-patch.t
461 test-patch.t
426 test-patchbomb-bookmark.t
462 test-patchbomb-bookmark.t
427 test-patchbomb-tls.t
463 test-patchbomb-tls.t
428 test-patchbomb.t
464 test-patchbomb.t
429 test-pathconflicts-basic.t
465 test-pathconflicts-basic.t
430 test-pathconflicts-merge.t
466 test-pathconflicts-merge.t
431 test-pathconflicts-update.t
467 test-pathconflicts-update.t
432 test-pathencode.py
468 test-pathencode.py
433 test-pending.t
469 test-pending.t
434 test-permissions.t
470 test-permissions.t
435 test-phases-exchange.t
471 test-phases-exchange.t
436 test-phases.t
472 test-phases.t
437 test-profile.t
473 test-profile.t
438 test-progress.t
474 test-progress.t
439 test-propertycache.py
475 test-propertycache.py
440 test-pull-branch.t
476 test-pull-branch.t
441 test-pull-http.t
477 test-pull-http.t
442 test-pull-permission.t
478 test-pull-permission.t
443 test-pull-pull-corruption.t
479 test-pull-pull-corruption.t
444 test-pull-r.t
480 test-pull-r.t
445 test-pull-update.t
481 test-pull-update.t
446 test-pull.t
482 test-pull.t
447 test-purge.t
483 test-purge.t
448 test-push-cgi.t
484 test-push-cgi.t
449 test-push-checkheads-partial-C1.t
485 test-push-checkheads-partial-C1.t
450 test-push-checkheads-partial-C2.t
486 test-push-checkheads-partial-C2.t
451 test-push-checkheads-partial-C3.t
487 test-push-checkheads-partial-C3.t
452 test-push-checkheads-partial-C4.t
488 test-push-checkheads-partial-C4.t
453 test-push-checkheads-pruned-B1.t
489 test-push-checkheads-pruned-B1.t
454 test-push-checkheads-pruned-B2.t
490 test-push-checkheads-pruned-B2.t
455 test-push-checkheads-pruned-B3.t
491 test-push-checkheads-pruned-B3.t
456 test-push-checkheads-pruned-B4.t
492 test-push-checkheads-pruned-B4.t
457 test-push-checkheads-pruned-B5.t
493 test-push-checkheads-pruned-B5.t
458 test-push-checkheads-pruned-B6.t
494 test-push-checkheads-pruned-B6.t
459 test-push-checkheads-pruned-B7.t
495 test-push-checkheads-pruned-B7.t
460 test-push-checkheads-pruned-B8.t
496 test-push-checkheads-pruned-B8.t
461 test-push-checkheads-superceed-A1.t
497 test-push-checkheads-superceed-A1.t
462 test-push-checkheads-superceed-A2.t
498 test-push-checkheads-superceed-A2.t
463 test-push-checkheads-superceed-A3.t
499 test-push-checkheads-superceed-A3.t
464 test-push-checkheads-superceed-A4.t
500 test-push-checkheads-superceed-A4.t
465 test-push-checkheads-superceed-A5.t
501 test-push-checkheads-superceed-A5.t
466 test-push-checkheads-superceed-A6.t
502 test-push-checkheads-superceed-A6.t
467 test-push-checkheads-superceed-A7.t
503 test-push-checkheads-superceed-A7.t
468 test-push-checkheads-superceed-A8.t
504 test-push-checkheads-superceed-A8.t
469 test-push-checkheads-unpushed-D1.t
505 test-push-checkheads-unpushed-D1.t
470 test-push-checkheads-unpushed-D2.t
506 test-push-checkheads-unpushed-D2.t
471 test-push-checkheads-unpushed-D3.t
507 test-push-checkheads-unpushed-D3.t
472 test-push-checkheads-unpushed-D4.t
508 test-push-checkheads-unpushed-D4.t
473 test-push-checkheads-unpushed-D5.t
509 test-push-checkheads-unpushed-D5.t
474 test-push-checkheads-unpushed-D6.t
510 test-push-checkheads-unpushed-D6.t
475 test-push-checkheads-unpushed-D7.t
511 test-push-checkheads-unpushed-D7.t
476 test-push-http.t
512 test-push-http.t
513 test-push-race.t
477 test-push-warn.t
514 test-push-warn.t
478 test-push.t
515 test-push.t
479 test-pushvars.t
516 test-pushvars.t
480 test-qrecord.t
517 test-qrecord.t
481 test-rebase-abort.t
518 test-rebase-abort.t
482 test-rebase-backup.t
519 test-rebase-backup.t
483 test-rebase-base-flag.t
520 test-rebase-base-flag.t
484 test-rebase-bookmarks.t
521 test-rebase-bookmarks.t
485 test-rebase-brute-force.t
522 test-rebase-brute-force.t
486 test-rebase-cache.t
523 test-rebase-cache.t
487 test-rebase-check-restore.t
524 test-rebase-check-restore.t
488 test-rebase-collapse.t
525 test-rebase-collapse.t
489 test-rebase-conflicts.t
526 test-rebase-conflicts.t
490 test-rebase-dest.t
527 test-rebase-dest.t
491 test-rebase-detach.t
528 test-rebase-detach.t
492 test-rebase-emptycommit.t
529 test-rebase-emptycommit.t
493 test-rebase-inmemory.t
530 test-rebase-inmemory.t
494 test-rebase-interruptions.t
531 test-rebase-interruptions.t
495 test-rebase-issue-noparam-single-rev.t
532 test-rebase-issue-noparam-single-rev.t
496 test-rebase-legacy.t
533 test-rebase-legacy.t
497 test-rebase-mq-skip.t
534 test-rebase-mq-skip.t
498 test-rebase-mq.t
535 test-rebase-mq.t
499 test-rebase-named-branches.t
536 test-rebase-named-branches.t
500 test-rebase-newancestor.t
537 test-rebase-newancestor.t
501 test-rebase-obsolete.t
538 test-rebase-obsolete.t
502 test-rebase-parameters.t
539 test-rebase-parameters.t
503 test-rebase-partial.t
540 test-rebase-partial.t
504 test-rebase-pull.t
541 test-rebase-pull.t
505 test-rebase-rename.t
542 test-rebase-rename.t
506 test-rebase-scenario-global.t
543 test-rebase-scenario-global.t
507 test-rebase-templates.t
544 test-rebase-templates.t
508 test-rebase-transaction.t
545 test-rebase-transaction.t
509 test-rebuildstate.t
546 test-rebuildstate.t
510 test-record.t
547 test-record.t
511 test-releasenotes-formatting.t
548 test-releasenotes-formatting.t
512 test-releasenotes-merging.t
549 test-releasenotes-merging.t
513 test-releasenotes-parsing.t
550 test-releasenotes-parsing.t
514 test-relink.t
551 test-relink.t
552 test-remotefilelog-bad-configs.t
553 test-remotefilelog-bgprefetch.t
554 test-remotefilelog-blame.t
555 test-remotefilelog-bundle2.t
556 test-remotefilelog-bundles.t
557 test-remotefilelog-cacheprocess.t
558 test-remotefilelog-clone-tree.t
559 test-remotefilelog-clone.t
560 test-remotefilelog-gcrepack.t
561 test-remotefilelog-http.t
562 test-remotefilelog-keepset.t
563 test-remotefilelog-local.t
564 test-remotefilelog-log.t
565 test-remotefilelog-partial-shallow.t
566 test-remotefilelog-permissions.t
567 test-remotefilelog-permisssions.t
568 test-remotefilelog-prefetch.t
569 test-remotefilelog-pull-noshallow.t
570 test-remotefilelog-share.t
571 test-remotefilelog-sparse.t
572 test-remotefilelog-tags.t
573 test-remotefilelog-wireproto.t
515 test-remove.t
574 test-remove.t
516 test-removeemptydirs.t
575 test-removeemptydirs.t
517 test-rename-after-merge.t
576 test-rename-after-merge.t
518 test-rename-dir-merge.t
577 test-rename-dir-merge.t
519 test-rename-merge1.t
578 test-rename-merge1.t
520 test-rename-merge2.t
579 test-rename-merge2.t
521 test-rename.t
580 test-rename.t
522 test-repair-strip.t
581 test-repair-strip.t
523 test-repo-compengines.t
582 test-repo-compengines.t
524 test-requires.t
583 test-requires.t
525 test-resolve.t
584 test-resolve.t
526 test-revert-flags.t
585 test-revert-flags.t
527 test-revert-interactive.t
586 test-revert-interactive.t
528 test-revert-unknown.t
587 test-revert-unknown.t
529 test-revert.t
588 test-revert.t
530 test-revisions.t
589 test-revisions.t
531 test-revlog-ancestry.py
590 test-revlog-ancestry.py
532 test-revlog-group-emptyiter.t
591 test-revlog-group-emptyiter.t
533 test-revlog-mmapindex.t
592 test-revlog-mmapindex.t
534 test-revlog-packentry.t
593 test-revlog-packentry.t
535 test-revlog-raw.py
594 test-revlog-raw.py
536 test-revlog-v2.t
595 test-revlog-v2.t
537 test-revlog.t
596 test-revlog.t
538 test-revset-dirstate-parents.t
597 test-revset-dirstate-parents.t
539 test-revset-legacy-lookup.t
598 test-revset-legacy-lookup.t
540 test-revset-outgoing.t
599 test-revset-outgoing.t
541 test-rollback.t
600 test-rollback.t
542 test-run-tests.py
601 test-run-tests.py
543 test-run-tests.t
602 test-run-tests.t
603 test-rust-ancestor.py
544 test-schemes.t
604 test-schemes.t
545 test-serve.t
605 test-serve.t
546 test-setdiscovery.t
606 test-setdiscovery.t
547 test-share.t
607 test-share.t
548 test-shelve.t
608 test-shelve.t
609 test-shelve2.t
549 test-show-stack.t
610 test-show-stack.t
550 test-show-work.t
611 test-show-work.t
551 test-show.t
612 test-show.t
552 test-simple-update.t
613 test-simple-update.t
553 test-simplekeyvaluefile.py
614 test-simplekeyvaluefile.py
554 test-simplemerge.py
615 test-simplemerge.py
555 test-single-head.t
616 test-single-head.t
556 test-sparse-clear.t
617 test-sparse-clear.t
557 test-sparse-clone.t
618 test-sparse-clone.t
558 test-sparse-import.t
619 test-sparse-import.t
559 test-sparse-merges.t
620 test-sparse-merges.t
560 test-sparse-profiles.t
621 test-sparse-profiles.t
561 test-sparse-requirement.t
622 test-sparse-requirement.t
562 test-sparse-verbose-json.t
623 test-sparse-verbose-json.t
563 test-sparse.t
624 test-sparse.t
564 test-split.t
625 test-split.t
565 test-ssh-bundle1.t
626 test-ssh-bundle1.t
566 test-ssh-clone-r.t
627 test-ssh-clone-r.t
567 test-ssh-proto-unbundle.t
628 test-ssh-proto-unbundle.t
568 test-ssh-proto.t
629 test-ssh-proto.t
569 test-ssh-repoerror.t
630 test-ssh-repoerror.t
570 test-ssh.t
631 test-ssh.t
571 test-sshserver.py
632 test-sshserver.py
572 test-stack.t
633 test-stack.t
573 test-status-color.t
634 test-status-color.t
574 test-status-inprocess.py
635 test-status-inprocess.py
575 test-status-rev.t
636 test-status-rev.t
576 test-status-terse.t
637 test-status-terse.t
577 test-status.t
638 test-status.t
578 test-storage.py
639 test-storage.py
579 test-stream-bundle-v2.t
640 test-stream-bundle-v2.t
580 test-strict.t
641 test-strict.t
581 test-strip-cross.t
642 test-strip-cross.t
582 test-strip.t
643 test-strip.t
583 test-subrepo-deep-nested-change.t
644 test-subrepo-deep-nested-change.t
584 test-subrepo-missing.t
645 test-subrepo-missing.t
585 test-subrepo-paths.t
646 test-subrepo-paths.t
586 test-subrepo-recursion.t
647 test-subrepo-recursion.t
587 test-subrepo-relative-path.t
648 test-subrepo-relative-path.t
588 test-subrepo.t
649 test-subrepo.t
589 test-symlink-os-yes-fs-no.py
650 test-symlink-os-yes-fs-no.py
590 test-symlink-placeholder.t
651 test-symlink-placeholder.t
591 test-symlinks.t
652 test-symlinks.t
592 test-tag.t
653 test-tag.t
593 test-tags.t
654 test-tags.t
594 test-template-basic.t
655 test-template-basic.t
595 test-template-functions.t
656 test-template-functions.t
596 test-template-keywords.t
657 test-template-keywords.t
597 test-template-map.t
658 test-template-map.t
598 test-tools.t
659 test-tools.t
599 test-transplant.t
660 test-transplant.t
600 test-treemanifest.t
661 test-treemanifest.t
601 test-ui-color.py
662 test-ui-color.py
602 test-ui-config.py
663 test-ui-config.py
603 test-ui-verbosity.py
664 test-ui-verbosity.py
604 test-unamend.t
665 test-unamend.t
605 test-unbundlehash.t
666 test-unbundlehash.t
606 test-uncommit.t
667 test-uncommit.t
607 test-unified-test.t
668 test-unified-test.t
608 test-unionrepo.t
669 test-unionrepo.t
609 test-unrelated-pull.t
670 test-unrelated-pull.t
610 test-up-local-change.t
671 test-up-local-change.t
611 test-update-branches.t
672 test-update-branches.t
612 test-update-dest.t
673 test-update-dest.t
613 test-update-issue1456.t
674 test-update-issue1456.t
614 test-update-names.t
675 test-update-names.t
615 test-update-reverse.t
676 test-update-reverse.t
616 test-upgrade-repo.t
677 test-upgrade-repo.t
617 test-url-download.t
678 test-url-download.t
618 test-url-rev.t
679 test-url-rev.t
619 test-url.py
680 test-url.py
620 test-username-newline.t
681 test-username-newline.t
621 test-util.py
682 test-util.py
622 test-verify.t
683 test-verify.t
623 test-walk.t
684 test-walk.t
624 test-walkrepo.py
685 test-walkrepo.py
625 test-websub.t
686 test-websub.t
626 test-win32text.t
687 test-win32text.t
627 test-wireproto-clientreactor.py
688 test-wireproto-clientreactor.py
628 test-wireproto-command-branchmap.t
689 test-wireproto-command-branchmap.t
629 test-wireproto-command-changesetdata.t
690 test-wireproto-command-changesetdata.t
630 test-wireproto-command-filedata.t
691 test-wireproto-command-filedata.t
631 test-wireproto-command-filesdata.t
692 test-wireproto-command-filesdata.t
632 test-wireproto-command-heads.t
693 test-wireproto-command-heads.t
633 test-wireproto-command-listkeys.t
694 test-wireproto-command-listkeys.t
634 test-wireproto-command-lookup.t
695 test-wireproto-command-lookup.t
635 test-wireproto-command-manifestdata.t
696 test-wireproto-command-manifestdata.t
636 test-wireproto-command-pushkey.t
697 test-wireproto-command-pushkey.t
637 test-wireproto-command-rawstorefiledata.t
698 test-wireproto-command-rawstorefiledata.t
638 test-wireproto-framing.py
699 test-wireproto-framing.py
639 test-wireproto-serverreactor.py
700 test-wireproto-serverreactor.py
640 test-wireproto.py
701 test-wireproto.py
641 test-wsgirequest.py
702 test-wsgirequest.py
642 test-xdg.t
703 test-xdg.t
@@ -1,325 +1,327 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2
2
3 # Measure the performance of a list of revsets against multiple revisions
3 # Measure the performance of a list of revsets against multiple revisions
4 # defined by parameter. Checkout one by one and run perfrevset with every
4 # defined by parameter. Checkout one by one and run perfrevset with every
5 # revset in the list to benchmark its performance.
5 # revset in the list to benchmark its performance.
6 #
6 #
7 # You should run this from the root of your mercurial repository.
7 # You should run this from the root of your mercurial repository.
8 #
8 #
9 # call with --help for details
9 # call with --help for details
10
10
11 from __future__ import absolute_import, print_function
11 from __future__ import absolute_import, print_function
12 import math
12 import math
13 import optparse # cannot use argparse, python 2.7 only
13 import optparse # cannot use argparse, python 2.7 only
14 import os
14 import os
15 import re
15 import re
16 import subprocess
16 import subprocess
17 import sys
17 import sys
18
18
19 DEFAULTVARIANTS = ['plain', 'min', 'max', 'first', 'last',
19 DEFAULTVARIANTS = ['plain', 'min', 'max', 'first', 'last',
20 'reverse', 'reverse+first', 'reverse+last',
20 'reverse', 'reverse+first', 'reverse+last',
21 'sort', 'sort+first', 'sort+last']
21 'sort', 'sort+first', 'sort+last']
22
22
23 def check_output(*args, **kwargs):
23 def check_output(*args, **kwargs):
24 kwargs.setdefault('stderr', subprocess.PIPE)
24 kwargs.setdefault('stderr', subprocess.PIPE)
25 kwargs.setdefault('stdout', subprocess.PIPE)
25 kwargs.setdefault('stdout', subprocess.PIPE)
26 proc = subprocess.Popen(*args, **kwargs)
26 proc = subprocess.Popen(*args, **kwargs)
27 output, error = proc.communicate()
27 output, error = proc.communicate()
28 if proc.returncode != 0:
28 if proc.returncode != 0:
29 raise subprocess.CalledProcessError(proc.returncode, ' '.join(args[0]))
29 raise subprocess.CalledProcessError(proc.returncode, ' '.join(args[0]))
30 return output
30 return output
31
31
32 def update(rev):
32 def update(rev):
33 """update the repo to a revision"""
33 """update the repo to a revision"""
34 try:
34 try:
35 subprocess.check_call(['hg', 'update', '--quiet', '--check', str(rev)])
35 subprocess.check_call(['hg', 'update', '--quiet', '--check', str(rev)])
36 check_output(['make', 'local'],
36 check_output(['make', 'local'],
37 stderr=None) # suppress output except for error/warning
37 stderr=None) # suppress output except for error/warning
38 except subprocess.CalledProcessError as exc:
38 except subprocess.CalledProcessError as exc:
39 print('update to revision %s failed, aborting'%rev, file=sys.stderr)
39 print('update to revision %s failed, aborting'%rev, file=sys.stderr)
40 sys.exit(exc.returncode)
40 sys.exit(exc.returncode)
41
41
42
42
43 def hg(cmd, repo=None):
43 def hg(cmd, repo=None):
44 """run a mercurial command
44 """run a mercurial command
45
45
46 <cmd> is the list of command + argument,
46 <cmd> is the list of command + argument,
47 <repo> is an optional repository path to run this command in."""
47 <repo> is an optional repository path to run this command in."""
48 fullcmd = ['./hg']
48 fullcmd = ['./hg']
49 if repo is not None:
49 if repo is not None:
50 fullcmd += ['-R', repo]
50 fullcmd += ['-R', repo]
51 fullcmd += ['--config',
51 fullcmd += ['--config',
52 'extensions.perf=' + os.path.join(contribdir, 'perf.py')]
52 'extensions.perf=' + os.path.join(contribdir, 'perf.py')]
53 fullcmd += cmd
53 fullcmd += cmd
54 return check_output(fullcmd, stderr=subprocess.STDOUT)
54 return check_output(fullcmd, stderr=subprocess.STDOUT)
55
55
56 def perf(revset, target=None, contexts=False):
56 def perf(revset, target=None, contexts=False):
57 """run benchmark for this very revset"""
57 """run benchmark for this very revset"""
58 try:
58 try:
59 args = ['perfrevset', revset]
59 args = ['perfrevset']
60 if contexts:
60 if contexts:
61 args.append('--contexts')
61 args.append('--contexts')
62 args.append('--')
63 args.append(revset)
62 output = hg(args, repo=target)
64 output = hg(args, repo=target)
63 return parseoutput(output)
65 return parseoutput(output)
64 except subprocess.CalledProcessError as exc:
66 except subprocess.CalledProcessError as exc:
65 print('abort: cannot run revset benchmark: %s'%exc.cmd, file=sys.stderr)
67 print('abort: cannot run revset benchmark: %s'%exc.cmd, file=sys.stderr)
66 if getattr(exc, 'output', None) is None: # no output before 2.7
68 if getattr(exc, 'output', None) is None: # no output before 2.7
67 print('(no output)', file=sys.stderr)
69 print('(no output)', file=sys.stderr)
68 else:
70 else:
69 print(exc.output, file=sys.stderr)
71 print(exc.output, file=sys.stderr)
70 return None
72 return None
71
73
72 outputre = re.compile(r'! wall (\d+.\d+) comb (\d+.\d+) user (\d+.\d+) '
74 outputre = re.compile(r'! wall (\d+.\d+) comb (\d+.\d+) user (\d+.\d+) '
73 'sys (\d+.\d+) \(best of (\d+)\)')
75 'sys (\d+.\d+) \(best of (\d+)\)')
74
76
75 def parseoutput(output):
77 def parseoutput(output):
76 """parse a textual output into a dict
78 """parse a textual output into a dict
77
79
78 We cannot just use json because we want to compare with old
80 We cannot just use json because we want to compare with old
79 versions of Mercurial that may not support json output.
81 versions of Mercurial that may not support json output.
80 """
82 """
81 match = outputre.search(output)
83 match = outputre.search(output)
82 if not match:
84 if not match:
83 print('abort: invalid output:', file=sys.stderr)
85 print('abort: invalid output:', file=sys.stderr)
84 print(output, file=sys.stderr)
86 print(output, file=sys.stderr)
85 sys.exit(1)
87 sys.exit(1)
86 return {'comb': float(match.group(2)),
88 return {'comb': float(match.group(2)),
87 'count': int(match.group(5)),
89 'count': int(match.group(5)),
88 'sys': float(match.group(3)),
90 'sys': float(match.group(3)),
89 'user': float(match.group(4)),
91 'user': float(match.group(4)),
90 'wall': float(match.group(1)),
92 'wall': float(match.group(1)),
91 }
93 }
92
94
93 def printrevision(rev):
95 def printrevision(rev):
94 """print data about a revision"""
96 """print data about a revision"""
95 sys.stdout.write("Revision ")
97 sys.stdout.write("Revision ")
96 sys.stdout.flush()
98 sys.stdout.flush()
97 subprocess.check_call(['hg', 'log', '--rev', str(rev), '--template',
99 subprocess.check_call(['hg', 'log', '--rev', str(rev), '--template',
98 '{if(tags, " ({tags})")} '
100 '{if(tags, " ({tags})")} '
99 '{rev}:{node|short}: {desc|firstline}\n'])
101 '{rev}:{node|short}: {desc|firstline}\n'])
100
102
101 def idxwidth(nbidx):
103 def idxwidth(nbidx):
102 """return the max width of number used for index
104 """return the max width of number used for index
103
105
104 This is similar to log10(nbidx), but we use custom code here
106 This is similar to log10(nbidx), but we use custom code here
105 because we start with zero and we'd rather not deal with all the
107 because we start with zero and we'd rather not deal with all the
106 extra rounding business that log10 would imply.
108 extra rounding business that log10 would imply.
107 """
109 """
108 nbidx -= 1 # starts at 0
110 nbidx -= 1 # starts at 0
109 idxwidth = 0
111 idxwidth = 0
110 while nbidx:
112 while nbidx:
111 idxwidth += 1
113 idxwidth += 1
112 nbidx //= 10
114 nbidx //= 10
113 if not idxwidth:
115 if not idxwidth:
114 idxwidth = 1
116 idxwidth = 1
115 return idxwidth
117 return idxwidth
116
118
117 def getfactor(main, other, field, sensitivity=0.05):
119 def getfactor(main, other, field, sensitivity=0.05):
118 """return the relative factor between values for 'field' in main and other
120 """return the relative factor between values for 'field' in main and other
119
121
120 Return None if the factor is insignificant (less than <sensitivity>
122 Return None if the factor is insignificant (less than <sensitivity>
121 variation)."""
123 variation)."""
122 factor = 1
124 factor = 1
123 if main is not None:
125 if main is not None:
124 factor = other[field] / main[field]
126 factor = other[field] / main[field]
125 low, high = 1 - sensitivity, 1 + sensitivity
127 low, high = 1 - sensitivity, 1 + sensitivity
126 if (low < factor < high):
128 if (low < factor < high):
127 return None
129 return None
128 return factor
130 return factor
129
131
130 def formatfactor(factor):
132 def formatfactor(factor):
131 """format a factor into a 4 char string
133 """format a factor into a 4 char string
132
134
133 22%
135 22%
134 156%
136 156%
135 x2.4
137 x2.4
136 x23
138 x23
137 x789
139 x789
138 x1e4
140 x1e4
139 x5x7
141 x5x7
140
142
141 """
143 """
142 if factor is None:
144 if factor is None:
143 return ' '
145 return ' '
144 elif factor < 2:
146 elif factor < 2:
145 return '%3i%%' % (factor * 100)
147 return '%3i%%' % (factor * 100)
146 elif factor < 10:
148 elif factor < 10:
147 return 'x%3.1f' % factor
149 return 'x%3.1f' % factor
148 elif factor < 1000:
150 elif factor < 1000:
149 return '%4s' % ('x%i' % factor)
151 return '%4s' % ('x%i' % factor)
150 else:
152 else:
151 order = int(math.log(factor)) + 1
153 order = int(math.log(factor)) + 1
152 while math.log(factor) > 1:
154 while math.log(factor) > 1:
153 factor //= 0
155 factor //= 0
154 return 'x%ix%i' % (factor, order)
156 return 'x%ix%i' % (factor, order)
155
157
156 def formattiming(value):
158 def formattiming(value):
157 """format a value to strictly 8 char, dropping some precision if needed"""
159 """format a value to strictly 8 char, dropping some precision if needed"""
158 if value < 10**7:
160 if value < 10**7:
159 return ('%.6f' % value)[:8]
161 return ('%.6f' % value)[:8]
160 else:
162 else:
161 # value is HUGE very unlikely to happen (4+ month run)
163 # value is HUGE very unlikely to happen (4+ month run)
162 return '%i' % value
164 return '%i' % value
163
165
164 _marker = object()
166 _marker = object()
165 def printresult(variants, idx, data, maxidx, verbose=False, reference=_marker):
167 def printresult(variants, idx, data, maxidx, verbose=False, reference=_marker):
166 """print a line of result to stdout"""
168 """print a line of result to stdout"""
167 mask = '%%0%ii) %%s' % idxwidth(maxidx)
169 mask = '%%0%ii) %%s' % idxwidth(maxidx)
168
170
169 out = []
171 out = []
170 for var in variants:
172 for var in variants:
171 if data[var] is None:
173 if data[var] is None:
172 out.append('error ')
174 out.append('error ')
173 out.append(' ' * 4)
175 out.append(' ' * 4)
174 continue
176 continue
175 out.append(formattiming(data[var]['wall']))
177 out.append(formattiming(data[var]['wall']))
176 if reference is not _marker:
178 if reference is not _marker:
177 factor = None
179 factor = None
178 if reference is not None:
180 if reference is not None:
179 factor = getfactor(reference[var], data[var], 'wall')
181 factor = getfactor(reference[var], data[var], 'wall')
180 out.append(formatfactor(factor))
182 out.append(formatfactor(factor))
181 if verbose:
183 if verbose:
182 out.append(formattiming(data[var]['comb']))
184 out.append(formattiming(data[var]['comb']))
183 out.append(formattiming(data[var]['user']))
185 out.append(formattiming(data[var]['user']))
184 out.append(formattiming(data[var]['sys']))
186 out.append(formattiming(data[var]['sys']))
185 out.append('%6d' % data[var]['count'])
187 out.append('%6d' % data[var]['count'])
186 print(mask % (idx, ' '.join(out)))
188 print(mask % (idx, ' '.join(out)))
187
189
188 def printheader(variants, maxidx, verbose=False, relative=False):
190 def printheader(variants, maxidx, verbose=False, relative=False):
189 header = [' ' * (idxwidth(maxidx) + 1)]
191 header = [' ' * (idxwidth(maxidx) + 1)]
190 for var in variants:
192 for var in variants:
191 if not var:
193 if not var:
192 var = 'iter'
194 var = 'iter'
193 if len(var) > 8:
195 if len(var) > 8:
194 var = var[:3] + '..' + var[-3:]
196 var = var[:3] + '..' + var[-3:]
195 header.append('%-8s' % var)
197 header.append('%-8s' % var)
196 if relative:
198 if relative:
197 header.append(' ')
199 header.append(' ')
198 if verbose:
200 if verbose:
199 header.append('%-8s' % 'comb')
201 header.append('%-8s' % 'comb')
200 header.append('%-8s' % 'user')
202 header.append('%-8s' % 'user')
201 header.append('%-8s' % 'sys')
203 header.append('%-8s' % 'sys')
202 header.append('%6s' % 'count')
204 header.append('%6s' % 'count')
203 print(' '.join(header))
205 print(' '.join(header))
204
206
205 def getrevs(spec):
207 def getrevs(spec):
206 """get the list of rev matched by a revset"""
208 """get the list of rev matched by a revset"""
207 try:
209 try:
208 out = check_output(['hg', 'log', '--template={rev}\n', '--rev', spec])
210 out = check_output(['hg', 'log', '--template={rev}\n', '--rev', spec])
209 except subprocess.CalledProcessError as exc:
211 except subprocess.CalledProcessError as exc:
210 print("abort, can't get revision from %s"%spec, file=sys.stderr)
212 print("abort, can't get revision from %s"%spec, file=sys.stderr)
211 sys.exit(exc.returncode)
213 sys.exit(exc.returncode)
212 return [r for r in out.split() if r]
214 return [r for r in out.split() if r]
213
215
214
216
215 def applyvariants(revset, variant):
217 def applyvariants(revset, variant):
216 if variant == 'plain':
218 if variant == 'plain':
217 return revset
219 return revset
218 for var in variant.split('+'):
220 for var in variant.split('+'):
219 revset = '%s(%s)' % (var, revset)
221 revset = '%s(%s)' % (var, revset)
220 return revset
222 return revset
221
223
222 helptext="""This script will run multiple variants of provided revsets using
224 helptext="""This script will run multiple variants of provided revsets using
223 different revisions in your mercurial repository. After the benchmark are run
225 different revisions in your mercurial repository. After the benchmark are run
224 summary output is provided. Use it to demonstrate speed improvements or pin
226 summary output is provided. Use it to demonstrate speed improvements or pin
225 point regressions. Revsets to run are specified in a file (or from stdin), one
227 point regressions. Revsets to run are specified in a file (or from stdin), one
226 revsets per line. Line starting with '#' will be ignored, allowing insertion of
228 revsets per line. Line starting with '#' will be ignored, allowing insertion of
227 comments."""
229 comments."""
228 parser = optparse.OptionParser(usage="usage: %prog [options] <revs>",
230 parser = optparse.OptionParser(usage="usage: %prog [options] <revs>",
229 description=helptext)
231 description=helptext)
230 parser.add_option("-f", "--file",
232 parser.add_option("-f", "--file",
231 help="read revset from FILE (stdin if omitted)",
233 help="read revset from FILE (stdin if omitted)",
232 metavar="FILE")
234 metavar="FILE")
233 parser.add_option("-R", "--repo",
235 parser.add_option("-R", "--repo",
234 help="run benchmark on REPO", metavar="REPO")
236 help="run benchmark on REPO", metavar="REPO")
235
237
236 parser.add_option("-v", "--verbose",
238 parser.add_option("-v", "--verbose",
237 action='store_true',
239 action='store_true',
238 help="display all timing data (not just best total time)")
240 help="display all timing data (not just best total time)")
239
241
240 parser.add_option("", "--variants",
242 parser.add_option("", "--variants",
241 default=','.join(DEFAULTVARIANTS),
243 default=','.join(DEFAULTVARIANTS),
242 help="comma separated list of variant to test "
244 help="comma separated list of variant to test "
243 "(eg: plain,min,sorted) (plain = no modification)")
245 "(eg: plain,min,sorted) (plain = no modification)")
244 parser.add_option('', '--contexts',
246 parser.add_option('', '--contexts',
245 action='store_true',
247 action='store_true',
246 help='obtain changectx from results instead of integer revs')
248 help='obtain changectx from results instead of integer revs')
247
249
248 (options, args) = parser.parse_args()
250 (options, args) = parser.parse_args()
249
251
250 if not args:
252 if not args:
251 parser.print_help()
253 parser.print_help()
252 sys.exit(255)
254 sys.exit(255)
253
255
254 # the directory where both this script and the perf.py extension live.
256 # the directory where both this script and the perf.py extension live.
255 contribdir = os.path.dirname(__file__)
257 contribdir = os.path.dirname(__file__)
256
258
257 revsetsfile = sys.stdin
259 revsetsfile = sys.stdin
258 if options.file:
260 if options.file:
259 revsetsfile = open(options.file)
261 revsetsfile = open(options.file)
260
262
261 revsets = [l.strip() for l in revsetsfile if not l.startswith('#')]
263 revsets = [l.strip() for l in revsetsfile if not l.startswith('#')]
262 revsets = [l for l in revsets if l]
264 revsets = [l for l in revsets if l]
263
265
264 print("Revsets to benchmark")
266 print("Revsets to benchmark")
265 print("----------------------------")
267 print("----------------------------")
266
268
267 for idx, rset in enumerate(revsets):
269 for idx, rset in enumerate(revsets):
268 print("%i) %s" % (idx, rset))
270 print("%i) %s" % (idx, rset))
269
271
270 print("----------------------------")
272 print("----------------------------")
271 print()
273 print()
272
274
273 revs = []
275 revs = []
274 for a in args:
276 for a in args:
275 revs.extend(getrevs(a))
277 revs.extend(getrevs(a))
276
278
277 variants = options.variants.split(',')
279 variants = options.variants.split(',')
278
280
279 results = []
281 results = []
280 for r in revs:
282 for r in revs:
281 print("----------------------------")
283 print("----------------------------")
282 printrevision(r)
284 printrevision(r)
283 print("----------------------------")
285 print("----------------------------")
284 update(r)
286 update(r)
285 res = []
287 res = []
286 results.append(res)
288 results.append(res)
287 printheader(variants, len(revsets), verbose=options.verbose)
289 printheader(variants, len(revsets), verbose=options.verbose)
288 for idx, rset in enumerate(revsets):
290 for idx, rset in enumerate(revsets):
289 varres = {}
291 varres = {}
290 for var in variants:
292 for var in variants:
291 varrset = applyvariants(rset, var)
293 varrset = applyvariants(rset, var)
292 data = perf(varrset, target=options.repo, contexts=options.contexts)
294 data = perf(varrset, target=options.repo, contexts=options.contexts)
293 varres[var] = data
295 varres[var] = data
294 res.append(varres)
296 res.append(varres)
295 printresult(variants, idx, varres, len(revsets),
297 printresult(variants, idx, varres, len(revsets),
296 verbose=options.verbose)
298 verbose=options.verbose)
297 sys.stdout.flush()
299 sys.stdout.flush()
298 print("----------------------------")
300 print("----------------------------")
299
301
300
302
301 print("""
303 print("""
302
304
303 Result by revset
305 Result by revset
304 ================
306 ================
305 """)
307 """)
306
308
307 print('Revision:')
309 print('Revision:')
308 for idx, rev in enumerate(revs):
310 for idx, rev in enumerate(revs):
309 sys.stdout.write('%i) ' % idx)
311 sys.stdout.write('%i) ' % idx)
310 sys.stdout.flush()
312 sys.stdout.flush()
311 printrevision(rev)
313 printrevision(rev)
312
314
313 print()
315 print()
314 print()
316 print()
315
317
316 for ridx, rset in enumerate(revsets):
318 for ridx, rset in enumerate(revsets):
317
319
318 print("revset #%i: %s" % (ridx, rset))
320 print("revset #%i: %s" % (ridx, rset))
319 printheader(variants, len(results), verbose=options.verbose, relative=True)
321 printheader(variants, len(results), verbose=options.verbose, relative=True)
320 ref = None
322 ref = None
321 for idx, data in enumerate(results):
323 for idx, data in enumerate(results):
322 printresult(variants, idx, data[ridx], len(results),
324 printresult(variants, idx, data[ridx], len(results),
323 verbose=options.verbose, reference=ref)
325 verbose=options.verbose, reference=ref)
324 ref = data[ridx]
326 ref = data[ridx]
325 print()
327 print()
@@ -1,63 +1,64 b''
1 <?xml version="1.0" encoding="utf-8"?>
1 <?xml version="1.0" encoding="utf-8"?>
2 <Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
2 <Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
3
3
4 <?include guids.wxi ?>
4 <?include guids.wxi ?>
5 <?include defines.wxi ?>
5 <?include defines.wxi ?>
6
6
7 <Fragment>
7 <Fragment>
8 <ComponentGroup Id='helpFolder'>
8 <ComponentGroup Id='helpFolder'>
9 <ComponentRef Id='help.root' />
9 <ComponentRef Id='help.root' />
10 <ComponentRef Id='help.internals' />
10 <ComponentRef Id='help.internals' />
11 </ComponentGroup>
11 </ComponentGroup>
12 </Fragment>
12 </Fragment>
13
13
14 <Fragment>
14 <Fragment>
15 <DirectoryRef Id="INSTALLDIR">
15 <DirectoryRef Id="INSTALLDIR">
16 <Directory Id="helpdir" Name="help" FileSource="$(var.SourceDir)">
16 <Directory Id="helpdir" Name="help" FileSource="$(var.SourceDir)">
17 <Component Id="help.root" Guid="$(var.help.root.guid)" Win64='$(var.IsX64)'>
17 <Component Id="help.root" Guid="$(var.help.root.guid)" Win64='$(var.IsX64)'>
18 <File Name="bundlespec.txt" />
18 <File Name="bundlespec.txt" />
19 <File Name="color.txt" />
19 <File Name="color.txt" />
20 <File Name="config.txt" KeyPath="yes" />
20 <File Name="config.txt" KeyPath="yes" />
21 <File Name="dates.txt" />
21 <File Name="dates.txt" />
22 <File Name="deprecated.txt" />
22 <File Name="deprecated.txt" />
23 <File Name="diffs.txt" />
23 <File Name="diffs.txt" />
24 <File Name="environment.txt" />
24 <File Name="environment.txt" />
25 <File Name="extensions.txt" />
25 <File Name="extensions.txt" />
26 <File Name="filesets.txt" />
26 <File Name="filesets.txt" />
27 <File Name="flags.txt" />
27 <File Name="flags.txt" />
28 <File Name="glossary.txt" />
28 <File Name="glossary.txt" />
29 <File Name="hgignore.txt" />
29 <File Name="hgignore.txt" />
30 <File Name="hgweb.txt" />
30 <File Name="hgweb.txt" />
31 <File Name="merge-tools.txt" />
31 <File Name="merge-tools.txt" />
32 <File Name="pager.txt" />
32 <File Name="pager.txt" />
33 <File Name="patterns.txt" />
33 <File Name="patterns.txt" />
34 <File Name="phases.txt" />
34 <File Name="phases.txt" />
35 <File Name="revisions.txt" />
35 <File Name="revisions.txt" />
36 <File Name="scripting.txt" />
36 <File Name="scripting.txt" />
37 <File Name="subrepos.txt" />
37 <File Name="subrepos.txt" />
38 <File Name="templates.txt" />
38 <File Name="templates.txt" />
39 <File Name="urls.txt" />
39 <File Name="urls.txt" />
40 </Component>
40 </Component>
41
41
42 <Directory Id="help.internaldir" Name="internals">
42 <Directory Id="help.internaldir" Name="internals">
43 <Component Id="help.internals" Guid="$(var.help.internals.guid)" Win64='$(var.IsX64)'>
43 <Component Id="help.internals" Guid="$(var.help.internals.guid)" Win64='$(var.IsX64)'>
44 <File Id="internals.bundle2.txt" Name="bundle2.txt" />
44 <File Id="internals.bundle2.txt" Name="bundle2.txt" />
45 <File Id="internals.bundles.txt" Name="bundles.txt" KeyPath="yes" />
45 <File Id="internals.bundles.txt" Name="bundles.txt" KeyPath="yes" />
46 <File Id="internals.cbor.txt" Name="cbor.txt" />
46 <File Id="internals.cbor.txt" Name="cbor.txt" />
47 <File Id="internals.censor.txt" Name="censor.txt" />
47 <File Id="internals.censor.txt" Name="censor.txt" />
48 <File Id="internals.changegroups.txt" Name="changegroups.txt" />
48 <File Id="internals.changegroups.txt" Name="changegroups.txt" />
49 <File Id="internals.config.txt" Name="config.txt" />
49 <File Id="internals.config.txt" Name="config.txt" />
50 <File Id="internals.extensions.txt" Name="extensions.txt" />
50 <File Id="internals.linelog.txt" Name="linelog.txt" />
51 <File Id="internals.linelog.txt" Name="linelog.txt" />
51 <File Id="internals.requirements.txt" Name="requirements.txt" />
52 <File Id="internals.requirements.txt" Name="requirements.txt" />
52 <File Id="internals.revlogs.txt" Name="revlogs.txt" />
53 <File Id="internals.revlogs.txt" Name="revlogs.txt" />
53 <File Id="internals.wireprotocol.txt" Name="wireprotocol.txt" />
54 <File Id="internals.wireprotocol.txt" Name="wireprotocol.txt" />
54 <File Id="internals.wireprotocolrpc.txt" Name="wireprotocolrpc.txt" />
55 <File Id="internals.wireprotocolrpc.txt" Name="wireprotocolrpc.txt" />
55 <File Id="internals.wireprotocolv2.txt" Name="wireprotocolv2.txt" />
56 <File Id="internals.wireprotocolv2.txt" Name="wireprotocolv2.txt" />
56 </Component>
57 </Component>
57 </Directory>
58 </Directory>
58
59
59 </Directory>
60 </Directory>
60 </DirectoryRef>
61 </DirectoryRef>
61 </Fragment>
62 </Fragment>
62
63
63 </Wix>
64 </Wix>
@@ -1,66 +1,76 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # docchecker - look for problematic markup
3 # docchecker - look for problematic markup
4 #
4 #
5 # Copyright 2016 timeless <timeless@mozdev.org> and others
5 # Copyright 2016 timeless <timeless@mozdev.org> and others
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from __future__ import absolute_import, print_function
10 from __future__ import absolute_import, print_function
11
11
12 import os
12 import re
13 import re
13 import sys
14 import sys
14
15
15 leadingline = re.compile(r'(^\s*)(\S.*)$')
16 try:
17 import msvcrt
18 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
19 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
20 except ImportError:
21 pass
22
23 stdout = getattr(sys.stdout, 'buffer', sys.stdout)
24
25 leadingline = re.compile(br'(^\s*)(\S.*)$')
16
26
17 checks = [
27 checks = [
18 (r""":hg:`[^`]*'[^`]*`""",
28 (br""":hg:`[^`]*'[^`]*`""",
19 """warning: please avoid nesting ' in :hg:`...`"""),
29 b"""warning: please avoid nesting ' in :hg:`...`"""),
20 (r'\w:hg:`',
30 (br'\w:hg:`',
21 'warning: please have a space before :hg:'),
31 b'warning: please have a space before :hg:'),
22 (r"""(?:[^a-z][^'.])hg ([^,;"`]*'(?!hg)){2}""",
32 (br"""(?:[^a-z][^'.])hg ([^,;"`]*'(?!hg)){2}""",
23 '''warning: please use " instead of ' for hg ... "..."'''),
33 b'''warning: please use " instead of ' for hg ... "..."'''),
24 ]
34 ]
25
35
26 def check(line):
36 def check(line):
27 messages = []
37 messages = []
28 for match, msg in checks:
38 for match, msg in checks:
29 if re.search(match, line):
39 if re.search(match, line):
30 messages.append(msg)
40 messages.append(msg)
31 if messages:
41 if messages:
32 print(line)
42 stdout.write(b'%s\n' % line)
33 for msg in messages:
43 for msg in messages:
34 print(msg)
44 stdout.write(b'%s\n' % msg)
35
45
36 def work(file):
46 def work(file):
37 (llead, lline) = ('', '')
47 (llead, lline) = (b'', b'')
38
48
39 for line in file:
49 for line in file:
40 # this section unwraps lines
50 # this section unwraps lines
41 match = leadingline.match(line)
51 match = leadingline.match(line)
42 if not match:
52 if not match:
43 check(lline)
53 check(lline)
44 (llead, lline) = ('', '')
54 (llead, lline) = (b'', b'')
45 continue
55 continue
46
56
47 lead, line = match.group(1), match.group(2)
57 lead, line = match.group(1), match.group(2)
48 if (lead == llead):
58 if (lead == llead):
49 if (lline != ''):
59 if (lline != b''):
50 lline += ' ' + line
60 lline += b' ' + line
51 else:
61 else:
52 lline = line
62 lline = line
53 else:
63 else:
54 check(lline)
64 check(lline)
55 (llead, lline) = (lead, line)
65 (llead, lline) = (lead, line)
56 check(lline)
66 check(lline)
57
67
58 def main():
68 def main():
59 for f in sys.argv[1:]:
69 for f in sys.argv[1:]:
60 try:
70 try:
61 with open(f) as file:
71 with open(f, 'rb') as file:
62 work(file)
72 work(file)
63 except BaseException as e:
73 except BaseException as e:
64 print("failed to process %s: %s" % (f, e))
74 sys.stdout.write(r"failed to process %s: %s\n" % (f, e))
65
75
66 main()
76 main()
@@ -1,230 +1,240 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 """usage: %s DOC ...
2 """usage: %s DOC ...
3
3
4 where DOC is the name of a document
4 where DOC is the name of a document
5 """
5 """
6
6
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import os
9 import os
10 import sys
10 import sys
11 import textwrap
11 import textwrap
12
12
13 try:
14 import msvcrt
15 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
16 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
17 except ImportError:
18 pass
19
13 # This script is executed during installs and may not have C extensions
20 # This script is executed during installs and may not have C extensions
14 # available. Relax C module requirements.
21 # available. Relax C module requirements.
15 os.environ['HGMODULEPOLICY'] = 'allow'
22 os.environ[r'HGMODULEPOLICY'] = r'allow'
16 # import from the live mercurial repo
23 # import from the live mercurial repo
17 sys.path.insert(0, "..")
24 sys.path.insert(0, r"..")
18 from mercurial import demandimport; demandimport.enable()
25 from mercurial import demandimport; demandimport.enable()
19 # Load util so that the locale path is set by i18n.setdatapath() before
26 # Load util so that the locale path is set by i18n.setdatapath() before
20 # calling _().
27 # calling _().
21 from mercurial import util
28 from mercurial import util
22 util.datapath
29 util.datapath
23 from mercurial import (
30 from mercurial import (
24 commands,
31 commands,
32 encoding,
25 extensions,
33 extensions,
26 help,
34 help,
27 minirst,
35 minirst,
36 pycompat,
28 ui as uimod,
37 ui as uimod,
29 )
38 )
30 from mercurial.i18n import (
39 from mercurial.i18n import (
31 gettext,
40 gettext,
32 _,
41 _,
33 )
42 )
34
43
35 table = commands.table
44 table = commands.table
36 globalopts = commands.globalopts
45 globalopts = commands.globalopts
37 helptable = help.helptable
46 helptable = help.helptable
38 loaddoc = help.loaddoc
47 loaddoc = help.loaddoc
39
48
40 def get_desc(docstr):
49 def get_desc(docstr):
41 if not docstr:
50 if not docstr:
42 return "", ""
51 return b"", b""
43 # sanitize
52 # sanitize
44 docstr = docstr.strip("\n")
53 docstr = docstr.strip(b"\n")
45 docstr = docstr.rstrip()
54 docstr = docstr.rstrip()
46 shortdesc = docstr.splitlines()[0].strip()
55 shortdesc = docstr.splitlines()[0].strip()
47
56
48 i = docstr.find("\n")
57 i = docstr.find(b"\n")
49 if i != -1:
58 if i != -1:
50 desc = docstr[i + 2:]
59 desc = docstr[i + 2:]
51 else:
60 else:
52 desc = shortdesc
61 desc = shortdesc
53
62
54 desc = textwrap.dedent(desc)
63 desc = textwrap.dedent(desc.decode('latin1')).encode('latin1')
55
64
56 return (shortdesc, desc)
65 return (shortdesc, desc)
57
66
58 def get_opts(opts):
67 def get_opts(opts):
59 for opt in opts:
68 for opt in opts:
60 if len(opt) == 5:
69 if len(opt) == 5:
61 shortopt, longopt, default, desc, optlabel = opt
70 shortopt, longopt, default, desc, optlabel = opt
62 else:
71 else:
63 shortopt, longopt, default, desc = opt
72 shortopt, longopt, default, desc = opt
64 optlabel = _("VALUE")
73 optlabel = _(b"VALUE")
65 allopts = []
74 allopts = []
66 if shortopt:
75 if shortopt:
67 allopts.append("-%s" % shortopt)
76 allopts.append(b"-%s" % shortopt)
68 if longopt:
77 if longopt:
69 allopts.append("--%s" % longopt)
78 allopts.append(b"--%s" % longopt)
70 if isinstance(default, list):
79 if isinstance(default, list):
71 allopts[-1] += " <%s[+]>" % optlabel
80 allopts[-1] += b" <%s[+]>" % optlabel
72 elif (default is not None) and not isinstance(default, bool):
81 elif (default is not None) and not isinstance(default, bool):
73 allopts[-1] += " <%s>" % optlabel
82 allopts[-1] += b" <%s>" % optlabel
74 if '\n' in desc:
83 if b'\n' in desc:
75 # only remove line breaks and indentation
84 # only remove line breaks and indentation
76 desc = ' '.join(l.lstrip() for l in desc.split('\n'))
85 desc = b' '.join(l.lstrip() for l in desc.split(b'\n'))
77 desc += default and _(" (default: %s)") % default or ""
86 desc += default and _(b" (default: %s)") % bytes(default) or b""
78 yield (", ".join(allopts), desc)
87 yield (b", ".join(allopts), desc)
79
88
80 def get_cmd(cmd, cmdtable):
89 def get_cmd(cmd, cmdtable):
81 d = {}
90 d = {}
82 attr = cmdtable[cmd]
91 attr = cmdtable[cmd]
83 cmds = cmd.lstrip("^").split("|")
92 cmds = cmd.lstrip(b"^").split(b"|")
84
93
85 d['cmd'] = cmds[0]
94 d[b'cmd'] = cmds[0]
86 d['aliases'] = cmd.split("|")[1:]
95 d[b'aliases'] = cmd.split(b"|")[1:]
87 d['desc'] = get_desc(gettext(attr[0].__doc__))
96 d[b'desc'] = get_desc(gettext(pycompat.getdoc(attr[0])))
88 d['opts'] = list(get_opts(attr[1]))
97 d[b'opts'] = list(get_opts(attr[1]))
89
98
90 s = 'hg ' + cmds[0]
99 s = b'hg ' + cmds[0]
91 if len(attr) > 2:
100 if len(attr) > 2:
92 if not attr[2].startswith('hg'):
101 if not attr[2].startswith(b'hg'):
93 s += ' ' + attr[2]
102 s += b' ' + attr[2]
94 else:
103 else:
95 s = attr[2]
104 s = attr[2]
96 d['synopsis'] = s.strip()
105 d[b'synopsis'] = s.strip()
97
106
98 return d
107 return d
99
108
100 def showdoc(ui):
109 def showdoc(ui):
101 # print options
110 # print options
102 ui.write(minirst.section(_("Options")))
111 ui.write(minirst.section(_(b"Options")))
103 multioccur = False
112 multioccur = False
104 for optstr, desc in get_opts(globalopts):
113 for optstr, desc in get_opts(globalopts):
105 ui.write("%s\n %s\n\n" % (optstr, desc))
114 ui.write(b"%s\n %s\n\n" % (optstr, desc))
106 if optstr.endswith("[+]>"):
115 if optstr.endswith(b"[+]>"):
107 multioccur = True
116 multioccur = True
108 if multioccur:
117 if multioccur:
109 ui.write(_("\n[+] marked option can be specified multiple times\n"))
118 ui.write(_(b"\n[+] marked option can be specified multiple times\n"))
110 ui.write("\n")
119 ui.write(b"\n")
111
120
112 # print cmds
121 # print cmds
113 ui.write(minirst.section(_("Commands")))
122 ui.write(minirst.section(_(b"Commands")))
114 commandprinter(ui, table, minirst.subsection)
123 commandprinter(ui, table, minirst.subsection)
115
124
116 # print help topics
125 # print help topics
117 # The config help topic is included in the hgrc.5 man page.
126 # The config help topic is included in the hgrc.5 man page.
118 helpprinter(ui, helptable, minirst.section, exclude=['config'])
127 helpprinter(ui, helptable, minirst.section, exclude=[b'config'])
119
128
120 ui.write(minirst.section(_("Extensions")))
129 ui.write(minirst.section(_(b"Extensions")))
121 ui.write(_("This section contains help for extensions that are "
130 ui.write(_(b"This section contains help for extensions that are "
122 "distributed together with Mercurial. Help for other "
131 b"distributed together with Mercurial. Help for other "
123 "extensions is available in the help system."))
132 b"extensions is available in the help system."))
124 ui.write(("\n\n"
133 ui.write((b"\n\n"
125 ".. contents::\n"
134 b".. contents::\n"
126 " :class: htmlonly\n"
135 b" :class: htmlonly\n"
127 " :local:\n"
136 b" :local:\n"
128 " :depth: 1\n\n"))
137 b" :depth: 1\n\n"))
129
138
130 for extensionname in sorted(allextensionnames()):
139 for extensionname in sorted(allextensionnames()):
131 mod = extensions.load(ui, extensionname, None)
140 mod = extensions.load(ui, extensionname, None)
132 ui.write(minirst.subsection(extensionname))
141 ui.write(minirst.subsection(extensionname))
133 ui.write("%s\n\n" % gettext(mod.__doc__))
142 ui.write(b"%s\n\n" % gettext(pycompat.getdoc(mod)))
134 cmdtable = getattr(mod, 'cmdtable', None)
143 cmdtable = getattr(mod, 'cmdtable', None)
135 if cmdtable:
144 if cmdtable:
136 ui.write(minirst.subsubsection(_('Commands')))
145 ui.write(minirst.subsubsection(_(b'Commands')))
137 commandprinter(ui, cmdtable, minirst.subsubsubsection)
146 commandprinter(ui, cmdtable, minirst.subsubsubsection)
138
147
139 def showtopic(ui, topic):
148 def showtopic(ui, topic):
140 extrahelptable = [
149 extrahelptable = [
141 (["common"], '', loaddoc('common'), help.TOPIC_CATEGORY_MISC),
150 ([b"common"], b'', loaddoc(b'common'), help.TOPIC_CATEGORY_MISC),
142 (["hg.1"], '', loaddoc('hg.1'), help.TOPIC_CATEGORY_CONFIG),
151 ([b"hg.1"], b'', loaddoc(b'hg.1'), help.TOPIC_CATEGORY_CONFIG),
143 (["hg-ssh.8"], '', loaddoc('hg-ssh.8'), help.TOPIC_CATEGORY_CONFIG),
152 ([b"hg-ssh.8"], b'', loaddoc(b'hg-ssh.8'), help.TOPIC_CATEGORY_CONFIG),
144 (["hgignore.5"], '', loaddoc('hgignore.5'), help.TOPIC_CATEGORY_CONFIG),
153 ([b"hgignore.5"], b'', loaddoc(b'hgignore.5'),
145 (["hgrc.5"], '', loaddoc('hgrc.5'), help.TOPIC_CATEGORY_CONFIG),
146 (["hgignore.5.gendoc"], '', loaddoc('hgignore'),
147 help.TOPIC_CATEGORY_CONFIG),
154 help.TOPIC_CATEGORY_CONFIG),
148 (["hgrc.5.gendoc"], '', loaddoc('config'), help.TOPIC_CATEGORY_CONFIG),
155 ([b"hgrc.5"], b'', loaddoc(b'hgrc.5'), help.TOPIC_CATEGORY_CONFIG),
156 ([b"hgignore.5.gendoc"], b'', loaddoc(b'hgignore'),
157 help.TOPIC_CATEGORY_CONFIG),
158 ([b"hgrc.5.gendoc"], b'', loaddoc(b'config'),
159 help.TOPIC_CATEGORY_CONFIG),
149 ]
160 ]
150 helpprinter(ui, helptable + extrahelptable, None, include=[topic])
161 helpprinter(ui, helptable + extrahelptable, None, include=[topic])
151
162
152 def helpprinter(ui, helptable, sectionfunc, include=[], exclude=[]):
163 def helpprinter(ui, helptable, sectionfunc, include=[], exclude=[]):
153 for h in helptable:
164 for h in helptable:
154 names, sec, doc = h[0:3]
165 names, sec, doc = h[0:3]
155 if exclude and names[0] in exclude:
166 if exclude and names[0] in exclude:
156 continue
167 continue
157 if include and names[0] not in include:
168 if include and names[0] not in include:
158 continue
169 continue
159 for name in names:
170 for name in names:
160 ui.write(".. _%s:\n" % name)
171 ui.write(b".. _%s:\n" % name)
161 ui.write("\n")
172 ui.write(b"\n")
162 if sectionfunc:
173 if sectionfunc:
163 ui.write(sectionfunc(sec))
174 ui.write(sectionfunc(sec))
164 if callable(doc):
175 if callable(doc):
165 doc = doc(ui)
176 doc = doc(ui)
166 ui.write(doc)
177 ui.write(doc)
167 ui.write("\n")
178 ui.write(b"\n")
168
179
169 def commandprinter(ui, cmdtable, sectionfunc):
180 def commandprinter(ui, cmdtable, sectionfunc):
170 h = {}
181 h = {}
171 for c, attr in cmdtable.items():
182 for c, attr in cmdtable.items():
172 f = c.split("|")[0]
183 f = c.split(b"|")[0]
173 f = f.lstrip("^")
184 f = f.lstrip(b"^")
174 h[f] = c
185 h[f] = c
175 cmds = h.keys()
186 cmds = h.keys()
176 cmds.sort()
177
187
178 for f in cmds:
188 for f in sorted(cmds):
179 if f.startswith("debug"):
189 if f.startswith(b"debug"):
180 continue
190 continue
181 d = get_cmd(h[f], cmdtable)
191 d = get_cmd(h[f], cmdtable)
182 ui.write(sectionfunc(d['cmd']))
192 ui.write(sectionfunc(d[b'cmd']))
183 # short description
193 # short description
184 ui.write(d['desc'][0])
194 ui.write(d[b'desc'][0])
185 # synopsis
195 # synopsis
186 ui.write("::\n\n")
196 ui.write(b"::\n\n")
187 synopsislines = d['synopsis'].splitlines()
197 synopsislines = d[b'synopsis'].splitlines()
188 for line in synopsislines:
198 for line in synopsislines:
189 # some commands (such as rebase) have a multi-line
199 # some commands (such as rebase) have a multi-line
190 # synopsis
200 # synopsis
191 ui.write(" %s\n" % line)
201 ui.write(b" %s\n" % line)
192 ui.write('\n')
202 ui.write(b'\n')
193 # description
203 # description
194 ui.write("%s\n\n" % d['desc'][1])
204 ui.write(b"%s\n\n" % d[b'desc'][1])
195 # options
205 # options
196 opt_output = list(d['opts'])
206 opt_output = list(d[b'opts'])
197 if opt_output:
207 if opt_output:
198 opts_len = max([len(line[0]) for line in opt_output])
208 opts_len = max([len(line[0]) for line in opt_output])
199 ui.write(_("Options:\n\n"))
209 ui.write(_(b"Options:\n\n"))
200 multioccur = False
210 multioccur = False
201 for optstr, desc in opt_output:
211 for optstr, desc in opt_output:
202 if desc:
212 if desc:
203 s = "%-*s %s" % (opts_len, optstr, desc)
213 s = b"%-*s %s" % (opts_len, optstr, desc)
204 else:
214 else:
205 s = optstr
215 s = optstr
206 ui.write("%s\n" % s)
216 ui.write(b"%s\n" % s)
207 if optstr.endswith("[+]>"):
217 if optstr.endswith(b"[+]>"):
208 multioccur = True
218 multioccur = True
209 if multioccur:
219 if multioccur:
210 ui.write(_("\n[+] marked option can be specified"
220 ui.write(_(b"\n[+] marked option can be specified"
211 " multiple times\n"))
221 b" multiple times\n"))
212 ui.write("\n")
222 ui.write(b"\n")
213 # aliases
223 # aliases
214 if d['aliases']:
224 if d[b'aliases']:
215 ui.write(_(" aliases: %s\n\n") % " ".join(d['aliases']))
225 ui.write(_(b" aliases: %s\n\n") % b" ".join(d[b'aliases']))
216
226
217
227
218 def allextensionnames():
228 def allextensionnames():
219 return extensions.enabled().keys() + extensions.disabled().keys()
229 return set(extensions.enabled().keys()) | set(extensions.disabled().keys())
220
230
221 if __name__ == "__main__":
231 if __name__ == "__main__":
222 doc = 'hg.1.gendoc'
232 doc = b'hg.1.gendoc'
223 if len(sys.argv) > 1:
233 if len(sys.argv) > 1:
224 doc = sys.argv[1]
234 doc = encoding.strtolocal(sys.argv[1])
225
235
226 ui = uimod.ui.load()
236 ui = uimod.ui.load()
227 if doc == 'hg.1.gendoc':
237 if doc == b'hg.1.gendoc':
228 showdoc(ui)
238 showdoc(ui)
229 else:
239 else:
230 showtopic(ui, sys.argv[1])
240 showtopic(ui, encoding.strtolocal(sys.argv[1]))
@@ -1,1026 +1,1028 b''
1 # absorb.py
1 # absorb.py
2 #
2 #
3 # Copyright 2016 Facebook, Inc.
3 # Copyright 2016 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """apply working directory changes to changesets (EXPERIMENTAL)
8 """apply working directory changes to changesets (EXPERIMENTAL)
9
9
10 The absorb extension provides a command to use annotate information to
10 The absorb extension provides a command to use annotate information to
11 amend modified chunks into the corresponding non-public changesets.
11 amend modified chunks into the corresponding non-public changesets.
12
12
13 ::
13 ::
14
14
15 [absorb]
15 [absorb]
16 # only check 50 recent non-public changesets at most
16 # only check 50 recent non-public changesets at most
17 max-stack-size = 50
17 max-stack-size = 50
18 # whether to add noise to new commits to avoid obsolescence cycle
18 # whether to add noise to new commits to avoid obsolescence cycle
19 add-noise = 1
19 add-noise = 1
20 # make `amend --correlated` a shortcut to the main command
20 # make `amend --correlated` a shortcut to the main command
21 amend-flag = correlated
21 amend-flag = correlated
22
22
23 [color]
23 [color]
24 absorb.description = yellow
24 absorb.description = yellow
25 absorb.node = blue bold
25 absorb.node = blue bold
26 absorb.path = bold
26 absorb.path = bold
27 """
27 """
28
28
29 # TODO:
29 # TODO:
30 # * Rename config items to [commands] namespace
30 # * Rename config items to [commands] namespace
31 # * Converge getdraftstack() with other code in core
31 # * Converge getdraftstack() with other code in core
32 # * move many attributes on fixupstate to be private
32 # * move many attributes on fixupstate to be private
33
33
34 from __future__ import absolute_import
34 from __future__ import absolute_import
35
35
36 import collections
36 import collections
37
37
38 from mercurial.i18n import _
38 from mercurial.i18n import _
39 from mercurial import (
39 from mercurial import (
40 cmdutil,
40 cmdutil,
41 commands,
41 commands,
42 context,
42 context,
43 crecord,
43 crecord,
44 error,
44 error,
45 linelog,
45 linelog,
46 mdiff,
46 mdiff,
47 node,
47 node,
48 obsolete,
48 obsolete,
49 patch,
49 patch,
50 phases,
50 phases,
51 pycompat,
51 pycompat,
52 registrar,
52 registrar,
53 repair,
53 repair,
54 scmutil,
54 scmutil,
55 util,
55 util,
56 )
56 )
57 from mercurial.utils import (
57 from mercurial.utils import (
58 stringutil,
58 stringutil,
59 )
59 )
60
60
61 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
61 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
62 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
62 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
63 # be specifying the version(s) of Mercurial they are tested with, or
63 # be specifying the version(s) of Mercurial they are tested with, or
64 # leave the attribute unspecified.
64 # leave the attribute unspecified.
65 testedwith = 'ships-with-hg-core'
65 testedwith = 'ships-with-hg-core'
66
66
67 cmdtable = {}
67 cmdtable = {}
68 command = registrar.command(cmdtable)
68 command = registrar.command(cmdtable)
69
69
70 configtable = {}
70 configtable = {}
71 configitem = registrar.configitem(configtable)
71 configitem = registrar.configitem(configtable)
72
72
73 configitem('absorb', 'add-noise', default=True)
73 configitem('absorb', 'add-noise', default=True)
74 configitem('absorb', 'amend-flag', default=None)
74 configitem('absorb', 'amend-flag', default=None)
75 configitem('absorb', 'max-stack-size', default=50)
75 configitem('absorb', 'max-stack-size', default=50)
76
76
77 colortable = {
77 colortable = {
78 'absorb.description': 'yellow',
78 'absorb.description': 'yellow',
79 'absorb.node': 'blue bold',
79 'absorb.node': 'blue bold',
80 'absorb.path': 'bold',
80 'absorb.path': 'bold',
81 }
81 }
82
82
83 defaultdict = collections.defaultdict
83 defaultdict = collections.defaultdict
84
84
85 class nullui(object):
85 class nullui(object):
86 """blank ui object doing nothing"""
86 """blank ui object doing nothing"""
87 debugflag = False
87 debugflag = False
88 verbose = False
88 verbose = False
89 quiet = True
89 quiet = True
90
90
91 def __getitem__(name):
91 def __getitem__(name):
92 def nullfunc(*args, **kwds):
92 def nullfunc(*args, **kwds):
93 return
93 return
94 return nullfunc
94 return nullfunc
95
95
96 class emptyfilecontext(object):
96 class emptyfilecontext(object):
97 """minimal filecontext representing an empty file"""
97 """minimal filecontext representing an empty file"""
98 def data(self):
98 def data(self):
99 return ''
99 return ''
100
100
101 def node(self):
101 def node(self):
102 return node.nullid
102 return node.nullid
103
103
104 def uniq(lst):
104 def uniq(lst):
105 """list -> list. remove duplicated items without changing the order"""
105 """list -> list. remove duplicated items without changing the order"""
106 seen = set()
106 seen = set()
107 result = []
107 result = []
108 for x in lst:
108 for x in lst:
109 if x not in seen:
109 if x not in seen:
110 seen.add(x)
110 seen.add(x)
111 result.append(x)
111 result.append(x)
112 return result
112 return result
113
113
114 def getdraftstack(headctx, limit=None):
114 def getdraftstack(headctx, limit=None):
115 """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.
115 """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.
116
116
117 changesets are sorted in topo order, oldest first.
117 changesets are sorted in topo order, oldest first.
118 return at most limit items, if limit is a positive number.
118 return at most limit items, if limit is a positive number.
119
119
120 merges are considered as non-draft as well. i.e. every commit
120 merges are considered as non-draft as well. i.e. every commit
121 returned has and only has 1 parent.
121 returned has and only has 1 parent.
122 """
122 """
123 ctx = headctx
123 ctx = headctx
124 result = []
124 result = []
125 while ctx.phase() != phases.public:
125 while ctx.phase() != phases.public:
126 if limit and len(result) >= limit:
126 if limit and len(result) >= limit:
127 break
127 break
128 parents = ctx.parents()
128 parents = ctx.parents()
129 if len(parents) != 1:
129 if len(parents) != 1:
130 break
130 break
131 result.append(ctx)
131 result.append(ctx)
132 ctx = parents[0]
132 ctx = parents[0]
133 result.reverse()
133 result.reverse()
134 return result
134 return result
135
135
136 def getfilestack(stack, path, seenfctxs=None):
136 def getfilestack(stack, path, seenfctxs=None):
137 """([ctx], str, set) -> [fctx], {ctx: fctx}
137 """([ctx], str, set) -> [fctx], {ctx: fctx}
138
138
139 stack is a list of contexts, from old to new. usually they are what
139 stack is a list of contexts, from old to new. usually they are what
140 "getdraftstack" returns.
140 "getdraftstack" returns.
141
141
142 follows renames, but not copies.
142 follows renames, but not copies.
143
143
144 seenfctxs is a set of filecontexts that will be considered "immutable".
144 seenfctxs is a set of filecontexts that will be considered "immutable".
145 they are usually what this function returned in earlier calls, useful
145 they are usually what this function returned in earlier calls, useful
146 to avoid issues that a file was "moved" to multiple places and was then
146 to avoid issues that a file was "moved" to multiple places and was then
147 modified differently, like: "a" was copied to "b", "a" was also copied to
147 modified differently, like: "a" was copied to "b", "a" was also copied to
148 "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a"
148 "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a"
149 and we enforce only one of them to be able to affect "a"'s content.
149 and we enforce only one of them to be able to affect "a"'s content.
150
150
151 return an empty list and an empty dict, if the specified path does not
151 return an empty list and an empty dict, if the specified path does not
152 exist in stack[-1] (the top of the stack).
152 exist in stack[-1] (the top of the stack).
153
153
154 otherwise, return a list of de-duplicated filecontexts, and the map to
154 otherwise, return a list of de-duplicated filecontexts, and the map to
155 convert ctx in the stack to fctx, for possible mutable fctxs. the first item
155 convert ctx in the stack to fctx, for possible mutable fctxs. the first item
156 of the list would be outside the stack and should be considered immutable.
156 of the list would be outside the stack and should be considered immutable.
157 the remaining items are within the stack.
157 the remaining items are within the stack.
158
158
159 for example, given the following changelog and corresponding filelog
159 for example, given the following changelog and corresponding filelog
160 revisions:
160 revisions:
161
161
162 changelog: 3----4----5----6----7
162 changelog: 3----4----5----6----7
163 filelog: x 0----1----1----2 (x: no such file yet)
163 filelog: x 0----1----1----2 (x: no such file yet)
164
164
165 - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2})
165 - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2})
166 - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a
166 - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a
167 dummy empty filecontext.
167 dummy empty filecontext.
168 - if stack = [2], returns ([], {})
168 - if stack = [2], returns ([], {})
169 - if stack = [7], returns ([1, 2], {7: 2})
169 - if stack = [7], returns ([1, 2], {7: 2})
170 - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be
170 - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be
171 removed, since 1 is immutable.
171 removed, since 1 is immutable.
172 """
172 """
173 if seenfctxs is None:
173 if seenfctxs is None:
174 seenfctxs = set()
174 seenfctxs = set()
175 assert stack
175 assert stack
176
176
177 if path not in stack[-1]:
177 if path not in stack[-1]:
178 return [], {}
178 return [], {}
179
179
180 fctxs = []
180 fctxs = []
181 fctxmap = {}
181 fctxmap = {}
182
182
183 pctx = stack[0].p1() # the public (immutable) ctx we stop at
183 pctx = stack[0].p1() # the public (immutable) ctx we stop at
184 for ctx in reversed(stack):
184 for ctx in reversed(stack):
185 if path not in ctx: # the file is added in the next commit
185 if path not in ctx: # the file is added in the next commit
186 pctx = ctx
186 pctx = ctx
187 break
187 break
188 fctx = ctx[path]
188 fctx = ctx[path]
189 fctxs.append(fctx)
189 fctxs.append(fctx)
190 if fctx in seenfctxs: # treat fctx as the immutable one
190 if fctx in seenfctxs: # treat fctx as the immutable one
191 pctx = None # do not add another immutable fctx
191 pctx = None # do not add another immutable fctx
192 break
192 break
193 fctxmap[ctx] = fctx # only for mutable fctxs
193 fctxmap[ctx] = fctx # only for mutable fctxs
194 renamed = fctx.renamed()
194 renamed = fctx.renamed()
195 if renamed:
195 if renamed:
196 path = renamed[0] # follow rename
196 path = renamed[0] # follow rename
197 if path in ctx: # but do not follow copy
197 if path in ctx: # but do not follow copy
198 pctx = ctx.p1()
198 pctx = ctx.p1()
199 break
199 break
200
200
201 if pctx is not None: # need an extra immutable fctx
201 if pctx is not None: # need an extra immutable fctx
202 if path in pctx:
202 if path in pctx:
203 fctxs.append(pctx[path])
203 fctxs.append(pctx[path])
204 else:
204 else:
205 fctxs.append(emptyfilecontext())
205 fctxs.append(emptyfilecontext())
206
206
207 fctxs.reverse()
207 fctxs.reverse()
208 # note: we rely on a property of hg: filerev is not reused for linear
208 # note: we rely on a property of hg: filerev is not reused for linear
209 # history. i.e. it's impossible to have:
209 # history. i.e. it's impossible to have:
210 # changelog: 4----5----6 (linear, no merges)
210 # changelog: 4----5----6 (linear, no merges)
211 # filelog: 1----2----1
211 # filelog: 1----2----1
212 # ^ reuse filerev (impossible)
212 # ^ reuse filerev (impossible)
213 # because parents are part of the hash. if that's not true, we need to
213 # because parents are part of the hash. if that's not true, we need to
214 # remove uniq and find a different way to identify fctxs.
214 # remove uniq and find a different way to identify fctxs.
215 return uniq(fctxs), fctxmap
215 return uniq(fctxs), fctxmap
216
216
217 class overlaystore(patch.filestore):
217 class overlaystore(patch.filestore):
218 """read-only, hybrid store based on a dict and ctx.
218 """read-only, hybrid store based on a dict and ctx.
219 memworkingcopy: {path: content}, overrides file contents.
219 memworkingcopy: {path: content}, overrides file contents.
220 """
220 """
221 def __init__(self, basectx, memworkingcopy):
221 def __init__(self, basectx, memworkingcopy):
222 self.basectx = basectx
222 self.basectx = basectx
223 self.memworkingcopy = memworkingcopy
223 self.memworkingcopy = memworkingcopy
224
224
225 def getfile(self, path):
225 def getfile(self, path):
226 """comply with mercurial.patch.filestore.getfile"""
226 """comply with mercurial.patch.filestore.getfile"""
227 if path not in self.basectx:
227 if path not in self.basectx:
228 return None, None, None
228 return None, None, None
229 fctx = self.basectx[path]
229 fctx = self.basectx[path]
230 if path in self.memworkingcopy:
230 if path in self.memworkingcopy:
231 content = self.memworkingcopy[path]
231 content = self.memworkingcopy[path]
232 else:
232 else:
233 content = fctx.data()
233 content = fctx.data()
234 mode = (fctx.islink(), fctx.isexec())
234 mode = (fctx.islink(), fctx.isexec())
235 renamed = fctx.renamed() # False or (path, node)
235 renamed = fctx.renamed() # False or (path, node)
236 return content, mode, (renamed and renamed[0])
236 return content, mode, (renamed and renamed[0])
237
237
238 def overlaycontext(memworkingcopy, ctx, parents=None, extra=None):
238 def overlaycontext(memworkingcopy, ctx, parents=None, extra=None):
239 """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
239 """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
240 memworkingcopy overrides file contents.
240 memworkingcopy overrides file contents.
241 """
241 """
242 # parents must contain 2 items: (node1, node2)
242 # parents must contain 2 items: (node1, node2)
243 if parents is None:
243 if parents is None:
244 parents = ctx.repo().changelog.parents(ctx.node())
244 parents = ctx.repo().changelog.parents(ctx.node())
245 if extra is None:
245 if extra is None:
246 extra = ctx.extra()
246 extra = ctx.extra()
247 date = ctx.date()
247 date = ctx.date()
248 desc = ctx.description()
248 desc = ctx.description()
249 user = ctx.user()
249 user = ctx.user()
250 files = set(ctx.files()).union(memworkingcopy)
250 files = set(ctx.files()).union(memworkingcopy)
251 store = overlaystore(ctx, memworkingcopy)
251 store = overlaystore(ctx, memworkingcopy)
252 return context.memctx(
252 return context.memctx(
253 repo=ctx.repo(), parents=parents, text=desc,
253 repo=ctx.repo(), parents=parents, text=desc,
254 files=files, filectxfn=store, user=user, date=date,
254 files=files, filectxfn=store, user=user, date=date,
255 branch=None, extra=extra)
255 branch=None, extra=extra)
256
256
257 class filefixupstate(object):
257 class filefixupstate(object):
258 """state needed to apply fixups to a single file
258 """state needed to apply fixups to a single file
259
259
260 internally, it keeps file contents of several revisions and a linelog.
260 internally, it keeps file contents of several revisions and a linelog.
261
261
262 the linelog uses odd revision numbers for original contents (fctxs passed
262 the linelog uses odd revision numbers for original contents (fctxs passed
263 to __init__), and even revision numbers for fixups, like:
263 to __init__), and even revision numbers for fixups, like:
264
264
265 linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
265 linelog rev 1: self.fctxs[0] (from an immutable "public" changeset)
266 linelog rev 2: fixups made to self.fctxs[0]
266 linelog rev 2: fixups made to self.fctxs[0]
267 linelog rev 3: self.fctxs[1] (a child of fctxs[0])
267 linelog rev 3: self.fctxs[1] (a child of fctxs[0])
268 linelog rev 4: fixups made to self.fctxs[1]
268 linelog rev 4: fixups made to self.fctxs[1]
269 ...
269 ...
270
270
271 a typical use is like:
271 a typical use is like:
272
272
273 1. call diffwith, to calculate self.fixups
273 1. call diffwith, to calculate self.fixups
274 2. (optionally), present self.fixups to the user, or change it
274 2. (optionally), present self.fixups to the user, or change it
275 3. call apply, to apply changes
275 3. call apply, to apply changes
276 4. read results from "finalcontents", or call getfinalcontent
276 4. read results from "finalcontents", or call getfinalcontent
277 """
277 """
278
278
279 def __init__(self, fctxs, path, ui=None, opts=None):
279 def __init__(self, fctxs, path, ui=None, opts=None):
280 """([fctx], ui or None) -> None
280 """([fctx], ui or None) -> None
281
281
282 fctxs should be linear, and sorted by topo order - oldest first.
282 fctxs should be linear, and sorted by topo order - oldest first.
283 fctxs[0] will be considered as "immutable" and will not be changed.
283 fctxs[0] will be considered as "immutable" and will not be changed.
284 """
284 """
285 self.fctxs = fctxs
285 self.fctxs = fctxs
286 self.path = path
286 self.path = path
287 self.ui = ui or nullui()
287 self.ui = ui or nullui()
288 self.opts = opts or {}
288 self.opts = opts or {}
289
289
290 # following fields are built from fctxs. they exist for perf reason
290 # following fields are built from fctxs. they exist for perf reason
291 self.contents = [f.data() for f in fctxs]
291 self.contents = [f.data() for f in fctxs]
292 self.contentlines = pycompat.maplist(mdiff.splitnewlines, self.contents)
292 self.contentlines = pycompat.maplist(mdiff.splitnewlines, self.contents)
293 self.linelog = self._buildlinelog()
293 self.linelog = self._buildlinelog()
294 if self.ui.debugflag:
294 if self.ui.debugflag:
295 assert self._checkoutlinelog() == self.contents
295 assert self._checkoutlinelog() == self.contents
296
296
297 # following fields will be filled later
297 # following fields will be filled later
298 self.chunkstats = [0, 0] # [adopted, total : int]
298 self.chunkstats = [0, 0] # [adopted, total : int]
299 self.targetlines = [] # [str]
299 self.targetlines = [] # [str]
300 self.fixups = [] # [(linelog rev, a1, a2, b1, b2)]
300 self.fixups = [] # [(linelog rev, a1, a2, b1, b2)]
301 self.finalcontents = [] # [str]
301 self.finalcontents = [] # [str]
302 self.ctxaffected = set()
302 self.ctxaffected = set()
303
303
304 def diffwith(self, targetfctx, fm=None):
304 def diffwith(self, targetfctx, fm=None):
305 """calculate fixups needed by examining the differences between
305 """calculate fixups needed by examining the differences between
306 self.fctxs[-1] and targetfctx, chunk by chunk.
306 self.fctxs[-1] and targetfctx, chunk by chunk.
307
307
308 targetfctx is the target state we move towards. we may or may not be
308 targetfctx is the target state we move towards. we may or may not be
309 able to get there because not all modified chunks can be amended into
309 able to get there because not all modified chunks can be amended into
310 a non-public fctx unambiguously.
310 a non-public fctx unambiguously.
311
311
312 call this only once, before apply().
312 call this only once, before apply().
313
313
314 update self.fixups, self.chunkstats, and self.targetlines.
314 update self.fixups, self.chunkstats, and self.targetlines.
315 """
315 """
316 a = self.contents[-1]
316 a = self.contents[-1]
317 alines = self.contentlines[-1]
317 alines = self.contentlines[-1]
318 b = targetfctx.data()
318 b = targetfctx.data()
319 blines = mdiff.splitnewlines(b)
319 blines = mdiff.splitnewlines(b)
320 self.targetlines = blines
320 self.targetlines = blines
321
321
322 self.linelog.annotate(self.linelog.maxrev)
322 self.linelog.annotate(self.linelog.maxrev)
323 annotated = self.linelog.annotateresult # [(linelog rev, linenum)]
323 annotated = self.linelog.annotateresult # [(linelog rev, linenum)]
324 assert len(annotated) == len(alines)
324 assert len(annotated) == len(alines)
325 # add a dummy end line to make insertion at the end easier
325 # add a dummy end line to make insertion at the end easier
326 if annotated:
326 if annotated:
327 dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
327 dummyendline = (annotated[-1][0], annotated[-1][1] + 1)
328 annotated.append(dummyendline)
328 annotated.append(dummyendline)
329
329
330 # analyse diff blocks
330 # analyse diff blocks
331 for chunk in self._alldiffchunks(a, b, alines, blines):
331 for chunk in self._alldiffchunks(a, b, alines, blines):
332 newfixups = self._analysediffchunk(chunk, annotated)
332 newfixups = self._analysediffchunk(chunk, annotated)
333 self.chunkstats[0] += bool(newfixups) # 1 or 0
333 self.chunkstats[0] += bool(newfixups) # 1 or 0
334 self.chunkstats[1] += 1
334 self.chunkstats[1] += 1
335 self.fixups += newfixups
335 self.fixups += newfixups
336 if fm is not None:
336 if fm is not None:
337 self._showchanges(fm, alines, blines, chunk, newfixups)
337 self._showchanges(fm, alines, blines, chunk, newfixups)
338
338
339 def apply(self):
339 def apply(self):
340 """apply self.fixups. update self.linelog, self.finalcontents.
340 """apply self.fixups. update self.linelog, self.finalcontents.
341
341
342 call this only once, before getfinalcontent(), after diffwith().
342 call this only once, before getfinalcontent(), after diffwith().
343 """
343 """
344 # the following is unnecessary, as it's done by "diffwith":
344 # the following is unnecessary, as it's done by "diffwith":
345 # self.linelog.annotate(self.linelog.maxrev)
345 # self.linelog.annotate(self.linelog.maxrev)
346 for rev, a1, a2, b1, b2 in reversed(self.fixups):
346 for rev, a1, a2, b1, b2 in reversed(self.fixups):
347 blines = self.targetlines[b1:b2]
347 blines = self.targetlines[b1:b2]
348 if self.ui.debugflag:
348 if self.ui.debugflag:
349 idx = (max(rev - 1, 0)) // 2
349 idx = (max(rev - 1, 0)) // 2
350 self.ui.write(_('%s: chunk %d:%d -> %d lines\n')
350 self.ui.write(_('%s: chunk %d:%d -> %d lines\n')
351 % (node.short(self.fctxs[idx].node()),
351 % (node.short(self.fctxs[idx].node()),
352 a1, a2, len(blines)))
352 a1, a2, len(blines)))
353 self.linelog.replacelines(rev, a1, a2, b1, b2)
353 self.linelog.replacelines(rev, a1, a2, b1, b2)
354 if self.opts.get('edit_lines', False):
354 if self.opts.get('edit_lines', False):
355 self.finalcontents = self._checkoutlinelogwithedits()
355 self.finalcontents = self._checkoutlinelogwithedits()
356 else:
356 else:
357 self.finalcontents = self._checkoutlinelog()
357 self.finalcontents = self._checkoutlinelog()
358
358
359 def getfinalcontent(self, fctx):
359 def getfinalcontent(self, fctx):
360 """(fctx) -> str. get modified file content for a given filecontext"""
360 """(fctx) -> str. get modified file content for a given filecontext"""
361 idx = self.fctxs.index(fctx)
361 idx = self.fctxs.index(fctx)
362 return self.finalcontents[idx]
362 return self.finalcontents[idx]
363
363
364 def _analysediffchunk(self, chunk, annotated):
364 def _analysediffchunk(self, chunk, annotated):
365 """analyse a different chunk and return new fixups found
365 """analyse a different chunk and return new fixups found
366
366
367 return [] if no lines from the chunk can be safely applied.
367 return [] if no lines from the chunk can be safely applied.
368
368
369 the chunk (or lines) cannot be safely applied, if, for example:
369 the chunk (or lines) cannot be safely applied, if, for example:
370 - the modified (deleted) lines belong to a public changeset
370 - the modified (deleted) lines belong to a public changeset
371 (self.fctxs[0])
371 (self.fctxs[0])
372 - the chunk is a pure insertion and the adjacent lines (at most 2
372 - the chunk is a pure insertion and the adjacent lines (at most 2
373 lines) belong to different non-public changesets, or do not belong
373 lines) belong to different non-public changesets, or do not belong
374 to any non-public changesets.
374 to any non-public changesets.
375 - the chunk is modifying lines from different changesets.
375 - the chunk is modifying lines from different changesets.
376 in this case, if the number of lines deleted equals to the number
376 in this case, if the number of lines deleted equals to the number
377 of lines added, assume it's a simple 1:1 map (could be wrong).
377 of lines added, assume it's a simple 1:1 map (could be wrong).
378 otherwise, give up.
378 otherwise, give up.
379 - the chunk is modifying lines from a single non-public changeset,
379 - the chunk is modifying lines from a single non-public changeset,
380 but other revisions touch the area as well. i.e. the lines are
380 but other revisions touch the area as well. i.e. the lines are
381 not continuous as seen from the linelog.
381 not continuous as seen from the linelog.
382 """
382 """
383 a1, a2, b1, b2 = chunk
383 a1, a2, b1, b2 = chunk
384 # find involved indexes from annotate result
384 # find involved indexes from annotate result
385 involved = annotated[a1:a2]
385 involved = annotated[a1:a2]
386 if not involved and annotated: # a1 == a2 and a is not empty
386 if not involved and annotated: # a1 == a2 and a is not empty
387 # pure insertion, check nearby lines. ignore lines belong
387 # pure insertion, check nearby lines. ignore lines belong
388 # to the public (first) changeset (i.e. annotated[i][0] == 1)
388 # to the public (first) changeset (i.e. annotated[i][0] == 1)
389 nearbylinenums = {a2, max(0, a1 - 1)}
389 nearbylinenums = {a2, max(0, a1 - 1)}
390 involved = [annotated[i]
390 involved = [annotated[i]
391 for i in nearbylinenums if annotated[i][0] != 1]
391 for i in nearbylinenums if annotated[i][0] != 1]
392 involvedrevs = list(set(r for r, l in involved))
392 involvedrevs = list(set(r for r, l in involved))
393 newfixups = []
393 newfixups = []
394 if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
394 if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
395 # chunk belongs to a single revision
395 # chunk belongs to a single revision
396 rev = involvedrevs[0]
396 rev = involvedrevs[0]
397 if rev > 1:
397 if rev > 1:
398 fixuprev = rev + 1
398 fixuprev = rev + 1
399 newfixups.append((fixuprev, a1, a2, b1, b2))
399 newfixups.append((fixuprev, a1, a2, b1, b2))
400 elif a2 - a1 == b2 - b1 or b1 == b2:
400 elif a2 - a1 == b2 - b1 or b1 == b2:
401 # 1:1 line mapping, or chunk was deleted
401 # 1:1 line mapping, or chunk was deleted
402 for i in pycompat.xrange(a1, a2):
402 for i in pycompat.xrange(a1, a2):
403 rev, linenum = annotated[i]
403 rev, linenum = annotated[i]
404 if rev > 1:
404 if rev > 1:
405 if b1 == b2: # deletion, simply remove that single line
405 if b1 == b2: # deletion, simply remove that single line
406 nb1 = nb2 = 0
406 nb1 = nb2 = 0
407 else: # 1:1 line mapping, change the corresponding rev
407 else: # 1:1 line mapping, change the corresponding rev
408 nb1 = b1 + i - a1
408 nb1 = b1 + i - a1
409 nb2 = nb1 + 1
409 nb2 = nb1 + 1
410 fixuprev = rev + 1
410 fixuprev = rev + 1
411 newfixups.append((fixuprev, i, i + 1, nb1, nb2))
411 newfixups.append((fixuprev, i, i + 1, nb1, nb2))
412 return self._optimizefixups(newfixups)
412 return self._optimizefixups(newfixups)
413
413
414 @staticmethod
414 @staticmethod
415 def _alldiffchunks(a, b, alines, blines):
415 def _alldiffchunks(a, b, alines, blines):
416 """like mdiff.allblocks, but only care about differences"""
416 """like mdiff.allblocks, but only care about differences"""
417 blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
417 blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines)
418 for chunk, btype in blocks:
418 for chunk, btype in blocks:
419 if btype != '!':
419 if btype != '!':
420 continue
420 continue
421 yield chunk
421 yield chunk
422
422
423 def _buildlinelog(self):
423 def _buildlinelog(self):
424 """calculate the initial linelog based on self.content{,line}s.
424 """calculate the initial linelog based on self.content{,line}s.
425 this is similar to running a partial "annotate".
425 this is similar to running a partial "annotate".
426 """
426 """
427 llog = linelog.linelog()
427 llog = linelog.linelog()
428 a, alines = '', []
428 a, alines = '', []
429 for i in pycompat.xrange(len(self.contents)):
429 for i in pycompat.xrange(len(self.contents)):
430 b, blines = self.contents[i], self.contentlines[i]
430 b, blines = self.contents[i], self.contentlines[i]
431 llrev = i * 2 + 1
431 llrev = i * 2 + 1
432 chunks = self._alldiffchunks(a, b, alines, blines)
432 chunks = self._alldiffchunks(a, b, alines, blines)
433 for a1, a2, b1, b2 in reversed(list(chunks)):
433 for a1, a2, b1, b2 in reversed(list(chunks)):
434 llog.replacelines(llrev, a1, a2, b1, b2)
434 llog.replacelines(llrev, a1, a2, b1, b2)
435 a, alines = b, blines
435 a, alines = b, blines
436 return llog
436 return llog
437
437
438 def _checkoutlinelog(self):
438 def _checkoutlinelog(self):
439 """() -> [str]. check out file contents from linelog"""
439 """() -> [str]. check out file contents from linelog"""
440 contents = []
440 contents = []
441 for i in pycompat.xrange(len(self.contents)):
441 for i in pycompat.xrange(len(self.contents)):
442 rev = (i + 1) * 2
442 rev = (i + 1) * 2
443 self.linelog.annotate(rev)
443 self.linelog.annotate(rev)
444 content = ''.join(map(self._getline, self.linelog.annotateresult))
444 content = ''.join(map(self._getline, self.linelog.annotateresult))
445 contents.append(content)
445 contents.append(content)
446 return contents
446 return contents
447
447
448 def _checkoutlinelogwithedits(self):
448 def _checkoutlinelogwithedits(self):
449 """() -> [str]. prompt all lines for edit"""
449 """() -> [str]. prompt all lines for edit"""
450 alllines = self.linelog.getalllines()
450 alllines = self.linelog.getalllines()
451 # header
451 # header
452 editortext = (_('HG: editing %s\nHG: "y" means the line to the right '
452 editortext = (_('HG: editing %s\nHG: "y" means the line to the right '
453 'exists in the changeset to the top\nHG:\n')
453 'exists in the changeset to the top\nHG:\n')
454 % self.fctxs[-1].path())
454 % self.fctxs[-1].path())
455 # [(idx, fctx)]. hide the dummy emptyfilecontext
455 # [(idx, fctx)]. hide the dummy emptyfilecontext
456 visiblefctxs = [(i, f)
456 visiblefctxs = [(i, f)
457 for i, f in enumerate(self.fctxs)
457 for i, f in enumerate(self.fctxs)
458 if not isinstance(f, emptyfilecontext)]
458 if not isinstance(f, emptyfilecontext)]
459 for i, (j, f) in enumerate(visiblefctxs):
459 for i, (j, f) in enumerate(visiblefctxs):
460 editortext += (_('HG: %s/%s %s %s\n') %
460 editortext += (_('HG: %s/%s %s %s\n') %
461 ('|' * i, '-' * (len(visiblefctxs) - i + 1),
461 ('|' * i, '-' * (len(visiblefctxs) - i + 1),
462 node.short(f.node()),
462 node.short(f.node()),
463 f.description().split('\n',1)[0]))
463 f.description().split('\n',1)[0]))
464 editortext += _('HG: %s\n') % ('|' * len(visiblefctxs))
464 editortext += _('HG: %s\n') % ('|' * len(visiblefctxs))
465 # figure out the lifetime of a line, this is relatively inefficient,
465 # figure out the lifetime of a line, this is relatively inefficient,
466 # but probably fine
466 # but probably fine
467 lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}}
467 lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}}
468 for i, f in visiblefctxs:
468 for i, f in visiblefctxs:
469 self.linelog.annotate((i + 1) * 2)
469 self.linelog.annotate((i + 1) * 2)
470 for l in self.linelog.annotateresult:
470 for l in self.linelog.annotateresult:
471 lineset[l].add(i)
471 lineset[l].add(i)
472 # append lines
472 # append lines
473 for l in alllines:
473 for l in alllines:
474 editortext += (' %s : %s' %
474 editortext += (' %s : %s' %
475 (''.join([('y' if i in lineset[l] else ' ')
475 (''.join([('y' if i in lineset[l] else ' ')
476 for i, _f in visiblefctxs]),
476 for i, _f in visiblefctxs]),
477 self._getline(l)))
477 self._getline(l)))
478 # run editor
478 # run editor
479 editedtext = self.ui.edit(editortext, '', action='absorb')
479 editedtext = self.ui.edit(editortext, '', action='absorb')
480 if not editedtext:
480 if not editedtext:
481 raise error.Abort(_('empty editor text'))
481 raise error.Abort(_('empty editor text'))
482 # parse edited result
482 # parse edited result
483 contents = ['' for i in self.fctxs]
483 contents = ['' for i in self.fctxs]
484 leftpadpos = 4
484 leftpadpos = 4
485 colonpos = leftpadpos + len(visiblefctxs) + 1
485 colonpos = leftpadpos + len(visiblefctxs) + 1
486 for l in mdiff.splitnewlines(editedtext):
486 for l in mdiff.splitnewlines(editedtext):
487 if l.startswith('HG:'):
487 if l.startswith('HG:'):
488 continue
488 continue
489 if l[colonpos - 1:colonpos + 2] != ' : ':
489 if l[colonpos - 1:colonpos + 2] != ' : ':
490 raise error.Abort(_('malformed line: %s') % l)
490 raise error.Abort(_('malformed line: %s') % l)
491 linecontent = l[colonpos + 2:]
491 linecontent = l[colonpos + 2:]
492 for i, ch in enumerate(l[leftpadpos:colonpos - 1]):
492 for i, ch in enumerate(
493 pycompat.bytestr(l[leftpadpos:colonpos - 1])):
493 if ch == 'y':
494 if ch == 'y':
494 contents[visiblefctxs[i][0]] += linecontent
495 contents[visiblefctxs[i][0]] += linecontent
495 # chunkstats is hard to calculate if anything changes, therefore
496 # chunkstats is hard to calculate if anything changes, therefore
496 # set them to just a simple value (1, 1).
497 # set them to just a simple value (1, 1).
497 if editedtext != editortext:
498 if editedtext != editortext:
498 self.chunkstats = [1, 1]
499 self.chunkstats = [1, 1]
499 return contents
500 return contents
500
501
501 def _getline(self, lineinfo):
502 def _getline(self, lineinfo):
502 """((rev, linenum)) -> str. convert rev+line number to line content"""
503 """((rev, linenum)) -> str. convert rev+line number to line content"""
503 rev, linenum = lineinfo
504 rev, linenum = lineinfo
504 if rev & 1: # odd: original line taken from fctxs
505 if rev & 1: # odd: original line taken from fctxs
505 return self.contentlines[rev // 2][linenum]
506 return self.contentlines[rev // 2][linenum]
506 else: # even: fixup line from targetfctx
507 else: # even: fixup line from targetfctx
507 return self.targetlines[linenum]
508 return self.targetlines[linenum]
508
509
509 def _iscontinuous(self, a1, a2, closedinterval=False):
510 def _iscontinuous(self, a1, a2, closedinterval=False):
510 """(a1, a2 : int) -> bool
511 """(a1, a2 : int) -> bool
511
512
512 check if these lines are continuous. i.e. no other insertions or
513 check if these lines are continuous. i.e. no other insertions or
513 deletions (from other revisions) among these lines.
514 deletions (from other revisions) among these lines.
514
515
515 closedinterval decides whether a2 should be included or not. i.e. is
516 closedinterval decides whether a2 should be included or not. i.e. is
516 it [a1, a2), or [a1, a2] ?
517 it [a1, a2), or [a1, a2] ?
517 """
518 """
518 if a1 >= a2:
519 if a1 >= a2:
519 return True
520 return True
520 llog = self.linelog
521 llog = self.linelog
521 offset1 = llog.getoffset(a1)
522 offset1 = llog.getoffset(a1)
522 offset2 = llog.getoffset(a2) + int(closedinterval)
523 offset2 = llog.getoffset(a2) + int(closedinterval)
523 linesinbetween = llog.getalllines(offset1, offset2)
524 linesinbetween = llog.getalllines(offset1, offset2)
524 return len(linesinbetween) == a2 - a1 + int(closedinterval)
525 return len(linesinbetween) == a2 - a1 + int(closedinterval)
525
526
526 def _optimizefixups(self, fixups):
527 def _optimizefixups(self, fixups):
527 """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)].
528 """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)].
528 merge adjacent fixups to make them less fragmented.
529 merge adjacent fixups to make them less fragmented.
529 """
530 """
530 result = []
531 result = []
531 pcurrentchunk = [[-1, -1, -1, -1, -1]]
532 pcurrentchunk = [[-1, -1, -1, -1, -1]]
532
533
533 def pushchunk():
534 def pushchunk():
534 if pcurrentchunk[0][0] != -1:
535 if pcurrentchunk[0][0] != -1:
535 result.append(tuple(pcurrentchunk[0]))
536 result.append(tuple(pcurrentchunk[0]))
536
537
537 for i, chunk in enumerate(fixups):
538 for i, chunk in enumerate(fixups):
538 rev, a1, a2, b1, b2 = chunk
539 rev, a1, a2, b1, b2 = chunk
539 lastrev = pcurrentchunk[0][0]
540 lastrev = pcurrentchunk[0][0]
540 lasta2 = pcurrentchunk[0][2]
541 lasta2 = pcurrentchunk[0][2]
541 lastb2 = pcurrentchunk[0][4]
542 lastb2 = pcurrentchunk[0][4]
542 if (a1 == lasta2 and b1 == lastb2 and rev == lastrev and
543 if (a1 == lasta2 and b1 == lastb2 and rev == lastrev and
543 self._iscontinuous(max(a1 - 1, 0), a1)):
544 self._iscontinuous(max(a1 - 1, 0), a1)):
544 # merge into currentchunk
545 # merge into currentchunk
545 pcurrentchunk[0][2] = a2
546 pcurrentchunk[0][2] = a2
546 pcurrentchunk[0][4] = b2
547 pcurrentchunk[0][4] = b2
547 else:
548 else:
548 pushchunk()
549 pushchunk()
549 pcurrentchunk[0] = list(chunk)
550 pcurrentchunk[0] = list(chunk)
550 pushchunk()
551 pushchunk()
551 return result
552 return result
552
553
553 def _showchanges(self, fm, alines, blines, chunk, fixups):
554 def _showchanges(self, fm, alines, blines, chunk, fixups):
554
555
555 def trim(line):
556 def trim(line):
556 if line.endswith('\n'):
557 if line.endswith('\n'):
557 line = line[:-1]
558 line = line[:-1]
558 return line
559 return line
559
560
560 # this is not optimized for perf but _showchanges only gets executed
561 # this is not optimized for perf but _showchanges only gets executed
561 # with an extra command-line flag.
562 # with an extra command-line flag.
562 a1, a2, b1, b2 = chunk
563 a1, a2, b1, b2 = chunk
563 aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
564 aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1)
564 for idx, fa1, fa2, fb1, fb2 in fixups:
565 for idx, fa1, fa2, fb1, fb2 in fixups:
565 for i in pycompat.xrange(fa1, fa2):
566 for i in pycompat.xrange(fa1, fa2):
566 aidxs[i - a1] = (max(idx, 1) - 1) // 2
567 aidxs[i - a1] = (max(idx, 1) - 1) // 2
567 for i in pycompat.xrange(fb1, fb2):
568 for i in pycompat.xrange(fb1, fb2):
568 bidxs[i - b1] = (max(idx, 1) - 1) // 2
569 bidxs[i - b1] = (max(idx, 1) - 1) // 2
569
570
570 fm.startitem()
571 fm.startitem()
571 fm.write('hunk', ' %s\n',
572 fm.write('hunk', ' %s\n',
572 '@@ -%d,%d +%d,%d @@'
573 '@@ -%d,%d +%d,%d @@'
573 % (a1, a2 - a1, b1, b2 - b1), label='diff.hunk')
574 % (a1, a2 - a1, b1, b2 - b1), label='diff.hunk')
574 fm.data(path=self.path, linetype='hunk')
575 fm.data(path=self.path, linetype='hunk')
575
576
576 def writeline(idx, diffchar, line, linetype, linelabel):
577 def writeline(idx, diffchar, line, linetype, linelabel):
577 fm.startitem()
578 fm.startitem()
578 node = ''
579 node = ''
579 if idx:
580 if idx:
580 ctx = self.fctxs[idx]
581 ctx = self.fctxs[idx]
581 fm.context(fctx=ctx)
582 fm.context(fctx=ctx)
582 node = ctx.hex()
583 node = ctx.hex()
583 self.ctxaffected.add(ctx.changectx())
584 self.ctxaffected.add(ctx.changectx())
584 fm.write('node', '%-7.7s ', node, label='absorb.node')
585 fm.write('node', '%-7.7s ', node, label='absorb.node')
585 fm.write('diffchar ' + linetype, '%s%s\n', diffchar, line,
586 fm.write('diffchar ' + linetype, '%s%s\n', diffchar, line,
586 label=linelabel)
587 label=linelabel)
587 fm.data(path=self.path, linetype=linetype)
588 fm.data(path=self.path, linetype=linetype)
588
589
589 for i in pycompat.xrange(a1, a2):
590 for i in pycompat.xrange(a1, a2):
590 writeline(aidxs[i - a1], '-', trim(alines[i]), 'deleted',
591 writeline(aidxs[i - a1], '-', trim(alines[i]), 'deleted',
591 'diff.deleted')
592 'diff.deleted')
592 for i in pycompat.xrange(b1, b2):
593 for i in pycompat.xrange(b1, b2):
593 writeline(bidxs[i - b1], '+', trim(blines[i]), 'inserted',
594 writeline(bidxs[i - b1], '+', trim(blines[i]), 'inserted',
594 'diff.inserted')
595 'diff.inserted')
595
596
596 class fixupstate(object):
597 class fixupstate(object):
597 """state needed to run absorb
598 """state needed to run absorb
598
599
599 internally, it keeps paths and filefixupstates.
600 internally, it keeps paths and filefixupstates.
600
601
601 a typical use is like filefixupstates:
602 a typical use is like filefixupstates:
602
603
603 1. call diffwith, to calculate fixups
604 1. call diffwith, to calculate fixups
604 2. (optionally), present fixups to the user, or edit fixups
605 2. (optionally), present fixups to the user, or edit fixups
605 3. call apply, to apply changes to memory
606 3. call apply, to apply changes to memory
606 4. call commit, to commit changes to hg database
607 4. call commit, to commit changes to hg database
607 """
608 """
608
609
609 def __init__(self, stack, ui=None, opts=None):
610 def __init__(self, stack, ui=None, opts=None):
610 """([ctx], ui or None) -> None
611 """([ctx], ui or None) -> None
611
612
612 stack: should be linear, and sorted by topo order - oldest first.
613 stack: should be linear, and sorted by topo order - oldest first.
613 all commits in stack are considered mutable.
614 all commits in stack are considered mutable.
614 """
615 """
615 assert stack
616 assert stack
616 self.ui = ui or nullui()
617 self.ui = ui or nullui()
617 self.opts = opts or {}
618 self.opts = opts or {}
618 self.stack = stack
619 self.stack = stack
619 self.repo = stack[-1].repo().unfiltered()
620 self.repo = stack[-1].repo().unfiltered()
620
621
621 # following fields will be filled later
622 # following fields will be filled later
622 self.paths = [] # [str]
623 self.paths = [] # [str]
623 self.status = None # ctx.status output
624 self.status = None # ctx.status output
624 self.fctxmap = {} # {path: {ctx: fctx}}
625 self.fctxmap = {} # {path: {ctx: fctx}}
625 self.fixupmap = {} # {path: filefixupstate}
626 self.fixupmap = {} # {path: filefixupstate}
626 self.replacemap = {} # {oldnode: newnode or None}
627 self.replacemap = {} # {oldnode: newnode or None}
627 self.finalnode = None # head after all fixups
628 self.finalnode = None # head after all fixups
628 self.ctxaffected = set() # ctx that will be absorbed into
629 self.ctxaffected = set() # ctx that will be absorbed into
629
630
630 def diffwith(self, targetctx, match=None, fm=None):
631 def diffwith(self, targetctx, match=None, fm=None):
631 """diff and prepare fixups. update self.fixupmap, self.paths"""
632 """diff and prepare fixups. update self.fixupmap, self.paths"""
632 # only care about modified files
633 # only care about modified files
633 self.status = self.stack[-1].status(targetctx, match)
634 self.status = self.stack[-1].status(targetctx, match)
634 self.paths = []
635 self.paths = []
635 # but if --edit-lines is used, the user may want to edit files
636 # but if --edit-lines is used, the user may want to edit files
636 # even if they are not modified
637 # even if they are not modified
637 editopt = self.opts.get('edit_lines')
638 editopt = self.opts.get('edit_lines')
638 if not self.status.modified and editopt and match:
639 if not self.status.modified and editopt and match:
639 interestingpaths = match.files()
640 interestingpaths = match.files()
640 else:
641 else:
641 interestingpaths = self.status.modified
642 interestingpaths = self.status.modified
642 # prepare the filefixupstate
643 # prepare the filefixupstate
643 seenfctxs = set()
644 seenfctxs = set()
644 # sorting is necessary to eliminate ambiguity for the "double move"
645 # sorting is necessary to eliminate ambiguity for the "double move"
645 # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
646 # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A".
646 for path in sorted(interestingpaths):
647 for path in sorted(interestingpaths):
647 self.ui.debug('calculating fixups for %s\n' % path)
648 self.ui.debug('calculating fixups for %s\n' % path)
648 targetfctx = targetctx[path]
649 targetfctx = targetctx[path]
649 fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
650 fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
650 # ignore symbolic links or binary, or unchanged files
651 # ignore symbolic links or binary, or unchanged files
651 if any(f.islink() or stringutil.binary(f.data())
652 if any(f.islink() or stringutil.binary(f.data())
652 for f in [targetfctx] + fctxs
653 for f in [targetfctx] + fctxs
653 if not isinstance(f, emptyfilecontext)):
654 if not isinstance(f, emptyfilecontext)):
654 continue
655 continue
655 if targetfctx.data() == fctxs[-1].data() and not editopt:
656 if targetfctx.data() == fctxs[-1].data() and not editopt:
656 continue
657 continue
657 seenfctxs.update(fctxs[1:])
658 seenfctxs.update(fctxs[1:])
658 self.fctxmap[path] = ctx2fctx
659 self.fctxmap[path] = ctx2fctx
659 fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts)
660 fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts)
660 if fm is not None:
661 if fm is not None:
661 fm.startitem()
662 fm.startitem()
662 fm.plain('showing changes for ')
663 fm.plain('showing changes for ')
663 fm.write('path', '%s\n', path, label='absorb.path')
664 fm.write('path', '%s\n', path, label='absorb.path')
664 fm.data(linetype='path')
665 fm.data(linetype='path')
665 fstate.diffwith(targetfctx, fm)
666 fstate.diffwith(targetfctx, fm)
666 self.fixupmap[path] = fstate
667 self.fixupmap[path] = fstate
667 self.paths.append(path)
668 self.paths.append(path)
668 self.ctxaffected.update(fstate.ctxaffected)
669 self.ctxaffected.update(fstate.ctxaffected)
669
670
670 def apply(self):
671 def apply(self):
671 """apply fixups to individual filefixupstates"""
672 """apply fixups to individual filefixupstates"""
672 for path, state in self.fixupmap.iteritems():
673 for path, state in self.fixupmap.iteritems():
673 if self.ui.debugflag:
674 if self.ui.debugflag:
674 self.ui.write(_('applying fixups to %s\n') % path)
675 self.ui.write(_('applying fixups to %s\n') % path)
675 state.apply()
676 state.apply()
676
677
677 @property
678 @property
678 def chunkstats(self):
679 def chunkstats(self):
679 """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
680 """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
680 return dict((path, state.chunkstats)
681 return dict((path, state.chunkstats)
681 for path, state in self.fixupmap.iteritems())
682 for path, state in self.fixupmap.iteritems())
682
683
683 def commit(self):
684 def commit(self):
684 """commit changes. update self.finalnode, self.replacemap"""
685 """commit changes. update self.finalnode, self.replacemap"""
685 with self.repo.wlock(), self.repo.lock():
686 with self.repo.wlock(), self.repo.lock():
686 with self.repo.transaction('absorb') as tr:
687 with self.repo.transaction('absorb') as tr:
687 self._commitstack()
688 self._commitstack()
688 self._movebookmarks(tr)
689 self._movebookmarks(tr)
689 if self.repo['.'].node() in self.replacemap:
690 if self.repo['.'].node() in self.replacemap:
690 self._moveworkingdirectoryparent()
691 self._moveworkingdirectoryparent()
691 if self._useobsolete:
692 if self._useobsolete:
692 self._obsoleteoldcommits()
693 self._obsoleteoldcommits()
693 if not self._useobsolete: # strip must be outside transactions
694 if not self._useobsolete: # strip must be outside transactions
694 self._stripoldcommits()
695 self._stripoldcommits()
695 return self.finalnode
696 return self.finalnode
696
697
697 def printchunkstats(self):
698 def printchunkstats(self):
698 """print things like '1 of 2 chunk(s) applied'"""
699 """print things like '1 of 2 chunk(s) applied'"""
699 ui = self.ui
700 ui = self.ui
700 chunkstats = self.chunkstats
701 chunkstats = self.chunkstats
701 if ui.verbose:
702 if ui.verbose:
702 # chunkstats for each file
703 # chunkstats for each file
703 for path, stat in chunkstats.iteritems():
704 for path, stat in chunkstats.iteritems():
704 if stat[0]:
705 if stat[0]:
705 ui.write(_('%s: %d of %d chunk(s) applied\n')
706 ui.write(_('%s: %d of %d chunk(s) applied\n')
706 % (path, stat[0], stat[1]))
707 % (path, stat[0], stat[1]))
707 elif not ui.quiet:
708 elif not ui.quiet:
708 # a summary for all files
709 # a summary for all files
709 stats = chunkstats.values()
710 stats = chunkstats.values()
710 applied, total = (sum(s[i] for s in stats) for i in (0, 1))
711 applied, total = (sum(s[i] for s in stats) for i in (0, 1))
711 ui.write(_('%d of %d chunk(s) applied\n') % (applied, total))
712 ui.write(_('%d of %d chunk(s) applied\n') % (applied, total))
712
713
713 def _commitstack(self):
714 def _commitstack(self):
714 """make new commits. update self.finalnode, self.replacemap.
715 """make new commits. update self.finalnode, self.replacemap.
715 it is splitted from "commit" to avoid too much indentation.
716 it is splitted from "commit" to avoid too much indentation.
716 """
717 """
717 # last node (20-char) committed by us
718 # last node (20-char) committed by us
718 lastcommitted = None
719 lastcommitted = None
719 # p1 which overrides the parent of the next commit, "None" means use
720 # p1 which overrides the parent of the next commit, "None" means use
720 # the original parent unchanged
721 # the original parent unchanged
721 nextp1 = None
722 nextp1 = None
722 for ctx in self.stack:
723 for ctx in self.stack:
723 memworkingcopy = self._getnewfilecontents(ctx)
724 memworkingcopy = self._getnewfilecontents(ctx)
724 if not memworkingcopy and not lastcommitted:
725 if not memworkingcopy and not lastcommitted:
725 # nothing changed, nothing commited
726 # nothing changed, nothing commited
726 nextp1 = ctx
727 nextp1 = ctx
727 continue
728 continue
728 msg = ''
729 msg = ''
729 if self._willbecomenoop(memworkingcopy, ctx, nextp1):
730 if self._willbecomenoop(memworkingcopy, ctx, nextp1):
730 # changeset is no longer necessary
731 # changeset is no longer necessary
731 self.replacemap[ctx.node()] = None
732 self.replacemap[ctx.node()] = None
732 msg = _('became empty and was dropped')
733 msg = _('became empty and was dropped')
733 else:
734 else:
734 # changeset needs re-commit
735 # changeset needs re-commit
735 nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
736 nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1)
736 lastcommitted = self.repo[nodestr]
737 lastcommitted = self.repo[nodestr]
737 nextp1 = lastcommitted
738 nextp1 = lastcommitted
738 self.replacemap[ctx.node()] = lastcommitted.node()
739 self.replacemap[ctx.node()] = lastcommitted.node()
739 if memworkingcopy:
740 if memworkingcopy:
740 msg = _('%d file(s) changed, became %s') % (
741 msg = _('%d file(s) changed, became %s') % (
741 len(memworkingcopy), self._ctx2str(lastcommitted))
742 len(memworkingcopy), self._ctx2str(lastcommitted))
742 else:
743 else:
743 msg = _('became %s') % self._ctx2str(lastcommitted)
744 msg = _('became %s') % self._ctx2str(lastcommitted)
744 if self.ui.verbose and msg:
745 if self.ui.verbose and msg:
745 self.ui.write(_('%s: %s\n') % (self._ctx2str(ctx), msg))
746 self.ui.write(_('%s: %s\n') % (self._ctx2str(ctx), msg))
746 self.finalnode = lastcommitted and lastcommitted.node()
747 self.finalnode = lastcommitted and lastcommitted.node()
747
748
748 def _ctx2str(self, ctx):
749 def _ctx2str(self, ctx):
749 if self.ui.debugflag:
750 if self.ui.debugflag:
750 return '%d:%s' % (ctx.rev(), ctx.hex())
751 return '%d:%s' % (ctx.rev(), ctx.hex())
751 else:
752 else:
752 return '%d:%s' % (ctx.rev(), node.short(ctx.node()))
753 return '%d:%s' % (ctx.rev(), node.short(ctx.node()))
753
754
754 def _getnewfilecontents(self, ctx):
755 def _getnewfilecontents(self, ctx):
755 """(ctx) -> {path: str}
756 """(ctx) -> {path: str}
756
757
757 fetch file contents from filefixupstates.
758 fetch file contents from filefixupstates.
758 return the working copy overrides - files different from ctx.
759 return the working copy overrides - files different from ctx.
759 """
760 """
760 result = {}
761 result = {}
761 for path in self.paths:
762 for path in self.paths:
762 ctx2fctx = self.fctxmap[path] # {ctx: fctx}
763 ctx2fctx = self.fctxmap[path] # {ctx: fctx}
763 if ctx not in ctx2fctx:
764 if ctx not in ctx2fctx:
764 continue
765 continue
765 fctx = ctx2fctx[ctx]
766 fctx = ctx2fctx[ctx]
766 content = fctx.data()
767 content = fctx.data()
767 newcontent = self.fixupmap[path].getfinalcontent(fctx)
768 newcontent = self.fixupmap[path].getfinalcontent(fctx)
768 if content != newcontent:
769 if content != newcontent:
769 result[fctx.path()] = newcontent
770 result[fctx.path()] = newcontent
770 return result
771 return result
771
772
772 def _movebookmarks(self, tr):
773 def _movebookmarks(self, tr):
773 repo = self.repo
774 repo = self.repo
774 needupdate = [(name, self.replacemap[hsh])
775 needupdate = [(name, self.replacemap[hsh])
775 for name, hsh in repo._bookmarks.iteritems()
776 for name, hsh in repo._bookmarks.iteritems()
776 if hsh in self.replacemap]
777 if hsh in self.replacemap]
777 changes = []
778 changes = []
778 for name, hsh in needupdate:
779 for name, hsh in needupdate:
779 if hsh:
780 if hsh:
780 changes.append((name, hsh))
781 changes.append((name, hsh))
781 if self.ui.verbose:
782 if self.ui.verbose:
782 self.ui.write(_('moving bookmark %s to %s\n')
783 self.ui.write(_('moving bookmark %s to %s\n')
783 % (name, node.hex(hsh)))
784 % (name, node.hex(hsh)))
784 else:
785 else:
785 changes.append((name, None))
786 changes.append((name, None))
786 if self.ui.verbose:
787 if self.ui.verbose:
787 self.ui.write(_('deleting bookmark %s\n') % name)
788 self.ui.write(_('deleting bookmark %s\n') % name)
788 repo._bookmarks.applychanges(repo, tr, changes)
789 repo._bookmarks.applychanges(repo, tr, changes)
789
790
790 def _moveworkingdirectoryparent(self):
791 def _moveworkingdirectoryparent(self):
791 if not self.finalnode:
792 if not self.finalnode:
792 # Find the latest not-{obsoleted,stripped} parent.
793 # Find the latest not-{obsoleted,stripped} parent.
793 revs = self.repo.revs('max(::. - %ln)', self.replacemap.keys())
794 revs = self.repo.revs('max(::. - %ln)', self.replacemap.keys())
794 ctx = self.repo[revs.first()]
795 ctx = self.repo[revs.first()]
795 self.finalnode = ctx.node()
796 self.finalnode = ctx.node()
796 else:
797 else:
797 ctx = self.repo[self.finalnode]
798 ctx = self.repo[self.finalnode]
798
799
799 dirstate = self.repo.dirstate
800 dirstate = self.repo.dirstate
800 # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to
801 # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to
801 # be slow. in absorb's case, no need to invalidate fsmonitorstate.
802 # be slow. in absorb's case, no need to invalidate fsmonitorstate.
802 noop = lambda: 0
803 noop = lambda: 0
803 restore = noop
804 restore = noop
804 if util.safehasattr(dirstate, '_fsmonitorstate'):
805 if util.safehasattr(dirstate, '_fsmonitorstate'):
805 bak = dirstate._fsmonitorstate.invalidate
806 bak = dirstate._fsmonitorstate.invalidate
806 def restore():
807 def restore():
807 dirstate._fsmonitorstate.invalidate = bak
808 dirstate._fsmonitorstate.invalidate = bak
808 dirstate._fsmonitorstate.invalidate = noop
809 dirstate._fsmonitorstate.invalidate = noop
809 try:
810 try:
810 with dirstate.parentchange():
811 with dirstate.parentchange():
811 dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths)
812 dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths)
812 finally:
813 finally:
813 restore()
814 restore()
814
815
815 @staticmethod
816 @staticmethod
816 def _willbecomenoop(memworkingcopy, ctx, pctx=None):
817 def _willbecomenoop(memworkingcopy, ctx, pctx=None):
817 """({path: content}, ctx, ctx) -> bool. test if a commit will be noop
818 """({path: content}, ctx, ctx) -> bool. test if a commit will be noop
818
819
819 if it will become an empty commit (does not change anything, after the
820 if it will become an empty commit (does not change anything, after the
820 memworkingcopy overrides), return True. otherwise return False.
821 memworkingcopy overrides), return True. otherwise return False.
821 """
822 """
822 if not pctx:
823 if not pctx:
823 parents = ctx.parents()
824 parents = ctx.parents()
824 if len(parents) != 1:
825 if len(parents) != 1:
825 return False
826 return False
826 pctx = parents[0]
827 pctx = parents[0]
827 # ctx changes more files (not a subset of memworkingcopy)
828 # ctx changes more files (not a subset of memworkingcopy)
828 if not set(ctx.files()).issubset(set(memworkingcopy)):
829 if not set(ctx.files()).issubset(set(memworkingcopy)):
829 return False
830 return False
830 for path, content in memworkingcopy.iteritems():
831 for path, content in memworkingcopy.iteritems():
831 if path not in pctx or path not in ctx:
832 if path not in pctx or path not in ctx:
832 return False
833 return False
833 fctx = ctx[path]
834 fctx = ctx[path]
834 pfctx = pctx[path]
835 pfctx = pctx[path]
835 if pfctx.flags() != fctx.flags():
836 if pfctx.flags() != fctx.flags():
836 return False
837 return False
837 if pfctx.data() != content:
838 if pfctx.data() != content:
838 return False
839 return False
839 return True
840 return True
840
841
841 def _commitsingle(self, memworkingcopy, ctx, p1=None):
842 def _commitsingle(self, memworkingcopy, ctx, p1=None):
842 """(ctx, {path: content}, node) -> node. make a single commit
843 """(ctx, {path: content}, node) -> node. make a single commit
843
844
844 the commit is a clone from ctx, with a (optionally) different p1, and
845 the commit is a clone from ctx, with a (optionally) different p1, and
845 different file contents replaced by memworkingcopy.
846 different file contents replaced by memworkingcopy.
846 """
847 """
847 parents = p1 and (p1, node.nullid)
848 parents = p1 and (p1, node.nullid)
848 extra = ctx.extra()
849 extra = ctx.extra()
849 if self._useobsolete and self.ui.configbool('absorb', 'add-noise'):
850 if self._useobsolete and self.ui.configbool('absorb', 'add-noise'):
850 extra['absorb_source'] = ctx.hex()
851 extra['absorb_source'] = ctx.hex()
851 mctx = overlaycontext(memworkingcopy, ctx, parents, extra=extra)
852 mctx = overlaycontext(memworkingcopy, ctx, parents, extra=extra)
852 # preserve phase
853 # preserve phase
853 with mctx.repo().ui.configoverride({
854 with mctx.repo().ui.configoverride({
854 ('phases', 'new-commit'): ctx.phase()}):
855 ('phases', 'new-commit'): ctx.phase()}):
855 return mctx.commit()
856 return mctx.commit()
856
857
857 @util.propertycache
858 @util.propertycache
858 def _useobsolete(self):
859 def _useobsolete(self):
859 """() -> bool"""
860 """() -> bool"""
860 return obsolete.isenabled(self.repo, obsolete.createmarkersopt)
861 return obsolete.isenabled(self.repo, obsolete.createmarkersopt)
861
862
862 def _obsoleteoldcommits(self):
863 def _obsoleteoldcommits(self):
863 relations = [(self.repo[k], v and (self.repo[v],) or ())
864 relations = [(self.repo[k], v and (self.repo[v],) or ())
864 for k, v in self.replacemap.iteritems()]
865 for k, v in self.replacemap.iteritems()]
865 if relations:
866 if relations:
866 obsolete.createmarkers(self.repo, relations)
867 obsolete.createmarkers(self.repo, relations)
867
868
868 def _stripoldcommits(self):
869 def _stripoldcommits(self):
869 nodelist = self.replacemap.keys()
870 nodelist = self.replacemap.keys()
870 # make sure we don't strip innocent children
871 # make sure we don't strip innocent children
871 revs = self.repo.revs('%ln - (::(heads(%ln::)-%ln))', nodelist,
872 revs = self.repo.revs('%ln - (::(heads(%ln::)-%ln))', nodelist,
872 nodelist, nodelist)
873 nodelist, nodelist)
873 tonode = self.repo.changelog.node
874 tonode = self.repo.changelog.node
874 nodelist = [tonode(r) for r in revs]
875 nodelist = [tonode(r) for r in revs]
875 if nodelist:
876 if nodelist:
876 repair.strip(self.repo.ui, self.repo, nodelist)
877 repair.strip(self.repo.ui, self.repo, nodelist)
877
878
878 def _parsechunk(hunk):
879 def _parsechunk(hunk):
879 """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))"""
880 """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))"""
880 if type(hunk) not in (crecord.uihunk, patch.recordhunk):
881 if type(hunk) not in (crecord.uihunk, patch.recordhunk):
881 return None, None
882 return None, None
882 path = hunk.header.filename()
883 path = hunk.header.filename()
883 a1 = hunk.fromline + len(hunk.before) - 1
884 a1 = hunk.fromline + len(hunk.before) - 1
884 # remove before and after context
885 # remove before and after context
885 hunk.before = hunk.after = []
886 hunk.before = hunk.after = []
886 buf = util.stringio()
887 buf = util.stringio()
887 hunk.write(buf)
888 hunk.write(buf)
888 patchlines = mdiff.splitnewlines(buf.getvalue())
889 patchlines = mdiff.splitnewlines(buf.getvalue())
889 # hunk.prettystr() will update hunk.removed
890 # hunk.prettystr() will update hunk.removed
890 a2 = a1 + hunk.removed
891 a2 = a1 + hunk.removed
891 blines = [l[1:] for l in patchlines[1:] if l[0] != '-']
892 blines = [l[1:] for l in patchlines[1:] if l[0] != '-']
892 return path, (a1, a2, blines)
893 return path, (a1, a2, blines)
893
894
894 def overlaydiffcontext(ctx, chunks):
895 def overlaydiffcontext(ctx, chunks):
895 """(ctx, [crecord.uihunk]) -> memctx
896 """(ctx, [crecord.uihunk]) -> memctx
896
897
897 return a memctx with some [1] patches (chunks) applied to ctx.
898 return a memctx with some [1] patches (chunks) applied to ctx.
898 [1]: modifications are handled. renames, mode changes, etc. are ignored.
899 [1]: modifications are handled. renames, mode changes, etc. are ignored.
899 """
900 """
900 # sadly the applying-patch logic is hardly reusable, and messy:
901 # sadly the applying-patch logic is hardly reusable, and messy:
901 # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it
902 # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it
902 # needs a file stream of a patch and will re-parse it, while we have
903 # needs a file stream of a patch and will re-parse it, while we have
903 # structured hunk objects at hand.
904 # structured hunk objects at hand.
904 # 2. a lot of different implementations about "chunk" (patch.hunk,
905 # 2. a lot of different implementations about "chunk" (patch.hunk,
905 # patch.recordhunk, crecord.uihunk)
906 # patch.recordhunk, crecord.uihunk)
906 # as we only care about applying changes to modified files, no mode
907 # as we only care about applying changes to modified files, no mode
907 # change, no binary diff, and no renames, it's probably okay to
908 # change, no binary diff, and no renames, it's probably okay to
908 # re-invent the logic using much simpler code here.
909 # re-invent the logic using much simpler code here.
909 memworkingcopy = {} # {path: content}
910 memworkingcopy = {} # {path: content}
910 patchmap = defaultdict(lambda: []) # {path: [(a1, a2, [bline])]}
911 patchmap = defaultdict(lambda: []) # {path: [(a1, a2, [bline])]}
911 for path, info in map(_parsechunk, chunks):
912 for path, info in map(_parsechunk, chunks):
912 if not path or not info:
913 if not path or not info:
913 continue
914 continue
914 patchmap[path].append(info)
915 patchmap[path].append(info)
915 for path, patches in patchmap.iteritems():
916 for path, patches in patchmap.iteritems():
916 if path not in ctx or not patches:
917 if path not in ctx or not patches:
917 continue
918 continue
918 patches.sort(reverse=True)
919 patches.sort(reverse=True)
919 lines = mdiff.splitnewlines(ctx[path].data())
920 lines = mdiff.splitnewlines(ctx[path].data())
920 for a1, a2, blines in patches:
921 for a1, a2, blines in patches:
921 lines[a1:a2] = blines
922 lines[a1:a2] = blines
922 memworkingcopy[path] = ''.join(lines)
923 memworkingcopy[path] = ''.join(lines)
923 return overlaycontext(memworkingcopy, ctx)
924 return overlaycontext(memworkingcopy, ctx)
924
925
925 def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None):
926 def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None):
926 """pick fixup chunks from targetctx, apply them to stack.
927 """pick fixup chunks from targetctx, apply them to stack.
927
928
928 if targetctx is None, the working copy context will be used.
929 if targetctx is None, the working copy context will be used.
929 if stack is None, the current draft stack will be used.
930 if stack is None, the current draft stack will be used.
930 return fixupstate.
931 return fixupstate.
931 """
932 """
932 if stack is None:
933 if stack is None:
933 limit = ui.configint('absorb', 'max-stack-size')
934 limit = ui.configint('absorb', 'max-stack-size')
934 stack = getdraftstack(repo['.'], limit)
935 stack = getdraftstack(repo['.'], limit)
935 if limit and len(stack) >= limit:
936 if limit and len(stack) >= limit:
936 ui.warn(_('absorb: only the recent %d changesets will '
937 ui.warn(_('absorb: only the recent %d changesets will '
937 'be analysed\n')
938 'be analysed\n')
938 % limit)
939 % limit)
939 if not stack:
940 if not stack:
940 raise error.Abort(_('no mutable changeset to change'))
941 raise error.Abort(_('no mutable changeset to change'))
941 if targetctx is None: # default to working copy
942 if targetctx is None: # default to working copy
942 targetctx = repo[None]
943 targetctx = repo[None]
943 if pats is None:
944 if pats is None:
944 pats = ()
945 pats = ()
945 if opts is None:
946 if opts is None:
946 opts = {}
947 opts = {}
947 state = fixupstate(stack, ui=ui, opts=opts)
948 state = fixupstate(stack, ui=ui, opts=opts)
948 matcher = scmutil.match(targetctx, pats, opts)
949 matcher = scmutil.match(targetctx, pats, opts)
949 if opts.get('interactive'):
950 if opts.get('interactive'):
950 diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
951 diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher)
951 origchunks = patch.parsepatch(diff)
952 origchunks = patch.parsepatch(diff)
952 chunks = cmdutil.recordfilter(ui, origchunks)[0]
953 chunks = cmdutil.recordfilter(ui, origchunks)[0]
953 targetctx = overlaydiffcontext(stack[-1], chunks)
954 targetctx = overlaydiffcontext(stack[-1], chunks)
954 fm = None
955 fm = None
955 if opts.get('print_changes') or not opts.get('apply_changes'):
956 if opts.get('print_changes') or not opts.get('apply_changes'):
956 fm = ui.formatter('absorb', opts)
957 fm = ui.formatter('absorb', opts)
957 state.diffwith(targetctx, matcher, fm)
958 state.diffwith(targetctx, matcher, fm)
958 if fm is not None:
959 if fm is not None:
959 fm.startitem()
960 fm.startitem()
960 fm.write("count", "\n%d changesets affected\n", len(state.ctxaffected))
961 fm.write("count", "\n%d changesets affected\n", len(state.ctxaffected))
961 fm.data(linetype='summary')
962 fm.data(linetype='summary')
962 for ctx in reversed(stack):
963 for ctx in reversed(stack):
963 if ctx not in state.ctxaffected:
964 if ctx not in state.ctxaffected:
964 continue
965 continue
965 fm.startitem()
966 fm.startitem()
966 fm.context(ctx=ctx)
967 fm.context(ctx=ctx)
967 fm.data(linetype='changeset')
968 fm.data(linetype='changeset')
968 fm.write('node', '%-7.7s ', ctx.hex(), label='absorb.node')
969 fm.write('node', '%-7.7s ', ctx.hex(), label='absorb.node')
969 descfirstline = ctx.description().splitlines()[0]
970 descfirstline = ctx.description().splitlines()[0]
970 fm.write('descfirstline', '%s\n', descfirstline,
971 fm.write('descfirstline', '%s\n', descfirstline,
971 label='absorb.description')
972 label='absorb.description')
972 fm.end()
973 fm.end()
973 if not opts.get('dry_run'):
974 if not opts.get('dry_run'):
974 if not opts.get('apply_changes'):
975 if (not opts.get('apply_changes') and
975 if ui.promptchoice("apply changes (yn)? $$ &Yes $$ &No", default=1):
976 state.ctxaffected and
976 raise error.Abort(_('absorb cancelled\n'))
977 ui.promptchoice("apply changes (yn)? $$ &Yes $$ &No", default=1)):
978 raise error.Abort(_('absorb cancelled\n'))
977
979
978 state.apply()
980 state.apply()
979 if state.commit():
981 if state.commit():
980 state.printchunkstats()
982 state.printchunkstats()
981 elif not ui.quiet:
983 elif not ui.quiet:
982 ui.write(_('nothing applied\n'))
984 ui.write(_('nothing applied\n'))
983 return state
985 return state
984
986
985 @command('absorb',
987 @command('absorb',
986 [('a', 'apply-changes', None,
988 [('a', 'apply-changes', None,
987 _('apply changes without prompting for confirmation')),
989 _('apply changes without prompting for confirmation')),
988 ('p', 'print-changes', None,
990 ('p', 'print-changes', None,
989 _('always print which changesets are modified by which changes')),
991 _('always print which changesets are modified by which changes')),
990 ('i', 'interactive', None,
992 ('i', 'interactive', None,
991 _('interactively select which chunks to apply (EXPERIMENTAL)')),
993 _('interactively select which chunks to apply (EXPERIMENTAL)')),
992 ('e', 'edit-lines', None,
994 ('e', 'edit-lines', None,
993 _('edit what lines belong to which changesets before commit '
995 _('edit what lines belong to which changesets before commit '
994 '(EXPERIMENTAL)')),
996 '(EXPERIMENTAL)')),
995 ] + commands.dryrunopts + commands.templateopts + commands.walkopts,
997 ] + commands.dryrunopts + commands.templateopts + commands.walkopts,
996 _('hg absorb [OPTION] [FILE]...'),
998 _('hg absorb [OPTION] [FILE]...'),
997 helpcategory=command.CATEGORY_COMMITTING,
999 helpcategory=command.CATEGORY_COMMITTING,
998 helpbasic=True)
1000 helpbasic=True)
999 def absorbcmd(ui, repo, *pats, **opts):
1001 def absorbcmd(ui, repo, *pats, **opts):
1000 """incorporate corrections into the stack of draft changesets
1002 """incorporate corrections into the stack of draft changesets
1001
1003
1002 absorb analyzes each change in your working directory and attempts to
1004 absorb analyzes each change in your working directory and attempts to
1003 amend the changed lines into the changesets in your stack that first
1005 amend the changed lines into the changesets in your stack that first
1004 introduced those lines.
1006 introduced those lines.
1005
1007
1006 If absorb cannot find an unambiguous changeset to amend for a change,
1008 If absorb cannot find an unambiguous changeset to amend for a change,
1007 that change will be left in the working directory, untouched. They can be
1009 that change will be left in the working directory, untouched. They can be
1008 observed by :hg:`status` or :hg:`diff` afterwards. In other words,
1010 observed by :hg:`status` or :hg:`diff` afterwards. In other words,
1009 absorb does not write to the working directory.
1011 absorb does not write to the working directory.
1010
1012
1011 Changesets outside the revset `::. and not public() and not merge()` will
1013 Changesets outside the revset `::. and not public() and not merge()` will
1012 not be changed.
1014 not be changed.
1013
1015
1014 Changesets that become empty after applying the changes will be deleted.
1016 Changesets that become empty after applying the changes will be deleted.
1015
1017
1016 By default, absorb will show what it plans to do and prompt for
1018 By default, absorb will show what it plans to do and prompt for
1017 confirmation. If you are confident that the changes will be absorbed
1019 confirmation. If you are confident that the changes will be absorbed
1018 to the correct place, run :hg:`absorb -a` to apply the changes
1020 to the correct place, run :hg:`absorb -a` to apply the changes
1019 immediately.
1021 immediately.
1020
1022
1021 Returns 0 on success, 1 if all chunks were ignored and nothing amended.
1023 Returns 0 on success, 1 if all chunks were ignored and nothing amended.
1022 """
1024 """
1023 opts = pycompat.byteskwargs(opts)
1025 opts = pycompat.byteskwargs(opts)
1024 state = absorb(ui, repo, pats=pats, opts=opts)
1026 state = absorb(ui, repo, pats=pats, opts=opts)
1025 if sum(s[0] for s in state.chunkstats.values()) == 0:
1027 if sum(s[0] for s in state.chunkstats.values()) == 0:
1026 return 1
1028 return 1
@@ -1,58 +1,60 b''
1 # amend.py - provide the amend command
1 # amend.py - provide the amend command
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """provide the amend command (EXPERIMENTAL)
7 """provide the amend command (EXPERIMENTAL)
8
8
9 This extension provides an ``amend`` command that is similar to
9 This extension provides an ``amend`` command that is similar to
10 ``commit --amend`` but does not prompt an editor.
10 ``commit --amend`` but does not prompt an editor.
11 """
11 """
12
12
13 from __future__ import absolute_import
13 from __future__ import absolute_import
14
14
15 from mercurial.i18n import _
15 from mercurial.i18n import _
16 from mercurial import (
16 from mercurial import (
17 cmdutil,
17 cmdutil,
18 commands,
18 commands,
19 error,
19 error,
20 pycompat,
20 pycompat,
21 registrar,
21 registrar,
22 )
22 )
23
23
24 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
24 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
25 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
25 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
26 # be specifying the version(s) of Mercurial they are tested with, or
26 # be specifying the version(s) of Mercurial they are tested with, or
27 # leave the attribute unspecified.
27 # leave the attribute unspecified.
28 testedwith = 'ships-with-hg-core'
28 testedwith = 'ships-with-hg-core'
29
29
30 cmdtable = {}
30 cmdtable = {}
31 command = registrar.command(cmdtable)
31 command = registrar.command(cmdtable)
32
32
33 @command('amend',
33 @command('amend',
34 [('A', 'addremove', None,
34 [('A', 'addremove', None,
35 _('mark new/missing files as added/removed before committing')),
35 _('mark new/missing files as added/removed before committing')),
36 ('e', 'edit', None, _('invoke editor on commit messages')),
36 ('e', 'edit', None, _('invoke editor on commit messages')),
37 ('i', 'interactive', None, _('use interactive mode')),
37 ('i', 'interactive', None, _('use interactive mode')),
38 ('n', 'note', '', _('store a note on the amend')),
38 ('n', 'note', '', _('store a note on the amend')),
39 ('D', 'currentdate', None,
40 _('record the current date as commit date')),
39 ] + cmdutil.walkopts + cmdutil.commitopts + cmdutil.commitopts2,
41 ] + cmdutil.walkopts + cmdutil.commitopts + cmdutil.commitopts2,
40 _('[OPTION]... [FILE]...'),
42 _('[OPTION]... [FILE]...'),
41 helpcategory=command.CATEGORY_COMMITTING,
43 helpcategory=command.CATEGORY_COMMITTING,
42 inferrepo=True)
44 inferrepo=True)
43 def amend(ui, repo, *pats, **opts):
45 def amend(ui, repo, *pats, **opts):
44 """amend the working copy parent with all or specified outstanding changes
46 """amend the working copy parent with all or specified outstanding changes
45
47
46 Similar to :hg:`commit --amend`, but reuse the commit message without
48 Similar to :hg:`commit --amend`, but reuse the commit message without
47 invoking editor, unless ``--edit`` was set.
49 invoking editor, unless ``--edit`` was set.
48
50
49 See :hg:`help commit` for more details.
51 See :hg:`help commit` for more details.
50 """
52 """
51 opts = pycompat.byteskwargs(opts)
53 opts = pycompat.byteskwargs(opts)
52 if len(opts['note']) > 255:
54 if len(opts['note']) > 255:
53 raise error.Abort(_("cannot store a note of more than 255 bytes"))
55 raise error.Abort(_("cannot store a note of more than 255 bytes"))
54 with repo.wlock(), repo.lock():
56 with repo.wlock(), repo.lock():
55 if not opts.get('logfile'):
57 if not opts.get('logfile'):
56 opts['message'] = opts.get('message') or repo['.'].description()
58 opts['message'] = opts.get('message') or repo['.'].description()
57 opts['amend'] = True
59 opts['amend'] = True
58 return commands._docommit(ui, repo, *pats, **pycompat.strkwargs(opts))
60 return commands._docommit(ui, repo, *pats, **pycompat.strkwargs(opts))
@@ -1,96 +1,94 b''
1 # -*- coding: UTF-8 -*-
1 # -*- coding: UTF-8 -*-
2 # beautifygraph.py - improve graph output by using Unicode characters
2 # beautifygraph.py - improve graph output by using Unicode characters
3 #
3 #
4 # Copyright 2018 John Stiles <johnstiles@gmail.com>
4 # Copyright 2018 John Stiles <johnstiles@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''beautify log -G output by using Unicode characters (EXPERIMENTAL)
9 '''beautify log -G output by using Unicode characters (EXPERIMENTAL)
10
10
11 A terminal with UTF-8 support and monospace narrow text are required.
11 A terminal with UTF-8 support and monospace narrow text are required.
12 '''
12 '''
13
13
14 from __future__ import absolute_import
14 from __future__ import absolute_import
15
15
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 from mercurial import (
17 from mercurial import (
18 encoding,
18 encoding,
19 extensions,
19 extensions,
20 graphmod,
20 graphmod,
21 pycompat,
21 pycompat,
22 templatekw,
22 templatekw,
23 )
23 )
24
24
25 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
25 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
26 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
26 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
27 # be specifying the version(s) of Mercurial they are tested with, or
27 # be specifying the version(s) of Mercurial they are tested with, or
28 # leave the attribute unspecified.
28 # leave the attribute unspecified.
29 testedwith = 'ships-with-hg-core'
29 testedwith = 'ships-with-hg-core'
30
30
31 def prettyedge(before, edge, after):
31 def prettyedge(before, edge, after):
32 if edge == '~':
32 if edge == '~':
33 return '\xE2\x95\xA7' # U+2567 ╧
33 return '\xE2\x95\xA7' # U+2567 ╧
34 if edge == 'X':
35 return '\xE2\x95\xB3' # U+2573 ╳
36 if edge == '/':
34 if edge == '/':
37 return '\xE2\x95\xB1' # U+2571 ╱
35 return '\xE2\x95\xB1' # U+2571 ╱
38 if edge == '-':
36 if edge == '-':
39 return '\xE2\x94\x80' # U+2500 ─
37 return '\xE2\x94\x80' # U+2500 ─
40 if edge == '|':
38 if edge == '|':
41 return '\xE2\x94\x82' # U+2502 │
39 return '\xE2\x94\x82' # U+2502 │
42 if edge == ':':
40 if edge == ':':
43 return '\xE2\x94\x86' # U+2506 ┆
41 return '\xE2\x94\x86' # U+2506 ┆
44 if edge == '\\':
42 if edge == '\\':
45 return '\xE2\x95\xB2' # U+2572 ╲
43 return '\xE2\x95\xB2' # U+2572 ╲
46 if edge == '+':
44 if edge == '+':
47 if before == ' ' and not after == ' ':
45 if before == ' ' and not after == ' ':
48 return '\xE2\x94\x9C' # U+251C ├
46 return '\xE2\x94\x9C' # U+251C ├
49 if after == ' ' and not before == ' ':
47 if after == ' ' and not before == ' ':
50 return '\xE2\x94\xA4' # U+2524 ┤
48 return '\xE2\x94\xA4' # U+2524 ┤
51 return '\xE2\x94\xBC' # U+253C ┼
49 return '\xE2\x94\xBC' # U+253C ┼
52 return edge
50 return edge
53
51
54 def convertedges(line):
52 def convertedges(line):
55 line = ' %s ' % line
53 line = ' %s ' % line
56 pretty = []
54 pretty = []
57 for idx in pycompat.xrange(len(line) - 2):
55 for idx in pycompat.xrange(len(line) - 2):
58 pretty.append(prettyedge(line[idx:idx + 1],
56 pretty.append(prettyedge(line[idx:idx + 1],
59 line[idx + 1:idx + 2],
57 line[idx + 1:idx + 2],
60 line[idx + 2:idx + 3]))
58 line[idx + 2:idx + 3]))
61 return ''.join(pretty)
59 return ''.join(pretty)
62
60
63 def getprettygraphnode(orig, *args, **kwargs):
61 def getprettygraphnode(orig, *args, **kwargs):
64 node = orig(*args, **kwargs)
62 node = orig(*args, **kwargs)
65 if node == 'o':
63 if node == 'o':
66 return '\xE2\x97\x8B' # U+25CB ○
64 return '\xE2\x97\x8B' # U+25CB ○
67 if node == '@':
65 if node == '@':
68 return '\xE2\x97\x8D' # U+25CD ◍
66 return '\xE2\x97\x8D' # U+25CD ◍
69 if node == '*':
67 if node == '*':
70 return '\xE2\x88\x97' # U+2217 ∗
68 return '\xE2\x88\x97' # U+2217 ∗
71 if node == 'x':
69 if node == 'x':
72 return '\xE2\x97\x8C' # U+25CC ◌
70 return '\xE2\x97\x8C' # U+25CC ◌
73 if node == '_':
71 if node == '_':
74 return '\xE2\x95\xA4' # U+2564 ╤
72 return '\xE2\x95\xA4' # U+2564 ╤
75 return node
73 return node
76
74
77 def outputprettygraph(orig, ui, graph, *args, **kwargs):
75 def outputprettygraph(orig, ui, graph, *args, **kwargs):
78 (edges, text) = zip(*graph)
76 (edges, text) = zip(*graph)
79 graph = zip([convertedges(e) for e in edges], text)
77 graph = zip([convertedges(e) for e in edges], text)
80 return orig(ui, graph, *args, **kwargs)
78 return orig(ui, graph, *args, **kwargs)
81
79
82 def extsetup(ui):
80 def extsetup(ui):
83 if ui.plain('graph'):
81 if ui.plain('graph'):
84 return
82 return
85
83
86 if encoding.encoding != 'UTF-8':
84 if encoding.encoding != 'UTF-8':
87 ui.warn(_('beautifygraph: unsupported encoding, UTF-8 required\n'))
85 ui.warn(_('beautifygraph: unsupported encoding, UTF-8 required\n'))
88 return
86 return
89
87
90 if r'A' in encoding._wide:
88 if r'A' in encoding._wide:
91 ui.warn(_('beautifygraph: unsupported terminal settings, '
89 ui.warn(_('beautifygraph: unsupported terminal settings, '
92 'monospace narrow text required\n'))
90 'monospace narrow text required\n'))
93 return
91 return
94
92
95 extensions.wrapfunction(graphmod, 'outputgraph', outputprettygraph)
93 extensions.wrapfunction(graphmod, 'outputgraph', outputprettygraph)
96 extensions.wrapfunction(templatekw, 'getgraphnode', getprettygraphnode)
94 extensions.wrapfunction(templatekw, 'getgraphnode', getprettygraphnode)
@@ -1,257 +1,198 b''
1 # blackbox.py - log repository events to a file for post-mortem debugging
1 # blackbox.py - log repository events to a file for post-mortem debugging
2 #
2 #
3 # Copyright 2010 Nicolas Dumazet
3 # Copyright 2010 Nicolas Dumazet
4 # Copyright 2013 Facebook, Inc.
4 # Copyright 2013 Facebook, Inc.
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 """log repository events to a blackbox for debugging
9 """log repository events to a blackbox for debugging
10
10
11 Logs event information to .hg/blackbox.log to help debug and diagnose problems.
11 Logs event information to .hg/blackbox.log to help debug and diagnose problems.
12 The events that get logged can be configured via the blackbox.track config key.
12 The events that get logged can be configured via the blackbox.track config key.
13
13
14 Examples::
14 Examples::
15
15
16 [blackbox]
16 [blackbox]
17 track = *
17 track = *
18 # dirty is *EXPENSIVE* (slow);
18 # dirty is *EXPENSIVE* (slow);
19 # each log entry indicates `+` if the repository is dirty, like :hg:`id`.
19 # each log entry indicates `+` if the repository is dirty, like :hg:`id`.
20 dirty = True
20 dirty = True
21 # record the source of log messages
21 # record the source of log messages
22 logsource = True
22 logsource = True
23
23
24 [blackbox]
24 [blackbox]
25 track = command, commandfinish, commandexception, exthook, pythonhook
25 track = command, commandfinish, commandexception, exthook, pythonhook
26
26
27 [blackbox]
27 [blackbox]
28 track = incoming
28 track = incoming
29
29
30 [blackbox]
30 [blackbox]
31 # limit the size of a log file
31 # limit the size of a log file
32 maxsize = 1.5 MB
32 maxsize = 1.5 MB
33 # rotate up to N log files when the current one gets too big
33 # rotate up to N log files when the current one gets too big
34 maxfiles = 3
34 maxfiles = 3
35
35
36 [blackbox]
37 # Include nanoseconds in log entries with %f (see Python function
38 # datetime.datetime.strftime)
39 date-format = '%Y-%m-%d @ %H:%M:%S.%f'
40
36 """
41 """
37
42
38 from __future__ import absolute_import
43 from __future__ import absolute_import
39
44
40 import errno
41 import re
45 import re
42
46
43 from mercurial.i18n import _
47 from mercurial.i18n import _
44 from mercurial.node import hex
48 from mercurial.node import hex
45
49
46 from mercurial import (
50 from mercurial import (
47 encoding,
51 encoding,
48 pycompat,
52 loggingutil,
49 registrar,
53 registrar,
50 ui as uimod,
51 util,
52 )
54 )
53 from mercurial.utils import (
55 from mercurial.utils import (
54 dateutil,
56 dateutil,
55 procutil,
57 procutil,
56 )
58 )
57
59
58 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
60 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
59 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
61 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
60 # be specifying the version(s) of Mercurial they are tested with, or
62 # be specifying the version(s) of Mercurial they are tested with, or
61 # leave the attribute unspecified.
63 # leave the attribute unspecified.
62 testedwith = 'ships-with-hg-core'
64 testedwith = 'ships-with-hg-core'
63
65
64 cmdtable = {}
66 cmdtable = {}
65 command = registrar.command(cmdtable)
67 command = registrar.command(cmdtable)
66
68
67 configtable = {}
69 configtable = {}
68 configitem = registrar.configitem(configtable)
70 configitem = registrar.configitem(configtable)
69
71
70 configitem('blackbox', 'dirty',
72 configitem('blackbox', 'dirty',
71 default=False,
73 default=False,
72 )
74 )
73 configitem('blackbox', 'maxsize',
75 configitem('blackbox', 'maxsize',
74 default='1 MB',
76 default='1 MB',
75 )
77 )
76 configitem('blackbox', 'logsource',
78 configitem('blackbox', 'logsource',
77 default=False,
79 default=False,
78 )
80 )
79 configitem('blackbox', 'maxfiles',
81 configitem('blackbox', 'maxfiles',
80 default=7,
82 default=7,
81 )
83 )
82 configitem('blackbox', 'track',
84 configitem('blackbox', 'track',
83 default=lambda: ['*'],
85 default=lambda: ['*'],
84 )
86 )
87 configitem('blackbox', 'date-format',
88 default='%Y/%m/%d %H:%M:%S',
89 )
85
90
86 lastui = None
91 _lastlogger = loggingutil.proxylogger()
87
92
88 def _openlogfile(ui, vfs):
93 class blackboxlogger(object):
89 def rotate(oldpath, newpath):
94 def __init__(self, ui, repo):
90 try:
95 self._repo = repo
91 vfs.unlink(newpath)
96 self._trackedevents = set(ui.configlist('blackbox', 'track'))
92 except OSError as err:
97 self._maxfiles = ui.configint('blackbox', 'maxfiles')
93 if err.errno != errno.ENOENT:
98 self._maxsize = ui.configbytes('blackbox', 'maxsize')
94 ui.debug("warning: cannot remove '%s': %s\n" %
99 self._inlog = False
95 (newpath, err.strerror))
96 try:
97 if newpath:
98 vfs.rename(oldpath, newpath)
99 except OSError as err:
100 if err.errno != errno.ENOENT:
101 ui.debug("warning: cannot rename '%s' to '%s': %s\n" %
102 (newpath, oldpath, err.strerror))
103
100
104 maxsize = ui.configbytes('blackbox', 'maxsize')
101 def tracked(self, event):
105 name = 'blackbox.log'
102 return b'*' in self._trackedevents or event in self._trackedevents
106 if maxsize > 0:
103
104 def log(self, ui, event, msg, opts):
105 # self._log() -> ctx.dirty() may create new subrepo instance, which
106 # ui is derived from baseui. So the recursion guard in ui.log()
107 # doesn't work as it's local to the ui instance.
108 if self._inlog:
109 return
110 self._inlog = True
107 try:
111 try:
108 st = vfs.stat(name)
112 self._log(ui, event, msg, opts)
109 except OSError:
113 finally:
110 pass
114 self._inlog = False
111 else:
112 if st.st_size >= maxsize:
113 path = vfs.join(name)
114 maxfiles = ui.configint('blackbox', 'maxfiles')
115 for i in pycompat.xrange(maxfiles - 1, 1, -1):
116 rotate(oldpath='%s.%d' % (path, i - 1),
117 newpath='%s.%d' % (path, i))
118 rotate(oldpath=path,
119 newpath=maxfiles > 0 and path + '.1')
120 return vfs(name, 'a')
121
122 def wrapui(ui):
123 class blackboxui(ui.__class__):
124 @property
125 def _bbvfs(self):
126 vfs = None
127 repo = getattr(self, '_bbrepo', None)
128 if repo:
129 vfs = repo.vfs
130 if not vfs.isdir('.'):
131 vfs = None
132 return vfs
133
134 @util.propertycache
135 def track(self):
136 return self.configlist('blackbox', 'track')
137
138 def debug(self, *msg, **opts):
139 super(blackboxui, self).debug(*msg, **opts)
140 if self.debugflag:
141 self.log('debug', '%s', ''.join(msg))
142
143 def log(self, event, *msg, **opts):
144 global lastui
145 super(blackboxui, self).log(event, *msg, **opts)
146
115
147 if not '*' in self.track and not event in self.track:
116 def _log(self, ui, event, msg, opts):
148 return
117 default = ui.configdate('devel', 'default-date')
149
118 date = dateutil.datestr(default, ui.config('blackbox', 'date-format'))
150 if self._bbvfs:
119 user = procutil.getuser()
151 ui = self
120 pid = '%d' % procutil.getpid()
152 else:
121 rev = '(unknown)'
153 # certain ui instances exist outside the context of
122 changed = ''
154 # a repo, so just default to the last blackbox that
123 ctx = self._repo[None]
155 # was seen.
124 parents = ctx.parents()
156 ui = lastui
125 rev = ('+'.join([hex(p.node()) for p in parents]))
157
126 if (ui.configbool('blackbox', 'dirty') and
158 if not ui:
127 ctx.dirty(missing=True, merge=False, branch=False)):
159 return
128 changed = '+'
160 vfs = ui._bbvfs
129 if ui.configbool('blackbox', 'logsource'):
161 if not vfs:
130 src = ' [%s]' % event
162 return
131 else:
132 src = ''
133 try:
134 fmt = '%s %s @%s%s (%s)%s> %s'
135 args = (date, user, rev, changed, pid, src, msg)
136 with loggingutil.openlogfile(
137 ui, self._repo.vfs, name='blackbox.log',
138 maxfiles=self._maxfiles, maxsize=self._maxsize) as fp:
139 fp.write(fmt % args)
140 except (IOError, OSError) as err:
141 # deactivate this to avoid failed logging again
142 self._trackedevents.clear()
143 ui.debug('warning: cannot write to blackbox.log: %s\n' %
144 encoding.strtolocal(err.strerror))
145 return
146 _lastlogger.logger = self
163
147
164 repo = getattr(ui, '_bbrepo', None)
148 def uipopulate(ui):
165 if not lastui or repo:
149 ui.setlogger(b'blackbox', _lastlogger)
166 lastui = ui
167 if getattr(ui, '_bbinlog', False):
168 # recursion and failure guard
169 return
170 ui._bbinlog = True
171 default = self.configdate('devel', 'default-date')
172 date = dateutil.datestr(default, '%Y/%m/%d %H:%M:%S')
173 user = procutil.getuser()
174 pid = '%d' % procutil.getpid()
175 formattedmsg = msg[0] % msg[1:]
176 rev = '(unknown)'
177 changed = ''
178 if repo:
179 ctx = repo[None]
180 parents = ctx.parents()
181 rev = ('+'.join([hex(p.node()) for p in parents]))
182 if (ui.configbool('blackbox', 'dirty') and
183 ctx.dirty(missing=True, merge=False, branch=False)):
184 changed = '+'
185 if ui.configbool('blackbox', 'logsource'):
186 src = ' [%s]' % event
187 else:
188 src = ''
189 try:
190 fmt = '%s %s @%s%s (%s)%s> %s'
191 args = (date, user, rev, changed, pid, src, formattedmsg)
192 with _openlogfile(ui, vfs) as fp:
193 fp.write(fmt % args)
194 except (IOError, OSError) as err:
195 self.debug('warning: cannot write to blackbox.log: %s\n' %
196 encoding.strtolocal(err.strerror))
197 # do not restore _bbinlog intentionally to avoid failed
198 # logging again
199 else:
200 ui._bbinlog = False
201
202 def setrepo(self, repo):
203 self._bbrepo = repo
204
205 ui.__class__ = blackboxui
206 uimod.ui = blackboxui
207
208 def uisetup(ui):
209 wrapui(ui)
210
150
211 def reposetup(ui, repo):
151 def reposetup(ui, repo):
212 # During 'hg pull' a httppeer repo is created to represent the remote repo.
152 # During 'hg pull' a httppeer repo is created to represent the remote repo.
213 # It doesn't have a .hg directory to put a blackbox in, so we don't do
153 # It doesn't have a .hg directory to put a blackbox in, so we don't do
214 # the blackbox setup for it.
154 # the blackbox setup for it.
215 if not repo.local():
155 if not repo.local():
216 return
156 return
217
157
218 if util.safehasattr(ui, 'setrepo'):
158 # Since blackbox.log is stored in the repo directory, the logger should be
219 ui.setrepo(repo)
159 # instantiated per repository.
160 logger = blackboxlogger(ui, repo)
161 ui.setlogger(b'blackbox', logger)
220
162
221 # Set lastui even if ui.log is not called. This gives blackbox a
163 # Set _lastlogger even if ui.log is not called. This gives blackbox a
222 # fallback place to log.
164 # fallback place to log
223 global lastui
165 if _lastlogger.logger is None:
224 if lastui is None:
166 _lastlogger.logger = logger
225 lastui = ui
226
167
227 repo._wlockfreeprefix.add('blackbox.log')
168 repo._wlockfreeprefix.add('blackbox.log')
228
169
229 @command('blackbox',
170 @command('blackbox',
230 [('l', 'limit', 10, _('the number of events to show')),
171 [('l', 'limit', 10, _('the number of events to show')),
231 ],
172 ],
232 _('hg blackbox [OPTION]...'),
173 _('hg blackbox [OPTION]...'),
233 helpcategory=command.CATEGORY_MAINTENANCE,
174 helpcategory=command.CATEGORY_MAINTENANCE,
234 helpbasic=True)
175 helpbasic=True)
235 def blackbox(ui, repo, *revs, **opts):
176 def blackbox(ui, repo, *revs, **opts):
236 '''view the recent repository events
177 '''view the recent repository events
237 '''
178 '''
238
179
239 if not repo.vfs.exists('blackbox.log'):
180 if not repo.vfs.exists('blackbox.log'):
240 return
181 return
241
182
242 limit = opts.get(r'limit')
183 limit = opts.get(r'limit')
243 fp = repo.vfs('blackbox.log', 'r')
184 fp = repo.vfs('blackbox.log', 'r')
244 lines = fp.read().split('\n')
185 lines = fp.read().split('\n')
245
186
246 count = 0
187 count = 0
247 output = []
188 output = []
248 for line in reversed(lines):
189 for line in reversed(lines):
249 if count >= limit:
190 if count >= limit:
250 break
191 break
251
192
252 # count the commands by matching lines like: 2013/01/23 19:13:36 root>
193 # count the commands by matching lines like: 2013/01/23 19:13:36 root>
253 if re.match('^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*', line):
194 if re.match('^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*', line):
254 count += 1
195 count += 1
255 output.append(line)
196 output.append(line)
256
197
257 ui.status('\n'.join(reversed(output)))
198 ui.status('\n'.join(reversed(output)))
@@ -1,465 +1,479 b''
1 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
1 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
2 # Copyright 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
2 # Copyright 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
3 #
3 #
4 # This software may be used and distributed according to the terms of the
4 # This software may be used and distributed according to the terms of the
5 # GNU General Public License version 2 or any later version.
5 # GNU General Public License version 2 or any later version.
6
6
7 from __future__ import absolute_import, print_function
7 from __future__ import absolute_import, print_function
8
8
9 import posixpath
9 import posixpath
10
10
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12 from mercurial import (
12 from mercurial import (
13 error,
13 error,
14 pycompat,
14 pycompat,
15 )
15 )
16 from . import common
16 from . import common
17 SKIPREV = common.SKIPREV
17 SKIPREV = common.SKIPREV
18
18
19 def rpairs(path):
19 def rpairs(path):
20 '''Yield tuples with path split at '/', starting with the full path.
20 '''Yield tuples with path split at '/', starting with the full path.
21 No leading, trailing or double '/', please.
21 No leading, trailing or double '/', please.
22 >>> for x in rpairs(b'foo/bar/baz'): print(x)
22 >>> for x in rpairs(b'foo/bar/baz'): print(x)
23 ('foo/bar/baz', '')
23 ('foo/bar/baz', '')
24 ('foo/bar', 'baz')
24 ('foo/bar', 'baz')
25 ('foo', 'bar/baz')
25 ('foo', 'bar/baz')
26 ('.', 'foo/bar/baz')
26 ('.', 'foo/bar/baz')
27 '''
27 '''
28 i = len(path)
28 i = len(path)
29 while i != -1:
29 while i != -1:
30 yield path[:i], path[i + 1:]
30 yield path[:i], path[i + 1:]
31 i = path.rfind('/', 0, i)
31 i = path.rfind('/', 0, i)
32 yield '.', path
32 yield '.', path
33
33
34 def normalize(path):
34 def normalize(path):
35 ''' We use posixpath.normpath to support cross-platform path format.
35 ''' We use posixpath.normpath to support cross-platform path format.
36 However, it doesn't handle None input. So we wrap it up. '''
36 However, it doesn't handle None input. So we wrap it up. '''
37 if path is None:
37 if path is None:
38 return None
38 return None
39 return posixpath.normpath(path)
39 return posixpath.normpath(path)
40
40
41 class filemapper(object):
41 class filemapper(object):
42 '''Map and filter filenames when importing.
42 '''Map and filter filenames when importing.
43 A name can be mapped to itself, a new name, or None (omit from new
43 A name can be mapped to itself, a new name, or None (omit from new
44 repository).'''
44 repository).'''
45
45
46 def __init__(self, ui, path=None):
46 def __init__(self, ui, path=None):
47 self.ui = ui
47 self.ui = ui
48 self.include = {}
48 self.include = {}
49 self.exclude = {}
49 self.exclude = {}
50 self.rename = {}
50 self.rename = {}
51 self.targetprefixes = None
51 self.targetprefixes = None
52 if path:
52 if path:
53 if self.parse(path):
53 if self.parse(path):
54 raise error.Abort(_('errors in filemap'))
54 raise error.Abort(_('errors in filemap'))
55
55
56 def parse(self, path):
56 def parse(self, path):
57 errs = 0
57 errs = 0
58 def check(name, mapping, listname):
58 def check(name, mapping, listname):
59 if not name:
59 if not name:
60 self.ui.warn(_('%s:%d: path to %s is missing\n') %
60 self.ui.warn(_('%s:%d: path to %s is missing\n') %
61 (lex.infile, lex.lineno, listname))
61 (lex.infile, lex.lineno, listname))
62 return 1
62 return 1
63 if name in mapping:
63 if name in mapping:
64 self.ui.warn(_('%s:%d: %r already in %s list\n') %
64 self.ui.warn(_('%s:%d: %r already in %s list\n') %
65 (lex.infile, lex.lineno, name, listname))
65 (lex.infile, lex.lineno, name, listname))
66 return 1
66 return 1
67 if (name.startswith('/') or
67 if (name.startswith('/') or
68 name.endswith('/') or
68 name.endswith('/') or
69 '//' in name):
69 '//' in name):
70 self.ui.warn(_('%s:%d: superfluous / in %s %r\n') %
70 self.ui.warn(_('%s:%d: superfluous / in %s %r\n') %
71 (lex.infile, lex.lineno, listname,
71 (lex.infile, lex.lineno, listname,
72 pycompat.bytestr(name)))
72 pycompat.bytestr(name)))
73 return 1
73 return 1
74 return 0
74 return 0
75 lex = common.shlexer(
75 lex = common.shlexer(
76 filepath=path, wordchars='!@#$%^&*()-=+[]{}|;:,./<>?')
76 filepath=path, wordchars='!@#$%^&*()-=+[]{}|;:,./<>?')
77 cmd = lex.get_token()
77 cmd = lex.get_token()
78 while cmd:
78 while cmd:
79 if cmd == 'include':
79 if cmd == 'include':
80 name = normalize(lex.get_token())
80 name = normalize(lex.get_token())
81 errs += check(name, self.exclude, 'exclude')
81 errs += check(name, self.exclude, 'exclude')
82 self.include[name] = name
82 self.include[name] = name
83 elif cmd == 'exclude':
83 elif cmd == 'exclude':
84 name = normalize(lex.get_token())
84 name = normalize(lex.get_token())
85 errs += check(name, self.include, 'include')
85 errs += check(name, self.include, 'include')
86 errs += check(name, self.rename, 'rename')
86 errs += check(name, self.rename, 'rename')
87 self.exclude[name] = name
87 self.exclude[name] = name
88 elif cmd == 'rename':
88 elif cmd == 'rename':
89 src = normalize(lex.get_token())
89 src = normalize(lex.get_token())
90 dest = normalize(lex.get_token())
90 dest = normalize(lex.get_token())
91 errs += check(src, self.exclude, 'exclude')
91 errs += check(src, self.exclude, 'exclude')
92 self.rename[src] = dest
92 self.rename[src] = dest
93 elif cmd == 'source':
93 elif cmd == 'source':
94 errs += self.parse(normalize(lex.get_token()))
94 errs += self.parse(normalize(lex.get_token()))
95 else:
95 else:
96 self.ui.warn(_('%s:%d: unknown directive %r\n') %
96 self.ui.warn(_('%s:%d: unknown directive %r\n') %
97 (lex.infile, lex.lineno, pycompat.bytestr(cmd)))
97 (lex.infile, lex.lineno, pycompat.bytestr(cmd)))
98 errs += 1
98 errs += 1
99 cmd = lex.get_token()
99 cmd = lex.get_token()
100 return errs
100 return errs
101
101
102 def lookup(self, name, mapping):
102 def lookup(self, name, mapping):
103 name = normalize(name)
103 name = normalize(name)
104 for pre, suf in rpairs(name):
104 for pre, suf in rpairs(name):
105 try:
105 try:
106 return mapping[pre], pre, suf
106 return mapping[pre], pre, suf
107 except KeyError:
107 except KeyError:
108 pass
108 pass
109 return '', name, ''
109 return '', name, ''
110
110
111 def istargetfile(self, filename):
111 def istargetfile(self, filename):
112 """Return true if the given target filename is covered as a destination
112 """Return true if the given target filename is covered as a destination
113 of the filemap. This is useful for identifying what parts of the target
113 of the filemap. This is useful for identifying what parts of the target
114 repo belong to the source repo and what parts don't."""
114 repo belong to the source repo and what parts don't."""
115 if self.targetprefixes is None:
115 if self.targetprefixes is None:
116 self.targetprefixes = set()
116 self.targetprefixes = set()
117 for before, after in self.rename.iteritems():
117 for before, after in self.rename.iteritems():
118 self.targetprefixes.add(after)
118 self.targetprefixes.add(after)
119
119
120 # If "." is a target, then all target files are considered from the
120 # If "." is a target, then all target files are considered from the
121 # source.
121 # source.
122 if not self.targetprefixes or '.' in self.targetprefixes:
122 if not self.targetprefixes or '.' in self.targetprefixes:
123 return True
123 return True
124
124
125 filename = normalize(filename)
125 filename = normalize(filename)
126 for pre, suf in rpairs(filename):
126 for pre, suf in rpairs(filename):
127 # This check is imperfect since it doesn't account for the
127 # This check is imperfect since it doesn't account for the
128 # include/exclude list, but it should work in filemaps that don't
128 # include/exclude list, but it should work in filemaps that don't
129 # apply include/exclude to the same source directories they are
129 # apply include/exclude to the same source directories they are
130 # renaming.
130 # renaming.
131 if pre in self.targetprefixes:
131 if pre in self.targetprefixes:
132 return True
132 return True
133 return False
133 return False
134
134
135 def __call__(self, name):
135 def __call__(self, name):
136 if self.include:
136 if self.include:
137 inc = self.lookup(name, self.include)[0]
137 inc = self.lookup(name, self.include)[0]
138 else:
138 else:
139 inc = name
139 inc = name
140 if self.exclude:
140 if self.exclude:
141 exc = self.lookup(name, self.exclude)[0]
141 exc = self.lookup(name, self.exclude)[0]
142 else:
142 else:
143 exc = ''
143 exc = ''
144 if (not self.include and exc) or (len(inc) <= len(exc)):
144 if (not self.include and exc) or (len(inc) <= len(exc)):
145 return None
145 return None
146 newpre, pre, suf = self.lookup(name, self.rename)
146 newpre, pre, suf = self.lookup(name, self.rename)
147 if newpre:
147 if newpre:
148 if newpre == '.':
148 if newpre == '.':
149 return suf
149 return suf
150 if suf:
150 if suf:
151 if newpre.endswith('/'):
151 if newpre.endswith('/'):
152 return newpre + suf
152 return newpre + suf
153 return newpre + '/' + suf
153 return newpre + '/' + suf
154 return newpre
154 return newpre
155 return name
155 return name
156
156
157 def active(self):
157 def active(self):
158 return bool(self.include or self.exclude or self.rename)
158 return bool(self.include or self.exclude or self.rename)
159
159
160 # This class does two additional things compared to a regular source:
160 # This class does two additional things compared to a regular source:
161 #
161 #
162 # - Filter and rename files. This is mostly wrapped by the filemapper
162 # - Filter and rename files. This is mostly wrapped by the filemapper
163 # class above. We hide the original filename in the revision that is
163 # class above. We hide the original filename in the revision that is
164 # returned by getchanges to be able to find things later in getfile.
164 # returned by getchanges to be able to find things later in getfile.
165 #
165 #
166 # - Return only revisions that matter for the files we're interested in.
166 # - Return only revisions that matter for the files we're interested in.
167 # This involves rewriting the parents of the original revision to
167 # This involves rewriting the parents of the original revision to
168 # create a graph that is restricted to those revisions.
168 # create a graph that is restricted to those revisions.
169 #
169 #
170 # This set of revisions includes not only revisions that directly
170 # This set of revisions includes not only revisions that directly
171 # touch files we're interested in, but also merges that merge two
171 # touch files we're interested in, but also merges that merge two
172 # or more interesting revisions.
172 # or more interesting revisions.
173
173
174 class filemap_source(common.converter_source):
174 class filemap_source(common.converter_source):
175 def __init__(self, ui, baseconverter, filemap):
175 def __init__(self, ui, baseconverter, filemap):
176 super(filemap_source, self).__init__(ui, baseconverter.repotype)
176 super(filemap_source, self).__init__(ui, baseconverter.repotype)
177 self.base = baseconverter
177 self.base = baseconverter
178 self.filemapper = filemapper(ui, filemap)
178 self.filemapper = filemapper(ui, filemap)
179 self.commits = {}
179 self.commits = {}
180 # if a revision rev has parent p in the original revision graph, then
180 # if a revision rev has parent p in the original revision graph, then
181 # rev will have parent self.parentmap[p] in the restricted graph.
181 # rev will have parent self.parentmap[p] in the restricted graph.
182 self.parentmap = {}
182 self.parentmap = {}
183 # self.wantedancestors[rev] is the set of all ancestors of rev that
183 # self.wantedancestors[rev] is the set of all ancestors of rev that
184 # are in the restricted graph.
184 # are in the restricted graph.
185 self.wantedancestors = {}
185 self.wantedancestors = {}
186 self.convertedorder = None
186 self.convertedorder = None
187 self._rebuilt = False
187 self._rebuilt = False
188 self.origparents = {}
188 self.origparents = {}
189 self.children = {}
189 self.children = {}
190 self.seenchildren = {}
190 self.seenchildren = {}
191 # experimental config: convert.ignoreancestorcheck
191 # experimental config: convert.ignoreancestorcheck
192 self.ignoreancestorcheck = self.ui.configbool('convert',
192 self.ignoreancestorcheck = self.ui.configbool('convert',
193 'ignoreancestorcheck')
193 'ignoreancestorcheck')
194
194
195 def before(self):
195 def before(self):
196 self.base.before()
196 self.base.before()
197
197
198 def after(self):
198 def after(self):
199 self.base.after()
199 self.base.after()
200
200
201 def setrevmap(self, revmap):
201 def setrevmap(self, revmap):
202 # rebuild our state to make things restartable
202 # rebuild our state to make things restartable
203 #
203 #
204 # To avoid calling getcommit for every revision that has already
204 # To avoid calling getcommit for every revision that has already
205 # been converted, we rebuild only the parentmap, delaying the
205 # been converted, we rebuild only the parentmap, delaying the
206 # rebuild of wantedancestors until we need it (i.e. until a
206 # rebuild of wantedancestors until we need it (i.e. until a
207 # merge).
207 # merge).
208 #
208 #
209 # We assume the order argument lists the revisions in
209 # We assume the order argument lists the revisions in
210 # topological order, so that we can infer which revisions were
210 # topological order, so that we can infer which revisions were
211 # wanted by previous runs.
211 # wanted by previous runs.
212 self._rebuilt = not revmap
212 self._rebuilt = not revmap
213 seen = {SKIPREV: SKIPREV}
213 seen = {SKIPREV: SKIPREV}
214 dummyset = set()
214 dummyset = set()
215 converted = []
215 converted = []
216 for rev in revmap.order:
216 for rev in revmap.order:
217 mapped = revmap[rev]
217 mapped = revmap[rev]
218 wanted = mapped not in seen
218 wanted = mapped not in seen
219 if wanted:
219 if wanted:
220 seen[mapped] = rev
220 seen[mapped] = rev
221 self.parentmap[rev] = rev
221 self.parentmap[rev] = rev
222 else:
222 else:
223 self.parentmap[rev] = seen[mapped]
223 self.parentmap[rev] = seen[mapped]
224 self.wantedancestors[rev] = dummyset
224 self.wantedancestors[rev] = dummyset
225 arg = seen[mapped]
225 arg = seen[mapped]
226 if arg == SKIPREV:
226 if arg == SKIPREV:
227 arg = None
227 arg = None
228 converted.append((rev, wanted, arg))
228 converted.append((rev, wanted, arg))
229 self.convertedorder = converted
229 self.convertedorder = converted
230 return self.base.setrevmap(revmap)
230 return self.base.setrevmap(revmap)
231
231
232 def rebuild(self):
232 def rebuild(self):
233 if self._rebuilt:
233 if self._rebuilt:
234 return True
234 return True
235 self._rebuilt = True
235 self._rebuilt = True
236 self.parentmap.clear()
236 self.parentmap.clear()
237 self.wantedancestors.clear()
237 self.wantedancestors.clear()
238 self.seenchildren.clear()
238 self.seenchildren.clear()
239 for rev, wanted, arg in self.convertedorder:
239 for rev, wanted, arg in self.convertedorder:
240 if rev not in self.origparents:
240 if rev not in self.origparents:
241 try:
241 try:
242 self.origparents[rev] = self.getcommit(rev).parents
242 self.origparents[rev] = self.getcommit(rev).parents
243 except error.RepoLookupError:
243 except error.RepoLookupError:
244 self.ui.debug("unknown revmap source: %s\n" % rev)
244 self.ui.debug("unknown revmap source: %s\n" % rev)
245 continue
245 continue
246 if arg is not None:
246 if arg is not None:
247 self.children[arg] = self.children.get(arg, 0) + 1
247 self.children[arg] = self.children.get(arg, 0) + 1
248
248
249 for rev, wanted, arg in self.convertedorder:
249 for rev, wanted, arg in self.convertedorder:
250 try:
250 try:
251 parents = self.origparents[rev]
251 parents = self.origparents[rev]
252 except KeyError:
252 except KeyError:
253 continue # unknown revmap source
253 continue # unknown revmap source
254 if wanted:
254 if wanted:
255 self.mark_wanted(rev, parents)
255 self.mark_wanted(rev, parents)
256 else:
256 else:
257 self.mark_not_wanted(rev, arg)
257 self.mark_not_wanted(rev, arg)
258 self._discard(arg, *parents)
258 self._discard(arg, *parents)
259
259
260 return True
260 return True
261
261
262 def getheads(self):
262 def getheads(self):
263 return self.base.getheads()
263 return self.base.getheads()
264
264
265 def getcommit(self, rev):
265 def getcommit(self, rev):
266 # We want to save a reference to the commit objects to be able
266 # We want to save a reference to the commit objects to be able
267 # to rewrite their parents later on.
267 # to rewrite their parents later on.
268 c = self.commits[rev] = self.base.getcommit(rev)
268 c = self.commits[rev] = self.base.getcommit(rev)
269 for p in c.parents:
269 for p in c.parents:
270 self.children[p] = self.children.get(p, 0) + 1
270 self.children[p] = self.children.get(p, 0) + 1
271 return c
271 return c
272
272
273 def numcommits(self):
274 return self.base.numcommits()
275
273 def _cachedcommit(self, rev):
276 def _cachedcommit(self, rev):
274 if rev in self.commits:
277 if rev in self.commits:
275 return self.commits[rev]
278 return self.commits[rev]
276 return self.base.getcommit(rev)
279 return self.base.getcommit(rev)
277
280
278 def _discard(self, *revs):
281 def _discard(self, *revs):
279 for r in revs:
282 for r in revs:
280 if r is None:
283 if r is None:
281 continue
284 continue
282 self.seenchildren[r] = self.seenchildren.get(r, 0) + 1
285 self.seenchildren[r] = self.seenchildren.get(r, 0) + 1
283 if self.seenchildren[r] == self.children[r]:
286 if self.seenchildren[r] == self.children[r]:
284 self.wantedancestors.pop(r, None)
287 self.wantedancestors.pop(r, None)
285 self.parentmap.pop(r, None)
288 self.parentmap.pop(r, None)
286 del self.seenchildren[r]
289 del self.seenchildren[r]
287 if self._rebuilt:
290 if self._rebuilt:
288 del self.children[r]
291 del self.children[r]
289
292
290 def wanted(self, rev, i):
293 def wanted(self, rev, i):
291 # Return True if we're directly interested in rev.
294 # Return True if we're directly interested in rev.
292 #
295 #
293 # i is an index selecting one of the parents of rev (if rev
296 # i is an index selecting one of the parents of rev (if rev
294 # has no parents, i is None). getchangedfiles will give us
297 # has no parents, i is None). getchangedfiles will give us
295 # the list of files that are different in rev and in the parent
298 # the list of files that are different in rev and in the parent
296 # indicated by i. If we're interested in any of these files,
299 # indicated by i. If we're interested in any of these files,
297 # we're interested in rev.
300 # we're interested in rev.
298 try:
301 try:
299 files = self.base.getchangedfiles(rev, i)
302 files = self.base.getchangedfiles(rev, i)
300 except NotImplementedError:
303 except NotImplementedError:
301 raise error.Abort(_("source repository doesn't support --filemap"))
304 raise error.Abort(_("source repository doesn't support --filemap"))
302 for f in files:
305 for f in files:
303 if self.filemapper(f):
306 if self.filemapper(f):
304 return True
307 return True
305 return False
308
309 # The include directive is documented to include nothing else (though
310 # valid branch closes are included).
311 if self.filemapper.include:
312 return False
313
314 # Allow empty commits in the source revision through. The getchanges()
315 # method doesn't even bother calling this if it determines that the
316 # close marker is significant (i.e. all of the branch ancestors weren't
317 # eliminated). Therefore if there *is* a close marker, getchanges()
318 # doesn't consider it significant, and this revision should be dropped.
319 return not files and 'close' not in self.commits[rev].extra
306
320
307 def mark_not_wanted(self, rev, p):
321 def mark_not_wanted(self, rev, p):
308 # Mark rev as not interesting and update data structures.
322 # Mark rev as not interesting and update data structures.
309
323
310 if p is None:
324 if p is None:
311 # A root revision. Use SKIPREV to indicate that it doesn't
325 # A root revision. Use SKIPREV to indicate that it doesn't
312 # map to any revision in the restricted graph. Put SKIPREV
326 # map to any revision in the restricted graph. Put SKIPREV
313 # in the set of wanted ancestors to simplify code elsewhere
327 # in the set of wanted ancestors to simplify code elsewhere
314 self.parentmap[rev] = SKIPREV
328 self.parentmap[rev] = SKIPREV
315 self.wantedancestors[rev] = {SKIPREV}
329 self.wantedancestors[rev] = {SKIPREV}
316 return
330 return
317
331
318 # Reuse the data from our parent.
332 # Reuse the data from our parent.
319 self.parentmap[rev] = self.parentmap[p]
333 self.parentmap[rev] = self.parentmap[p]
320 self.wantedancestors[rev] = self.wantedancestors[p]
334 self.wantedancestors[rev] = self.wantedancestors[p]
321
335
322 def mark_wanted(self, rev, parents):
336 def mark_wanted(self, rev, parents):
323 # Mark rev ss wanted and update data structures.
337 # Mark rev ss wanted and update data structures.
324
338
325 # rev will be in the restricted graph, so children of rev in
339 # rev will be in the restricted graph, so children of rev in
326 # the original graph should still have rev as a parent in the
340 # the original graph should still have rev as a parent in the
327 # restricted graph.
341 # restricted graph.
328 self.parentmap[rev] = rev
342 self.parentmap[rev] = rev
329
343
330 # The set of wanted ancestors of rev is the union of the sets
344 # The set of wanted ancestors of rev is the union of the sets
331 # of wanted ancestors of its parents. Plus rev itself.
345 # of wanted ancestors of its parents. Plus rev itself.
332 wrev = set()
346 wrev = set()
333 for p in parents:
347 for p in parents:
334 if p in self.wantedancestors:
348 if p in self.wantedancestors:
335 wrev.update(self.wantedancestors[p])
349 wrev.update(self.wantedancestors[p])
336 else:
350 else:
337 self.ui.warn(_('warning: %s parent %s is missing\n') %
351 self.ui.warn(_('warning: %s parent %s is missing\n') %
338 (rev, p))
352 (rev, p))
339 wrev.add(rev)
353 wrev.add(rev)
340 self.wantedancestors[rev] = wrev
354 self.wantedancestors[rev] = wrev
341
355
342 def getchanges(self, rev, full):
356 def getchanges(self, rev, full):
343 parents = self.commits[rev].parents
357 parents = self.commits[rev].parents
344 if len(parents) > 1 and not self.ignoreancestorcheck:
358 if len(parents) > 1 and not self.ignoreancestorcheck:
345 self.rebuild()
359 self.rebuild()
346
360
347 # To decide whether we're interested in rev we:
361 # To decide whether we're interested in rev we:
348 #
362 #
349 # - calculate what parents rev will have if it turns out we're
363 # - calculate what parents rev will have if it turns out we're
350 # interested in it. If it's going to have more than 1 parent,
364 # interested in it. If it's going to have more than 1 parent,
351 # we're interested in it.
365 # we're interested in it.
352 #
366 #
353 # - otherwise, we'll compare it with the single parent we found.
367 # - otherwise, we'll compare it with the single parent we found.
354 # If any of the files we're interested in is different in the
368 # If any of the files we're interested in is different in the
355 # the two revisions, we're interested in rev.
369 # the two revisions, we're interested in rev.
356
370
357 # A parent p is interesting if its mapped version (self.parentmap[p]):
371 # A parent p is interesting if its mapped version (self.parentmap[p]):
358 # - is not SKIPREV
372 # - is not SKIPREV
359 # - is still not in the list of parents (we don't want duplicates)
373 # - is still not in the list of parents (we don't want duplicates)
360 # - is not an ancestor of the mapped versions of the other parents or
374 # - is not an ancestor of the mapped versions of the other parents or
361 # there is no parent in the same branch than the current revision.
375 # there is no parent in the same branch than the current revision.
362 mparents = []
376 mparents = []
363 knownparents = set()
377 knownparents = set()
364 branch = self.commits[rev].branch
378 branch = self.commits[rev].branch
365 hasbranchparent = False
379 hasbranchparent = False
366 for i, p1 in enumerate(parents):
380 for i, p1 in enumerate(parents):
367 mp1 = self.parentmap[p1]
381 mp1 = self.parentmap[p1]
368 if mp1 == SKIPREV or mp1 in knownparents:
382 if mp1 == SKIPREV or mp1 in knownparents:
369 continue
383 continue
370
384
371 isancestor = (not self.ignoreancestorcheck and
385 isancestor = (not self.ignoreancestorcheck and
372 any(p2 for p2 in parents
386 any(p2 for p2 in parents
373 if p1 != p2 and mp1 != self.parentmap[p2]
387 if p1 != p2 and mp1 != self.parentmap[p2]
374 and mp1 in self.wantedancestors[p2]))
388 and mp1 in self.wantedancestors[p2]))
375 if not isancestor and not hasbranchparent and len(parents) > 1:
389 if not isancestor and not hasbranchparent and len(parents) > 1:
376 # This could be expensive, avoid unnecessary calls.
390 # This could be expensive, avoid unnecessary calls.
377 if self._cachedcommit(p1).branch == branch:
391 if self._cachedcommit(p1).branch == branch:
378 hasbranchparent = True
392 hasbranchparent = True
379 mparents.append((p1, mp1, i, isancestor))
393 mparents.append((p1, mp1, i, isancestor))
380 knownparents.add(mp1)
394 knownparents.add(mp1)
381 # Discard parents ancestors of other parents if there is a
395 # Discard parents ancestors of other parents if there is a
382 # non-ancestor one on the same branch than current revision.
396 # non-ancestor one on the same branch than current revision.
383 if hasbranchparent:
397 if hasbranchparent:
384 mparents = [p for p in mparents if not p[3]]
398 mparents = [p for p in mparents if not p[3]]
385 wp = None
399 wp = None
386 if mparents:
400 if mparents:
387 wp = max(p[2] for p in mparents)
401 wp = max(p[2] for p in mparents)
388 mparents = [p[1] for p in mparents]
402 mparents = [p[1] for p in mparents]
389 elif parents:
403 elif parents:
390 wp = 0
404 wp = 0
391
405
392 self.origparents[rev] = parents
406 self.origparents[rev] = parents
393
407
394 closed = False
408 closed = False
395 if 'close' in self.commits[rev].extra:
409 if 'close' in self.commits[rev].extra:
396 # A branch closing revision is only useful if one of its
410 # A branch closing revision is only useful if one of its
397 # parents belong to the branch being closed
411 # parents belong to the branch being closed
398 pbranches = [self._cachedcommit(p).branch for p in mparents]
412 pbranches = [self._cachedcommit(p).branch for p in mparents]
399 if branch in pbranches:
413 if branch in pbranches:
400 closed = True
414 closed = True
401
415
402 if len(mparents) < 2 and not closed and not self.wanted(rev, wp):
416 if len(mparents) < 2 and not closed and not self.wanted(rev, wp):
403 # We don't want this revision.
417 # We don't want this revision.
404 # Update our state and tell the convert process to map this
418 # Update our state and tell the convert process to map this
405 # revision to the same revision its parent as mapped to.
419 # revision to the same revision its parent as mapped to.
406 p = None
420 p = None
407 if parents:
421 if parents:
408 p = parents[wp]
422 p = parents[wp]
409 self.mark_not_wanted(rev, p)
423 self.mark_not_wanted(rev, p)
410 self.convertedorder.append((rev, False, p))
424 self.convertedorder.append((rev, False, p))
411 self._discard(*parents)
425 self._discard(*parents)
412 return self.parentmap[rev]
426 return self.parentmap[rev]
413
427
414 # We want this revision.
428 # We want this revision.
415 # Rewrite the parents of the commit object
429 # Rewrite the parents of the commit object
416 self.commits[rev].parents = mparents
430 self.commits[rev].parents = mparents
417 self.mark_wanted(rev, parents)
431 self.mark_wanted(rev, parents)
418 self.convertedorder.append((rev, True, None))
432 self.convertedorder.append((rev, True, None))
419 self._discard(*parents)
433 self._discard(*parents)
420
434
421 # Get the real changes and do the filtering/mapping. To be
435 # Get the real changes and do the filtering/mapping. To be
422 # able to get the files later on in getfile, we hide the
436 # able to get the files later on in getfile, we hide the
423 # original filename in the rev part of the return value.
437 # original filename in the rev part of the return value.
424 changes, copies, cleanp2 = self.base.getchanges(rev, full)
438 changes, copies, cleanp2 = self.base.getchanges(rev, full)
425 files = {}
439 files = {}
426 ncleanp2 = set(cleanp2)
440 ncleanp2 = set(cleanp2)
427 for f, r in changes:
441 for f, r in changes:
428 newf = self.filemapper(f)
442 newf = self.filemapper(f)
429 if newf and (newf != f or newf not in files):
443 if newf and (newf != f or newf not in files):
430 files[newf] = (f, r)
444 files[newf] = (f, r)
431 if newf != f:
445 if newf != f:
432 ncleanp2.discard(f)
446 ncleanp2.discard(f)
433 files = sorted(files.items())
447 files = sorted(files.items())
434
448
435 ncopies = {}
449 ncopies = {}
436 for c in copies:
450 for c in copies:
437 newc = self.filemapper(c)
451 newc = self.filemapper(c)
438 if newc:
452 if newc:
439 newsource = self.filemapper(copies[c])
453 newsource = self.filemapper(copies[c])
440 if newsource:
454 if newsource:
441 ncopies[newc] = newsource
455 ncopies[newc] = newsource
442
456
443 return files, ncopies, ncleanp2
457 return files, ncopies, ncleanp2
444
458
445 def targetfilebelongstosource(self, targetfilename):
459 def targetfilebelongstosource(self, targetfilename):
446 return self.filemapper.istargetfile(targetfilename)
460 return self.filemapper.istargetfile(targetfilename)
447
461
448 def getfile(self, name, rev):
462 def getfile(self, name, rev):
449 realname, realrev = rev
463 realname, realrev = rev
450 return self.base.getfile(realname, realrev)
464 return self.base.getfile(realname, realrev)
451
465
452 def gettags(self):
466 def gettags(self):
453 return self.base.gettags()
467 return self.base.gettags()
454
468
455 def hasnativeorder(self):
469 def hasnativeorder(self):
456 return self.base.hasnativeorder()
470 return self.base.hasnativeorder()
457
471
458 def lookuprev(self, rev):
472 def lookuprev(self, rev):
459 return self.base.lookuprev(rev)
473 return self.base.lookuprev(rev)
460
474
461 def getbookmarks(self):
475 def getbookmarks(self):
462 return self.base.getbookmarks()
476 return self.base.getbookmarks()
463
477
464 def converted(self, rev, sinkrev):
478 def converted(self, rev, sinkrev):
465 self.base.converted(rev, sinkrev)
479 self.base.converted(rev, sinkrev)
@@ -1,652 +1,655 b''
1 # hg.py - hg backend for convert extension
1 # hg.py - hg backend for convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # Notes for hg->hg conversion:
8 # Notes for hg->hg conversion:
9 #
9 #
10 # * Old versions of Mercurial didn't trim the whitespace from the ends
10 # * Old versions of Mercurial didn't trim the whitespace from the ends
11 # of commit messages, but new versions do. Changesets created by
11 # of commit messages, but new versions do. Changesets created by
12 # those older versions, then converted, may thus have different
12 # those older versions, then converted, may thus have different
13 # hashes for changesets that are otherwise identical.
13 # hashes for changesets that are otherwise identical.
14 #
14 #
15 # * Using "--config convert.hg.saverev=true" will make the source
15 # * Using "--config convert.hg.saverev=true" will make the source
16 # identifier to be stored in the converted revision. This will cause
16 # identifier to be stored in the converted revision. This will cause
17 # the converted revision to have a different identity than the
17 # the converted revision to have a different identity than the
18 # source.
18 # source.
19 from __future__ import absolute_import
19 from __future__ import absolute_import
20
20
21 import os
21 import os
22 import re
22 import re
23 import time
23 import time
24
24
25 from mercurial.i18n import _
25 from mercurial.i18n import _
26 from mercurial import (
26 from mercurial import (
27 bookmarks,
27 bookmarks,
28 context,
28 context,
29 error,
29 error,
30 exchange,
30 exchange,
31 hg,
31 hg,
32 lock as lockmod,
32 lock as lockmod,
33 merge as mergemod,
33 merge as mergemod,
34 node as nodemod,
34 node as nodemod,
35 phases,
35 phases,
36 scmutil,
36 scmutil,
37 util,
37 util,
38 )
38 )
39 from mercurial.utils import dateutil
39 from mercurial.utils import dateutil
40 stringio = util.stringio
40 stringio = util.stringio
41
41
42 from . import common
42 from . import common
43 mapfile = common.mapfile
43 mapfile = common.mapfile
44 NoRepo = common.NoRepo
44 NoRepo = common.NoRepo
45
45
46 sha1re = re.compile(br'\b[0-9a-f]{12,40}\b')
46 sha1re = re.compile(br'\b[0-9a-f]{12,40}\b')
47
47
48 class mercurial_sink(common.converter_sink):
48 class mercurial_sink(common.converter_sink):
49 def __init__(self, ui, repotype, path):
49 def __init__(self, ui, repotype, path):
50 common.converter_sink.__init__(self, ui, repotype, path)
50 common.converter_sink.__init__(self, ui, repotype, path)
51 self.branchnames = ui.configbool('convert', 'hg.usebranchnames')
51 self.branchnames = ui.configbool('convert', 'hg.usebranchnames')
52 self.clonebranches = ui.configbool('convert', 'hg.clonebranches')
52 self.clonebranches = ui.configbool('convert', 'hg.clonebranches')
53 self.tagsbranch = ui.config('convert', 'hg.tagsbranch')
53 self.tagsbranch = ui.config('convert', 'hg.tagsbranch')
54 self.lastbranch = None
54 self.lastbranch = None
55 if os.path.isdir(path) and len(os.listdir(path)) > 0:
55 if os.path.isdir(path) and len(os.listdir(path)) > 0:
56 try:
56 try:
57 self.repo = hg.repository(self.ui, path)
57 self.repo = hg.repository(self.ui, path)
58 if not self.repo.local():
58 if not self.repo.local():
59 raise NoRepo(_('%s is not a local Mercurial repository')
59 raise NoRepo(_('%s is not a local Mercurial repository')
60 % path)
60 % path)
61 except error.RepoError as err:
61 except error.RepoError as err:
62 ui.traceback()
62 ui.traceback()
63 raise NoRepo(err.args[0])
63 raise NoRepo(err.args[0])
64 else:
64 else:
65 try:
65 try:
66 ui.status(_('initializing destination %s repository\n') % path)
66 ui.status(_('initializing destination %s repository\n') % path)
67 self.repo = hg.repository(self.ui, path, create=True)
67 self.repo = hg.repository(self.ui, path, create=True)
68 if not self.repo.local():
68 if not self.repo.local():
69 raise NoRepo(_('%s is not a local Mercurial repository')
69 raise NoRepo(_('%s is not a local Mercurial repository')
70 % path)
70 % path)
71 self.created.append(path)
71 self.created.append(path)
72 except error.RepoError:
72 except error.RepoError:
73 ui.traceback()
73 ui.traceback()
74 raise NoRepo(_("could not create hg repository %s as sink")
74 raise NoRepo(_("could not create hg repository %s as sink")
75 % path)
75 % path)
76 self.lock = None
76 self.lock = None
77 self.wlock = None
77 self.wlock = None
78 self.filemapmode = False
78 self.filemapmode = False
79 self.subrevmaps = {}
79 self.subrevmaps = {}
80
80
81 def before(self):
81 def before(self):
82 self.ui.debug('run hg sink pre-conversion action\n')
82 self.ui.debug('run hg sink pre-conversion action\n')
83 self.wlock = self.repo.wlock()
83 self.wlock = self.repo.wlock()
84 self.lock = self.repo.lock()
84 self.lock = self.repo.lock()
85
85
86 def after(self):
86 def after(self):
87 self.ui.debug('run hg sink post-conversion action\n')
87 self.ui.debug('run hg sink post-conversion action\n')
88 if self.lock:
88 if self.lock:
89 self.lock.release()
89 self.lock.release()
90 if self.wlock:
90 if self.wlock:
91 self.wlock.release()
91 self.wlock.release()
92
92
93 def revmapfile(self):
93 def revmapfile(self):
94 return self.repo.vfs.join("shamap")
94 return self.repo.vfs.join("shamap")
95
95
96 def authorfile(self):
96 def authorfile(self):
97 return self.repo.vfs.join("authormap")
97 return self.repo.vfs.join("authormap")
98
98
99 def setbranch(self, branch, pbranches):
99 def setbranch(self, branch, pbranches):
100 if not self.clonebranches:
100 if not self.clonebranches:
101 return
101 return
102
102
103 setbranch = (branch != self.lastbranch)
103 setbranch = (branch != self.lastbranch)
104 self.lastbranch = branch
104 self.lastbranch = branch
105 if not branch:
105 if not branch:
106 branch = 'default'
106 branch = 'default'
107 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
107 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
108 if pbranches:
108 if pbranches:
109 pbranch = pbranches[0][1]
109 pbranch = pbranches[0][1]
110 else:
110 else:
111 pbranch = 'default'
111 pbranch = 'default'
112
112
113 branchpath = os.path.join(self.path, branch)
113 branchpath = os.path.join(self.path, branch)
114 if setbranch:
114 if setbranch:
115 self.after()
115 self.after()
116 try:
116 try:
117 self.repo = hg.repository(self.ui, branchpath)
117 self.repo = hg.repository(self.ui, branchpath)
118 except Exception:
118 except Exception:
119 self.repo = hg.repository(self.ui, branchpath, create=True)
119 self.repo = hg.repository(self.ui, branchpath, create=True)
120 self.before()
120 self.before()
121
121
122 # pbranches may bring revisions from other branches (merge parents)
122 # pbranches may bring revisions from other branches (merge parents)
123 # Make sure we have them, or pull them.
123 # Make sure we have them, or pull them.
124 missings = {}
124 missings = {}
125 for b in pbranches:
125 for b in pbranches:
126 try:
126 try:
127 self.repo.lookup(b[0])
127 self.repo.lookup(b[0])
128 except Exception:
128 except Exception:
129 missings.setdefault(b[1], []).append(b[0])
129 missings.setdefault(b[1], []).append(b[0])
130
130
131 if missings:
131 if missings:
132 self.after()
132 self.after()
133 for pbranch, heads in sorted(missings.iteritems()):
133 for pbranch, heads in sorted(missings.iteritems()):
134 pbranchpath = os.path.join(self.path, pbranch)
134 pbranchpath = os.path.join(self.path, pbranch)
135 prepo = hg.peer(self.ui, {}, pbranchpath)
135 prepo = hg.peer(self.ui, {}, pbranchpath)
136 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
136 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
137 exchange.pull(self.repo, prepo,
137 exchange.pull(self.repo, prepo,
138 [prepo.lookup(h) for h in heads])
138 [prepo.lookup(h) for h in heads])
139 self.before()
139 self.before()
140
140
141 def _rewritetags(self, source, revmap, data):
141 def _rewritetags(self, source, revmap, data):
142 fp = stringio()
142 fp = stringio()
143 for line in data.splitlines():
143 for line in data.splitlines():
144 s = line.split(' ', 1)
144 s = line.split(' ', 1)
145 if len(s) != 2:
145 if len(s) != 2:
146 self.ui.warn(_('invalid tag entry: "%s"\n') % line)
146 self.ui.warn(_('invalid tag entry: "%s"\n') % line)
147 fp.write('%s\n' % line) # Bogus, but keep for hash stability
147 fp.write('%s\n' % line) # Bogus, but keep for hash stability
148 continue
148 continue
149 revid = revmap.get(source.lookuprev(s[0]))
149 revid = revmap.get(source.lookuprev(s[0]))
150 if not revid:
150 if not revid:
151 if s[0] == nodemod.nullhex:
151 if s[0] == nodemod.nullhex:
152 revid = s[0]
152 revid = s[0]
153 else:
153 else:
154 # missing, but keep for hash stability
154 # missing, but keep for hash stability
155 self.ui.warn(_('missing tag entry: "%s"\n') % line)
155 self.ui.warn(_('missing tag entry: "%s"\n') % line)
156 fp.write('%s\n' % line)
156 fp.write('%s\n' % line)
157 continue
157 continue
158 fp.write('%s %s\n' % (revid, s[1]))
158 fp.write('%s %s\n' % (revid, s[1]))
159 return fp.getvalue()
159 return fp.getvalue()
160
160
161 def _rewritesubstate(self, source, data):
161 def _rewritesubstate(self, source, data):
162 fp = stringio()
162 fp = stringio()
163 for line in data.splitlines():
163 for line in data.splitlines():
164 s = line.split(' ', 1)
164 s = line.split(' ', 1)
165 if len(s) != 2:
165 if len(s) != 2:
166 continue
166 continue
167
167
168 revid = s[0]
168 revid = s[0]
169 subpath = s[1]
169 subpath = s[1]
170 if revid != nodemod.nullhex:
170 if revid != nodemod.nullhex:
171 revmap = self.subrevmaps.get(subpath)
171 revmap = self.subrevmaps.get(subpath)
172 if revmap is None:
172 if revmap is None:
173 revmap = mapfile(self.ui,
173 revmap = mapfile(self.ui,
174 self.repo.wjoin(subpath, '.hg/shamap'))
174 self.repo.wjoin(subpath, '.hg/shamap'))
175 self.subrevmaps[subpath] = revmap
175 self.subrevmaps[subpath] = revmap
176
176
177 # It is reasonable that one or more of the subrepos don't
177 # It is reasonable that one or more of the subrepos don't
178 # need to be converted, in which case they can be cloned
178 # need to be converted, in which case they can be cloned
179 # into place instead of converted. Therefore, only warn
179 # into place instead of converted. Therefore, only warn
180 # once.
180 # once.
181 msg = _('no ".hgsubstate" updates will be made for "%s"\n')
181 msg = _('no ".hgsubstate" updates will be made for "%s"\n')
182 if len(revmap) == 0:
182 if len(revmap) == 0:
183 sub = self.repo.wvfs.reljoin(subpath, '.hg')
183 sub = self.repo.wvfs.reljoin(subpath, '.hg')
184
184
185 if self.repo.wvfs.exists(sub):
185 if self.repo.wvfs.exists(sub):
186 self.ui.warn(msg % subpath)
186 self.ui.warn(msg % subpath)
187
187
188 newid = revmap.get(revid)
188 newid = revmap.get(revid)
189 if not newid:
189 if not newid:
190 if len(revmap) > 0:
190 if len(revmap) > 0:
191 self.ui.warn(_("%s is missing from %s/.hg/shamap\n") %
191 self.ui.warn(_("%s is missing from %s/.hg/shamap\n") %
192 (revid, subpath))
192 (revid, subpath))
193 else:
193 else:
194 revid = newid
194 revid = newid
195
195
196 fp.write('%s %s\n' % (revid, subpath))
196 fp.write('%s %s\n' % (revid, subpath))
197
197
198 return fp.getvalue()
198 return fp.getvalue()
199
199
200 def _calculatemergedfiles(self, source, p1ctx, p2ctx):
200 def _calculatemergedfiles(self, source, p1ctx, p2ctx):
201 """Calculates the files from p2 that we need to pull in when merging p1
201 """Calculates the files from p2 that we need to pull in when merging p1
202 and p2, given that the merge is coming from the given source.
202 and p2, given that the merge is coming from the given source.
203
203
204 This prevents us from losing files that only exist in the target p2 and
204 This prevents us from losing files that only exist in the target p2 and
205 that don't come from the source repo (like if you're merging multiple
205 that don't come from the source repo (like if you're merging multiple
206 repositories together).
206 repositories together).
207 """
207 """
208 anc = [p1ctx.ancestor(p2ctx)]
208 anc = [p1ctx.ancestor(p2ctx)]
209 # Calculate what files are coming from p2
209 # Calculate what files are coming from p2
210 actions, diverge, rename = mergemod.calculateupdates(
210 actions, diverge, rename = mergemod.calculateupdates(
211 self.repo, p1ctx, p2ctx, anc,
211 self.repo, p1ctx, p2ctx, anc,
212 True, # branchmerge
212 True, # branchmerge
213 True, # force
213 True, # force
214 False, # acceptremote
214 False, # acceptremote
215 False, # followcopies
215 False, # followcopies
216 )
216 )
217
217
218 for file, (action, info, msg) in actions.iteritems():
218 for file, (action, info, msg) in actions.iteritems():
219 if source.targetfilebelongstosource(file):
219 if source.targetfilebelongstosource(file):
220 # If the file belongs to the source repo, ignore the p2
220 # If the file belongs to the source repo, ignore the p2
221 # since it will be covered by the existing fileset.
221 # since it will be covered by the existing fileset.
222 continue
222 continue
223
223
224 # If the file requires actual merging, abort. We don't have enough
224 # If the file requires actual merging, abort. We don't have enough
225 # context to resolve merges correctly.
225 # context to resolve merges correctly.
226 if action in ['m', 'dm', 'cd', 'dc']:
226 if action in ['m', 'dm', 'cd', 'dc']:
227 raise error.Abort(_("unable to convert merge commit "
227 raise error.Abort(_("unable to convert merge commit "
228 "since target parents do not merge cleanly (file "
228 "since target parents do not merge cleanly (file "
229 "%s, parents %s and %s)") % (file, p1ctx,
229 "%s, parents %s and %s)") % (file, p1ctx,
230 p2ctx))
230 p2ctx))
231 elif action == 'k':
231 elif action == 'k':
232 # 'keep' means nothing changed from p1
232 # 'keep' means nothing changed from p1
233 continue
233 continue
234 else:
234 else:
235 # Any other change means we want to take the p2 version
235 # Any other change means we want to take the p2 version
236 yield file
236 yield file
237
237
238 def putcommit(self, files, copies, parents, commit, source, revmap, full,
238 def putcommit(self, files, copies, parents, commit, source, revmap, full,
239 cleanp2):
239 cleanp2):
240 files = dict(files)
240 files = dict(files)
241
241
242 def getfilectx(repo, memctx, f):
242 def getfilectx(repo, memctx, f):
243 if p2ctx and f in p2files and f not in copies:
243 if p2ctx and f in p2files and f not in copies:
244 self.ui.debug('reusing %s from p2\n' % f)
244 self.ui.debug('reusing %s from p2\n' % f)
245 try:
245 try:
246 return p2ctx[f]
246 return p2ctx[f]
247 except error.ManifestLookupError:
247 except error.ManifestLookupError:
248 # If the file doesn't exist in p2, then we're syncing a
248 # If the file doesn't exist in p2, then we're syncing a
249 # delete, so just return None.
249 # delete, so just return None.
250 return None
250 return None
251 try:
251 try:
252 v = files[f]
252 v = files[f]
253 except KeyError:
253 except KeyError:
254 return None
254 return None
255 data, mode = source.getfile(f, v)
255 data, mode = source.getfile(f, v)
256 if data is None:
256 if data is None:
257 return None
257 return None
258 if f == '.hgtags':
258 if f == '.hgtags':
259 data = self._rewritetags(source, revmap, data)
259 data = self._rewritetags(source, revmap, data)
260 if f == '.hgsubstate':
260 if f == '.hgsubstate':
261 data = self._rewritesubstate(source, data)
261 data = self._rewritesubstate(source, data)
262 return context.memfilectx(self.repo, memctx, f, data, 'l' in mode,
262 return context.memfilectx(self.repo, memctx, f, data, 'l' in mode,
263 'x' in mode, copies.get(f))
263 'x' in mode, copies.get(f))
264
264
265 pl = []
265 pl = []
266 for p in parents:
266 for p in parents:
267 if p not in pl:
267 if p not in pl:
268 pl.append(p)
268 pl.append(p)
269 parents = pl
269 parents = pl
270 nparents = len(parents)
270 nparents = len(parents)
271 if self.filemapmode and nparents == 1:
271 if self.filemapmode and nparents == 1:
272 m1node = self.repo.changelog.read(nodemod.bin(parents[0]))[0]
272 m1node = self.repo.changelog.read(nodemod.bin(parents[0]))[0]
273 parent = parents[0]
273 parent = parents[0]
274
274
275 if len(parents) < 2:
275 if len(parents) < 2:
276 parents.append(nodemod.nullid)
276 parents.append(nodemod.nullid)
277 if len(parents) < 2:
277 if len(parents) < 2:
278 parents.append(nodemod.nullid)
278 parents.append(nodemod.nullid)
279 p2 = parents.pop(0)
279 p2 = parents.pop(0)
280
280
281 text = commit.desc
281 text = commit.desc
282
282
283 sha1s = re.findall(sha1re, text)
283 sha1s = re.findall(sha1re, text)
284 for sha1 in sha1s:
284 for sha1 in sha1s:
285 oldrev = source.lookuprev(sha1)
285 oldrev = source.lookuprev(sha1)
286 newrev = revmap.get(oldrev)
286 newrev = revmap.get(oldrev)
287 if newrev is not None:
287 if newrev is not None:
288 text = text.replace(sha1, newrev[:len(sha1)])
288 text = text.replace(sha1, newrev[:len(sha1)])
289
289
290 extra = commit.extra.copy()
290 extra = commit.extra.copy()
291
291
292 sourcename = self.repo.ui.config('convert', 'hg.sourcename')
292 sourcename = self.repo.ui.config('convert', 'hg.sourcename')
293 if sourcename:
293 if sourcename:
294 extra['convert_source'] = sourcename
294 extra['convert_source'] = sourcename
295
295
296 for label in ('source', 'transplant_source', 'rebase_source',
296 for label in ('source', 'transplant_source', 'rebase_source',
297 'intermediate-source'):
297 'intermediate-source'):
298 node = extra.get(label)
298 node = extra.get(label)
299
299
300 if node is None:
300 if node is None:
301 continue
301 continue
302
302
303 # Only transplant stores its reference in binary
303 # Only transplant stores its reference in binary
304 if label == 'transplant_source':
304 if label == 'transplant_source':
305 node = nodemod.hex(node)
305 node = nodemod.hex(node)
306
306
307 newrev = revmap.get(node)
307 newrev = revmap.get(node)
308 if newrev is not None:
308 if newrev is not None:
309 if label == 'transplant_source':
309 if label == 'transplant_source':
310 newrev = nodemod.bin(newrev)
310 newrev = nodemod.bin(newrev)
311
311
312 extra[label] = newrev
312 extra[label] = newrev
313
313
314 if self.branchnames and commit.branch:
314 if self.branchnames and commit.branch:
315 extra['branch'] = commit.branch
315 extra['branch'] = commit.branch
316 if commit.rev and commit.saverev:
316 if commit.rev and commit.saverev:
317 extra['convert_revision'] = commit.rev
317 extra['convert_revision'] = commit.rev
318
318
319 while parents:
319 while parents:
320 p1 = p2
320 p1 = p2
321 p2 = parents.pop(0)
321 p2 = parents.pop(0)
322 p1ctx = self.repo[p1]
322 p1ctx = self.repo[p1]
323 p2ctx = None
323 p2ctx = None
324 if p2 != nodemod.nullid:
324 if p2 != nodemod.nullid:
325 p2ctx = self.repo[p2]
325 p2ctx = self.repo[p2]
326 fileset = set(files)
326 fileset = set(files)
327 if full:
327 if full:
328 fileset.update(self.repo[p1])
328 fileset.update(self.repo[p1])
329 fileset.update(self.repo[p2])
329 fileset.update(self.repo[p2])
330
330
331 if p2ctx:
331 if p2ctx:
332 p2files = set(cleanp2)
332 p2files = set(cleanp2)
333 for file in self._calculatemergedfiles(source, p1ctx, p2ctx):
333 for file in self._calculatemergedfiles(source, p1ctx, p2ctx):
334 p2files.add(file)
334 p2files.add(file)
335 fileset.add(file)
335 fileset.add(file)
336
336
337 ctx = context.memctx(self.repo, (p1, p2), text, fileset,
337 ctx = context.memctx(self.repo, (p1, p2), text, fileset,
338 getfilectx, commit.author, commit.date, extra)
338 getfilectx, commit.author, commit.date, extra)
339
339
340 # We won't know if the conversion changes the node until after the
340 # We won't know if the conversion changes the node until after the
341 # commit, so copy the source's phase for now.
341 # commit, so copy the source's phase for now.
342 self.repo.ui.setconfig('phases', 'new-commit',
342 self.repo.ui.setconfig('phases', 'new-commit',
343 phases.phasenames[commit.phase], 'convert')
343 phases.phasenames[commit.phase], 'convert')
344
344
345 with self.repo.transaction("convert") as tr:
345 with self.repo.transaction("convert") as tr:
346 node = nodemod.hex(self.repo.commitctx(ctx))
346 node = nodemod.hex(self.repo.commitctx(ctx))
347
347
348 # If the node value has changed, but the phase is lower than
348 # If the node value has changed, but the phase is lower than
349 # draft, set it back to draft since it hasn't been exposed
349 # draft, set it back to draft since it hasn't been exposed
350 # anywhere.
350 # anywhere.
351 if commit.rev != node:
351 if commit.rev != node:
352 ctx = self.repo[node]
352 ctx = self.repo[node]
353 if ctx.phase() < phases.draft:
353 if ctx.phase() < phases.draft:
354 phases.registernew(self.repo, tr, phases.draft,
354 phases.registernew(self.repo, tr, phases.draft,
355 [ctx.node()])
355 [ctx.node()])
356
356
357 text = "(octopus merge fixup)\n"
357 text = "(octopus merge fixup)\n"
358 p2 = node
358 p2 = node
359
359
360 if self.filemapmode and nparents == 1:
360 if self.filemapmode and nparents == 1:
361 man = self.repo.manifestlog.getstorage(b'')
361 man = self.repo.manifestlog.getstorage(b'')
362 mnode = self.repo.changelog.read(nodemod.bin(p2))[0]
362 mnode = self.repo.changelog.read(nodemod.bin(p2))[0]
363 closed = 'close' in commit.extra
363 closed = 'close' in commit.extra
364 if not closed and not man.cmp(m1node, man.revision(mnode)):
364 if not closed and not man.cmp(m1node, man.revision(mnode)):
365 self.ui.status(_("filtering out empty revision\n"))
365 self.ui.status(_("filtering out empty revision\n"))
366 self.repo.rollback(force=True)
366 self.repo.rollback(force=True)
367 return parent
367 return parent
368 return p2
368 return p2
369
369
370 def puttags(self, tags):
370 def puttags(self, tags):
371 tagparent = self.repo.branchtip(self.tagsbranch, ignoremissing=True)
371 tagparent = self.repo.branchtip(self.tagsbranch, ignoremissing=True)
372 tagparent = tagparent or nodemod.nullid
372 tagparent = tagparent or nodemod.nullid
373
373
374 oldlines = set()
374 oldlines = set()
375 for branch, heads in self.repo.branchmap().iteritems():
375 for branch, heads in self.repo.branchmap().iteritems():
376 for h in heads:
376 for h in heads:
377 if '.hgtags' in self.repo[h]:
377 if '.hgtags' in self.repo[h]:
378 oldlines.update(
378 oldlines.update(
379 set(self.repo[h]['.hgtags'].data().splitlines(True)))
379 set(self.repo[h]['.hgtags'].data().splitlines(True)))
380 oldlines = sorted(list(oldlines))
380 oldlines = sorted(list(oldlines))
381
381
382 newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
382 newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
383 if newlines == oldlines:
383 if newlines == oldlines:
384 return None, None
384 return None, None
385
385
386 # if the old and new tags match, then there is nothing to update
386 # if the old and new tags match, then there is nothing to update
387 oldtags = set()
387 oldtags = set()
388 newtags = set()
388 newtags = set()
389 for line in oldlines:
389 for line in oldlines:
390 s = line.strip().split(' ', 1)
390 s = line.strip().split(' ', 1)
391 if len(s) != 2:
391 if len(s) != 2:
392 continue
392 continue
393 oldtags.add(s[1])
393 oldtags.add(s[1])
394 for line in newlines:
394 for line in newlines:
395 s = line.strip().split(' ', 1)
395 s = line.strip().split(' ', 1)
396 if len(s) != 2:
396 if len(s) != 2:
397 continue
397 continue
398 if s[1] not in oldtags:
398 if s[1] not in oldtags:
399 newtags.add(s[1].strip())
399 newtags.add(s[1].strip())
400
400
401 if not newtags:
401 if not newtags:
402 return None, None
402 return None, None
403
403
404 data = "".join(newlines)
404 data = "".join(newlines)
405 def getfilectx(repo, memctx, f):
405 def getfilectx(repo, memctx, f):
406 return context.memfilectx(repo, memctx, f, data, False, False, None)
406 return context.memfilectx(repo, memctx, f, data, False, False, None)
407
407
408 self.ui.status(_("updating tags\n"))
408 self.ui.status(_("updating tags\n"))
409 date = "%d 0" % int(time.mktime(time.gmtime()))
409 date = "%d 0" % int(time.mktime(time.gmtime()))
410 extra = {'branch': self.tagsbranch}
410 extra = {'branch': self.tagsbranch}
411 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
411 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
412 [".hgtags"], getfilectx, "convert-repo", date,
412 [".hgtags"], getfilectx, "convert-repo", date,
413 extra)
413 extra)
414 node = self.repo.commitctx(ctx)
414 node = self.repo.commitctx(ctx)
415 return nodemod.hex(node), nodemod.hex(tagparent)
415 return nodemod.hex(node), nodemod.hex(tagparent)
416
416
417 def setfilemapmode(self, active):
417 def setfilemapmode(self, active):
418 self.filemapmode = active
418 self.filemapmode = active
419
419
420 def putbookmarks(self, updatedbookmark):
420 def putbookmarks(self, updatedbookmark):
421 if not len(updatedbookmark):
421 if not len(updatedbookmark):
422 return
422 return
423 wlock = lock = tr = None
423 wlock = lock = tr = None
424 try:
424 try:
425 wlock = self.repo.wlock()
425 wlock = self.repo.wlock()
426 lock = self.repo.lock()
426 lock = self.repo.lock()
427 tr = self.repo.transaction('bookmark')
427 tr = self.repo.transaction('bookmark')
428 self.ui.status(_("updating bookmarks\n"))
428 self.ui.status(_("updating bookmarks\n"))
429 destmarks = self.repo._bookmarks
429 destmarks = self.repo._bookmarks
430 changes = [(bookmark, nodemod.bin(updatedbookmark[bookmark]))
430 changes = [(bookmark, nodemod.bin(updatedbookmark[bookmark]))
431 for bookmark in updatedbookmark]
431 for bookmark in updatedbookmark]
432 destmarks.applychanges(self.repo, tr, changes)
432 destmarks.applychanges(self.repo, tr, changes)
433 tr.close()
433 tr.close()
434 finally:
434 finally:
435 lockmod.release(lock, wlock, tr)
435 lockmod.release(lock, wlock, tr)
436
436
437 def hascommitfrommap(self, rev):
437 def hascommitfrommap(self, rev):
438 # the exact semantics of clonebranches is unclear so we can't say no
438 # the exact semantics of clonebranches is unclear so we can't say no
439 return rev in self.repo or self.clonebranches
439 return rev in self.repo or self.clonebranches
440
440
441 def hascommitforsplicemap(self, rev):
441 def hascommitforsplicemap(self, rev):
442 if rev not in self.repo and self.clonebranches:
442 if rev not in self.repo and self.clonebranches:
443 raise error.Abort(_('revision %s not found in destination '
443 raise error.Abort(_('revision %s not found in destination '
444 'repository (lookups with clonebranches=true '
444 'repository (lookups with clonebranches=true '
445 'are not implemented)') % rev)
445 'are not implemented)') % rev)
446 return rev in self.repo
446 return rev in self.repo
447
447
448 class mercurial_source(common.converter_source):
448 class mercurial_source(common.converter_source):
449 def __init__(self, ui, repotype, path, revs=None):
449 def __init__(self, ui, repotype, path, revs=None):
450 common.converter_source.__init__(self, ui, repotype, path, revs)
450 common.converter_source.__init__(self, ui, repotype, path, revs)
451 self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors')
451 self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors')
452 self.ignored = set()
452 self.ignored = set()
453 self.saverev = ui.configbool('convert', 'hg.saverev')
453 self.saverev = ui.configbool('convert', 'hg.saverev')
454 try:
454 try:
455 self.repo = hg.repository(self.ui, path)
455 self.repo = hg.repository(self.ui, path)
456 # try to provoke an exception if this isn't really a hg
456 # try to provoke an exception if this isn't really a hg
457 # repo, but some other bogus compatible-looking url
457 # repo, but some other bogus compatible-looking url
458 if not self.repo.local():
458 if not self.repo.local():
459 raise error.RepoError
459 raise error.RepoError
460 except error.RepoError:
460 except error.RepoError:
461 ui.traceback()
461 ui.traceback()
462 raise NoRepo(_("%s is not a local Mercurial repository") % path)
462 raise NoRepo(_("%s is not a local Mercurial repository") % path)
463 self.lastrev = None
463 self.lastrev = None
464 self.lastctx = None
464 self.lastctx = None
465 self._changescache = None, None
465 self._changescache = None, None
466 self.convertfp = None
466 self.convertfp = None
467 # Restrict converted revisions to startrev descendants
467 # Restrict converted revisions to startrev descendants
468 startnode = ui.config('convert', 'hg.startrev')
468 startnode = ui.config('convert', 'hg.startrev')
469 hgrevs = ui.config('convert', 'hg.revs')
469 hgrevs = ui.config('convert', 'hg.revs')
470 if hgrevs is None:
470 if hgrevs is None:
471 if startnode is not None:
471 if startnode is not None:
472 try:
472 try:
473 startnode = self.repo.lookup(startnode)
473 startnode = self.repo.lookup(startnode)
474 except error.RepoError:
474 except error.RepoError:
475 raise error.Abort(_('%s is not a valid start revision')
475 raise error.Abort(_('%s is not a valid start revision')
476 % startnode)
476 % startnode)
477 startrev = self.repo.changelog.rev(startnode)
477 startrev = self.repo.changelog.rev(startnode)
478 children = {startnode: 1}
478 children = {startnode: 1}
479 for r in self.repo.changelog.descendants([startrev]):
479 for r in self.repo.changelog.descendants([startrev]):
480 children[self.repo.changelog.node(r)] = 1
480 children[self.repo.changelog.node(r)] = 1
481 self.keep = children.__contains__
481 self.keep = children.__contains__
482 else:
482 else:
483 self.keep = util.always
483 self.keep = util.always
484 if revs:
484 if revs:
485 self._heads = [self.repo.lookup(r) for r in revs]
485 self._heads = [self.repo.lookup(r) for r in revs]
486 else:
486 else:
487 self._heads = self.repo.heads()
487 self._heads = self.repo.heads()
488 else:
488 else:
489 if revs or startnode is not None:
489 if revs or startnode is not None:
490 raise error.Abort(_('hg.revs cannot be combined with '
490 raise error.Abort(_('hg.revs cannot be combined with '
491 'hg.startrev or --rev'))
491 'hg.startrev or --rev'))
492 nodes = set()
492 nodes = set()
493 parents = set()
493 parents = set()
494 for r in scmutil.revrange(self.repo, [hgrevs]):
494 for r in scmutil.revrange(self.repo, [hgrevs]):
495 ctx = self.repo[r]
495 ctx = self.repo[r]
496 nodes.add(ctx.node())
496 nodes.add(ctx.node())
497 parents.update(p.node() for p in ctx.parents())
497 parents.update(p.node() for p in ctx.parents())
498 self.keep = nodes.__contains__
498 self.keep = nodes.__contains__
499 self._heads = nodes - parents
499 self._heads = nodes - parents
500
500
501 def _changectx(self, rev):
501 def _changectx(self, rev):
502 if self.lastrev != rev:
502 if self.lastrev != rev:
503 self.lastctx = self.repo[rev]
503 self.lastctx = self.repo[rev]
504 self.lastrev = rev
504 self.lastrev = rev
505 return self.lastctx
505 return self.lastctx
506
506
507 def _parents(self, ctx):
507 def _parents(self, ctx):
508 return [p for p in ctx.parents() if p and self.keep(p.node())]
508 return [p for p in ctx.parents() if p and self.keep(p.node())]
509
509
510 def getheads(self):
510 def getheads(self):
511 return [nodemod.hex(h) for h in self._heads if self.keep(h)]
511 return [nodemod.hex(h) for h in self._heads if self.keep(h)]
512
512
513 def getfile(self, name, rev):
513 def getfile(self, name, rev):
514 try:
514 try:
515 fctx = self._changectx(rev)[name]
515 fctx = self._changectx(rev)[name]
516 return fctx.data(), fctx.flags()
516 return fctx.data(), fctx.flags()
517 except error.LookupError:
517 except error.LookupError:
518 return None, None
518 return None, None
519
519
520 def _changedfiles(self, ctx1, ctx2):
520 def _changedfiles(self, ctx1, ctx2):
521 ma, r = [], []
521 ma, r = [], []
522 maappend = ma.append
522 maappend = ma.append
523 rappend = r.append
523 rappend = r.append
524 d = ctx1.manifest().diff(ctx2.manifest())
524 d = ctx1.manifest().diff(ctx2.manifest())
525 for f, ((node1, flag1), (node2, flag2)) in d.iteritems():
525 for f, ((node1, flag1), (node2, flag2)) in d.iteritems():
526 if node2 is None:
526 if node2 is None:
527 rappend(f)
527 rappend(f)
528 else:
528 else:
529 maappend(f)
529 maappend(f)
530 return ma, r
530 return ma, r
531
531
532 def getchanges(self, rev, full):
532 def getchanges(self, rev, full):
533 ctx = self._changectx(rev)
533 ctx = self._changectx(rev)
534 parents = self._parents(ctx)
534 parents = self._parents(ctx)
535 if full or not parents:
535 if full or not parents:
536 files = copyfiles = ctx.manifest()
536 files = copyfiles = ctx.manifest()
537 if parents:
537 if parents:
538 if self._changescache[0] == rev:
538 if self._changescache[0] == rev:
539 ma, r = self._changescache[1]
539 ma, r = self._changescache[1]
540 else:
540 else:
541 ma, r = self._changedfiles(parents[0], ctx)
541 ma, r = self._changedfiles(parents[0], ctx)
542 if not full:
542 if not full:
543 files = ma + r
543 files = ma + r
544 copyfiles = ma
544 copyfiles = ma
545 # _getcopies() is also run for roots and before filtering so missing
545 # _getcopies() is also run for roots and before filtering so missing
546 # revlogs are detected early
546 # revlogs are detected early
547 copies = self._getcopies(ctx, parents, copyfiles)
547 copies = self._getcopies(ctx, parents, copyfiles)
548 cleanp2 = set()
548 cleanp2 = set()
549 if len(parents) == 2:
549 if len(parents) == 2:
550 d = parents[1].manifest().diff(ctx.manifest(), clean=True)
550 d = parents[1].manifest().diff(ctx.manifest(), clean=True)
551 for f, value in d.iteritems():
551 for f, value in d.iteritems():
552 if value is None:
552 if value is None:
553 cleanp2.add(f)
553 cleanp2.add(f)
554 changes = [(f, rev) for f in files if f not in self.ignored]
554 changes = [(f, rev) for f in files if f not in self.ignored]
555 changes.sort()
555 changes.sort()
556 return changes, copies, cleanp2
556 return changes, copies, cleanp2
557
557
558 def _getcopies(self, ctx, parents, files):
558 def _getcopies(self, ctx, parents, files):
559 copies = {}
559 copies = {}
560 for name in files:
560 for name in files:
561 if name in self.ignored:
561 if name in self.ignored:
562 continue
562 continue
563 try:
563 try:
564 copysource, _copynode = ctx.filectx(name).renamed()
564 copysource, _copynode = ctx.filectx(name).renamed()
565 if copysource in self.ignored:
565 if copysource in self.ignored:
566 continue
566 continue
567 # Ignore copy sources not in parent revisions
567 # Ignore copy sources not in parent revisions
568 if not any(copysource in p for p in parents):
568 if not any(copysource in p for p in parents):
569 continue
569 continue
570 copies[name] = copysource
570 copies[name] = copysource
571 except TypeError:
571 except TypeError:
572 pass
572 pass
573 except error.LookupError as e:
573 except error.LookupError as e:
574 if not self.ignoreerrors:
574 if not self.ignoreerrors:
575 raise
575 raise
576 self.ignored.add(name)
576 self.ignored.add(name)
577 self.ui.warn(_('ignoring: %s\n') % e)
577 self.ui.warn(_('ignoring: %s\n') % e)
578 return copies
578 return copies
579
579
580 def getcommit(self, rev):
580 def getcommit(self, rev):
581 ctx = self._changectx(rev)
581 ctx = self._changectx(rev)
582 _parents = self._parents(ctx)
582 _parents = self._parents(ctx)
583 parents = [p.hex() for p in _parents]
583 parents = [p.hex() for p in _parents]
584 optparents = [p.hex() for p in ctx.parents() if p and p not in _parents]
584 optparents = [p.hex() for p in ctx.parents() if p and p not in _parents]
585 crev = rev
585 crev = rev
586
586
587 return common.commit(author=ctx.user(),
587 return common.commit(author=ctx.user(),
588 date=dateutil.datestr(ctx.date(),
588 date=dateutil.datestr(ctx.date(),
589 '%Y-%m-%d %H:%M:%S %1%2'),
589 '%Y-%m-%d %H:%M:%S %1%2'),
590 desc=ctx.description(),
590 desc=ctx.description(),
591 rev=crev,
591 rev=crev,
592 parents=parents,
592 parents=parents,
593 optparents=optparents,
593 optparents=optparents,
594 branch=ctx.branch(),
594 branch=ctx.branch(),
595 extra=ctx.extra(),
595 extra=ctx.extra(),
596 sortkey=ctx.rev(),
596 sortkey=ctx.rev(),
597 saverev=self.saverev,
597 saverev=self.saverev,
598 phase=ctx.phase())
598 phase=ctx.phase())
599
599
600 def numcommits(self):
601 return len(self.repo)
602
600 def gettags(self):
603 def gettags(self):
601 # This will get written to .hgtags, filter non global tags out.
604 # This will get written to .hgtags, filter non global tags out.
602 tags = [t for t in self.repo.tagslist()
605 tags = [t for t in self.repo.tagslist()
603 if self.repo.tagtype(t[0]) == 'global']
606 if self.repo.tagtype(t[0]) == 'global']
604 return dict([(name, nodemod.hex(node)) for name, node in tags
607 return dict([(name, nodemod.hex(node)) for name, node in tags
605 if self.keep(node)])
608 if self.keep(node)])
606
609
607 def getchangedfiles(self, rev, i):
610 def getchangedfiles(self, rev, i):
608 ctx = self._changectx(rev)
611 ctx = self._changectx(rev)
609 parents = self._parents(ctx)
612 parents = self._parents(ctx)
610 if not parents and i is None:
613 if not parents and i is None:
611 i = 0
614 i = 0
612 ma, r = ctx.manifest().keys(), []
615 ma, r = ctx.manifest().keys(), []
613 else:
616 else:
614 i = i or 0
617 i = i or 0
615 ma, r = self._changedfiles(parents[i], ctx)
618 ma, r = self._changedfiles(parents[i], ctx)
616 ma, r = [[f for f in l if f not in self.ignored] for l in (ma, r)]
619 ma, r = [[f for f in l if f not in self.ignored] for l in (ma, r)]
617
620
618 if i == 0:
621 if i == 0:
619 self._changescache = (rev, (ma, r))
622 self._changescache = (rev, (ma, r))
620
623
621 return ma + r
624 return ma + r
622
625
623 def converted(self, rev, destrev):
626 def converted(self, rev, destrev):
624 if self.convertfp is None:
627 if self.convertfp is None:
625 self.convertfp = open(self.repo.vfs.join('shamap'), 'ab')
628 self.convertfp = open(self.repo.vfs.join('shamap'), 'ab')
626 self.convertfp.write(util.tonativeeol('%s %s\n' % (destrev, rev)))
629 self.convertfp.write(util.tonativeeol('%s %s\n' % (destrev, rev)))
627 self.convertfp.flush()
630 self.convertfp.flush()
628
631
629 def before(self):
632 def before(self):
630 self.ui.debug('run hg source pre-conversion action\n')
633 self.ui.debug('run hg source pre-conversion action\n')
631
634
632 def after(self):
635 def after(self):
633 self.ui.debug('run hg source post-conversion action\n')
636 self.ui.debug('run hg source post-conversion action\n')
634
637
635 def hasnativeorder(self):
638 def hasnativeorder(self):
636 return True
639 return True
637
640
638 def hasnativeclose(self):
641 def hasnativeclose(self):
639 return True
642 return True
640
643
641 def lookuprev(self, rev):
644 def lookuprev(self, rev):
642 try:
645 try:
643 return nodemod.hex(self.repo.lookup(rev))
646 return nodemod.hex(self.repo.lookup(rev))
644 except (error.RepoError, error.LookupError):
647 except (error.RepoError, error.LookupError):
645 return None
648 return None
646
649
647 def getbookmarks(self):
650 def getbookmarks(self):
648 return bookmarks.listbookmarks(self.repo)
651 return bookmarks.listbookmarks(self.repo)
649
652
650 def checkrevformat(self, revstr, mapname='splicemap'):
653 def checkrevformat(self, revstr, mapname='splicemap'):
651 """ Mercurial, revision string is a 40 byte hex """
654 """ Mercurial, revision string is a 40 byte hex """
652 self.checkhexformat(revstr, mapname)
655 self.checkhexformat(revstr, mapname)
@@ -1,436 +1,446 b''
1 # extdiff.py - external diff program support for mercurial
1 # extdiff.py - external diff program support for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to allow external programs to compare revisions
8 '''command to allow external programs to compare revisions
9
9
10 The extdiff Mercurial extension allows you to use external programs
10 The extdiff Mercurial extension allows you to use external programs
11 to compare revisions, or revision with working directory. The external
11 to compare revisions, or revision with working directory. The external
12 diff programs are called with a configurable set of options and two
12 diff programs are called with a configurable set of options and two
13 non-option arguments: paths to directories containing snapshots of
13 non-option arguments: paths to directories containing snapshots of
14 files to compare.
14 files to compare.
15
15
16 If there is more than one file being compared and the "child" revision
16 If there is more than one file being compared and the "child" revision
17 is the working directory, any modifications made in the external diff
17 is the working directory, any modifications made in the external diff
18 program will be copied back to the working directory from the temporary
18 program will be copied back to the working directory from the temporary
19 directory.
19 directory.
20
20
21 The extdiff extension also allows you to configure new diff commands, so
21 The extdiff extension also allows you to configure new diff commands, so
22 you do not need to type :hg:`extdiff -p kdiff3` always. ::
22 you do not need to type :hg:`extdiff -p kdiff3` always. ::
23
23
24 [extdiff]
24 [extdiff]
25 # add new command that runs GNU diff(1) in 'context diff' mode
25 # add new command that runs GNU diff(1) in 'context diff' mode
26 cdiff = gdiff -Nprc5
26 cdiff = gdiff -Nprc5
27 ## or the old way:
27 ## or the old way:
28 #cmd.cdiff = gdiff
28 #cmd.cdiff = gdiff
29 #opts.cdiff = -Nprc5
29 #opts.cdiff = -Nprc5
30
30
31 # add new command called meld, runs meld (no need to name twice). If
31 # add new command called meld, runs meld (no need to name twice). If
32 # the meld executable is not available, the meld tool in [merge-tools]
32 # the meld executable is not available, the meld tool in [merge-tools]
33 # will be used, if available
33 # will be used, if available
34 meld =
34 meld =
35
35
36 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
36 # add new command called vimdiff, runs gvimdiff with DirDiff plugin
37 # (see http://www.vim.org/scripts/script.php?script_id=102) Non
37 # (see http://www.vim.org/scripts/script.php?script_id=102) Non
38 # English user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
38 # English user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
39 # your .vimrc
39 # your .vimrc
40 vimdiff = gvim -f "+next" \\
40 vimdiff = gvim -f "+next" \\
41 "+execute 'DirDiff' fnameescape(argv(0)) fnameescape(argv(1))"
41 "+execute 'DirDiff' fnameescape(argv(0)) fnameescape(argv(1))"
42
42
43 Tool arguments can include variables that are expanded at runtime::
43 Tool arguments can include variables that are expanded at runtime::
44
44
45 $parent1, $plabel1 - filename, descriptive label of first parent
45 $parent1, $plabel1 - filename, descriptive label of first parent
46 $child, $clabel - filename, descriptive label of child revision
46 $child, $clabel - filename, descriptive label of child revision
47 $parent2, $plabel2 - filename, descriptive label of second parent
47 $parent2, $plabel2 - filename, descriptive label of second parent
48 $root - repository root
48 $root - repository root
49 $parent is an alias for $parent1.
49 $parent is an alias for $parent1.
50
50
51 The extdiff extension will look in your [diff-tools] and [merge-tools]
51 The extdiff extension will look in your [diff-tools] and [merge-tools]
52 sections for diff tool arguments, when none are specified in [extdiff].
52 sections for diff tool arguments, when none are specified in [extdiff].
53
53
54 ::
54 ::
55
55
56 [extdiff]
56 [extdiff]
57 kdiff3 =
57 kdiff3 =
58
58
59 [diff-tools]
59 [diff-tools]
60 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
60 kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
61
61
62 You can use -I/-X and list of file or directory names like normal
62 You can use -I/-X and list of file or directory names like normal
63 :hg:`diff` command. The extdiff extension makes snapshots of only
63 :hg:`diff` command. The extdiff extension makes snapshots of only
64 needed files, so running the external diff program will actually be
64 needed files, so running the external diff program will actually be
65 pretty fast (at least faster than having to compare the entire tree).
65 pretty fast (at least faster than having to compare the entire tree).
66 '''
66 '''
67
67
68 from __future__ import absolute_import
68 from __future__ import absolute_import
69
69
70 import os
70 import os
71 import re
71 import re
72 import shutil
72 import shutil
73 import stat
73 import stat
74
74
75 from mercurial.i18n import _
75 from mercurial.i18n import _
76 from mercurial.node import (
76 from mercurial.node import (
77 nullid,
77 nullid,
78 short,
78 short,
79 )
79 )
80 from mercurial import (
80 from mercurial import (
81 archival,
81 archival,
82 cmdutil,
82 cmdutil,
83 error,
83 error,
84 filemerge,
84 filemerge,
85 formatter,
85 formatter,
86 pycompat,
86 pycompat,
87 registrar,
87 registrar,
88 scmutil,
88 scmutil,
89 util,
89 util,
90 )
90 )
91 from mercurial.utils import (
91 from mercurial.utils import (
92 procutil,
92 procutil,
93 stringutil,
93 stringutil,
94 )
94 )
95
95
96 cmdtable = {}
96 cmdtable = {}
97 command = registrar.command(cmdtable)
97 command = registrar.command(cmdtable)
98
98
99 configtable = {}
99 configtable = {}
100 configitem = registrar.configitem(configtable)
100 configitem = registrar.configitem(configtable)
101
101
102 configitem('extdiff', br'opts\..*',
102 configitem('extdiff', br'opts\..*',
103 default='',
103 default='',
104 generic=True,
104 generic=True,
105 )
105 )
106
106
107 configitem('diff-tools', br'.*\.diffargs$',
107 configitem('diff-tools', br'.*\.diffargs$',
108 default=None,
108 default=None,
109 generic=True,
109 generic=True,
110 )
110 )
111
111
112 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
112 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
113 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
113 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
114 # be specifying the version(s) of Mercurial they are tested with, or
114 # be specifying the version(s) of Mercurial they are tested with, or
115 # leave the attribute unspecified.
115 # leave the attribute unspecified.
116 testedwith = 'ships-with-hg-core'
116 testedwith = 'ships-with-hg-core'
117
117
118 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
118 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
119 '''snapshot files as of some revision
119 '''snapshot files as of some revision
120 if not using snapshot, -I/-X does not work and recursive diff
120 if not using snapshot, -I/-X does not work and recursive diff
121 in tools like kdiff3 and meld displays too many files.'''
121 in tools like kdiff3 and meld displays too many files.'''
122 dirname = os.path.basename(repo.root)
122 dirname = os.path.basename(repo.root)
123 if dirname == "":
123 if dirname == "":
124 dirname = "root"
124 dirname = "root"
125 if node is not None:
125 if node is not None:
126 dirname = '%s.%s' % (dirname, short(node))
126 dirname = '%s.%s' % (dirname, short(node))
127 base = os.path.join(tmproot, dirname)
127 base = os.path.join(tmproot, dirname)
128 os.mkdir(base)
128 os.mkdir(base)
129 fnsandstat = []
129 fnsandstat = []
130
130
131 if node is not None:
131 if node is not None:
132 ui.note(_('making snapshot of %d files from rev %s\n') %
132 ui.note(_('making snapshot of %d files from rev %s\n') %
133 (len(files), short(node)))
133 (len(files), short(node)))
134 else:
134 else:
135 ui.note(_('making snapshot of %d files from working directory\n') %
135 ui.note(_('making snapshot of %d files from working directory\n') %
136 (len(files)))
136 (len(files)))
137
137
138 if files:
138 if files:
139 repo.ui.setconfig("ui", "archivemeta", False)
139 repo.ui.setconfig("ui", "archivemeta", False)
140
140
141 archival.archive(repo, base, node, 'files',
141 archival.archive(repo, base, node, 'files',
142 matchfn=scmutil.matchfiles(repo, files),
142 match=scmutil.matchfiles(repo, files),
143 subrepos=listsubrepos)
143 subrepos=listsubrepos)
144
144
145 for fn in sorted(files):
145 for fn in sorted(files):
146 wfn = util.pconvert(fn)
146 wfn = util.pconvert(fn)
147 ui.note(' %s\n' % wfn)
147 ui.note(' %s\n' % wfn)
148
148
149 if node is None:
149 if node is None:
150 dest = os.path.join(base, wfn)
150 dest = os.path.join(base, wfn)
151
151
152 fnsandstat.append((dest, repo.wjoin(fn), os.lstat(dest)))
152 fnsandstat.append((dest, repo.wjoin(fn), os.lstat(dest)))
153 return dirname, fnsandstat
153 return dirname, fnsandstat
154
154
155 def formatcmdline(cmdline, repo_root, do3way,
156 parent1, plabel1, parent2, plabel2, child, clabel):
157 # Function to quote file/dir names in the argument string.
158 # When not operating in 3-way mode, an empty string is
159 # returned for parent2
160 replace = {'parent': parent1, 'parent1': parent1, 'parent2': parent2,
161 'plabel1': plabel1, 'plabel2': plabel2,
162 'child': child, 'clabel': clabel,
163 'root': repo_root}
164 def quote(match):
165 pre = match.group(2)
166 key = match.group(3)
167 if not do3way and key == 'parent2':
168 return pre
169 return pre + procutil.shellquote(replace[key])
170
171 # Match parent2 first, so 'parent1?' will match both parent1 and parent
172 regex = (br'''(['"]?)([^\s'"$]*)'''
173 br'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)\1')
174 if not do3way and not re.search(regex, cmdline):
175 cmdline += ' $parent1 $child'
176 return re.sub(regex, quote, cmdline)
177
155 def dodiff(ui, repo, cmdline, pats, opts):
178 def dodiff(ui, repo, cmdline, pats, opts):
156 '''Do the actual diff:
179 '''Do the actual diff:
157
180
158 - copy to a temp structure if diffing 2 internal revisions
181 - copy to a temp structure if diffing 2 internal revisions
159 - copy to a temp structure if diffing working revision with
182 - copy to a temp structure if diffing working revision with
160 another one and more than 1 file is changed
183 another one and more than 1 file is changed
161 - just invoke the diff for a single file in the working dir
184 - just invoke the diff for a single file in the working dir
162 '''
185 '''
163
186
164 revs = opts.get('rev')
187 revs = opts.get('rev')
165 change = opts.get('change')
188 change = opts.get('change')
166 do3way = '$parent2' in cmdline
189 do3way = '$parent2' in cmdline
167
190
168 if revs and change:
191 if revs and change:
169 msg = _('cannot specify --rev and --change at the same time')
192 msg = _('cannot specify --rev and --change at the same time')
170 raise error.Abort(msg)
193 raise error.Abort(msg)
171 elif change:
194 elif change:
172 ctx2 = scmutil.revsingle(repo, change, None)
195 ctx2 = scmutil.revsingle(repo, change, None)
173 ctx1a, ctx1b = ctx2.p1(), ctx2.p2()
196 ctx1a, ctx1b = ctx2.p1(), ctx2.p2()
174 else:
197 else:
175 ctx1a, ctx2 = scmutil.revpair(repo, revs)
198 ctx1a, ctx2 = scmutil.revpair(repo, revs)
176 if not revs:
199 if not revs:
177 ctx1b = repo[None].p2()
200 ctx1b = repo[None].p2()
178 else:
201 else:
179 ctx1b = repo[nullid]
202 ctx1b = repo[nullid]
180
203
181 node1a = ctx1a.node()
204 node1a = ctx1a.node()
182 node1b = ctx1b.node()
205 node1b = ctx1b.node()
183 node2 = ctx2.node()
206 node2 = ctx2.node()
184
207
185 # Disable 3-way merge if there is only one parent
208 # Disable 3-way merge if there is only one parent
186 if do3way:
209 if do3way:
187 if node1b == nullid:
210 if node1b == nullid:
188 do3way = False
211 do3way = False
189
212
190 subrepos=opts.get('subrepos')
213 subrepos=opts.get('subrepos')
191
214
192 matcher = scmutil.match(repo[node2], pats, opts)
215 matcher = scmutil.match(repo[node2], pats, opts)
193
216
194 if opts.get('patch'):
217 if opts.get('patch'):
195 if subrepos:
218 if subrepos:
196 raise error.Abort(_('--patch cannot be used with --subrepos'))
219 raise error.Abort(_('--patch cannot be used with --subrepos'))
197 if node2 is None:
220 if node2 is None:
198 raise error.Abort(_('--patch requires two revisions'))
221 raise error.Abort(_('--patch requires two revisions'))
199 else:
222 else:
200 mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher,
223 mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher,
201 listsubrepos=subrepos)[:3])
224 listsubrepos=subrepos)[:3])
202 if do3way:
225 if do3way:
203 mod_b, add_b, rem_b = map(set,
226 mod_b, add_b, rem_b = map(set,
204 repo.status(node1b, node2, matcher,
227 repo.status(node1b, node2, matcher,
205 listsubrepos=subrepos)[:3])
228 listsubrepos=subrepos)[:3])
206 else:
229 else:
207 mod_b, add_b, rem_b = set(), set(), set()
230 mod_b, add_b, rem_b = set(), set(), set()
208 modadd = mod_a | add_a | mod_b | add_b
231 modadd = mod_a | add_a | mod_b | add_b
209 common = modadd | rem_a | rem_b
232 common = modadd | rem_a | rem_b
210 if not common:
233 if not common:
211 return 0
234 return 0
212
235
213 tmproot = pycompat.mkdtemp(prefix='extdiff.')
236 tmproot = pycompat.mkdtemp(prefix='extdiff.')
214 try:
237 try:
215 if not opts.get('patch'):
238 if not opts.get('patch'):
216 # Always make a copy of node1a (and node1b, if applicable)
239 # Always make a copy of node1a (and node1b, if applicable)
217 dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
240 dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
218 dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot,
241 dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot,
219 subrepos)[0]
242 subrepos)[0]
220 rev1a = '@%d' % repo[node1a].rev()
243 rev1a = '@%d' % repo[node1a].rev()
221 if do3way:
244 if do3way:
222 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
245 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
223 dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot,
246 dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot,
224 subrepos)[0]
247 subrepos)[0]
225 rev1b = '@%d' % repo[node1b].rev()
248 rev1b = '@%d' % repo[node1b].rev()
226 else:
249 else:
227 dir1b = None
250 dir1b = None
228 rev1b = ''
251 rev1b = ''
229
252
230 fnsandstat = []
253 fnsandstat = []
231
254
232 # If node2 in not the wc or there is >1 change, copy it
255 # If node2 in not the wc or there is >1 change, copy it
233 dir2root = ''
256 dir2root = ''
234 rev2 = ''
257 rev2 = ''
235 if node2:
258 if node2:
236 dir2 = snapshot(ui, repo, modadd, node2, tmproot, subrepos)[0]
259 dir2 = snapshot(ui, repo, modadd, node2, tmproot, subrepos)[0]
237 rev2 = '@%d' % repo[node2].rev()
260 rev2 = '@%d' % repo[node2].rev()
238 elif len(common) > 1:
261 elif len(common) > 1:
239 #we only actually need to get the files to copy back to
262 #we only actually need to get the files to copy back to
240 #the working dir in this case (because the other cases
263 #the working dir in this case (because the other cases
241 #are: diffing 2 revisions or single file -- in which case
264 #are: diffing 2 revisions or single file -- in which case
242 #the file is already directly passed to the diff tool).
265 #the file is already directly passed to the diff tool).
243 dir2, fnsandstat = snapshot(ui, repo, modadd, None, tmproot,
266 dir2, fnsandstat = snapshot(ui, repo, modadd, None, tmproot,
244 subrepos)
267 subrepos)
245 else:
268 else:
246 # This lets the diff tool open the changed file directly
269 # This lets the diff tool open the changed file directly
247 dir2 = ''
270 dir2 = ''
248 dir2root = repo.root
271 dir2root = repo.root
249
272
250 label1a = rev1a
273 label1a = rev1a
251 label1b = rev1b
274 label1b = rev1b
252 label2 = rev2
275 label2 = rev2
253
276
254 # If only one change, diff the files instead of the directories
277 # If only one change, diff the files instead of the directories
255 # Handle bogus modifies correctly by checking if the files exist
278 # Handle bogus modifies correctly by checking if the files exist
256 if len(common) == 1:
279 if len(common) == 1:
257 common_file = util.localpath(common.pop())
280 common_file = util.localpath(common.pop())
258 dir1a = os.path.join(tmproot, dir1a, common_file)
281 dir1a = os.path.join(tmproot, dir1a, common_file)
259 label1a = common_file + rev1a
282 label1a = common_file + rev1a
260 if not os.path.isfile(dir1a):
283 if not os.path.isfile(dir1a):
261 dir1a = os.devnull
284 dir1a = os.devnull
262 if do3way:
285 if do3way:
263 dir1b = os.path.join(tmproot, dir1b, common_file)
286 dir1b = os.path.join(tmproot, dir1b, common_file)
264 label1b = common_file + rev1b
287 label1b = common_file + rev1b
265 if not os.path.isfile(dir1b):
288 if not os.path.isfile(dir1b):
266 dir1b = os.devnull
289 dir1b = os.devnull
267 dir2 = os.path.join(dir2root, dir2, common_file)
290 dir2 = os.path.join(dir2root, dir2, common_file)
268 label2 = common_file + rev2
291 label2 = common_file + rev2
269 else:
292 else:
270 template = 'hg-%h.patch'
293 template = 'hg-%h.patch'
271 with formatter.nullformatter(ui, 'extdiff', {}) as fm:
294 with formatter.nullformatter(ui, 'extdiff', {}) as fm:
272 cmdutil.export(repo, [repo[node1a].rev(), repo[node2].rev()],
295 cmdutil.export(repo, [repo[node1a].rev(), repo[node2].rev()],
273 fm,
296 fm,
274 fntemplate=repo.vfs.reljoin(tmproot, template),
297 fntemplate=repo.vfs.reljoin(tmproot, template),
275 match=matcher)
298 match=matcher)
276 label1a = cmdutil.makefilename(repo[node1a], template)
299 label1a = cmdutil.makefilename(repo[node1a], template)
277 label2 = cmdutil.makefilename(repo[node2], template)
300 label2 = cmdutil.makefilename(repo[node2], template)
278 dir1a = repo.vfs.reljoin(tmproot, label1a)
301 dir1a = repo.vfs.reljoin(tmproot, label1a)
279 dir2 = repo.vfs.reljoin(tmproot, label2)
302 dir2 = repo.vfs.reljoin(tmproot, label2)
280 dir1b = None
303 dir1b = None
281 label1b = None
304 label1b = None
282 fnsandstat = []
305 fnsandstat = []
283
306
284 # Function to quote file/dir names in the argument string.
307 # Run the external tool on the 2 temp directories or the patches
285 # When not operating in 3-way mode, an empty string is
308 cmdline = formatcmdline(
286 # returned for parent2
309 cmdline, repo.root, do3way=do3way,
287 replace = {'parent': dir1a, 'parent1': dir1a, 'parent2': dir1b,
310 parent1=dir1a, plabel1=label1a,
288 'plabel1': label1a, 'plabel2': label1b,
311 parent2=dir1b, plabel2=label1b,
289 'clabel': label2, 'child': dir2,
312 child=dir2, clabel=label2)
290 'root': repo.root}
313 ui.debug('running %r in %s\n' % (pycompat.bytestr(cmdline),
291 def quote(match):
314 tmproot))
292 pre = match.group(2)
293 key = match.group(3)
294 if not do3way and key == 'parent2':
295 return pre
296 return pre + procutil.shellquote(replace[key])
297
298 # Match parent2 first, so 'parent1?' will match both parent1 and parent
299 regex = (br'''(['"]?)([^\s'"$]*)'''
300 br'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)\1')
301 if not do3way and not re.search(regex, cmdline):
302 cmdline += ' $parent1 $child'
303 cmdline = re.sub(regex, quote, cmdline)
304
305 ui.debug('running %r in %s\n' % (pycompat.bytestr(cmdline), tmproot))
306 ui.system(cmdline, cwd=tmproot, blockedtag='extdiff')
315 ui.system(cmdline, cwd=tmproot, blockedtag='extdiff')
307
316
308 for copy_fn, working_fn, st in fnsandstat:
317 for copy_fn, working_fn, st in fnsandstat:
309 cpstat = os.lstat(copy_fn)
318 cpstat = os.lstat(copy_fn)
310 # Some tools copy the file and attributes, so mtime may not detect
319 # Some tools copy the file and attributes, so mtime may not detect
311 # all changes. A size check will detect more cases, but not all.
320 # all changes. A size check will detect more cases, but not all.
312 # The only certain way to detect every case is to diff all files,
321 # The only certain way to detect every case is to diff all files,
313 # which could be expensive.
322 # which could be expensive.
314 # copyfile() carries over the permission, so the mode check could
323 # copyfile() carries over the permission, so the mode check could
315 # be in an 'elif' branch, but for the case where the file has
324 # be in an 'elif' branch, but for the case where the file has
316 # changed without affecting mtime or size.
325 # changed without affecting mtime or size.
317 if (cpstat[stat.ST_MTIME] != st[stat.ST_MTIME]
326 if (cpstat[stat.ST_MTIME] != st[stat.ST_MTIME]
318 or cpstat.st_size != st.st_size
327 or cpstat.st_size != st.st_size
319 or (cpstat.st_mode & 0o100) != (st.st_mode & 0o100)):
328 or (cpstat.st_mode & 0o100) != (st.st_mode & 0o100)):
320 ui.debug('file changed while diffing. '
329 ui.debug('file changed while diffing. '
321 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
330 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
322 util.copyfile(copy_fn, working_fn)
331 util.copyfile(copy_fn, working_fn)
323
332
324 return 1
333 return 1
325 finally:
334 finally:
326 ui.note(_('cleaning up temp directory\n'))
335 ui.note(_('cleaning up temp directory\n'))
327 shutil.rmtree(tmproot)
336 shutil.rmtree(tmproot)
328
337
329 extdiffopts = [
338 extdiffopts = [
330 ('o', 'option', [],
339 ('o', 'option', [],
331 _('pass option to comparison program'), _('OPT')),
340 _('pass option to comparison program'), _('OPT')),
332 ('r', 'rev', [], _('revision'), _('REV')),
341 ('r', 'rev', [], _('revision'), _('REV')),
333 ('c', 'change', '', _('change made by revision'), _('REV')),
342 ('c', 'change', '', _('change made by revision'), _('REV')),
334 ('', 'patch', None, _('compare patches for two revisions'))
343 ('', 'patch', None, _('compare patches for two revisions'))
335 ] + cmdutil.walkopts + cmdutil.subrepoopts
344 ] + cmdutil.walkopts + cmdutil.subrepoopts
336
345
337 @command('extdiff',
346 @command('extdiff',
338 [('p', 'program', '', _('comparison program to run'), _('CMD')),
347 [('p', 'program', '', _('comparison program to run'), _('CMD')),
339 ] + extdiffopts,
348 ] + extdiffopts,
340 _('hg extdiff [OPT]... [FILE]...'),
349 _('hg extdiff [OPT]... [FILE]...'),
341 helpcategory=command.CATEGORY_FILE_CONTENTS,
350 helpcategory=command.CATEGORY_FILE_CONTENTS,
342 inferrepo=True)
351 inferrepo=True)
343 def extdiff(ui, repo, *pats, **opts):
352 def extdiff(ui, repo, *pats, **opts):
344 '''use external program to diff repository (or selected files)
353 '''use external program to diff repository (or selected files)
345
354
346 Show differences between revisions for the specified files, using
355 Show differences between revisions for the specified files, using
347 an external program. The default program used is diff, with
356 an external program. The default program used is diff, with
348 default options "-Npru".
357 default options "-Npru".
349
358
350 To select a different program, use the -p/--program option. The
359 To select a different program, use the -p/--program option. The
351 program will be passed the names of two directories to compare. To
360 program will be passed the names of two directories to compare. To
352 pass additional options to the program, use -o/--option. These
361 pass additional options to the program, use -o/--option. These
353 will be passed before the names of the directories to compare.
362 will be passed before the names of the directories to compare.
354
363
355 When two revision arguments are given, then changes are shown
364 When two revision arguments are given, then changes are shown
356 between those revisions. If only one revision is specified then
365 between those revisions. If only one revision is specified then
357 that revision is compared to the working directory, and, when no
366 that revision is compared to the working directory, and, when no
358 revisions are specified, the working directory files are compared
367 revisions are specified, the working directory files are compared
359 to its parent.'''
368 to its parent.'''
360 opts = pycompat.byteskwargs(opts)
369 opts = pycompat.byteskwargs(opts)
361 program = opts.get('program')
370 program = opts.get('program')
362 option = opts.get('option')
371 option = opts.get('option')
363 if not program:
372 if not program:
364 program = 'diff'
373 program = 'diff'
365 option = option or ['-Npru']
374 option = option or ['-Npru']
366 cmdline = ' '.join(map(procutil.shellquote, [program] + option))
375 cmdline = ' '.join(map(procutil.shellquote, [program] + option))
367 return dodiff(ui, repo, cmdline, pats, opts)
376 return dodiff(ui, repo, cmdline, pats, opts)
368
377
369 class savedcmd(object):
378 class savedcmd(object):
370 """use external program to diff repository (or selected files)
379 """use external program to diff repository (or selected files)
371
380
372 Show differences between revisions for the specified files, using
381 Show differences between revisions for the specified files, using
373 the following program::
382 the following program::
374
383
375 %(path)s
384 %(path)s
376
385
377 When two revision arguments are given, then changes are shown
386 When two revision arguments are given, then changes are shown
378 between those revisions. If only one revision is specified then
387 between those revisions. If only one revision is specified then
379 that revision is compared to the working directory, and, when no
388 that revision is compared to the working directory, and, when no
380 revisions are specified, the working directory files are compared
389 revisions are specified, the working directory files are compared
381 to its parent.
390 to its parent.
382 """
391 """
383
392
384 def __init__(self, path, cmdline):
393 def __init__(self, path, cmdline):
385 # We can't pass non-ASCII through docstrings (and path is
394 # We can't pass non-ASCII through docstrings (and path is
386 # in an unknown encoding anyway)
395 # in an unknown encoding anyway), but avoid double separators on
387 docpath = stringutil.escapestr(path)
396 # Windows
397 docpath = stringutil.escapestr(path).replace(b'\\\\', b'\\')
388 self.__doc__ %= {r'path': pycompat.sysstr(stringutil.uirepr(docpath))}
398 self.__doc__ %= {r'path': pycompat.sysstr(stringutil.uirepr(docpath))}
389 self._cmdline = cmdline
399 self._cmdline = cmdline
390
400
391 def __call__(self, ui, repo, *pats, **opts):
401 def __call__(self, ui, repo, *pats, **opts):
392 opts = pycompat.byteskwargs(opts)
402 opts = pycompat.byteskwargs(opts)
393 options = ' '.join(map(procutil.shellquote, opts['option']))
403 options = ' '.join(map(procutil.shellquote, opts['option']))
394 if options:
404 if options:
395 options = ' ' + options
405 options = ' ' + options
396 return dodiff(ui, repo, self._cmdline + options, pats, opts)
406 return dodiff(ui, repo, self._cmdline + options, pats, opts)
397
407
398 def uisetup(ui):
408 def uisetup(ui):
399 for cmd, path in ui.configitems('extdiff'):
409 for cmd, path in ui.configitems('extdiff'):
400 path = util.expandpath(path)
410 path = util.expandpath(path)
401 if cmd.startswith('cmd.'):
411 if cmd.startswith('cmd.'):
402 cmd = cmd[4:]
412 cmd = cmd[4:]
403 if not path:
413 if not path:
404 path = procutil.findexe(cmd)
414 path = procutil.findexe(cmd)
405 if path is None:
415 if path is None:
406 path = filemerge.findexternaltool(ui, cmd) or cmd
416 path = filemerge.findexternaltool(ui, cmd) or cmd
407 diffopts = ui.config('extdiff', 'opts.' + cmd)
417 diffopts = ui.config('extdiff', 'opts.' + cmd)
408 cmdline = procutil.shellquote(path)
418 cmdline = procutil.shellquote(path)
409 if diffopts:
419 if diffopts:
410 cmdline += ' ' + diffopts
420 cmdline += ' ' + diffopts
411 elif cmd.startswith('opts.'):
421 elif cmd.startswith('opts.'):
412 continue
422 continue
413 else:
423 else:
414 if path:
424 if path:
415 # case "cmd = path opts"
425 # case "cmd = path opts"
416 cmdline = path
426 cmdline = path
417 diffopts = len(pycompat.shlexsplit(cmdline)) > 1
427 diffopts = len(pycompat.shlexsplit(cmdline)) > 1
418 else:
428 else:
419 # case "cmd ="
429 # case "cmd ="
420 path = procutil.findexe(cmd)
430 path = procutil.findexe(cmd)
421 if path is None:
431 if path is None:
422 path = filemerge.findexternaltool(ui, cmd) or cmd
432 path = filemerge.findexternaltool(ui, cmd) or cmd
423 cmdline = procutil.shellquote(path)
433 cmdline = procutil.shellquote(path)
424 diffopts = False
434 diffopts = False
425 # look for diff arguments in [diff-tools] then [merge-tools]
435 # look for diff arguments in [diff-tools] then [merge-tools]
426 if not diffopts:
436 if not diffopts:
427 args = ui.config('diff-tools', cmd+'.diffargs') or \
437 args = ui.config('diff-tools', cmd+'.diffargs') or \
428 ui.config('merge-tools', cmd+'.diffargs')
438 ui.config('merge-tools', cmd+'.diffargs')
429 if args:
439 if args:
430 cmdline += ' ' + args
440 cmdline += ' ' + args
431 command(cmd, extdiffopts[:], _('hg %s [OPTION]... [FILE]...') % cmd,
441 command(cmd, extdiffopts[:], _('hg %s [OPTION]... [FILE]...') % cmd,
432 helpcategory=command.CATEGORY_FILE_CONTENTS,
442 helpcategory=command.CATEGORY_FILE_CONTENTS,
433 inferrepo=True)(savedcmd(path, cmdline))
443 inferrepo=True)(savedcmd(path, cmdline))
434
444
435 # tell hggettext to extract docstrings from these functions:
445 # tell hggettext to extract docstrings from these functions:
436 i18nfunctions = [savedcmd]
446 i18nfunctions = [savedcmd]
@@ -1,285 +1,285 b''
1 # Copyright 2016-present Facebook. All Rights Reserved.
1 # Copyright 2016-present Facebook. All Rights Reserved.
2 #
2 #
3 # commands: fastannotate commands
3 # commands: fastannotate commands
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import os
10 import os
11
11
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13 from mercurial import (
13 from mercurial import (
14 commands,
14 commands,
15 encoding,
15 encoding,
16 error,
16 error,
17 extensions,
17 extensions,
18 patch,
18 patch,
19 pycompat,
19 pycompat,
20 registrar,
20 registrar,
21 scmutil,
21 scmutil,
22 util,
22 util,
23 )
23 )
24
24
25 from . import (
25 from . import (
26 context as facontext,
26 context as facontext,
27 error as faerror,
27 error as faerror,
28 formatter as faformatter,
28 formatter as faformatter,
29 )
29 )
30
30
31 cmdtable = {}
31 cmdtable = {}
32 command = registrar.command(cmdtable)
32 command = registrar.command(cmdtable)
33
33
34 def _matchpaths(repo, rev, pats, opts, aopts=facontext.defaultopts):
34 def _matchpaths(repo, rev, pats, opts, aopts=facontext.defaultopts):
35 """generate paths matching given patterns"""
35 """generate paths matching given patterns"""
36 perfhack = repo.ui.configbool('fastannotate', 'perfhack')
36 perfhack = repo.ui.configbool('fastannotate', 'perfhack')
37
37
38 # disable perfhack if:
38 # disable perfhack if:
39 # a) any walkopt is used
39 # a) any walkopt is used
40 # b) if we treat pats as plain file names, some of them do not have
40 # b) if we treat pats as plain file names, some of them do not have
41 # corresponding linelog files
41 # corresponding linelog files
42 if perfhack:
42 if perfhack:
43 # cwd related to reporoot
43 # cwd related to reporoot
44 reporoot = os.path.dirname(repo.path)
44 reporoot = os.path.dirname(repo.path)
45 reldir = os.path.relpath(encoding.getcwd(), reporoot)
45 reldir = os.path.relpath(encoding.getcwd(), reporoot)
46 if reldir == '.':
46 if reldir == '.':
47 reldir = ''
47 reldir = ''
48 if any(opts.get(o[1]) for o in commands.walkopts): # a)
48 if any(opts.get(o[1]) for o in commands.walkopts): # a)
49 perfhack = False
49 perfhack = False
50 else: # b)
50 else: # b)
51 relpats = [os.path.relpath(p, reporoot) if os.path.isabs(p) else p
51 relpats = [os.path.relpath(p, reporoot) if os.path.isabs(p) else p
52 for p in pats]
52 for p in pats]
53 # disable perfhack on '..' since it allows escaping from the repo
53 # disable perfhack on '..' since it allows escaping from the repo
54 if any(('..' in f or
54 if any(('..' in f or
55 not os.path.isfile(
55 not os.path.isfile(
56 facontext.pathhelper(repo, f, aopts).linelogpath))
56 facontext.pathhelper(repo, f, aopts).linelogpath))
57 for f in relpats):
57 for f in relpats):
58 perfhack = False
58 perfhack = False
59
59
60 # perfhack: emit paths directory without checking with manifest
60 # perfhack: emit paths directory without checking with manifest
61 # this can be incorrect if the rev dos not have file.
61 # this can be incorrect if the rev dos not have file.
62 if perfhack:
62 if perfhack:
63 for p in relpats:
63 for p in relpats:
64 yield os.path.join(reldir, p)
64 yield os.path.join(reldir, p)
65 else:
65 else:
66 def bad(x, y):
66 def bad(x, y):
67 raise error.Abort("%s: %s" % (x, y))
67 raise error.Abort("%s: %s" % (x, y))
68 ctx = scmutil.revsingle(repo, rev)
68 ctx = scmutil.revsingle(repo, rev)
69 m = scmutil.match(ctx, pats, opts, badfn=bad)
69 m = scmutil.match(ctx, pats, opts, badfn=bad)
70 for p in ctx.walk(m):
70 for p in ctx.walk(m):
71 yield p
71 yield p
72
72
73 fastannotatecommandargs = {
73 fastannotatecommandargs = {
74 r'options': [
74 r'options': [
75 ('r', 'rev', '.', _('annotate the specified revision'), _('REV')),
75 ('r', 'rev', '.', _('annotate the specified revision'), _('REV')),
76 ('u', 'user', None, _('list the author (long with -v)')),
76 ('u', 'user', None, _('list the author (long with -v)')),
77 ('f', 'file', None, _('list the filename')),
77 ('f', 'file', None, _('list the filename')),
78 ('d', 'date', None, _('list the date (short with -q)')),
78 ('d', 'date', None, _('list the date (short with -q)')),
79 ('n', 'number', None, _('list the revision number (default)')),
79 ('n', 'number', None, _('list the revision number (default)')),
80 ('c', 'changeset', None, _('list the changeset')),
80 ('c', 'changeset', None, _('list the changeset')),
81 ('l', 'line-number', None, _('show line number at the first '
81 ('l', 'line-number', None, _('show line number at the first '
82 'appearance')),
82 'appearance')),
83 ('e', 'deleted', None, _('show deleted lines (slow) (EXPERIMENTAL)')),
83 ('e', 'deleted', None, _('show deleted lines (slow) (EXPERIMENTAL)')),
84 ('', 'no-content', None, _('do not show file content (EXPERIMENTAL)')),
84 ('', 'no-content', None, _('do not show file content (EXPERIMENTAL)')),
85 ('', 'no-follow', None, _("don't follow copies and renames")),
85 ('', 'no-follow', None, _("don't follow copies and renames")),
86 ('', 'linear', None, _('enforce linear history, ignore second parent '
86 ('', 'linear', None, _('enforce linear history, ignore second parent '
87 'of merges (EXPERIMENTAL)')),
87 'of merges (EXPERIMENTAL)')),
88 ('', 'long-hash', None, _('show long changeset hash (EXPERIMENTAL)')),
88 ('', 'long-hash', None, _('show long changeset hash (EXPERIMENTAL)')),
89 ('', 'rebuild', None, _('rebuild cache even if it exists '
89 ('', 'rebuild', None, _('rebuild cache even if it exists '
90 '(EXPERIMENTAL)')),
90 '(EXPERIMENTAL)')),
91 ] + commands.diffwsopts + commands.walkopts + commands.formatteropts,
91 ] + commands.diffwsopts + commands.walkopts + commands.formatteropts,
92 r'synopsis': _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
92 r'synopsis': _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
93 r'inferrepo': True,
93 r'inferrepo': True,
94 }
94 }
95
95
96 def fastannotate(ui, repo, *pats, **opts):
96 def fastannotate(ui, repo, *pats, **opts):
97 """show changeset information by line for each file
97 """show changeset information by line for each file
98
98
99 List changes in files, showing the revision id responsible for each line.
99 List changes in files, showing the revision id responsible for each line.
100
100
101 This command is useful for discovering when a change was made and by whom.
101 This command is useful for discovering when a change was made and by whom.
102
102
103 By default this command prints revision numbers. If you include --file,
103 By default this command prints revision numbers. If you include --file,
104 --user, or --date, the revision number is suppressed unless you also
104 --user, or --date, the revision number is suppressed unless you also
105 include --number. The default format can also be customized by setting
105 include --number. The default format can also be customized by setting
106 fastannotate.defaultformat.
106 fastannotate.defaultformat.
107
107
108 Returns 0 on success.
108 Returns 0 on success.
109
109
110 .. container:: verbose
110 .. container:: verbose
111
111
112 This command uses an implementation different from the vanilla annotate
112 This command uses an implementation different from the vanilla annotate
113 command, which may produce slightly different (while still reasonable)
113 command, which may produce slightly different (while still reasonable)
114 outputs for some cases.
114 outputs for some cases.
115
115
116 Unlike the vanilla anootate, fastannotate follows rename regardless of
116 Unlike the vanilla anootate, fastannotate follows rename regardless of
117 the existence of --file.
117 the existence of --file.
118
118
119 For the best performance when running on a full repo, use -c, -l,
119 For the best performance when running on a full repo, use -c, -l,
120 avoid -u, -d, -n. Use --linear and --no-content to make it even faster.
120 avoid -u, -d, -n. Use --linear and --no-content to make it even faster.
121
121
122 For the best performance when running on a shallow (remotefilelog)
122 For the best performance when running on a shallow (remotefilelog)
123 repo, avoid --linear, --no-follow, or any diff options. As the server
123 repo, avoid --linear, --no-follow, or any diff options. As the server
124 won't be able to populate annotate cache when non-default options
124 won't be able to populate annotate cache when non-default options
125 affecting results are used.
125 affecting results are used.
126 """
126 """
127 if not pats:
127 if not pats:
128 raise error.Abort(_('at least one filename or pattern is required'))
128 raise error.Abort(_('at least one filename or pattern is required'))
129
129
130 # performance hack: filtered repo can be slow. unfilter by default.
130 # performance hack: filtered repo can be slow. unfilter by default.
131 if ui.configbool('fastannotate', 'unfilteredrepo'):
131 if ui.configbool('fastannotate', 'unfilteredrepo'):
132 repo = repo.unfiltered()
132 repo = repo.unfiltered()
133
133
134 opts = pycompat.byteskwargs(opts)
134 opts = pycompat.byteskwargs(opts)
135
135
136 rev = opts.get('rev', '.')
136 rev = opts.get('rev', '.')
137 rebuild = opts.get('rebuild', False)
137 rebuild = opts.get('rebuild', False)
138
138
139 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
139 diffopts = patch.difffeatureopts(ui, opts, section='annotate',
140 whitespace=True)
140 whitespace=True)
141 aopts = facontext.annotateopts(
141 aopts = facontext.annotateopts(
142 diffopts=diffopts,
142 diffopts=diffopts,
143 followmerge=not opts.get('linear', False),
143 followmerge=not opts.get('linear', False),
144 followrename=not opts.get('no_follow', False))
144 followrename=not opts.get('no_follow', False))
145
145
146 if not any(opts.get(s)
146 if not any(opts.get(s)
147 for s in ['user', 'date', 'file', 'number', 'changeset']):
147 for s in ['user', 'date', 'file', 'number', 'changeset']):
148 # default 'number' for compatibility. but fastannotate is more
148 # default 'number' for compatibility. but fastannotate is more
149 # efficient with "changeset", "line-number" and "no-content".
149 # efficient with "changeset", "line-number" and "no-content".
150 for name in ui.configlist('fastannotate', 'defaultformat', ['number']):
150 for name in ui.configlist('fastannotate', 'defaultformat', ['number']):
151 opts[name] = True
151 opts[name] = True
152
152
153 ui.pager('fastannotate')
153 ui.pager('fastannotate')
154 template = opts.get('template')
154 template = opts.get('template')
155 if template == 'json':
155 if template == 'json':
156 formatter = faformatter.jsonformatter(ui, repo, opts)
156 formatter = faformatter.jsonformatter(ui, repo, opts)
157 else:
157 else:
158 formatter = faformatter.defaultformatter(ui, repo, opts)
158 formatter = faformatter.defaultformatter(ui, repo, opts)
159 showdeleted = opts.get('deleted', False)
159 showdeleted = opts.get('deleted', False)
160 showlines = not bool(opts.get('no_content'))
160 showlines = not bool(opts.get('no_content'))
161 showpath = opts.get('file', False)
161 showpath = opts.get('file', False)
162
162
163 # find the head of the main (master) branch
163 # find the head of the main (master) branch
164 master = ui.config('fastannotate', 'mainbranch') or rev
164 master = ui.config('fastannotate', 'mainbranch') or rev
165
165
166 # paths will be used for prefetching and the real annotating
166 # paths will be used for prefetching and the real annotating
167 paths = list(_matchpaths(repo, rev, pats, opts, aopts))
167 paths = list(_matchpaths(repo, rev, pats, opts, aopts))
168
168
169 # for client, prefetch from the server
169 # for client, prefetch from the server
170 if util.safehasattr(repo, 'prefetchfastannotate'):
170 if util.safehasattr(repo, 'prefetchfastannotate'):
171 repo.prefetchfastannotate(paths)
171 repo.prefetchfastannotate(paths)
172
172
173 for path in paths:
173 for path in paths:
174 result = lines = existinglines = None
174 result = lines = existinglines = None
175 while True:
175 while True:
176 try:
176 try:
177 with facontext.annotatecontext(repo, path, aopts, rebuild) as a:
177 with facontext.annotatecontext(repo, path, aopts, rebuild) as a:
178 result = a.annotate(rev, master=master, showpath=showpath,
178 result = a.annotate(rev, master=master, showpath=showpath,
179 showlines=(showlines and
179 showlines=(showlines and
180 not showdeleted))
180 not showdeleted))
181 if showdeleted:
181 if showdeleted:
182 existinglines = set((l[0], l[1]) for l in result)
182 existinglines = set((l[0], l[1]) for l in result)
183 result = a.annotatealllines(
183 result = a.annotatealllines(
184 rev, showpath=showpath, showlines=showlines)
184 rev, showpath=showpath, showlines=showlines)
185 break
185 break
186 except (faerror.CannotReuseError, faerror.CorruptedFileError):
186 except (faerror.CannotReuseError, faerror.CorruptedFileError):
187 # happens if master moves backwards, or the file was deleted
187 # happens if master moves backwards, or the file was deleted
188 # and readded, or renamed to an existing name, or corrupted.
188 # and readded, or renamed to an existing name, or corrupted.
189 if rebuild: # give up since we have tried rebuild already
189 if rebuild: # give up since we have tried rebuild already
190 raise
190 raise
191 else: # try a second time rebuilding the cache (slow)
191 else: # try a second time rebuilding the cache (slow)
192 rebuild = True
192 rebuild = True
193 continue
193 continue
194
194
195 if showlines:
195 if showlines:
196 result, lines = result
196 result, lines = result
197
197
198 formatter.write(result, lines, existinglines=existinglines)
198 formatter.write(result, lines, existinglines=existinglines)
199 formatter.end()
199 formatter.end()
200
200
201 _newopts = set([])
201 _newopts = set([])
202 _knownopts = set([opt[1].replace('-', '_') for opt in
202 _knownopts = set([opt[1].replace('-', '_') for opt in
203 (fastannotatecommandargs[r'options'] + commands.globalopts)])
203 (fastannotatecommandargs[r'options'] + commands.globalopts)])
204
204
205 def _annotatewrapper(orig, ui, repo, *pats, **opts):
205 def _annotatewrapper(orig, ui, repo, *pats, **opts):
206 """used by wrapdefault"""
206 """used by wrapdefault"""
207 # we need this hack until the obsstore has 0.0 seconds perf impact
207 # we need this hack until the obsstore has 0.0 seconds perf impact
208 if ui.configbool('fastannotate', 'unfilteredrepo'):
208 if ui.configbool('fastannotate', 'unfilteredrepo'):
209 repo = repo.unfiltered()
209 repo = repo.unfiltered()
210
210
211 # treat the file as text (skip the isbinary check)
211 # treat the file as text (skip the isbinary check)
212 if ui.configbool('fastannotate', 'forcetext'):
212 if ui.configbool('fastannotate', 'forcetext'):
213 opts[r'text'] = True
213 opts[r'text'] = True
214
214
215 # check if we need to do prefetch (client-side)
215 # check if we need to do prefetch (client-side)
216 rev = opts.get(r'rev')
216 rev = opts.get(r'rev')
217 if util.safehasattr(repo, 'prefetchfastannotate') and rev is not None:
217 if util.safehasattr(repo, 'prefetchfastannotate') and rev is not None:
218 paths = list(_matchpaths(repo, rev, pats, pycompat.byteskwargs(opts)))
218 paths = list(_matchpaths(repo, rev, pats, pycompat.byteskwargs(opts)))
219 repo.prefetchfastannotate(paths)
219 repo.prefetchfastannotate(paths)
220
220
221 return orig(ui, repo, *pats, **opts)
221 return orig(ui, repo, *pats, **opts)
222
222
223 def registercommand():
223 def registercommand():
224 """register the fastannotate command"""
224 """register the fastannotate command"""
225 name = 'fastannotate|fastblame|fa'
225 name = 'fastannotate|fastblame|fa'
226 command(name, helpbasic=True, **fastannotatecommandargs)(fastannotate)
226 command(name, helpbasic=True, **fastannotatecommandargs)(fastannotate)
227
227
228 def wrapdefault():
228 def wrapdefault():
229 """wrap the default annotate command, to be aware of the protocol"""
229 """wrap the default annotate command, to be aware of the protocol"""
230 extensions.wrapcommand(commands.table, 'annotate', _annotatewrapper)
230 extensions.wrapcommand(commands.table, 'annotate', _annotatewrapper)
231
231
232 @command('debugbuildannotatecache',
232 @command('debugbuildannotatecache',
233 [('r', 'rev', '', _('build up to the specific revision'), _('REV'))
233 [('r', 'rev', '', _('build up to the specific revision'), _('REV'))
234 ] + commands.walkopts,
234 ] + commands.walkopts,
235 _('[-r REV] FILE...'))
235 _('[-r REV] FILE...'))
236 def debugbuildannotatecache(ui, repo, *pats, **opts):
236 def debugbuildannotatecache(ui, repo, *pats, **opts):
237 """incrementally build fastannotate cache up to REV for specified files
237 """incrementally build fastannotate cache up to REV for specified files
238
238
239 If REV is not specified, use the config 'fastannotate.mainbranch'.
239 If REV is not specified, use the config 'fastannotate.mainbranch'.
240
240
241 If fastannotate.client is True, download the annotate cache from the
241 If fastannotate.client is True, download the annotate cache from the
242 server. Otherwise, build the annotate cache locally.
242 server. Otherwise, build the annotate cache locally.
243
243
244 The annotate cache will be built using the default diff and follow
244 The annotate cache will be built using the default diff and follow
245 options and lives in '.hg/fastannotate/default'.
245 options and lives in '.hg/fastannotate/default'.
246 """
246 """
247 opts = pycompat.byteskwargs(opts)
247 opts = pycompat.byteskwargs(opts)
248 rev = opts.get('REV') or ui.config('fastannotate', 'mainbranch')
248 rev = opts.get('REV') or ui.config('fastannotate', 'mainbranch')
249 if not rev:
249 if not rev:
250 raise error.Abort(_('you need to provide a revision'),
250 raise error.Abort(_('you need to provide a revision'),
251 hint=_('set fastannotate.mainbranch or use --rev'))
251 hint=_('set fastannotate.mainbranch or use --rev'))
252 if ui.configbool('fastannotate', 'unfilteredrepo'):
252 if ui.configbool('fastannotate', 'unfilteredrepo'):
253 repo = repo.unfiltered()
253 repo = repo.unfiltered()
254 ctx = scmutil.revsingle(repo, rev)
254 ctx = scmutil.revsingle(repo, rev)
255 m = scmutil.match(ctx, pats, opts)
255 m = scmutil.match(ctx, pats, opts)
256 paths = list(ctx.walk(m))
256 paths = list(ctx.walk(m))
257 if util.safehasattr(repo, 'prefetchfastannotate'):
257 if util.safehasattr(repo, 'prefetchfastannotate'):
258 # client
258 # client
259 if opts.get('REV'):
259 if opts.get('REV'):
260 raise error.Abort(_('--rev cannot be used for client'))
260 raise error.Abort(_('--rev cannot be used for client'))
261 repo.prefetchfastannotate(paths)
261 repo.prefetchfastannotate(paths)
262 else:
262 else:
263 # server, or full repo
263 # server, or full repo
264 progress = ui.makeprogress(_('building'), total=len(paths))
264 for i, path in enumerate(paths):
265 for i, path in enumerate(paths):
265 ui.progress(_('building'), i, total=len(paths))
266 progress.update(i)
266 with facontext.annotatecontext(repo, path) as actx:
267 with facontext.annotatecontext(repo, path) as actx:
267 try:
268 try:
268 if actx.isuptodate(rev):
269 if actx.isuptodate(rev):
269 continue
270 continue
270 actx.annotate(rev, rev)
271 actx.annotate(rev, rev)
271 except (faerror.CannotReuseError, faerror.CorruptedFileError):
272 except (faerror.CannotReuseError, faerror.CorruptedFileError):
272 # the cache is broken (could happen with renaming so the
273 # the cache is broken (could happen with renaming so the
273 # file history gets invalidated). rebuild and try again.
274 # file history gets invalidated). rebuild and try again.
274 ui.debug('fastannotate: %s: rebuilding broken cache\n'
275 ui.debug('fastannotate: %s: rebuilding broken cache\n'
275 % path)
276 % path)
276 actx.rebuild()
277 actx.rebuild()
277 try:
278 try:
278 actx.annotate(rev, rev)
279 actx.annotate(rev, rev)
279 except Exception as ex:
280 except Exception as ex:
280 # possibly a bug, but should not stop us from building
281 # possibly a bug, but should not stop us from building
281 # cache for other files.
282 # cache for other files.
282 ui.warn(_('fastannotate: %s: failed to '
283 ui.warn(_('fastannotate: %s: failed to '
283 'build cache: %r\n') % (path, ex))
284 'build cache: %r\n') % (path, ex))
284 # clear the progress bar
285 progress.complete()
285 ui.write()
@@ -1,828 +1,826 b''
1 # Copyright 2016-present Facebook. All Rights Reserved.
1 # Copyright 2016-present Facebook. All Rights Reserved.
2 #
2 #
3 # context: context needed to annotate a file
3 # context: context needed to annotate a file
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import hashlib
12 import hashlib
13 import os
13 import os
14
14
15 from mercurial.i18n import _
15 from mercurial.i18n import _
16 from mercurial import (
16 from mercurial import (
17 error,
17 error,
18 linelog as linelogmod,
18 linelog as linelogmod,
19 lock as lockmod,
19 lock as lockmod,
20 mdiff,
20 mdiff,
21 node,
21 node,
22 pycompat,
22 pycompat,
23 scmutil,
23 scmutil,
24 util,
24 util,
25 )
25 )
26 from mercurial.utils import (
26 from mercurial.utils import (
27 stringutil,
27 stringutil,
28 )
28 )
29
29
30 from . import (
30 from . import (
31 error as faerror,
31 error as faerror,
32 revmap as revmapmod,
32 revmap as revmapmod,
33 )
33 )
34
34
35 # given path, get filelog, cached
35 # given path, get filelog, cached
36 @util.lrucachefunc
36 @util.lrucachefunc
37 def _getflog(repo, path):
37 def _getflog(repo, path):
38 return repo.file(path)
38 return repo.file(path)
39
39
40 # extracted from mercurial.context.basefilectx.annotate
40 # extracted from mercurial.context.basefilectx.annotate
41 def _parents(f, follow=True):
41 def _parents(f, follow=True):
42 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
42 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
43 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
43 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
44 # from the topmost introrev (= srcrev) down to p.linkrev() if it
44 # from the topmost introrev (= srcrev) down to p.linkrev() if it
45 # isn't an ancestor of the srcrev.
45 # isn't an ancestor of the srcrev.
46 f._changeid
46 f._changeid
47 pl = f.parents()
47 pl = f.parents()
48
48
49 # Don't return renamed parents if we aren't following.
49 # Don't return renamed parents if we aren't following.
50 if not follow:
50 if not follow:
51 pl = [p for p in pl if p.path() == f.path()]
51 pl = [p for p in pl if p.path() == f.path()]
52
52
53 # renamed filectx won't have a filelog yet, so set it
53 # renamed filectx won't have a filelog yet, so set it
54 # from the cache to save time
54 # from the cache to save time
55 for p in pl:
55 for p in pl:
56 if not '_filelog' in p.__dict__:
56 if not '_filelog' in p.__dict__:
57 p._filelog = _getflog(f._repo, p.path())
57 p._filelog = _getflog(f._repo, p.path())
58
58
59 return pl
59 return pl
60
60
61 # extracted from mercurial.context.basefilectx.annotate. slightly modified
61 # extracted from mercurial.context.basefilectx.annotate. slightly modified
62 # so it takes a fctx instead of a pair of text and fctx.
62 # so it takes a fctx instead of a pair of text and fctx.
63 def _decorate(fctx):
63 def _decorate(fctx):
64 text = fctx.data()
64 text = fctx.data()
65 linecount = text.count('\n')
65 linecount = text.count('\n')
66 if text and not text.endswith('\n'):
66 if text and not text.endswith('\n'):
67 linecount += 1
67 linecount += 1
68 return ([(fctx, i) for i in pycompat.xrange(linecount)], text)
68 return ([(fctx, i) for i in pycompat.xrange(linecount)], text)
69
69
70 # extracted from mercurial.context.basefilectx.annotate. slightly modified
70 # extracted from mercurial.context.basefilectx.annotate. slightly modified
71 # so it takes an extra "blocks" parameter calculated elsewhere, instead of
71 # so it takes an extra "blocks" parameter calculated elsewhere, instead of
72 # calculating diff here.
72 # calculating diff here.
73 def _pair(parent, child, blocks):
73 def _pair(parent, child, blocks):
74 for (a1, a2, b1, b2), t in blocks:
74 for (a1, a2, b1, b2), t in blocks:
75 # Changed blocks ('!') or blocks made only of blank lines ('~')
75 # Changed blocks ('!') or blocks made only of blank lines ('~')
76 # belong to the child.
76 # belong to the child.
77 if t == '=':
77 if t == '=':
78 child[0][b1:b2] = parent[0][a1:a2]
78 child[0][b1:b2] = parent[0][a1:a2]
79 return child
79 return child
80
80
81 # like scmutil.revsingle, but with lru cache, so their states (like manifests)
81 # like scmutil.revsingle, but with lru cache, so their states (like manifests)
82 # could be reused
82 # could be reused
83 _revsingle = util.lrucachefunc(scmutil.revsingle)
83 _revsingle = util.lrucachefunc(scmutil.revsingle)
84
84
85 def resolvefctx(repo, rev, path, resolverev=False, adjustctx=None):
85 def resolvefctx(repo, rev, path, resolverev=False, adjustctx=None):
86 """(repo, str, str) -> fctx
86 """(repo, str, str) -> fctx
87
87
88 get the filectx object from repo, rev, path, in an efficient way.
88 get the filectx object from repo, rev, path, in an efficient way.
89
89
90 if resolverev is True, "rev" is a revision specified by the revset
90 if resolverev is True, "rev" is a revision specified by the revset
91 language, otherwise "rev" is a nodeid, or a revision number that can
91 language, otherwise "rev" is a nodeid, or a revision number that can
92 be consumed by repo.__getitem__.
92 be consumed by repo.__getitem__.
93
93
94 if adjustctx is not None, the returned fctx will point to a changeset
94 if adjustctx is not None, the returned fctx will point to a changeset
95 that introduces the change (last modified the file). if adjustctx
95 that introduces the change (last modified the file). if adjustctx
96 is 'linkrev', trust the linkrev and do not adjust it. this is noticeably
96 is 'linkrev', trust the linkrev and do not adjust it. this is noticeably
97 faster for big repos but is incorrect for some cases.
97 faster for big repos but is incorrect for some cases.
98 """
98 """
99 if resolverev and not isinstance(rev, int) and rev is not None:
99 if resolverev and not isinstance(rev, int) and rev is not None:
100 ctx = _revsingle(repo, rev)
100 ctx = _revsingle(repo, rev)
101 else:
101 else:
102 ctx = repo[rev]
102 ctx = repo[rev]
103
103
104 # If we don't need to adjust the linkrev, create the filectx using the
104 # If we don't need to adjust the linkrev, create the filectx using the
105 # changectx instead of using ctx[path]. This means it already has the
105 # changectx instead of using ctx[path]. This means it already has the
106 # changectx information, so blame -u will be able to look directly at the
106 # changectx information, so blame -u will be able to look directly at the
107 # commitctx object instead of having to resolve it by going through the
107 # commitctx object instead of having to resolve it by going through the
108 # manifest. In a lazy-manifest world this can prevent us from downloading a
108 # manifest. In a lazy-manifest world this can prevent us from downloading a
109 # lot of data.
109 # lot of data.
110 if adjustctx is None:
110 if adjustctx is None:
111 # ctx.rev() is None means it's the working copy, which is a special
111 # ctx.rev() is None means it's the working copy, which is a special
112 # case.
112 # case.
113 if ctx.rev() is None:
113 if ctx.rev() is None:
114 fctx = ctx[path]
114 fctx = ctx[path]
115 else:
115 else:
116 fctx = repo.filectx(path, changeid=ctx.rev())
116 fctx = repo.filectx(path, changeid=ctx.rev())
117 else:
117 else:
118 fctx = ctx[path]
118 fctx = ctx[path]
119 if adjustctx == 'linkrev':
119 if adjustctx == 'linkrev':
120 introrev = fctx.linkrev()
120 introrev = fctx.linkrev()
121 else:
121 else:
122 introrev = fctx.introrev()
122 introrev = fctx.introrev()
123 if introrev != ctx.rev():
123 if introrev != ctx.rev():
124 fctx._changeid = introrev
124 fctx._changeid = introrev
125 fctx._changectx = repo[introrev]
125 fctx._changectx = repo[introrev]
126 return fctx
126 return fctx
127
127
128 # like mercurial.store.encodedir, but use linelog suffixes: .m, .l, .lock
128 # like mercurial.store.encodedir, but use linelog suffixes: .m, .l, .lock
129 def encodedir(path):
129 def encodedir(path):
130 return (path
130 return (path
131 .replace('.hg/', '.hg.hg/')
131 .replace('.hg/', '.hg.hg/')
132 .replace('.l/', '.l.hg/')
132 .replace('.l/', '.l.hg/')
133 .replace('.m/', '.m.hg/')
133 .replace('.m/', '.m.hg/')
134 .replace('.lock/', '.lock.hg/'))
134 .replace('.lock/', '.lock.hg/'))
135
135
136 def hashdiffopts(diffopts):
136 def hashdiffopts(diffopts):
137 diffoptstr = stringutil.pprint(sorted(
137 diffoptstr = stringutil.pprint(sorted(
138 (k, getattr(diffopts, k))
138 (k, getattr(diffopts, k))
139 for k in mdiff.diffopts.defaults
139 for k in mdiff.diffopts.defaults
140 ))
140 ))
141 return hashlib.sha1(diffoptstr).hexdigest()[:6]
141 return node.hex(hashlib.sha1(diffoptstr).digest())[:6]
142
142
143 _defaultdiffopthash = hashdiffopts(mdiff.defaultopts)
143 _defaultdiffopthash = hashdiffopts(mdiff.defaultopts)
144
144
145 class annotateopts(object):
145 class annotateopts(object):
146 """like mercurial.mdiff.diffopts, but is for annotate
146 """like mercurial.mdiff.diffopts, but is for annotate
147
147
148 followrename: follow renames, like "hg annotate -f"
148 followrename: follow renames, like "hg annotate -f"
149 followmerge: follow p2 of a merge changeset, otherwise p2 is ignored
149 followmerge: follow p2 of a merge changeset, otherwise p2 is ignored
150 """
150 """
151
151
152 defaults = {
152 defaults = {
153 'diffopts': None,
153 'diffopts': None,
154 'followrename': True,
154 'followrename': True,
155 'followmerge': True,
155 'followmerge': True,
156 }
156 }
157
157
158 def __init__(self, **opts):
158 def __init__(self, **opts):
159 opts = pycompat.byteskwargs(opts)
159 for k, v in self.defaults.iteritems():
160 for k, v in self.defaults.iteritems():
160 setattr(self, k, opts.get(k, v))
161 setattr(self, k, opts.get(k, v))
161
162
162 @util.propertycache
163 @util.propertycache
163 def shortstr(self):
164 def shortstr(self):
164 """represent opts in a short string, suitable for a directory name"""
165 """represent opts in a short string, suitable for a directory name"""
165 result = ''
166 result = ''
166 if not self.followrename:
167 if not self.followrename:
167 result += 'r0'
168 result += 'r0'
168 if not self.followmerge:
169 if not self.followmerge:
169 result += 'm0'
170 result += 'm0'
170 if self.diffopts is not None:
171 if self.diffopts is not None:
171 assert isinstance(self.diffopts, mdiff.diffopts)
172 assert isinstance(self.diffopts, mdiff.diffopts)
172 diffopthash = hashdiffopts(self.diffopts)
173 diffopthash = hashdiffopts(self.diffopts)
173 if diffopthash != _defaultdiffopthash:
174 if diffopthash != _defaultdiffopthash:
174 result += 'i' + diffopthash
175 result += 'i' + diffopthash
175 return result or 'default'
176 return result or 'default'
176
177
177 defaultopts = annotateopts()
178 defaultopts = annotateopts()
178
179
179 class _annotatecontext(object):
180 class _annotatecontext(object):
180 """do not use this class directly as it does not use lock to protect
181 """do not use this class directly as it does not use lock to protect
181 writes. use "with annotatecontext(...)" instead.
182 writes. use "with annotatecontext(...)" instead.
182 """
183 """
183
184
184 def __init__(self, repo, path, linelogpath, revmappath, opts):
185 def __init__(self, repo, path, linelogpath, revmappath, opts):
185 self.repo = repo
186 self.repo = repo
186 self.ui = repo.ui
187 self.ui = repo.ui
187 self.path = path
188 self.path = path
188 self.opts = opts
189 self.opts = opts
189 self.linelogpath = linelogpath
190 self.linelogpath = linelogpath
190 self.revmappath = revmappath
191 self.revmappath = revmappath
191 self._linelog = None
192 self._linelog = None
192 self._revmap = None
193 self._revmap = None
193 self._node2path = {} # {str: str}
194 self._node2path = {} # {str: str}
194
195
195 @property
196 @property
196 def linelog(self):
197 def linelog(self):
197 if self._linelog is None:
198 if self._linelog is None:
198 if os.path.exists(self.linelogpath):
199 if os.path.exists(self.linelogpath):
199 with open(self.linelogpath, 'rb') as f:
200 with open(self.linelogpath, 'rb') as f:
200 try:
201 try:
201 self._linelog = linelogmod.linelog.fromdata(f.read())
202 self._linelog = linelogmod.linelog.fromdata(f.read())
202 except linelogmod.LineLogError:
203 except linelogmod.LineLogError:
203 self._linelog = linelogmod.linelog()
204 self._linelog = linelogmod.linelog()
204 else:
205 else:
205 self._linelog = linelogmod.linelog()
206 self._linelog = linelogmod.linelog()
206 return self._linelog
207 return self._linelog
207
208
208 @property
209 @property
209 def revmap(self):
210 def revmap(self):
210 if self._revmap is None:
211 if self._revmap is None:
211 self._revmap = revmapmod.revmap(self.revmappath)
212 self._revmap = revmapmod.revmap(self.revmappath)
212 return self._revmap
213 return self._revmap
213
214
214 def close(self):
215 def close(self):
215 if self._revmap is not None:
216 if self._revmap is not None:
216 self._revmap.flush()
217 self._revmap.flush()
217 self._revmap = None
218 self._revmap = None
218 if self._linelog is not None:
219 if self._linelog is not None:
219 with open(self.linelogpath, 'wb') as f:
220 with open(self.linelogpath, 'wb') as f:
220 f.write(self._linelog.encode())
221 f.write(self._linelog.encode())
221 self._linelog = None
222 self._linelog = None
222
223
223 __del__ = close
224 __del__ = close
224
225
225 def rebuild(self):
226 def rebuild(self):
226 """delete linelog and revmap, useful for rebuilding"""
227 """delete linelog and revmap, useful for rebuilding"""
227 self.close()
228 self.close()
228 self._node2path.clear()
229 self._node2path.clear()
229 _unlinkpaths([self.revmappath, self.linelogpath])
230 _unlinkpaths([self.revmappath, self.linelogpath])
230
231
231 @property
232 @property
232 def lastnode(self):
233 def lastnode(self):
233 """return last node in revmap, or None if revmap is empty"""
234 """return last node in revmap, or None if revmap is empty"""
234 if self._revmap is None:
235 if self._revmap is None:
235 # fast path, read revmap without loading its full content
236 # fast path, read revmap without loading its full content
236 return revmapmod.getlastnode(self.revmappath)
237 return revmapmod.getlastnode(self.revmappath)
237 else:
238 else:
238 return self._revmap.rev2hsh(self._revmap.maxrev)
239 return self._revmap.rev2hsh(self._revmap.maxrev)
239
240
240 def isuptodate(self, master, strict=True):
241 def isuptodate(self, master, strict=True):
241 """return True if the revmap / linelog is up-to-date, or the file
242 """return True if the revmap / linelog is up-to-date, or the file
242 does not exist in the master revision. False otherwise.
243 does not exist in the master revision. False otherwise.
243
244
244 it tries to be fast and could return false negatives, because of the
245 it tries to be fast and could return false negatives, because of the
245 use of linkrev instead of introrev.
246 use of linkrev instead of introrev.
246
247
247 useful for both server and client to decide whether to update
248 useful for both server and client to decide whether to update
248 fastannotate cache or not.
249 fastannotate cache or not.
249
250
250 if strict is True, even if fctx exists in the revmap, but is not the
251 if strict is True, even if fctx exists in the revmap, but is not the
251 last node, isuptodate will return False. it's good for performance - no
252 last node, isuptodate will return False. it's good for performance - no
252 expensive check was done.
253 expensive check was done.
253
254
254 if strict is False, if fctx exists in the revmap, this function may
255 if strict is False, if fctx exists in the revmap, this function may
255 return True. this is useful for the client to skip downloading the
256 return True. this is useful for the client to skip downloading the
256 cache if the client's master is behind the server's.
257 cache if the client's master is behind the server's.
257 """
258 """
258 lastnode = self.lastnode
259 lastnode = self.lastnode
259 try:
260 try:
260 f = self._resolvefctx(master, resolverev=True)
261 f = self._resolvefctx(master, resolverev=True)
261 # choose linkrev instead of introrev as the check is meant to be
262 # choose linkrev instead of introrev as the check is meant to be
262 # *fast*.
263 # *fast*.
263 linknode = self.repo.changelog.node(f.linkrev())
264 linknode = self.repo.changelog.node(f.linkrev())
264 if not strict and lastnode and linknode != lastnode:
265 if not strict and lastnode and linknode != lastnode:
265 # check if f.node() is in the revmap. note: this loads the
266 # check if f.node() is in the revmap. note: this loads the
266 # revmap and can be slow.
267 # revmap and can be slow.
267 return self.revmap.hsh2rev(linknode) is not None
268 return self.revmap.hsh2rev(linknode) is not None
268 # avoid resolving old manifest, or slow adjustlinkrev to be fast,
269 # avoid resolving old manifest, or slow adjustlinkrev to be fast,
269 # false negatives are acceptable in this case.
270 # false negatives are acceptable in this case.
270 return linknode == lastnode
271 return linknode == lastnode
271 except LookupError:
272 except LookupError:
272 # master does not have the file, or the revmap is ahead
273 # master does not have the file, or the revmap is ahead
273 return True
274 return True
274
275
275 def annotate(self, rev, master=None, showpath=False, showlines=False):
276 def annotate(self, rev, master=None, showpath=False, showlines=False):
276 """incrementally update the cache so it includes revisions in the main
277 """incrementally update the cache so it includes revisions in the main
277 branch till 'master'. and run annotate on 'rev', which may or may not be
278 branch till 'master'. and run annotate on 'rev', which may or may not be
278 included in the main branch.
279 included in the main branch.
279
280
280 if master is None, do not update linelog.
281 if master is None, do not update linelog.
281
282
282 the first value returned is the annotate result, it is [(node, linenum)]
283 the first value returned is the annotate result, it is [(node, linenum)]
283 by default. [(node, linenum, path)] if showpath is True.
284 by default. [(node, linenum, path)] if showpath is True.
284
285
285 if showlines is True, a second value will be returned, it is a list of
286 if showlines is True, a second value will be returned, it is a list of
286 corresponding line contents.
287 corresponding line contents.
287 """
288 """
288
289
289 # the fast path test requires commit hash, convert rev number to hash,
290 # the fast path test requires commit hash, convert rev number to hash,
290 # so it may hit the fast path. note: in the "fctx" mode, the "annotate"
291 # so it may hit the fast path. note: in the "fctx" mode, the "annotate"
291 # command could give us a revision number even if the user passes a
292 # command could give us a revision number even if the user passes a
292 # commit hash.
293 # commit hash.
293 if isinstance(rev, int):
294 if isinstance(rev, int):
294 rev = node.hex(self.repo.changelog.node(rev))
295 rev = node.hex(self.repo.changelog.node(rev))
295
296
296 # fast path: if rev is in the main branch already
297 # fast path: if rev is in the main branch already
297 directly, revfctx = self.canannotatedirectly(rev)
298 directly, revfctx = self.canannotatedirectly(rev)
298 if directly:
299 if directly:
299 if self.ui.debugflag:
300 if self.ui.debugflag:
300 self.ui.debug('fastannotate: %s: using fast path '
301 self.ui.debug('fastannotate: %s: using fast path '
301 '(resolved fctx: %s)\n'
302 '(resolved fctx: %s)\n'
302 % (self.path,
303 % (self.path,
303 stringutil.pprint(util.safehasattr(revfctx,
304 stringutil.pprint(util.safehasattr(revfctx,
304 'node'))))
305 'node'))))
305 return self.annotatedirectly(revfctx, showpath, showlines)
306 return self.annotatedirectly(revfctx, showpath, showlines)
306
307
307 # resolve master
308 # resolve master
308 masterfctx = None
309 masterfctx = None
309 if master:
310 if master:
310 try:
311 try:
311 masterfctx = self._resolvefctx(master, resolverev=True,
312 masterfctx = self._resolvefctx(master, resolverev=True,
312 adjustctx=True)
313 adjustctx=True)
313 except LookupError: # master does not have the file
314 except LookupError: # master does not have the file
314 pass
315 pass
315 else:
316 else:
316 if masterfctx in self.revmap: # no need to update linelog
317 if masterfctx in self.revmap: # no need to update linelog
317 masterfctx = None
318 masterfctx = None
318
319
319 # ... - @ <- rev (can be an arbitrary changeset,
320 # ... - @ <- rev (can be an arbitrary changeset,
320 # / not necessarily a descendant
321 # / not necessarily a descendant
321 # master -> o of master)
322 # master -> o of master)
322 # |
323 # |
323 # a merge -> o 'o': new changesets in the main branch
324 # a merge -> o 'o': new changesets in the main branch
324 # |\ '#': revisions in the main branch that
325 # |\ '#': revisions in the main branch that
325 # o * exist in linelog / revmap
326 # o * exist in linelog / revmap
326 # | . '*': changesets in side branches, or
327 # | . '*': changesets in side branches, or
327 # last master -> # . descendants of master
328 # last master -> # . descendants of master
328 # | .
329 # | .
329 # # * joint: '#', and is a parent of a '*'
330 # # * joint: '#', and is a parent of a '*'
330 # |/
331 # |/
331 # a joint -> # ^^^^ --- side branches
332 # a joint -> # ^^^^ --- side branches
332 # |
333 # |
333 # ^ --- main branch (in linelog)
334 # ^ --- main branch (in linelog)
334
335
335 # these DFSes are similar to the traditional annotate algorithm.
336 # these DFSes are similar to the traditional annotate algorithm.
336 # we cannot really reuse the code for perf reason.
337 # we cannot really reuse the code for perf reason.
337
338
338 # 1st DFS calculates merges, joint points, and needed.
339 # 1st DFS calculates merges, joint points, and needed.
339 # "needed" is a simple reference counting dict to free items in
340 # "needed" is a simple reference counting dict to free items in
340 # "hist", reducing its memory usage otherwise could be huge.
341 # "hist", reducing its memory usage otherwise could be huge.
341 initvisit = [revfctx]
342 initvisit = [revfctx]
342 if masterfctx:
343 if masterfctx:
343 if masterfctx.rev() is None:
344 if masterfctx.rev() is None:
344 raise error.Abort(_('cannot update linelog to wdir()'),
345 raise error.Abort(_('cannot update linelog to wdir()'),
345 hint=_('set fastannotate.mainbranch'))
346 hint=_('set fastannotate.mainbranch'))
346 initvisit.append(masterfctx)
347 initvisit.append(masterfctx)
347 visit = initvisit[:]
348 visit = initvisit[:]
348 pcache = {}
349 pcache = {}
349 needed = {revfctx: 1}
350 needed = {revfctx: 1}
350 hist = {} # {fctx: ([(llrev or fctx, linenum)], text)}
351 hist = {} # {fctx: ([(llrev or fctx, linenum)], text)}
351 while visit:
352 while visit:
352 f = visit.pop()
353 f = visit.pop()
353 if f in pcache or f in hist:
354 if f in pcache or f in hist:
354 continue
355 continue
355 if f in self.revmap: # in the old main branch, it's a joint
356 if f in self.revmap: # in the old main branch, it's a joint
356 llrev = self.revmap.hsh2rev(f.node())
357 llrev = self.revmap.hsh2rev(f.node())
357 self.linelog.annotate(llrev)
358 self.linelog.annotate(llrev)
358 result = self.linelog.annotateresult
359 result = self.linelog.annotateresult
359 hist[f] = (result, f.data())
360 hist[f] = (result, f.data())
360 continue
361 continue
361 pl = self._parentfunc(f)
362 pl = self._parentfunc(f)
362 pcache[f] = pl
363 pcache[f] = pl
363 for p in pl:
364 for p in pl:
364 needed[p] = needed.get(p, 0) + 1
365 needed[p] = needed.get(p, 0) + 1
365 if p not in pcache:
366 if p not in pcache:
366 visit.append(p)
367 visit.append(p)
367
368
368 # 2nd (simple) DFS calculates new changesets in the main branch
369 # 2nd (simple) DFS calculates new changesets in the main branch
369 # ('o' nodes in # the above graph), so we know when to update linelog.
370 # ('o' nodes in # the above graph), so we know when to update linelog.
370 newmainbranch = set()
371 newmainbranch = set()
371 f = masterfctx
372 f = masterfctx
372 while f and f not in self.revmap:
373 while f and f not in self.revmap:
373 newmainbranch.add(f)
374 newmainbranch.add(f)
374 pl = pcache[f]
375 pl = pcache[f]
375 if pl:
376 if pl:
376 f = pl[0]
377 f = pl[0]
377 else:
378 else:
378 f = None
379 f = None
379 break
380 break
380
381
381 # f, if present, is the position where the last build stopped at, and
382 # f, if present, is the position where the last build stopped at, and
382 # should be the "master" last time. check to see if we can continue
383 # should be the "master" last time. check to see if we can continue
383 # building the linelog incrementally. (we cannot if diverged)
384 # building the linelog incrementally. (we cannot if diverged)
384 if masterfctx is not None:
385 if masterfctx is not None:
385 self._checklastmasterhead(f)
386 self._checklastmasterhead(f)
386
387
387 if self.ui.debugflag:
388 if self.ui.debugflag:
388 if newmainbranch:
389 if newmainbranch:
389 self.ui.debug('fastannotate: %s: %d new changesets in the main'
390 self.ui.debug('fastannotate: %s: %d new changesets in the main'
390 ' branch\n' % (self.path, len(newmainbranch)))
391 ' branch\n' % (self.path, len(newmainbranch)))
391 elif not hist: # no joints, no updates
392 elif not hist: # no joints, no updates
392 self.ui.debug('fastannotate: %s: linelog cannot help in '
393 self.ui.debug('fastannotate: %s: linelog cannot help in '
393 'annotating this revision\n' % self.path)
394 'annotating this revision\n' % self.path)
394
395
395 # prepare annotateresult so we can update linelog incrementally
396 # prepare annotateresult so we can update linelog incrementally
396 self.linelog.annotate(self.linelog.maxrev)
397 self.linelog.annotate(self.linelog.maxrev)
397
398
398 # 3rd DFS does the actual annotate
399 # 3rd DFS does the actual annotate
399 visit = initvisit[:]
400 visit = initvisit[:]
400 progress = 0
401 progress = self.ui.makeprogress(('building cache'),
402 total=len(newmainbranch))
401 while visit:
403 while visit:
402 f = visit[-1]
404 f = visit[-1]
403 if f in hist:
405 if f in hist:
404 visit.pop()
406 visit.pop()
405 continue
407 continue
406
408
407 ready = True
409 ready = True
408 pl = pcache[f]
410 pl = pcache[f]
409 for p in pl:
411 for p in pl:
410 if p not in hist:
412 if p not in hist:
411 ready = False
413 ready = False
412 visit.append(p)
414 visit.append(p)
413 if not ready:
415 if not ready:
414 continue
416 continue
415
417
416 visit.pop()
418 visit.pop()
417 blocks = None # mdiff blocks, used for appending linelog
419 blocks = None # mdiff blocks, used for appending linelog
418 ismainbranch = (f in newmainbranch)
420 ismainbranch = (f in newmainbranch)
419 # curr is the same as the traditional annotate algorithm,
421 # curr is the same as the traditional annotate algorithm,
420 # if we only care about linear history (do not follow merge),
422 # if we only care about linear history (do not follow merge),
421 # then curr is not actually used.
423 # then curr is not actually used.
422 assert f not in hist
424 assert f not in hist
423 curr = _decorate(f)
425 curr = _decorate(f)
424 for i, p in enumerate(pl):
426 for i, p in enumerate(pl):
425 bs = list(self._diffblocks(hist[p][1], curr[1]))
427 bs = list(self._diffblocks(hist[p][1], curr[1]))
426 if i == 0 and ismainbranch:
428 if i == 0 and ismainbranch:
427 blocks = bs
429 blocks = bs
428 curr = _pair(hist[p], curr, bs)
430 curr = _pair(hist[p], curr, bs)
429 if needed[p] == 1:
431 if needed[p] == 1:
430 del hist[p]
432 del hist[p]
431 del needed[p]
433 del needed[p]
432 else:
434 else:
433 needed[p] -= 1
435 needed[p] -= 1
434
436
435 hist[f] = curr
437 hist[f] = curr
436 del pcache[f]
438 del pcache[f]
437
439
438 if ismainbranch: # need to write to linelog
440 if ismainbranch: # need to write to linelog
439 if not self.ui.quiet:
441 progress.increment()
440 progress += 1
441 self.ui.progress(_('building cache'), progress,
442 total=len(newmainbranch))
443 bannotated = None
442 bannotated = None
444 if len(pl) == 2 and self.opts.followmerge: # merge
443 if len(pl) == 2 and self.opts.followmerge: # merge
445 bannotated = curr[0]
444 bannotated = curr[0]
446 if blocks is None: # no parents, add an empty one
445 if blocks is None: # no parents, add an empty one
447 blocks = list(self._diffblocks('', curr[1]))
446 blocks = list(self._diffblocks('', curr[1]))
448 self._appendrev(f, blocks, bannotated)
447 self._appendrev(f, blocks, bannotated)
449 elif showpath: # not append linelog, but we need to record path
448 elif showpath: # not append linelog, but we need to record path
450 self._node2path[f.node()] = f.path()
449 self._node2path[f.node()] = f.path()
451
450
452 if progress: # clean progress bar
451 progress.complete()
453 self.ui.write()
454
452
455 result = [
453 result = [
456 ((self.revmap.rev2hsh(fr) if isinstance(fr, int) else fr.node()), l)
454 ((self.revmap.rev2hsh(fr) if isinstance(fr, int) else fr.node()), l)
457 for fr, l in hist[revfctx][0]] # [(node, linenumber)]
455 for fr, l in hist[revfctx][0]] # [(node, linenumber)]
458 return self._refineannotateresult(result, revfctx, showpath, showlines)
456 return self._refineannotateresult(result, revfctx, showpath, showlines)
459
457
460 def canannotatedirectly(self, rev):
458 def canannotatedirectly(self, rev):
461 """(str) -> bool, fctx or node.
459 """(str) -> bool, fctx or node.
462 return (True, f) if we can annotate without updating the linelog, pass
460 return (True, f) if we can annotate without updating the linelog, pass
463 f to annotatedirectly.
461 f to annotatedirectly.
464 return (False, f) if we need extra calculation. f is the fctx resolved
462 return (False, f) if we need extra calculation. f is the fctx resolved
465 from rev.
463 from rev.
466 """
464 """
467 result = True
465 result = True
468 f = None
466 f = None
469 if not isinstance(rev, int) and rev is not None:
467 if not isinstance(rev, int) and rev is not None:
470 hsh = {20: bytes, 40: node.bin}.get(len(rev), lambda x: None)(rev)
468 hsh = {20: bytes, 40: node.bin}.get(len(rev), lambda x: None)(rev)
471 if hsh is not None and (hsh, self.path) in self.revmap:
469 if hsh is not None and (hsh, self.path) in self.revmap:
472 f = hsh
470 f = hsh
473 if f is None:
471 if f is None:
474 adjustctx = 'linkrev' if self._perfhack else True
472 adjustctx = 'linkrev' if self._perfhack else True
475 f = self._resolvefctx(rev, adjustctx=adjustctx, resolverev=True)
473 f = self._resolvefctx(rev, adjustctx=adjustctx, resolverev=True)
476 result = f in self.revmap
474 result = f in self.revmap
477 if not result and self._perfhack:
475 if not result and self._perfhack:
478 # redo the resolution without perfhack - as we are going to
476 # redo the resolution without perfhack - as we are going to
479 # do write operations, we need a correct fctx.
477 # do write operations, we need a correct fctx.
480 f = self._resolvefctx(rev, adjustctx=True, resolverev=True)
478 f = self._resolvefctx(rev, adjustctx=True, resolverev=True)
481 return result, f
479 return result, f
482
480
483 def annotatealllines(self, rev, showpath=False, showlines=False):
481 def annotatealllines(self, rev, showpath=False, showlines=False):
484 """(rev : str) -> [(node : str, linenum : int, path : str)]
482 """(rev : str) -> [(node : str, linenum : int, path : str)]
485
483
486 the result has the same format with annotate, but include all (including
484 the result has the same format with annotate, but include all (including
487 deleted) lines up to rev. call this after calling annotate(rev, ...) for
485 deleted) lines up to rev. call this after calling annotate(rev, ...) for
488 better performance and accuracy.
486 better performance and accuracy.
489 """
487 """
490 revfctx = self._resolvefctx(rev, resolverev=True, adjustctx=True)
488 revfctx = self._resolvefctx(rev, resolverev=True, adjustctx=True)
491
489
492 # find a chain from rev to anything in the mainbranch
490 # find a chain from rev to anything in the mainbranch
493 if revfctx not in self.revmap:
491 if revfctx not in self.revmap:
494 chain = [revfctx]
492 chain = [revfctx]
495 a = ''
493 a = ''
496 while True:
494 while True:
497 f = chain[-1]
495 f = chain[-1]
498 pl = self._parentfunc(f)
496 pl = self._parentfunc(f)
499 if not pl:
497 if not pl:
500 break
498 break
501 if pl[0] in self.revmap:
499 if pl[0] in self.revmap:
502 a = pl[0].data()
500 a = pl[0].data()
503 break
501 break
504 chain.append(pl[0])
502 chain.append(pl[0])
505
503
506 # both self.linelog and self.revmap is backed by filesystem. now
504 # both self.linelog and self.revmap is backed by filesystem. now
507 # we want to modify them but do not want to write changes back to
505 # we want to modify them but do not want to write changes back to
508 # files. so we create in-memory objects and copy them. it's like
506 # files. so we create in-memory objects and copy them. it's like
509 # a "fork".
507 # a "fork".
510 linelog = linelogmod.linelog()
508 linelog = linelogmod.linelog()
511 linelog.copyfrom(self.linelog)
509 linelog.copyfrom(self.linelog)
512 linelog.annotate(linelog.maxrev)
510 linelog.annotate(linelog.maxrev)
513 revmap = revmapmod.revmap()
511 revmap = revmapmod.revmap()
514 revmap.copyfrom(self.revmap)
512 revmap.copyfrom(self.revmap)
515
513
516 for f in reversed(chain):
514 for f in reversed(chain):
517 b = f.data()
515 b = f.data()
518 blocks = list(self._diffblocks(a, b))
516 blocks = list(self._diffblocks(a, b))
519 self._doappendrev(linelog, revmap, f, blocks)
517 self._doappendrev(linelog, revmap, f, blocks)
520 a = b
518 a = b
521 else:
519 else:
522 # fastpath: use existing linelog, revmap as we don't write to them
520 # fastpath: use existing linelog, revmap as we don't write to them
523 linelog = self.linelog
521 linelog = self.linelog
524 revmap = self.revmap
522 revmap = self.revmap
525
523
526 lines = linelog.getalllines()
524 lines = linelog.getalllines()
527 hsh = revfctx.node()
525 hsh = revfctx.node()
528 llrev = revmap.hsh2rev(hsh)
526 llrev = revmap.hsh2rev(hsh)
529 result = [(revmap.rev2hsh(r), l) for r, l in lines if r <= llrev]
527 result = [(revmap.rev2hsh(r), l) for r, l in lines if r <= llrev]
530 # cannot use _refineannotateresult since we need custom logic for
528 # cannot use _refineannotateresult since we need custom logic for
531 # resolving line contents
529 # resolving line contents
532 if showpath:
530 if showpath:
533 result = self._addpathtoresult(result, revmap)
531 result = self._addpathtoresult(result, revmap)
534 if showlines:
532 if showlines:
535 linecontents = self._resolvelines(result, revmap, linelog)
533 linecontents = self._resolvelines(result, revmap, linelog)
536 result = (result, linecontents)
534 result = (result, linecontents)
537 return result
535 return result
538
536
539 def _resolvelines(self, annotateresult, revmap, linelog):
537 def _resolvelines(self, annotateresult, revmap, linelog):
540 """(annotateresult) -> [line]. designed for annotatealllines.
538 """(annotateresult) -> [line]. designed for annotatealllines.
541 this is probably the most inefficient code in the whole fastannotate
539 this is probably the most inefficient code in the whole fastannotate
542 directory. but we have made a decision that the linelog does not
540 directory. but we have made a decision that the linelog does not
543 store line contents. so getting them requires random accesses to
541 store line contents. so getting them requires random accesses to
544 the revlog data, since they can be many, it can be very slow.
542 the revlog data, since they can be many, it can be very slow.
545 """
543 """
546 # [llrev]
544 # [llrev]
547 revs = [revmap.hsh2rev(l[0]) for l in annotateresult]
545 revs = [revmap.hsh2rev(l[0]) for l in annotateresult]
548 result = [None] * len(annotateresult)
546 result = [None] * len(annotateresult)
549 # {(rev, linenum): [lineindex]}
547 # {(rev, linenum): [lineindex]}
550 key2idxs = collections.defaultdict(list)
548 key2idxs = collections.defaultdict(list)
551 for i in pycompat.xrange(len(result)):
549 for i in pycompat.xrange(len(result)):
552 key2idxs[(revs[i], annotateresult[i][1])].append(i)
550 key2idxs[(revs[i], annotateresult[i][1])].append(i)
553 while key2idxs:
551 while key2idxs:
554 # find an unresolved line and its linelog rev to annotate
552 # find an unresolved line and its linelog rev to annotate
555 hsh = None
553 hsh = None
556 try:
554 try:
557 for (rev, _linenum), idxs in key2idxs.iteritems():
555 for (rev, _linenum), idxs in key2idxs.iteritems():
558 if revmap.rev2flag(rev) & revmapmod.sidebranchflag:
556 if revmap.rev2flag(rev) & revmapmod.sidebranchflag:
559 continue
557 continue
560 hsh = annotateresult[idxs[0]][0]
558 hsh = annotateresult[idxs[0]][0]
561 break
559 break
562 except StopIteration: # no more unresolved lines
560 except StopIteration: # no more unresolved lines
563 return result
561 return result
564 if hsh is None:
562 if hsh is None:
565 # the remaining key2idxs are not in main branch, resolving them
563 # the remaining key2idxs are not in main branch, resolving them
566 # using the hard way...
564 # using the hard way...
567 revlines = {}
565 revlines = {}
568 for (rev, linenum), idxs in key2idxs.iteritems():
566 for (rev, linenum), idxs in key2idxs.iteritems():
569 if rev not in revlines:
567 if rev not in revlines:
570 hsh = annotateresult[idxs[0]][0]
568 hsh = annotateresult[idxs[0]][0]
571 if self.ui.debugflag:
569 if self.ui.debugflag:
572 self.ui.debug('fastannotate: reading %s line #%d '
570 self.ui.debug('fastannotate: reading %s line #%d '
573 'to resolve lines %r\n'
571 'to resolve lines %r\n'
574 % (node.short(hsh), linenum, idxs))
572 % (node.short(hsh), linenum, idxs))
575 fctx = self._resolvefctx(hsh, revmap.rev2path(rev))
573 fctx = self._resolvefctx(hsh, revmap.rev2path(rev))
576 lines = mdiff.splitnewlines(fctx.data())
574 lines = mdiff.splitnewlines(fctx.data())
577 revlines[rev] = lines
575 revlines[rev] = lines
578 for idx in idxs:
576 for idx in idxs:
579 result[idx] = revlines[rev][linenum]
577 result[idx] = revlines[rev][linenum]
580 assert all(x is not None for x in result)
578 assert all(x is not None for x in result)
581 return result
579 return result
582
580
583 # run the annotate and the lines should match to the file content
581 # run the annotate and the lines should match to the file content
584 self.ui.debug('fastannotate: annotate %s to resolve lines\n'
582 self.ui.debug('fastannotate: annotate %s to resolve lines\n'
585 % node.short(hsh))
583 % node.short(hsh))
586 linelog.annotate(rev)
584 linelog.annotate(rev)
587 fctx = self._resolvefctx(hsh, revmap.rev2path(rev))
585 fctx = self._resolvefctx(hsh, revmap.rev2path(rev))
588 annotated = linelog.annotateresult
586 annotated = linelog.annotateresult
589 lines = mdiff.splitnewlines(fctx.data())
587 lines = mdiff.splitnewlines(fctx.data())
590 if len(lines) != len(annotated):
588 if len(lines) != len(annotated):
591 raise faerror.CorruptedFileError('unexpected annotated lines')
589 raise faerror.CorruptedFileError('unexpected annotated lines')
592 # resolve lines from the annotate result
590 # resolve lines from the annotate result
593 for i, line in enumerate(lines):
591 for i, line in enumerate(lines):
594 k = annotated[i]
592 k = annotated[i]
595 if k in key2idxs:
593 if k in key2idxs:
596 for idx in key2idxs[k]:
594 for idx in key2idxs[k]:
597 result[idx] = line
595 result[idx] = line
598 del key2idxs[k]
596 del key2idxs[k]
599 return result
597 return result
600
598
601 def annotatedirectly(self, f, showpath, showlines):
599 def annotatedirectly(self, f, showpath, showlines):
602 """like annotate, but when we know that f is in linelog.
600 """like annotate, but when we know that f is in linelog.
603 f can be either a 20-char str (node) or a fctx. this is for perf - in
601 f can be either a 20-char str (node) or a fctx. this is for perf - in
604 the best case, the user provides a node and we don't need to read the
602 the best case, the user provides a node and we don't need to read the
605 filelog or construct any filecontext.
603 filelog or construct any filecontext.
606 """
604 """
607 if isinstance(f, str):
605 if isinstance(f, bytes):
608 hsh = f
606 hsh = f
609 else:
607 else:
610 hsh = f.node()
608 hsh = f.node()
611 llrev = self.revmap.hsh2rev(hsh)
609 llrev = self.revmap.hsh2rev(hsh)
612 if not llrev:
610 if not llrev:
613 raise faerror.CorruptedFileError('%s is not in revmap'
611 raise faerror.CorruptedFileError('%s is not in revmap'
614 % node.hex(hsh))
612 % node.hex(hsh))
615 if (self.revmap.rev2flag(llrev) & revmapmod.sidebranchflag) != 0:
613 if (self.revmap.rev2flag(llrev) & revmapmod.sidebranchflag) != 0:
616 raise faerror.CorruptedFileError('%s is not in revmap mainbranch'
614 raise faerror.CorruptedFileError('%s is not in revmap mainbranch'
617 % node.hex(hsh))
615 % node.hex(hsh))
618 self.linelog.annotate(llrev)
616 self.linelog.annotate(llrev)
619 result = [(self.revmap.rev2hsh(r), l)
617 result = [(self.revmap.rev2hsh(r), l)
620 for r, l in self.linelog.annotateresult]
618 for r, l in self.linelog.annotateresult]
621 return self._refineannotateresult(result, f, showpath, showlines)
619 return self._refineannotateresult(result, f, showpath, showlines)
622
620
623 def _refineannotateresult(self, result, f, showpath, showlines):
621 def _refineannotateresult(self, result, f, showpath, showlines):
624 """add the missing path or line contents, they can be expensive.
622 """add the missing path or line contents, they can be expensive.
625 f could be either node or fctx.
623 f could be either node or fctx.
626 """
624 """
627 if showpath:
625 if showpath:
628 result = self._addpathtoresult(result)
626 result = self._addpathtoresult(result)
629 if showlines:
627 if showlines:
630 if isinstance(f, str): # f: node or fctx
628 if isinstance(f, bytes): # f: node or fctx
631 llrev = self.revmap.hsh2rev(f)
629 llrev = self.revmap.hsh2rev(f)
632 fctx = self._resolvefctx(f, self.revmap.rev2path(llrev))
630 fctx = self._resolvefctx(f, self.revmap.rev2path(llrev))
633 else:
631 else:
634 fctx = f
632 fctx = f
635 lines = mdiff.splitnewlines(fctx.data())
633 lines = mdiff.splitnewlines(fctx.data())
636 if len(lines) != len(result): # linelog is probably corrupted
634 if len(lines) != len(result): # linelog is probably corrupted
637 raise faerror.CorruptedFileError()
635 raise faerror.CorruptedFileError()
638 result = (result, lines)
636 result = (result, lines)
639 return result
637 return result
640
638
641 def _appendrev(self, fctx, blocks, bannotated=None):
639 def _appendrev(self, fctx, blocks, bannotated=None):
642 self._doappendrev(self.linelog, self.revmap, fctx, blocks, bannotated)
640 self._doappendrev(self.linelog, self.revmap, fctx, blocks, bannotated)
643
641
644 def _diffblocks(self, a, b):
642 def _diffblocks(self, a, b):
645 return mdiff.allblocks(a, b, self.opts.diffopts)
643 return mdiff.allblocks(a, b, self.opts.diffopts)
646
644
647 @staticmethod
645 @staticmethod
648 def _doappendrev(linelog, revmap, fctx, blocks, bannotated=None):
646 def _doappendrev(linelog, revmap, fctx, blocks, bannotated=None):
649 """append a revision to linelog and revmap"""
647 """append a revision to linelog and revmap"""
650
648
651 def getllrev(f):
649 def getllrev(f):
652 """(fctx) -> int"""
650 """(fctx) -> int"""
653 # f should not be a linelog revision
651 # f should not be a linelog revision
654 if isinstance(f, int):
652 if isinstance(f, int):
655 raise error.ProgrammingError('f should not be an int')
653 raise error.ProgrammingError('f should not be an int')
656 # f is a fctx, allocate linelog rev on demand
654 # f is a fctx, allocate linelog rev on demand
657 hsh = f.node()
655 hsh = f.node()
658 rev = revmap.hsh2rev(hsh)
656 rev = revmap.hsh2rev(hsh)
659 if rev is None:
657 if rev is None:
660 rev = revmap.append(hsh, sidebranch=True, path=f.path())
658 rev = revmap.append(hsh, sidebranch=True, path=f.path())
661 return rev
659 return rev
662
660
663 # append sidebranch revisions to revmap
661 # append sidebranch revisions to revmap
664 siderevs = []
662 siderevs = []
665 siderevmap = {} # node: int
663 siderevmap = {} # node: int
666 if bannotated is not None:
664 if bannotated is not None:
667 for (a1, a2, b1, b2), op in blocks:
665 for (a1, a2, b1, b2), op in blocks:
668 if op != '=':
666 if op != '=':
669 # f could be either linelong rev, or fctx.
667 # f could be either linelong rev, or fctx.
670 siderevs += [f for f, l in bannotated[b1:b2]
668 siderevs += [f for f, l in bannotated[b1:b2]
671 if not isinstance(f, int)]
669 if not isinstance(f, int)]
672 siderevs = set(siderevs)
670 siderevs = set(siderevs)
673 if fctx in siderevs: # mainnode must be appended seperately
671 if fctx in siderevs: # mainnode must be appended seperately
674 siderevs.remove(fctx)
672 siderevs.remove(fctx)
675 for f in siderevs:
673 for f in siderevs:
676 siderevmap[f] = getllrev(f)
674 siderevmap[f] = getllrev(f)
677
675
678 # the changeset in the main branch, could be a merge
676 # the changeset in the main branch, could be a merge
679 llrev = revmap.append(fctx.node(), path=fctx.path())
677 llrev = revmap.append(fctx.node(), path=fctx.path())
680 siderevmap[fctx] = llrev
678 siderevmap[fctx] = llrev
681
679
682 for (a1, a2, b1, b2), op in reversed(blocks):
680 for (a1, a2, b1, b2), op in reversed(blocks):
683 if op == '=':
681 if op == '=':
684 continue
682 continue
685 if bannotated is None:
683 if bannotated is None:
686 linelog.replacelines(llrev, a1, a2, b1, b2)
684 linelog.replacelines(llrev, a1, a2, b1, b2)
687 else:
685 else:
688 blines = [((r if isinstance(r, int) else siderevmap[r]), l)
686 blines = [((r if isinstance(r, int) else siderevmap[r]), l)
689 for r, l in bannotated[b1:b2]]
687 for r, l in bannotated[b1:b2]]
690 linelog.replacelines_vec(llrev, a1, a2, blines)
688 linelog.replacelines_vec(llrev, a1, a2, blines)
691
689
692 def _addpathtoresult(self, annotateresult, revmap=None):
690 def _addpathtoresult(self, annotateresult, revmap=None):
693 """(revmap, [(node, linenum)]) -> [(node, linenum, path)]"""
691 """(revmap, [(node, linenum)]) -> [(node, linenum, path)]"""
694 if revmap is None:
692 if revmap is None:
695 revmap = self.revmap
693 revmap = self.revmap
696
694
697 def _getpath(nodeid):
695 def _getpath(nodeid):
698 path = self._node2path.get(nodeid)
696 path = self._node2path.get(nodeid)
699 if path is None:
697 if path is None:
700 path = revmap.rev2path(revmap.hsh2rev(nodeid))
698 path = revmap.rev2path(revmap.hsh2rev(nodeid))
701 self._node2path[nodeid] = path
699 self._node2path[nodeid] = path
702 return path
700 return path
703
701
704 return [(n, l, _getpath(n)) for n, l in annotateresult]
702 return [(n, l, _getpath(n)) for n, l in annotateresult]
705
703
706 def _checklastmasterhead(self, fctx):
704 def _checklastmasterhead(self, fctx):
707 """check if fctx is the master's head last time, raise if not"""
705 """check if fctx is the master's head last time, raise if not"""
708 if fctx is None:
706 if fctx is None:
709 llrev = 0
707 llrev = 0
710 else:
708 else:
711 llrev = self.revmap.hsh2rev(fctx.node())
709 llrev = self.revmap.hsh2rev(fctx.node())
712 if not llrev:
710 if not llrev:
713 raise faerror.CannotReuseError()
711 raise faerror.CannotReuseError()
714 if self.linelog.maxrev != llrev:
712 if self.linelog.maxrev != llrev:
715 raise faerror.CannotReuseError()
713 raise faerror.CannotReuseError()
716
714
717 @util.propertycache
715 @util.propertycache
718 def _parentfunc(self):
716 def _parentfunc(self):
719 """-> (fctx) -> [fctx]"""
717 """-> (fctx) -> [fctx]"""
720 followrename = self.opts.followrename
718 followrename = self.opts.followrename
721 followmerge = self.opts.followmerge
719 followmerge = self.opts.followmerge
722 def parents(f):
720 def parents(f):
723 pl = _parents(f, follow=followrename)
721 pl = _parents(f, follow=followrename)
724 if not followmerge:
722 if not followmerge:
725 pl = pl[:1]
723 pl = pl[:1]
726 return pl
724 return pl
727 return parents
725 return parents
728
726
729 @util.propertycache
727 @util.propertycache
730 def _perfhack(self):
728 def _perfhack(self):
731 return self.ui.configbool('fastannotate', 'perfhack')
729 return self.ui.configbool('fastannotate', 'perfhack')
732
730
733 def _resolvefctx(self, rev, path=None, **kwds):
731 def _resolvefctx(self, rev, path=None, **kwds):
734 return resolvefctx(self.repo, rev, (path or self.path), **kwds)
732 return resolvefctx(self.repo, rev, (path or self.path), **kwds)
735
733
736 def _unlinkpaths(paths):
734 def _unlinkpaths(paths):
737 """silent, best-effort unlink"""
735 """silent, best-effort unlink"""
738 for path in paths:
736 for path in paths:
739 try:
737 try:
740 util.unlink(path)
738 util.unlink(path)
741 except OSError:
739 except OSError:
742 pass
740 pass
743
741
744 class pathhelper(object):
742 class pathhelper(object):
745 """helper for getting paths for lockfile, linelog and revmap"""
743 """helper for getting paths for lockfile, linelog and revmap"""
746
744
747 def __init__(self, repo, path, opts=defaultopts):
745 def __init__(self, repo, path, opts=defaultopts):
748 # different options use different directories
746 # different options use different directories
749 self._vfspath = os.path.join('fastannotate',
747 self._vfspath = os.path.join('fastannotate',
750 opts.shortstr, encodedir(path))
748 opts.shortstr, encodedir(path))
751 self._repo = repo
749 self._repo = repo
752
750
753 @property
751 @property
754 def dirname(self):
752 def dirname(self):
755 return os.path.dirname(self._repo.vfs.join(self._vfspath))
753 return os.path.dirname(self._repo.vfs.join(self._vfspath))
756
754
757 @property
755 @property
758 def linelogpath(self):
756 def linelogpath(self):
759 return self._repo.vfs.join(self._vfspath + '.l')
757 return self._repo.vfs.join(self._vfspath + '.l')
760
758
761 def lock(self):
759 def lock(self):
762 return lockmod.lock(self._repo.vfs, self._vfspath + '.lock')
760 return lockmod.lock(self._repo.vfs, self._vfspath + '.lock')
763
761
764 @contextlib.contextmanager
762 @contextlib.contextmanager
765 def _lockflock(self):
763 def _lockflock(self):
766 """the same as 'lock' but use flock instead of lockmod.lock, to avoid
764 """the same as 'lock' but use flock instead of lockmod.lock, to avoid
767 creating temporary symlinks."""
765 creating temporary symlinks."""
768 import fcntl
766 import fcntl
769 lockpath = self.linelogpath
767 lockpath = self.linelogpath
770 util.makedirs(os.path.dirname(lockpath))
768 util.makedirs(os.path.dirname(lockpath))
771 lockfd = os.open(lockpath, os.O_RDONLY | os.O_CREAT, 0o664)
769 lockfd = os.open(lockpath, os.O_RDONLY | os.O_CREAT, 0o664)
772 fcntl.flock(lockfd, fcntl.LOCK_EX)
770 fcntl.flock(lockfd, fcntl.LOCK_EX)
773 try:
771 try:
774 yield
772 yield
775 finally:
773 finally:
776 fcntl.flock(lockfd, fcntl.LOCK_UN)
774 fcntl.flock(lockfd, fcntl.LOCK_UN)
777 os.close(lockfd)
775 os.close(lockfd)
778
776
779 @property
777 @property
780 def revmappath(self):
778 def revmappath(self):
781 return self._repo.vfs.join(self._vfspath + '.m')
779 return self._repo.vfs.join(self._vfspath + '.m')
782
780
783 @contextlib.contextmanager
781 @contextlib.contextmanager
784 def annotatecontext(repo, path, opts=defaultopts, rebuild=False):
782 def annotatecontext(repo, path, opts=defaultopts, rebuild=False):
785 """context needed to perform (fast) annotate on a file
783 """context needed to perform (fast) annotate on a file
786
784
787 an annotatecontext of a single file consists of two structures: the
785 an annotatecontext of a single file consists of two structures: the
788 linelog and the revmap. this function takes care of locking. only 1
786 linelog and the revmap. this function takes care of locking. only 1
789 process is allowed to write that file's linelog and revmap at a time.
787 process is allowed to write that file's linelog and revmap at a time.
790
788
791 when something goes wrong, this function will assume the linelog and the
789 when something goes wrong, this function will assume the linelog and the
792 revmap are in a bad state, and remove them from disk.
790 revmap are in a bad state, and remove them from disk.
793
791
794 use this function in the following way:
792 use this function in the following way:
795
793
796 with annotatecontext(...) as actx:
794 with annotatecontext(...) as actx:
797 actx. ....
795 actx. ....
798 """
796 """
799 helper = pathhelper(repo, path, opts)
797 helper = pathhelper(repo, path, opts)
800 util.makedirs(helper.dirname)
798 util.makedirs(helper.dirname)
801 revmappath = helper.revmappath
799 revmappath = helper.revmappath
802 linelogpath = helper.linelogpath
800 linelogpath = helper.linelogpath
803 actx = None
801 actx = None
804 try:
802 try:
805 with helper.lock():
803 with helper.lock():
806 actx = _annotatecontext(repo, path, linelogpath, revmappath, opts)
804 actx = _annotatecontext(repo, path, linelogpath, revmappath, opts)
807 if rebuild:
805 if rebuild:
808 actx.rebuild()
806 actx.rebuild()
809 yield actx
807 yield actx
810 except Exception:
808 except Exception:
811 if actx is not None:
809 if actx is not None:
812 actx.rebuild()
810 actx.rebuild()
813 repo.ui.debug('fastannotate: %s: cache broken and deleted\n' % path)
811 repo.ui.debug('fastannotate: %s: cache broken and deleted\n' % path)
814 raise
812 raise
815 finally:
813 finally:
816 if actx is not None:
814 if actx is not None:
817 actx.close()
815 actx.close()
818
816
819 def fctxannotatecontext(fctx, follow=True, diffopts=None, rebuild=False):
817 def fctxannotatecontext(fctx, follow=True, diffopts=None, rebuild=False):
820 """like annotatecontext but get the context from a fctx. convenient when
818 """like annotatecontext but get the context from a fctx. convenient when
821 used in fctx.annotate
819 used in fctx.annotate
822 """
820 """
823 repo = fctx._repo
821 repo = fctx._repo
824 path = fctx._path
822 path = fctx._path
825 if repo.ui.configbool('fastannotate', 'forcefollow', True):
823 if repo.ui.configbool('fastannotate', 'forcefollow', True):
826 follow = True
824 follow = True
827 aopts = annotateopts(diffopts=diffopts, followrename=follow)
825 aopts = annotateopts(diffopts=diffopts, followrename=follow)
828 return annotatecontext(repo, path, aopts, rebuild)
826 return annotatecontext(repo, path, aopts, rebuild)
@@ -1,161 +1,164 b''
1 # Copyright 2016-present Facebook. All Rights Reserved.
1 # Copyright 2016-present Facebook. All Rights Reserved.
2 #
2 #
3 # format: defines the format used to output annotate result
3 # format: defines the format used to output annotate result
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 from mercurial import (
9 from mercurial import (
10 encoding,
10 encoding,
11 node,
11 node,
12 pycompat,
12 pycompat,
13 templatefilters,
13 templatefilters,
14 util,
14 util,
15 )
15 )
16 from mercurial.utils import (
16 from mercurial.utils import (
17 dateutil,
17 dateutil,
18 )
18 )
19
19
20 # imitating mercurial.commands.annotate, not using the vanilla formatter since
20 # imitating mercurial.commands.annotate, not using the vanilla formatter since
21 # the data structures are a bit different, and we have some fast paths.
21 # the data structures are a bit different, and we have some fast paths.
22 class defaultformatter(object):
22 class defaultformatter(object):
23 """the default formatter that does leftpad and support some common flags"""
23 """the default formatter that does leftpad and support some common flags"""
24
24
25 def __init__(self, ui, repo, opts):
25 def __init__(self, ui, repo, opts):
26 self.ui = ui
26 self.ui = ui
27 self.opts = opts
27 self.opts = opts
28
28
29 if ui.quiet:
29 if ui.quiet:
30 datefunc = dateutil.shortdate
30 datefunc = dateutil.shortdate
31 else:
31 else:
32 datefunc = dateutil.datestr
32 datefunc = dateutil.datestr
33 datefunc = util.cachefunc(datefunc)
33 datefunc = util.cachefunc(datefunc)
34 getctx = util.cachefunc(lambda x: repo[x[0]])
34 getctx = util.cachefunc(lambda x: repo[x[0]])
35 hexfunc = self._hexfunc
35 hexfunc = self._hexfunc
36
36
37 # special handling working copy "changeset" and "rev" functions
37 # special handling working copy "changeset" and "rev" functions
38 if self.opts.get('rev') == 'wdir()':
38 if self.opts.get('rev') == 'wdir()':
39 orig = hexfunc
39 orig = hexfunc
40 hexfunc = lambda x: None if x is None else orig(x)
40 hexfunc = lambda x: None if x is None else orig(x)
41 wnode = hexfunc(repo[None].p1().node()) + '+'
41 wnode = hexfunc(repo[None].p1().node()) + '+'
42 wrev = str(repo[None].p1().rev())
42 wrev = '%d' % repo[None].p1().rev()
43 wrevpad = ''
43 wrevpad = ''
44 if not opts.get('changeset'): # only show + if changeset is hidden
44 if not opts.get('changeset'): # only show + if changeset is hidden
45 wrev += '+'
45 wrev += '+'
46 wrevpad = ' '
46 wrevpad = ' '
47 revenc = lambda x: wrev if x is None else str(x) + wrevpad
47 revenc = lambda x: wrev if x is None else ('%d' % x) + wrevpad
48 csetenc = lambda x: wnode if x is None else str(x) + ' '
48 def csetenc(x):
49 if x is None:
50 return wnode
51 return pycompat.bytestr(x) + ' '
49 else:
52 else:
50 revenc = csetenc = str
53 revenc = csetenc = pycompat.bytestr
51
54
52 # opt name, separator, raw value (for json/plain), encoder (for plain)
55 # opt name, separator, raw value (for json/plain), encoder (for plain)
53 opmap = [('user', ' ', lambda x: getctx(x).user(), ui.shortuser),
56 opmap = [('user', ' ', lambda x: getctx(x).user(), ui.shortuser),
54 ('number', ' ', lambda x: getctx(x).rev(), revenc),
57 ('number', ' ', lambda x: getctx(x).rev(), revenc),
55 ('changeset', ' ', lambda x: hexfunc(x[0]), csetenc),
58 ('changeset', ' ', lambda x: hexfunc(x[0]), csetenc),
56 ('date', ' ', lambda x: getctx(x).date(), datefunc),
59 ('date', ' ', lambda x: getctx(x).date(), datefunc),
57 ('file', ' ', lambda x: x[2], str),
60 ('file', ' ', lambda x: x[2], pycompat.bytestr),
58 ('line_number', ':', lambda x: x[1] + 1, str)]
61 ('line_number', ':', lambda x: x[1] + 1, pycompat.bytestr)]
59 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
62 fieldnamemap = {'number': 'rev', 'changeset': 'node'}
60 funcmap = [(get, sep, fieldnamemap.get(op, op), enc)
63 funcmap = [(get, sep, fieldnamemap.get(op, op), enc)
61 for op, sep, get, enc in opmap
64 for op, sep, get, enc in opmap
62 if opts.get(op)]
65 if opts.get(op)]
63 # no separator for first column
66 # no separator for first column
64 funcmap[0] = list(funcmap[0])
67 funcmap[0] = list(funcmap[0])
65 funcmap[0][1] = ''
68 funcmap[0][1] = ''
66 self.funcmap = funcmap
69 self.funcmap = funcmap
67
70
68 def write(self, annotatedresult, lines=None, existinglines=None):
71 def write(self, annotatedresult, lines=None, existinglines=None):
69 """(annotateresult, [str], set([rev, linenum])) -> None. write output.
72 """(annotateresult, [str], set([rev, linenum])) -> None. write output.
70 annotateresult can be [(node, linenum, path)], or [(node, linenum)]
73 annotateresult can be [(node, linenum, path)], or [(node, linenum)]
71 """
74 """
72 pieces = [] # [[str]]
75 pieces = [] # [[str]]
73 maxwidths = [] # [int]
76 maxwidths = [] # [int]
74
77
75 # calculate padding
78 # calculate padding
76 for f, sep, name, enc in self.funcmap:
79 for f, sep, name, enc in self.funcmap:
77 l = [enc(f(x)) for x in annotatedresult]
80 l = [enc(f(x)) for x in annotatedresult]
78 pieces.append(l)
81 pieces.append(l)
79 if name in ['node', 'date']: # node and date has fixed size
82 if name in ['node', 'date']: # node and date has fixed size
80 l = l[:1]
83 l = l[:1]
81 widths = pycompat.maplist(encoding.colwidth, set(l))
84 widths = pycompat.maplist(encoding.colwidth, set(l))
82 maxwidth = (max(widths) if widths else 0)
85 maxwidth = (max(widths) if widths else 0)
83 maxwidths.append(maxwidth)
86 maxwidths.append(maxwidth)
84
87
85 # buffered output
88 # buffered output
86 result = ''
89 result = ''
87 for i in pycompat.xrange(len(annotatedresult)):
90 for i in pycompat.xrange(len(annotatedresult)):
88 for j, p in enumerate(pieces):
91 for j, p in enumerate(pieces):
89 sep = self.funcmap[j][1]
92 sep = self.funcmap[j][1]
90 padding = ' ' * (maxwidths[j] - len(p[i]))
93 padding = ' ' * (maxwidths[j] - len(p[i]))
91 result += sep + padding + p[i]
94 result += sep + padding + p[i]
92 if lines:
95 if lines:
93 if existinglines is None:
96 if existinglines is None:
94 result += ': ' + lines[i]
97 result += ': ' + lines[i]
95 else: # extra formatting showing whether a line exists
98 else: # extra formatting showing whether a line exists
96 key = (annotatedresult[i][0], annotatedresult[i][1])
99 key = (annotatedresult[i][0], annotatedresult[i][1])
97 if key in existinglines:
100 if key in existinglines:
98 result += ': ' + lines[i]
101 result += ': ' + lines[i]
99 else:
102 else:
100 result += ': ' + self.ui.label('-' + lines[i],
103 result += ': ' + self.ui.label('-' + lines[i],
101 'diff.deleted')
104 'diff.deleted')
102
105
103 if result[-1] != '\n':
106 if result[-1:] != '\n':
104 result += '\n'
107 result += '\n'
105
108
106 self.ui.write(result)
109 self.ui.write(result)
107
110
108 @util.propertycache
111 @util.propertycache
109 def _hexfunc(self):
112 def _hexfunc(self):
110 if self.ui.debugflag or self.opts.get('long_hash'):
113 if self.ui.debugflag or self.opts.get('long_hash'):
111 return node.hex
114 return node.hex
112 else:
115 else:
113 return node.short
116 return node.short
114
117
115 def end(self):
118 def end(self):
116 pass
119 pass
117
120
118 class jsonformatter(defaultformatter):
121 class jsonformatter(defaultformatter):
119 def __init__(self, ui, repo, opts):
122 def __init__(self, ui, repo, opts):
120 super(jsonformatter, self).__init__(ui, repo, opts)
123 super(jsonformatter, self).__init__(ui, repo, opts)
121 self.ui.write('[')
124 self.ui.write('[')
122 self.needcomma = False
125 self.needcomma = False
123
126
124 def write(self, annotatedresult, lines=None, existinglines=None):
127 def write(self, annotatedresult, lines=None, existinglines=None):
125 if annotatedresult:
128 if annotatedresult:
126 self._writecomma()
129 self._writecomma()
127
130
128 pieces = [(name, map(f, annotatedresult))
131 pieces = [(name, pycompat.maplist(f, annotatedresult))
129 for f, sep, name, enc in self.funcmap]
132 for f, sep, name, enc in self.funcmap]
130 if lines is not None:
133 if lines is not None:
131 pieces.append(('line', lines))
134 pieces.append(('line', lines))
132 pieces.sort()
135 pieces.sort()
133
136
134 seps = [','] * len(pieces[:-1]) + ['']
137 seps = [','] * len(pieces[:-1]) + ['']
135
138
136 result = ''
139 result = ''
137 lasti = len(annotatedresult) - 1
140 lasti = len(annotatedresult) - 1
138 for i in pycompat.xrange(len(annotatedresult)):
141 for i in pycompat.xrange(len(annotatedresult)):
139 result += '\n {\n'
142 result += '\n {\n'
140 for j, p in enumerate(pieces):
143 for j, p in enumerate(pieces):
141 k, vs = p
144 k, vs = p
142 result += (' "%s": %s%s\n'
145 result += (' "%s": %s%s\n'
143 % (k, templatefilters.json(vs[i], paranoid=False),
146 % (k, templatefilters.json(vs[i], paranoid=False),
144 seps[j]))
147 seps[j]))
145 result += ' }%s' % ('' if i == lasti else ',')
148 result += ' }%s' % ('' if i == lasti else ',')
146 if lasti >= 0:
149 if lasti >= 0:
147 self.needcomma = True
150 self.needcomma = True
148
151
149 self.ui.write(result)
152 self.ui.write(result)
150
153
151 def _writecomma(self):
154 def _writecomma(self):
152 if self.needcomma:
155 if self.needcomma:
153 self.ui.write(',')
156 self.ui.write(',')
154 self.needcomma = False
157 self.needcomma = False
155
158
156 @util.propertycache
159 @util.propertycache
157 def _hexfunc(self):
160 def _hexfunc(self):
158 return node.hex
161 return node.hex
159
162
160 def end(self):
163 def end(self):
161 self.ui.write('\n]\n')
164 self.ui.write('\n]\n')
@@ -1,228 +1,228 b''
1 # Copyright 2016-present Facebook. All Rights Reserved.
1 # Copyright 2016-present Facebook. All Rights Reserved.
2 #
2 #
3 # protocol: logic for a server providing fastannotate support
3 # protocol: logic for a server providing fastannotate support
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import contextlib
9 import contextlib
10 import os
10 import os
11
11
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13 from mercurial import (
13 from mercurial import (
14 error,
14 error,
15 extensions,
15 extensions,
16 hg,
16 hg,
17 util,
17 util,
18 wireprotov1peer,
18 wireprotov1peer,
19 wireprotov1server,
19 wireprotov1server,
20 )
20 )
21 from . import context
21 from . import context
22
22
23 # common
23 # common
24
24
25 def _getmaster(ui):
25 def _getmaster(ui):
26 """get the mainbranch, and enforce it is set"""
26 """get the mainbranch, and enforce it is set"""
27 master = ui.config('fastannotate', 'mainbranch')
27 master = ui.config('fastannotate', 'mainbranch')
28 if not master:
28 if not master:
29 raise error.Abort(_('fastannotate.mainbranch is required '
29 raise error.Abort(_('fastannotate.mainbranch is required '
30 'for both the client and the server'))
30 'for both the client and the server'))
31 return master
31 return master
32
32
33 # server-side
33 # server-side
34
34
35 def _capabilities(orig, repo, proto):
35 def _capabilities(orig, repo, proto):
36 result = orig(repo, proto)
36 result = orig(repo, proto)
37 result.append('getannotate')
37 result.append('getannotate')
38 return result
38 return result
39
39
40 def _getannotate(repo, proto, path, lastnode):
40 def _getannotate(repo, proto, path, lastnode):
41 # output:
41 # output:
42 # FILE := vfspath + '\0' + str(size) + '\0' + content
42 # FILE := vfspath + '\0' + str(size) + '\0' + content
43 # OUTPUT := '' | FILE + OUTPUT
43 # OUTPUT := '' | FILE + OUTPUT
44 result = ''
44 result = ''
45 buildondemand = repo.ui.configbool('fastannotate', 'serverbuildondemand',
45 buildondemand = repo.ui.configbool('fastannotate', 'serverbuildondemand',
46 True)
46 True)
47 with context.annotatecontext(repo, path) as actx:
47 with context.annotatecontext(repo, path) as actx:
48 if buildondemand:
48 if buildondemand:
49 # update before responding to the client
49 # update before responding to the client
50 master = _getmaster(repo.ui)
50 master = _getmaster(repo.ui)
51 try:
51 try:
52 if not actx.isuptodate(master):
52 if not actx.isuptodate(master):
53 actx.annotate(master, master)
53 actx.annotate(master, master)
54 except Exception:
54 except Exception:
55 # non-fast-forward move or corrupted. rebuild automically.
55 # non-fast-forward move or corrupted. rebuild automically.
56 actx.rebuild()
56 actx.rebuild()
57 try:
57 try:
58 actx.annotate(master, master)
58 actx.annotate(master, master)
59 except Exception:
59 except Exception:
60 actx.rebuild() # delete files
60 actx.rebuild() # delete files
61 finally:
61 finally:
62 # although the "with" context will also do a close/flush, we
62 # although the "with" context will also do a close/flush, we
63 # need to do it early so we can send the correct respond to
63 # need to do it early so we can send the correct respond to
64 # client.
64 # client.
65 actx.close()
65 actx.close()
66 # send back the full content of revmap and linelog, in the future we
66 # send back the full content of revmap and linelog, in the future we
67 # may want to do some rsync-like fancy updating.
67 # may want to do some rsync-like fancy updating.
68 # the lastnode check is not necessary if the client and the server
68 # the lastnode check is not necessary if the client and the server
69 # agree where the main branch is.
69 # agree where the main branch is.
70 if actx.lastnode != lastnode:
70 if actx.lastnode != lastnode:
71 for p in [actx.revmappath, actx.linelogpath]:
71 for p in [actx.revmappath, actx.linelogpath]:
72 if not os.path.exists(p):
72 if not os.path.exists(p):
73 continue
73 continue
74 content = ''
74 content = ''
75 with open(p, 'rb') as f:
75 with open(p, 'rb') as f:
76 content = f.read()
76 content = f.read()
77 vfsbaselen = len(repo.vfs.base + '/')
77 vfsbaselen = len(repo.vfs.base + '/')
78 relpath = p[vfsbaselen:]
78 relpath = p[vfsbaselen:]
79 result += '%s\0%d\0%s' % (relpath, len(content), content)
79 result += '%s\0%d\0%s' % (relpath, len(content), content)
80 return result
80 return result
81
81
82 def _registerwireprotocommand():
82 def _registerwireprotocommand():
83 if 'getannotate' in wireprotov1server.commands:
83 if 'getannotate' in wireprotov1server.commands:
84 return
84 return
85 wireprotov1server.wireprotocommand(
85 wireprotov1server.wireprotocommand(
86 'getannotate', 'path lastnode')(_getannotate)
86 'getannotate', 'path lastnode')(_getannotate)
87
87
88 def serveruisetup(ui):
88 def serveruisetup(ui):
89 _registerwireprotocommand()
89 _registerwireprotocommand()
90 extensions.wrapfunction(wireprotov1server, '_capabilities', _capabilities)
90 extensions.wrapfunction(wireprotov1server, '_capabilities', _capabilities)
91
91
92 # client-side
92 # client-side
93
93
94 def _parseresponse(payload):
94 def _parseresponse(payload):
95 result = {}
95 result = {}
96 i = 0
96 i = 0
97 l = len(payload) - 1
97 l = len(payload) - 1
98 state = 0 # 0: vfspath, 1: size
98 state = 0 # 0: vfspath, 1: size
99 vfspath = size = ''
99 vfspath = size = ''
100 while i < l:
100 while i < l:
101 ch = payload[i]
101 ch = payload[i:i + 1]
102 if ch == '\0':
102 if ch == '\0':
103 if state == 1:
103 if state == 1:
104 result[vfspath] = buffer(payload, i + 1, int(size))
104 result[vfspath] = payload[i + 1:i + 1 + int(size)]
105 i += int(size)
105 i += int(size)
106 state = 0
106 state = 0
107 vfspath = size = ''
107 vfspath = size = ''
108 elif state == 0:
108 elif state == 0:
109 state = 1
109 state = 1
110 else:
110 else:
111 if state == 1:
111 if state == 1:
112 size += ch
112 size += ch
113 elif state == 0:
113 elif state == 0:
114 vfspath += ch
114 vfspath += ch
115 i += 1
115 i += 1
116 return result
116 return result
117
117
118 def peersetup(ui, peer):
118 def peersetup(ui, peer):
119 class fastannotatepeer(peer.__class__):
119 class fastannotatepeer(peer.__class__):
120 @wireprotov1peer.batchable
120 @wireprotov1peer.batchable
121 def getannotate(self, path, lastnode=None):
121 def getannotate(self, path, lastnode=None):
122 if not self.capable('getannotate'):
122 if not self.capable('getannotate'):
123 ui.warn(_('remote peer cannot provide annotate cache\n'))
123 ui.warn(_('remote peer cannot provide annotate cache\n'))
124 yield None, None
124 yield None, None
125 else:
125 else:
126 args = {'path': path, 'lastnode': lastnode or ''}
126 args = {'path': path, 'lastnode': lastnode or ''}
127 f = wireprotov1peer.future()
127 f = wireprotov1peer.future()
128 yield args, f
128 yield args, f
129 yield _parseresponse(f.value)
129 yield _parseresponse(f.value)
130 peer.__class__ = fastannotatepeer
130 peer.__class__ = fastannotatepeer
131
131
132 @contextlib.contextmanager
132 @contextlib.contextmanager
133 def annotatepeer(repo):
133 def annotatepeer(repo):
134 ui = repo.ui
134 ui = repo.ui
135
135
136 remotepath = ui.expandpath(
136 remotepath = ui.expandpath(
137 ui.config('fastannotate', 'remotepath', 'default'))
137 ui.config('fastannotate', 'remotepath', 'default'))
138 peer = hg.peer(ui, {}, remotepath)
138 peer = hg.peer(ui, {}, remotepath)
139
139
140 try:
140 try:
141 yield peer
141 yield peer
142 finally:
142 finally:
143 peer.close()
143 peer.close()
144
144
145 def clientfetch(repo, paths, lastnodemap=None, peer=None):
145 def clientfetch(repo, paths, lastnodemap=None, peer=None):
146 """download annotate cache from the server for paths"""
146 """download annotate cache from the server for paths"""
147 if not paths:
147 if not paths:
148 return
148 return
149
149
150 if peer is None:
150 if peer is None:
151 with annotatepeer(repo) as peer:
151 with annotatepeer(repo) as peer:
152 return clientfetch(repo, paths, lastnodemap, peer)
152 return clientfetch(repo, paths, lastnodemap, peer)
153
153
154 if lastnodemap is None:
154 if lastnodemap is None:
155 lastnodemap = {}
155 lastnodemap = {}
156
156
157 ui = repo.ui
157 ui = repo.ui
158 results = []
158 results = []
159 with peer.commandexecutor() as batcher:
159 with peer.commandexecutor() as batcher:
160 ui.debug('fastannotate: requesting %d files\n' % len(paths))
160 ui.debug('fastannotate: requesting %d files\n' % len(paths))
161 for p in paths:
161 for p in paths:
162 results.append(batcher.callcommand(
162 results.append(batcher.callcommand(
163 'getannotate',
163 'getannotate',
164 {'path': p, 'lastnode':lastnodemap.get(p)}))
164 {'path': p, 'lastnode':lastnodemap.get(p)}))
165
165
166 for result in results:
166 for result in results:
167 r = result.result()
167 r = result.result()
168 # TODO: pconvert these paths on the server?
168 # TODO: pconvert these paths on the server?
169 r = {util.pconvert(p): v for p, v in r.iteritems()}
169 r = {util.pconvert(p): v for p, v in r.iteritems()}
170 for path in sorted(r):
170 for path in sorted(r):
171 # ignore malicious paths
171 # ignore malicious paths
172 if (not path.startswith('fastannotate/')
172 if (not path.startswith('fastannotate/')
173 or '/../' in (path + '/')):
173 or '/../' in (path + '/')):
174 ui.debug('fastannotate: ignored malicious path %s\n' % path)
174 ui.debug('fastannotate: ignored malicious path %s\n' % path)
175 continue
175 continue
176 content = r[path]
176 content = r[path]
177 if ui.debugflag:
177 if ui.debugflag:
178 ui.debug('fastannotate: writing %d bytes to %s\n'
178 ui.debug('fastannotate: writing %d bytes to %s\n'
179 % (len(content), path))
179 % (len(content), path))
180 repo.vfs.makedirs(os.path.dirname(path))
180 repo.vfs.makedirs(os.path.dirname(path))
181 with repo.vfs(path, 'wb') as f:
181 with repo.vfs(path, 'wb') as f:
182 f.write(content)
182 f.write(content)
183
183
184 def _filterfetchpaths(repo, paths):
184 def _filterfetchpaths(repo, paths):
185 """return a subset of paths whose history is long and need to fetch linelog
185 """return a subset of paths whose history is long and need to fetch linelog
186 from the server. works with remotefilelog and non-remotefilelog repos.
186 from the server. works with remotefilelog and non-remotefilelog repos.
187 """
187 """
188 threshold = repo.ui.configint('fastannotate', 'clientfetchthreshold', 10)
188 threshold = repo.ui.configint('fastannotate', 'clientfetchthreshold', 10)
189 if threshold <= 0:
189 if threshold <= 0:
190 return paths
190 return paths
191
191
192 result = []
192 result = []
193 for path in paths:
193 for path in paths:
194 try:
194 try:
195 if len(repo.file(path)) >= threshold:
195 if len(repo.file(path)) >= threshold:
196 result.append(path)
196 result.append(path)
197 except Exception: # file not found etc.
197 except Exception: # file not found etc.
198 result.append(path)
198 result.append(path)
199
199
200 return result
200 return result
201
201
202 def localreposetup(ui, repo):
202 def localreposetup(ui, repo):
203 class fastannotaterepo(repo.__class__):
203 class fastannotaterepo(repo.__class__):
204 def prefetchfastannotate(self, paths, peer=None):
204 def prefetchfastannotate(self, paths, peer=None):
205 master = _getmaster(self.ui)
205 master = _getmaster(self.ui)
206 needupdatepaths = []
206 needupdatepaths = []
207 lastnodemap = {}
207 lastnodemap = {}
208 try:
208 try:
209 for path in _filterfetchpaths(self, paths):
209 for path in _filterfetchpaths(self, paths):
210 with context.annotatecontext(self, path) as actx:
210 with context.annotatecontext(self, path) as actx:
211 if not actx.isuptodate(master, strict=False):
211 if not actx.isuptodate(master, strict=False):
212 needupdatepaths.append(path)
212 needupdatepaths.append(path)
213 lastnodemap[path] = actx.lastnode
213 lastnodemap[path] = actx.lastnode
214 if needupdatepaths:
214 if needupdatepaths:
215 clientfetch(self, needupdatepaths, lastnodemap, peer)
215 clientfetch(self, needupdatepaths, lastnodemap, peer)
216 except Exception as ex:
216 except Exception as ex:
217 # could be directory not writable or so, not fatal
217 # could be directory not writable or so, not fatal
218 self.ui.debug('fastannotate: prefetch failed: %r\n' % ex)
218 self.ui.debug('fastannotate: prefetch failed: %r\n' % ex)
219 repo.__class__ = fastannotaterepo
219 repo.__class__ = fastannotaterepo
220
220
221 def clientreposetup(ui, repo):
221 def clientreposetup(ui, repo):
222 _registerwireprotocommand()
222 _registerwireprotocommand()
223 if repo.local():
223 if repo.local():
224 localreposetup(ui, repo)
224 localreposetup(ui, repo)
225 # TODO: this mutates global state, but only if at least one repo
225 # TODO: this mutates global state, but only if at least one repo
226 # has the extension enabled. This is probably bad for hgweb.
226 # has the extension enabled. This is probably bad for hgweb.
227 if peersetup not in hg.wirepeersetupfuncs:
227 if peersetup not in hg.wirepeersetupfuncs:
228 hg.wirepeersetupfuncs.append(peersetup)
228 hg.wirepeersetupfuncs.append(peersetup)
@@ -1,254 +1,254 b''
1 # Copyright 2016-present Facebook. All Rights Reserved.
1 # Copyright 2016-present Facebook. All Rights Reserved.
2 #
2 #
3 # revmap: trivial hg hash - linelog rev bidirectional map
3 # revmap: trivial hg hash - linelog rev bidirectional map
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import bisect
10 import bisect
11 import os
11 import os
12 import struct
12 import struct
13
13
14 from mercurial.node import hex
14 from mercurial.node import hex
15 from mercurial import (
15 from mercurial import (
16 error as hgerror,
16 error as hgerror,
17 pycompat,
17 pycompat,
18 )
18 )
19 from . import error
19 from . import error
20
20
21 # the revmap file format is straightforward:
21 # the revmap file format is straightforward:
22 #
22 #
23 # 8 bytes: header
23 # 8 bytes: header
24 # 1 byte : flag for linelog revision 1
24 # 1 byte : flag for linelog revision 1
25 # ? bytes: (optional) '\0'-terminated path string
25 # ? bytes: (optional) '\0'-terminated path string
26 # only exists if (flag & renameflag) != 0
26 # only exists if (flag & renameflag) != 0
27 # 20 bytes: hg hash for linelog revision 1
27 # 20 bytes: hg hash for linelog revision 1
28 # 1 byte : flag for linelog revision 2
28 # 1 byte : flag for linelog revision 2
29 # ? bytes: (optional) '\0'-terminated path string
29 # ? bytes: (optional) '\0'-terminated path string
30 # 20 bytes: hg hash for linelog revision 2
30 # 20 bytes: hg hash for linelog revision 2
31 # ....
31 # ....
32 #
32 #
33 # the implementation is kinda stupid: __init__ loads the whole revmap.
33 # the implementation is kinda stupid: __init__ loads the whole revmap.
34 # no laziness. benchmark shows loading 10000 revisions is about 0.015
34 # no laziness. benchmark shows loading 10000 revisions is about 0.015
35 # seconds, which looks enough for our use-case. if this implementation
35 # seconds, which looks enough for our use-case. if this implementation
36 # becomes a bottleneck, we can change it to lazily read the file
36 # becomes a bottleneck, we can change it to lazily read the file
37 # from the end.
37 # from the end.
38
38
39 # whether the changeset is in the side branch. i.e. not in the linear main
39 # whether the changeset is in the side branch. i.e. not in the linear main
40 # branch but only got referenced by lines in merge changesets.
40 # branch but only got referenced by lines in merge changesets.
41 sidebranchflag = 1
41 sidebranchflag = 1
42
42
43 # whether the changeset changes the file path (ie. is a rename)
43 # whether the changeset changes the file path (ie. is a rename)
44 renameflag = 2
44 renameflag = 2
45
45
46 # len(mercurial.node.nullid)
46 # len(mercurial.node.nullid)
47 _hshlen = 20
47 _hshlen = 20
48
48
49 class revmap(object):
49 class revmap(object):
50 """trivial hg bin hash - linelog rev bidirectional map
50 """trivial hg bin hash - linelog rev bidirectional map
51
51
52 also stores a flag (uint8) for each revision, and track renames.
52 also stores a flag (uint8) for each revision, and track renames.
53 """
53 """
54
54
55 HEADER = b'REVMAP1\0'
55 HEADER = b'REVMAP1\0'
56
56
57 def __init__(self, path=None):
57 def __init__(self, path=None):
58 """create or load the revmap, optionally associate to a file
58 """create or load the revmap, optionally associate to a file
59
59
60 if path is None, the revmap is entirely in-memory. the caller is
60 if path is None, the revmap is entirely in-memory. the caller is
61 responsible for locking. concurrent writes to a same file is unsafe.
61 responsible for locking. concurrent writes to a same file is unsafe.
62 the caller needs to make sure one file is associated to at most one
62 the caller needs to make sure one file is associated to at most one
63 revmap object at a time."""
63 revmap object at a time."""
64 self.path = path
64 self.path = path
65 self._rev2hsh = [None]
65 self._rev2hsh = [None]
66 self._rev2flag = [None]
66 self._rev2flag = [None]
67 self._hsh2rev = {}
67 self._hsh2rev = {}
68 # since rename does not happen frequently, do not store path for every
68 # since rename does not happen frequently, do not store path for every
69 # revision. self._renamerevs can be used for bisecting.
69 # revision. self._renamerevs can be used for bisecting.
70 self._renamerevs = [0]
70 self._renamerevs = [0]
71 self._renamepaths = ['']
71 self._renamepaths = ['']
72 self._lastmaxrev = -1
72 self._lastmaxrev = -1
73 if path:
73 if path:
74 if os.path.exists(path):
74 if os.path.exists(path):
75 self._load()
75 self._load()
76 else:
76 else:
77 # write the header so "append" can do incremental updates
77 # write the header so "append" can do incremental updates
78 self.flush()
78 self.flush()
79
79
80 def copyfrom(self, rhs):
80 def copyfrom(self, rhs):
81 """copy the map data from another revmap. do not affect self.path"""
81 """copy the map data from another revmap. do not affect self.path"""
82 self._rev2hsh = rhs._rev2hsh[:]
82 self._rev2hsh = rhs._rev2hsh[:]
83 self._rev2flag = rhs._rev2flag[:]
83 self._rev2flag = rhs._rev2flag[:]
84 self._hsh2rev = rhs._hsh2rev.copy()
84 self._hsh2rev = rhs._hsh2rev.copy()
85 self._renamerevs = rhs._renamerevs[:]
85 self._renamerevs = rhs._renamerevs[:]
86 self._renamepaths = rhs._renamepaths[:]
86 self._renamepaths = rhs._renamepaths[:]
87 self._lastmaxrev = -1
87 self._lastmaxrev = -1
88
88
89 @property
89 @property
90 def maxrev(self):
90 def maxrev(self):
91 """return max linelog revision number"""
91 """return max linelog revision number"""
92 return len(self._rev2hsh) - 1
92 return len(self._rev2hsh) - 1
93
93
94 def append(self, hsh, sidebranch=False, path=None, flush=False):
94 def append(self, hsh, sidebranch=False, path=None, flush=False):
95 """add a binary hg hash and return the mapped linelog revision.
95 """add a binary hg hash and return the mapped linelog revision.
96 if flush is True, incrementally update the file.
96 if flush is True, incrementally update the file.
97 """
97 """
98 if hsh in self._hsh2rev:
98 if hsh in self._hsh2rev:
99 raise error.CorruptedFileError('%r is in revmap already' % hex(hsh))
99 raise error.CorruptedFileError('%r is in revmap already' % hex(hsh))
100 if len(hsh) != _hshlen:
100 if len(hsh) != _hshlen:
101 raise hgerror.ProgrammingError('hsh must be %d-char long' % _hshlen)
101 raise hgerror.ProgrammingError('hsh must be %d-char long' % _hshlen)
102 idx = len(self._rev2hsh)
102 idx = len(self._rev2hsh)
103 flag = 0
103 flag = 0
104 if sidebranch:
104 if sidebranch:
105 flag |= sidebranchflag
105 flag |= sidebranchflag
106 if path is not None and path != self._renamepaths[-1]:
106 if path is not None and path != self._renamepaths[-1]:
107 flag |= renameflag
107 flag |= renameflag
108 self._renamerevs.append(idx)
108 self._renamerevs.append(idx)
109 self._renamepaths.append(path)
109 self._renamepaths.append(path)
110 self._rev2hsh.append(hsh)
110 self._rev2hsh.append(hsh)
111 self._rev2flag.append(flag)
111 self._rev2flag.append(flag)
112 self._hsh2rev[hsh] = idx
112 self._hsh2rev[hsh] = idx
113 if flush:
113 if flush:
114 self.flush()
114 self.flush()
115 return idx
115 return idx
116
116
117 def rev2hsh(self, rev):
117 def rev2hsh(self, rev):
118 """convert linelog revision to hg hash. return None if not found."""
118 """convert linelog revision to hg hash. return None if not found."""
119 if rev > self.maxrev or rev < 0:
119 if rev > self.maxrev or rev < 0:
120 return None
120 return None
121 return self._rev2hsh[rev]
121 return self._rev2hsh[rev]
122
122
123 def rev2flag(self, rev):
123 def rev2flag(self, rev):
124 """get the flag (uint8) for a given linelog revision.
124 """get the flag (uint8) for a given linelog revision.
125 return None if revision does not exist.
125 return None if revision does not exist.
126 """
126 """
127 if rev > self.maxrev or rev < 0:
127 if rev > self.maxrev or rev < 0:
128 return None
128 return None
129 return self._rev2flag[rev]
129 return self._rev2flag[rev]
130
130
131 def rev2path(self, rev):
131 def rev2path(self, rev):
132 """get the path for a given linelog revision.
132 """get the path for a given linelog revision.
133 return None if revision does not exist.
133 return None if revision does not exist.
134 """
134 """
135 if rev > self.maxrev or rev < 0:
135 if rev > self.maxrev or rev < 0:
136 return None
136 return None
137 idx = bisect.bisect_right(self._renamerevs, rev) - 1
137 idx = bisect.bisect_right(self._renamerevs, rev) - 1
138 return self._renamepaths[idx]
138 return self._renamepaths[idx]
139
139
140 def hsh2rev(self, hsh):
140 def hsh2rev(self, hsh):
141 """convert hg hash to linelog revision. return None if not found."""
141 """convert hg hash to linelog revision. return None if not found."""
142 return self._hsh2rev.get(hsh)
142 return self._hsh2rev.get(hsh)
143
143
144 def clear(self, flush=False):
144 def clear(self, flush=False):
145 """make the map empty. if flush is True, write to disk"""
145 """make the map empty. if flush is True, write to disk"""
146 # rev 0 is reserved, real rev starts from 1
146 # rev 0 is reserved, real rev starts from 1
147 self._rev2hsh = [None]
147 self._rev2hsh = [None]
148 self._rev2flag = [None]
148 self._rev2flag = [None]
149 self._hsh2rev = {}
149 self._hsh2rev = {}
150 self._rev2path = ['']
150 self._rev2path = ['']
151 self._lastmaxrev = -1
151 self._lastmaxrev = -1
152 if flush:
152 if flush:
153 self.flush()
153 self.flush()
154
154
155 def flush(self):
155 def flush(self):
156 """write the state down to the file"""
156 """write the state down to the file"""
157 if not self.path:
157 if not self.path:
158 return
158 return
159 if self._lastmaxrev == -1: # write the entire file
159 if self._lastmaxrev == -1: # write the entire file
160 with open(self.path, 'wb') as f:
160 with open(self.path, 'wb') as f:
161 f.write(self.HEADER)
161 f.write(self.HEADER)
162 for i in pycompat.xrange(1, len(self._rev2hsh)):
162 for i in pycompat.xrange(1, len(self._rev2hsh)):
163 self._writerev(i, f)
163 self._writerev(i, f)
164 else: # append incrementally
164 else: # append incrementally
165 with open(self.path, 'ab') as f:
165 with open(self.path, 'ab') as f:
166 for i in pycompat.xrange(self._lastmaxrev + 1,
166 for i in pycompat.xrange(self._lastmaxrev + 1,
167 len(self._rev2hsh)):
167 len(self._rev2hsh)):
168 self._writerev(i, f)
168 self._writerev(i, f)
169 self._lastmaxrev = self.maxrev
169 self._lastmaxrev = self.maxrev
170
170
171 def _load(self):
171 def _load(self):
172 """load state from file"""
172 """load state from file"""
173 if not self.path:
173 if not self.path:
174 return
174 return
175 # use local variables in a loop. CPython uses LOAD_FAST for them,
175 # use local variables in a loop. CPython uses LOAD_FAST for them,
176 # which is faster than both LOAD_CONST and LOAD_GLOBAL.
176 # which is faster than both LOAD_CONST and LOAD_GLOBAL.
177 flaglen = 1
177 flaglen = 1
178 hshlen = _hshlen
178 hshlen = _hshlen
179 with open(self.path, 'rb') as f:
179 with open(self.path, 'rb') as f:
180 if f.read(len(self.HEADER)) != self.HEADER:
180 if f.read(len(self.HEADER)) != self.HEADER:
181 raise error.CorruptedFileError()
181 raise error.CorruptedFileError()
182 self.clear(flush=False)
182 self.clear(flush=False)
183 while True:
183 while True:
184 buf = f.read(flaglen)
184 buf = f.read(flaglen)
185 if not buf:
185 if not buf:
186 break
186 break
187 flag = ord(buf)
187 flag = ord(buf)
188 rev = len(self._rev2hsh)
188 rev = len(self._rev2hsh)
189 if flag & renameflag:
189 if flag & renameflag:
190 path = self._readcstr(f)
190 path = self._readcstr(f)
191 self._renamerevs.append(rev)
191 self._renamerevs.append(rev)
192 self._renamepaths.append(path)
192 self._renamepaths.append(path)
193 hsh = f.read(hshlen)
193 hsh = f.read(hshlen)
194 if len(hsh) != hshlen:
194 if len(hsh) != hshlen:
195 raise error.CorruptedFileError()
195 raise error.CorruptedFileError()
196 self._hsh2rev[hsh] = rev
196 self._hsh2rev[hsh] = rev
197 self._rev2flag.append(flag)
197 self._rev2flag.append(flag)
198 self._rev2hsh.append(hsh)
198 self._rev2hsh.append(hsh)
199 self._lastmaxrev = self.maxrev
199 self._lastmaxrev = self.maxrev
200
200
201 def _writerev(self, rev, f):
201 def _writerev(self, rev, f):
202 """append a revision data to file"""
202 """append a revision data to file"""
203 flag = self._rev2flag[rev]
203 flag = self._rev2flag[rev]
204 hsh = self._rev2hsh[rev]
204 hsh = self._rev2hsh[rev]
205 f.write(struct.pack('B', flag))
205 f.write(struct.pack('B', flag))
206 if flag & renameflag:
206 if flag & renameflag:
207 path = self.rev2path(rev)
207 path = self.rev2path(rev)
208 if path is None:
208 if path is None:
209 raise error.CorruptedFileError('cannot find path for %s' % rev)
209 raise error.CorruptedFileError('cannot find path for %s' % rev)
210 f.write(path + '\0')
210 f.write(path + b'\0')
211 f.write(hsh)
211 f.write(hsh)
212
212
213 @staticmethod
213 @staticmethod
214 def _readcstr(f):
214 def _readcstr(f):
215 """read a C-language-like '\0'-terminated string"""
215 """read a C-language-like '\0'-terminated string"""
216 buf = ''
216 buf = ''
217 while True:
217 while True:
218 ch = f.read(1)
218 ch = f.read(1)
219 if not ch: # unexpected eof
219 if not ch: # unexpected eof
220 raise error.CorruptedFileError()
220 raise error.CorruptedFileError()
221 if ch == '\0':
221 if ch == '\0':
222 break
222 break
223 buf += ch
223 buf += ch
224 return buf
224 return buf
225
225
226 def __contains__(self, f):
226 def __contains__(self, f):
227 """(fctx or (node, path)) -> bool.
227 """(fctx or (node, path)) -> bool.
228 test if (node, path) is in the map, and is not in a side branch.
228 test if (node, path) is in the map, and is not in a side branch.
229 f can be either a tuple of (node, path), or a fctx.
229 f can be either a tuple of (node, path), or a fctx.
230 """
230 """
231 if isinstance(f, tuple): # f: (node, path)
231 if isinstance(f, tuple): # f: (node, path)
232 hsh, path = f
232 hsh, path = f
233 else: # f: fctx
233 else: # f: fctx
234 hsh, path = f.node(), f.path()
234 hsh, path = f.node(), f.path()
235 rev = self.hsh2rev(hsh)
235 rev = self.hsh2rev(hsh)
236 if rev is None:
236 if rev is None:
237 return False
237 return False
238 if path is not None and path != self.rev2path(rev):
238 if path is not None and path != self.rev2path(rev):
239 return False
239 return False
240 return (self.rev2flag(rev) & sidebranchflag) == 0
240 return (self.rev2flag(rev) & sidebranchflag) == 0
241
241
242 def getlastnode(path):
242 def getlastnode(path):
243 """return the last hash in a revmap, without loading its full content.
243 """return the last hash in a revmap, without loading its full content.
244 this is equivalent to `m = revmap(path); m.rev2hsh(m.maxrev)`, but faster.
244 this is equivalent to `m = revmap(path); m.rev2hsh(m.maxrev)`, but faster.
245 """
245 """
246 hsh = None
246 hsh = None
247 try:
247 try:
248 with open(path, 'rb') as f:
248 with open(path, 'rb') as f:
249 f.seek(-_hshlen, 2)
249 f.seek(-_hshlen, 2)
250 if f.tell() > len(revmap.HEADER):
250 if f.tell() > len(revmap.HEADER):
251 hsh = f.read(_hshlen)
251 hsh = f.read(_hshlen)
252 except IOError:
252 except IOError:
253 pass
253 pass
254 return hsh
254 return hsh
@@ -1,617 +1,691 b''
1 # fix - rewrite file content in changesets and working copy
1 # fix - rewrite file content in changesets and working copy
2 #
2 #
3 # Copyright 2018 Google LLC.
3 # Copyright 2018 Google LLC.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
7 """rewrite file content in changesets or working copy (EXPERIMENTAL)
8
8
9 Provides a command that runs configured tools on the contents of modified files,
9 Provides a command that runs configured tools on the contents of modified files,
10 writing back any fixes to the working copy or replacing changesets.
10 writing back any fixes to the working copy or replacing changesets.
11
11
12 Here is an example configuration that causes :hg:`fix` to apply automatic
12 Here is an example configuration that causes :hg:`fix` to apply automatic
13 formatting fixes to modified lines in C++ code::
13 formatting fixes to modified lines in C++ code::
14
14
15 [fix]
15 [fix]
16 clang-format:command=clang-format --assume-filename={rootpath}
16 clang-format:command=clang-format --assume-filename={rootpath}
17 clang-format:linerange=--lines={first}:{last}
17 clang-format:linerange=--lines={first}:{last}
18 clang-format:fileset=set:**.cpp or **.hpp
18 clang-format:pattern=set:**.cpp or **.hpp
19
19
20 The :command suboption forms the first part of the shell command that will be
20 The :command suboption forms the first part of the shell command that will be
21 used to fix a file. The content of the file is passed on standard input, and the
21 used to fix a file. The content of the file is passed on standard input, and the
22 fixed file content is expected on standard output. If there is any output on
22 fixed file content is expected on standard output. Any output on standard error
23 standard error, the file will not be affected. Some values may be substituted
23 will be displayed as a warning. If the exit status is not zero, the file will
24 into the command::
24 not be affected. A placeholder warning is displayed if there is a non-zero exit
25 status but no standard error output. Some values may be substituted into the
26 command::
25
27
26 {rootpath} The path of the file being fixed, relative to the repo root
28 {rootpath} The path of the file being fixed, relative to the repo root
27 {basename} The name of the file being fixed, without the directory path
29 {basename} The name of the file being fixed, without the directory path
28
30
29 If the :linerange suboption is set, the tool will only be run if there are
31 If the :linerange suboption is set, the tool will only be run if there are
30 changed lines in a file. The value of this suboption is appended to the shell
32 changed lines in a file. The value of this suboption is appended to the shell
31 command once for every range of changed lines in the file. Some values may be
33 command once for every range of changed lines in the file. Some values may be
32 substituted into the command::
34 substituted into the command::
33
35
34 {first} The 1-based line number of the first line in the modified range
36 {first} The 1-based line number of the first line in the modified range
35 {last} The 1-based line number of the last line in the modified range
37 {last} The 1-based line number of the last line in the modified range
36
38
37 The :fileset suboption determines which files will be passed through each
39 The :pattern suboption determines which files will be passed through each
38 configured tool. See :hg:`help fileset` for possible values. If there are file
40 configured tool. See :hg:`help patterns` for possible values. If there are file
39 arguments to :hg:`fix`, the intersection of these filesets is used.
41 arguments to :hg:`fix`, the intersection of these patterns is used.
40
42
41 There is also a configurable limit for the maximum size of file that will be
43 There is also a configurable limit for the maximum size of file that will be
42 processed by :hg:`fix`::
44 processed by :hg:`fix`::
43
45
44 [fix]
46 [fix]
45 maxfilesize=2MB
47 maxfilesize = 2MB
48
49 Normally, execution of configured tools will continue after a failure (indicated
50 by a non-zero exit status). It can also be configured to abort after the first
51 such failure, so that no files will be affected if any tool fails. This abort
52 will also cause :hg:`fix` to exit with a non-zero status::
53
54 [fix]
55 failure = abort
46
56
57 When multiple tools are configured to affect a file, they execute in an order
58 defined by the :priority suboption. The priority suboption has a default value
59 of zero for each tool. Tools are executed in order of descending priority. The
60 execution order of tools with equal priority is unspecified. For example, you
61 could use the 'sort' and 'head' utilities to keep only the 10 smallest numbers
62 in a text file by ensuring that 'sort' runs before 'head'::
63
64 [fix]
65 sort:command = sort -n
66 head:command = head -n 10
67 sort:pattern = numbers.txt
68 head:pattern = numbers.txt
69 sort:priority = 2
70 head:priority = 1
71
72 To account for changes made by each tool, the line numbers used for incremental
73 formatting are recomputed before executing the next tool. So, each tool may see
74 different values for the arguments added by the :linerange suboption.
47 """
75 """
48
76
49 from __future__ import absolute_import
77 from __future__ import absolute_import
50
78
51 import collections
79 import collections
52 import itertools
80 import itertools
53 import os
81 import os
54 import re
82 import re
55 import subprocess
83 import subprocess
56
84
57 from mercurial.i18n import _
85 from mercurial.i18n import _
58 from mercurial.node import nullrev
86 from mercurial.node import nullrev
59 from mercurial.node import wdirrev
87 from mercurial.node import wdirrev
60
88
61 from mercurial.utils import (
89 from mercurial.utils import (
62 procutil,
90 procutil,
63 )
91 )
64
92
65 from mercurial import (
93 from mercurial import (
66 cmdutil,
94 cmdutil,
67 context,
95 context,
68 copies,
96 copies,
69 error,
97 error,
70 mdiff,
98 mdiff,
71 merge,
99 merge,
72 obsolete,
100 obsolete,
73 pycompat,
101 pycompat,
74 registrar,
102 registrar,
75 scmutil,
103 scmutil,
76 util,
104 util,
77 worker,
105 worker,
78 )
106 )
79
107
80 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
108 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
81 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
109 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
82 # be specifying the version(s) of Mercurial they are tested with, or
110 # be specifying the version(s) of Mercurial they are tested with, or
83 # leave the attribute unspecified.
111 # leave the attribute unspecified.
84 testedwith = 'ships-with-hg-core'
112 testedwith = 'ships-with-hg-core'
85
113
86 cmdtable = {}
114 cmdtable = {}
87 command = registrar.command(cmdtable)
115 command = registrar.command(cmdtable)
88
116
89 configtable = {}
117 configtable = {}
90 configitem = registrar.configitem(configtable)
118 configitem = registrar.configitem(configtable)
91
119
92 # Register the suboptions allowed for each configured fixer.
120 # Register the suboptions allowed for each configured fixer.
93 FIXER_ATTRS = ('command', 'linerange', 'fileset')
121 FIXER_ATTRS = {
122 'command': None,
123 'linerange': None,
124 'fileset': None,
125 'pattern': None,
126 'priority': 0,
127 }
94
128
95 for key in FIXER_ATTRS:
129 for key, default in FIXER_ATTRS.items():
96 configitem('fix', '.*(:%s)?' % key, default=None, generic=True)
130 configitem('fix', '.*(:%s)?' % key, default=default, generic=True)
97
131
98 # A good default size allows most source code files to be fixed, but avoids
132 # A good default size allows most source code files to be fixed, but avoids
99 # letting fixer tools choke on huge inputs, which could be surprising to the
133 # letting fixer tools choke on huge inputs, which could be surprising to the
100 # user.
134 # user.
101 configitem('fix', 'maxfilesize', default='2MB')
135 configitem('fix', 'maxfilesize', default='2MB')
102
136
137 # Allow fix commands to exit non-zero if an executed fixer tool exits non-zero.
138 # This helps users do shell scripts that stop when a fixer tool signals a
139 # problem.
140 configitem('fix', 'failure', default='continue')
141
142 def checktoolfailureaction(ui, message, hint=None):
143 """Abort with 'message' if fix.failure=abort"""
144 action = ui.config('fix', 'failure')
145 if action not in ('continue', 'abort'):
146 raise error.Abort(_('unknown fix.failure action: %s') % (action,),
147 hint=_('use "continue" or "abort"'))
148 if action == 'abort':
149 raise error.Abort(message, hint=hint)
150
103 allopt = ('', 'all', False, _('fix all non-public non-obsolete revisions'))
151 allopt = ('', 'all', False, _('fix all non-public non-obsolete revisions'))
104 baseopt = ('', 'base', [], _('revisions to diff against (overrides automatic '
152 baseopt = ('', 'base', [], _('revisions to diff against (overrides automatic '
105 'selection, and applies to every revision being '
153 'selection, and applies to every revision being '
106 'fixed)'), _('REV'))
154 'fixed)'), _('REV'))
107 revopt = ('r', 'rev', [], _('revisions to fix'), _('REV'))
155 revopt = ('r', 'rev', [], _('revisions to fix'), _('REV'))
108 wdiropt = ('w', 'working-dir', False, _('fix the working directory'))
156 wdiropt = ('w', 'working-dir', False, _('fix the working directory'))
109 wholeopt = ('', 'whole', False, _('always fix every line of a file'))
157 wholeopt = ('', 'whole', False, _('always fix every line of a file'))
110 usage = _('[OPTION]... [FILE]...')
158 usage = _('[OPTION]... [FILE]...')
111
159
112 @command('fix', [allopt, baseopt, revopt, wdiropt, wholeopt], usage,
160 @command('fix', [allopt, baseopt, revopt, wdiropt, wholeopt], usage,
113 helpcategory=command.CATEGORY_FILE_CONTENTS)
161 helpcategory=command.CATEGORY_FILE_CONTENTS)
114 def fix(ui, repo, *pats, **opts):
162 def fix(ui, repo, *pats, **opts):
115 """rewrite file content in changesets or working directory
163 """rewrite file content in changesets or working directory
116
164
117 Runs any configured tools to fix the content of files. Only affects files
165 Runs any configured tools to fix the content of files. Only affects files
118 with changes, unless file arguments are provided. Only affects changed lines
166 with changes, unless file arguments are provided. Only affects changed lines
119 of files, unless the --whole flag is used. Some tools may always affect the
167 of files, unless the --whole flag is used. Some tools may always affect the
120 whole file regardless of --whole.
168 whole file regardless of --whole.
121
169
122 If revisions are specified with --rev, those revisions will be checked, and
170 If revisions are specified with --rev, those revisions will be checked, and
123 they may be replaced with new revisions that have fixed file content. It is
171 they may be replaced with new revisions that have fixed file content. It is
124 desirable to specify all descendants of each specified revision, so that the
172 desirable to specify all descendants of each specified revision, so that the
125 fixes propagate to the descendants. If all descendants are fixed at the same
173 fixes propagate to the descendants. If all descendants are fixed at the same
126 time, no merging, rebasing, or evolution will be required.
174 time, no merging, rebasing, or evolution will be required.
127
175
128 If --working-dir is used, files with uncommitted changes in the working copy
176 If --working-dir is used, files with uncommitted changes in the working copy
129 will be fixed. If the checked-out revision is also fixed, the working
177 will be fixed. If the checked-out revision is also fixed, the working
130 directory will update to the replacement revision.
178 directory will update to the replacement revision.
131
179
132 When determining what lines of each file to fix at each revision, the whole
180 When determining what lines of each file to fix at each revision, the whole
133 set of revisions being fixed is considered, so that fixes to earlier
181 set of revisions being fixed is considered, so that fixes to earlier
134 revisions are not forgotten in later ones. The --base flag can be used to
182 revisions are not forgotten in later ones. The --base flag can be used to
135 override this default behavior, though it is not usually desirable to do so.
183 override this default behavior, though it is not usually desirable to do so.
136 """
184 """
137 opts = pycompat.byteskwargs(opts)
185 opts = pycompat.byteskwargs(opts)
138 if opts['all']:
186 if opts['all']:
139 if opts['rev']:
187 if opts['rev']:
140 raise error.Abort(_('cannot specify both "--rev" and "--all"'))
188 raise error.Abort(_('cannot specify both "--rev" and "--all"'))
141 opts['rev'] = ['not public() and not obsolete()']
189 opts['rev'] = ['not public() and not obsolete()']
142 opts['working_dir'] = True
190 opts['working_dir'] = True
143 with repo.wlock(), repo.lock(), repo.transaction('fix'):
191 with repo.wlock(), repo.lock(), repo.transaction('fix'):
144 revstofix = getrevstofix(ui, repo, opts)
192 revstofix = getrevstofix(ui, repo, opts)
145 basectxs = getbasectxs(repo, opts, revstofix)
193 basectxs = getbasectxs(repo, opts, revstofix)
146 workqueue, numitems = getworkqueue(ui, repo, pats, opts, revstofix,
194 workqueue, numitems = getworkqueue(ui, repo, pats, opts, revstofix,
147 basectxs)
195 basectxs)
148 fixers = getfixers(ui)
196 fixers = getfixers(ui)
149
197
150 # There are no data dependencies between the workers fixing each file
198 # There are no data dependencies between the workers fixing each file
151 # revision, so we can use all available parallelism.
199 # revision, so we can use all available parallelism.
152 def getfixes(items):
200 def getfixes(items):
153 for rev, path in items:
201 for rev, path in items:
154 ctx = repo[rev]
202 ctx = repo[rev]
155 olddata = ctx[path].data()
203 olddata = ctx[path].data()
156 newdata = fixfile(ui, opts, fixers, ctx, path, basectxs[rev])
204 newdata = fixfile(ui, opts, fixers, ctx, path, basectxs[rev])
157 # Don't waste memory/time passing unchanged content back, but
205 # Don't waste memory/time passing unchanged content back, but
158 # produce one result per item either way.
206 # produce one result per item either way.
159 yield (rev, path, newdata if newdata != olddata else None)
207 yield (rev, path, newdata if newdata != olddata else None)
160 results = worker.worker(ui, 1.0, getfixes, tuple(), workqueue,
208 results = worker.worker(ui, 1.0, getfixes, tuple(), workqueue,
161 threadsafe=False)
209 threadsafe=False)
162
210
163 # We have to hold on to the data for each successor revision in memory
211 # We have to hold on to the data for each successor revision in memory
164 # until all its parents are committed. We ensure this by committing and
212 # until all its parents are committed. We ensure this by committing and
165 # freeing memory for the revisions in some topological order. This
213 # freeing memory for the revisions in some topological order. This
166 # leaves a little bit of memory efficiency on the table, but also makes
214 # leaves a little bit of memory efficiency on the table, but also makes
167 # the tests deterministic. It might also be considered a feature since
215 # the tests deterministic. It might also be considered a feature since
168 # it makes the results more easily reproducible.
216 # it makes the results more easily reproducible.
169 filedata = collections.defaultdict(dict)
217 filedata = collections.defaultdict(dict)
170 replacements = {}
218 replacements = {}
171 wdirwritten = False
219 wdirwritten = False
172 commitorder = sorted(revstofix, reverse=True)
220 commitorder = sorted(revstofix, reverse=True)
173 with ui.makeprogress(topic=_('fixing'), unit=_('files'),
221 with ui.makeprogress(topic=_('fixing'), unit=_('files'),
174 total=sum(numitems.values())) as progress:
222 total=sum(numitems.values())) as progress:
175 for rev, path, newdata in results:
223 for rev, path, newdata in results:
176 progress.increment(item=path)
224 progress.increment(item=path)
177 if newdata is not None:
225 if newdata is not None:
178 filedata[rev][path] = newdata
226 filedata[rev][path] = newdata
179 numitems[rev] -= 1
227 numitems[rev] -= 1
180 # Apply the fixes for this and any other revisions that are
228 # Apply the fixes for this and any other revisions that are
181 # ready and sitting at the front of the queue. Using a loop here
229 # ready and sitting at the front of the queue. Using a loop here
182 # prevents the queue from being blocked by the first revision to
230 # prevents the queue from being blocked by the first revision to
183 # be ready out of order.
231 # be ready out of order.
184 while commitorder and not numitems[commitorder[-1]]:
232 while commitorder and not numitems[commitorder[-1]]:
185 rev = commitorder.pop()
233 rev = commitorder.pop()
186 ctx = repo[rev]
234 ctx = repo[rev]
187 if rev == wdirrev:
235 if rev == wdirrev:
188 writeworkingdir(repo, ctx, filedata[rev], replacements)
236 writeworkingdir(repo, ctx, filedata[rev], replacements)
189 wdirwritten = bool(filedata[rev])
237 wdirwritten = bool(filedata[rev])
190 else:
238 else:
191 replacerev(ui, repo, ctx, filedata[rev], replacements)
239 replacerev(ui, repo, ctx, filedata[rev], replacements)
192 del filedata[rev]
240 del filedata[rev]
193
241
194 cleanup(repo, replacements, wdirwritten)
242 cleanup(repo, replacements, wdirwritten)
195
243
196 def cleanup(repo, replacements, wdirwritten):
244 def cleanup(repo, replacements, wdirwritten):
197 """Calls scmutil.cleanupnodes() with the given replacements.
245 """Calls scmutil.cleanupnodes() with the given replacements.
198
246
199 "replacements" is a dict from nodeid to nodeid, with one key and one value
247 "replacements" is a dict from nodeid to nodeid, with one key and one value
200 for every revision that was affected by fixing. This is slightly different
248 for every revision that was affected by fixing. This is slightly different
201 from cleanupnodes().
249 from cleanupnodes().
202
250
203 "wdirwritten" is a bool which tells whether the working copy was affected by
251 "wdirwritten" is a bool which tells whether the working copy was affected by
204 fixing, since it has no entry in "replacements".
252 fixing, since it has no entry in "replacements".
205
253
206 Useful as a hook point for extending "hg fix" with output summarizing the
254 Useful as a hook point for extending "hg fix" with output summarizing the
207 effects of the command, though we choose not to output anything here.
255 effects of the command, though we choose not to output anything here.
208 """
256 """
209 replacements = {prec: [succ] for prec, succ in replacements.iteritems()}
257 replacements = {prec: [succ] for prec, succ in replacements.iteritems()}
210 scmutil.cleanupnodes(repo, replacements, 'fix', fixphase=True)
258 scmutil.cleanupnodes(repo, replacements, 'fix', fixphase=True)
211
259
212 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
260 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
213 """"Constructs the list of files to be fixed at specific revisions
261 """"Constructs the list of files to be fixed at specific revisions
214
262
215 It is up to the caller how to consume the work items, and the only
263 It is up to the caller how to consume the work items, and the only
216 dependence between them is that replacement revisions must be committed in
264 dependence between them is that replacement revisions must be committed in
217 topological order. Each work item represents a file in the working copy or
265 topological order. Each work item represents a file in the working copy or
218 in some revision that should be fixed and written back to the working copy
266 in some revision that should be fixed and written back to the working copy
219 or into a replacement revision.
267 or into a replacement revision.
220
268
221 Work items for the same revision are grouped together, so that a worker
269 Work items for the same revision are grouped together, so that a worker
222 pool starting with the first N items in parallel is likely to finish the
270 pool starting with the first N items in parallel is likely to finish the
223 first revision's work before other revisions. This can allow us to write
271 first revision's work before other revisions. This can allow us to write
224 the result to disk and reduce memory footprint. At time of writing, the
272 the result to disk and reduce memory footprint. At time of writing, the
225 partition strategy in worker.py seems favorable to this. We also sort the
273 partition strategy in worker.py seems favorable to this. We also sort the
226 items by ascending revision number to match the order in which we commit
274 items by ascending revision number to match the order in which we commit
227 the fixes later.
275 the fixes later.
228 """
276 """
229 workqueue = []
277 workqueue = []
230 numitems = collections.defaultdict(int)
278 numitems = collections.defaultdict(int)
231 maxfilesize = ui.configbytes('fix', 'maxfilesize')
279 maxfilesize = ui.configbytes('fix', 'maxfilesize')
232 for rev in sorted(revstofix):
280 for rev in sorted(revstofix):
233 fixctx = repo[rev]
281 fixctx = repo[rev]
234 match = scmutil.match(fixctx, pats, opts)
282 match = scmutil.match(fixctx, pats, opts)
235 for path in pathstofix(ui, repo, pats, opts, match, basectxs[rev],
283 for path in pathstofix(ui, repo, pats, opts, match, basectxs[rev],
236 fixctx):
284 fixctx):
237 if path not in fixctx:
285 if path not in fixctx:
238 continue
286 continue
239 fctx = fixctx[path]
287 fctx = fixctx[path]
240 if fctx.islink():
288 if fctx.islink():
241 continue
289 continue
242 if fctx.size() > maxfilesize:
290 if fctx.size() > maxfilesize:
243 ui.warn(_('ignoring file larger than %s: %s\n') %
291 ui.warn(_('ignoring file larger than %s: %s\n') %
244 (util.bytecount(maxfilesize), path))
292 (util.bytecount(maxfilesize), path))
245 continue
293 continue
246 workqueue.append((rev, path))
294 workqueue.append((rev, path))
247 numitems[rev] += 1
295 numitems[rev] += 1
248 return workqueue, numitems
296 return workqueue, numitems
249
297
250 def getrevstofix(ui, repo, opts):
298 def getrevstofix(ui, repo, opts):
251 """Returns the set of revision numbers that should be fixed"""
299 """Returns the set of revision numbers that should be fixed"""
252 revs = set(scmutil.revrange(repo, opts['rev']))
300 revs = set(scmutil.revrange(repo, opts['rev']))
253 for rev in revs:
301 for rev in revs:
254 checkfixablectx(ui, repo, repo[rev])
302 checkfixablectx(ui, repo, repo[rev])
255 if revs:
303 if revs:
256 cmdutil.checkunfinished(repo)
304 cmdutil.checkunfinished(repo)
257 checknodescendants(repo, revs)
305 checknodescendants(repo, revs)
258 if opts.get('working_dir'):
306 if opts.get('working_dir'):
259 revs.add(wdirrev)
307 revs.add(wdirrev)
260 if list(merge.mergestate.read(repo).unresolved()):
308 if list(merge.mergestate.read(repo).unresolved()):
261 raise error.Abort('unresolved conflicts', hint="use 'hg resolve'")
309 raise error.Abort('unresolved conflicts', hint="use 'hg resolve'")
262 if not revs:
310 if not revs:
263 raise error.Abort(
311 raise error.Abort(
264 'no changesets specified', hint='use --rev or --working-dir')
312 'no changesets specified', hint='use --rev or --working-dir')
265 return revs
313 return revs
266
314
267 def checknodescendants(repo, revs):
315 def checknodescendants(repo, revs):
268 if (not obsolete.isenabled(repo, obsolete.allowunstableopt) and
316 if (not obsolete.isenabled(repo, obsolete.allowunstableopt) and
269 repo.revs('(%ld::) - (%ld)', revs, revs)):
317 repo.revs('(%ld::) - (%ld)', revs, revs)):
270 raise error.Abort(_('can only fix a changeset together '
318 raise error.Abort(_('can only fix a changeset together '
271 'with all its descendants'))
319 'with all its descendants'))
272
320
273 def checkfixablectx(ui, repo, ctx):
321 def checkfixablectx(ui, repo, ctx):
274 """Aborts if the revision shouldn't be replaced with a fixed one."""
322 """Aborts if the revision shouldn't be replaced with a fixed one."""
275 if not ctx.mutable():
323 if not ctx.mutable():
276 raise error.Abort('can\'t fix immutable changeset %s' %
324 raise error.Abort('can\'t fix immutable changeset %s' %
277 (scmutil.formatchangeid(ctx),))
325 (scmutil.formatchangeid(ctx),))
278 if ctx.obsolete():
326 if ctx.obsolete():
279 # It would be better to actually check if the revision has a successor.
327 # It would be better to actually check if the revision has a successor.
280 allowdivergence = ui.configbool('experimental',
328 allowdivergence = ui.configbool('experimental',
281 'evolution.allowdivergence')
329 'evolution.allowdivergence')
282 if not allowdivergence:
330 if not allowdivergence:
283 raise error.Abort('fixing obsolete revision could cause divergence')
331 raise error.Abort('fixing obsolete revision could cause divergence')
284
332
285 def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
333 def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
286 """Returns the set of files that should be fixed in a context
334 """Returns the set of files that should be fixed in a context
287
335
288 The result depends on the base contexts; we include any file that has
336 The result depends on the base contexts; we include any file that has
289 changed relative to any of the base contexts. Base contexts should be
337 changed relative to any of the base contexts. Base contexts should be
290 ancestors of the context being fixed.
338 ancestors of the context being fixed.
291 """
339 """
292 files = set()
340 files = set()
293 for basectx in basectxs:
341 for basectx in basectxs:
294 stat = basectx.status(fixctx, match=match, listclean=bool(pats),
342 stat = basectx.status(fixctx, match=match, listclean=bool(pats),
295 listunknown=bool(pats))
343 listunknown=bool(pats))
296 files.update(
344 files.update(
297 set(itertools.chain(stat.added, stat.modified, stat.clean,
345 set(itertools.chain(stat.added, stat.modified, stat.clean,
298 stat.unknown)))
346 stat.unknown)))
299 return files
347 return files
300
348
301 def lineranges(opts, path, basectxs, fixctx, content2):
349 def lineranges(opts, path, basectxs, fixctx, content2):
302 """Returns the set of line ranges that should be fixed in a file
350 """Returns the set of line ranges that should be fixed in a file
303
351
304 Of the form [(10, 20), (30, 40)].
352 Of the form [(10, 20), (30, 40)].
305
353
306 This depends on the given base contexts; we must consider lines that have
354 This depends on the given base contexts; we must consider lines that have
307 changed versus any of the base contexts, and whether the file has been
355 changed versus any of the base contexts, and whether the file has been
308 renamed versus any of them.
356 renamed versus any of them.
309
357
310 Another way to understand this is that we exclude line ranges that are
358 Another way to understand this is that we exclude line ranges that are
311 common to the file in all base contexts.
359 common to the file in all base contexts.
312 """
360 """
313 if opts.get('whole'):
361 if opts.get('whole'):
314 # Return a range containing all lines. Rely on the diff implementation's
362 # Return a range containing all lines. Rely on the diff implementation's
315 # idea of how many lines are in the file, instead of reimplementing it.
363 # idea of how many lines are in the file, instead of reimplementing it.
316 return difflineranges('', content2)
364 return difflineranges('', content2)
317
365
318 rangeslist = []
366 rangeslist = []
319 for basectx in basectxs:
367 for basectx in basectxs:
320 basepath = copies.pathcopies(basectx, fixctx).get(path, path)
368 basepath = copies.pathcopies(basectx, fixctx).get(path, path)
321 if basepath in basectx:
369 if basepath in basectx:
322 content1 = basectx[basepath].data()
370 content1 = basectx[basepath].data()
323 else:
371 else:
324 content1 = ''
372 content1 = ''
325 rangeslist.extend(difflineranges(content1, content2))
373 rangeslist.extend(difflineranges(content1, content2))
326 return unionranges(rangeslist)
374 return unionranges(rangeslist)
327
375
328 def unionranges(rangeslist):
376 def unionranges(rangeslist):
329 """Return the union of some closed intervals
377 """Return the union of some closed intervals
330
378
331 >>> unionranges([])
379 >>> unionranges([])
332 []
380 []
333 >>> unionranges([(1, 100)])
381 >>> unionranges([(1, 100)])
334 [(1, 100)]
382 [(1, 100)]
335 >>> unionranges([(1, 100), (1, 100)])
383 >>> unionranges([(1, 100), (1, 100)])
336 [(1, 100)]
384 [(1, 100)]
337 >>> unionranges([(1, 100), (2, 100)])
385 >>> unionranges([(1, 100), (2, 100)])
338 [(1, 100)]
386 [(1, 100)]
339 >>> unionranges([(1, 99), (1, 100)])
387 >>> unionranges([(1, 99), (1, 100)])
340 [(1, 100)]
388 [(1, 100)]
341 >>> unionranges([(1, 100), (40, 60)])
389 >>> unionranges([(1, 100), (40, 60)])
342 [(1, 100)]
390 [(1, 100)]
343 >>> unionranges([(1, 49), (50, 100)])
391 >>> unionranges([(1, 49), (50, 100)])
344 [(1, 100)]
392 [(1, 100)]
345 >>> unionranges([(1, 48), (50, 100)])
393 >>> unionranges([(1, 48), (50, 100)])
346 [(1, 48), (50, 100)]
394 [(1, 48), (50, 100)]
347 >>> unionranges([(1, 2), (3, 4), (5, 6)])
395 >>> unionranges([(1, 2), (3, 4), (5, 6)])
348 [(1, 6)]
396 [(1, 6)]
349 """
397 """
350 rangeslist = sorted(set(rangeslist))
398 rangeslist = sorted(set(rangeslist))
351 unioned = []
399 unioned = []
352 if rangeslist:
400 if rangeslist:
353 unioned, rangeslist = [rangeslist[0]], rangeslist[1:]
401 unioned, rangeslist = [rangeslist[0]], rangeslist[1:]
354 for a, b in rangeslist:
402 for a, b in rangeslist:
355 c, d = unioned[-1]
403 c, d = unioned[-1]
356 if a > d + 1:
404 if a > d + 1:
357 unioned.append((a, b))
405 unioned.append((a, b))
358 else:
406 else:
359 unioned[-1] = (c, max(b, d))
407 unioned[-1] = (c, max(b, d))
360 return unioned
408 return unioned
361
409
362 def difflineranges(content1, content2):
410 def difflineranges(content1, content2):
363 """Return list of line number ranges in content2 that differ from content1.
411 """Return list of line number ranges in content2 that differ from content1.
364
412
365 Line numbers are 1-based. The numbers are the first and last line contained
413 Line numbers are 1-based. The numbers are the first and last line contained
366 in the range. Single-line ranges have the same line number for the first and
414 in the range. Single-line ranges have the same line number for the first and
367 last line. Excludes any empty ranges that result from lines that are only
415 last line. Excludes any empty ranges that result from lines that are only
368 present in content1. Relies on mdiff's idea of where the line endings are in
416 present in content1. Relies on mdiff's idea of where the line endings are in
369 the string.
417 the string.
370
418
371 >>> from mercurial import pycompat
419 >>> from mercurial import pycompat
372 >>> lines = lambda s: b'\\n'.join([c for c in pycompat.iterbytestr(s)])
420 >>> lines = lambda s: b'\\n'.join([c for c in pycompat.iterbytestr(s)])
373 >>> difflineranges2 = lambda a, b: difflineranges(lines(a), lines(b))
421 >>> difflineranges2 = lambda a, b: difflineranges(lines(a), lines(b))
374 >>> difflineranges2(b'', b'')
422 >>> difflineranges2(b'', b'')
375 []
423 []
376 >>> difflineranges2(b'a', b'')
424 >>> difflineranges2(b'a', b'')
377 []
425 []
378 >>> difflineranges2(b'', b'A')
426 >>> difflineranges2(b'', b'A')
379 [(1, 1)]
427 [(1, 1)]
380 >>> difflineranges2(b'a', b'a')
428 >>> difflineranges2(b'a', b'a')
381 []
429 []
382 >>> difflineranges2(b'a', b'A')
430 >>> difflineranges2(b'a', b'A')
383 [(1, 1)]
431 [(1, 1)]
384 >>> difflineranges2(b'ab', b'')
432 >>> difflineranges2(b'ab', b'')
385 []
433 []
386 >>> difflineranges2(b'', b'AB')
434 >>> difflineranges2(b'', b'AB')
387 [(1, 2)]
435 [(1, 2)]
388 >>> difflineranges2(b'abc', b'ac')
436 >>> difflineranges2(b'abc', b'ac')
389 []
437 []
390 >>> difflineranges2(b'ab', b'aCb')
438 >>> difflineranges2(b'ab', b'aCb')
391 [(2, 2)]
439 [(2, 2)]
392 >>> difflineranges2(b'abc', b'aBc')
440 >>> difflineranges2(b'abc', b'aBc')
393 [(2, 2)]
441 [(2, 2)]
394 >>> difflineranges2(b'ab', b'AB')
442 >>> difflineranges2(b'ab', b'AB')
395 [(1, 2)]
443 [(1, 2)]
396 >>> difflineranges2(b'abcde', b'aBcDe')
444 >>> difflineranges2(b'abcde', b'aBcDe')
397 [(2, 2), (4, 4)]
445 [(2, 2), (4, 4)]
398 >>> difflineranges2(b'abcde', b'aBCDe')
446 >>> difflineranges2(b'abcde', b'aBCDe')
399 [(2, 4)]
447 [(2, 4)]
400 """
448 """
401 ranges = []
449 ranges = []
402 for lines, kind in mdiff.allblocks(content1, content2):
450 for lines, kind in mdiff.allblocks(content1, content2):
403 firstline, lastline = lines[2:4]
451 firstline, lastline = lines[2:4]
404 if kind == '!' and firstline != lastline:
452 if kind == '!' and firstline != lastline:
405 ranges.append((firstline + 1, lastline))
453 ranges.append((firstline + 1, lastline))
406 return ranges
454 return ranges
407
455
408 def getbasectxs(repo, opts, revstofix):
456 def getbasectxs(repo, opts, revstofix):
409 """Returns a map of the base contexts for each revision
457 """Returns a map of the base contexts for each revision
410
458
411 The base contexts determine which lines are considered modified when we
459 The base contexts determine which lines are considered modified when we
412 attempt to fix just the modified lines in a file. It also determines which
460 attempt to fix just the modified lines in a file. It also determines which
413 files we attempt to fix, so it is important to compute this even when
461 files we attempt to fix, so it is important to compute this even when
414 --whole is used.
462 --whole is used.
415 """
463 """
416 # The --base flag overrides the usual logic, and we give every revision
464 # The --base flag overrides the usual logic, and we give every revision
417 # exactly the set of baserevs that the user specified.
465 # exactly the set of baserevs that the user specified.
418 if opts.get('base'):
466 if opts.get('base'):
419 baserevs = set(scmutil.revrange(repo, opts.get('base')))
467 baserevs = set(scmutil.revrange(repo, opts.get('base')))
420 if not baserevs:
468 if not baserevs:
421 baserevs = {nullrev}
469 baserevs = {nullrev}
422 basectxs = {repo[rev] for rev in baserevs}
470 basectxs = {repo[rev] for rev in baserevs}
423 return {rev: basectxs for rev in revstofix}
471 return {rev: basectxs for rev in revstofix}
424
472
425 # Proceed in topological order so that we can easily determine each
473 # Proceed in topological order so that we can easily determine each
426 # revision's baserevs by looking at its parents and their baserevs.
474 # revision's baserevs by looking at its parents and their baserevs.
427 basectxs = collections.defaultdict(set)
475 basectxs = collections.defaultdict(set)
428 for rev in sorted(revstofix):
476 for rev in sorted(revstofix):
429 ctx = repo[rev]
477 ctx = repo[rev]
430 for pctx in ctx.parents():
478 for pctx in ctx.parents():
431 if pctx.rev() in basectxs:
479 if pctx.rev() in basectxs:
432 basectxs[rev].update(basectxs[pctx.rev()])
480 basectxs[rev].update(basectxs[pctx.rev()])
433 else:
481 else:
434 basectxs[rev].add(pctx)
482 basectxs[rev].add(pctx)
435 return basectxs
483 return basectxs
436
484
437 def fixfile(ui, opts, fixers, fixctx, path, basectxs):
485 def fixfile(ui, opts, fixers, fixctx, path, basectxs):
438 """Run any configured fixers that should affect the file in this context
486 """Run any configured fixers that should affect the file in this context
439
487
440 Returns the file content that results from applying the fixers in some order
488 Returns the file content that results from applying the fixers in some order
441 starting with the file's content in the fixctx. Fixers that support line
489 starting with the file's content in the fixctx. Fixers that support line
442 ranges will affect lines that have changed relative to any of the basectxs
490 ranges will affect lines that have changed relative to any of the basectxs
443 (i.e. they will only avoid lines that are common to all basectxs).
491 (i.e. they will only avoid lines that are common to all basectxs).
444
492
445 A fixer tool's stdout will become the file's new content if and only if it
493 A fixer tool's stdout will become the file's new content if and only if it
446 exits with code zero.
494 exits with code zero.
447 """
495 """
448 newdata = fixctx[path].data()
496 newdata = fixctx[path].data()
449 for fixername, fixer in fixers.iteritems():
497 for fixername, fixer in fixers.iteritems():
450 if fixer.affects(opts, fixctx, path):
498 if fixer.affects(opts, fixctx, path):
451 rangesfn = lambda: lineranges(opts, path, basectxs, fixctx, newdata)
499 rangesfn = lambda: lineranges(opts, path, basectxs, fixctx, newdata)
452 command = fixer.command(ui, path, rangesfn)
500 command = fixer.command(ui, path, rangesfn)
453 if command is None:
501 if command is None:
454 continue
502 continue
455 ui.debug('subprocess: %s\n' % (command,))
503 ui.debug('subprocess: %s\n' % (command,))
456 proc = subprocess.Popen(
504 proc = subprocess.Popen(
457 procutil.tonativestr(command),
505 procutil.tonativestr(command),
458 shell=True,
506 shell=True,
459 cwd=procutil.tonativestr(b'/'),
507 cwd=procutil.tonativestr(b'/'),
460 stdin=subprocess.PIPE,
508 stdin=subprocess.PIPE,
461 stdout=subprocess.PIPE,
509 stdout=subprocess.PIPE,
462 stderr=subprocess.PIPE)
510 stderr=subprocess.PIPE)
463 newerdata, stderr = proc.communicate(newdata)
511 newerdata, stderr = proc.communicate(newdata)
464 if stderr:
512 if stderr:
465 showstderr(ui, fixctx.rev(), fixername, stderr)
513 showstderr(ui, fixctx.rev(), fixername, stderr)
466 if proc.returncode == 0:
514 if proc.returncode == 0:
467 newdata = newerdata
515 newdata = newerdata
468 elif not stderr:
516 else:
469 showstderr(ui, fixctx.rev(), fixername,
517 if not stderr:
470 _('exited with status %d\n') % (proc.returncode,))
518 message = _('exited with status %d\n') % (proc.returncode,)
519 showstderr(ui, fixctx.rev(), fixername, message)
520 checktoolfailureaction(
521 ui, _('no fixes will be applied'),
522 hint=_('use --config fix.failure=continue to apply any '
523 'successful fixes anyway'))
471 return newdata
524 return newdata
472
525
473 def showstderr(ui, rev, fixername, stderr):
526 def showstderr(ui, rev, fixername, stderr):
474 """Writes the lines of the stderr string as warnings on the ui
527 """Writes the lines of the stderr string as warnings on the ui
475
528
476 Uses the revision number and fixername to give more context to each line of
529 Uses the revision number and fixername to give more context to each line of
477 the error message. Doesn't include file names, since those take up a lot of
530 the error message. Doesn't include file names, since those take up a lot of
478 space and would tend to be included in the error message if they were
531 space and would tend to be included in the error message if they were
479 relevant.
532 relevant.
480 """
533 """
481 for line in re.split('[\r\n]+', stderr):
534 for line in re.split('[\r\n]+', stderr):
482 if line:
535 if line:
483 ui.warn(('['))
536 ui.warn(('['))
484 if rev is None:
537 if rev is None:
485 ui.warn(_('wdir'), label='evolve.rev')
538 ui.warn(_('wdir'), label='evolve.rev')
486 else:
539 else:
487 ui.warn((str(rev)), label='evolve.rev')
540 ui.warn((str(rev)), label='evolve.rev')
488 ui.warn(('] %s: %s\n') % (fixername, line))
541 ui.warn(('] %s: %s\n') % (fixername, line))
489
542
490 def writeworkingdir(repo, ctx, filedata, replacements):
543 def writeworkingdir(repo, ctx, filedata, replacements):
491 """Write new content to the working copy and check out the new p1 if any
544 """Write new content to the working copy and check out the new p1 if any
492
545
493 We check out a new revision if and only if we fixed something in both the
546 We check out a new revision if and only if we fixed something in both the
494 working directory and its parent revision. This avoids the need for a full
547 working directory and its parent revision. This avoids the need for a full
495 update/merge, and means that the working directory simply isn't affected
548 update/merge, and means that the working directory simply isn't affected
496 unless the --working-dir flag is given.
549 unless the --working-dir flag is given.
497
550
498 Directly updates the dirstate for the affected files.
551 Directly updates the dirstate for the affected files.
499 """
552 """
500 for path, data in filedata.iteritems():
553 for path, data in filedata.iteritems():
501 fctx = ctx[path]
554 fctx = ctx[path]
502 fctx.write(data, fctx.flags())
555 fctx.write(data, fctx.flags())
503 if repo.dirstate[path] == 'n':
556 if repo.dirstate[path] == 'n':
504 repo.dirstate.normallookup(path)
557 repo.dirstate.normallookup(path)
505
558
506 oldparentnodes = repo.dirstate.parents()
559 oldparentnodes = repo.dirstate.parents()
507 newparentnodes = [replacements.get(n, n) for n in oldparentnodes]
560 newparentnodes = [replacements.get(n, n) for n in oldparentnodes]
508 if newparentnodes != oldparentnodes:
561 if newparentnodes != oldparentnodes:
509 repo.setparents(*newparentnodes)
562 repo.setparents(*newparentnodes)
510
563
511 def replacerev(ui, repo, ctx, filedata, replacements):
564 def replacerev(ui, repo, ctx, filedata, replacements):
512 """Commit a new revision like the given one, but with file content changes
565 """Commit a new revision like the given one, but with file content changes
513
566
514 "ctx" is the original revision to be replaced by a modified one.
567 "ctx" is the original revision to be replaced by a modified one.
515
568
516 "filedata" is a dict that maps paths to their new file content. All other
569 "filedata" is a dict that maps paths to their new file content. All other
517 paths will be recreated from the original revision without changes.
570 paths will be recreated from the original revision without changes.
518 "filedata" may contain paths that didn't exist in the original revision;
571 "filedata" may contain paths that didn't exist in the original revision;
519 they will be added.
572 they will be added.
520
573
521 "replacements" is a dict that maps a single node to a single node, and it is
574 "replacements" is a dict that maps a single node to a single node, and it is
522 updated to indicate the original revision is replaced by the newly created
575 updated to indicate the original revision is replaced by the newly created
523 one. No entry is added if the replacement's node already exists.
576 one. No entry is added if the replacement's node already exists.
524
577
525 The new revision has the same parents as the old one, unless those parents
578 The new revision has the same parents as the old one, unless those parents
526 have already been replaced, in which case those replacements are the parents
579 have already been replaced, in which case those replacements are the parents
527 of this new revision. Thus, if revisions are replaced in topological order,
580 of this new revision. Thus, if revisions are replaced in topological order,
528 there is no need to rebase them into the original topology later.
581 there is no need to rebase them into the original topology later.
529 """
582 """
530
583
531 p1rev, p2rev = repo.changelog.parentrevs(ctx.rev())
584 p1rev, p2rev = repo.changelog.parentrevs(ctx.rev())
532 p1ctx, p2ctx = repo[p1rev], repo[p2rev]
585 p1ctx, p2ctx = repo[p1rev], repo[p2rev]
533 newp1node = replacements.get(p1ctx.node(), p1ctx.node())
586 newp1node = replacements.get(p1ctx.node(), p1ctx.node())
534 newp2node = replacements.get(p2ctx.node(), p2ctx.node())
587 newp2node = replacements.get(p2ctx.node(), p2ctx.node())
535
588
589 # We don't want to create a revision that has no changes from the original,
590 # but we should if the original revision's parent has been replaced.
591 # Otherwise, we would produce an orphan that needs no actual human
592 # intervention to evolve. We can't rely on commit() to avoid creating the
593 # un-needed revision because the extra field added below produces a new hash
594 # regardless of file content changes.
595 if (not filedata and
596 p1ctx.node() not in replacements and
597 p2ctx.node() not in replacements):
598 return
599
536 def filectxfn(repo, memctx, path):
600 def filectxfn(repo, memctx, path):
537 if path not in ctx:
601 if path not in ctx:
538 return None
602 return None
539 fctx = ctx[path]
603 fctx = ctx[path]
540 copied = fctx.renamed()
604 copied = fctx.renamed()
541 if copied:
605 if copied:
542 copied = copied[0]
606 copied = copied[0]
543 return context.memfilectx(
607 return context.memfilectx(
544 repo,
608 repo,
545 memctx,
609 memctx,
546 path=fctx.path(),
610 path=fctx.path(),
547 data=filedata.get(path, fctx.data()),
611 data=filedata.get(path, fctx.data()),
548 islink=fctx.islink(),
612 islink=fctx.islink(),
549 isexec=fctx.isexec(),
613 isexec=fctx.isexec(),
550 copied=copied)
614 copied=copied)
551
615
616 extra = ctx.extra().copy()
617 extra['fix_source'] = ctx.hex()
618
552 memctx = context.memctx(
619 memctx = context.memctx(
553 repo,
620 repo,
554 parents=(newp1node, newp2node),
621 parents=(newp1node, newp2node),
555 text=ctx.description(),
622 text=ctx.description(),
556 files=set(ctx.files()) | set(filedata.keys()),
623 files=set(ctx.files()) | set(filedata.keys()),
557 filectxfn=filectxfn,
624 filectxfn=filectxfn,
558 user=ctx.user(),
625 user=ctx.user(),
559 date=ctx.date(),
626 date=ctx.date(),
560 extra=ctx.extra(),
627 extra=extra,
561 branch=ctx.branch(),
628 branch=ctx.branch(),
562 editor=None)
629 editor=None)
563 sucnode = memctx.commit()
630 sucnode = memctx.commit()
564 prenode = ctx.node()
631 prenode = ctx.node()
565 if prenode == sucnode:
632 if prenode == sucnode:
566 ui.debug('node %s already existed\n' % (ctx.hex()))
633 ui.debug('node %s already existed\n' % (ctx.hex()))
567 else:
634 else:
568 replacements[ctx.node()] = sucnode
635 replacements[ctx.node()] = sucnode
569
636
570 def getfixers(ui):
637 def getfixers(ui):
571 """Returns a map of configured fixer tools indexed by their names
638 """Returns a map of configured fixer tools indexed by their names
572
639
573 Each value is a Fixer object with methods that implement the behavior of the
640 Each value is a Fixer object with methods that implement the behavior of the
574 fixer's config suboptions. Does not validate the config values.
641 fixer's config suboptions. Does not validate the config values.
575 """
642 """
576 result = {}
643 fixers = {}
577 for name in fixernames(ui):
644 for name in fixernames(ui):
578 result[name] = Fixer()
645 fixers[name] = Fixer()
579 attrs = ui.configsuboptions('fix', name)[1]
646 attrs = ui.configsuboptions('fix', name)[1]
580 for key in FIXER_ATTRS:
647 if 'fileset' in attrs and 'pattern' not in attrs:
581 setattr(result[name], pycompat.sysstr('_' + key),
648 ui.warn(_('the fix.tool:fileset config name is deprecated; '
582 attrs.get(key, ''))
649 'please rename it to fix.tool:pattern\n'))
583 return result
650 attrs['pattern'] = attrs['fileset']
651 for key, default in FIXER_ATTRS.items():
652 setattr(fixers[name], pycompat.sysstr('_' + key),
653 attrs.get(key, default))
654 fixers[name]._priority = int(fixers[name]._priority)
655 return collections.OrderedDict(
656 sorted(fixers.items(), key=lambda item: item[1]._priority,
657 reverse=True))
584
658
585 def fixernames(ui):
659 def fixernames(ui):
586 """Returns the names of [fix] config options that have suboptions"""
660 """Returns the names of [fix] config options that have suboptions"""
587 names = set()
661 names = set()
588 for k, v in ui.configitems('fix'):
662 for k, v in ui.configitems('fix'):
589 if ':' in k:
663 if ':' in k:
590 names.add(k.split(':', 1)[0])
664 names.add(k.split(':', 1)[0])
591 return names
665 return names
592
666
593 class Fixer(object):
667 class Fixer(object):
594 """Wraps the raw config values for a fixer with methods"""
668 """Wraps the raw config values for a fixer with methods"""
595
669
596 def affects(self, opts, fixctx, path):
670 def affects(self, opts, fixctx, path):
597 """Should this fixer run on the file at the given path and context?"""
671 """Should this fixer run on the file at the given path and context?"""
598 return scmutil.match(fixctx, [self._fileset], opts)(path)
672 return scmutil.match(fixctx, [self._pattern], opts)(path)
599
673
600 def command(self, ui, path, rangesfn):
674 def command(self, ui, path, rangesfn):
601 """A shell command to use to invoke this fixer on the given file/lines
675 """A shell command to use to invoke this fixer on the given file/lines
602
676
603 May return None if there is no appropriate command to run for the given
677 May return None if there is no appropriate command to run for the given
604 parameters.
678 parameters.
605 """
679 """
606 expand = cmdutil.rendercommandtemplate
680 expand = cmdutil.rendercommandtemplate
607 parts = [expand(ui, self._command,
681 parts = [expand(ui, self._command,
608 {'rootpath': path, 'basename': os.path.basename(path)})]
682 {'rootpath': path, 'basename': os.path.basename(path)})]
609 if self._linerange:
683 if self._linerange:
610 ranges = rangesfn()
684 ranges = rangesfn()
611 if not ranges:
685 if not ranges:
612 # No line ranges to fix, so don't run the fixer.
686 # No line ranges to fix, so don't run the fixer.
613 return None
687 return None
614 for first, last in ranges:
688 for first, last in ranges:
615 parts.append(expand(ui, self._linerange,
689 parts.append(expand(ui, self._linerange,
616 {'first': first, 'last': last}))
690 {'first': first, 'last': last}))
617 return ' '.join(parts)
691 return ' '.join(parts)
@@ -1,96 +1,96 b''
1 # highlight - syntax highlighting in hgweb, based on Pygments
1 # highlight - syntax highlighting in hgweb, based on Pygments
2 #
2 #
3 # Copyright 2008, 2009 Patrick Mezard <pmezard@gmail.com> and others
3 # Copyright 2008, 2009 Patrick Mezard <pmezard@gmail.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 #
7 #
8 # The original module was split in an interface and an implementation
8 # The original module was split in an interface and an implementation
9 # file to defer pygments loading and speedup extension setup.
9 # file to defer pygments loading and speedup extension setup.
10
10
11 """syntax highlighting for hgweb (requires Pygments)
11 """syntax highlighting for hgweb (requires Pygments)
12
12
13 It depends on the Pygments syntax highlighting library:
13 It depends on the Pygments syntax highlighting library:
14 http://pygments.org/
14 http://pygments.org/
15
15
16 There are the following configuration options::
16 There are the following configuration options::
17
17
18 [web]
18 [web]
19 pygments_style = <style> (default: colorful)
19 pygments_style = <style> (default: colorful)
20 highlightfiles = <fileset> (default: size('<5M'))
20 highlightfiles = <fileset> (default: size('<5M'))
21 highlightonlymatchfilename = <bool> (default False)
21 highlightonlymatchfilename = <bool> (default False)
22
22
23 ``highlightonlymatchfilename`` will only highlight files if their type could
23 ``highlightonlymatchfilename`` will only highlight files if their type could
24 be identified by their filename. When this is not enabled (the default),
24 be identified by their filename. When this is not enabled (the default),
25 Pygments will try very hard to identify the file type from content and any
25 Pygments will try very hard to identify the file type from content and any
26 match (even matches with a low confidence score) will be used.
26 match (even matches with a low confidence score) will be used.
27 """
27 """
28
28
29 from __future__ import absolute_import
29 from __future__ import absolute_import
30
30
31 from . import highlight
31 from . import highlight
32 from mercurial.hgweb import (
32 from mercurial.hgweb import (
33 webcommands,
33 webcommands,
34 webutil,
34 webutil,
35 )
35 )
36
36
37 from mercurial import (
37 from mercurial import (
38 extensions,
38 extensions,
39 )
39 )
40
40
41 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
41 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
42 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
42 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
43 # be specifying the version(s) of Mercurial they are tested with, or
43 # be specifying the version(s) of Mercurial they are tested with, or
44 # leave the attribute unspecified.
44 # leave the attribute unspecified.
45 testedwith = 'ships-with-hg-core'
45 testedwith = 'ships-with-hg-core'
46
46
47 def pygmentize(web, field, fctx, tmpl):
47 def pygmentize(web, field, fctx, tmpl):
48 style = web.config('web', 'pygments_style', 'colorful')
48 style = web.config('web', 'pygments_style', 'colorful')
49 expr = web.config('web', 'highlightfiles', "size('<5M')")
49 expr = web.config('web', 'highlightfiles', "size('<5M')")
50 filenameonly = web.configbool('web', 'highlightonlymatchfilename', False)
50 filenameonly = web.configbool('web', 'highlightonlymatchfilename', False)
51
51
52 ctx = fctx.changectx()
52 ctx = fctx.changectx()
53 m = ctx.matchfileset(expr)
53 m = ctx.matchfileset(expr)
54 if m(fctx.path()):
54 if m(fctx.path()):
55 highlight.pygmentize(field, fctx, style, tmpl,
55 highlight.pygmentize(field, fctx, style, tmpl,
56 guessfilenameonly=filenameonly)
56 guessfilenameonly=filenameonly)
57
57
58 def filerevision_highlight(orig, web, fctx):
58 def filerevision_highlight(orig, web, fctx):
59 mt = web.res.headers['Content-Type']
59 mt = web.res.headers['Content-Type']
60 # only pygmentize for mimetype containing 'html' so we both match
60 # only pygmentize for mimetype containing 'html' so we both match
61 # 'text/html' and possibly 'application/xhtml+xml' in the future
61 # 'text/html' and possibly 'application/xhtml+xml' in the future
62 # so that we don't have to touch the extension when the mimetype
62 # so that we don't have to touch the extension when the mimetype
63 # for a template changes; also hgweb optimizes the case that a
63 # for a template changes; also hgweb optimizes the case that a
64 # raw file is sent using rawfile() and doesn't call us, so we
64 # raw file is sent using rawfile() and doesn't call us, so we
65 # can't clash with the file's content-type here in case we
65 # can't clash with the file's content-type here in case we
66 # pygmentize a html file
66 # pygmentize a html file
67 if 'html' in mt:
67 if 'html' in mt:
68 pygmentize(web, 'fileline', fctx, web.tmpl)
68 pygmentize(web, 'fileline', fctx, web.tmpl)
69
69
70 return orig(web, fctx)
70 return orig(web, fctx)
71
71
72 def annotate_highlight(orig, web):
72 def annotate_highlight(orig, web):
73 mt = web.res.headers['Content-Type']
73 mt = web.res.headers['Content-Type']
74 if 'html' in mt:
74 if 'html' in mt:
75 fctx = webutil.filectx(web.repo, web.req)
75 fctx = webutil.filectx(web.repo, web.req)
76 pygmentize(web, 'annotateline', fctx, web.tmpl)
76 pygmentize(web, 'annotateline', fctx, web.tmpl)
77
77
78 return orig(web)
78 return orig(web)
79
79
80 def generate_css(web):
80 def generate_css(web):
81 pg_style = web.config('web', 'pygments_style', 'colorful')
81 pg_style = web.config('web', 'pygments_style', 'colorful')
82 fmter = highlight.HtmlFormatter(style=pg_style)
82 fmter = highlight.HtmlFormatter(style=pg_style)
83 web.res.headers['Content-Type'] = 'text/css'
83 web.res.headers['Content-Type'] = 'text/css'
84 web.res.setbodybytes(''.join([
84 web.res.setbodybytes(''.join([
85 '/* pygments_style = %s */\n\n' % pg_style,
85 '/* pygments_style = %s */\n\n' % pg_style,
86 fmter.get_style_defs(''),
86 fmter.get_style_defs(''),
87 ]))
87 ]))
88 return web.res.sendresponse()
88 return web.res.sendresponse()
89
89
90 def extsetup():
90 def extsetup(ui):
91 # monkeypatch in the new version
91 # monkeypatch in the new version
92 extensions.wrapfunction(webcommands, '_filerevision',
92 extensions.wrapfunction(webcommands, '_filerevision',
93 filerevision_highlight)
93 filerevision_highlight)
94 extensions.wrapfunction(webcommands, 'annotate', annotate_highlight)
94 extensions.wrapfunction(webcommands, 'annotate', annotate_highlight)
95 webcommands.highlightcss = generate_css
95 webcommands.highlightcss = generate_css
96 webcommands.__all__.append('highlightcss')
96 webcommands.__all__.append('highlightcss')
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file copied from hgext/blackbox.py to mercurial/loggingutil.py
NO CONTENT: file copied from hgext/blackbox.py to mercurial/loggingutil.py
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file copied from rust/Cargo.lock to rust/hgcli/Cargo.lock
NO CONTENT: file copied from rust/Cargo.lock to rust/hgcli/Cargo.lock
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file copied from tests/test-shelve.t to tests/test-shelve2.t
NO CONTENT: file copied from tests/test-shelve.t to tests/test-shelve2.t
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: file was removed
NO CONTENT: file was removed
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now