Show More
@@ -1,129 +1,127 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.bin.kallithea_api |
|
15 | kallithea.bin.kallithea_api | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Api CLI client for Kallithea |
|
18 | Api CLI client for Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Jun 3, 2012 |
|
22 | :created_on: Jun 3, 2012 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | from __future__ import print_function |
|
|||
29 |
|
||||
30 | import argparse |
|
28 | import argparse | |
31 | import json |
|
29 | import json | |
32 | import sys |
|
30 | import sys | |
33 |
|
31 | |||
34 | from kallithea.bin.base import FORMAT_JSON, FORMAT_PRETTY, RcConf, api_call |
|
32 | from kallithea.bin.base import FORMAT_JSON, FORMAT_PRETTY, RcConf, api_call | |
35 |
|
33 | |||
36 |
|
34 | |||
37 | def argparser(argv): |
|
35 | def argparser(argv): | |
38 | usage = ( |
|
36 | usage = ( | |
39 | "kallithea-api [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] " |
|
37 | "kallithea-api [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] " | |
40 | "[--config=CONFIG] [--save-config] " |
|
38 | "[--config=CONFIG] [--save-config] " | |
41 | "METHOD <key:val> <key2:val> ...\n" |
|
39 | "METHOD <key:val> <key2:val> ...\n" | |
42 | "Create config file: kallithea-api --apikey=<key> --apihost=http://kallithea.example.com --save-config" |
|
40 | "Create config file: kallithea-api --apikey=<key> --apihost=http://kallithea.example.com --save-config" | |
43 | ) |
|
41 | ) | |
44 |
|
42 | |||
45 | parser = argparse.ArgumentParser(description='Kallithea API cli', |
|
43 | parser = argparse.ArgumentParser(description='Kallithea API cli', | |
46 | usage=usage) |
|
44 | usage=usage) | |
47 |
|
45 | |||
48 | ## config |
|
46 | ## config | |
49 | group = parser.add_argument_group('config') |
|
47 | group = parser.add_argument_group('config') | |
50 | group.add_argument('--apikey', help='api access key') |
|
48 | group.add_argument('--apikey', help='api access key') | |
51 | group.add_argument('--apihost', help='api host') |
|
49 | group.add_argument('--apihost', help='api host') | |
52 | group.add_argument('--config', help='config file') |
|
50 | group.add_argument('--config', help='config file') | |
53 | group.add_argument('--save-config', action='store_true', help='save the given config into a file') |
|
51 | group.add_argument('--save-config', action='store_true', help='save the given config into a file') | |
54 |
|
52 | |||
55 | group = parser.add_argument_group('API') |
|
53 | group = parser.add_argument_group('API') | |
56 | group.add_argument('method', metavar='METHOD', nargs='?', type=str, default=None, |
|
54 | group.add_argument('method', metavar='METHOD', nargs='?', type=str, default=None, | |
57 | help='API method name to call followed by key:value attributes', |
|
55 | help='API method name to call followed by key:value attributes', | |
58 | ) |
|
56 | ) | |
59 | group.add_argument('--format', dest='format', type=str, |
|
57 | group.add_argument('--format', dest='format', type=str, | |
60 | help='output format default: `%s` can ' |
|
58 | help='output format default: `%s` can ' | |
61 | 'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON), |
|
59 | 'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON), | |
62 | default=FORMAT_PRETTY |
|
60 | default=FORMAT_PRETTY | |
63 | ) |
|
61 | ) | |
64 | args, other = parser.parse_known_args() |
|
62 | args, other = parser.parse_known_args() | |
65 | return parser, args, other |
|
63 | return parser, args, other | |
66 |
|
64 | |||
67 |
|
65 | |||
68 | def main(argv=None): |
|
66 | def main(argv=None): | |
69 | """ |
|
67 | """ | |
70 | Main execution function for cli |
|
68 | Main execution function for cli | |
71 |
|
69 | |||
72 | :param argv: |
|
70 | :param argv: | |
73 | """ |
|
71 | """ | |
74 | if argv is None: |
|
72 | if argv is None: | |
75 | argv = sys.argv |
|
73 | argv = sys.argv | |
76 |
|
74 | |||
77 | conf = None |
|
75 | conf = None | |
78 | parser, args, other = argparser(argv) |
|
76 | parser, args, other = argparser(argv) | |
79 |
|
77 | |||
80 | api_credentials_given = (args.apikey and args.apihost) |
|
78 | api_credentials_given = (args.apikey and args.apihost) | |
81 | if args.save_config: |
|
79 | if args.save_config: | |
82 | if not api_credentials_given: |
|
80 | if not api_credentials_given: | |
83 | raise parser.error('--save-config requires --apikey and --apihost') |
|
81 | raise parser.error('--save-config requires --apikey and --apihost') | |
84 | conf = RcConf(config_location=args.config, |
|
82 | conf = RcConf(config_location=args.config, | |
85 | autocreate=True, config={'apikey': args.apikey, |
|
83 | autocreate=True, config={'apikey': args.apikey, | |
86 | 'apihost': args.apihost}) |
|
84 | 'apihost': args.apihost}) | |
87 | sys.exit() |
|
85 | sys.exit() | |
88 |
|
86 | |||
89 | if not conf: |
|
87 | if not conf: | |
90 | conf = RcConf(config_location=args.config, autoload=True) |
|
88 | conf = RcConf(config_location=args.config, autoload=True) | |
91 | if not conf: |
|
89 | if not conf: | |
92 | if not api_credentials_given: |
|
90 | if not api_credentials_given: | |
93 | parser.error('Could not find config file and missing ' |
|
91 | parser.error('Could not find config file and missing ' | |
94 | '--apikey or --apihost in params') |
|
92 | '--apikey or --apihost in params') | |
95 |
|
93 | |||
96 | apikey = args.apikey or conf['apikey'] |
|
94 | apikey = args.apikey or conf['apikey'] | |
97 | apihost = args.apihost or conf['apihost'] |
|
95 | apihost = args.apihost or conf['apihost'] | |
98 | method = args.method |
|
96 | method = args.method | |
99 |
|
97 | |||
100 | # if we don't have method here it's an error |
|
98 | # if we don't have method here it's an error | |
101 | if not method: |
|
99 | if not method: | |
102 | parser.error('Please specify method name') |
|
100 | parser.error('Please specify method name') | |
103 |
|
101 | |||
104 | try: |
|
102 | try: | |
105 | margs = dict(s.split(':', 1) for s in other) |
|
103 | margs = dict(s.split(':', 1) for s in other) | |
106 | except ValueError: |
|
104 | except ValueError: | |
107 | sys.stderr.write('Error parsing arguments \n') |
|
105 | sys.stderr.write('Error parsing arguments \n') | |
108 | sys.exit() |
|
106 | sys.exit() | |
109 | if args.format == FORMAT_PRETTY: |
|
107 | if args.format == FORMAT_PRETTY: | |
110 | print('Calling method %s => %s' % (method, apihost)) |
|
108 | print('Calling method %s => %s' % (method, apihost)) | |
111 |
|
109 | |||
112 | json_resp = api_call(apikey, apihost, method, **margs) |
|
110 | json_resp = api_call(apikey, apihost, method, **margs) | |
113 | error_prefix = '' |
|
111 | error_prefix = '' | |
114 | if json_resp['error']: |
|
112 | if json_resp['error']: | |
115 | error_prefix = 'ERROR:' |
|
113 | error_prefix = 'ERROR:' | |
116 | json_data = json_resp['error'] |
|
114 | json_data = json_resp['error'] | |
117 | else: |
|
115 | else: | |
118 | json_data = json_resp['result'] |
|
116 | json_data = json_resp['result'] | |
119 | if args.format == FORMAT_JSON: |
|
117 | if args.format == FORMAT_JSON: | |
120 | print(json.dumps(json_data)) |
|
118 | print(json.dumps(json_data)) | |
121 | elif args.format == FORMAT_PRETTY: |
|
119 | elif args.format == FORMAT_PRETTY: | |
122 | print('Server response \n%s%s' % ( |
|
120 | print('Server response \n%s%s' % ( | |
123 | error_prefix, json.dumps(json_data, indent=4, sort_keys=True) |
|
121 | error_prefix, json.dumps(json_data, indent=4, sort_keys=True) | |
124 | )) |
|
122 | )) | |
125 | return 0 |
|
123 | return 0 | |
126 |
|
124 | |||
127 |
|
125 | |||
128 | if __name__ == '__main__': |
|
126 | if __name__ == '__main__': | |
129 | sys.exit(main(sys.argv)) |
|
127 | sys.exit(main(sys.argv)) |
@@ -1,42 +1,40 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | This file was forked by the Kallithea project in July 2014 and later moved. |
|
15 | This file was forked by the Kallithea project in July 2014 and later moved. | |
16 | Original author and date, and relevant copyright and licensing information is below: |
|
16 | Original author and date, and relevant copyright and licensing information is below: | |
17 | :created_on: Apr 4, 2013 |
|
17 | :created_on: Apr 4, 2013 | |
18 | :author: marcink |
|
18 | :author: marcink | |
19 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
19 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
20 | :license: GPLv3, see LICENSE.md for more details. |
|
20 | :license: GPLv3, see LICENSE.md for more details. | |
21 | """ |
|
21 | """ | |
22 |
|
22 | |||
23 | from __future__ import print_function |
|
|||
24 |
|
||||
25 | import sys |
|
23 | import sys | |
26 |
|
24 | |||
27 | import kallithea.bin.kallithea_cli_base as cli_base |
|
25 | import kallithea.bin.kallithea_cli_base as cli_base | |
28 | from kallithea.model.db import * # these names will be directly available in the IPython shell |
|
26 | from kallithea.model.db import * # these names will be directly available in the IPython shell | |
29 |
|
27 | |||
30 |
|
28 | |||
31 | @cli_base.register_command(config_file_initialize_app=True) |
|
29 | @cli_base.register_command(config_file_initialize_app=True) | |
32 | def ishell(): |
|
30 | def ishell(): | |
33 | """Interactive shell for Kallithea.""" |
|
31 | """Interactive shell for Kallithea.""" | |
34 | try: |
|
32 | try: | |
35 | from IPython import embed |
|
33 | from IPython import embed | |
36 | except ImportError: |
|
34 | except ImportError: | |
37 | print('Kallithea ishell requires the Python package IPython 4 or later') |
|
35 | print('Kallithea ishell requires the Python package IPython 4 or later') | |
38 | sys.exit(-1) |
|
36 | sys.exit(-1) | |
39 | from traitlets.config.loader import Config |
|
37 | from traitlets.config.loader import Config | |
40 | cfg = Config() |
|
38 | cfg = Config() | |
41 | cfg.InteractiveShellEmbed.confirm_exit = False |
|
39 | cfg.InteractiveShellEmbed.confirm_exit = False | |
42 | embed(config=cfg, banner1="Kallithea IShell.") |
|
40 | embed(config=cfg, banner1="Kallithea IShell.") |
@@ -1,175 +1,173 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.bin.kallithea_gist |
|
15 | kallithea.bin.kallithea_gist | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Gist CLI client for Kallithea |
|
18 | Gist CLI client for Kallithea | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: May 9, 2013 |
|
22 | :created_on: May 9, 2013 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | from __future__ import print_function |
|
|||
29 |
|
||||
30 | import argparse |
|
28 | import argparse | |
31 | import fileinput |
|
29 | import fileinput | |
32 | import json |
|
30 | import json | |
33 | import os |
|
31 | import os | |
34 | import stat |
|
32 | import stat | |
35 | import sys |
|
33 | import sys | |
36 |
|
34 | |||
37 | from kallithea.bin.base import FORMAT_JSON, FORMAT_PRETTY, RcConf, api_call |
|
35 | from kallithea.bin.base import FORMAT_JSON, FORMAT_PRETTY, RcConf, api_call | |
38 |
|
36 | |||
39 |
|
37 | |||
40 | def argparser(argv): |
|
38 | def argparser(argv): | |
41 | usage = ( |
|
39 | usage = ( | |
42 | "kallithea-gist [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] " |
|
40 | "kallithea-gist [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] " | |
43 | "[--config=CONFIG] [--save-config] [GIST OPTIONS] " |
|
41 | "[--config=CONFIG] [--save-config] [GIST OPTIONS] " | |
44 | "[filename or stdin use - for terminal stdin ]\n" |
|
42 | "[filename or stdin use - for terminal stdin ]\n" | |
45 | "Create config file: kallithea-gist --apikey=<key> --apihost=http://kallithea.example.com --save-config" |
|
43 | "Create config file: kallithea-gist --apikey=<key> --apihost=http://kallithea.example.com --save-config" | |
46 | ) |
|
44 | ) | |
47 |
|
45 | |||
48 | parser = argparse.ArgumentParser(description='Kallithea Gist cli', |
|
46 | parser = argparse.ArgumentParser(description='Kallithea Gist cli', | |
49 | usage=usage) |
|
47 | usage=usage) | |
50 |
|
48 | |||
51 | ## config |
|
49 | ## config | |
52 | group = parser.add_argument_group('config') |
|
50 | group = parser.add_argument_group('config') | |
53 | group.add_argument('--apikey', help='api access key') |
|
51 | group.add_argument('--apikey', help='api access key') | |
54 | group.add_argument('--apihost', help='api host') |
|
52 | group.add_argument('--apihost', help='api host') | |
55 | group.add_argument('--config', help='config file path DEFAULT: ~/.config/kallithea') |
|
53 | group.add_argument('--config', help='config file path DEFAULT: ~/.config/kallithea') | |
56 | group.add_argument('--save-config', action='store_true', |
|
54 | group.add_argument('--save-config', action='store_true', | |
57 | help='save the given config into a file') |
|
55 | help='save the given config into a file') | |
58 |
|
56 | |||
59 | group = parser.add_argument_group('GIST') |
|
57 | group = parser.add_argument_group('GIST') | |
60 | group.add_argument('-p', '--private', action='store_true', |
|
58 | group.add_argument('-p', '--private', action='store_true', | |
61 | help='create private Gist') |
|
59 | help='create private Gist') | |
62 | group.add_argument('-f', '--filename', |
|
60 | group.add_argument('-f', '--filename', | |
63 | help='set uploaded gist filename, ' |
|
61 | help='set uploaded gist filename, ' | |
64 | 'also defines syntax highlighting') |
|
62 | 'also defines syntax highlighting') | |
65 | group.add_argument('-d', '--description', help='Gist description') |
|
63 | group.add_argument('-d', '--description', help='Gist description') | |
66 | group.add_argument('-l', '--lifetime', metavar='MINUTES', |
|
64 | group.add_argument('-l', '--lifetime', metavar='MINUTES', | |
67 | help='gist lifetime in minutes, -1 (DEFAULT) is forever') |
|
65 | help='gist lifetime in minutes, -1 (DEFAULT) is forever') | |
68 | group.add_argument('--format', dest='format', type=str, |
|
66 | group.add_argument('--format', dest='format', type=str, | |
69 | help='output format DEFAULT: `%s` can ' |
|
67 | help='output format DEFAULT: `%s` can ' | |
70 | 'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON), |
|
68 | 'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON), | |
71 | default=FORMAT_PRETTY |
|
69 | default=FORMAT_PRETTY | |
72 | ) |
|
70 | ) | |
73 | args, other = parser.parse_known_args() |
|
71 | args, other = parser.parse_known_args() | |
74 | return parser, args, other |
|
72 | return parser, args, other | |
75 |
|
73 | |||
76 |
|
74 | |||
77 | def _run(argv): |
|
75 | def _run(argv): | |
78 | conf = None |
|
76 | conf = None | |
79 | parser, args, other = argparser(argv) |
|
77 | parser, args, other = argparser(argv) | |
80 |
|
78 | |||
81 | api_credentials_given = (args.apikey and args.apihost) |
|
79 | api_credentials_given = (args.apikey and args.apihost) | |
82 | if args.save_config: |
|
80 | if args.save_config: | |
83 | if not api_credentials_given: |
|
81 | if not api_credentials_given: | |
84 | raise parser.error('--save-config requires --apikey and --apihost') |
|
82 | raise parser.error('--save-config requires --apikey and --apihost') | |
85 | conf = RcConf(config_location=args.config, |
|
83 | conf = RcConf(config_location=args.config, | |
86 | autocreate=True, config={'apikey': args.apikey, |
|
84 | autocreate=True, config={'apikey': args.apikey, | |
87 | 'apihost': args.apihost}) |
|
85 | 'apihost': args.apihost}) | |
88 | sys.exit() |
|
86 | sys.exit() | |
89 |
|
87 | |||
90 | if not conf: |
|
88 | if not conf: | |
91 | conf = RcConf(config_location=args.config, autoload=True) |
|
89 | conf = RcConf(config_location=args.config, autoload=True) | |
92 | if not conf: |
|
90 | if not conf: | |
93 | if not api_credentials_given: |
|
91 | if not api_credentials_given: | |
94 | parser.error('Could not find config file and missing ' |
|
92 | parser.error('Could not find config file and missing ' | |
95 | '--apikey or --apihost in params') |
|
93 | '--apikey or --apihost in params') | |
96 |
|
94 | |||
97 | apikey = args.apikey or conf['apikey'] |
|
95 | apikey = args.apikey or conf['apikey'] | |
98 | host = args.apihost or conf['apihost'] |
|
96 | host = args.apihost or conf['apihost'] | |
99 | DEFAULT_FILENAME = 'gistfile1.txt' |
|
97 | DEFAULT_FILENAME = 'gistfile1.txt' | |
100 | if other: |
|
98 | if other: | |
101 | # skip multifiles for now |
|
99 | # skip multifiles for now | |
102 | filename = other[0] |
|
100 | filename = other[0] | |
103 | if filename == '-': |
|
101 | if filename == '-': | |
104 | filename = DEFAULT_FILENAME |
|
102 | filename = DEFAULT_FILENAME | |
105 | gist_content = '' |
|
103 | gist_content = '' | |
106 | for line in fileinput.input('-'): |
|
104 | for line in fileinput.input('-'): | |
107 | gist_content += line |
|
105 | gist_content += line | |
108 | else: |
|
106 | else: | |
109 | with open(filename, 'rb') as f: |
|
107 | with open(filename, 'rb') as f: | |
110 | gist_content = f.read() |
|
108 | gist_content = f.read() | |
111 |
|
109 | |||
112 | else: |
|
110 | else: | |
113 | filename = DEFAULT_FILENAME |
|
111 | filename = DEFAULT_FILENAME | |
114 | gist_content = None |
|
112 | gist_content = None | |
115 | # little bit hacky but cross platform check where the |
|
113 | # little bit hacky but cross platform check where the | |
116 | # stdin comes from we skip the terminal case it can be handled by '-' |
|
114 | # stdin comes from we skip the terminal case it can be handled by '-' | |
117 | mode = os.fstat(0).st_mode |
|
115 | mode = os.fstat(0).st_mode | |
118 | if stat.S_ISFIFO(mode): |
|
116 | if stat.S_ISFIFO(mode): | |
119 | # "stdin is piped" |
|
117 | # "stdin is piped" | |
120 | gist_content = sys.stdin.read() |
|
118 | gist_content = sys.stdin.read() | |
121 | elif stat.S_ISREG(mode): |
|
119 | elif stat.S_ISREG(mode): | |
122 | # "stdin is redirected" |
|
120 | # "stdin is redirected" | |
123 | gist_content = sys.stdin.read() |
|
121 | gist_content = sys.stdin.read() | |
124 | else: |
|
122 | else: | |
125 | # "stdin is terminal" |
|
123 | # "stdin is terminal" | |
126 | pass |
|
124 | pass | |
127 |
|
125 | |||
128 | # make sure we don't upload binary stuff |
|
126 | # make sure we don't upload binary stuff | |
129 | if gist_content and '\0' in gist_content: |
|
127 | if gist_content and '\0' in gist_content: | |
130 | raise Exception('Error: binary files upload is not possible') |
|
128 | raise Exception('Error: binary files upload is not possible') | |
131 |
|
129 | |||
132 | filename = os.path.basename(args.filename or filename) |
|
130 | filename = os.path.basename(args.filename or filename) | |
133 | if gist_content: |
|
131 | if gist_content: | |
134 | files = { |
|
132 | files = { | |
135 | filename: { |
|
133 | filename: { | |
136 | 'content': gist_content, |
|
134 | 'content': gist_content, | |
137 | 'lexer': None |
|
135 | 'lexer': None | |
138 | } |
|
136 | } | |
139 | } |
|
137 | } | |
140 |
|
138 | |||
141 | margs = dict( |
|
139 | margs = dict( | |
142 | lifetime=args.lifetime, |
|
140 | lifetime=args.lifetime, | |
143 | description=args.description, |
|
141 | description=args.description, | |
144 | gist_type='private' if args.private else 'public', |
|
142 | gist_type='private' if args.private else 'public', | |
145 | files=files |
|
143 | files=files | |
146 | ) |
|
144 | ) | |
147 |
|
145 | |||
148 | json_data = api_call(apikey, host, 'create_gist', **margs)['result'] |
|
146 | json_data = api_call(apikey, host, 'create_gist', **margs)['result'] | |
149 | if args.format == FORMAT_JSON: |
|
147 | if args.format == FORMAT_JSON: | |
150 | print(json.dumps(json_data)) |
|
148 | print(json.dumps(json_data)) | |
151 | elif args.format == FORMAT_PRETTY: |
|
149 | elif args.format == FORMAT_PRETTY: | |
152 | print(json_data) |
|
150 | print(json_data) | |
153 | print('Created %s gist %s' % (json_data['gist']['type'], |
|
151 | print('Created %s gist %s' % (json_data['gist']['type'], | |
154 | json_data['gist']['url'])) |
|
152 | json_data['gist']['url'])) | |
155 | return 0 |
|
153 | return 0 | |
156 |
|
154 | |||
157 |
|
155 | |||
158 | def main(argv=None): |
|
156 | def main(argv=None): | |
159 | """ |
|
157 | """ | |
160 | Main execution function for cli |
|
158 | Main execution function for cli | |
161 |
|
159 | |||
162 | :param argv: |
|
160 | :param argv: | |
163 | """ |
|
161 | """ | |
164 | if argv is None: |
|
162 | if argv is None: | |
165 | argv = sys.argv |
|
163 | argv = sys.argv | |
166 |
|
164 | |||
167 | try: |
|
165 | try: | |
168 | return _run(argv) |
|
166 | return _run(argv) | |
169 | except Exception as e: |
|
167 | except Exception as e: | |
170 | print(e) |
|
168 | print(e) | |
171 | return 1 |
|
169 | return 1 | |
172 |
|
170 | |||
173 |
|
171 | |||
174 | if __name__ == '__main__': |
|
172 | if __name__ == '__main__': | |
175 | sys.exit(main(sys.argv)) |
|
173 | sys.exit(main(sys.argv)) |
@@ -1,261 +1,259 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.bin.ldap_sync |
|
15 | kallithea.bin.ldap_sync | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | LDAP sync script |
|
18 | LDAP sync script | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Mar 06, 2013 |
|
22 | :created_on: Mar 06, 2013 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | from __future__ import print_function |
|
|||
29 |
|
||||
30 | import urllib.request |
|
28 | import urllib.request | |
31 | import uuid |
|
29 | import uuid | |
32 | from configparser import ConfigParser |
|
30 | from configparser import ConfigParser | |
33 |
|
31 | |||
34 | import ldap |
|
32 | import ldap | |
35 |
|
33 | |||
36 | from kallithea.lib import ext_json |
|
34 | from kallithea.lib import ext_json | |
37 | from kallithea.lib.utils2 import ascii_bytes |
|
35 | from kallithea.lib.utils2 import ascii_bytes | |
38 |
|
36 | |||
39 |
|
37 | |||
40 | config = ConfigParser() |
|
38 | config = ConfigParser() | |
41 | config.read('ldap_sync.conf') |
|
39 | config.read('ldap_sync.conf') | |
42 |
|
40 | |||
43 |
|
41 | |||
44 | class InvalidResponseIDError(Exception): |
|
42 | class InvalidResponseIDError(Exception): | |
45 | """ Request and response don't have the same UUID. """ |
|
43 | """ Request and response don't have the same UUID. """ | |
46 |
|
44 | |||
47 |
|
45 | |||
48 | class ResponseError(Exception): |
|
46 | class ResponseError(Exception): | |
49 | """ Response has an error, something went wrong with request execution. """ |
|
47 | """ Response has an error, something went wrong with request execution. """ | |
50 |
|
48 | |||
51 |
|
49 | |||
52 | class UserAlreadyInGroupError(Exception): |
|
50 | class UserAlreadyInGroupError(Exception): | |
53 | """ User is already a member of the target group. """ |
|
51 | """ User is already a member of the target group. """ | |
54 |
|
52 | |||
55 |
|
53 | |||
56 | class UserNotInGroupError(Exception): |
|
54 | class UserNotInGroupError(Exception): | |
57 | """ User is not a member of the target group. """ |
|
55 | """ User is not a member of the target group. """ | |
58 |
|
56 | |||
59 |
|
57 | |||
60 | class API(object): |
|
58 | class API(object): | |
61 |
|
59 | |||
62 | def __init__(self, url, key): |
|
60 | def __init__(self, url, key): | |
63 | self.url = url |
|
61 | self.url = url | |
64 | self.key = key |
|
62 | self.key = key | |
65 |
|
63 | |||
66 | def get_api_data(self, uid, method, args): |
|
64 | def get_api_data(self, uid, method, args): | |
67 | """Prepare dict for API post.""" |
|
65 | """Prepare dict for API post.""" | |
68 | return { |
|
66 | return { | |
69 | "id": uid, |
|
67 | "id": uid, | |
70 | "api_key": self.key, |
|
68 | "api_key": self.key, | |
71 | "method": method, |
|
69 | "method": method, | |
72 | "args": args |
|
70 | "args": args | |
73 | } |
|
71 | } | |
74 |
|
72 | |||
75 | def post(self, method, args): |
|
73 | def post(self, method, args): | |
76 | """Send a generic API post to Kallithea. |
|
74 | """Send a generic API post to Kallithea. | |
77 |
|
75 | |||
78 | This will generate the UUID for validation check after the |
|
76 | This will generate the UUID for validation check after the | |
79 | response is returned. Handle errors and get the result back. |
|
77 | response is returned. Handle errors and get the result back. | |
80 | """ |
|
78 | """ | |
81 | uid = str(uuid.uuid1()) |
|
79 | uid = str(uuid.uuid1()) | |
82 | data = self.get_api_data(uid, method, args) |
|
80 | data = self.get_api_data(uid, method, args) | |
83 |
|
81 | |||
84 | data = ascii_bytes(ext_json.dumps(data)) |
|
82 | data = ascii_bytes(ext_json.dumps(data)) | |
85 | headers = {'content-type': 'text/plain'} |
|
83 | headers = {'content-type': 'text/plain'} | |
86 | req = urllib.request.Request(self.url, data, headers) |
|
84 | req = urllib.request.Request(self.url, data, headers) | |
87 |
|
85 | |||
88 | response = urllib.request.urlopen(req) |
|
86 | response = urllib.request.urlopen(req) | |
89 | response = ext_json.load(response) |
|
87 | response = ext_json.load(response) | |
90 |
|
88 | |||
91 | if uid != response["id"]: |
|
89 | if uid != response["id"]: | |
92 | raise InvalidResponseIDError("UUID does not match.") |
|
90 | raise InvalidResponseIDError("UUID does not match.") | |
93 |
|
91 | |||
94 | if response["error"] is not None: |
|
92 | if response["error"] is not None: | |
95 | raise ResponseError(response["error"]) |
|
93 | raise ResponseError(response["error"]) | |
96 |
|
94 | |||
97 | return response["result"] |
|
95 | return response["result"] | |
98 |
|
96 | |||
99 | def create_group(self, name, active=True): |
|
97 | def create_group(self, name, active=True): | |
100 | """Create the Kallithea user group.""" |
|
98 | """Create the Kallithea user group.""" | |
101 | args = { |
|
99 | args = { | |
102 | "group_name": name, |
|
100 | "group_name": name, | |
103 | "active": str(active) |
|
101 | "active": str(active) | |
104 | } |
|
102 | } | |
105 | self.post("create_user_group", args) |
|
103 | self.post("create_user_group", args) | |
106 |
|
104 | |||
107 | def add_membership(self, group, username): |
|
105 | def add_membership(self, group, username): | |
108 | """Add specific user to a group.""" |
|
106 | """Add specific user to a group.""" | |
109 | args = { |
|
107 | args = { | |
110 | "usersgroupid": group, |
|
108 | "usersgroupid": group, | |
111 | "userid": username |
|
109 | "userid": username | |
112 | } |
|
110 | } | |
113 | result = self.post("add_user_to_user_group", args) |
|
111 | result = self.post("add_user_to_user_group", args) | |
114 | if not result["success"]: |
|
112 | if not result["success"]: | |
115 | raise UserAlreadyInGroupError("User %s already in group %s." % |
|
113 | raise UserAlreadyInGroupError("User %s already in group %s." % | |
116 | (username, group)) |
|
114 | (username, group)) | |
117 |
|
115 | |||
118 | def remove_membership(self, group, username): |
|
116 | def remove_membership(self, group, username): | |
119 | """Remove specific user from a group.""" |
|
117 | """Remove specific user from a group.""" | |
120 | args = { |
|
118 | args = { | |
121 | "usersgroupid": group, |
|
119 | "usersgroupid": group, | |
122 | "userid": username |
|
120 | "userid": username | |
123 | } |
|
121 | } | |
124 | result = self.post("remove_user_from_user_group", args) |
|
122 | result = self.post("remove_user_from_user_group", args) | |
125 | if not result["success"]: |
|
123 | if not result["success"]: | |
126 | raise UserNotInGroupError("User %s not in group %s." % |
|
124 | raise UserNotInGroupError("User %s not in group %s." % | |
127 | (username, group)) |
|
125 | (username, group)) | |
128 |
|
126 | |||
129 | def get_group_members(self, name): |
|
127 | def get_group_members(self, name): | |
130 | """Get the list of member usernames from a user group.""" |
|
128 | """Get the list of member usernames from a user group.""" | |
131 | args = {"usersgroupid": name} |
|
129 | args = {"usersgroupid": name} | |
132 | members = self.post("get_user_group", args)['members'] |
|
130 | members = self.post("get_user_group", args)['members'] | |
133 | member_list = [] |
|
131 | member_list = [] | |
134 | for member in members: |
|
132 | for member in members: | |
135 | member_list.append(member["username"]) |
|
133 | member_list.append(member["username"]) | |
136 | return member_list |
|
134 | return member_list | |
137 |
|
135 | |||
138 | def get_group(self, name): |
|
136 | def get_group(self, name): | |
139 | """Return group info.""" |
|
137 | """Return group info.""" | |
140 | args = {"usersgroupid": name} |
|
138 | args = {"usersgroupid": name} | |
141 | return self.post("get_user_group", args) |
|
139 | return self.post("get_user_group", args) | |
142 |
|
140 | |||
143 | def get_user(self, username): |
|
141 | def get_user(self, username): | |
144 | """Return user info.""" |
|
142 | """Return user info.""" | |
145 | args = {"userid": username} |
|
143 | args = {"userid": username} | |
146 | return self.post("get_user", args) |
|
144 | return self.post("get_user", args) | |
147 |
|
145 | |||
148 |
|
146 | |||
149 | class LdapClient(object): |
|
147 | class LdapClient(object): | |
150 |
|
148 | |||
151 | def __init__(self, uri, user, key, base_dn): |
|
149 | def __init__(self, uri, user, key, base_dn): | |
152 | self.client = ldap.initialize(uri, trace_level=0) |
|
150 | self.client = ldap.initialize(uri, trace_level=0) | |
153 | self.client.set_option(ldap.OPT_REFERRALS, 0) |
|
151 | self.client.set_option(ldap.OPT_REFERRALS, 0) | |
154 | self.client.simple_bind(user, key) |
|
152 | self.client.simple_bind(user, key) | |
155 | self.base_dn = base_dn |
|
153 | self.base_dn = base_dn | |
156 |
|
154 | |||
157 | def close(self): |
|
155 | def close(self): | |
158 | self.client.unbind() |
|
156 | self.client.unbind() | |
159 |
|
157 | |||
160 | def get_groups(self): |
|
158 | def get_groups(self): | |
161 | """Get all the groups in form of dict {group_name: group_info,...}.""" |
|
159 | """Get all the groups in form of dict {group_name: group_info,...}.""" | |
162 | searchFilter = "objectClass=groupOfUniqueNames" |
|
160 | searchFilter = "objectClass=groupOfUniqueNames" | |
163 | result = self.client.search_s(self.base_dn, ldap.SCOPE_SUBTREE, |
|
161 | result = self.client.search_s(self.base_dn, ldap.SCOPE_SUBTREE, | |
164 | searchFilter) |
|
162 | searchFilter) | |
165 |
|
163 | |||
166 | groups = {} |
|
164 | groups = {} | |
167 | for group in result: |
|
165 | for group in result: | |
168 | groups[group[1]['cn'][0]] = group[1] |
|
166 | groups[group[1]['cn'][0]] = group[1] | |
169 |
|
167 | |||
170 | return groups |
|
168 | return groups | |
171 |
|
169 | |||
172 | def get_group_users(self, groups, group): |
|
170 | def get_group_users(self, groups, group): | |
173 | """Returns all the users belonging to a single group. |
|
171 | """Returns all the users belonging to a single group. | |
174 |
|
172 | |||
175 | Based on the list of groups and memberships, returns all the |
|
173 | Based on the list of groups and memberships, returns all the | |
176 | users belonging to a single group, searching recursively. |
|
174 | users belonging to a single group, searching recursively. | |
177 | """ |
|
175 | """ | |
178 | users = [] |
|
176 | users = [] | |
179 | for member in groups[group]["uniqueMember"]: |
|
177 | for member in groups[group]["uniqueMember"]: | |
180 | member = self.parse_member_string(member) |
|
178 | member = self.parse_member_string(member) | |
181 | if member[0] == "uid": |
|
179 | if member[0] == "uid": | |
182 | users.append(member[1]) |
|
180 | users.append(member[1]) | |
183 | elif member[0] == "cn": |
|
181 | elif member[0] == "cn": | |
184 | users += self.get_group_users(groups, member[1]) |
|
182 | users += self.get_group_users(groups, member[1]) | |
185 |
|
183 | |||
186 | return users |
|
184 | return users | |
187 |
|
185 | |||
188 | def parse_member_string(self, member): |
|
186 | def parse_member_string(self, member): | |
189 | """Parses the member string and returns a touple of type and name. |
|
187 | """Parses the member string and returns a touple of type and name. | |
190 |
|
188 | |||
191 | Unique member can be either user or group. Users will have 'uid' as |
|
189 | Unique member can be either user or group. Users will have 'uid' as | |
192 | prefix while groups will have 'cn'. |
|
190 | prefix while groups will have 'cn'. | |
193 | """ |
|
191 | """ | |
194 | member = member.split(",")[0] |
|
192 | member = member.split(",")[0] | |
195 | return member.split('=') |
|
193 | return member.split('=') | |
196 |
|
194 | |||
197 |
|
195 | |||
198 | class LdapSync(object): |
|
196 | class LdapSync(object): | |
199 |
|
197 | |||
200 | def __init__(self): |
|
198 | def __init__(self): | |
201 | self.ldap_client = LdapClient(config.get("default", "ldap_uri"), |
|
199 | self.ldap_client = LdapClient(config.get("default", "ldap_uri"), | |
202 | config.get("default", "ldap_user"), |
|
200 | config.get("default", "ldap_user"), | |
203 | config.get("default", "ldap_key"), |
|
201 | config.get("default", "ldap_key"), | |
204 | config.get("default", "base_dn")) |
|
202 | config.get("default", "base_dn")) | |
205 | self.kallithea_api = API(config.get("default", "api_url"), |
|
203 | self.kallithea_api = API(config.get("default", "api_url"), | |
206 | config.get("default", "api_key")) |
|
204 | config.get("default", "api_key")) | |
207 |
|
205 | |||
208 | def update_groups_from_ldap(self): |
|
206 | def update_groups_from_ldap(self): | |
209 | """Add all the groups from LDAP to Kallithea.""" |
|
207 | """Add all the groups from LDAP to Kallithea.""" | |
210 | added = existing = 0 |
|
208 | added = existing = 0 | |
211 | groups = self.ldap_client.get_groups() |
|
209 | groups = self.ldap_client.get_groups() | |
212 | for group in groups: |
|
210 | for group in groups: | |
213 | try: |
|
211 | try: | |
214 | self.kallithea_api.create_group(group) |
|
212 | self.kallithea_api.create_group(group) | |
215 | added += 1 |
|
213 | added += 1 | |
216 | except Exception: |
|
214 | except Exception: | |
217 | existing += 1 |
|
215 | existing += 1 | |
218 |
|
216 | |||
219 | return added, existing |
|
217 | return added, existing | |
220 |
|
218 | |||
221 | def update_memberships_from_ldap(self, group): |
|
219 | def update_memberships_from_ldap(self, group): | |
222 | """Update memberships based on the LDAP groups.""" |
|
220 | """Update memberships based on the LDAP groups.""" | |
223 | groups = self.ldap_client.get_groups() |
|
221 | groups = self.ldap_client.get_groups() | |
224 | group_users = self.ldap_client.get_group_users(groups, group) |
|
222 | group_users = self.ldap_client.get_group_users(groups, group) | |
225 |
|
223 | |||
226 | # Delete memberships first from each group which are not part |
|
224 | # Delete memberships first from each group which are not part | |
227 | # of the group any more. |
|
225 | # of the group any more. | |
228 | members = self.kallithea_api.get_group_members(group) |
|
226 | members = self.kallithea_api.get_group_members(group) | |
229 | for member in members: |
|
227 | for member in members: | |
230 | if member not in group_users: |
|
228 | if member not in group_users: | |
231 | try: |
|
229 | try: | |
232 | self.kallithea_api.remove_membership(group, |
|
230 | self.kallithea_api.remove_membership(group, | |
233 | member) |
|
231 | member) | |
234 | except UserNotInGroupError: |
|
232 | except UserNotInGroupError: | |
235 | pass |
|
233 | pass | |
236 |
|
234 | |||
237 | # Add memberships. |
|
235 | # Add memberships. | |
238 | for member in group_users: |
|
236 | for member in group_users: | |
239 | try: |
|
237 | try: | |
240 | self.kallithea_api.add_membership(group, member) |
|
238 | self.kallithea_api.add_membership(group, member) | |
241 | except UserAlreadyInGroupError: |
|
239 | except UserAlreadyInGroupError: | |
242 | # TODO: handle somehow maybe.. |
|
240 | # TODO: handle somehow maybe.. | |
243 | pass |
|
241 | pass | |
244 |
|
242 | |||
245 | def close(self): |
|
243 | def close(self): | |
246 | self.ldap_client.close() |
|
244 | self.ldap_client.close() | |
247 |
|
245 | |||
248 |
|
246 | |||
249 | if __name__ == '__main__': |
|
247 | if __name__ == '__main__': | |
250 | sync = LdapSync() |
|
248 | sync = LdapSync() | |
251 | print(sync.update_groups_from_ldap()) |
|
249 | print(sync.update_groups_from_ldap()) | |
252 |
|
250 | |||
253 | for gr in sync.ldap_client.get_groups(): |
|
251 | for gr in sync.ldap_client.get_groups(): | |
254 | # TODO: exception when user does not exist during add membership... |
|
252 | # TODO: exception when user does not exist during add membership... | |
255 | # How should we handle this.. Either sync users as well at this step, |
|
253 | # How should we handle this.. Either sync users as well at this step, | |
256 | # or just ignore those who don't exist. If we want the second case, |
|
254 | # or just ignore those who don't exist. If we want the second case, | |
257 | # we need to find a way to recognize the right exception (we always get |
|
255 | # we need to find a way to recognize the right exception (we always get | |
258 | # ResponseError with no error code so maybe by return msg (?) |
|
256 | # ResponseError with no error code so maybe by return msg (?) | |
259 | sync.update_memberships_from_ldap(gr) |
|
257 | sync.update_memberships_from_ldap(gr) | |
260 |
|
258 | |||
261 | sync.close() |
|
259 | sync.close() |
@@ -1,422 +1,420 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.db_manage |
|
15 | kallithea.lib.db_manage | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Database creation, and setup module for Kallithea. Used for creation |
|
18 | Database creation, and setup module for Kallithea. Used for creation | |
19 | of database as well as for migration operations |
|
19 | of database as well as for migration operations | |
20 |
|
20 | |||
21 | This file was forked by the Kallithea project in July 2014. |
|
21 | This file was forked by the Kallithea project in July 2014. | |
22 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | Original author and date, and relevant copyright and licensing information is below: | |
23 | :created_on: Apr 10, 2010 |
|
23 | :created_on: Apr 10, 2010 | |
24 | :author: marcink |
|
24 | :author: marcink | |
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
26 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | :license: GPLv3, see LICENSE.md for more details. | |
27 | """ |
|
27 | """ | |
28 |
|
28 | |||
29 | from __future__ import print_function |
|
|||
30 |
|
||||
31 | import logging |
|
29 | import logging | |
32 | import os |
|
30 | import os | |
33 | import sys |
|
31 | import sys | |
34 | import uuid |
|
32 | import uuid | |
35 |
|
33 | |||
36 | import alembic.command |
|
34 | import alembic.command | |
37 | import alembic.config |
|
35 | import alembic.config | |
38 | import sqlalchemy |
|
36 | import sqlalchemy | |
39 | from sqlalchemy.engine import create_engine |
|
37 | from sqlalchemy.engine import create_engine | |
40 |
|
38 | |||
41 | from kallithea.model.base import init_model |
|
39 | from kallithea.model.base import init_model | |
42 | from kallithea.model.db import Permission, RepoGroup, Repository, Setting, Ui, User, UserRepoGroupToPerm, UserToPerm |
|
40 | from kallithea.model.db import Permission, RepoGroup, Repository, Setting, Ui, User, UserRepoGroupToPerm, UserToPerm | |
43 | #from kallithea.model import meta |
|
41 | #from kallithea.model import meta | |
44 | from kallithea.model.meta import Base, Session |
|
42 | from kallithea.model.meta import Base, Session | |
45 | from kallithea.model.permission import PermissionModel |
|
43 | from kallithea.model.permission import PermissionModel | |
46 | from kallithea.model.repo_group import RepoGroupModel |
|
44 | from kallithea.model.repo_group import RepoGroupModel | |
47 | from kallithea.model.user import UserModel |
|
45 | from kallithea.model.user import UserModel | |
48 |
|
46 | |||
49 |
|
47 | |||
50 | log = logging.getLogger(__name__) |
|
48 | log = logging.getLogger(__name__) | |
51 |
|
49 | |||
52 |
|
50 | |||
53 | class DbManage(object): |
|
51 | class DbManage(object): | |
54 | def __init__(self, dbconf, root, tests=False, SESSION=None, cli_args=None): |
|
52 | def __init__(self, dbconf, root, tests=False, SESSION=None, cli_args=None): | |
55 | self.dbname = dbconf.split('/')[-1] |
|
53 | self.dbname = dbconf.split('/')[-1] | |
56 | self.tests = tests |
|
54 | self.tests = tests | |
57 | self.root = root |
|
55 | self.root = root | |
58 | self.dburi = dbconf |
|
56 | self.dburi = dbconf | |
59 | self.db_exists = False |
|
57 | self.db_exists = False | |
60 | self.cli_args = cli_args or {} |
|
58 | self.cli_args = cli_args or {} | |
61 | self.init_db(SESSION=SESSION) |
|
59 | self.init_db(SESSION=SESSION) | |
62 |
|
60 | |||
63 | def _ask_ok(self, msg): |
|
61 | def _ask_ok(self, msg): | |
64 | """Invoke ask_ok unless the force_ask option provides the answer""" |
|
62 | """Invoke ask_ok unless the force_ask option provides the answer""" | |
65 | force_ask = self.cli_args.get('force_ask') |
|
63 | force_ask = self.cli_args.get('force_ask') | |
66 | if force_ask is not None: |
|
64 | if force_ask is not None: | |
67 | return force_ask |
|
65 | return force_ask | |
68 | from kallithea.lib.utils2 import ask_ok |
|
66 | from kallithea.lib.utils2 import ask_ok | |
69 | return ask_ok(msg) |
|
67 | return ask_ok(msg) | |
70 |
|
68 | |||
71 | def init_db(self, SESSION=None): |
|
69 | def init_db(self, SESSION=None): | |
72 | if SESSION: |
|
70 | if SESSION: | |
73 | self.sa = SESSION |
|
71 | self.sa = SESSION | |
74 | else: |
|
72 | else: | |
75 | # init new sessions |
|
73 | # init new sessions | |
76 | engine = create_engine(self.dburi) |
|
74 | engine = create_engine(self.dburi) | |
77 | init_model(engine) |
|
75 | init_model(engine) | |
78 | self.sa = Session() |
|
76 | self.sa = Session() | |
79 |
|
77 | |||
80 | def create_tables(self, override=False): |
|
78 | def create_tables(self, override=False): | |
81 | """ |
|
79 | """ | |
82 | Create a auth database |
|
80 | Create a auth database | |
83 | """ |
|
81 | """ | |
84 |
|
82 | |||
85 | log.info("Any existing database is going to be destroyed") |
|
83 | log.info("Any existing database is going to be destroyed") | |
86 | if self.tests: |
|
84 | if self.tests: | |
87 | destroy = True |
|
85 | destroy = True | |
88 | else: |
|
86 | else: | |
89 | destroy = self._ask_ok('Are you sure to destroy old database ? [y/n]') |
|
87 | destroy = self._ask_ok('Are you sure to destroy old database ? [y/n]') | |
90 | if not destroy: |
|
88 | if not destroy: | |
91 | print('Nothing done.') |
|
89 | print('Nothing done.') | |
92 | sys.exit(0) |
|
90 | sys.exit(0) | |
93 | if destroy: |
|
91 | if destroy: | |
94 | # drop and re-create old schemas |
|
92 | # drop and re-create old schemas | |
95 |
|
93 | |||
96 | url = sqlalchemy.engine.url.make_url(self.dburi) |
|
94 | url = sqlalchemy.engine.url.make_url(self.dburi) | |
97 | database = url.database |
|
95 | database = url.database | |
98 |
|
96 | |||
99 | # Some databases enforce foreign key constraints and Base.metadata.drop_all() doesn't work |
|
97 | # Some databases enforce foreign key constraints and Base.metadata.drop_all() doesn't work | |
100 | if url.drivername == 'mysql': |
|
98 | if url.drivername == 'mysql': | |
101 | url.database = None # don't connect to the database (it might not exist) |
|
99 | url.database = None # don't connect to the database (it might not exist) | |
102 | engine = sqlalchemy.create_engine(url) |
|
100 | engine = sqlalchemy.create_engine(url) | |
103 | with engine.connect() as conn: |
|
101 | with engine.connect() as conn: | |
104 | conn.execute('DROP DATABASE IF EXISTS ' + database) |
|
102 | conn.execute('DROP DATABASE IF EXISTS ' + database) | |
105 | conn.execute('CREATE DATABASE ' + database) |
|
103 | conn.execute('CREATE DATABASE ' + database) | |
106 | elif url.drivername == 'postgresql': |
|
104 | elif url.drivername == 'postgresql': | |
107 | from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT |
|
105 | from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT | |
108 | url.database = 'postgres' # connect to the system database (as the real one might not exist) |
|
106 | url.database = 'postgres' # connect to the system database (as the real one might not exist) | |
109 | engine = sqlalchemy.create_engine(url) |
|
107 | engine = sqlalchemy.create_engine(url) | |
110 | with engine.connect() as conn: |
|
108 | with engine.connect() as conn: | |
111 | conn.connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) |
|
109 | conn.connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) | |
112 | conn.execute('DROP DATABASE IF EXISTS ' + database) |
|
110 | conn.execute('DROP DATABASE IF EXISTS ' + database) | |
113 | conn.execute('CREATE DATABASE ' + database) |
|
111 | conn.execute('CREATE DATABASE ' + database) | |
114 | else: |
|
112 | else: | |
115 | # known to work on SQLite - possibly not on other databases with strong referential integrity |
|
113 | # known to work on SQLite - possibly not on other databases with strong referential integrity | |
116 | Base.metadata.drop_all() |
|
114 | Base.metadata.drop_all() | |
117 |
|
115 | |||
118 | checkfirst = not override |
|
116 | checkfirst = not override | |
119 | Base.metadata.create_all(checkfirst=checkfirst) |
|
117 | Base.metadata.create_all(checkfirst=checkfirst) | |
120 |
|
118 | |||
121 | # Create an Alembic configuration and generate the version table, |
|
119 | # Create an Alembic configuration and generate the version table, | |
122 | # "stamping" it with the most recent Alembic migration revision, to |
|
120 | # "stamping" it with the most recent Alembic migration revision, to | |
123 | # tell Alembic that all the schema upgrades are already in effect. |
|
121 | # tell Alembic that all the schema upgrades are already in effect. | |
124 | alembic_cfg = alembic.config.Config() |
|
122 | alembic_cfg = alembic.config.Config() | |
125 | alembic_cfg.set_main_option('script_location', 'kallithea:alembic') |
|
123 | alembic_cfg.set_main_option('script_location', 'kallithea:alembic') | |
126 | alembic_cfg.set_main_option('sqlalchemy.url', self.dburi) |
|
124 | alembic_cfg.set_main_option('sqlalchemy.url', self.dburi) | |
127 | # This command will give an error in an Alembic multi-head scenario, |
|
125 | # This command will give an error in an Alembic multi-head scenario, | |
128 | # but in practice, such a scenario should not come up during database |
|
126 | # but in practice, such a scenario should not come up during database | |
129 | # creation, even during development. |
|
127 | # creation, even during development. | |
130 | alembic.command.stamp(alembic_cfg, 'head') |
|
128 | alembic.command.stamp(alembic_cfg, 'head') | |
131 |
|
129 | |||
132 | log.info('Created tables for %s', self.dbname) |
|
130 | log.info('Created tables for %s', self.dbname) | |
133 |
|
131 | |||
134 | def fix_repo_paths(self): |
|
132 | def fix_repo_paths(self): | |
135 | """ |
|
133 | """ | |
136 | Fixes a old kallithea version path into new one without a '*' |
|
134 | Fixes a old kallithea version path into new one without a '*' | |
137 | """ |
|
135 | """ | |
138 |
|
136 | |||
139 | paths = Ui.query() \ |
|
137 | paths = Ui.query() \ | |
140 | .filter(Ui.ui_key == '/') \ |
|
138 | .filter(Ui.ui_key == '/') \ | |
141 | .scalar() |
|
139 | .scalar() | |
142 |
|
140 | |||
143 | paths.ui_value = paths.ui_value.replace('*', '') |
|
141 | paths.ui_value = paths.ui_value.replace('*', '') | |
144 |
|
142 | |||
145 | self.sa.commit() |
|
143 | self.sa.commit() | |
146 |
|
144 | |||
147 | def fix_default_user(self): |
|
145 | def fix_default_user(self): | |
148 | """ |
|
146 | """ | |
149 | Fixes a old default user with some 'nicer' default values, |
|
147 | Fixes a old default user with some 'nicer' default values, | |
150 | used mostly for anonymous access |
|
148 | used mostly for anonymous access | |
151 | """ |
|
149 | """ | |
152 | def_user = User.query().filter_by(is_default_user=True).one() |
|
150 | def_user = User.query().filter_by(is_default_user=True).one() | |
153 |
|
151 | |||
154 | def_user.name = 'Anonymous' |
|
152 | def_user.name = 'Anonymous' | |
155 | def_user.lastname = 'User' |
|
153 | def_user.lastname = 'User' | |
156 | def_user.email = 'anonymous@kallithea-scm.org' |
|
154 | def_user.email = 'anonymous@kallithea-scm.org' | |
157 |
|
155 | |||
158 | self.sa.commit() |
|
156 | self.sa.commit() | |
159 |
|
157 | |||
160 | def fix_settings(self): |
|
158 | def fix_settings(self): | |
161 | """ |
|
159 | """ | |
162 | Fixes kallithea settings adds ga_code key for google analytics |
|
160 | Fixes kallithea settings adds ga_code key for google analytics | |
163 | """ |
|
161 | """ | |
164 |
|
162 | |||
165 | hgsettings3 = Setting('ga_code', '') |
|
163 | hgsettings3 = Setting('ga_code', '') | |
166 |
|
164 | |||
167 | self.sa.add(hgsettings3) |
|
165 | self.sa.add(hgsettings3) | |
168 | self.sa.commit() |
|
166 | self.sa.commit() | |
169 |
|
167 | |||
170 | def admin_prompt(self, second=False): |
|
168 | def admin_prompt(self, second=False): | |
171 | if not self.tests: |
|
169 | if not self.tests: | |
172 | import getpass |
|
170 | import getpass | |
173 |
|
171 | |||
174 | username = self.cli_args.get('username') |
|
172 | username = self.cli_args.get('username') | |
175 | password = self.cli_args.get('password') |
|
173 | password = self.cli_args.get('password') | |
176 | email = self.cli_args.get('email') |
|
174 | email = self.cli_args.get('email') | |
177 |
|
175 | |||
178 | def get_password(): |
|
176 | def get_password(): | |
179 | password = getpass.getpass('Specify admin password ' |
|
177 | password = getpass.getpass('Specify admin password ' | |
180 | '(min 6 chars):') |
|
178 | '(min 6 chars):') | |
181 | confirm = getpass.getpass('Confirm password:') |
|
179 | confirm = getpass.getpass('Confirm password:') | |
182 |
|
180 | |||
183 | if password != confirm: |
|
181 | if password != confirm: | |
184 | log.error('passwords mismatch') |
|
182 | log.error('passwords mismatch') | |
185 | return False |
|
183 | return False | |
186 | if len(password) < 6: |
|
184 | if len(password) < 6: | |
187 | log.error('password is to short use at least 6 characters') |
|
185 | log.error('password is to short use at least 6 characters') | |
188 | return False |
|
186 | return False | |
189 |
|
187 | |||
190 | return password |
|
188 | return password | |
191 | if username is None: |
|
189 | if username is None: | |
192 | username = input('Specify admin username:') |
|
190 | username = input('Specify admin username:') | |
193 | if password is None: |
|
191 | if password is None: | |
194 | password = get_password() |
|
192 | password = get_password() | |
195 | if not password: |
|
193 | if not password: | |
196 | # second try |
|
194 | # second try | |
197 | password = get_password() |
|
195 | password = get_password() | |
198 | if not password: |
|
196 | if not password: | |
199 | sys.exit() |
|
197 | sys.exit() | |
200 | if email is None: |
|
198 | if email is None: | |
201 | email = input('Specify admin email:') |
|
199 | email = input('Specify admin email:') | |
202 | self.create_user(username, password, email, True) |
|
200 | self.create_user(username, password, email, True) | |
203 | else: |
|
201 | else: | |
204 | log.info('creating admin and regular test users') |
|
202 | log.info('creating admin and regular test users') | |
205 | from kallithea.tests.base import TEST_USER_ADMIN_LOGIN, \ |
|
203 | from kallithea.tests.base import TEST_USER_ADMIN_LOGIN, \ | |
206 | TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \ |
|
204 | TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \ | |
207 | TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \ |
|
205 | TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \ | |
208 | TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \ |
|
206 | TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \ | |
209 | TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL |
|
207 | TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL | |
210 |
|
208 | |||
211 | self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS, |
|
209 | self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS, | |
212 | TEST_USER_ADMIN_EMAIL, True) |
|
210 | TEST_USER_ADMIN_EMAIL, True) | |
213 |
|
211 | |||
214 | self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, |
|
212 | self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, | |
215 | TEST_USER_REGULAR_EMAIL, False) |
|
213 | TEST_USER_REGULAR_EMAIL, False) | |
216 |
|
214 | |||
217 | self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS, |
|
215 | self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS, | |
218 | TEST_USER_REGULAR2_EMAIL, False) |
|
216 | TEST_USER_REGULAR2_EMAIL, False) | |
219 |
|
217 | |||
220 | def create_auth_plugin_options(self, skip_existing=False): |
|
218 | def create_auth_plugin_options(self, skip_existing=False): | |
221 | """ |
|
219 | """ | |
222 | Create default auth plugin settings, and make it active |
|
220 | Create default auth plugin settings, and make it active | |
223 |
|
221 | |||
224 | :param skip_existing: |
|
222 | :param skip_existing: | |
225 | """ |
|
223 | """ | |
226 |
|
224 | |||
227 | for k, v, t in [('auth_plugins', 'kallithea.lib.auth_modules.auth_internal', 'list'), |
|
225 | for k, v, t in [('auth_plugins', 'kallithea.lib.auth_modules.auth_internal', 'list'), | |
228 | ('auth_internal_enabled', 'True', 'bool')]: |
|
226 | ('auth_internal_enabled', 'True', 'bool')]: | |
229 | if skip_existing and Setting.get_by_name(k) is not None: |
|
227 | if skip_existing and Setting.get_by_name(k) is not None: | |
230 | log.debug('Skipping option %s', k) |
|
228 | log.debug('Skipping option %s', k) | |
231 | continue |
|
229 | continue | |
232 | setting = Setting(k, v, t) |
|
230 | setting = Setting(k, v, t) | |
233 | self.sa.add(setting) |
|
231 | self.sa.add(setting) | |
234 |
|
232 | |||
235 | def create_default_options(self, skip_existing=False): |
|
233 | def create_default_options(self, skip_existing=False): | |
236 | """Creates default settings""" |
|
234 | """Creates default settings""" | |
237 |
|
235 | |||
238 | for k, v, t in [ |
|
236 | for k, v, t in [ | |
239 | ('default_repo_enable_downloads', False, 'bool'), |
|
237 | ('default_repo_enable_downloads', False, 'bool'), | |
240 | ('default_repo_enable_statistics', False, 'bool'), |
|
238 | ('default_repo_enable_statistics', False, 'bool'), | |
241 | ('default_repo_private', False, 'bool'), |
|
239 | ('default_repo_private', False, 'bool'), | |
242 | ('default_repo_type', 'hg', 'unicode') |
|
240 | ('default_repo_type', 'hg', 'unicode') | |
243 | ]: |
|
241 | ]: | |
244 | if skip_existing and Setting.get_by_name(k) is not None: |
|
242 | if skip_existing and Setting.get_by_name(k) is not None: | |
245 | log.debug('Skipping option %s', k) |
|
243 | log.debug('Skipping option %s', k) | |
246 | continue |
|
244 | continue | |
247 | setting = Setting(k, v, t) |
|
245 | setting = Setting(k, v, t) | |
248 | self.sa.add(setting) |
|
246 | self.sa.add(setting) | |
249 |
|
247 | |||
250 | def fixup_groups(self): |
|
248 | def fixup_groups(self): | |
251 | def_usr = User.get_default_user() |
|
249 | def_usr = User.get_default_user() | |
252 | for g in RepoGroup.query().all(): |
|
250 | for g in RepoGroup.query().all(): | |
253 | g.group_name = g.get_new_name(g.name) |
|
251 | g.group_name = g.get_new_name(g.name) | |
254 | # get default perm |
|
252 | # get default perm | |
255 | default = UserRepoGroupToPerm.query() \ |
|
253 | default = UserRepoGroupToPerm.query() \ | |
256 | .filter(UserRepoGroupToPerm.group == g) \ |
|
254 | .filter(UserRepoGroupToPerm.group == g) \ | |
257 | .filter(UserRepoGroupToPerm.user == def_usr) \ |
|
255 | .filter(UserRepoGroupToPerm.user == def_usr) \ | |
258 | .scalar() |
|
256 | .scalar() | |
259 |
|
257 | |||
260 | if default is None: |
|
258 | if default is None: | |
261 | log.debug('missing default permission for group %s adding', g) |
|
259 | log.debug('missing default permission for group %s adding', g) | |
262 | RepoGroupModel()._create_default_perms(g) |
|
260 | RepoGroupModel()._create_default_perms(g) | |
263 |
|
261 | |||
264 | def reset_permissions(self, username): |
|
262 | def reset_permissions(self, username): | |
265 | """ |
|
263 | """ | |
266 | Resets permissions to default state, useful when old systems had |
|
264 | Resets permissions to default state, useful when old systems had | |
267 | bad permissions, we must clean them up |
|
265 | bad permissions, we must clean them up | |
268 |
|
266 | |||
269 | :param username: |
|
267 | :param username: | |
270 | """ |
|
268 | """ | |
271 | default_user = User.get_by_username(username) |
|
269 | default_user = User.get_by_username(username) | |
272 | if not default_user: |
|
270 | if not default_user: | |
273 | return |
|
271 | return | |
274 |
|
272 | |||
275 | u2p = UserToPerm.query() \ |
|
273 | u2p = UserToPerm.query() \ | |
276 | .filter(UserToPerm.user == default_user).all() |
|
274 | .filter(UserToPerm.user == default_user).all() | |
277 | fixed = False |
|
275 | fixed = False | |
278 | if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS): |
|
276 | if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS): | |
279 | for p in u2p: |
|
277 | for p in u2p: | |
280 | Session().delete(p) |
|
278 | Session().delete(p) | |
281 | fixed = True |
|
279 | fixed = True | |
282 | self.populate_default_permissions() |
|
280 | self.populate_default_permissions() | |
283 | return fixed |
|
281 | return fixed | |
284 |
|
282 | |||
285 | def update_repo_info(self): |
|
283 | def update_repo_info(self): | |
286 | for repo in Repository.query(): |
|
284 | for repo in Repository.query(): | |
287 | repo.update_changeset_cache() |
|
285 | repo.update_changeset_cache() | |
288 |
|
286 | |||
289 | def prompt_repo_root_path(self, test_repo_path='', retries=3): |
|
287 | def prompt_repo_root_path(self, test_repo_path='', retries=3): | |
290 | _path = self.cli_args.get('repos_location') |
|
288 | _path = self.cli_args.get('repos_location') | |
291 | if retries == 3: |
|
289 | if retries == 3: | |
292 | log.info('Setting up repositories config') |
|
290 | log.info('Setting up repositories config') | |
293 |
|
291 | |||
294 | if _path is not None: |
|
292 | if _path is not None: | |
295 | path = _path |
|
293 | path = _path | |
296 | elif not self.tests and not test_repo_path: |
|
294 | elif not self.tests and not test_repo_path: | |
297 | path = input( |
|
295 | path = input( | |
298 | 'Enter a valid absolute path to store repositories. ' |
|
296 | 'Enter a valid absolute path to store repositories. ' | |
299 | 'All repositories in that path will be added automatically:' |
|
297 | 'All repositories in that path will be added automatically:' | |
300 | ) |
|
298 | ) | |
301 | else: |
|
299 | else: | |
302 | path = test_repo_path |
|
300 | path = test_repo_path | |
303 | path_ok = True |
|
301 | path_ok = True | |
304 |
|
302 | |||
305 | # check proper dir |
|
303 | # check proper dir | |
306 | if not os.path.isdir(path): |
|
304 | if not os.path.isdir(path): | |
307 | path_ok = False |
|
305 | path_ok = False | |
308 | log.error('Given path %s is not a valid directory', path) |
|
306 | log.error('Given path %s is not a valid directory', path) | |
309 |
|
307 | |||
310 | elif not os.path.isabs(path): |
|
308 | elif not os.path.isabs(path): | |
311 | path_ok = False |
|
309 | path_ok = False | |
312 | log.error('Given path %s is not an absolute path', path) |
|
310 | log.error('Given path %s is not an absolute path', path) | |
313 |
|
311 | |||
314 | # check if path is at least readable. |
|
312 | # check if path is at least readable. | |
315 | if not os.access(path, os.R_OK): |
|
313 | if not os.access(path, os.R_OK): | |
316 | path_ok = False |
|
314 | path_ok = False | |
317 | log.error('Given path %s is not readable', path) |
|
315 | log.error('Given path %s is not readable', path) | |
318 |
|
316 | |||
319 | # check write access, warn user about non writeable paths |
|
317 | # check write access, warn user about non writeable paths | |
320 | elif not os.access(path, os.W_OK) and path_ok: |
|
318 | elif not os.access(path, os.W_OK) and path_ok: | |
321 | log.warning('No write permission to given path %s', path) |
|
319 | log.warning('No write permission to given path %s', path) | |
322 | if not self._ask_ok('Given path %s is not writeable, do you want to ' |
|
320 | if not self._ask_ok('Given path %s is not writeable, do you want to ' | |
323 | 'continue with read only mode ? [y/n]' % (path,)): |
|
321 | 'continue with read only mode ? [y/n]' % (path,)): | |
324 | log.error('Canceled by user') |
|
322 | log.error('Canceled by user') | |
325 | sys.exit(-1) |
|
323 | sys.exit(-1) | |
326 |
|
324 | |||
327 | if retries == 0: |
|
325 | if retries == 0: | |
328 | sys.exit('max retries reached') |
|
326 | sys.exit('max retries reached') | |
329 | if not path_ok: |
|
327 | if not path_ok: | |
330 | if _path is not None: |
|
328 | if _path is not None: | |
331 | sys.exit('Invalid repo path: %s' % _path) |
|
329 | sys.exit('Invalid repo path: %s' % _path) | |
332 | retries -= 1 |
|
330 | retries -= 1 | |
333 | return self.prompt_repo_root_path(test_repo_path, retries) # recursing!!! |
|
331 | return self.prompt_repo_root_path(test_repo_path, retries) # recursing!!! | |
334 |
|
332 | |||
335 | real_path = os.path.normpath(os.path.realpath(path)) |
|
333 | real_path = os.path.normpath(os.path.realpath(path)) | |
336 |
|
334 | |||
337 | if real_path != os.path.normpath(path): |
|
335 | if real_path != os.path.normpath(path): | |
338 | log.warning('Using normalized path %s instead of %s', real_path, path) |
|
336 | log.warning('Using normalized path %s instead of %s', real_path, path) | |
339 |
|
337 | |||
340 | return real_path |
|
338 | return real_path | |
341 |
|
339 | |||
342 | def create_settings(self, repo_root_path): |
|
340 | def create_settings(self, repo_root_path): | |
343 | ui_config = [ |
|
341 | ui_config = [ | |
344 | ('paths', '/', repo_root_path, True), |
|
342 | ('paths', '/', repo_root_path, True), | |
345 | #('phases', 'publish', 'false', False) |
|
343 | #('phases', 'publish', 'false', False) | |
346 | ('hooks', Ui.HOOK_UPDATE, 'hg update >&2', False), |
|
344 | ('hooks', Ui.HOOK_UPDATE, 'hg update >&2', False), | |
347 | ('hooks', Ui.HOOK_REPO_SIZE, 'python:kallithea.lib.hooks.repo_size', True), |
|
345 | ('hooks', Ui.HOOK_REPO_SIZE, 'python:kallithea.lib.hooks.repo_size', True), | |
348 | ('extensions', 'largefiles', '', True), |
|
346 | ('extensions', 'largefiles', '', True), | |
349 | ('largefiles', 'usercache', os.path.join(repo_root_path, '.cache', 'largefiles'), True), |
|
347 | ('largefiles', 'usercache', os.path.join(repo_root_path, '.cache', 'largefiles'), True), | |
350 | ('extensions', 'hgsubversion', '', False), |
|
348 | ('extensions', 'hgsubversion', '', False), | |
351 | ('extensions', 'hggit', '', False), |
|
349 | ('extensions', 'hggit', '', False), | |
352 | ] |
|
350 | ] | |
353 | for ui_section, ui_key, ui_value, ui_active in ui_config: |
|
351 | for ui_section, ui_key, ui_value, ui_active in ui_config: | |
354 | ui_conf = Ui( |
|
352 | ui_conf = Ui( | |
355 | ui_section=ui_section, |
|
353 | ui_section=ui_section, | |
356 | ui_key=ui_key, |
|
354 | ui_key=ui_key, | |
357 | ui_value=ui_value, |
|
355 | ui_value=ui_value, | |
358 | ui_active=ui_active) |
|
356 | ui_active=ui_active) | |
359 | self.sa.add(ui_conf) |
|
357 | self.sa.add(ui_conf) | |
360 |
|
358 | |||
361 | settings = [ |
|
359 | settings = [ | |
362 | ('realm', 'Kallithea', 'unicode'), |
|
360 | ('realm', 'Kallithea', 'unicode'), | |
363 | ('title', '', 'unicode'), |
|
361 | ('title', '', 'unicode'), | |
364 | ('ga_code', '', 'unicode'), |
|
362 | ('ga_code', '', 'unicode'), | |
365 | ('show_public_icon', True, 'bool'), |
|
363 | ('show_public_icon', True, 'bool'), | |
366 | ('show_private_icon', True, 'bool'), |
|
364 | ('show_private_icon', True, 'bool'), | |
367 | ('stylify_metalabels', False, 'bool'), |
|
365 | ('stylify_metalabels', False, 'bool'), | |
368 | ('dashboard_items', 100, 'int'), # TODO: call it page_size |
|
366 | ('dashboard_items', 100, 'int'), # TODO: call it page_size | |
369 | ('admin_grid_items', 25, 'int'), |
|
367 | ('admin_grid_items', 25, 'int'), | |
370 | ('show_version', True, 'bool'), |
|
368 | ('show_version', True, 'bool'), | |
371 | ('use_gravatar', True, 'bool'), |
|
369 | ('use_gravatar', True, 'bool'), | |
372 | ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'), |
|
370 | ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'), | |
373 | ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'), |
|
371 | ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'), | |
374 | ('clone_ssh_tmpl', Repository.DEFAULT_CLONE_SSH, 'unicode'), |
|
372 | ('clone_ssh_tmpl', Repository.DEFAULT_CLONE_SSH, 'unicode'), | |
375 | ] |
|
373 | ] | |
376 | for key, val, type_ in settings: |
|
374 | for key, val, type_ in settings: | |
377 | sett = Setting(key, val, type_) |
|
375 | sett = Setting(key, val, type_) | |
378 | self.sa.add(sett) |
|
376 | self.sa.add(sett) | |
379 |
|
377 | |||
380 | self.create_auth_plugin_options() |
|
378 | self.create_auth_plugin_options() | |
381 | self.create_default_options() |
|
379 | self.create_default_options() | |
382 |
|
380 | |||
383 | log.info('Populated Ui and Settings defaults') |
|
381 | log.info('Populated Ui and Settings defaults') | |
384 |
|
382 | |||
385 | def create_user(self, username, password, email='', admin=False): |
|
383 | def create_user(self, username, password, email='', admin=False): | |
386 | log.info('creating user %s', username) |
|
384 | log.info('creating user %s', username) | |
387 | UserModel().create_or_update(username, password, email, |
|
385 | UserModel().create_or_update(username, password, email, | |
388 | firstname='Kallithea', lastname='Admin', |
|
386 | firstname='Kallithea', lastname='Admin', | |
389 | active=True, admin=admin, |
|
387 | active=True, admin=admin, | |
390 | extern_type=User.DEFAULT_AUTH_TYPE) |
|
388 | extern_type=User.DEFAULT_AUTH_TYPE) | |
391 |
|
389 | |||
392 | def create_default_user(self): |
|
390 | def create_default_user(self): | |
393 | log.info('creating default user') |
|
391 | log.info('creating default user') | |
394 | # create default user for handling default permissions. |
|
392 | # create default user for handling default permissions. | |
395 | user = UserModel().create_or_update(username=User.DEFAULT_USER, |
|
393 | user = UserModel().create_or_update(username=User.DEFAULT_USER, | |
396 | password=str(uuid.uuid1())[:20], |
|
394 | password=str(uuid.uuid1())[:20], | |
397 | email='anonymous@kallithea-scm.org', |
|
395 | email='anonymous@kallithea-scm.org', | |
398 | firstname='Anonymous', |
|
396 | firstname='Anonymous', | |
399 | lastname='User') |
|
397 | lastname='User') | |
400 | # based on configuration options activate/deactivate this user which |
|
398 | # based on configuration options activate/deactivate this user which | |
401 | # controls anonymous access |
|
399 | # controls anonymous access | |
402 | if self.cli_args.get('public_access') is False: |
|
400 | if self.cli_args.get('public_access') is False: | |
403 | log.info('Public access disabled') |
|
401 | log.info('Public access disabled') | |
404 | user.active = False |
|
402 | user.active = False | |
405 | Session().commit() |
|
403 | Session().commit() | |
406 |
|
404 | |||
407 | def create_permissions(self): |
|
405 | def create_permissions(self): | |
408 | """ |
|
406 | """ | |
409 | Creates all permissions defined in the system |
|
407 | Creates all permissions defined in the system | |
410 | """ |
|
408 | """ | |
411 | # module.(access|create|change|delete)_[name] |
|
409 | # module.(access|create|change|delete)_[name] | |
412 | # module.(none|read|write|admin) |
|
410 | # module.(none|read|write|admin) | |
413 | log.info('creating permissions') |
|
411 | log.info('creating permissions') | |
414 | PermissionModel().create_permissions() |
|
412 | PermissionModel().create_permissions() | |
415 |
|
413 | |||
416 | def populate_default_permissions(self): |
|
414 | def populate_default_permissions(self): | |
417 | """ |
|
415 | """ | |
418 | Populate default permissions. It will create only the default |
|
416 | Populate default permissions. It will create only the default | |
419 | permissions that are missing, and not alter already defined ones |
|
417 | permissions that are missing, and not alter already defined ones | |
420 | """ |
|
418 | """ | |
421 | log.info('creating default user permissions') |
|
419 | log.info('creating default user permissions') | |
422 | PermissionModel().create_default_permissions(user=User.DEFAULT_USER) |
|
420 | PermissionModel().create_default_permissions(user=User.DEFAULT_USER) |
@@ -1,142 +1,140 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 |
|
14 | |||
15 | from __future__ import print_function |
|
|||
16 |
|
||||
17 | import errno |
|
15 | import errno | |
18 | import os |
|
16 | import os | |
19 | from multiprocessing.util import Finalize |
|
17 | from multiprocessing.util import Finalize | |
20 |
|
18 | |||
21 | from kallithea.lib.compat import kill |
|
19 | from kallithea.lib.compat import kill | |
22 |
|
20 | |||
23 |
|
21 | |||
24 | class LockHeld(Exception): |
|
22 | class LockHeld(Exception): | |
25 | pass |
|
23 | pass | |
26 |
|
24 | |||
27 |
|
25 | |||
28 | class DaemonLock(object): |
|
26 | class DaemonLock(object): | |
29 | """daemon locking |
|
27 | """daemon locking | |
30 | USAGE: |
|
28 | USAGE: | |
31 | try: |
|
29 | try: | |
32 | l = DaemonLock('/path/tolockfile',desc='test lock') |
|
30 | l = DaemonLock('/path/tolockfile',desc='test lock') | |
33 | main() |
|
31 | main() | |
34 | l.release() |
|
32 | l.release() | |
35 | except LockHeld: |
|
33 | except LockHeld: | |
36 | sys.exit(1) |
|
34 | sys.exit(1) | |
37 | """ |
|
35 | """ | |
38 |
|
36 | |||
39 | def __init__(self, file_, callbackfn=None, |
|
37 | def __init__(self, file_, callbackfn=None, | |
40 | desc='daemon lock', debug=False): |
|
38 | desc='daemon lock', debug=False): | |
41 | self.pidfile = file_ |
|
39 | self.pidfile = file_ | |
42 | self.callbackfn = callbackfn |
|
40 | self.callbackfn = callbackfn | |
43 | self.desc = desc |
|
41 | self.desc = desc | |
44 | self.debug = debug |
|
42 | self.debug = debug | |
45 | self.held = False |
|
43 | self.held = False | |
46 | # run the lock automatically! |
|
44 | # run the lock automatically! | |
47 | self.lock() |
|
45 | self.lock() | |
48 | self._finalize = Finalize(self, DaemonLock._on_finalize, |
|
46 | self._finalize = Finalize(self, DaemonLock._on_finalize, | |
49 | args=(self, debug), exitpriority=10) |
|
47 | args=(self, debug), exitpriority=10) | |
50 |
|
48 | |||
51 | @staticmethod |
|
49 | @staticmethod | |
52 | def _on_finalize(lock, debug): |
|
50 | def _on_finalize(lock, debug): | |
53 | if lock.held: |
|
51 | if lock.held: | |
54 | if debug: |
|
52 | if debug: | |
55 | print('lock held finalizing and running lock.release()') |
|
53 | print('lock held finalizing and running lock.release()') | |
56 | lock.release() |
|
54 | lock.release() | |
57 |
|
55 | |||
58 | def lock(self): |
|
56 | def lock(self): | |
59 | """ |
|
57 | """ | |
60 | locking function, if lock is present it |
|
58 | locking function, if lock is present it | |
61 | will raise LockHeld exception |
|
59 | will raise LockHeld exception | |
62 | """ |
|
60 | """ | |
63 | lockname = str(os.getpid()) |
|
61 | lockname = str(os.getpid()) | |
64 | if self.debug: |
|
62 | if self.debug: | |
65 | print('running lock') |
|
63 | print('running lock') | |
66 | self.trylock() |
|
64 | self.trylock() | |
67 | self.makelock(lockname, self.pidfile) |
|
65 | self.makelock(lockname, self.pidfile) | |
68 | return True |
|
66 | return True | |
69 |
|
67 | |||
70 | def trylock(self): |
|
68 | def trylock(self): | |
71 | running_pid = False |
|
69 | running_pid = False | |
72 | if self.debug: |
|
70 | if self.debug: | |
73 | print('checking for already running process') |
|
71 | print('checking for already running process') | |
74 | try: |
|
72 | try: | |
75 | with open(self.pidfile, 'r') as f: |
|
73 | with open(self.pidfile, 'r') as f: | |
76 | try: |
|
74 | try: | |
77 | running_pid = int(f.readline()) |
|
75 | running_pid = int(f.readline()) | |
78 | except ValueError: |
|
76 | except ValueError: | |
79 | running_pid = -1 |
|
77 | running_pid = -1 | |
80 |
|
78 | |||
81 | if self.debug: |
|
79 | if self.debug: | |
82 | print('lock file present running_pid: %s, ' |
|
80 | print('lock file present running_pid: %s, ' | |
83 | 'checking for execution' % (running_pid,)) |
|
81 | 'checking for execution' % (running_pid,)) | |
84 | # Now we check the PID from lock file matches to the current |
|
82 | # Now we check the PID from lock file matches to the current | |
85 | # process PID |
|
83 | # process PID | |
86 | if running_pid: |
|
84 | if running_pid: | |
87 | try: |
|
85 | try: | |
88 | kill(running_pid, 0) |
|
86 | kill(running_pid, 0) | |
89 | except OSError as exc: |
|
87 | except OSError as exc: | |
90 | if exc.errno in (errno.ESRCH, errno.EPERM): |
|
88 | if exc.errno in (errno.ESRCH, errno.EPERM): | |
91 | print ("Lock File is there but" |
|
89 | print ("Lock File is there but" | |
92 | " the program is not running") |
|
90 | " the program is not running") | |
93 | print("Removing lock file for the: %s" % running_pid) |
|
91 | print("Removing lock file for the: %s" % running_pid) | |
94 | self.release() |
|
92 | self.release() | |
95 | else: |
|
93 | else: | |
96 | raise |
|
94 | raise | |
97 | else: |
|
95 | else: | |
98 | print("You already have an instance of the program running") |
|
96 | print("You already have an instance of the program running") | |
99 | print("It is running as process %s" % running_pid) |
|
97 | print("It is running as process %s" % running_pid) | |
100 | raise LockHeld() |
|
98 | raise LockHeld() | |
101 |
|
99 | |||
102 | except IOError as e: |
|
100 | except IOError as e: | |
103 | if e.errno != 2: |
|
101 | if e.errno != 2: | |
104 | raise |
|
102 | raise | |
105 |
|
103 | |||
106 | def release(self): |
|
104 | def release(self): | |
107 | """releases the pid by removing the pidfile |
|
105 | """releases the pid by removing the pidfile | |
108 | """ |
|
106 | """ | |
109 | if self.debug: |
|
107 | if self.debug: | |
110 | print('trying to release the pidlock') |
|
108 | print('trying to release the pidlock') | |
111 |
|
109 | |||
112 | if self.callbackfn: |
|
110 | if self.callbackfn: | |
113 | #execute callback function on release |
|
111 | #execute callback function on release | |
114 | if self.debug: |
|
112 | if self.debug: | |
115 | print('executing callback function %s' % self.callbackfn) |
|
113 | print('executing callback function %s' % self.callbackfn) | |
116 | self.callbackfn() |
|
114 | self.callbackfn() | |
117 | try: |
|
115 | try: | |
118 | if self.debug: |
|
116 | if self.debug: | |
119 | print('removing pidfile %s' % self.pidfile) |
|
117 | print('removing pidfile %s' % self.pidfile) | |
120 | os.remove(self.pidfile) |
|
118 | os.remove(self.pidfile) | |
121 | self.held = False |
|
119 | self.held = False | |
122 | except OSError as e: |
|
120 | except OSError as e: | |
123 | if self.debug: |
|
121 | if self.debug: | |
124 | print('removing pidfile failed %s' % e) |
|
122 | print('removing pidfile failed %s' % e) | |
125 | pass |
|
123 | pass | |
126 |
|
124 | |||
127 | def makelock(self, lockname, pidfile): |
|
125 | def makelock(self, lockname, pidfile): | |
128 | """ |
|
126 | """ | |
129 | this function will make an actual lock |
|
127 | this function will make an actual lock | |
130 |
|
128 | |||
131 | :param lockname: actual pid of file |
|
129 | :param lockname: actual pid of file | |
132 | :param pidfile: the file to write the pid in |
|
130 | :param pidfile: the file to write the pid in | |
133 | """ |
|
131 | """ | |
134 | if self.debug: |
|
132 | if self.debug: | |
135 | print('creating a file %s and pid: %s' % (pidfile, lockname)) |
|
133 | print('creating a file %s and pid: %s' % (pidfile, lockname)) | |
136 |
|
134 | |||
137 | dir_, file_ = os.path.split(pidfile) |
|
135 | dir_, file_ = os.path.split(pidfile) | |
138 | if not os.path.isdir(dir_): |
|
136 | if not os.path.isdir(dir_): | |
139 | os.makedirs(dir_) |
|
137 | os.makedirs(dir_) | |
140 | with open(self.pidfile, 'w') as f: |
|
138 | with open(self.pidfile, 'w') as f: | |
141 | f.write(lockname) |
|
139 | f.write(lockname) | |
142 | self.held = True |
|
140 | self.held = True |
@@ -1,613 +1,611 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.lib.utils2 |
|
15 | kallithea.lib.utils2 | |
16 | ~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Some simple helper functions. |
|
18 | Some simple helper functions. | |
19 | Note: all these functions should be independent of Kallithea classes, i.e. |
|
19 | Note: all these functions should be independent of Kallithea classes, i.e. | |
20 | models, controllers, etc. to prevent import cycles. |
|
20 | models, controllers, etc. to prevent import cycles. | |
21 |
|
21 | |||
22 | This file was forked by the Kallithea project in July 2014. |
|
22 | This file was forked by the Kallithea project in July 2014. | |
23 | Original author and date, and relevant copyright and licensing information is below: |
|
23 | Original author and date, and relevant copyright and licensing information is below: | |
24 | :created_on: Jan 5, 2011 |
|
24 | :created_on: Jan 5, 2011 | |
25 | :author: marcink |
|
25 | :author: marcink | |
26 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
26 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
27 | :license: GPLv3, see LICENSE.md for more details. |
|
27 | :license: GPLv3, see LICENSE.md for more details. | |
28 | """ |
|
28 | """ | |
29 |
|
29 | |||
30 | from __future__ import print_function |
|
|||
31 |
|
||||
32 | import binascii |
|
30 | import binascii | |
33 | import datetime |
|
31 | import datetime | |
34 | import json |
|
32 | import json | |
35 | import os |
|
33 | import os | |
36 | import pwd |
|
34 | import pwd | |
37 | import re |
|
35 | import re | |
38 | import time |
|
36 | import time | |
39 | import urllib.parse |
|
37 | import urllib.parse | |
40 |
|
38 | |||
41 | import urlobject |
|
39 | import urlobject | |
42 | from tg.i18n import ugettext as _ |
|
40 | from tg.i18n import ugettext as _ | |
43 | from tg.i18n import ungettext |
|
41 | from tg.i18n import ungettext | |
44 | from webhelpers2.text import collapse, remove_formatting, strip_tags |
|
42 | from webhelpers2.text import collapse, remove_formatting, strip_tags | |
45 |
|
43 | |||
46 | from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, safe_bytes, safe_str # re-export |
|
44 | from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, safe_bytes, safe_str # re-export | |
47 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
45 | from kallithea.lib.vcs.utils.lazy import LazyProperty | |
48 |
|
46 | |||
49 |
|
47 | |||
50 | def str2bool(_str): |
|
48 | def str2bool(_str): | |
51 | """ |
|
49 | """ | |
52 | returns True/False value from given string, it tries to translate the |
|
50 | returns True/False value from given string, it tries to translate the | |
53 | string into boolean |
|
51 | string into boolean | |
54 |
|
52 | |||
55 | :param _str: string value to translate into boolean |
|
53 | :param _str: string value to translate into boolean | |
56 | :rtype: boolean |
|
54 | :rtype: boolean | |
57 | :returns: boolean from given string |
|
55 | :returns: boolean from given string | |
58 | """ |
|
56 | """ | |
59 | if _str is None: |
|
57 | if _str is None: | |
60 | return False |
|
58 | return False | |
61 | if _str in (True, False): |
|
59 | if _str in (True, False): | |
62 | return _str |
|
60 | return _str | |
63 | _str = str(_str).strip().lower() |
|
61 | _str = str(_str).strip().lower() | |
64 | return _str in ('t', 'true', 'y', 'yes', 'on', '1') |
|
62 | return _str in ('t', 'true', 'y', 'yes', 'on', '1') | |
65 |
|
63 | |||
66 |
|
64 | |||
67 | def aslist(obj, sep=None, strip=True): |
|
65 | def aslist(obj, sep=None, strip=True): | |
68 | """ |
|
66 | """ | |
69 | Returns given string separated by sep as list |
|
67 | Returns given string separated by sep as list | |
70 |
|
68 | |||
71 | :param obj: |
|
69 | :param obj: | |
72 | :param sep: |
|
70 | :param sep: | |
73 | :param strip: |
|
71 | :param strip: | |
74 | """ |
|
72 | """ | |
75 | if isinstance(obj, (str)): |
|
73 | if isinstance(obj, (str)): | |
76 | lst = obj.split(sep) |
|
74 | lst = obj.split(sep) | |
77 | if strip: |
|
75 | if strip: | |
78 | lst = [v.strip() for v in lst] |
|
76 | lst = [v.strip() for v in lst] | |
79 | return lst |
|
77 | return lst | |
80 | elif isinstance(obj, (list, tuple)): |
|
78 | elif isinstance(obj, (list, tuple)): | |
81 | return obj |
|
79 | return obj | |
82 | elif obj is None: |
|
80 | elif obj is None: | |
83 | return [] |
|
81 | return [] | |
84 | else: |
|
82 | else: | |
85 | return [obj] |
|
83 | return [obj] | |
86 |
|
84 | |||
87 |
|
85 | |||
88 | def convert_line_endings(line, mode): |
|
86 | def convert_line_endings(line, mode): | |
89 | """ |
|
87 | """ | |
90 | Converts a given line "line end" according to given mode |
|
88 | Converts a given line "line end" according to given mode | |
91 |
|
89 | |||
92 | Available modes are:: |
|
90 | Available modes are:: | |
93 | 0 - Unix |
|
91 | 0 - Unix | |
94 | 1 - Mac |
|
92 | 1 - Mac | |
95 | 2 - DOS |
|
93 | 2 - DOS | |
96 |
|
94 | |||
97 | :param line: given line to convert |
|
95 | :param line: given line to convert | |
98 | :param mode: mode to convert to |
|
96 | :param mode: mode to convert to | |
99 | :rtype: str |
|
97 | :rtype: str | |
100 | :return: converted line according to mode |
|
98 | :return: converted line according to mode | |
101 | """ |
|
99 | """ | |
102 | if mode == 0: |
|
100 | if mode == 0: | |
103 | line = line.replace('\r\n', '\n') |
|
101 | line = line.replace('\r\n', '\n') | |
104 | line = line.replace('\r', '\n') |
|
102 | line = line.replace('\r', '\n') | |
105 | elif mode == 1: |
|
103 | elif mode == 1: | |
106 | line = line.replace('\r\n', '\r') |
|
104 | line = line.replace('\r\n', '\r') | |
107 | line = line.replace('\n', '\r') |
|
105 | line = line.replace('\n', '\r') | |
108 | elif mode == 2: |
|
106 | elif mode == 2: | |
109 | line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line) |
|
107 | line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line) | |
110 | return line |
|
108 | return line | |
111 |
|
109 | |||
112 |
|
110 | |||
113 | def detect_mode(line, default): |
|
111 | def detect_mode(line, default): | |
114 | """ |
|
112 | """ | |
115 | Detects line break for given line, if line break couldn't be found |
|
113 | Detects line break for given line, if line break couldn't be found | |
116 | given default value is returned |
|
114 | given default value is returned | |
117 |
|
115 | |||
118 | :param line: str line |
|
116 | :param line: str line | |
119 | :param default: default |
|
117 | :param default: default | |
120 | :rtype: int |
|
118 | :rtype: int | |
121 | :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS |
|
119 | :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS | |
122 | """ |
|
120 | """ | |
123 | if line.endswith('\r\n'): |
|
121 | if line.endswith('\r\n'): | |
124 | return 2 |
|
122 | return 2 | |
125 | elif line.endswith('\n'): |
|
123 | elif line.endswith('\n'): | |
126 | return 0 |
|
124 | return 0 | |
127 | elif line.endswith('\r'): |
|
125 | elif line.endswith('\r'): | |
128 | return 1 |
|
126 | return 1 | |
129 | else: |
|
127 | else: | |
130 | return default |
|
128 | return default | |
131 |
|
129 | |||
132 |
|
130 | |||
133 | def generate_api_key(): |
|
131 | def generate_api_key(): | |
134 | """ |
|
132 | """ | |
135 | Generates a random (presumably unique) API key. |
|
133 | Generates a random (presumably unique) API key. | |
136 |
|
134 | |||
137 | This value is used in URLs and "Bearer" HTTP Authorization headers, |
|
135 | This value is used in URLs and "Bearer" HTTP Authorization headers, | |
138 | which in practice means it should only contain URL-safe characters |
|
136 | which in practice means it should only contain URL-safe characters | |
139 | (RFC 3986): |
|
137 | (RFC 3986): | |
140 |
|
138 | |||
141 | unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" |
|
139 | unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" | |
142 | """ |
|
140 | """ | |
143 | # Hexadecimal certainly qualifies as URL-safe. |
|
141 | # Hexadecimal certainly qualifies as URL-safe. | |
144 | return ascii_str(binascii.hexlify(os.urandom(20))) |
|
142 | return ascii_str(binascii.hexlify(os.urandom(20))) | |
145 |
|
143 | |||
146 |
|
144 | |||
147 | def safe_int(val, default=None): |
|
145 | def safe_int(val, default=None): | |
148 | """ |
|
146 | """ | |
149 | Returns int() of val if val is not convertable to int use default |
|
147 | Returns int() of val if val is not convertable to int use default | |
150 | instead |
|
148 | instead | |
151 |
|
149 | |||
152 | :param val: |
|
150 | :param val: | |
153 | :param default: |
|
151 | :param default: | |
154 | """ |
|
152 | """ | |
155 | try: |
|
153 | try: | |
156 | val = int(val) |
|
154 | val = int(val) | |
157 | except (ValueError, TypeError): |
|
155 | except (ValueError, TypeError): | |
158 | val = default |
|
156 | val = default | |
159 | return val |
|
157 | return val | |
160 |
|
158 | |||
161 |
|
159 | |||
162 | def remove_suffix(s, suffix): |
|
160 | def remove_suffix(s, suffix): | |
163 | if s.endswith(suffix): |
|
161 | if s.endswith(suffix): | |
164 | s = s[:-1 * len(suffix)] |
|
162 | s = s[:-1 * len(suffix)] | |
165 | return s |
|
163 | return s | |
166 |
|
164 | |||
167 |
|
165 | |||
168 | def remove_prefix(s, prefix): |
|
166 | def remove_prefix(s, prefix): | |
169 | if s.startswith(prefix): |
|
167 | if s.startswith(prefix): | |
170 | s = s[len(prefix):] |
|
168 | s = s[len(prefix):] | |
171 | return s |
|
169 | return s | |
172 |
|
170 | |||
173 |
|
171 | |||
174 | def age(prevdate, show_short_version=False, now=None): |
|
172 | def age(prevdate, show_short_version=False, now=None): | |
175 | """ |
|
173 | """ | |
176 | turns a datetime into an age string. |
|
174 | turns a datetime into an age string. | |
177 | If show_short_version is True, then it will generate a not so accurate but shorter string, |
|
175 | If show_short_version is True, then it will generate a not so accurate but shorter string, | |
178 | example: 2days ago, instead of 2 days and 23 hours ago. |
|
176 | example: 2days ago, instead of 2 days and 23 hours ago. | |
179 |
|
177 | |||
180 | :param prevdate: datetime object |
|
178 | :param prevdate: datetime object | |
181 | :param show_short_version: if it should approximate the date and return a shorter string |
|
179 | :param show_short_version: if it should approximate the date and return a shorter string | |
182 | :rtype: str |
|
180 | :rtype: str | |
183 | :returns: str words describing age |
|
181 | :returns: str words describing age | |
184 | """ |
|
182 | """ | |
185 | now = now or datetime.datetime.now() |
|
183 | now = now or datetime.datetime.now() | |
186 | order = ['year', 'month', 'day', 'hour', 'minute', 'second'] |
|
184 | order = ['year', 'month', 'day', 'hour', 'minute', 'second'] | |
187 | deltas = {} |
|
185 | deltas = {} | |
188 | future = False |
|
186 | future = False | |
189 |
|
187 | |||
190 | if prevdate > now: |
|
188 | if prevdate > now: | |
191 | now, prevdate = prevdate, now |
|
189 | now, prevdate = prevdate, now | |
192 | future = True |
|
190 | future = True | |
193 | if future: |
|
191 | if future: | |
194 | prevdate = prevdate.replace(microsecond=0) |
|
192 | prevdate = prevdate.replace(microsecond=0) | |
195 | # Get date parts deltas |
|
193 | # Get date parts deltas | |
196 | from dateutil import relativedelta |
|
194 | from dateutil import relativedelta | |
197 | for part in order: |
|
195 | for part in order: | |
198 | d = relativedelta.relativedelta(now, prevdate) |
|
196 | d = relativedelta.relativedelta(now, prevdate) | |
199 | deltas[part] = getattr(d, part + 's') |
|
197 | deltas[part] = getattr(d, part + 's') | |
200 |
|
198 | |||
201 | # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00, |
|
199 | # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00, | |
202 | # not 1 hour, -59 minutes and -59 seconds) |
|
200 | # not 1 hour, -59 minutes and -59 seconds) | |
203 | for num, length in [(5, 60), (4, 60), (3, 24)]: # seconds, minutes, hours |
|
201 | for num, length in [(5, 60), (4, 60), (3, 24)]: # seconds, minutes, hours | |
204 | part = order[num] |
|
202 | part = order[num] | |
205 | carry_part = order[num - 1] |
|
203 | carry_part = order[num - 1] | |
206 |
|
204 | |||
207 | if deltas[part] < 0: |
|
205 | if deltas[part] < 0: | |
208 | deltas[part] += length |
|
206 | deltas[part] += length | |
209 | deltas[carry_part] -= 1 |
|
207 | deltas[carry_part] -= 1 | |
210 |
|
208 | |||
211 | # Same thing for days except that the increment depends on the (variable) |
|
209 | # Same thing for days except that the increment depends on the (variable) | |
212 | # number of days in the month |
|
210 | # number of days in the month | |
213 | month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] |
|
211 | month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] | |
214 | if deltas['day'] < 0: |
|
212 | if deltas['day'] < 0: | |
215 | if prevdate.month == 2 and (prevdate.year % 4 == 0 and |
|
213 | if prevdate.month == 2 and (prevdate.year % 4 == 0 and | |
216 | (prevdate.year % 100 != 0 or prevdate.year % 400 == 0) |
|
214 | (prevdate.year % 100 != 0 or prevdate.year % 400 == 0) | |
217 | ): |
|
215 | ): | |
218 | deltas['day'] += 29 |
|
216 | deltas['day'] += 29 | |
219 | else: |
|
217 | else: | |
220 | deltas['day'] += month_lengths[prevdate.month - 1] |
|
218 | deltas['day'] += month_lengths[prevdate.month - 1] | |
221 |
|
219 | |||
222 | deltas['month'] -= 1 |
|
220 | deltas['month'] -= 1 | |
223 |
|
221 | |||
224 | if deltas['month'] < 0: |
|
222 | if deltas['month'] < 0: | |
225 | deltas['month'] += 12 |
|
223 | deltas['month'] += 12 | |
226 | deltas['year'] -= 1 |
|
224 | deltas['year'] -= 1 | |
227 |
|
225 | |||
228 | # In short version, we want nicer handling of ages of more than a year |
|
226 | # In short version, we want nicer handling of ages of more than a year | |
229 | if show_short_version: |
|
227 | if show_short_version: | |
230 | if deltas['year'] == 1: |
|
228 | if deltas['year'] == 1: | |
231 | # ages between 1 and 2 years: show as months |
|
229 | # ages between 1 and 2 years: show as months | |
232 | deltas['month'] += 12 |
|
230 | deltas['month'] += 12 | |
233 | deltas['year'] = 0 |
|
231 | deltas['year'] = 0 | |
234 | if deltas['year'] >= 2: |
|
232 | if deltas['year'] >= 2: | |
235 | # ages 2+ years: round |
|
233 | # ages 2+ years: round | |
236 | if deltas['month'] > 6: |
|
234 | if deltas['month'] > 6: | |
237 | deltas['year'] += 1 |
|
235 | deltas['year'] += 1 | |
238 | deltas['month'] = 0 |
|
236 | deltas['month'] = 0 | |
239 |
|
237 | |||
240 | # Format the result |
|
238 | # Format the result | |
241 | fmt_funcs = { |
|
239 | fmt_funcs = { | |
242 | 'year': lambda d: ungettext('%d year', '%d years', d) % d, |
|
240 | 'year': lambda d: ungettext('%d year', '%d years', d) % d, | |
243 | 'month': lambda d: ungettext('%d month', '%d months', d) % d, |
|
241 | 'month': lambda d: ungettext('%d month', '%d months', d) % d, | |
244 | 'day': lambda d: ungettext('%d day', '%d days', d) % d, |
|
242 | 'day': lambda d: ungettext('%d day', '%d days', d) % d, | |
245 | 'hour': lambda d: ungettext('%d hour', '%d hours', d) % d, |
|
243 | 'hour': lambda d: ungettext('%d hour', '%d hours', d) % d, | |
246 | 'minute': lambda d: ungettext('%d minute', '%d minutes', d) % d, |
|
244 | 'minute': lambda d: ungettext('%d minute', '%d minutes', d) % d, | |
247 | 'second': lambda d: ungettext('%d second', '%d seconds', d) % d, |
|
245 | 'second': lambda d: ungettext('%d second', '%d seconds', d) % d, | |
248 | } |
|
246 | } | |
249 |
|
247 | |||
250 | for i, part in enumerate(order): |
|
248 | for i, part in enumerate(order): | |
251 | value = deltas[part] |
|
249 | value = deltas[part] | |
252 | if value == 0: |
|
250 | if value == 0: | |
253 | continue |
|
251 | continue | |
254 |
|
252 | |||
255 | if i < 5: |
|
253 | if i < 5: | |
256 | sub_part = order[i + 1] |
|
254 | sub_part = order[i + 1] | |
257 | sub_value = deltas[sub_part] |
|
255 | sub_value = deltas[sub_part] | |
258 | else: |
|
256 | else: | |
259 | sub_value = 0 |
|
257 | sub_value = 0 | |
260 |
|
258 | |||
261 | if sub_value == 0 or show_short_version: |
|
259 | if sub_value == 0 or show_short_version: | |
262 | if future: |
|
260 | if future: | |
263 | return _('in %s') % fmt_funcs[part](value) |
|
261 | return _('in %s') % fmt_funcs[part](value) | |
264 | else: |
|
262 | else: | |
265 | return _('%s ago') % fmt_funcs[part](value) |
|
263 | return _('%s ago') % fmt_funcs[part](value) | |
266 | if future: |
|
264 | if future: | |
267 | return _('in %s and %s') % (fmt_funcs[part](value), |
|
265 | return _('in %s and %s') % (fmt_funcs[part](value), | |
268 | fmt_funcs[sub_part](sub_value)) |
|
266 | fmt_funcs[sub_part](sub_value)) | |
269 | else: |
|
267 | else: | |
270 | return _('%s and %s ago') % (fmt_funcs[part](value), |
|
268 | return _('%s and %s ago') % (fmt_funcs[part](value), | |
271 | fmt_funcs[sub_part](sub_value)) |
|
269 | fmt_funcs[sub_part](sub_value)) | |
272 |
|
270 | |||
273 | return _('just now') |
|
271 | return _('just now') | |
274 |
|
272 | |||
275 |
|
273 | |||
276 | def uri_filter(uri): |
|
274 | def uri_filter(uri): | |
277 | """ |
|
275 | """ | |
278 | Removes user:password from given url string |
|
276 | Removes user:password from given url string | |
279 |
|
277 | |||
280 | :param uri: |
|
278 | :param uri: | |
281 | :rtype: str |
|
279 | :rtype: str | |
282 | :returns: filtered list of strings |
|
280 | :returns: filtered list of strings | |
283 | """ |
|
281 | """ | |
284 | if not uri: |
|
282 | if not uri: | |
285 | return [] |
|
283 | return [] | |
286 |
|
284 | |||
287 | proto = '' |
|
285 | proto = '' | |
288 |
|
286 | |||
289 | for pat in ('https://', 'http://', 'git://'): |
|
287 | for pat in ('https://', 'http://', 'git://'): | |
290 | if uri.startswith(pat): |
|
288 | if uri.startswith(pat): | |
291 | uri = uri[len(pat):] |
|
289 | uri = uri[len(pat):] | |
292 | proto = pat |
|
290 | proto = pat | |
293 | break |
|
291 | break | |
294 |
|
292 | |||
295 | # remove passwords and username |
|
293 | # remove passwords and username | |
296 | uri = uri[uri.find('@') + 1:] |
|
294 | uri = uri[uri.find('@') + 1:] | |
297 |
|
295 | |||
298 | # get the port |
|
296 | # get the port | |
299 | cred_pos = uri.find(':') |
|
297 | cred_pos = uri.find(':') | |
300 | if cred_pos == -1: |
|
298 | if cred_pos == -1: | |
301 | host, port = uri, None |
|
299 | host, port = uri, None | |
302 | else: |
|
300 | else: | |
303 | host, port = uri[:cred_pos], uri[cred_pos + 1:] |
|
301 | host, port = uri[:cred_pos], uri[cred_pos + 1:] | |
304 |
|
302 | |||
305 | return [_f for _f in [proto, host, port] if _f] |
|
303 | return [_f for _f in [proto, host, port] if _f] | |
306 |
|
304 | |||
307 |
|
305 | |||
308 | def credentials_filter(uri): |
|
306 | def credentials_filter(uri): | |
309 | """ |
|
307 | """ | |
310 | Returns a url with removed credentials |
|
308 | Returns a url with removed credentials | |
311 |
|
309 | |||
312 | :param uri: |
|
310 | :param uri: | |
313 | """ |
|
311 | """ | |
314 |
|
312 | |||
315 | uri = uri_filter(uri) |
|
313 | uri = uri_filter(uri) | |
316 | # check if we have port |
|
314 | # check if we have port | |
317 | if len(uri) > 2 and uri[2]: |
|
315 | if len(uri) > 2 and uri[2]: | |
318 | uri[2] = ':' + uri[2] |
|
316 | uri[2] = ':' + uri[2] | |
319 |
|
317 | |||
320 | return ''.join(uri) |
|
318 | return ''.join(uri) | |
321 |
|
319 | |||
322 |
|
320 | |||
323 | def get_clone_url(clone_uri_tmpl, prefix_url, repo_name, repo_id, username=None): |
|
321 | def get_clone_url(clone_uri_tmpl, prefix_url, repo_name, repo_id, username=None): | |
324 | parsed_url = urlobject.URLObject(prefix_url) |
|
322 | parsed_url = urlobject.URLObject(prefix_url) | |
325 | prefix = urllib.parse.unquote(parsed_url.path.rstrip('/')) |
|
323 | prefix = urllib.parse.unquote(parsed_url.path.rstrip('/')) | |
326 | try: |
|
324 | try: | |
327 | system_user = pwd.getpwuid(os.getuid()).pw_name |
|
325 | system_user = pwd.getpwuid(os.getuid()).pw_name | |
328 | except Exception: # TODO: support all systems - especially Windows |
|
326 | except Exception: # TODO: support all systems - especially Windows | |
329 | system_user = 'kallithea' # hardcoded default value ... |
|
327 | system_user = 'kallithea' # hardcoded default value ... | |
330 | args = { |
|
328 | args = { | |
331 | 'scheme': parsed_url.scheme, |
|
329 | 'scheme': parsed_url.scheme, | |
332 | 'user': urllib.parse.quote(username or ''), |
|
330 | 'user': urllib.parse.quote(username or ''), | |
333 | 'netloc': parsed_url.netloc + prefix, # like "hostname:port/prefix" (with optional ":port" and "/prefix") |
|
331 | 'netloc': parsed_url.netloc + prefix, # like "hostname:port/prefix" (with optional ":port" and "/prefix") | |
334 | 'prefix': prefix, # undocumented, empty or starting with / |
|
332 | 'prefix': prefix, # undocumented, empty or starting with / | |
335 | 'repo': repo_name, |
|
333 | 'repo': repo_name, | |
336 | 'repoid': str(repo_id), |
|
334 | 'repoid': str(repo_id), | |
337 | 'system_user': system_user, |
|
335 | 'system_user': system_user, | |
338 | 'hostname': parsed_url.hostname, |
|
336 | 'hostname': parsed_url.hostname, | |
339 | } |
|
337 | } | |
340 | url = re.sub('{([^{}]+)}', lambda m: args.get(m.group(1), m.group(0)), clone_uri_tmpl) |
|
338 | url = re.sub('{([^{}]+)}', lambda m: args.get(m.group(1), m.group(0)), clone_uri_tmpl) | |
341 |
|
339 | |||
342 | # remove leading @ sign if it's present. Case of empty user |
|
340 | # remove leading @ sign if it's present. Case of empty user | |
343 | url_obj = urlobject.URLObject(url) |
|
341 | url_obj = urlobject.URLObject(url) | |
344 | if not url_obj.username: |
|
342 | if not url_obj.username: | |
345 | url_obj = url_obj.with_username(None) |
|
343 | url_obj = url_obj.with_username(None) | |
346 |
|
344 | |||
347 | return str(url_obj) |
|
345 | return str(url_obj) | |
348 |
|
346 | |||
349 |
|
347 | |||
350 | def get_changeset_safe(repo, rev): |
|
348 | def get_changeset_safe(repo, rev): | |
351 | """ |
|
349 | """ | |
352 | Safe version of get_changeset if this changeset doesn't exists for a |
|
350 | Safe version of get_changeset if this changeset doesn't exists for a | |
353 | repo it returns a Dummy one instead |
|
351 | repo it returns a Dummy one instead | |
354 |
|
352 | |||
355 | :param repo: |
|
353 | :param repo: | |
356 | :param rev: |
|
354 | :param rev: | |
357 | """ |
|
355 | """ | |
358 | from kallithea.lib.vcs.backends.base import BaseRepository |
|
356 | from kallithea.lib.vcs.backends.base import BaseRepository | |
359 | from kallithea.lib.vcs.exceptions import RepositoryError |
|
357 | from kallithea.lib.vcs.exceptions import RepositoryError | |
360 | from kallithea.lib.vcs.backends.base import EmptyChangeset |
|
358 | from kallithea.lib.vcs.backends.base import EmptyChangeset | |
361 | if not isinstance(repo, BaseRepository): |
|
359 | if not isinstance(repo, BaseRepository): | |
362 | raise Exception('You must pass an Repository ' |
|
360 | raise Exception('You must pass an Repository ' | |
363 | 'object as first argument got %s' % type(repo)) |
|
361 | 'object as first argument got %s' % type(repo)) | |
364 |
|
362 | |||
365 | try: |
|
363 | try: | |
366 | cs = repo.get_changeset(rev) |
|
364 | cs = repo.get_changeset(rev) | |
367 | except (RepositoryError, LookupError): |
|
365 | except (RepositoryError, LookupError): | |
368 | cs = EmptyChangeset(requested_revision=rev) |
|
366 | cs = EmptyChangeset(requested_revision=rev) | |
369 | return cs |
|
367 | return cs | |
370 |
|
368 | |||
371 |
|
369 | |||
372 | def datetime_to_time(dt): |
|
370 | def datetime_to_time(dt): | |
373 | if dt: |
|
371 | if dt: | |
374 | return time.mktime(dt.timetuple()) |
|
372 | return time.mktime(dt.timetuple()) | |
375 |
|
373 | |||
376 |
|
374 | |||
377 | def time_to_datetime(tm): |
|
375 | def time_to_datetime(tm): | |
378 | if tm: |
|
376 | if tm: | |
379 | if isinstance(tm, str): |
|
377 | if isinstance(tm, str): | |
380 | try: |
|
378 | try: | |
381 | tm = float(tm) |
|
379 | tm = float(tm) | |
382 | except ValueError: |
|
380 | except ValueError: | |
383 | return |
|
381 | return | |
384 | return datetime.datetime.fromtimestamp(tm) |
|
382 | return datetime.datetime.fromtimestamp(tm) | |
385 |
|
383 | |||
386 |
|
384 | |||
387 | # Must match regexp in kallithea/public/js/base.js MentionsAutoComplete() |
|
385 | # Must match regexp in kallithea/public/js/base.js MentionsAutoComplete() | |
388 | # Check char before @ - it must not look like we are in an email addresses. |
|
386 | # Check char before @ - it must not look like we are in an email addresses. | |
389 | # Matching is greedy so we don't have to look beyond the end. |
|
387 | # Matching is greedy so we don't have to look beyond the end. | |
390 | MENTIONS_REGEX = re.compile(r'(?:^|(?<=[^a-zA-Z0-9]))@([a-zA-Z0-9][-_.a-zA-Z0-9]*[a-zA-Z0-9])') |
|
388 | MENTIONS_REGEX = re.compile(r'(?:^|(?<=[^a-zA-Z0-9]))@([a-zA-Z0-9][-_.a-zA-Z0-9]*[a-zA-Z0-9])') | |
391 |
|
389 | |||
392 |
|
390 | |||
393 | def extract_mentioned_usernames(text): |
|
391 | def extract_mentioned_usernames(text): | |
394 | r""" |
|
392 | r""" | |
395 | Returns list of (possible) usernames @mentioned in given text. |
|
393 | Returns list of (possible) usernames @mentioned in given text. | |
396 |
|
394 | |||
397 | >>> extract_mentioned_usernames('@1-2.a_X,@1234 not@not @ddd@not @n @ee @ff @gg, @gg;@hh @n\n@zz,') |
|
395 | >>> extract_mentioned_usernames('@1-2.a_X,@1234 not@not @ddd@not @n @ee @ff @gg, @gg;@hh @n\n@zz,') | |
398 | ['1-2.a_X', '1234', 'ddd', 'ee', 'ff', 'gg', 'gg', 'hh', 'zz'] |
|
396 | ['1-2.a_X', '1234', 'ddd', 'ee', 'ff', 'gg', 'gg', 'hh', 'zz'] | |
399 | """ |
|
397 | """ | |
400 | return MENTIONS_REGEX.findall(text) |
|
398 | return MENTIONS_REGEX.findall(text) | |
401 |
|
399 | |||
402 |
|
400 | |||
403 | def extract_mentioned_users(text): |
|
401 | def extract_mentioned_users(text): | |
404 | """ Returns set of actual database Users @mentioned in given text. """ |
|
402 | """ Returns set of actual database Users @mentioned in given text. """ | |
405 | from kallithea.model.db import User |
|
403 | from kallithea.model.db import User | |
406 | result = set() |
|
404 | result = set() | |
407 | for name in extract_mentioned_usernames(text): |
|
405 | for name in extract_mentioned_usernames(text): | |
408 | user = User.get_by_username(name, case_insensitive=True) |
|
406 | user = User.get_by_username(name, case_insensitive=True) | |
409 | if user is not None and not user.is_default_user: |
|
407 | if user is not None and not user.is_default_user: | |
410 | result.add(user) |
|
408 | result.add(user) | |
411 | return result |
|
409 | return result | |
412 |
|
410 | |||
413 |
|
411 | |||
414 | class AttributeDict(dict): |
|
412 | class AttributeDict(dict): | |
415 | def __getattr__(self, attr): |
|
413 | def __getattr__(self, attr): | |
416 | return self.get(attr, None) |
|
414 | return self.get(attr, None) | |
417 | __setattr__ = dict.__setitem__ |
|
415 | __setattr__ = dict.__setitem__ | |
418 | __delattr__ = dict.__delitem__ |
|
416 | __delattr__ = dict.__delitem__ | |
419 |
|
417 | |||
420 |
|
418 | |||
421 | def obfuscate_url_pw(engine): |
|
419 | def obfuscate_url_pw(engine): | |
422 | from sqlalchemy.engine import url as sa_url |
|
420 | from sqlalchemy.engine import url as sa_url | |
423 | from sqlalchemy.exc import ArgumentError |
|
421 | from sqlalchemy.exc import ArgumentError | |
424 | try: |
|
422 | try: | |
425 | _url = sa_url.make_url(engine or '') |
|
423 | _url = sa_url.make_url(engine or '') | |
426 | except ArgumentError: |
|
424 | except ArgumentError: | |
427 | return engine |
|
425 | return engine | |
428 | if _url.password: |
|
426 | if _url.password: | |
429 | _url.password = 'XXXXX' |
|
427 | _url.password = 'XXXXX' | |
430 | return str(_url) |
|
428 | return str(_url) | |
431 |
|
429 | |||
432 |
|
430 | |||
433 | class HookEnvironmentError(Exception): pass |
|
431 | class HookEnvironmentError(Exception): pass | |
434 |
|
432 | |||
435 |
|
433 | |||
436 | def get_hook_environment(): |
|
434 | def get_hook_environment(): | |
437 | """ |
|
435 | """ | |
438 | Get hook context by deserializing the global KALLITHEA_EXTRAS environment |
|
436 | Get hook context by deserializing the global KALLITHEA_EXTRAS environment | |
439 | variable. |
|
437 | variable. | |
440 |
|
438 | |||
441 | Called early in Git out-of-process hooks to get .ini config path so the |
|
439 | Called early in Git out-of-process hooks to get .ini config path so the | |
442 | basic environment can be configured properly. Also used in all hooks to get |
|
440 | basic environment can be configured properly. Also used in all hooks to get | |
443 | information about the action that triggered it. |
|
441 | information about the action that triggered it. | |
444 | """ |
|
442 | """ | |
445 |
|
443 | |||
446 | try: |
|
444 | try: | |
447 | kallithea_extras = os.environ['KALLITHEA_EXTRAS'] |
|
445 | kallithea_extras = os.environ['KALLITHEA_EXTRAS'] | |
448 | except KeyError: |
|
446 | except KeyError: | |
449 | raise HookEnvironmentError("Environment variable KALLITHEA_EXTRAS not found") |
|
447 | raise HookEnvironmentError("Environment variable KALLITHEA_EXTRAS not found") | |
450 |
|
448 | |||
451 | extras = json.loads(kallithea_extras) |
|
449 | extras = json.loads(kallithea_extras) | |
452 | for k in ['username', 'repository', 'scm', 'action', 'ip', 'config']: |
|
450 | for k in ['username', 'repository', 'scm', 'action', 'ip', 'config']: | |
453 | try: |
|
451 | try: | |
454 | extras[k] |
|
452 | extras[k] | |
455 | except KeyError: |
|
453 | except KeyError: | |
456 | raise HookEnvironmentError('Missing key %s in KALLITHEA_EXTRAS %s' % (k, extras)) |
|
454 | raise HookEnvironmentError('Missing key %s in KALLITHEA_EXTRAS %s' % (k, extras)) | |
457 |
|
455 | |||
458 | return AttributeDict(extras) |
|
456 | return AttributeDict(extras) | |
459 |
|
457 | |||
460 |
|
458 | |||
461 | def set_hook_environment(username, ip_addr, repo_name, repo_alias, action=None): |
|
459 | def set_hook_environment(username, ip_addr, repo_name, repo_alias, action=None): | |
462 | """Prepare global context for running hooks by serializing data in the |
|
460 | """Prepare global context for running hooks by serializing data in the | |
463 | global KALLITHEA_EXTRAS environment variable. |
|
461 | global KALLITHEA_EXTRAS environment variable. | |
464 |
|
462 | |||
465 | Most importantly, this allow Git hooks to do proper logging and updating of |
|
463 | Most importantly, this allow Git hooks to do proper logging and updating of | |
466 | caches after pushes. |
|
464 | caches after pushes. | |
467 |
|
465 | |||
468 | Must always be called before anything with hooks are invoked. |
|
466 | Must always be called before anything with hooks are invoked. | |
469 | """ |
|
467 | """ | |
470 | from kallithea import CONFIG |
|
468 | from kallithea import CONFIG | |
471 | extras = { |
|
469 | extras = { | |
472 | 'ip': ip_addr, # used in log_push/pull_action action_logger |
|
470 | 'ip': ip_addr, # used in log_push/pull_action action_logger | |
473 | 'username': username, |
|
471 | 'username': username, | |
474 | 'action': action or 'push_local', # used in log_push_action_raw_ids action_logger |
|
472 | 'action': action or 'push_local', # used in log_push_action_raw_ids action_logger | |
475 | 'repository': repo_name, |
|
473 | 'repository': repo_name, | |
476 | 'scm': repo_alias, # used to pick hack in log_push_action_raw_ids |
|
474 | 'scm': repo_alias, # used to pick hack in log_push_action_raw_ids | |
477 | 'config': CONFIG['__file__'], # used by git hook to read config |
|
475 | 'config': CONFIG['__file__'], # used by git hook to read config | |
478 | } |
|
476 | } | |
479 | os.environ['KALLITHEA_EXTRAS'] = json.dumps(extras) |
|
477 | os.environ['KALLITHEA_EXTRAS'] = json.dumps(extras) | |
480 |
|
478 | |||
481 |
|
479 | |||
482 | def get_current_authuser(): |
|
480 | def get_current_authuser(): | |
483 | """ |
|
481 | """ | |
484 | Gets kallithea user from threadlocal tmpl_context variable if it's |
|
482 | Gets kallithea user from threadlocal tmpl_context variable if it's | |
485 | defined, else returns None. |
|
483 | defined, else returns None. | |
486 | """ |
|
484 | """ | |
487 | from tg import tmpl_context |
|
485 | from tg import tmpl_context | |
488 | try: |
|
486 | try: | |
489 | return getattr(tmpl_context, 'authuser', None) |
|
487 | return getattr(tmpl_context, 'authuser', None) | |
490 | except TypeError: # No object (name: context) has been registered for this thread |
|
488 | except TypeError: # No object (name: context) has been registered for this thread | |
491 | return None |
|
489 | return None | |
492 |
|
490 | |||
493 |
|
491 | |||
494 | class OptionalAttr(object): |
|
492 | class OptionalAttr(object): | |
495 | """ |
|
493 | """ | |
496 | Special Optional Option that defines other attribute. Example:: |
|
494 | Special Optional Option that defines other attribute. Example:: | |
497 |
|
495 | |||
498 | def test(apiuser, userid=Optional(OAttr('apiuser')): |
|
496 | def test(apiuser, userid=Optional(OAttr('apiuser')): | |
499 | user = Optional.extract(userid) |
|
497 | user = Optional.extract(userid) | |
500 | # calls |
|
498 | # calls | |
501 |
|
499 | |||
502 | """ |
|
500 | """ | |
503 |
|
501 | |||
504 | def __init__(self, attr_name): |
|
502 | def __init__(self, attr_name): | |
505 | self.attr_name = attr_name |
|
503 | self.attr_name = attr_name | |
506 |
|
504 | |||
507 | def __repr__(self): |
|
505 | def __repr__(self): | |
508 | return '<OptionalAttr:%s>' % self.attr_name |
|
506 | return '<OptionalAttr:%s>' % self.attr_name | |
509 |
|
507 | |||
510 | def __call__(self): |
|
508 | def __call__(self): | |
511 | return self |
|
509 | return self | |
512 |
|
510 | |||
513 |
|
511 | |||
514 | # alias |
|
512 | # alias | |
515 | OAttr = OptionalAttr |
|
513 | OAttr = OptionalAttr | |
516 |
|
514 | |||
517 |
|
515 | |||
518 | class Optional(object): |
|
516 | class Optional(object): | |
519 | """ |
|
517 | """ | |
520 | Defines an optional parameter:: |
|
518 | Defines an optional parameter:: | |
521 |
|
519 | |||
522 | param = param.getval() if isinstance(param, Optional) else param |
|
520 | param = param.getval() if isinstance(param, Optional) else param | |
523 | param = param() if isinstance(param, Optional) else param |
|
521 | param = param() if isinstance(param, Optional) else param | |
524 |
|
522 | |||
525 | is equivalent of:: |
|
523 | is equivalent of:: | |
526 |
|
524 | |||
527 | param = Optional.extract(param) |
|
525 | param = Optional.extract(param) | |
528 |
|
526 | |||
529 | """ |
|
527 | """ | |
530 |
|
528 | |||
531 | def __init__(self, type_): |
|
529 | def __init__(self, type_): | |
532 | self.type_ = type_ |
|
530 | self.type_ = type_ | |
533 |
|
531 | |||
534 | def __repr__(self): |
|
532 | def __repr__(self): | |
535 | return '<Optional:%s>' % self.type_.__repr__() |
|
533 | return '<Optional:%s>' % self.type_.__repr__() | |
536 |
|
534 | |||
537 | def __call__(self): |
|
535 | def __call__(self): | |
538 | return self.getval() |
|
536 | return self.getval() | |
539 |
|
537 | |||
540 | def getval(self): |
|
538 | def getval(self): | |
541 | """ |
|
539 | """ | |
542 | returns value from this Optional instance |
|
540 | returns value from this Optional instance | |
543 | """ |
|
541 | """ | |
544 | if isinstance(self.type_, OAttr): |
|
542 | if isinstance(self.type_, OAttr): | |
545 | # use params name |
|
543 | # use params name | |
546 | return self.type_.attr_name |
|
544 | return self.type_.attr_name | |
547 | return self.type_ |
|
545 | return self.type_ | |
548 |
|
546 | |||
549 | @classmethod |
|
547 | @classmethod | |
550 | def extract(cls, val): |
|
548 | def extract(cls, val): | |
551 | """ |
|
549 | """ | |
552 | Extracts value from Optional() instance |
|
550 | Extracts value from Optional() instance | |
553 |
|
551 | |||
554 | :param val: |
|
552 | :param val: | |
555 | :return: original value if it's not Optional instance else |
|
553 | :return: original value if it's not Optional instance else | |
556 | value of instance |
|
554 | value of instance | |
557 | """ |
|
555 | """ | |
558 | if isinstance(val, cls): |
|
556 | if isinstance(val, cls): | |
559 | return val.getval() |
|
557 | return val.getval() | |
560 | return val |
|
558 | return val | |
561 |
|
559 | |||
562 |
|
560 | |||
563 | def urlreadable(s, _cleanstringsub=re.compile('[^-a-zA-Z0-9./]+').sub): |
|
561 | def urlreadable(s, _cleanstringsub=re.compile('[^-a-zA-Z0-9./]+').sub): | |
564 | return _cleanstringsub('_', s).rstrip('_') |
|
562 | return _cleanstringsub('_', s).rstrip('_') | |
565 |
|
563 | |||
566 |
|
564 | |||
567 | def recursive_replace(str_, replace=' '): |
|
565 | def recursive_replace(str_, replace=' '): | |
568 | """ |
|
566 | """ | |
569 | Recursive replace of given sign to just one instance |
|
567 | Recursive replace of given sign to just one instance | |
570 |
|
568 | |||
571 | :param str_: given string |
|
569 | :param str_: given string | |
572 | :param replace: char to find and replace multiple instances |
|
570 | :param replace: char to find and replace multiple instances | |
573 |
|
571 | |||
574 | Examples:: |
|
572 | Examples:: | |
575 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') |
|
573 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') | |
576 | 'Mighty-Mighty-Bo-sstones' |
|
574 | 'Mighty-Mighty-Bo-sstones' | |
577 | """ |
|
575 | """ | |
578 |
|
576 | |||
579 | if str_.find(replace * 2) == -1: |
|
577 | if str_.find(replace * 2) == -1: | |
580 | return str_ |
|
578 | return str_ | |
581 | else: |
|
579 | else: | |
582 | str_ = str_.replace(replace * 2, replace) |
|
580 | str_ = str_.replace(replace * 2, replace) | |
583 | return recursive_replace(str_, replace) |
|
581 | return recursive_replace(str_, replace) | |
584 |
|
582 | |||
585 |
|
583 | |||
586 | def repo_name_slug(value): |
|
584 | def repo_name_slug(value): | |
587 | """ |
|
585 | """ | |
588 | Return slug of name of repository |
|
586 | Return slug of name of repository | |
589 | This function is called on each creation/modification |
|
587 | This function is called on each creation/modification | |
590 | of repository to prevent bad names in repo |
|
588 | of repository to prevent bad names in repo | |
591 | """ |
|
589 | """ | |
592 |
|
590 | |||
593 | slug = remove_formatting(value) |
|
591 | slug = remove_formatting(value) | |
594 | slug = strip_tags(slug) |
|
592 | slug = strip_tags(slug) | |
595 |
|
593 | |||
596 | for c in r"""`?=[]\;'"<>,/~!@#$%^&*()+{}|: """: |
|
594 | for c in r"""`?=[]\;'"<>,/~!@#$%^&*()+{}|: """: | |
597 | slug = slug.replace(c, '-') |
|
595 | slug = slug.replace(c, '-') | |
598 | slug = recursive_replace(slug, '-') |
|
596 | slug = recursive_replace(slug, '-') | |
599 | slug = collapse(slug, '-') |
|
597 | slug = collapse(slug, '-') | |
600 | return slug |
|
598 | return slug | |
601 |
|
599 | |||
602 |
|
600 | |||
603 | def ask_ok(prompt, retries=4, complaint='Yes or no please!'): |
|
601 | def ask_ok(prompt, retries=4, complaint='Yes or no please!'): | |
604 | while True: |
|
602 | while True: | |
605 | ok = input(prompt) |
|
603 | ok = input(prompt) | |
606 | if ok in ('y', 'ye', 'yes'): |
|
604 | if ok in ('y', 'ye', 'yes'): | |
607 | return True |
|
605 | return True | |
608 | if ok in ('n', 'no', 'nop', 'nope'): |
|
606 | if ok in ('n', 'no', 'nop', 'nope'): | |
609 | return False |
|
607 | return False | |
610 | retries = retries - 1 |
|
608 | retries = retries - 1 | |
611 | if retries < 0: |
|
609 | if retries < 0: | |
612 | raise IOError |
|
610 | raise IOError | |
613 | print(complaint) |
|
611 | print(complaint) |
@@ -1,220 +1,219 b'' | |||||
1 | """ |
|
1 | """ | |
2 | Utilities aimed to help achieve mostly basic tasks. |
|
2 | Utilities aimed to help achieve mostly basic tasks. | |
3 | """ |
|
3 | """ | |
4 | from __future__ import division |
|
|||
5 |
|
4 | |||
6 | import datetime |
|
5 | import datetime | |
7 | import os |
|
6 | import os | |
8 | import re |
|
7 | import re | |
9 | import time |
|
8 | import time | |
10 |
|
9 | |||
11 | from kallithea.lib.vcs.exceptions import RepositoryError, VCSError |
|
10 | from kallithea.lib.vcs.exceptions import RepositoryError, VCSError | |
12 | from kallithea.lib.vcs.utils.paths import abspath |
|
11 | from kallithea.lib.vcs.utils.paths import abspath | |
13 |
|
12 | |||
14 |
|
13 | |||
15 | ALIASES = ['hg', 'git'] |
|
14 | ALIASES = ['hg', 'git'] | |
16 |
|
15 | |||
17 |
|
16 | |||
18 | def get_scm(path, search_up=False, explicit_alias=None): |
|
17 | def get_scm(path, search_up=False, explicit_alias=None): | |
19 | """ |
|
18 | """ | |
20 | Returns one of alias from ``ALIASES`` (in order of precedence same as |
|
19 | Returns one of alias from ``ALIASES`` (in order of precedence same as | |
21 | shortcuts given in ``ALIASES``) and top working dir path for the given |
|
20 | shortcuts given in ``ALIASES``) and top working dir path for the given | |
22 | argument. If no scm-specific directory is found or more than one scm is |
|
21 | argument. If no scm-specific directory is found or more than one scm is | |
23 | found at that directory, ``VCSError`` is raised. |
|
22 | found at that directory, ``VCSError`` is raised. | |
24 |
|
23 | |||
25 | :param search_up: if set to ``True``, this function would try to |
|
24 | :param search_up: if set to ``True``, this function would try to | |
26 | move up to parent directory every time no scm is recognized for the |
|
25 | move up to parent directory every time no scm is recognized for the | |
27 | currently checked path. Default: ``False``. |
|
26 | currently checked path. Default: ``False``. | |
28 | :param explicit_alias: can be one of available backend aliases, when given |
|
27 | :param explicit_alias: can be one of available backend aliases, when given | |
29 | it will return given explicit alias in repositories under more than one |
|
28 | it will return given explicit alias in repositories under more than one | |
30 | version control, if explicit_alias is different than found it will raise |
|
29 | version control, if explicit_alias is different than found it will raise | |
31 | VCSError |
|
30 | VCSError | |
32 | """ |
|
31 | """ | |
33 | if not os.path.isdir(path): |
|
32 | if not os.path.isdir(path): | |
34 | raise VCSError("Given path %s is not a directory" % path) |
|
33 | raise VCSError("Given path %s is not a directory" % path) | |
35 |
|
34 | |||
36 | while True: |
|
35 | while True: | |
37 | found_scms = [(scm, path) for scm in get_scms_for_path(path)] |
|
36 | found_scms = [(scm, path) for scm in get_scms_for_path(path)] | |
38 | if found_scms or not search_up: |
|
37 | if found_scms or not search_up: | |
39 | break |
|
38 | break | |
40 | newpath = abspath(path, '..') |
|
39 | newpath = abspath(path, '..') | |
41 | if newpath == path: |
|
40 | if newpath == path: | |
42 | break |
|
41 | break | |
43 | path = newpath |
|
42 | path = newpath | |
44 |
|
43 | |||
45 | if len(found_scms) > 1: |
|
44 | if len(found_scms) > 1: | |
46 | for scm in found_scms: |
|
45 | for scm in found_scms: | |
47 | if scm[0] == explicit_alias: |
|
46 | if scm[0] == explicit_alias: | |
48 | return scm |
|
47 | return scm | |
49 | raise VCSError('More than one [%s] scm found at given path %s' |
|
48 | raise VCSError('More than one [%s] scm found at given path %s' | |
50 | % (', '.join((x[0] for x in found_scms)), path)) |
|
49 | % (', '.join((x[0] for x in found_scms)), path)) | |
51 |
|
50 | |||
52 | if len(found_scms) == 0: |
|
51 | if len(found_scms) == 0: | |
53 | raise VCSError('No scm found at given path %s' % path) |
|
52 | raise VCSError('No scm found at given path %s' % path) | |
54 |
|
53 | |||
55 | return found_scms[0] |
|
54 | return found_scms[0] | |
56 |
|
55 | |||
57 |
|
56 | |||
58 | def get_scms_for_path(path): |
|
57 | def get_scms_for_path(path): | |
59 | """ |
|
58 | """ | |
60 | Returns all scm's found at the given path. If no scm is recognized |
|
59 | Returns all scm's found at the given path. If no scm is recognized | |
61 | - empty list is returned. |
|
60 | - empty list is returned. | |
62 |
|
61 | |||
63 | :param path: path to directory which should be checked. May be callable. |
|
62 | :param path: path to directory which should be checked. May be callable. | |
64 |
|
63 | |||
65 | :raises VCSError: if given ``path`` is not a directory |
|
64 | :raises VCSError: if given ``path`` is not a directory | |
66 | """ |
|
65 | """ | |
67 | from kallithea.lib.vcs.backends import get_backend |
|
66 | from kallithea.lib.vcs.backends import get_backend | |
68 | if hasattr(path, '__call__'): |
|
67 | if hasattr(path, '__call__'): | |
69 | path = path() |
|
68 | path = path() | |
70 | if not os.path.isdir(path): |
|
69 | if not os.path.isdir(path): | |
71 | raise VCSError("Given path %r is not a directory" % path) |
|
70 | raise VCSError("Given path %r is not a directory" % path) | |
72 |
|
71 | |||
73 | result = [] |
|
72 | result = [] | |
74 | for key in ALIASES: |
|
73 | for key in ALIASES: | |
75 | # find .hg / .git |
|
74 | # find .hg / .git | |
76 | dirname = os.path.join(path, '.' + key) |
|
75 | dirname = os.path.join(path, '.' + key) | |
77 | if os.path.isdir(dirname): |
|
76 | if os.path.isdir(dirname): | |
78 | result.append(key) |
|
77 | result.append(key) | |
79 | continue |
|
78 | continue | |
80 | # find rm__.hg / rm__.git too - left overs from old method for deleting |
|
79 | # find rm__.hg / rm__.git too - left overs from old method for deleting | |
81 | dirname = os.path.join(path, 'rm__.' + key) |
|
80 | dirname = os.path.join(path, 'rm__.' + key) | |
82 | if os.path.isdir(dirname): |
|
81 | if os.path.isdir(dirname): | |
83 | return result |
|
82 | return result | |
84 | # We still need to check if it's not bare repository as |
|
83 | # We still need to check if it's not bare repository as | |
85 | # bare repos don't have working directories |
|
84 | # bare repos don't have working directories | |
86 | try: |
|
85 | try: | |
87 | get_backend(key)(path) |
|
86 | get_backend(key)(path) | |
88 | result.append(key) |
|
87 | result.append(key) | |
89 | continue |
|
88 | continue | |
90 | except RepositoryError: |
|
89 | except RepositoryError: | |
91 | # Wrong backend |
|
90 | # Wrong backend | |
92 | pass |
|
91 | pass | |
93 | except VCSError: |
|
92 | except VCSError: | |
94 | # No backend at all |
|
93 | # No backend at all | |
95 | pass |
|
94 | pass | |
96 | return result |
|
95 | return result | |
97 |
|
96 | |||
98 |
|
97 | |||
99 | def get_highlighted_code(name, code, type='terminal'): |
|
98 | def get_highlighted_code(name, code, type='terminal'): | |
100 | """ |
|
99 | """ | |
101 | If pygments are available on the system |
|
100 | If pygments are available on the system | |
102 | then returned output is colored. Otherwise |
|
101 | then returned output is colored. Otherwise | |
103 | unchanged content is returned. |
|
102 | unchanged content is returned. | |
104 | """ |
|
103 | """ | |
105 | import logging |
|
104 | import logging | |
106 | try: |
|
105 | try: | |
107 | import pygments |
|
106 | import pygments | |
108 | pygments |
|
107 | pygments | |
109 | except ImportError: |
|
108 | except ImportError: | |
110 | return code |
|
109 | return code | |
111 | from pygments import highlight |
|
110 | from pygments import highlight | |
112 | from pygments.lexers import guess_lexer_for_filename, ClassNotFound |
|
111 | from pygments.lexers import guess_lexer_for_filename, ClassNotFound | |
113 | from pygments.formatters import TerminalFormatter |
|
112 | from pygments.formatters import TerminalFormatter | |
114 |
|
113 | |||
115 | try: |
|
114 | try: | |
116 | lexer = guess_lexer_for_filename(name, code) |
|
115 | lexer = guess_lexer_for_filename(name, code) | |
117 | formatter = TerminalFormatter() |
|
116 | formatter = TerminalFormatter() | |
118 | content = highlight(code, lexer, formatter) |
|
117 | content = highlight(code, lexer, formatter) | |
119 | except ClassNotFound: |
|
118 | except ClassNotFound: | |
120 | logging.debug("Couldn't guess Lexer, will not use pygments.") |
|
119 | logging.debug("Couldn't guess Lexer, will not use pygments.") | |
121 | content = code |
|
120 | content = code | |
122 | return content |
|
121 | return content | |
123 |
|
122 | |||
124 |
|
123 | |||
125 | def parse_changesets(text): |
|
124 | def parse_changesets(text): | |
126 | """ |
|
125 | """ | |
127 | Returns dictionary with *start*, *main* and *end* ids. |
|
126 | Returns dictionary with *start*, *main* and *end* ids. | |
128 |
|
127 | |||
129 | Examples:: |
|
128 | Examples:: | |
130 |
|
129 | |||
131 | >>> parse_changesets('aaabbb') |
|
130 | >>> parse_changesets('aaabbb') | |
132 | {'start': None, 'main': 'aaabbb', 'end': None} |
|
131 | {'start': None, 'main': 'aaabbb', 'end': None} | |
133 | >>> parse_changesets('aaabbb..cccddd') |
|
132 | >>> parse_changesets('aaabbb..cccddd') | |
134 | {'start': 'aaabbb', 'end': 'cccddd', 'main': None} |
|
133 | {'start': 'aaabbb', 'end': 'cccddd', 'main': None} | |
135 |
|
134 | |||
136 | """ |
|
135 | """ | |
137 | text = text.strip() |
|
136 | text = text.strip() | |
138 | CID_RE = r'[a-zA-Z0-9]+' |
|
137 | CID_RE = r'[a-zA-Z0-9]+' | |
139 | if '..' not in text: |
|
138 | if '..' not in text: | |
140 | m = re.match(r'^(?P<cid>%s)$' % CID_RE, text) |
|
139 | m = re.match(r'^(?P<cid>%s)$' % CID_RE, text) | |
141 | if m: |
|
140 | if m: | |
142 | return { |
|
141 | return { | |
143 | 'start': None, |
|
142 | 'start': None, | |
144 | 'main': text, |
|
143 | 'main': text, | |
145 | 'end': None, |
|
144 | 'end': None, | |
146 | } |
|
145 | } | |
147 | else: |
|
146 | else: | |
148 | RE = r'^(?P<start>%s)?\.{2,3}(?P<end>%s)?$' % (CID_RE, CID_RE) |
|
147 | RE = r'^(?P<start>%s)?\.{2,3}(?P<end>%s)?$' % (CID_RE, CID_RE) | |
149 | m = re.match(RE, text) |
|
148 | m = re.match(RE, text) | |
150 | if m: |
|
149 | if m: | |
151 | result = m.groupdict() |
|
150 | result = m.groupdict() | |
152 | result['main'] = None |
|
151 | result['main'] = None | |
153 | return result |
|
152 | return result | |
154 | raise ValueError("IDs not recognized") |
|
153 | raise ValueError("IDs not recognized") | |
155 |
|
154 | |||
156 |
|
155 | |||
157 | def parse_datetime(text): |
|
156 | def parse_datetime(text): | |
158 | """ |
|
157 | """ | |
159 | Parses given text and returns ``datetime.datetime`` instance or raises |
|
158 | Parses given text and returns ``datetime.datetime`` instance or raises | |
160 | ``ValueError``. |
|
159 | ``ValueError``. | |
161 |
|
160 | |||
162 | :param text: string of desired date/datetime or something more verbose, |
|
161 | :param text: string of desired date/datetime or something more verbose, | |
163 | like *yesterday*, *2weeks 3days*, etc. |
|
162 | like *yesterday*, *2weeks 3days*, etc. | |
164 | """ |
|
163 | """ | |
165 |
|
164 | |||
166 | text = text.strip().lower() |
|
165 | text = text.strip().lower() | |
167 |
|
166 | |||
168 | INPUT_FORMATS = ( |
|
167 | INPUT_FORMATS = ( | |
169 | '%Y-%m-%d %H:%M:%S', |
|
168 | '%Y-%m-%d %H:%M:%S', | |
170 | '%Y-%m-%d %H:%M', |
|
169 | '%Y-%m-%d %H:%M', | |
171 | '%Y-%m-%d', |
|
170 | '%Y-%m-%d', | |
172 | '%m/%d/%Y %H:%M:%S', |
|
171 | '%m/%d/%Y %H:%M:%S', | |
173 | '%m/%d/%Y %H:%M', |
|
172 | '%m/%d/%Y %H:%M', | |
174 | '%m/%d/%Y', |
|
173 | '%m/%d/%Y', | |
175 | '%m/%d/%y %H:%M:%S', |
|
174 | '%m/%d/%y %H:%M:%S', | |
176 | '%m/%d/%y %H:%M', |
|
175 | '%m/%d/%y %H:%M', | |
177 | '%m/%d/%y', |
|
176 | '%m/%d/%y', | |
178 | ) |
|
177 | ) | |
179 | for format in INPUT_FORMATS: |
|
178 | for format in INPUT_FORMATS: | |
180 | try: |
|
179 | try: | |
181 | return datetime.datetime(*time.strptime(text, format)[:6]) |
|
180 | return datetime.datetime(*time.strptime(text, format)[:6]) | |
182 | except ValueError: |
|
181 | except ValueError: | |
183 | pass |
|
182 | pass | |
184 |
|
183 | |||
185 | # Try descriptive texts |
|
184 | # Try descriptive texts | |
186 | if text == 'tomorrow': |
|
185 | if text == 'tomorrow': | |
187 | future = datetime.datetime.now() + datetime.timedelta(days=1) |
|
186 | future = datetime.datetime.now() + datetime.timedelta(days=1) | |
188 | args = future.timetuple()[:3] + (23, 59, 59) |
|
187 | args = future.timetuple()[:3] + (23, 59, 59) | |
189 | return datetime.datetime(*args) |
|
188 | return datetime.datetime(*args) | |
190 | elif text == 'today': |
|
189 | elif text == 'today': | |
191 | return datetime.datetime(*datetime.datetime.today().timetuple()[:3]) |
|
190 | return datetime.datetime(*datetime.datetime.today().timetuple()[:3]) | |
192 | elif text == 'now': |
|
191 | elif text == 'now': | |
193 | return datetime.datetime.now() |
|
192 | return datetime.datetime.now() | |
194 | elif text == 'yesterday': |
|
193 | elif text == 'yesterday': | |
195 | past = datetime.datetime.now() - datetime.timedelta(days=1) |
|
194 | past = datetime.datetime.now() - datetime.timedelta(days=1) | |
196 | return datetime.datetime(*past.timetuple()[:3]) |
|
195 | return datetime.datetime(*past.timetuple()[:3]) | |
197 | else: |
|
196 | else: | |
198 | days = 0 |
|
197 | days = 0 | |
199 | matched = re.match( |
|
198 | matched = re.match( | |
200 | r'^((?P<weeks>\d+) ?w(eeks?)?)? ?((?P<days>\d+) ?d(ays?)?)?$', text) |
|
199 | r'^((?P<weeks>\d+) ?w(eeks?)?)? ?((?P<days>\d+) ?d(ays?)?)?$', text) | |
201 | if matched: |
|
200 | if matched: | |
202 | groupdict = matched.groupdict() |
|
201 | groupdict = matched.groupdict() | |
203 | if groupdict['days']: |
|
202 | if groupdict['days']: | |
204 | days += int(matched.groupdict()['days']) |
|
203 | days += int(matched.groupdict()['days']) | |
205 | if groupdict['weeks']: |
|
204 | if groupdict['weeks']: | |
206 | days += int(matched.groupdict()['weeks']) * 7 |
|
205 | days += int(matched.groupdict()['weeks']) * 7 | |
207 | past = datetime.datetime.now() - datetime.timedelta(days=days) |
|
206 | past = datetime.datetime.now() - datetime.timedelta(days=days) | |
208 | return datetime.datetime(*past.timetuple()[:3]) |
|
207 | return datetime.datetime(*past.timetuple()[:3]) | |
209 |
|
208 | |||
210 | raise ValueError('Wrong date: "%s"' % text) |
|
209 | raise ValueError('Wrong date: "%s"' % text) | |
211 |
|
210 | |||
212 |
|
211 | |||
213 | def get_dict_for_attrs(obj, attrs): |
|
212 | def get_dict_for_attrs(obj, attrs): | |
214 | """ |
|
213 | """ | |
215 | Returns dictionary for each attribute from given ``obj``. |
|
214 | Returns dictionary for each attribute from given ``obj``. | |
216 | """ |
|
215 | """ | |
217 | data = {} |
|
216 | data = {} | |
218 | for attr in attrs: |
|
217 | for attr in attrs: | |
219 | data[attr] = getattr(obj, attr) |
|
218 | data[attr] = getattr(obj, attr) | |
220 | return data |
|
219 | return data |
@@ -1,424 +1,422 b'' | |||||
1 | # encoding: UTF-8 |
|
1 | # encoding: UTF-8 | |
2 |
|
2 | |||
3 | from __future__ import print_function |
|
|||
4 |
|
||||
5 | import datetime |
|
3 | import datetime | |
6 | import string |
|
4 | import string | |
7 | import sys |
|
5 | import sys | |
8 |
|
6 | |||
9 | from kallithea.lib.vcs.utils.filesize import filesizeformat |
|
7 | from kallithea.lib.vcs.utils.filesize import filesizeformat | |
10 |
|
8 | |||
11 |
|
9 | |||
12 | class ProgressBarError(Exception): |
|
10 | class ProgressBarError(Exception): | |
13 | pass |
|
11 | pass | |
14 |
|
12 | |||
15 |
|
13 | |||
16 | class AlreadyFinishedError(ProgressBarError): |
|
14 | class AlreadyFinishedError(ProgressBarError): | |
17 | pass |
|
15 | pass | |
18 |
|
16 | |||
19 |
|
17 | |||
20 | class ProgressBar(object): |
|
18 | class ProgressBar(object): | |
21 |
|
19 | |||
22 | default_elements = ['percentage', 'bar', 'steps'] |
|
20 | default_elements = ['percentage', 'bar', 'steps'] | |
23 |
|
21 | |||
24 | def __init__(self, steps=100, stream=None, elements=None): |
|
22 | def __init__(self, steps=100, stream=None, elements=None): | |
25 | self.step = 0 |
|
23 | self.step = 0 | |
26 | self.steps = steps |
|
24 | self.steps = steps | |
27 | self.stream = stream or sys.stderr |
|
25 | self.stream = stream or sys.stderr | |
28 | self.bar_char = '=' |
|
26 | self.bar_char = '=' | |
29 | self.width = 50 |
|
27 | self.width = 50 | |
30 | self.separator = ' | ' |
|
28 | self.separator = ' | ' | |
31 | self.elements = elements or self.default_elements |
|
29 | self.elements = elements or self.default_elements | |
32 | self.started = None |
|
30 | self.started = None | |
33 | self.finished = False |
|
31 | self.finished = False | |
34 | self.steps_label = 'Step' |
|
32 | self.steps_label = 'Step' | |
35 | self.time_label = 'Time' |
|
33 | self.time_label = 'Time' | |
36 | self.eta_label = 'ETA' |
|
34 | self.eta_label = 'ETA' | |
37 | self.speed_label = 'Speed' |
|
35 | self.speed_label = 'Speed' | |
38 | self.transfer_label = 'Transfer' |
|
36 | self.transfer_label = 'Transfer' | |
39 |
|
37 | |||
40 | def __str__(self): |
|
38 | def __str__(self): | |
41 | return self.get_line() |
|
39 | return self.get_line() | |
42 |
|
40 | |||
43 | def __iter__(self): |
|
41 | def __iter__(self): | |
44 | start = self.step |
|
42 | start = self.step | |
45 | end = self.steps + 1 |
|
43 | end = self.steps + 1 | |
46 | for x in range(start, end): |
|
44 | for x in range(start, end): | |
47 | self.render(x) |
|
45 | self.render(x) | |
48 | yield x |
|
46 | yield x | |
49 |
|
47 | |||
50 | def get_separator(self): |
|
48 | def get_separator(self): | |
51 | return self.separator |
|
49 | return self.separator | |
52 |
|
50 | |||
53 | def get_bar_char(self): |
|
51 | def get_bar_char(self): | |
54 | return self.bar_char |
|
52 | return self.bar_char | |
55 |
|
53 | |||
56 | def get_bar(self): |
|
54 | def get_bar(self): | |
57 | char = self.get_bar_char() |
|
55 | char = self.get_bar_char() | |
58 | perc = self.get_percentage() |
|
56 | perc = self.get_percentage() | |
59 | length = int(self.width * perc / 100) |
|
57 | length = int(self.width * perc / 100) | |
60 | bar = char * length |
|
58 | bar = char * length | |
61 | bar = bar.ljust(self.width) |
|
59 | bar = bar.ljust(self.width) | |
62 | return bar |
|
60 | return bar | |
63 |
|
61 | |||
64 | def get_elements(self): |
|
62 | def get_elements(self): | |
65 | return self.elements |
|
63 | return self.elements | |
66 |
|
64 | |||
67 | def get_template(self): |
|
65 | def get_template(self): | |
68 | separator = self.get_separator() |
|
66 | separator = self.get_separator() | |
69 | elements = self.get_elements() |
|
67 | elements = self.get_elements() | |
70 | return string.Template(separator.join((('$%s' % e) for e in elements))) |
|
68 | return string.Template(separator.join((('$%s' % e) for e in elements))) | |
71 |
|
69 | |||
72 | def get_total_time(self, current_time=None): |
|
70 | def get_total_time(self, current_time=None): | |
73 | if current_time is None: |
|
71 | if current_time is None: | |
74 | current_time = datetime.datetime.now() |
|
72 | current_time = datetime.datetime.now() | |
75 | if not self.started: |
|
73 | if not self.started: | |
76 | return datetime.timedelta() |
|
74 | return datetime.timedelta() | |
77 | return current_time - self.started |
|
75 | return current_time - self.started | |
78 |
|
76 | |||
79 | def get_rendered_total_time(self): |
|
77 | def get_rendered_total_time(self): | |
80 | delta = self.get_total_time() |
|
78 | delta = self.get_total_time() | |
81 | if not delta: |
|
79 | if not delta: | |
82 | ttime = '-' |
|
80 | ttime = '-' | |
83 | else: |
|
81 | else: | |
84 | ttime = str(delta) |
|
82 | ttime = str(delta) | |
85 | return '%s %s' % (self.time_label, ttime) |
|
83 | return '%s %s' % (self.time_label, ttime) | |
86 |
|
84 | |||
87 | def get_eta(self, current_time=None): |
|
85 | def get_eta(self, current_time=None): | |
88 | if current_time is None: |
|
86 | if current_time is None: | |
89 | current_time = datetime.datetime.now() |
|
87 | current_time = datetime.datetime.now() | |
90 | if self.step == 0: |
|
88 | if self.step == 0: | |
91 | return datetime.timedelta() |
|
89 | return datetime.timedelta() | |
92 | total_seconds = self.get_total_time().total_seconds() |
|
90 | total_seconds = self.get_total_time().total_seconds() | |
93 | eta_seconds = total_seconds * self.steps / self.step - total_seconds |
|
91 | eta_seconds = total_seconds * self.steps / self.step - total_seconds | |
94 | return datetime.timedelta(seconds=int(eta_seconds)) |
|
92 | return datetime.timedelta(seconds=int(eta_seconds)) | |
95 |
|
93 | |||
96 | def get_rendered_eta(self): |
|
94 | def get_rendered_eta(self): | |
97 | eta = self.get_eta() |
|
95 | eta = self.get_eta() | |
98 | if not eta: |
|
96 | if not eta: | |
99 | eta = '--:--:--' |
|
97 | eta = '--:--:--' | |
100 | else: |
|
98 | else: | |
101 | eta = str(eta).rjust(8) |
|
99 | eta = str(eta).rjust(8) | |
102 | return '%s: %s' % (self.eta_label, eta) |
|
100 | return '%s: %s' % (self.eta_label, eta) | |
103 |
|
101 | |||
104 | def get_percentage(self): |
|
102 | def get_percentage(self): | |
105 | return float(self.step) / self.steps * 100 |
|
103 | return float(self.step) / self.steps * 100 | |
106 |
|
104 | |||
107 | def get_rendered_percentage(self): |
|
105 | def get_rendered_percentage(self): | |
108 | perc = self.get_percentage() |
|
106 | perc = self.get_percentage() | |
109 | return ('%s%%' % (int(perc))).rjust(5) |
|
107 | return ('%s%%' % (int(perc))).rjust(5) | |
110 |
|
108 | |||
111 | def get_rendered_steps(self): |
|
109 | def get_rendered_steps(self): | |
112 | return '%s: %s/%s' % (self.steps_label, self.step, self.steps) |
|
110 | return '%s: %s/%s' % (self.steps_label, self.step, self.steps) | |
113 |
|
111 | |||
114 | def get_rendered_speed(self, step=None, total_seconds=None): |
|
112 | def get_rendered_speed(self, step=None, total_seconds=None): | |
115 | if step is None: |
|
113 | if step is None: | |
116 | step = self.step |
|
114 | step = self.step | |
117 | if total_seconds is None: |
|
115 | if total_seconds is None: | |
118 | total_seconds = self.get_total_time().total_seconds() |
|
116 | total_seconds = self.get_total_time().total_seconds() | |
119 | if step <= 0 or total_seconds <= 0: |
|
117 | if step <= 0 or total_seconds <= 0: | |
120 | speed = '-' |
|
118 | speed = '-' | |
121 | else: |
|
119 | else: | |
122 | speed = filesizeformat(float(step) / total_seconds) |
|
120 | speed = filesizeformat(float(step) / total_seconds) | |
123 | return '%s: %s/s' % (self.speed_label, speed) |
|
121 | return '%s: %s/s' % (self.speed_label, speed) | |
124 |
|
122 | |||
125 | def get_rendered_transfer(self, step=None, steps=None): |
|
123 | def get_rendered_transfer(self, step=None, steps=None): | |
126 | if step is None: |
|
124 | if step is None: | |
127 | step = self.step |
|
125 | step = self.step | |
128 | if steps is None: |
|
126 | if steps is None: | |
129 | steps = self.steps |
|
127 | steps = self.steps | |
130 |
|
128 | |||
131 | if steps <= 0: |
|
129 | if steps <= 0: | |
132 | return '%s: -' % self.transfer_label |
|
130 | return '%s: -' % self.transfer_label | |
133 | total = filesizeformat(float(steps)) |
|
131 | total = filesizeformat(float(steps)) | |
134 | if step <= 0: |
|
132 | if step <= 0: | |
135 | transferred = '-' |
|
133 | transferred = '-' | |
136 | else: |
|
134 | else: | |
137 | transferred = filesizeformat(float(step)) |
|
135 | transferred = filesizeformat(float(step)) | |
138 | return '%s: %s / %s' % (self.transfer_label, transferred, total) |
|
136 | return '%s: %s / %s' % (self.transfer_label, transferred, total) | |
139 |
|
137 | |||
140 | def get_context(self): |
|
138 | def get_context(self): | |
141 | return { |
|
139 | return { | |
142 | 'percentage': self.get_rendered_percentage(), |
|
140 | 'percentage': self.get_rendered_percentage(), | |
143 | 'bar': self.get_bar(), |
|
141 | 'bar': self.get_bar(), | |
144 | 'steps': self.get_rendered_steps(), |
|
142 | 'steps': self.get_rendered_steps(), | |
145 | 'time': self.get_rendered_total_time(), |
|
143 | 'time': self.get_rendered_total_time(), | |
146 | 'eta': self.get_rendered_eta(), |
|
144 | 'eta': self.get_rendered_eta(), | |
147 | 'speed': self.get_rendered_speed(), |
|
145 | 'speed': self.get_rendered_speed(), | |
148 | 'transfer': self.get_rendered_transfer(), |
|
146 | 'transfer': self.get_rendered_transfer(), | |
149 | } |
|
147 | } | |
150 |
|
148 | |||
151 | def get_line(self): |
|
149 | def get_line(self): | |
152 | template = self.get_template() |
|
150 | template = self.get_template() | |
153 | context = self.get_context() |
|
151 | context = self.get_context() | |
154 | return template.safe_substitute(**context) |
|
152 | return template.safe_substitute(**context) | |
155 |
|
153 | |||
156 | def write(self, data): |
|
154 | def write(self, data): | |
157 | self.stream.write(data) |
|
155 | self.stream.write(data) | |
158 |
|
156 | |||
159 | def render(self, step): |
|
157 | def render(self, step): | |
160 | if not self.started: |
|
158 | if not self.started: | |
161 | self.started = datetime.datetime.now() |
|
159 | self.started = datetime.datetime.now() | |
162 | if self.finished: |
|
160 | if self.finished: | |
163 | raise AlreadyFinishedError |
|
161 | raise AlreadyFinishedError | |
164 | self.step = step |
|
162 | self.step = step | |
165 | self.write('\r%s' % self) |
|
163 | self.write('\r%s' % self) | |
166 | if step == self.steps: |
|
164 | if step == self.steps: | |
167 | self.finished = True |
|
165 | self.finished = True | |
168 | if step == self.steps: |
|
166 | if step == self.steps: | |
169 | self.write('\n') |
|
167 | self.write('\n') | |
170 |
|
168 | |||
171 |
|
169 | |||
172 | """ |
|
170 | """ | |
173 | termcolors.py |
|
171 | termcolors.py | |
174 |
|
172 | |||
175 | Grabbed from Django (http://www.djangoproject.com) |
|
173 | Grabbed from Django (http://www.djangoproject.com) | |
176 | """ |
|
174 | """ | |
177 |
|
175 | |||
178 | color_names = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white') |
|
176 | color_names = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white') | |
179 | foreground = dict([(color_names[x], '3%s' % x) for x in range(8)]) |
|
177 | foreground = dict([(color_names[x], '3%s' % x) for x in range(8)]) | |
180 | background = dict([(color_names[x], '4%s' % x) for x in range(8)]) |
|
178 | background = dict([(color_names[x], '4%s' % x) for x in range(8)]) | |
181 |
|
179 | |||
182 | RESET = '0' |
|
180 | RESET = '0' | |
183 | opt_dict = {'bold': '1', 'underscore': '4', 'blink': '5', 'reverse': '7', 'conceal': '8'} |
|
181 | opt_dict = {'bold': '1', 'underscore': '4', 'blink': '5', 'reverse': '7', 'conceal': '8'} | |
184 |
|
182 | |||
185 |
|
183 | |||
186 | def colorize(text='', opts=(), **kwargs): |
|
184 | def colorize(text='', opts=(), **kwargs): | |
187 | """ |
|
185 | """ | |
188 | Returns your text, enclosed in ANSI graphics codes. |
|
186 | Returns your text, enclosed in ANSI graphics codes. | |
189 |
|
187 | |||
190 | Depends on the keyword arguments 'fg' and 'bg', and the contents of |
|
188 | Depends on the keyword arguments 'fg' and 'bg', and the contents of | |
191 | the opts tuple/list. |
|
189 | the opts tuple/list. | |
192 |
|
190 | |||
193 | Returns the RESET code if no parameters are given. |
|
191 | Returns the RESET code if no parameters are given. | |
194 |
|
192 | |||
195 | Valid colors: |
|
193 | Valid colors: | |
196 | 'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white' |
|
194 | 'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white' | |
197 |
|
195 | |||
198 | Valid options: |
|
196 | Valid options: | |
199 | 'bold' |
|
197 | 'bold' | |
200 | 'underscore' |
|
198 | 'underscore' | |
201 | 'blink' |
|
199 | 'blink' | |
202 | 'reverse' |
|
200 | 'reverse' | |
203 | 'conceal' |
|
201 | 'conceal' | |
204 | 'noreset' - string will not be auto-terminated with the RESET code |
|
202 | 'noreset' - string will not be auto-terminated with the RESET code | |
205 |
|
203 | |||
206 | Examples: |
|
204 | Examples: | |
207 | colorize('hello', fg='red', bg='blue', opts=('blink',)) |
|
205 | colorize('hello', fg='red', bg='blue', opts=('blink',)) | |
208 | colorize() |
|
206 | colorize() | |
209 | colorize('goodbye', opts=('underscore',)) |
|
207 | colorize('goodbye', opts=('underscore',)) | |
210 | print colorize('first line', fg='red', opts=('noreset',)) |
|
208 | print colorize('first line', fg='red', opts=('noreset',)) | |
211 | print 'this should be red too' |
|
209 | print 'this should be red too' | |
212 | print colorize('and so should this') |
|
210 | print colorize('and so should this') | |
213 | print 'this should not be red' |
|
211 | print 'this should not be red' | |
214 | """ |
|
212 | """ | |
215 | code_list = [] |
|
213 | code_list = [] | |
216 | if text == '' and len(opts) == 1 and opts[0] == 'reset': |
|
214 | if text == '' and len(opts) == 1 and opts[0] == 'reset': | |
217 | return '\x1b[%sm' % RESET |
|
215 | return '\x1b[%sm' % RESET | |
218 | for k, v in kwargs.items(): |
|
216 | for k, v in kwargs.items(): | |
219 | if k == 'fg': |
|
217 | if k == 'fg': | |
220 | code_list.append(foreground[v]) |
|
218 | code_list.append(foreground[v]) | |
221 | elif k == 'bg': |
|
219 | elif k == 'bg': | |
222 | code_list.append(background[v]) |
|
220 | code_list.append(background[v]) | |
223 | for o in opts: |
|
221 | for o in opts: | |
224 | if o in opt_dict: |
|
222 | if o in opt_dict: | |
225 | code_list.append(opt_dict[o]) |
|
223 | code_list.append(opt_dict[o]) | |
226 | if 'noreset' not in opts: |
|
224 | if 'noreset' not in opts: | |
227 | text = text + '\x1b[%sm' % RESET |
|
225 | text = text + '\x1b[%sm' % RESET | |
228 | return ('\x1b[%sm' % ';'.join(code_list)) + text |
|
226 | return ('\x1b[%sm' % ';'.join(code_list)) + text | |
229 |
|
227 | |||
230 |
|
228 | |||
231 | def make_style(opts=(), **kwargs): |
|
229 | def make_style(opts=(), **kwargs): | |
232 | """ |
|
230 | """ | |
233 | Returns a function with default parameters for colorize() |
|
231 | Returns a function with default parameters for colorize() | |
234 |
|
232 | |||
235 | Example: |
|
233 | Example: | |
236 | bold_red = make_style(opts=('bold',), fg='red') |
|
234 | bold_red = make_style(opts=('bold',), fg='red') | |
237 | print bold_red('hello') |
|
235 | print bold_red('hello') | |
238 | KEYWORD = make_style(fg='yellow') |
|
236 | KEYWORD = make_style(fg='yellow') | |
239 | COMMENT = make_style(fg='blue', opts=('bold',)) |
|
237 | COMMENT = make_style(fg='blue', opts=('bold',)) | |
240 | """ |
|
238 | """ | |
241 | return lambda text: colorize(text, opts, **kwargs) |
|
239 | return lambda text: colorize(text, opts, **kwargs) | |
242 |
|
240 | |||
243 |
|
241 | |||
244 | NOCOLOR_PALETTE = 'nocolor' |
|
242 | NOCOLOR_PALETTE = 'nocolor' | |
245 | DARK_PALETTE = 'dark' |
|
243 | DARK_PALETTE = 'dark' | |
246 | LIGHT_PALETTE = 'light' |
|
244 | LIGHT_PALETTE = 'light' | |
247 |
|
245 | |||
248 | PALETTES = { |
|
246 | PALETTES = { | |
249 | NOCOLOR_PALETTE: { |
|
247 | NOCOLOR_PALETTE: { | |
250 | 'ERROR': {}, |
|
248 | 'ERROR': {}, | |
251 | 'NOTICE': {}, |
|
249 | 'NOTICE': {}, | |
252 | 'SQL_FIELD': {}, |
|
250 | 'SQL_FIELD': {}, | |
253 | 'SQL_COLTYPE': {}, |
|
251 | 'SQL_COLTYPE': {}, | |
254 | 'SQL_KEYWORD': {}, |
|
252 | 'SQL_KEYWORD': {}, | |
255 | 'SQL_TABLE': {}, |
|
253 | 'SQL_TABLE': {}, | |
256 | 'HTTP_INFO': {}, |
|
254 | 'HTTP_INFO': {}, | |
257 | 'HTTP_SUCCESS': {}, |
|
255 | 'HTTP_SUCCESS': {}, | |
258 | 'HTTP_REDIRECT': {}, |
|
256 | 'HTTP_REDIRECT': {}, | |
259 | 'HTTP_NOT_MODIFIED': {}, |
|
257 | 'HTTP_NOT_MODIFIED': {}, | |
260 | 'HTTP_BAD_REQUEST': {}, |
|
258 | 'HTTP_BAD_REQUEST': {}, | |
261 | 'HTTP_NOT_FOUND': {}, |
|
259 | 'HTTP_NOT_FOUND': {}, | |
262 | 'HTTP_SERVER_ERROR': {}, |
|
260 | 'HTTP_SERVER_ERROR': {}, | |
263 | }, |
|
261 | }, | |
264 | DARK_PALETTE: { |
|
262 | DARK_PALETTE: { | |
265 | 'ERROR': { 'fg': 'red', 'opts': ('bold',) }, |
|
263 | 'ERROR': { 'fg': 'red', 'opts': ('bold',) }, | |
266 | 'NOTICE': { 'fg': 'red' }, |
|
264 | 'NOTICE': { 'fg': 'red' }, | |
267 | 'SQL_FIELD': { 'fg': 'green', 'opts': ('bold',) }, |
|
265 | 'SQL_FIELD': { 'fg': 'green', 'opts': ('bold',) }, | |
268 | 'SQL_COLTYPE': { 'fg': 'green' }, |
|
266 | 'SQL_COLTYPE': { 'fg': 'green' }, | |
269 | 'SQL_KEYWORD': { 'fg': 'yellow' }, |
|
267 | 'SQL_KEYWORD': { 'fg': 'yellow' }, | |
270 | 'SQL_TABLE': { 'opts': ('bold',) }, |
|
268 | 'SQL_TABLE': { 'opts': ('bold',) }, | |
271 | 'HTTP_INFO': { 'opts': ('bold',) }, |
|
269 | 'HTTP_INFO': { 'opts': ('bold',) }, | |
272 | 'HTTP_SUCCESS': { }, |
|
270 | 'HTTP_SUCCESS': { }, | |
273 | 'HTTP_REDIRECT': { 'fg': 'green' }, |
|
271 | 'HTTP_REDIRECT': { 'fg': 'green' }, | |
274 | 'HTTP_NOT_MODIFIED': { 'fg': 'cyan' }, |
|
272 | 'HTTP_NOT_MODIFIED': { 'fg': 'cyan' }, | |
275 | 'HTTP_BAD_REQUEST': { 'fg': 'red', 'opts': ('bold',) }, |
|
273 | 'HTTP_BAD_REQUEST': { 'fg': 'red', 'opts': ('bold',) }, | |
276 | 'HTTP_NOT_FOUND': { 'fg': 'yellow' }, |
|
274 | 'HTTP_NOT_FOUND': { 'fg': 'yellow' }, | |
277 | 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) }, |
|
275 | 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) }, | |
278 | }, |
|
276 | }, | |
279 | LIGHT_PALETTE: { |
|
277 | LIGHT_PALETTE: { | |
280 | 'ERROR': { 'fg': 'red', 'opts': ('bold',) }, |
|
278 | 'ERROR': { 'fg': 'red', 'opts': ('bold',) }, | |
281 | 'NOTICE': { 'fg': 'red' }, |
|
279 | 'NOTICE': { 'fg': 'red' }, | |
282 | 'SQL_FIELD': { 'fg': 'green', 'opts': ('bold',) }, |
|
280 | 'SQL_FIELD': { 'fg': 'green', 'opts': ('bold',) }, | |
283 | 'SQL_COLTYPE': { 'fg': 'green' }, |
|
281 | 'SQL_COLTYPE': { 'fg': 'green' }, | |
284 | 'SQL_KEYWORD': { 'fg': 'blue' }, |
|
282 | 'SQL_KEYWORD': { 'fg': 'blue' }, | |
285 | 'SQL_TABLE': { 'opts': ('bold',) }, |
|
283 | 'SQL_TABLE': { 'opts': ('bold',) }, | |
286 | 'HTTP_INFO': { 'opts': ('bold',) }, |
|
284 | 'HTTP_INFO': { 'opts': ('bold',) }, | |
287 | 'HTTP_SUCCESS': { }, |
|
285 | 'HTTP_SUCCESS': { }, | |
288 | 'HTTP_REDIRECT': { 'fg': 'green', 'opts': ('bold',) }, |
|
286 | 'HTTP_REDIRECT': { 'fg': 'green', 'opts': ('bold',) }, | |
289 | 'HTTP_NOT_MODIFIED': { 'fg': 'green' }, |
|
287 | 'HTTP_NOT_MODIFIED': { 'fg': 'green' }, | |
290 | 'HTTP_BAD_REQUEST': { 'fg': 'red', 'opts': ('bold',) }, |
|
288 | 'HTTP_BAD_REQUEST': { 'fg': 'red', 'opts': ('bold',) }, | |
291 | 'HTTP_NOT_FOUND': { 'fg': 'red' }, |
|
289 | 'HTTP_NOT_FOUND': { 'fg': 'red' }, | |
292 | 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) }, |
|
290 | 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) }, | |
293 | } |
|
291 | } | |
294 | } |
|
292 | } | |
295 | DEFAULT_PALETTE = DARK_PALETTE |
|
293 | DEFAULT_PALETTE = DARK_PALETTE | |
296 |
|
294 | |||
297 | # ---------------------------- # |
|
295 | # ---------------------------- # | |
298 | # --- End of termcolors.py --- # |
|
296 | # --- End of termcolors.py --- # | |
299 | # ---------------------------- # |
|
297 | # ---------------------------- # | |
300 |
|
298 | |||
301 |
|
299 | |||
302 | class ColoredProgressBar(ProgressBar): |
|
300 | class ColoredProgressBar(ProgressBar): | |
303 |
|
301 | |||
304 | BAR_COLORS = ( |
|
302 | BAR_COLORS = ( | |
305 | (10, 'red'), |
|
303 | (10, 'red'), | |
306 | (30, 'magenta'), |
|
304 | (30, 'magenta'), | |
307 | (50, 'yellow'), |
|
305 | (50, 'yellow'), | |
308 | (99, 'green'), |
|
306 | (99, 'green'), | |
309 | (100, 'blue'), |
|
307 | (100, 'blue'), | |
310 | ) |
|
308 | ) | |
311 |
|
309 | |||
312 | def get_line(self): |
|
310 | def get_line(self): | |
313 | line = super(ColoredProgressBar, self).get_line() |
|
311 | line = super(ColoredProgressBar, self).get_line() | |
314 | perc = self.get_percentage() |
|
312 | perc = self.get_percentage() | |
315 | if perc > 100: |
|
313 | if perc > 100: | |
316 | color = 'blue' |
|
314 | color = 'blue' | |
317 | for max_perc, color in self.BAR_COLORS: |
|
315 | for max_perc, color in self.BAR_COLORS: | |
318 | if perc <= max_perc: |
|
316 | if perc <= max_perc: | |
319 | break |
|
317 | break | |
320 | return colorize(line, fg=color) |
|
318 | return colorize(line, fg=color) | |
321 |
|
319 | |||
322 |
|
320 | |||
323 | class AnimatedProgressBar(ProgressBar): |
|
321 | class AnimatedProgressBar(ProgressBar): | |
324 |
|
322 | |||
325 | def get_bar_char(self): |
|
323 | def get_bar_char(self): | |
326 | chars = '-/|\\' |
|
324 | chars = '-/|\\' | |
327 | if self.step >= self.steps: |
|
325 | if self.step >= self.steps: | |
328 | return '=' |
|
326 | return '=' | |
329 | return chars[self.step % len(chars)] |
|
327 | return chars[self.step % len(chars)] | |
330 |
|
328 | |||
331 |
|
329 | |||
332 | class BarOnlyProgressBar(ProgressBar): |
|
330 | class BarOnlyProgressBar(ProgressBar): | |
333 |
|
331 | |||
334 | default_elements = ['bar', 'steps'] |
|
332 | default_elements = ['bar', 'steps'] | |
335 |
|
333 | |||
336 | def get_bar(self): |
|
334 | def get_bar(self): | |
337 | bar = super(BarOnlyProgressBar, self).get_bar() |
|
335 | bar = super(BarOnlyProgressBar, self).get_bar() | |
338 | perc = self.get_percentage() |
|
336 | perc = self.get_percentage() | |
339 | perc_text = '%s%%' % int(perc) |
|
337 | perc_text = '%s%%' % int(perc) | |
340 | text = (' %s%% ' % (perc_text)).center(self.width, '=') |
|
338 | text = (' %s%% ' % (perc_text)).center(self.width, '=') | |
341 | L = text.find(' ') |
|
339 | L = text.find(' ') | |
342 | R = text.rfind(' ') |
|
340 | R = text.rfind(' ') | |
343 | bar = ' '.join((bar[:L], perc_text, bar[R:])) |
|
341 | bar = ' '.join((bar[:L], perc_text, bar[R:])) | |
344 | return bar |
|
342 | return bar | |
345 |
|
343 | |||
346 |
|
344 | |||
347 | class AnimatedColoredProgressBar(AnimatedProgressBar, |
|
345 | class AnimatedColoredProgressBar(AnimatedProgressBar, | |
348 | ColoredProgressBar): |
|
346 | ColoredProgressBar): | |
349 | pass |
|
347 | pass | |
350 |
|
348 | |||
351 |
|
349 | |||
352 | class BarOnlyColoredProgressBar(ColoredProgressBar, |
|
350 | class BarOnlyColoredProgressBar(ColoredProgressBar, | |
353 | BarOnlyProgressBar): |
|
351 | BarOnlyProgressBar): | |
354 | pass |
|
352 | pass | |
355 |
|
353 | |||
356 |
|
354 | |||
357 | def main(): |
|
355 | def main(): | |
358 | import time |
|
356 | import time | |
359 |
|
357 | |||
360 | print("Standard progress bar...") |
|
358 | print("Standard progress bar...") | |
361 | bar = ProgressBar(30) |
|
359 | bar = ProgressBar(30) | |
362 | for x in range(1, 31): |
|
360 | for x in range(1, 31): | |
363 | bar.render(x) |
|
361 | bar.render(x) | |
364 | time.sleep(0.02) |
|
362 | time.sleep(0.02) | |
365 | bar.stream.write('\n') |
|
363 | bar.stream.write('\n') | |
366 | print() |
|
364 | print() | |
367 |
|
365 | |||
368 | print("Empty bar...") |
|
366 | print("Empty bar...") | |
369 | bar = ProgressBar(50) |
|
367 | bar = ProgressBar(50) | |
370 | bar.render(0) |
|
368 | bar.render(0) | |
371 | print() |
|
369 | print() | |
372 | print() |
|
370 | print() | |
373 |
|
371 | |||
374 | print("Colored bar...") |
|
372 | print("Colored bar...") | |
375 | bar = ColoredProgressBar(20) |
|
373 | bar = ColoredProgressBar(20) | |
376 | for x in bar: |
|
374 | for x in bar: | |
377 | time.sleep(0.01) |
|
375 | time.sleep(0.01) | |
378 | print() |
|
376 | print() | |
379 |
|
377 | |||
380 | print("Animated char bar...") |
|
378 | print("Animated char bar...") | |
381 | bar = AnimatedProgressBar(20) |
|
379 | bar = AnimatedProgressBar(20) | |
382 | for x in bar: |
|
380 | for x in bar: | |
383 | time.sleep(0.01) |
|
381 | time.sleep(0.01) | |
384 | print() |
|
382 | print() | |
385 |
|
383 | |||
386 | print("Animated + colored char bar...") |
|
384 | print("Animated + colored char bar...") | |
387 | bar = AnimatedColoredProgressBar(20) |
|
385 | bar = AnimatedColoredProgressBar(20) | |
388 | for x in bar: |
|
386 | for x in bar: | |
389 | time.sleep(0.01) |
|
387 | time.sleep(0.01) | |
390 | print() |
|
388 | print() | |
391 |
|
389 | |||
392 | print("Bar only ...") |
|
390 | print("Bar only ...") | |
393 | bar = BarOnlyProgressBar(20) |
|
391 | bar = BarOnlyProgressBar(20) | |
394 | for x in bar: |
|
392 | for x in bar: | |
395 | time.sleep(0.01) |
|
393 | time.sleep(0.01) | |
396 | print() |
|
394 | print() | |
397 |
|
395 | |||
398 | print("Colored, longer bar-only, eta, total time ...") |
|
396 | print("Colored, longer bar-only, eta, total time ...") | |
399 | bar = BarOnlyColoredProgressBar(40) |
|
397 | bar = BarOnlyColoredProgressBar(40) | |
400 | bar.width = 60 |
|
398 | bar.width = 60 | |
401 | bar.elements += ['time', 'eta'] |
|
399 | bar.elements += ['time', 'eta'] | |
402 | for x in bar: |
|
400 | for x in bar: | |
403 | time.sleep(0.01) |
|
401 | time.sleep(0.01) | |
404 | print() |
|
402 | print() | |
405 | print() |
|
403 | print() | |
406 |
|
404 | |||
407 | print("File transfer bar, breaks after 2 seconds ...") |
|
405 | print("File transfer bar, breaks after 2 seconds ...") | |
408 | total_bytes = 1024 * 1024 * 2 |
|
406 | total_bytes = 1024 * 1024 * 2 | |
409 | bar = ProgressBar(total_bytes) |
|
407 | bar = ProgressBar(total_bytes) | |
410 | bar.width = 50 |
|
408 | bar.width = 50 | |
411 | bar.elements.remove('steps') |
|
409 | bar.elements.remove('steps') | |
412 | bar.elements += ['transfer', 'time', 'eta', 'speed'] |
|
410 | bar.elements += ['transfer', 'time', 'eta', 'speed'] | |
413 | for x in range(0, bar.steps, 1024): |
|
411 | for x in range(0, bar.steps, 1024): | |
414 | bar.render(x) |
|
412 | bar.render(x) | |
415 | time.sleep(0.01) |
|
413 | time.sleep(0.01) | |
416 | now = datetime.datetime.now() |
|
414 | now = datetime.datetime.now() | |
417 | if now - bar.started >= datetime.timedelta(seconds=2): |
|
415 | if now - bar.started >= datetime.timedelta(seconds=2): | |
418 | break |
|
416 | break | |
419 | print() |
|
417 | print() | |
420 | print() |
|
418 | print() | |
421 |
|
419 | |||
422 |
|
420 | |||
423 | if __name__ == '__main__': |
|
421 | if __name__ == '__main__': | |
424 | main() |
|
422 | main() |
@@ -1,652 +1,650 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | Test suite for vcs push/pull operations. |
|
15 | Test suite for vcs push/pull operations. | |
16 |
|
16 | |||
17 | The tests need Git > 1.8.1. |
|
17 | The tests need Git > 1.8.1. | |
18 |
|
18 | |||
19 | This file was forked by the Kallithea project in July 2014. |
|
19 | This file was forked by the Kallithea project in July 2014. | |
20 | Original author and date, and relevant copyright and licensing information is below: |
|
20 | Original author and date, and relevant copyright and licensing information is below: | |
21 | :created_on: Dec 30, 2010 |
|
21 | :created_on: Dec 30, 2010 | |
22 | :author: marcink |
|
22 | :author: marcink | |
23 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
23 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
24 | :license: GPLv3, see LICENSE.md for more details. |
|
24 | :license: GPLv3, see LICENSE.md for more details. | |
25 |
|
25 | |||
26 | """ |
|
26 | """ | |
27 |
|
27 | |||
28 | from __future__ import print_function |
|
|||
29 |
|
||||
30 | import json |
|
28 | import json | |
31 | import os |
|
29 | import os | |
32 | import re |
|
30 | import re | |
33 | import tempfile |
|
31 | import tempfile | |
34 | import time |
|
32 | import time | |
35 | import urllib.request |
|
33 | import urllib.request | |
36 | from subprocess import PIPE, Popen |
|
34 | from subprocess import PIPE, Popen | |
37 | from tempfile import _RandomNameSequence |
|
35 | from tempfile import _RandomNameSequence | |
38 |
|
36 | |||
39 | import pytest |
|
37 | import pytest | |
40 |
|
38 | |||
41 | from kallithea import CONFIG |
|
39 | from kallithea import CONFIG | |
42 | from kallithea.lib.utils2 import ascii_bytes, safe_str |
|
40 | from kallithea.lib.utils2 import ascii_bytes, safe_str | |
43 | from kallithea.model.db import CacheInvalidation, Repository, Ui, User, UserIpMap, UserLog |
|
41 | from kallithea.model.db import CacheInvalidation, Repository, Ui, User, UserIpMap, UserLog | |
44 | from kallithea.model.meta import Session |
|
42 | from kallithea.model.meta import Session | |
45 | from kallithea.model.ssh_key import SshKeyModel |
|
43 | from kallithea.model.ssh_key import SshKeyModel | |
46 | from kallithea.model.user import UserModel |
|
44 | from kallithea.model.user import UserModel | |
47 | from kallithea.tests import base |
|
45 | from kallithea.tests import base | |
48 | from kallithea.tests.fixture import Fixture |
|
46 | from kallithea.tests.fixture import Fixture | |
49 |
|
47 | |||
50 |
|
48 | |||
51 | DEBUG = True |
|
49 | DEBUG = True | |
52 | HOST = '127.0.0.1:4999' # test host |
|
50 | HOST = '127.0.0.1:4999' # test host | |
53 |
|
51 | |||
54 | fixture = Fixture() |
|
52 | fixture = Fixture() | |
55 |
|
53 | |||
56 |
|
54 | |||
57 | # Parameterize different kinds of VCS testing - both the kind of VCS and the |
|
55 | # Parameterize different kinds of VCS testing - both the kind of VCS and the | |
58 | # access method (HTTP/SSH) |
|
56 | # access method (HTTP/SSH) | |
59 |
|
57 | |||
60 | # Mixin for using HTTP and SSH URLs |
|
58 | # Mixin for using HTTP and SSH URLs | |
61 | class HttpVcsTest(object): |
|
59 | class HttpVcsTest(object): | |
62 | @staticmethod |
|
60 | @staticmethod | |
63 | def repo_url_param(webserver, repo_name, **kwargs): |
|
61 | def repo_url_param(webserver, repo_name, **kwargs): | |
64 | return webserver.repo_url(repo_name, **kwargs) |
|
62 | return webserver.repo_url(repo_name, **kwargs) | |
65 |
|
63 | |||
66 | class SshVcsTest(object): |
|
64 | class SshVcsTest(object): | |
67 | public_keys = { |
|
65 | public_keys = { | |
68 | base.TEST_USER_REGULAR_LOGIN: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== kallithea@localhost', |
|
66 | base.TEST_USER_REGULAR_LOGIN: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== kallithea@localhost', | |
69 | base.TEST_USER_ADMIN_LOGIN: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUq== kallithea@localhost', |
|
67 | base.TEST_USER_ADMIN_LOGIN: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUq== kallithea@localhost', | |
70 | } |
|
68 | } | |
71 |
|
69 | |||
72 | @classmethod |
|
70 | @classmethod | |
73 | def repo_url_param(cls, webserver, repo_name, username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS, client_ip=base.IP_ADDR): |
|
71 | def repo_url_param(cls, webserver, repo_name, username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS, client_ip=base.IP_ADDR): | |
74 | user = User.get_by_username(username) |
|
72 | user = User.get_by_username(username) | |
75 | if user.ssh_keys: |
|
73 | if user.ssh_keys: | |
76 | ssh_key = user.ssh_keys[0] |
|
74 | ssh_key = user.ssh_keys[0] | |
77 | else: |
|
75 | else: | |
78 | sshkeymodel = SshKeyModel() |
|
76 | sshkeymodel = SshKeyModel() | |
79 | ssh_key = sshkeymodel.create(user, 'test key', cls.public_keys[user.username]) |
|
77 | ssh_key = sshkeymodel.create(user, 'test key', cls.public_keys[user.username]) | |
80 | Session().commit() |
|
78 | Session().commit() | |
81 |
|
79 | |||
82 | return cls._ssh_param(repo_name, user, ssh_key, client_ip) |
|
80 | return cls._ssh_param(repo_name, user, ssh_key, client_ip) | |
83 |
|
81 | |||
84 | # Mixins for using Mercurial and Git |
|
82 | # Mixins for using Mercurial and Git | |
85 | class HgVcsTest(object): |
|
83 | class HgVcsTest(object): | |
86 | repo_type = 'hg' |
|
84 | repo_type = 'hg' | |
87 | repo_name = base.HG_REPO |
|
85 | repo_name = base.HG_REPO | |
88 |
|
86 | |||
89 | class GitVcsTest(object): |
|
87 | class GitVcsTest(object): | |
90 | repo_type = 'git' |
|
88 | repo_type = 'git' | |
91 | repo_name = base.GIT_REPO |
|
89 | repo_name = base.GIT_REPO | |
92 |
|
90 | |||
93 | # Combine mixins to give the combinations we want to parameterize tests with |
|
91 | # Combine mixins to give the combinations we want to parameterize tests with | |
94 | class HgHttpVcsTest(HgVcsTest, HttpVcsTest): |
|
92 | class HgHttpVcsTest(HgVcsTest, HttpVcsTest): | |
95 | pass |
|
93 | pass | |
96 |
|
94 | |||
97 | class GitHttpVcsTest(GitVcsTest, HttpVcsTest): |
|
95 | class GitHttpVcsTest(GitVcsTest, HttpVcsTest): | |
98 | pass |
|
96 | pass | |
99 |
|
97 | |||
100 | class HgSshVcsTest(HgVcsTest, SshVcsTest): |
|
98 | class HgSshVcsTest(HgVcsTest, SshVcsTest): | |
101 | @staticmethod |
|
99 | @staticmethod | |
102 | def _ssh_param(repo_name, user, ssh_key, client_ip): |
|
100 | def _ssh_param(repo_name, user, ssh_key, client_ip): | |
103 | # Specify a custom ssh command on the command line |
|
101 | # Specify a custom ssh command on the command line | |
104 | return r"""--config ui.ssh="bash -c 'SSH_ORIGINAL_COMMAND=\"\$2\" SSH_CONNECTION=\"%s 1024 127.0.0.1 22\" kallithea-cli ssh-serve -c %s %s %s' --" ssh://someuser@somehost/%s""" % ( |
|
102 | return r"""--config ui.ssh="bash -c 'SSH_ORIGINAL_COMMAND=\"\$2\" SSH_CONNECTION=\"%s 1024 127.0.0.1 22\" kallithea-cli ssh-serve -c %s %s %s' --" ssh://someuser@somehost/%s""" % ( | |
105 | client_ip, |
|
103 | client_ip, | |
106 | CONFIG['__file__'], |
|
104 | CONFIG['__file__'], | |
107 | user.user_id, |
|
105 | user.user_id, | |
108 | ssh_key.user_ssh_key_id, |
|
106 | ssh_key.user_ssh_key_id, | |
109 | repo_name) |
|
107 | repo_name) | |
110 |
|
108 | |||
111 | class GitSshVcsTest(GitVcsTest, SshVcsTest): |
|
109 | class GitSshVcsTest(GitVcsTest, SshVcsTest): | |
112 | @staticmethod |
|
110 | @staticmethod | |
113 | def _ssh_param(repo_name, user, ssh_key, client_ip): |
|
111 | def _ssh_param(repo_name, user, ssh_key, client_ip): | |
114 | # Set a custom ssh command in the global environment |
|
112 | # Set a custom ssh command in the global environment | |
115 | os.environ['GIT_SSH_COMMAND'] = r"""bash -c 'SSH_ORIGINAL_COMMAND="$2" SSH_CONNECTION="%s 1024 127.0.0.1 22" kallithea-cli ssh-serve -c %s %s %s' --""" % ( |
|
113 | os.environ['GIT_SSH_COMMAND'] = r"""bash -c 'SSH_ORIGINAL_COMMAND="$2" SSH_CONNECTION="%s 1024 127.0.0.1 22" kallithea-cli ssh-serve -c %s %s %s' --""" % ( | |
116 | client_ip, |
|
114 | client_ip, | |
117 | CONFIG['__file__'], |
|
115 | CONFIG['__file__'], | |
118 | user.user_id, |
|
116 | user.user_id, | |
119 | ssh_key.user_ssh_key_id) |
|
117 | ssh_key.user_ssh_key_id) | |
120 | return "ssh://someuser@somehost/%s""" % repo_name |
|
118 | return "ssh://someuser@somehost/%s""" % repo_name | |
121 |
|
119 | |||
122 | parametrize_vcs_test = base.parametrize('vt', [ |
|
120 | parametrize_vcs_test = base.parametrize('vt', [ | |
123 | HgHttpVcsTest, |
|
121 | HgHttpVcsTest, | |
124 | GitHttpVcsTest, |
|
122 | GitHttpVcsTest, | |
125 | HgSshVcsTest, |
|
123 | HgSshVcsTest, | |
126 | GitSshVcsTest, |
|
124 | GitSshVcsTest, | |
127 | ]) |
|
125 | ]) | |
128 | parametrize_vcs_test_hg = base.parametrize('vt', [ |
|
126 | parametrize_vcs_test_hg = base.parametrize('vt', [ | |
129 | HgHttpVcsTest, |
|
127 | HgHttpVcsTest, | |
130 | HgSshVcsTest, |
|
128 | HgSshVcsTest, | |
131 | ]) |
|
129 | ]) | |
132 | parametrize_vcs_test_http = base.parametrize('vt', [ |
|
130 | parametrize_vcs_test_http = base.parametrize('vt', [ | |
133 | HgHttpVcsTest, |
|
131 | HgHttpVcsTest, | |
134 | GitHttpVcsTest, |
|
132 | GitHttpVcsTest, | |
135 | ]) |
|
133 | ]) | |
136 |
|
134 | |||
class Command(object):
    """Run shell commands from a fixed working directory."""

    def __init__(self, cwd):
        # Directory every command is executed in.
        self.cwd = cwd

    def execute(self, *args, **environ):
        """
        Runs command on the system with given ``args`` using simple space
        join without safe quoting.

        Extra keyword arguments are exported into the subprocess
        environment; the special keyword ``ignoreReturnCode`` (popped, not
        exported) suppresses the zero-exit-status assertion.  Returns a
        ``(stdout, stderr)`` pair of decoded strings.
        """
        ignore_rc = environ.pop('ignoreReturnCode', False)
        command = ' '.join(args)
        if DEBUG:
            print('*** CMD %s ***' % command)
        # Start from the real environment but force a predictable locale
        # and neutralize any user-level Mercurial configuration.
        testenv = dict(os.environ,
                       LANG='en_US.UTF-8',
                       LANGUAGE='en_US:en',
                       HGPLAIN='',
                       HGRCPATH='')
        testenv.update(environ)
        proc = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd, env=testenv)
        stdout, stderr = proc.communicate()
        if DEBUG:
            if stdout:
                print('stdout:', stdout)
            if stderr:
                print('stderr:', stderr)
        if not ignore_rc:
            assert proc.returncode == 0
        return safe_str(stdout), safe_str(stderr)
167 |
|
165 | |||
168 |
|
166 | |||
def _get_tmp_dir(prefix='vcs_operations-', suffix=''):
    """Create and return a fresh scratch directory under the shared test tmp area."""
    return tempfile.mkdtemp(prefix=prefix, suffix=suffix, dir=base.TESTS_TMP_PATH)
171 |
|
169 | |||
172 |
|
170 | |||
def _add_files(vcs, dest_dir, files_no=3):
    """
    Generate some files, add it to dest_dir repo and push back
    vcs is git or hg and defines what VCS we want to make those files for

    :param vcs: 'hg' or 'git' — the command used for add/commit
    :param dest_dir: working copy directory to commit in
    """
    added_file = '%ssetup.py' % next(_RandomNameSequence())
    # Just create the (empty) file so the VCS has something to add.
    with open(os.path.join(dest_dir, added_file), 'a'):
        pass
    Command(dest_dir).execute(vcs, 'add', added_file)

    email = 'me@example.com'
    # Non-ASCII author name exercises encoding handling (skipped on Windows).
    if os.name == 'nt':
        author_str = 'User <%s>' % email
    else:
        author_str = 'User ΗΙ―Ια΄ <%s>' % email
    for i in range(files_no):
        cmd = """echo "added_line%s" >> %s""" % (i, added_file)
        Command(dest_dir).execute(cmd)
        if vcs == 'hg':
            cmd = """hg commit -m "committed new %s" -u "%s" "%s" """ % (
                i, author_str, added_file
            )
        elif vcs == 'git':
            cmd = """git commit -m "committed new %s" --author "%s" "%s" """ % (
                i, author_str, added_file
            )
        # git commit needs EMAIL on some machines
        Command(dest_dir).execute(cmd, EMAIL=email)
203 |
|
201 | |||
def _add_files_and_push(webserver, vt, dest_dir, clone_url, ignoreReturnCode=False, files_no=3):
    """Commit ``files_no`` generated files in ``dest_dir`` and push them.

    Returns the ``(stdout, stderr)`` of the push command, or ``(None, None)``
    for an unrecognized repo type.
    """
    _add_files(vt.repo_type, dest_dir, files_no=files_no)
    # PUSH it back
    push_args = {
        'hg': ('hg push -f --verbose', clone_url),
        'git': ('git push -f --verbose', clone_url, "master"),
    }.get(vt.repo_type)
    if push_args is None:
        return None, None
    return Command(dest_dir).execute(*push_args, ignoreReturnCode=ignoreReturnCode)
214 |
|
212 | |||
215 |
|
213 | |||
def _check_outgoing(vcs, cwd, clone_url):
    """Return (stdout, stderr) of listing local changesets not yet on the remote."""
    runner = Command(cwd)
    if vcs == 'hg':
        # hg removes the password from default URLs, so we have to provide it here via the clone_url
        return runner.execute('hg -q outgoing', clone_url, ignoreReturnCode=True)
    if vcs == 'git':
        runner.execute('git remote update')
        return runner.execute('git log origin/master..master')
223 |
|
221 | |||
224 |
|
222 | |||
def set_anonymous_access(enable=True):
    """Flip the default (anonymous) user's ``active`` flag and verify it stuck.

    Raises ``Exception`` when re-reading the default user shows the change
    did not take effect.
    """
    default_user = User.get_default_user()
    default_user.active = enable
    Session().commit()
    if User.get_default_user().active != enable:
        raise Exception('Cannot set anonymous access')
231 |
|
229 | |||
232 |
|
230 | |||
233 | #============================================================================== |
|
231 | #============================================================================== | |
234 | # TESTS |
|
232 | # TESTS | |
235 | #============================================================================== |
|
233 | #============================================================================== | |
236 |
|
234 | |||
237 |
|
235 | |||
def _check_proper_git_push(stdout, stderr):
    """Assert that git push output (on stderr) reports a clean push of master."""
    for unwanted in ('fatal', 'rejected'):
        assert unwanted not in stderr
    for expected in ('Pushing to', 'master -> master'):
        assert expected in stderr
243 |
|
241 | |||
244 |
|
242 | |||
245 | @pytest.mark.usefixtures("test_context_fixture") |
|
243 | @pytest.mark.usefixtures("test_context_fixture") | |
246 | class TestVCSOperations(base.TestController): |
|
244 | class TestVCSOperations(base.TestController): | |
247 |
|
245 | |||
    @classmethod
    def setup_class(cls):
        """Force authentication for all tests in this class."""
        # DISABLE ANONYMOUS ACCESS
        set_anonymous_access(False)
252 |
|
250 | |||
    @pytest.fixture()
    def testhook_cleanup(self):
        """Fixture: after the test, delete any '<hook>.testhook' Ui entries it installed."""
        yield
        # remove hook
        for hook in ['prechangegroup', 'pretxnchangegroup', 'preoutgoing', 'changegroup', 'outgoing', 'incoming']:
            entry = Ui.get_by_key('hooks', '%s.testhook' % hook)
            if entry:
                Session().delete(entry)
        Session().commit()
262 |
|
260 | |||
    @pytest.fixture(scope="module")
    def testfork(self):
        """Module-scoped fixture: fork the git and hg test repos under random names.

        Returns a dict mapping repo type ('git'/'hg') to the fork's name, so
        tests can push without touching the original repositories.
        """
        # create fork so the repo stays untouched
        git_fork_name = '%s_fork%s' % (base.GIT_REPO, next(_RandomNameSequence()))
        fixture.create_fork(base.GIT_REPO, git_fork_name)
        hg_fork_name = '%s_fork%s' % (base.HG_REPO, next(_RandomNameSequence()))
        fixture.create_fork(base.HG_REPO, hg_fork_name)
        return {'git': git_fork_name, 'hg': hg_fork_name}
271 |
|
269 | |||
    @parametrize_vcs_test
    def test_clone_repo_by_admin(self, webserver, vt):
        """An admin can clone the test repo; check the client's progress output."""
        clone_url = vt.repo_url_param(webserver, vt.repo_name)
        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir())

        if vt.repo_type == 'git':
            # git writes progress to stderr or stdout depending on version/tty
            assert 'Cloning into' in stdout + stderr
            assert stderr == '' or stdout == ''
        elif vt.repo_type == 'hg':
            assert 'requesting all changes' in stdout
            assert 'adding changesets' in stdout
            assert 'adding manifests' in stdout
            assert 'adding file changes' in stdout
            assert stderr == ''
286 |
|
284 | |||
    @parametrize_vcs_test_http
    def test_clone_wrong_credentials(self, webserver, vt):
        """Cloning over HTTP with a bad password must be rejected."""
        clone_url = vt.repo_url_param(webserver, vt.repo_name, password='bad!')
        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
        if vt.repo_type == 'git':
            assert 'fatal: Authentication failed' in stderr
        elif vt.repo_type == 'hg':
            assert 'abort: authorization failed' in stderr
295 |
|
293 | |||
    def test_clone_git_dir_as_hg(self, webserver):
        """hg-cloning a git repo URL must fail (404 or invalid-repository abort)."""
        clone_url = HgHttpVcsTest.repo_url_param(webserver, base.GIT_REPO)
        stdout, stderr = Command(base.TESTS_TMP_PATH).execute('hg clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
        assert 'HTTP Error 404: Not Found' in stderr or "not a valid repository" in stdout and 'abort:' in stderr
300 |
|
298 | |||
    def test_clone_hg_repo_as_git(self, webserver):
        """git-cloning an hg repo URL must fail with 'not found'."""
        clone_url = GitHttpVcsTest.repo_url_param(webserver, base.HG_REPO)
        stdout, stderr = Command(base.TESTS_TMP_PATH).execute('git clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
        assert 'not found' in stderr
305 |
|
303 | |||
    @parametrize_vcs_test
    def test_clone_non_existing_path(self, webserver, vt):
        """Cloning a non-existent repo name fails with 404 or an access-denied abort."""
        clone_url = vt.repo_url_param(webserver, 'trololo')
        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True)
        if vt.repo_type == 'git':
            assert 'not found' in stderr or 'abort: Access to %r denied' % 'trololo' in stderr
        elif vt.repo_type == 'hg':
            # HTTP gives a 404; SSH gives a remote abort relayed via stdout
            assert 'HTTP Error 404: Not Found' in stderr or 'abort: no suitable response from remote hg' in stderr and 'remote: abort: Access to %r denied' % 'trololo' in stdout
314 |
|
312 | |||
    @parametrize_vcs_test
    def test_push_new_repo(self, webserver, vt):
        """Create an empty repo via the JSON-RPC API, clone it, push 3 commits,
        and verify both the push output and the resulting UserLog entries."""
        # Clear the log so we know what is added
        UserLog.query().delete()
        Session().commit()

        # Create an empty server repo using the API
        repo_name = 'new_%s_%s' % (vt.repo_type, next(_RandomNameSequence()))
        usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN)
        params = {
            "id": 7,
            "api_key": usr.api_key,
            "method": 'create_repo',
            "args": dict(repo_name=repo_name,
                         owner=base.TEST_USER_ADMIN_LOGIN,
                         repo_type=vt.repo_type),
        }
        req = urllib.request.Request(
            'http://%s:%s/_admin/api' % webserver.server_address,
            data=ascii_bytes(json.dumps(params)),
            headers={'content-type': 'application/json'})
        response = urllib.request.urlopen(req)
        result = json.loads(response.read())
        # Expect something like:
        # {u'result': {u'msg': u'Created new repository `new_XXX`', u'task': None, u'success': True}, u'id': 7, u'error': None}
        assert result['result']['success']

        # Create local clone of the empty server repo
        local_clone_dir = _get_tmp_dir()
        clone_url = vt.repo_url_param(webserver, repo_name)
        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, local_clone_dir)

        # Make 3 commits and push to the empty server repo.
        # The server repo doesn't have any other heads than the
        # refs/heads/master we are pushing, but the `git log` in the push hook
        # should still list the 3 commits.
        stdout, stderr = _add_files_and_push(webserver, vt, local_clone_dir, clone_url=clone_url)
        if vt.repo_type == 'git':
            _check_proper_git_push(stdout, stderr)
        elif vt.repo_type == 'hg':
            assert 'pushing to ' in stdout
            assert 'remote: added ' in stdout

        # Verify that we got the right events in UserLog. Expect something like:
        # <UserLog('id:new_git_XXX:started_following_repo')>
        # <UserLog('id:new_git_XXX:user_created_repo')>
        # <UserLog('id:new_git_XXX:pull')>
        # <UserLog('id:new_git_XXX:push:aed9d4c1732a1927da3be42c47eb9afdc200d427,d38b083a07af10a9f44193486959a96a23db78da,4841ff9a2b385bec995f4679ef649adb3f437622')>
        # Each entry is reduced to (action, number-of-changeset-ids).
        action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
        assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == ([
            ('started_following_repo', 0),
            ('user_created_repo', 0),
            ('pull', 0),
            ('push', 3)]
            if vt.repo_type == 'git' else [
            ('started_following_repo', 0),
            ('user_created_repo', 0),
            # (u'pull', 0), # Mercurial outgoing hook is not called for empty clones
            ('push', 3)])
374 |
|
372 | |||
    @parametrize_vcs_test
    def test_push_new_file(self, webserver, testfork, vt):
        """Clone the main repo, then push new commits to its fork and verify
        the push output and UserLog entries."""
        UserLog.query().delete()
        Session().commit()

        dest_dir = _get_tmp_dir()
        clone_url = vt.repo_url_param(webserver, vt.repo_name)
        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)

        # Push goes to the fork, not the repo we cloned from
        clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type])
        stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, clone_url=clone_url)

        if vt.repo_type == 'git':
            _check_proper_git_push(stdout, stderr)
        elif vt.repo_type == 'hg':
            assert 'pushing to' in stdout
            assert 'Repository size' in stdout
            assert 'Last revision is now' in stdout

        # One pull (the clone) and one push of 3 changesets expected
        action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)]
        assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \
            [('pull', 0), ('push', 3)]
397 |
|
395 | |||
    @parametrize_vcs_test
    def test_pull(self, webserver, testfork, vt):
        """Pull into a fresh local repo; also exercise URL variants with
        extra '/' and '/./' around the repo name."""
        UserLog.query().delete()
        Session().commit()

        dest_dir = _get_tmp_dir()
        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'init', dest_dir)

        clone_url = vt.repo_url_param(webserver, vt.repo_name)
        stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url)

        if vt.repo_type == 'git':
            assert 'FETCH_HEAD' in stderr
        elif vt.repo_type == 'hg':
            assert 'new changesets' in stdout

        action_parts = [ul.action for ul in UserLog.query().order_by(UserLog.user_log_id)]
        assert action_parts == ['pull']

        # Test handling of URLs with extra '/' around repo_name
        stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url.replace('/' + vt.repo_name, '/./%s/' % vt.repo_name), ignoreReturnCode=True)
        if issubclass(vt, HttpVcsTest):
            if vt.repo_type == 'git':
                # NOTE: when pulling from http://hostname/./vcs_test_git/ , the git client will normalize that and issue an HTTP request to /vcs_test_git/info/refs
                assert 'Already up to date.' in stdout
            else:
                assert vt.repo_type == 'hg'
                assert "abort: HTTP Error 404: Not Found" in stderr
        else:
            # SSH transport: server-side access check rejects the mangled path
            assert issubclass(vt, SshVcsTest)
            if vt.repo_type == 'git':
                assert "abort: Access to './%s' denied" % vt.repo_name in stderr
            else:
                assert "abort: Access to './%s' denied" % vt.repo_name in stdout

        # A single trailing '/' must be tolerated by both transports
        stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url.replace('/' + vt.repo_name, '/%s/' % vt.repo_name), ignoreReturnCode=True)
        if vt.repo_type == 'git':
            assert 'Already up to date.' in stdout
        else:
            assert vt.repo_type == 'hg'
            assert "no changes found" in stdout
        assert "denied" not in stderr
        assert "denied" not in stdout
        assert "404" not in stdout
442 |
|
440 | |||
    @parametrize_vcs_test
    def test_push_invalidates_cache(self, webserver, testfork, vt):
        """A push must invalidate the repo cache: the cached tip changes and
        the CacheInvalidation row for the fork is gone afterwards."""
        pre_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == testfork[vt.repo_type])]

        # Ensure there is an active cache key for the fork before pushing
        key = CacheInvalidation.query().filter(CacheInvalidation.cache_key
                                               == testfork[vt.repo_type]).scalar()
        if not key:
            key = CacheInvalidation(testfork[vt.repo_type], testfork[vt.repo_type])
            Session().add(key)

        key.cache_active = True
        Session().commit()

        dest_dir = _get_tmp_dir()
        clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type])
        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)

        stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, files_no=1, clone_url=clone_url)

        if vt.repo_type == 'git':
            _check_proper_git_push(stdout, stderr)

        post_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == testfork[vt.repo_type])]
        assert pre_cached_tip != post_cached_tip

        # The push must have removed the invalidation entry
        key = CacheInvalidation.query().filter(CacheInvalidation.cache_key
                                               == testfork[vt.repo_type]).all()
        assert key == []
471 |
|
469 | |||
    @parametrize_vcs_test_http
    def test_push_wrong_credentials(self, webserver, vt):
        """Pushing over HTTP with bad credentials must be rejected."""
        dest_dir = _get_tmp_dir()
        clone_url = vt.repo_url_param(webserver, vt.repo_name)
        stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir)

        # Same repo, but with a bogus username/password in the push URL
        clone_url = webserver.repo_url(vt.repo_name, username='bad', password='name')
        stdout, stderr = _add_files_and_push(webserver, vt, dest_dir,
                                             clone_url=clone_url, ignoreReturnCode=True)

        if vt.repo_type == 'git':
            assert 'fatal: Authentication failed' in stderr
        elif vt.repo_type == 'hg':
            assert 'abort: authorization failed' in stderr
486 |
|
484 | |||
487 | @parametrize_vcs_test |
|
485 | @parametrize_vcs_test | |
488 | def test_push_with_readonly_credentials(self, webserver, vt): |
|
486 | def test_push_with_readonly_credentials(self, webserver, vt): | |
489 | UserLog.query().delete() |
|
487 | UserLog.query().delete() | |
490 | Session().commit() |
|
488 | Session().commit() | |
491 |
|
489 | |||
492 | dest_dir = _get_tmp_dir() |
|
490 | dest_dir = _get_tmp_dir() | |
493 | clone_url = vt.repo_url_param(webserver, vt.repo_name, username=base.TEST_USER_REGULAR_LOGIN, password=base.TEST_USER_REGULAR_PASS) |
|
491 | clone_url = vt.repo_url_param(webserver, vt.repo_name, username=base.TEST_USER_REGULAR_LOGIN, password=base.TEST_USER_REGULAR_PASS) | |
494 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir) |
|
492 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir) | |
495 |
|
493 | |||
496 | stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, ignoreReturnCode=True, clone_url=clone_url) |
|
494 | stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, ignoreReturnCode=True, clone_url=clone_url) | |
497 |
|
495 | |||
498 | if vt.repo_type == 'git': |
|
496 | if vt.repo_type == 'git': | |
499 | assert 'The requested URL returned error: 403' in stderr or 'abort: Push access to %r denied' % str(vt.repo_name) in stderr |
|
497 | assert 'The requested URL returned error: 403' in stderr or 'abort: Push access to %r denied' % str(vt.repo_name) in stderr | |
500 | elif vt.repo_type == 'hg': |
|
498 | elif vt.repo_type == 'hg': | |
501 | assert 'abort: HTTP Error 403: Forbidden' in stderr or 'abort: push failed on remote' in stderr and 'remote: Push access to %r denied' % str(vt.repo_name) in stdout |
|
499 | assert 'abort: HTTP Error 403: Forbidden' in stderr or 'abort: push failed on remote' in stderr and 'remote: Push access to %r denied' % str(vt.repo_name) in stdout | |
502 |
|
500 | |||
503 | action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)] |
|
501 | action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)] | |
504 | assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \ |
|
502 | assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \ | |
505 | [('pull', 0)] |
|
503 | [('pull', 0)] | |
506 |
|
504 | |||
507 | @parametrize_vcs_test |
|
505 | @parametrize_vcs_test | |
508 | def test_push_back_to_wrong_url(self, webserver, vt): |
|
506 | def test_push_back_to_wrong_url(self, webserver, vt): | |
509 | dest_dir = _get_tmp_dir() |
|
507 | dest_dir = _get_tmp_dir() | |
510 | clone_url = vt.repo_url_param(webserver, vt.repo_name) |
|
508 | clone_url = vt.repo_url_param(webserver, vt.repo_name) | |
511 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir) |
|
509 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir) | |
512 |
|
510 | |||
513 | stdout, stderr = _add_files_and_push( |
|
511 | stdout, stderr = _add_files_and_push( | |
514 | webserver, vt, dest_dir, clone_url='http://%s:%s/tmp' % ( |
|
512 | webserver, vt, dest_dir, clone_url='http://%s:%s/tmp' % ( | |
515 | webserver.server_address[0], webserver.server_address[1]), |
|
513 | webserver.server_address[0], webserver.server_address[1]), | |
516 | ignoreReturnCode=True) |
|
514 | ignoreReturnCode=True) | |
517 |
|
515 | |||
518 | if vt.repo_type == 'git': |
|
516 | if vt.repo_type == 'git': | |
519 | assert 'not found' in stderr |
|
517 | assert 'not found' in stderr | |
520 | elif vt.repo_type == 'hg': |
|
518 | elif vt.repo_type == 'hg': | |
521 | assert 'HTTP Error 404: Not Found' in stderr |
|
519 | assert 'HTTP Error 404: Not Found' in stderr | |
522 |
|
520 | |||
523 | @parametrize_vcs_test |
|
521 | @parametrize_vcs_test | |
524 | def test_ip_restriction(self, webserver, vt): |
|
522 | def test_ip_restriction(self, webserver, vt): | |
525 | user_model = UserModel() |
|
523 | user_model = UserModel() | |
526 | try: |
|
524 | try: | |
527 | # Add IP constraint that excludes the test context: |
|
525 | # Add IP constraint that excludes the test context: | |
528 | user_model.add_extra_ip(base.TEST_USER_ADMIN_LOGIN, '10.10.10.10/32') |
|
526 | user_model.add_extra_ip(base.TEST_USER_ADMIN_LOGIN, '10.10.10.10/32') | |
529 | Session().commit() |
|
527 | Session().commit() | |
530 | # IP permissions are cached, need to wait for the cache in the server process to expire |
|
528 | # IP permissions are cached, need to wait for the cache in the server process to expire | |
531 | time.sleep(1.5) |
|
529 | time.sleep(1.5) | |
532 | clone_url = vt.repo_url_param(webserver, vt.repo_name) |
|
530 | clone_url = vt.repo_url_param(webserver, vt.repo_name) | |
533 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True) |
|
531 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True) | |
534 | if vt.repo_type == 'git': |
|
532 | if vt.repo_type == 'git': | |
535 | # The message apparently changed in Git 1.8.3, so match it loosely. |
|
533 | # The message apparently changed in Git 1.8.3, so match it loosely. | |
536 | assert re.search(r'\b403\b', stderr) or 'abort: User test_admin from 127.0.0.127 cannot be authorized' in stderr |
|
534 | assert re.search(r'\b403\b', stderr) or 'abort: User test_admin from 127.0.0.127 cannot be authorized' in stderr | |
537 | elif vt.repo_type == 'hg': |
|
535 | elif vt.repo_type == 'hg': | |
538 | assert 'abort: HTTP Error 403: Forbidden' in stderr or 'remote: abort: User test_admin from 127.0.0.127 cannot be authorized' in stdout |
|
536 | assert 'abort: HTTP Error 403: Forbidden' in stderr or 'remote: abort: User test_admin from 127.0.0.127 cannot be authorized' in stdout | |
539 | finally: |
|
537 | finally: | |
540 | # release IP restrictions |
|
538 | # release IP restrictions | |
541 | for ip in UserIpMap.query(): |
|
539 | for ip in UserIpMap.query(): | |
542 | UserIpMap.delete(ip.ip_id) |
|
540 | UserIpMap.delete(ip.ip_id) | |
543 | Session().commit() |
|
541 | Session().commit() | |
544 | # IP permissions are cached, need to wait for the cache in the server process to expire |
|
542 | # IP permissions are cached, need to wait for the cache in the server process to expire | |
545 | time.sleep(1.5) |
|
543 | time.sleep(1.5) | |
546 |
|
544 | |||
547 | clone_url = vt.repo_url_param(webserver, vt.repo_name) |
|
545 | clone_url = vt.repo_url_param(webserver, vt.repo_name) | |
548 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir()) |
|
546 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir()) | |
549 |
|
547 | |||
550 | if vt.repo_type == 'git': |
|
548 | if vt.repo_type == 'git': | |
551 | assert 'Cloning into' in stdout + stderr |
|
549 | assert 'Cloning into' in stdout + stderr | |
552 | assert stderr == '' or stdout == '' |
|
550 | assert stderr == '' or stdout == '' | |
553 | elif vt.repo_type == 'hg': |
|
551 | elif vt.repo_type == 'hg': | |
554 | assert 'requesting all changes' in stdout |
|
552 | assert 'requesting all changes' in stdout | |
555 | assert 'adding changesets' in stdout |
|
553 | assert 'adding changesets' in stdout | |
556 | assert 'adding manifests' in stdout |
|
554 | assert 'adding manifests' in stdout | |
557 | assert 'adding file changes' in stdout |
|
555 | assert 'adding file changes' in stdout | |
558 |
|
556 | |||
559 | assert stderr == '' |
|
557 | assert stderr == '' | |
560 |
|
558 | |||
561 | @parametrize_vcs_test_hg # git hooks doesn't work like hg hooks |
|
559 | @parametrize_vcs_test_hg # git hooks doesn't work like hg hooks | |
562 | def test_custom_hooks_preoutgoing(self, testhook_cleanup, webserver, testfork, vt): |
|
560 | def test_custom_hooks_preoutgoing(self, testhook_cleanup, webserver, testfork, vt): | |
563 | # set prechangegroup to failing hook (returns True) |
|
561 | # set prechangegroup to failing hook (returns True) | |
564 | Ui.create_or_update_hook('preoutgoing.testhook', 'python:kallithea.tests.fixture.failing_test_hook') |
|
562 | Ui.create_or_update_hook('preoutgoing.testhook', 'python:kallithea.tests.fixture.failing_test_hook') | |
565 | Session().commit() |
|
563 | Session().commit() | |
566 | # clone repo |
|
564 | # clone repo | |
567 | clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS) |
|
565 | clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS) | |
568 | dest_dir = _get_tmp_dir() |
|
566 | dest_dir = _get_tmp_dir() | |
569 | stdout, stderr = Command(base.TESTS_TMP_PATH) \ |
|
567 | stdout, stderr = Command(base.TESTS_TMP_PATH) \ | |
570 | .execute(vt.repo_type, 'clone', clone_url, dest_dir, ignoreReturnCode=True) |
|
568 | .execute(vt.repo_type, 'clone', clone_url, dest_dir, ignoreReturnCode=True) | |
571 | if vt.repo_type == 'hg': |
|
569 | if vt.repo_type == 'hg': | |
572 | assert 'preoutgoing.testhook hook failed' in stdout |
|
570 | assert 'preoutgoing.testhook hook failed' in stdout | |
573 | elif vt.repo_type == 'git': |
|
571 | elif vt.repo_type == 'git': | |
574 | assert 'error: 406' in stderr |
|
572 | assert 'error: 406' in stderr | |
575 |
|
573 | |||
576 | @parametrize_vcs_test_hg # git hooks doesn't work like hg hooks |
|
574 | @parametrize_vcs_test_hg # git hooks doesn't work like hg hooks | |
577 | def test_custom_hooks_prechangegroup(self, testhook_cleanup, webserver, testfork, vt): |
|
575 | def test_custom_hooks_prechangegroup(self, testhook_cleanup, webserver, testfork, vt): | |
578 | # set prechangegroup to failing hook (returns exit code 1) |
|
576 | # set prechangegroup to failing hook (returns exit code 1) | |
579 | Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.failing_test_hook') |
|
577 | Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.failing_test_hook') | |
580 | Session().commit() |
|
578 | Session().commit() | |
581 | # clone repo |
|
579 | # clone repo | |
582 | clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS) |
|
580 | clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS) | |
583 | dest_dir = _get_tmp_dir() |
|
581 | dest_dir = _get_tmp_dir() | |
584 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir) |
|
582 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir) | |
585 |
|
583 | |||
586 | stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, clone_url, |
|
584 | stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, clone_url, | |
587 | ignoreReturnCode=True) |
|
585 | ignoreReturnCode=True) | |
588 | assert 'failing_test_hook failed' in stdout + stderr |
|
586 | assert 'failing_test_hook failed' in stdout + stderr | |
589 | assert 'Traceback' not in stdout + stderr |
|
587 | assert 'Traceback' not in stdout + stderr | |
590 | assert 'prechangegroup.testhook hook failed' in stdout + stderr |
|
588 | assert 'prechangegroup.testhook hook failed' in stdout + stderr | |
591 | # there are still outgoing changesets |
|
589 | # there are still outgoing changesets | |
592 | stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url) |
|
590 | stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url) | |
593 | assert stdout != '' |
|
591 | assert stdout != '' | |
594 |
|
592 | |||
595 | # set prechangegroup hook to exception throwing method |
|
593 | # set prechangegroup hook to exception throwing method | |
596 | Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.exception_test_hook') |
|
594 | Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.exception_test_hook') | |
597 | Session().commit() |
|
595 | Session().commit() | |
598 | # re-try to push |
|
596 | # re-try to push | |
599 | stdout, stderr = Command(dest_dir).execute('%s push' % vt.repo_type, clone_url, ignoreReturnCode=True) |
|
597 | stdout, stderr = Command(dest_dir).execute('%s push' % vt.repo_type, clone_url, ignoreReturnCode=True) | |
600 | if vt is HgHttpVcsTest: |
|
598 | if vt is HgHttpVcsTest: | |
601 | # like with 'hg serve...' 'HTTP Error 500: INTERNAL SERVER ERROR' should be returned |
|
599 | # like with 'hg serve...' 'HTTP Error 500: INTERNAL SERVER ERROR' should be returned | |
602 | assert 'HTTP Error 500: INTERNAL SERVER ERROR' in stderr |
|
600 | assert 'HTTP Error 500: INTERNAL SERVER ERROR' in stderr | |
603 | elif vt is HgSshVcsTest: |
|
601 | elif vt is HgSshVcsTest: | |
604 | assert 'remote: Exception: exception_test_hook threw an exception' in stdout |
|
602 | assert 'remote: Exception: exception_test_hook threw an exception' in stdout | |
605 | else: assert False |
|
603 | else: assert False | |
606 | # there are still outgoing changesets |
|
604 | # there are still outgoing changesets | |
607 | stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url) |
|
605 | stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url) | |
608 | assert stdout != '' |
|
606 | assert stdout != '' | |
609 |
|
607 | |||
610 | # set prechangegroup hook to method that returns False |
|
608 | # set prechangegroup hook to method that returns False | |
611 | Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.passing_test_hook') |
|
609 | Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.passing_test_hook') | |
612 | Session().commit() |
|
610 | Session().commit() | |
613 | # re-try to push |
|
611 | # re-try to push | |
614 | stdout, stderr = Command(dest_dir).execute('%s push' % vt.repo_type, clone_url, ignoreReturnCode=True) |
|
612 | stdout, stderr = Command(dest_dir).execute('%s push' % vt.repo_type, clone_url, ignoreReturnCode=True) | |
615 | assert 'passing_test_hook succeeded' in stdout + stderr |
|
613 | assert 'passing_test_hook succeeded' in stdout + stderr | |
616 | assert 'Traceback' not in stdout + stderr |
|
614 | assert 'Traceback' not in stdout + stderr | |
617 | assert 'prechangegroup.testhook hook failed' not in stdout + stderr |
|
615 | assert 'prechangegroup.testhook hook failed' not in stdout + stderr | |
618 | # no more outgoing changesets |
|
616 | # no more outgoing changesets | |
619 | stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url) |
|
617 | stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url) | |
620 | assert stdout == '' |
|
618 | assert stdout == '' | |
621 | assert stderr == '' |
|
619 | assert stderr == '' | |
622 |
|
620 | |||
623 | def test_add_submodule_git(self, webserver, testfork): |
|
621 | def test_add_submodule_git(self, webserver, testfork): | |
624 | dest_dir = _get_tmp_dir() |
|
622 | dest_dir = _get_tmp_dir() | |
625 | clone_url = GitHttpVcsTest.repo_url_param(webserver, base.GIT_REPO) |
|
623 | clone_url = GitHttpVcsTest.repo_url_param(webserver, base.GIT_REPO) | |
626 |
|
624 | |||
627 | fork_url = GitHttpVcsTest.repo_url_param(webserver, testfork['git']) |
|
625 | fork_url = GitHttpVcsTest.repo_url_param(webserver, testfork['git']) | |
628 |
|
626 | |||
629 | # add submodule |
|
627 | # add submodule | |
630 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute('git clone', fork_url, dest_dir) |
|
628 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute('git clone', fork_url, dest_dir) | |
631 | stdout, stderr = Command(dest_dir).execute('git submodule add', clone_url, 'testsubmodule') |
|
629 | stdout, stderr = Command(dest_dir).execute('git submodule add', clone_url, 'testsubmodule') | |
632 | stdout, stderr = Command(dest_dir).execute('git commit -am "added testsubmodule pointing to', clone_url, '"', EMAIL=base.TEST_USER_ADMIN_EMAIL) |
|
630 | stdout, stderr = Command(dest_dir).execute('git commit -am "added testsubmodule pointing to', clone_url, '"', EMAIL=base.TEST_USER_ADMIN_EMAIL) | |
633 | stdout, stderr = Command(dest_dir).execute('git push', fork_url, 'master') |
|
631 | stdout, stderr = Command(dest_dir).execute('git push', fork_url, 'master') | |
634 |
|
632 | |||
635 | # check for testsubmodule link in files page |
|
633 | # check for testsubmodule link in files page | |
636 | self.log_user() |
|
634 | self.log_user() | |
637 | response = self.app.get(base.url(controller='files', action='index', |
|
635 | response = self.app.get(base.url(controller='files', action='index', | |
638 | repo_name=testfork['git'], |
|
636 | repo_name=testfork['git'], | |
639 | revision='tip', |
|
637 | revision='tip', | |
640 | f_path='/')) |
|
638 | f_path='/')) | |
641 | # check _repo_files_url that will be used to reload as AJAX |
|
639 | # check _repo_files_url that will be used to reload as AJAX | |
642 | response.mustcontain('var _repo_files_url = ("/%s/files/");' % testfork['git']) |
|
640 | response.mustcontain('var _repo_files_url = ("/%s/files/");' % testfork['git']) | |
643 |
|
641 | |||
644 | response.mustcontain('<a class="submodule-dir" href="%s" target="_blank"><i class="icon-file-submodule"></i><span>testsubmodule @ ' % clone_url) |
|
642 | response.mustcontain('<a class="submodule-dir" href="%s" target="_blank"><i class="icon-file-submodule"></i><span>testsubmodule @ ' % clone_url) | |
645 |
|
643 | |||
646 | # check that following a submodule link actually works - and redirects |
|
644 | # check that following a submodule link actually works - and redirects | |
647 | response = self.app.get(base.url(controller='files', action='index', |
|
645 | response = self.app.get(base.url(controller='files', action='index', | |
648 | repo_name=testfork['git'], |
|
646 | repo_name=testfork['git'], | |
649 | revision='tip', |
|
647 | revision='tip', | |
650 | f_path='/testsubmodule'), |
|
648 | f_path='/testsubmodule'), | |
651 | status=302) |
|
649 | status=302) | |
652 | assert response.location == clone_url |
|
650 | assert response.location == clone_url |
@@ -1,215 +1,213 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 | # This program is free software: you can redistribute it and/or modify |
|
2 | # This program is free software: you can redistribute it and/or modify | |
3 | # it under the terms of the GNU General Public License as published by |
|
3 | # it under the terms of the GNU General Public License as published by | |
4 | # the Free Software Foundation, either version 3 of the License, or |
|
4 | # the Free Software Foundation, either version 3 of the License, or | |
5 | # (at your option) any later version. |
|
5 | # (at your option) any later version. | |
6 | # |
|
6 | # | |
7 | # This program is distributed in the hope that it will be useful, |
|
7 | # This program is distributed in the hope that it will be useful, | |
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
10 | # GNU General Public License for more details. |
|
10 | # GNU General Public License for more details. | |
11 | # |
|
11 | # | |
12 | # You should have received a copy of the GNU General Public License |
|
12 | # You should have received a copy of the GNU General Public License | |
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
14 | """ |
|
14 | """ | |
15 | kallithea.tests.scripts.manual_test_concurrency |
|
15 | kallithea.tests.scripts.manual_test_concurrency | |
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
17 |
|
17 | |||
18 | Test suite for making push/pull operations |
|
18 | Test suite for making push/pull operations | |
19 |
|
19 | |||
20 | This file was forked by the Kallithea project in July 2014. |
|
20 | This file was forked by the Kallithea project in July 2014. | |
21 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | Original author and date, and relevant copyright and licensing information is below: | |
22 | :created_on: Dec 30, 2010 |
|
22 | :created_on: Dec 30, 2010 | |
23 | :author: marcink |
|
23 | :author: marcink | |
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
25 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | :license: GPLv3, see LICENSE.md for more details. | |
26 |
|
26 | |||
27 | """ |
|
27 | """ | |
28 |
|
28 | |||
29 | from __future__ import print_function |
|
|||
30 |
|
||||
31 | import logging |
|
29 | import logging | |
32 | import os |
|
30 | import os | |
33 | import shutil |
|
31 | import shutil | |
34 | import sys |
|
32 | import sys | |
35 | import tempfile |
|
33 | import tempfile | |
36 | from os.path import dirname |
|
34 | from os.path import dirname | |
37 | from subprocess import PIPE, Popen |
|
35 | from subprocess import PIPE, Popen | |
38 |
|
36 | |||
39 | from paste.deploy import appconfig |
|
37 | from paste.deploy import appconfig | |
40 | from sqlalchemy import engine_from_config |
|
38 | from sqlalchemy import engine_from_config | |
41 |
|
39 | |||
42 | from kallithea.config.environment import load_environment |
|
40 | from kallithea.config.environment import load_environment | |
43 | from kallithea.lib.auth import get_crypt_password |
|
41 | from kallithea.lib.auth import get_crypt_password | |
44 | from kallithea.model import meta |
|
42 | from kallithea.model import meta | |
45 | from kallithea.model.base import init_model |
|
43 | from kallithea.model.base import init_model | |
46 | from kallithea.model.db import Repository, Ui, User |
|
44 | from kallithea.model.db import Repository, Ui, User | |
47 | from kallithea.tests.base import HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS |
|
45 | from kallithea.tests.base import HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS | |
48 |
|
46 | |||
49 |
|
47 | |||
50 | rel_path = dirname(dirname(dirname(dirname(os.path.abspath(__file__))))) |
|
48 | rel_path = dirname(dirname(dirname(dirname(os.path.abspath(__file__))))) | |
51 | conf = appconfig('config:development.ini', relative_to=rel_path) |
|
49 | conf = appconfig('config:development.ini', relative_to=rel_path) | |
52 | load_environment(conf.global_conf, conf.local_conf) |
|
50 | load_environment(conf.global_conf, conf.local_conf) | |
53 |
|
51 | |||
54 | USER = TEST_USER_ADMIN_LOGIN |
|
52 | USER = TEST_USER_ADMIN_LOGIN | |
55 | PASS = TEST_USER_ADMIN_PASS |
|
53 | PASS = TEST_USER_ADMIN_PASS | |
56 | HOST = 'server.local' |
|
54 | HOST = 'server.local' | |
57 | METHOD = 'pull' |
|
55 | METHOD = 'pull' | |
58 | DEBUG = True |
|
56 | DEBUG = True | |
59 | log = logging.getLogger(__name__) |
|
57 | log = logging.getLogger(__name__) | |
60 |
|
58 | |||
61 |
|
59 | |||
62 | class Command(object): |
|
60 | class Command(object): | |
63 |
|
61 | |||
64 | def __init__(self, cwd): |
|
62 | def __init__(self, cwd): | |
65 | self.cwd = cwd |
|
63 | self.cwd = cwd | |
66 |
|
64 | |||
67 | def execute(self, cmd, *args): |
|
65 | def execute(self, cmd, *args): | |
68 | """Runs command on the system with given ``args``. |
|
66 | """Runs command on the system with given ``args``. | |
69 | """ |
|
67 | """ | |
70 |
|
68 | |||
71 | command = cmd + ' ' + ' '.join(args) |
|
69 | command = cmd + ' ' + ' '.join(args) | |
72 | log.debug('Executing %s', command) |
|
70 | log.debug('Executing %s', command) | |
73 | if DEBUG: |
|
71 | if DEBUG: | |
74 | print(command) |
|
72 | print(command) | |
75 | p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd) |
|
73 | p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd) | |
76 | stdout, stderr = p.communicate() |
|
74 | stdout, stderr = p.communicate() | |
77 | if DEBUG: |
|
75 | if DEBUG: | |
78 | print(stdout, stderr) |
|
76 | print(stdout, stderr) | |
79 | return stdout, stderr |
|
77 | return stdout, stderr | |
80 |
|
78 | |||
81 |
|
79 | |||
82 | def get_session(): |
|
80 | def get_session(): | |
83 | engine = engine_from_config(conf, 'sqlalchemy.') |
|
81 | engine = engine_from_config(conf, 'sqlalchemy.') | |
84 | init_model(engine) |
|
82 | init_model(engine) | |
85 | sa = meta.Session |
|
83 | sa = meta.Session | |
86 | return sa |
|
84 | return sa | |
87 |
|
85 | |||
88 |
|
86 | |||
89 | def create_test_user(force=True): |
|
87 | def create_test_user(force=True): | |
90 | print('creating test user') |
|
88 | print('creating test user') | |
91 | sa = get_session() |
|
89 | sa = get_session() | |
92 |
|
90 | |||
93 | user = sa.query(User).filter(User.username == USER).scalar() |
|
91 | user = sa.query(User).filter(User.username == USER).scalar() | |
94 |
|
92 | |||
95 | if force and user is not None: |
|
93 | if force and user is not None: | |
96 | print('removing current user') |
|
94 | print('removing current user') | |
97 | for repo in sa.query(Repository).filter(Repository.user == user).all(): |
|
95 | for repo in sa.query(Repository).filter(Repository.user == user).all(): | |
98 | sa.delete(repo) |
|
96 | sa.delete(repo) | |
99 | sa.delete(user) |
|
97 | sa.delete(user) | |
100 | sa.commit() |
|
98 | sa.commit() | |
101 |
|
99 | |||
102 | if user is None or force: |
|
100 | if user is None or force: | |
103 | print('creating new one') |
|
101 | print('creating new one') | |
104 | new_usr = User() |
|
102 | new_usr = User() | |
105 | new_usr.username = USER |
|
103 | new_usr.username = USER | |
106 | new_usr.password = get_crypt_password(PASS) |
|
104 | new_usr.password = get_crypt_password(PASS) | |
107 | new_usr.email = 'mail@example.com' |
|
105 | new_usr.email = 'mail@example.com' | |
108 | new_usr.name = 'test' |
|
106 | new_usr.name = 'test' | |
109 | new_usr.lastname = 'lasttestname' |
|
107 | new_usr.lastname = 'lasttestname' | |
110 | new_usr.active = True |
|
108 | new_usr.active = True | |
111 | new_usr.admin = True |
|
109 | new_usr.admin = True | |
112 | sa.add(new_usr) |
|
110 | sa.add(new_usr) | |
113 | sa.commit() |
|
111 | sa.commit() | |
114 |
|
112 | |||
115 | print('done') |
|
113 | print('done') | |
116 |
|
114 | |||
117 |
|
115 | |||
118 | def create_test_repo(force=True): |
|
116 | def create_test_repo(force=True): | |
119 | print('creating test repo') |
|
117 | print('creating test repo') | |
120 | from kallithea.model.repo import RepoModel |
|
118 | from kallithea.model.repo import RepoModel | |
121 | sa = get_session() |
|
119 | sa = get_session() | |
122 |
|
120 | |||
123 | user = sa.query(User).filter(User.username == USER).scalar() |
|
121 | user = sa.query(User).filter(User.username == USER).scalar() | |
124 | if user is None: |
|
122 | if user is None: | |
125 | raise Exception('user not found') |
|
123 | raise Exception('user not found') | |
126 |
|
124 | |||
127 | repo = sa.query(Repository).filter(Repository.repo_name == HG_REPO).scalar() |
|
125 | repo = sa.query(Repository).filter(Repository.repo_name == HG_REPO).scalar() | |
128 |
|
126 | |||
129 | if repo is None: |
|
127 | if repo is None: | |
130 | print('repo not found creating') |
|
128 | print('repo not found creating') | |
131 |
|
129 | |||
132 | form_data = {'repo_name': HG_REPO, |
|
130 | form_data = {'repo_name': HG_REPO, | |
133 | 'repo_type': 'hg', |
|
131 | 'repo_type': 'hg', | |
134 | 'private': False, |
|
132 | 'private': False, | |
135 | 'clone_uri': ''} |
|
133 | 'clone_uri': ''} | |
136 | rm = RepoModel() |
|
134 | rm = RepoModel() | |
137 | rm.base_path = '/home/hg' |
|
135 | rm.base_path = '/home/hg' | |
138 | rm.create(form_data, user) |
|
136 | rm.create(form_data, user) | |
139 |
|
137 | |||
140 | print('done') |
|
138 | print('done') | |
141 |
|
139 | |||
142 |
|
140 | |||
143 | def set_anonymous_access(enable=True): |
|
141 | def set_anonymous_access(enable=True): | |
144 | sa = get_session() |
|
142 | sa = get_session() | |
145 | user = sa.query(User).filter(User.username == 'default').one() |
|
143 | user = sa.query(User).filter(User.username == 'default').one() | |
146 | user.active = enable |
|
144 | user.active = enable | |
147 | sa.add(user) |
|
145 | sa.add(user) | |
148 | sa.commit() |
|
146 | sa.commit() | |
149 |
|
147 | |||
150 |
|
148 | |||
151 | def get_anonymous_access(): |
|
149 | def get_anonymous_access(): | |
152 | sa = get_session() |
|
150 | sa = get_session() | |
153 | return sa.query(User).filter(User.username == 'default').one().active |
|
151 | return sa.query(User).filter(User.username == 'default').one().active | |
154 |
|
152 | |||
155 |
|
153 | |||
156 | #============================================================================== |
|
154 | #============================================================================== | |
157 | # TESTS |
|
155 | # TESTS | |
158 | #============================================================================== |
|
156 | #============================================================================== | |
159 | def test_clone_with_credentials(no_errors=False, repo=HG_REPO, method=METHOD, |
|
157 | def test_clone_with_credentials(no_errors=False, repo=HG_REPO, method=METHOD, | |
160 | backend='hg'): |
|
158 | backend='hg'): | |
161 | cwd = path = os.path.join(Ui.get_by_key('paths', '/').ui_value, repo) |
|
159 | cwd = path = os.path.join(Ui.get_by_key('paths', '/').ui_value, repo) | |
162 |
|
160 | |||
163 | try: |
|
161 | try: | |
164 | shutil.rmtree(path, ignore_errors=True) |
|
162 | shutil.rmtree(path, ignore_errors=True) | |
165 | os.makedirs(path) |
|
163 | os.makedirs(path) | |
166 | #print 'made dirs %s' % os.path.join(path) |
|
164 | #print 'made dirs %s' % os.path.join(path) | |
167 | except OSError: |
|
165 | except OSError: | |
168 | raise |
|
166 | raise | |
169 |
|
167 | |||
170 | clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \ |
|
168 | clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \ | |
171 | {'user': USER, |
|
169 | {'user': USER, | |
172 | 'pass': PASS, |
|
170 | 'pass': PASS, | |
173 | 'host': HOST, |
|
171 | 'host': HOST, | |
174 | 'cloned_repo': repo, } |
|
172 | 'cloned_repo': repo, } | |
175 |
|
173 | |||
176 | dest = tempfile.mktemp(dir=path, prefix='dest-') |
|
174 | dest = tempfile.mktemp(dir=path, prefix='dest-') | |
177 | if method == 'pull': |
|
175 | if method == 'pull': | |
178 | stdout, stderr = Command(cwd).execute(backend, method, '--cwd', dest, clone_url) |
|
176 | stdout, stderr = Command(cwd).execute(backend, method, '--cwd', dest, clone_url) | |
179 | else: |
|
177 | else: | |
180 | stdout, stderr = Command(cwd).execute(backend, method, clone_url, dest) |
|
178 | stdout, stderr = Command(cwd).execute(backend, method, clone_url, dest) | |
181 | if not no_errors: |
|
179 | if not no_errors: | |
182 | if backend == 'hg': |
|
180 | if backend == 'hg': | |
183 | assert """adding file changes""" in stdout, 'no messages about cloning' |
|
181 | assert """adding file changes""" in stdout, 'no messages about cloning' | |
184 | assert """abort""" not in stderr, 'got error from clone' |
|
182 | assert """abort""" not in stderr, 'got error from clone' | |
185 | elif backend == 'git': |
|
183 | elif backend == 'git': | |
186 | assert """Cloning into""" in stdout, 'no messages about cloning' |
|
184 | assert """Cloning into""" in stdout, 'no messages about cloning' | |
187 |
|
185 | |||
188 |
|
186 | |||
189 | if __name__ == '__main__': |
|
187 | if __name__ == '__main__': | |
190 | try: |
|
188 | try: | |
191 | create_test_user(force=False) |
|
189 | create_test_user(force=False) | |
192 | import time |
|
190 | import time | |
193 |
|
191 | |||
194 | try: |
|
192 | try: | |
195 | METHOD = sys.argv[3] |
|
193 | METHOD = sys.argv[3] | |
196 | except IndexError: |
|
194 | except IndexError: | |
197 | pass |
|
195 | pass | |
198 |
|
196 | |||
199 | try: |
|
197 | try: | |
200 | backend = sys.argv[4] |
|
198 | backend = sys.argv[4] | |
201 | except IndexError: |
|
199 | except IndexError: | |
202 | backend = 'hg' |
|
200 | backend = 'hg' | |
203 |
|
201 | |||
204 | if METHOD == 'pull': |
|
202 | if METHOD == 'pull': | |
205 | seq = next(tempfile._RandomNameSequence()) |
|
203 | seq = next(tempfile._RandomNameSequence()) | |
206 | test_clone_with_credentials(repo=sys.argv[1], method='clone', |
|
204 | test_clone_with_credentials(repo=sys.argv[1], method='clone', | |
207 | backend=backend) |
|
205 | backend=backend) | |
208 | s = time.time() |
|
206 | s = time.time() | |
209 | for i in range(1, int(sys.argv[2]) + 1): |
|
207 | for i in range(1, int(sys.argv[2]) + 1): | |
210 | print('take', i) |
|
208 | print('take', i) | |
211 | test_clone_with_credentials(repo=sys.argv[1], method=METHOD, |
|
209 | test_clone_with_credentials(repo=sys.argv[1], method=METHOD, | |
212 | backend=backend) |
|
210 | backend=backend) | |
213 | print('time taken %.3f' % (time.time() - s)) |
|
211 | print('time taken %.3f' % (time.time() - s)) | |
214 | except Exception as e: |
|
212 | except Exception as e: | |
215 | sys.exit('stop on %s' % e) |
|
213 | sys.exit('stop on %s' % e) |
@@ -1,193 +1,191 b'' | |||||
1 | #!/usr/bin/env python3 |
|
1 | #!/usr/bin/env python3 | |
2 | # -*- coding: utf-8 -*- |
|
2 | # -*- coding: utf-8 -*- | |
3 | # This program is free software: you can redistribute it and/or modify |
|
3 | # This program is free software: you can redistribute it and/or modify | |
4 | # it under the terms of the GNU General Public License as published by |
|
4 | # it under the terms of the GNU General Public License as published by | |
5 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | # the Free Software Foundation, either version 3 of the License, or | |
6 | # (at your option) any later version. |
|
6 | # (at your option) any later version. | |
7 | # |
|
7 | # | |
8 | # This program is distributed in the hope that it will be useful, |
|
8 | # This program is distributed in the hope that it will be useful, | |
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
11 | # GNU General Public License for more details. |
|
11 | # GNU General Public License for more details. | |
12 | # |
|
12 | # | |
13 | # You should have received a copy of the GNU General Public License |
|
13 | # You should have received a copy of the GNU General Public License | |
14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
15 | """ |
|
15 | """ | |
16 | kallithea.tests.scripts.manual_test_crawler |
|
16 | kallithea.tests.scripts.manual_test_crawler | |
17 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | |
18 |
|
18 | |||
19 | Test for crawling a project for memory usage |
|
19 | Test for crawling a project for memory usage | |
20 | This should be runned just as regular script together |
|
20 | This should be runned just as regular script together | |
21 | with a watch script that will show memory usage. |
|
21 | with a watch script that will show memory usage. | |
22 |
|
22 | |||
23 | watch -n1 ./kallithea/tests/mem_watch |
|
23 | watch -n1 ./kallithea/tests/mem_watch | |
24 |
|
24 | |||
25 | This file was forked by the Kallithea project in July 2014. |
|
25 | This file was forked by the Kallithea project in July 2014. | |
26 | Original author and date, and relevant copyright and licensing information is below: |
|
26 | Original author and date, and relevant copyright and licensing information is below: | |
27 | :created_on: Apr 21, 2010 |
|
27 | :created_on: Apr 21, 2010 | |
28 | :author: marcink |
|
28 | :author: marcink | |
29 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
29 | :copyright: (c) 2013 RhodeCode GmbH, and others. | |
30 | :license: GPLv3, see LICENSE.md for more details. |
|
30 | :license: GPLv3, see LICENSE.md for more details. | |
31 | """ |
|
31 | """ | |
32 |
|
32 | |||
33 | from __future__ import print_function |
|
|||
34 |
|
||||
35 | import http.cookiejar |
|
33 | import http.cookiejar | |
36 | import os |
|
34 | import os | |
37 | import sys |
|
35 | import sys | |
38 | import tempfile |
|
36 | import tempfile | |
39 | import time |
|
37 | import time | |
40 | import urllib.parse |
|
38 | import urllib.parse | |
41 | import urllib.request |
|
39 | import urllib.request | |
42 | from os.path import dirname |
|
40 | from os.path import dirname | |
43 |
|
41 | |||
44 | from kallithea.lib import vcs |
|
42 | from kallithea.lib import vcs | |
45 | from kallithea.lib.compat import OrderedSet |
|
43 | from kallithea.lib.compat import OrderedSet | |
46 | from kallithea.lib.vcs.exceptions import RepositoryError |
|
44 | from kallithea.lib.vcs.exceptions import RepositoryError | |
47 |
|
45 | |||
48 |
|
46 | |||
49 | __here__ = os.path.abspath(__file__) |
|
47 | __here__ = os.path.abspath(__file__) | |
50 | __root__ = dirname(dirname(dirname(__here__))) |
|
48 | __root__ = dirname(dirname(dirname(__here__))) | |
51 | sys.path.append(__root__) |
|
49 | sys.path.append(__root__) | |
52 |
|
50 | |||
53 |
|
51 | |||
54 | PASES = 3 |
|
52 | PASES = 3 | |
55 | HOST = 'http://127.0.0.1' |
|
53 | HOST = 'http://127.0.0.1' | |
56 | PORT = 5000 |
|
54 | PORT = 5000 | |
57 | BASE_URI = '%s:%s/' % (HOST, PORT) |
|
55 | BASE_URI = '%s:%s/' % (HOST, PORT) | |
58 |
|
56 | |||
59 | if len(sys.argv) == 2: |
|
57 | if len(sys.argv) == 2: | |
60 | BASE_URI = sys.argv[1] |
|
58 | BASE_URI = sys.argv[1] | |
61 |
|
59 | |||
62 | if not BASE_URI.endswith('/'): |
|
60 | if not BASE_URI.endswith('/'): | |
63 | BASE_URI += '/' |
|
61 | BASE_URI += '/' | |
64 |
|
62 | |||
65 | print('Crawling @ %s' % BASE_URI) |
|
63 | print('Crawling @ %s' % BASE_URI) | |
66 | BASE_URI += '%s' |
|
64 | BASE_URI += '%s' | |
67 | PROJECT_PATH = os.path.join('/', 'home', 'username', 'repos') |
|
65 | PROJECT_PATH = os.path.join('/', 'home', 'username', 'repos') | |
68 | PROJECTS = [ |
|
66 | PROJECTS = [ | |
69 | # 'linux-magx-pbranch', |
|
67 | # 'linux-magx-pbranch', | |
70 | 'CPython', |
|
68 | 'CPython', | |
71 | 'kallithea', |
|
69 | 'kallithea', | |
72 | ] |
|
70 | ] | |
73 |
|
71 | |||
74 |
|
72 | |||
75 | cj = http.cookiejar.FileCookieJar(os.path.join(tempfile.gettempdir(), 'rc_test_cookie.txt')) |
|
73 | cj = http.cookiejar.FileCookieJar(os.path.join(tempfile.gettempdir(), 'rc_test_cookie.txt')) | |
76 | o = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj)) |
|
74 | o = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj)) | |
77 | o.addheaders = [ |
|
75 | o.addheaders = [ | |
78 | ('User-agent', 'kallithea-crawler'), |
|
76 | ('User-agent', 'kallithea-crawler'), | |
79 | ('Accept-Language', 'en - us, en;q = 0.5') |
|
77 | ('Accept-Language', 'en - us, en;q = 0.5') | |
80 | ] |
|
78 | ] | |
81 |
|
79 | |||
82 | urllib.request.install_opener(o) |
|
80 | urllib.request.install_opener(o) | |
83 |
|
81 | |||
84 |
|
82 | |||
85 | def _get_repo(proj): |
|
83 | def _get_repo(proj): | |
86 | if isinstance(proj, str): |
|
84 | if isinstance(proj, str): | |
87 | repo = vcs.get_repo(os.path.join(PROJECT_PATH, proj)) |
|
85 | repo = vcs.get_repo(os.path.join(PROJECT_PATH, proj)) | |
88 | proj = proj |
|
86 | proj = proj | |
89 | else: |
|
87 | else: | |
90 | repo = proj |
|
88 | repo = proj | |
91 | proj = repo.name |
|
89 | proj = repo.name | |
92 |
|
90 | |||
93 | return repo, proj |
|
91 | return repo, proj | |
94 |
|
92 | |||
95 |
|
93 | |||
96 | def test_changelog_walk(proj, pages=100): |
|
94 | def test_changelog_walk(proj, pages=100): | |
97 | repo, proj = _get_repo(proj) |
|
95 | repo, proj = _get_repo(proj) | |
98 |
|
96 | |||
99 | total_time = 0 |
|
97 | total_time = 0 | |
100 | for i in range(1, pages): |
|
98 | for i in range(1, pages): | |
101 |
|
99 | |||
102 | page = '/'.join((proj, 'changelog',)) |
|
100 | page = '/'.join((proj, 'changelog',)) | |
103 |
|
101 | |||
104 | full_uri = (BASE_URI % page) + '?' + urllib.parse.urlencode({'page': i}) |
|
102 | full_uri = (BASE_URI % page) + '?' + urllib.parse.urlencode({'page': i}) | |
105 | s = time.time() |
|
103 | s = time.time() | |
106 | f = o.open(full_uri) |
|
104 | f = o.open(full_uri) | |
107 |
|
105 | |||
108 | assert f.url == full_uri, 'URL:%s does not match %s' % (f.url, full_uri) |
|
106 | assert f.url == full_uri, 'URL:%s does not match %s' % (f.url, full_uri) | |
109 |
|
107 | |||
110 | size = len(f.read()) |
|
108 | size = len(f.read()) | |
111 | e = time.time() - s |
|
109 | e = time.time() - s | |
112 | total_time += e |
|
110 | total_time += e | |
113 | print('visited %s size:%s req:%s ms' % (full_uri, size, e)) |
|
111 | print('visited %s size:%s req:%s ms' % (full_uri, size, e)) | |
114 |
|
112 | |||
115 | print('total_time', total_time) |
|
113 | print('total_time', total_time) | |
116 | print('average on req', total_time / float(pages)) |
|
114 | print('average on req', total_time / float(pages)) | |
117 |
|
115 | |||
118 |
|
116 | |||
119 | def test_changeset_walk(proj, limit=None): |
|
117 | def test_changeset_walk(proj, limit=None): | |
120 | repo, proj = _get_repo(proj) |
|
118 | repo, proj = _get_repo(proj) | |
121 |
|
119 | |||
122 | print('processing', os.path.join(PROJECT_PATH, proj)) |
|
120 | print('processing', os.path.join(PROJECT_PATH, proj)) | |
123 | total_time = 0 |
|
121 | total_time = 0 | |
124 |
|
122 | |||
125 | cnt = 0 |
|
123 | cnt = 0 | |
126 | for i in repo: |
|
124 | for i in repo: | |
127 | cnt += 1 |
|
125 | cnt += 1 | |
128 | raw_cs = '/'.join((proj, 'changeset', i.raw_id)) |
|
126 | raw_cs = '/'.join((proj, 'changeset', i.raw_id)) | |
129 | if limit and limit == cnt: |
|
127 | if limit and limit == cnt: | |
130 | break |
|
128 | break | |
131 |
|
129 | |||
132 | full_uri = (BASE_URI % raw_cs) |
|
130 | full_uri = (BASE_URI % raw_cs) | |
133 | print('%s visiting %s/%s' % (cnt, full_uri, i)) |
|
131 | print('%s visiting %s/%s' % (cnt, full_uri, i)) | |
134 | s = time.time() |
|
132 | s = time.time() | |
135 | f = o.open(full_uri) |
|
133 | f = o.open(full_uri) | |
136 | size = len(f.read()) |
|
134 | size = len(f.read()) | |
137 | e = time.time() - s |
|
135 | e = time.time() - s | |
138 | total_time += e |
|
136 | total_time += e | |
139 | print('%s visited %s/%s size:%s req:%s ms' % (cnt, full_uri, i, size, e)) |
|
137 | print('%s visited %s/%s size:%s req:%s ms' % (cnt, full_uri, i, size, e)) | |
140 |
|
138 | |||
141 | print('total_time', total_time) |
|
139 | print('total_time', total_time) | |
142 | print('average on req', total_time / float(cnt)) |
|
140 | print('average on req', total_time / float(cnt)) | |
143 |
|
141 | |||
144 |
|
142 | |||
145 | def test_files_walk(proj, limit=100): |
|
143 | def test_files_walk(proj, limit=100): | |
146 | repo, proj = _get_repo(proj) |
|
144 | repo, proj = _get_repo(proj) | |
147 |
|
145 | |||
148 | print('processing', os.path.join(PROJECT_PATH, proj)) |
|
146 | print('processing', os.path.join(PROJECT_PATH, proj)) | |
149 | total_time = 0 |
|
147 | total_time = 0 | |
150 |
|
148 | |||
151 | paths_ = OrderedSet(['']) |
|
149 | paths_ = OrderedSet(['']) | |
152 | try: |
|
150 | try: | |
153 | tip = repo.get_changeset('tip') |
|
151 | tip = repo.get_changeset('tip') | |
154 | for topnode, dirs, files in tip.walk('/'): |
|
152 | for topnode, dirs, files in tip.walk('/'): | |
155 |
|
153 | |||
156 | for dir in dirs: |
|
154 | for dir in dirs: | |
157 | paths_.add(dir.path) |
|
155 | paths_.add(dir.path) | |
158 | for f in dir: |
|
156 | for f in dir: | |
159 | paths_.add(f.path) |
|
157 | paths_.add(f.path) | |
160 |
|
158 | |||
161 | for f in files: |
|
159 | for f in files: | |
162 | paths_.add(f.path) |
|
160 | paths_.add(f.path) | |
163 |
|
161 | |||
164 | except RepositoryError as e: |
|
162 | except RepositoryError as e: | |
165 | pass |
|
163 | pass | |
166 |
|
164 | |||
167 | cnt = 0 |
|
165 | cnt = 0 | |
168 | for f in paths_: |
|
166 | for f in paths_: | |
169 | cnt += 1 |
|
167 | cnt += 1 | |
170 | if limit and limit == cnt: |
|
168 | if limit and limit == cnt: | |
171 | break |
|
169 | break | |
172 |
|
170 | |||
173 | file_path = '/'.join((proj, 'files', 'tip', f)) |
|
171 | file_path = '/'.join((proj, 'files', 'tip', f)) | |
174 | full_uri = (BASE_URI % file_path) |
|
172 | full_uri = (BASE_URI % file_path) | |
175 | print('%s visiting %s' % (cnt, full_uri)) |
|
173 | print('%s visiting %s' % (cnt, full_uri)) | |
176 | s = time.time() |
|
174 | s = time.time() | |
177 | f = o.open(full_uri) |
|
175 | f = o.open(full_uri) | |
178 | size = len(f.read()) |
|
176 | size = len(f.read()) | |
179 | e = time.time() - s |
|
177 | e = time.time() - s | |
180 | total_time += e |
|
178 | total_time += e | |
181 | print('%s visited OK size:%s req:%s ms' % (cnt, size, e)) |
|
179 | print('%s visited OK size:%s req:%s ms' % (cnt, size, e)) | |
182 |
|
180 | |||
183 | print('total_time', total_time) |
|
181 | print('total_time', total_time) | |
184 | print('average on req', total_time / float(cnt)) |
|
182 | print('average on req', total_time / float(cnt)) | |
185 |
|
183 | |||
186 | if __name__ == '__main__': |
|
184 | if __name__ == '__main__': | |
187 | for path in PROJECTS: |
|
185 | for path in PROJECTS: | |
188 | repo = vcs.get_repo(os.path.join(PROJECT_PATH, path)) |
|
186 | repo = vcs.get_repo(os.path.join(PROJECT_PATH, path)) | |
189 | for i in range(PASES): |
|
187 | for i in range(PASES): | |
190 | print('PASS %s/%s' % (i, PASES)) |
|
188 | print('PASS %s/%s' % (i, PASES)) | |
191 | test_changelog_walk(repo, pages=80) |
|
189 | test_changelog_walk(repo, pages=80) | |
192 | test_changeset_walk(repo, limit=100) |
|
190 | test_changeset_walk(repo, limit=100) | |
193 | test_files_walk(repo, limit=100) |
|
191 | test_files_walk(repo, limit=100) |
@@ -1,84 +1,82 b'' | |||||
1 | #!/usr/bin/env python3 |
|
1 | #!/usr/bin/env python3 | |
2 |
|
2 | |||
3 | """ |
|
3 | """ | |
4 | Consistent formatting of rst section titles |
|
4 | Consistent formatting of rst section titles | |
5 | """ |
|
5 | """ | |
6 |
|
6 | |||
7 | from __future__ import print_function |
|
|||
8 |
|
||||
9 | import re |
|
7 | import re | |
10 | import subprocess |
|
8 | import subprocess | |
11 |
|
9 | |||
12 |
|
10 | |||
13 | spaces = [ |
|
11 | spaces = [ | |
14 | (0, 1), # we assume this is a over-and-underlined header |
|
12 | (0, 1), # we assume this is a over-and-underlined header | |
15 | (2, 1), |
|
13 | (2, 1), | |
16 | (1, 1), |
|
14 | (1, 1), | |
17 | (1, 0), |
|
15 | (1, 0), | |
18 | (1, 0), |
|
16 | (1, 0), | |
19 | ] |
|
17 | ] | |
20 |
|
18 | |||
21 | # http://sphinx-doc.org/rest.html : |
|
19 | # http://sphinx-doc.org/rest.html : | |
22 | # for the Python documentation, this convention is used which you may follow: |
|
20 | # for the Python documentation, this convention is used which you may follow: | |
23 | # # with overline, for parts |
|
21 | # # with overline, for parts | |
24 | # * with overline, for chapters |
|
22 | # * with overline, for chapters | |
25 | # =, for sections |
|
23 | # =, for sections | |
26 | # -, for subsections |
|
24 | # -, for subsections | |
27 | # ^, for subsubsections |
|
25 | # ^, for subsubsections | |
28 | # ", for paragraphs |
|
26 | # ", for paragraphs | |
29 | pystyles = ['#', '*', '=', '-', '^', '"'] |
|
27 | pystyles = ['#', '*', '=', '-', '^', '"'] | |
30 |
|
28 | |||
31 | # match on a header line underlined with one of the valid characters |
|
29 | # match on a header line underlined with one of the valid characters | |
32 | headermatch = re.compile(r'''\n*(.+)\n([][!"#$%&'()*+,./:;<=>?@\\^_`{|}~-])\2{2,}\n+''', flags=re.MULTILINE) |
|
30 | headermatch = re.compile(r'''\n*(.+)\n([][!"#$%&'()*+,./:;<=>?@\\^_`{|}~-])\2{2,}\n+''', flags=re.MULTILINE) | |
33 |
|
31 | |||
34 |
|
32 | |||
35 | def main(): |
|
33 | def main(): | |
36 | filenames = subprocess.check_output(['hg', 'loc', 'set:**.rst+kallithea/i18n/how_to']).splitlines() |
|
34 | filenames = subprocess.check_output(['hg', 'loc', 'set:**.rst+kallithea/i18n/how_to']).splitlines() | |
37 | for fn in filenames: |
|
35 | for fn in filenames: | |
38 | fn = fn.decode() |
|
36 | fn = fn.decode() | |
39 | print('processing %s' % fn) |
|
37 | print('processing %s' % fn) | |
40 | s = open(fn).read() |
|
38 | s = open(fn).read() | |
41 |
|
39 | |||
42 | # find levels and their styles |
|
40 | # find levels and their styles | |
43 | lastpos = 0 |
|
41 | lastpos = 0 | |
44 | styles = [] |
|
42 | styles = [] | |
45 | for markup in headermatch.findall(s): |
|
43 | for markup in headermatch.findall(s): | |
46 | style = markup[1] |
|
44 | style = markup[1] | |
47 | if style in styles: |
|
45 | if style in styles: | |
48 | stylepos = styles.index(style) |
|
46 | stylepos = styles.index(style) | |
49 | if stylepos > lastpos + 1: |
|
47 | if stylepos > lastpos + 1: | |
50 | print('bad style %r with level %s - was at %s' % (style, stylepos, lastpos)) |
|
48 | print('bad style %r with level %s - was at %s' % (style, stylepos, lastpos)) | |
51 | else: |
|
49 | else: | |
52 | stylepos = len(styles) |
|
50 | stylepos = len(styles) | |
53 | if stylepos > lastpos + 1: |
|
51 | if stylepos > lastpos + 1: | |
54 | print('bad new style %r - expected %r' % (style, styles[lastpos + 1])) |
|
52 | print('bad new style %r - expected %r' % (style, styles[lastpos + 1])) | |
55 | else: |
|
53 | else: | |
56 | styles.append(style) |
|
54 | styles.append(style) | |
57 | lastpos = stylepos |
|
55 | lastpos = stylepos | |
58 |
|
56 | |||
59 | # remove superfluous spacing (may however be restored by header spacing) |
|
57 | # remove superfluous spacing (may however be restored by header spacing) | |
60 | s = re.sub(r'''(\n\n)\n*''', r'\1', s, flags=re.MULTILINE) |
|
58 | s = re.sub(r'''(\n\n)\n*''', r'\1', s, flags=re.MULTILINE) | |
61 |
|
59 | |||
62 | if styles: |
|
60 | if styles: | |
63 | newstyles = pystyles[pystyles.index(styles[0]):] |
|
61 | newstyles = pystyles[pystyles.index(styles[0]):] | |
64 |
|
62 | |||
65 | def subf(m): |
|
63 | def subf(m): | |
66 | title, style = m.groups() |
|
64 | title, style = m.groups() | |
67 | level = styles.index(style) |
|
65 | level = styles.index(style) | |
68 | before, after = spaces[level] |
|
66 | before, after = spaces[level] | |
69 | newstyle = newstyles[level] |
|
67 | newstyle = newstyles[level] | |
70 | return '\n' * (before + 1) + title + '\n' + newstyle * len(title) + '\n' * (after + 1) |
|
68 | return '\n' * (before + 1) + title + '\n' + newstyle * len(title) + '\n' * (after + 1) | |
71 | s = headermatch.sub(subf, s) |
|
69 | s = headermatch.sub(subf, s) | |
72 |
|
70 | |||
73 | # remove superfluous spacing when headers are adjacent |
|
71 | # remove superfluous spacing when headers are adjacent | |
74 | s = re.sub(r'''(\n.+\n([][!"#$%&'()*+,./:;<=>?@\\^_`{|}~-])\2{2,}\n\n\n)\n*''', r'\1', s, flags=re.MULTILINE) |
|
72 | s = re.sub(r'''(\n.+\n([][!"#$%&'()*+,./:;<=>?@\\^_`{|}~-])\2{2,}\n\n\n)\n*''', r'\1', s, flags=re.MULTILINE) | |
75 | # fix trailing space and spacing before link sections |
|
73 | # fix trailing space and spacing before link sections | |
76 | s = s.strip() + '\n' |
|
74 | s = s.strip() + '\n' | |
77 | s = re.sub(r'''\n+((?:\.\. _[^\n]*\n)+)$''', r'\n\n\n\1', s) |
|
75 | s = re.sub(r'''\n+((?:\.\. _[^\n]*\n)+)$''', r'\n\n\n\1', s) | |
78 |
|
76 | |||
79 | open(fn, 'w').write(s) |
|
77 | open(fn, 'w').write(s) | |
80 |
|
78 | |||
81 | print(subprocess.check_output(['hg', 'diff'] + filenames)) |
|
79 | print(subprocess.check_output(['hg', 'diff'] + filenames)) | |
82 |
|
80 | |||
83 | if __name__ == '__main__': |
|
81 | if __name__ == '__main__': | |
84 | main() |
|
82 | main() |
@@ -1,72 +1,70 b'' | |||||
1 | #!/usr/bin/env python3 |
|
1 | #!/usr/bin/env python3 | |
2 | """ |
|
2 | """ | |
3 | Based on kallithea/lib/paster_commands/template.ini.mako, generate development.ini |
|
3 | Based on kallithea/lib/paster_commands/template.ini.mako, generate development.ini | |
4 | """ |
|
4 | """ | |
5 |
|
5 | |||
6 | from __future__ import print_function |
|
|||
7 |
|
||||
8 | import re |
|
6 | import re | |
9 |
|
7 | |||
10 | from kallithea.lib import inifile |
|
8 | from kallithea.lib import inifile | |
11 |
|
9 | |||
12 |
|
10 | |||
13 | # files to be generated from the mako template |
|
11 | # files to be generated from the mako template | |
14 | ini_files = [ |
|
12 | ini_files = [ | |
15 | ('development.ini', |
|
13 | ('development.ini', | |
16 | { |
|
14 | { | |
17 | '[server:main]': { |
|
15 | '[server:main]': { | |
18 | 'host': '0.0.0.0', |
|
16 | 'host': '0.0.0.0', | |
19 | }, |
|
17 | }, | |
20 | '[app:main]': { |
|
18 | '[app:main]': { | |
21 | 'debug': 'true', |
|
19 | 'debug': 'true', | |
22 | 'app_instance_uuid': 'development-not-secret', |
|
20 | 'app_instance_uuid': 'development-not-secret', | |
23 | 'session.secret': 'development-not-secret', |
|
21 | 'session.secret': 'development-not-secret', | |
24 | }, |
|
22 | }, | |
25 | '[logger_root]': { |
|
23 | '[logger_root]': { | |
26 | 'handlers': 'console_color', |
|
24 | 'handlers': 'console_color', | |
27 | }, |
|
25 | }, | |
28 | '[logger_routes]': { |
|
26 | '[logger_routes]': { | |
29 | 'level': 'DEBUG', |
|
27 | 'level': 'DEBUG', | |
30 | }, |
|
28 | }, | |
31 | '[logger_beaker]': { |
|
29 | '[logger_beaker]': { | |
32 | 'level': 'DEBUG', |
|
30 | 'level': 'DEBUG', | |
33 | }, |
|
31 | }, | |
34 | '[logger_templates]': { |
|
32 | '[logger_templates]': { | |
35 | 'level': 'INFO', |
|
33 | 'level': 'INFO', | |
36 | }, |
|
34 | }, | |
37 | '[logger_kallithea]': { |
|
35 | '[logger_kallithea]': { | |
38 | 'level': 'DEBUG', |
|
36 | 'level': 'DEBUG', | |
39 | }, |
|
37 | }, | |
40 | '[logger_tg]': { |
|
38 | '[logger_tg]': { | |
41 | 'level': 'DEBUG', |
|
39 | 'level': 'DEBUG', | |
42 | }, |
|
40 | }, | |
43 | '[logger_gearbox]': { |
|
41 | '[logger_gearbox]': { | |
44 | 'level': 'DEBUG', |
|
42 | 'level': 'DEBUG', | |
45 | }, |
|
43 | }, | |
46 | '[logger_whoosh_indexer]': { |
|
44 | '[logger_whoosh_indexer]': { | |
47 | 'level': 'DEBUG', |
|
45 | 'level': 'DEBUG', | |
48 | }, |
|
46 | }, | |
49 | }, |
|
47 | }, | |
50 | ), |
|
48 | ), | |
51 | ] |
|
49 | ] | |
52 |
|
50 | |||
53 |
|
51 | |||
54 | def main(): |
|
52 | def main(): | |
55 | # make sure all mako lines starting with '#' (the '##' comments) are marked up as <text> |
|
53 | # make sure all mako lines starting with '#' (the '##' comments) are marked up as <text> | |
56 | makofile = inifile.template_file |
|
54 | makofile = inifile.template_file | |
57 | print('reading:', makofile) |
|
55 | print('reading:', makofile) | |
58 | mako_org = open(makofile).read() |
|
56 | mako_org = open(makofile).read() | |
59 | mako_no_text_markup = re.sub(r'</?%text>', '', mako_org) |
|
57 | mako_no_text_markup = re.sub(r'</?%text>', '', mako_org) | |
60 | mako_marked_up = re.sub(r'\n(##.*)', r'\n<%text>\1</%text>', mako_no_text_markup, flags=re.MULTILINE) |
|
58 | mako_marked_up = re.sub(r'\n(##.*)', r'\n<%text>\1</%text>', mako_no_text_markup, flags=re.MULTILINE) | |
61 | if mako_marked_up != mako_org: |
|
59 | if mako_marked_up != mako_org: | |
62 | print('writing:', makofile) |
|
60 | print('writing:', makofile) | |
63 | open(makofile, 'w').write(mako_marked_up) |
|
61 | open(makofile, 'w').write(mako_marked_up) | |
64 |
|
62 | |||
65 | # create ini files |
|
63 | # create ini files | |
66 | for fn, settings in ini_files: |
|
64 | for fn, settings in ini_files: | |
67 | print('updating:', fn) |
|
65 | print('updating:', fn) | |
68 | inifile.create(fn, None, settings) |
|
66 | inifile.create(fn, None, settings) | |
69 |
|
67 | |||
70 |
|
68 | |||
71 | if __name__ == '__main__': |
|
69 | if __name__ == '__main__': | |
72 | main() |
|
70 | main() |
@@ -1,49 +1,47 b'' | |||||
1 | #!/usr/bin/env python3 |
|
1 | #!/usr/bin/env python3 | |
2 |
|
2 | |||
3 | from __future__ import print_function |
|
|||
4 |
|
||||
5 | import re |
|
3 | import re | |
6 | import sys |
|
4 | import sys | |
7 |
|
5 | |||
8 |
|
6 | |||
9 | logre = r''' |
|
7 | logre = r''' | |
10 | (log\.(?:error|info|warning|debug) |
|
8 | (log\.(?:error|info|warning|debug) | |
11 | [(][ \n]* |
|
9 | [(][ \n]* | |
12 | ) |
|
10 | ) | |
13 | %s |
|
11 | %s | |
14 | ( |
|
12 | ( | |
15 | [ \n]*[)] |
|
13 | [ \n]*[)] | |
16 | ) |
|
14 | ) | |
17 | ''' |
|
15 | ''' | |
18 |
|
16 | |||
19 |
|
17 | |||
20 | res = [ |
|
18 | res = [ | |
21 | # handle % () - keeping spaces around the old % |
|
19 | # handle % () - keeping spaces around the old % | |
22 | (re.compile(logre % r'''("[^"]*"|'[^']*') ([\n ]*) % ([\n ]*) \( ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) \) ''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'), |
|
20 | (re.compile(logre % r'''("[^"]*"|'[^']*') ([\n ]*) % ([\n ]*) \( ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) \) ''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'), | |
23 | # handle % without () - keeping spaces around the old % |
|
21 | # handle % without () - keeping spaces around the old % | |
24 | (re.compile(logre % r'''("[^"]*"|'[^']*') ([\n ]*) % ([\n ]*) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) ''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'), |
|
22 | (re.compile(logre % r'''("[^"]*"|'[^']*') ([\n ]*) % ([\n ]*) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) ''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'), | |
25 | # remove extra space if it is on next line |
|
23 | # remove extra space if it is on next line | |
26 | (re.compile(logre % r'''("[^"]*"|'[^']*') , (\n [ ]) ([ ][\n ]*) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) ''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'), |
|
24 | (re.compile(logre % r'''("[^"]*"|'[^']*') , (\n [ ]) ([ ][\n ]*) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) ''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'), | |
27 | # remove extra space if it is on same line |
|
25 | # remove extra space if it is on same line | |
28 | (re.compile(logre % r'''("[^"]*"|'[^']*') , [ ]+ () ( [\n ]+) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) ''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'), |
|
26 | (re.compile(logre % r'''("[^"]*"|'[^']*') , [ ]+ () ( [\n ]+) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) ''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'), | |
29 | # remove trailing , and space |
|
27 | # remove trailing , and space | |
30 | (re.compile(logre % r'''("[^"]*"|'[^']*') , () ( [\n ]*) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* [^(), \n] ) [ ,]*''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'), |
|
28 | (re.compile(logre % r'''("[^"]*"|'[^']*') , () ( [\n ]*) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* [^(), \n] ) [ ,]*''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'), | |
31 | ] |
|
29 | ] | |
32 |
|
30 | |||
33 |
|
31 | |||
34 | def rewrite(f): |
|
32 | def rewrite(f): | |
35 | s = open(f).read() |
|
33 | s = open(f).read() | |
36 | for r, t in res: |
|
34 | for r, t in res: | |
37 | s = r.sub(t, s) |
|
35 | s = r.sub(t, s) | |
38 | open(f, 'w').write(s) |
|
36 | open(f, 'w').write(s) | |
39 |
|
37 | |||
40 |
|
38 | |||
41 | if __name__ == '__main__': |
|
39 | if __name__ == '__main__': | |
42 | if len(sys.argv) < 2: |
|
40 | if len(sys.argv) < 2: | |
43 | print('Cleanup of superfluous % formatting of log statements.') |
|
41 | print('Cleanup of superfluous % formatting of log statements.') | |
44 | print('Usage:') |
|
42 | print('Usage:') | |
45 | print(''' hg revert `hg loc '*.py'|grep -v logformat.py` && scripts/logformat.py `hg loc '*.py'` && hg diff''') |
|
43 | print(''' hg revert `hg loc '*.py'|grep -v logformat.py` && scripts/logformat.py `hg loc '*.py'` && hg diff''') | |
46 | raise SystemExit(1) |
|
44 | raise SystemExit(1) | |
47 |
|
45 | |||
48 | for f in sys.argv[1:]: |
|
46 | for f in sys.argv[1:]: | |
49 | rewrite(f) |
|
47 | rewrite(f) |
General Comments 0
You need to be logged in to leave comments.
Login now