Show More
@@ -1,129 +1,127 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.bin.kallithea_api |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Api CLI client for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Jun 3, 2012 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | from __future__ import print_function | |
|
29 | ||
|
30 | 28 | import argparse |
|
31 | 29 | import json |
|
32 | 30 | import sys |
|
33 | 31 | |
|
34 | 32 | from kallithea.bin.base import FORMAT_JSON, FORMAT_PRETTY, RcConf, api_call |
|
35 | 33 | |
|
36 | 34 | |
|
def argparser(argv):
    """Build the kallithea-api CLI parser and parse *argv*.

    :param argv: full argument vector as in ``sys.argv`` (program name first)
    :return: tuple of (parser, parsed known args, list of remaining args)
    """
    usage = (
        "kallithea-api [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] "
        "[--config=CONFIG] [--save-config] "
        "METHOD <key:val> <key2:val> ...\n"
        "Create config file: kallithea-api --apikey=<key> --apihost=http://kallithea.example.com --save-config"
    )

    parser = argparse.ArgumentParser(description='Kallithea API cli',
                                     usage=usage)

    ## config
    group = parser.add_argument_group('config')
    group.add_argument('--apikey', help='api access key')
    group.add_argument('--apihost', help='api host')
    group.add_argument('--config', help='config file')
    group.add_argument('--save-config', action='store_true', help='save the given config into a file')

    group = parser.add_argument_group('API')
    group.add_argument('method', metavar='METHOD', nargs='?', type=str, default=None,
                       help='API method name to call followed by key:value attributes',
    )
    group.add_argument('--format', dest='format', type=str,
                       help='output format default: `%s` can '
                            'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON),
                       default=FORMAT_PRETTY
    )
    # Bug fix: the function accepted argv but ignored it and always parsed
    # the global sys.argv; parse the vector we were actually given instead
    # (argv[0] is the program name, so it is skipped, matching argparse's
    # own default behavior).
    args, other = parser.parse_known_args(argv[1:])
    return parser, args, other
|
66 | 64 | |
|
67 | 65 | |
|
def main(argv=None):
    """
    Main execution function for cli

    :param argv: argument vector; defaults to ``sys.argv``
    :return: 0 on success (may also terminate early via ``sys.exit`` or
        ``parser.error``, both of which raise ``SystemExit``)
    """
    if argv is None:
        argv = sys.argv

    conf = None
    parser, args, other = argparser(argv)

    api_credentials_given = (args.apikey and args.apihost)
    if args.save_config:
        if not api_credentials_given:
            # Bug fix: parser.error() raises SystemExit itself and never
            # returns, so the former 'raise' in front of it was dead code.
            parser.error('--save-config requires --apikey and --apihost')
        conf = RcConf(config_location=args.config,
                      autocreate=True, config={'apikey': args.apikey,
                                               'apihost': args.apihost})
        sys.exit()

    if not conf:
        conf = RcConf(config_location=args.config, autoload=True)
        if not conf:
            if not api_credentials_given:
                parser.error('Could not find config file and missing '
                             '--apikey or --apihost in params')

    apikey = args.apikey or conf['apikey']
    apihost = args.apihost or conf['apihost']
    method = args.method

    # if we don't have method here it's an error
    if not method:
        parser.error('Please specify method name')

    try:
        # remaining CLI words are API arguments given in key:value form;
        # split only on the first ':' so values may contain colons
        margs = dict(s.split(':', 1) for s in other)
    except ValueError:
        sys.stderr.write('Error parsing arguments \n')
        sys.exit()
    if args.format == FORMAT_PRETTY:
        print('Calling method %s => %s' % (method, apihost))

    json_resp = api_call(apikey, apihost, method, **margs)
    error_prefix = ''
    if json_resp['error']:
        error_prefix = 'ERROR:'
        json_data = json_resp['error']
    else:
        json_data = json_resp['result']
    if args.format == FORMAT_JSON:
        print(json.dumps(json_data))
    elif args.format == FORMAT_PRETTY:
        print('Server response \n%s%s' % (
            error_prefix, json.dumps(json_data, indent=4, sort_keys=True)
        ))
    return 0


if __name__ == '__main__':
    sys.exit(main(sys.argv))
@@ -1,42 +1,40 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | This file was forked by the Kallithea project in July 2014 and later moved. |
|
16 | 16 | Original author and date, and relevant copyright and licensing information is below: |
|
17 | 17 | :created_on: Apr 4, 2013 |
|
18 | 18 | :author: marcink |
|
19 | 19 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
20 | 20 | :license: GPLv3, see LICENSE.md for more details. |
|
21 | 21 | """ |
|
22 | 22 | |
|
23 | from __future__ import print_function | |
|
24 | ||
|
25 | 23 | import sys |
|
26 | 24 | |
|
27 | 25 | import kallithea.bin.kallithea_cli_base as cli_base |
|
28 | 26 | from kallithea.model.db import * # these names will be directly available in the IPython shell |
|
29 | 27 | |
|
30 | 28 | |
|
@cli_base.register_command(config_file_initialize_app=True)
def ishell():
    """Interactive shell for Kallithea."""
    # IPython is an optional dependency; bail out with a hint if it is absent.
    try:
        from IPython import embed
    except ImportError:
        print('Kallithea ishell requires the Python package IPython 4 or later')
        sys.exit(-1)
    from traitlets.config.loader import Config
    ipython_config = Config()
    # Leave the shell immediately on exit/EOF without an "Are you sure?" prompt.
    ipython_config.InteractiveShellEmbed.confirm_exit = False
    embed(config=ipython_config, banner1="Kallithea IShell.")
@@ -1,175 +1,173 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.bin.kallithea_gist |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Gist CLI client for Kallithea |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: May 9, 2013 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | from __future__ import print_function | |
|
29 | ||
|
30 | 28 | import argparse |
|
31 | 29 | import fileinput |
|
32 | 30 | import json |
|
33 | 31 | import os |
|
34 | 32 | import stat |
|
35 | 33 | import sys |
|
36 | 34 | |
|
37 | 35 | from kallithea.bin.base import FORMAT_JSON, FORMAT_PRETTY, RcConf, api_call |
|
38 | 36 | |
|
39 | 37 | |
|
def argparser(argv):
    """Build the kallithea-gist CLI parser and parse *argv*.

    :param argv: full argument vector as in ``sys.argv`` (program name first)
    :return: tuple of (parser, parsed known args, list of remaining args)
    """
    usage = (
        "kallithea-gist [-h] [--format=FORMAT] [--apikey=APIKEY] [--apihost=APIHOST] "
        "[--config=CONFIG] [--save-config] [GIST OPTIONS] "
        "[filename or stdin use - for terminal stdin ]\n"
        "Create config file: kallithea-gist --apikey=<key> --apihost=http://kallithea.example.com --save-config"
    )

    parser = argparse.ArgumentParser(description='Kallithea Gist cli',
                                     usage=usage)

    ## config
    group = parser.add_argument_group('config')
    group.add_argument('--apikey', help='api access key')
    group.add_argument('--apihost', help='api host')
    group.add_argument('--config', help='config file path DEFAULT: ~/.config/kallithea')
    group.add_argument('--save-config', action='store_true',
                       help='save the given config into a file')

    group = parser.add_argument_group('GIST')
    group.add_argument('-p', '--private', action='store_true',
                       help='create private Gist')
    group.add_argument('-f', '--filename',
                       help='set uploaded gist filename, '
                            'also defines syntax highlighting')
    group.add_argument('-d', '--description', help='Gist description')
    group.add_argument('-l', '--lifetime', metavar='MINUTES',
                       help='gist lifetime in minutes, -1 (DEFAULT) is forever')
    group.add_argument('--format', dest='format', type=str,
                       help='output format DEFAULT: `%s` can '
                            'be also `%s`' % (FORMAT_PRETTY, FORMAT_JSON),
                       default=FORMAT_PRETTY
    )
    # Bug fix: the function accepted argv but ignored it and always parsed
    # the global sys.argv; parse the vector we were actually given instead
    # (argv[0] is the program name, so it is skipped).
    args, other = parser.parse_known_args(argv[1:])
    return parser, args, other
|
75 | 73 | |
|
76 | 74 | |
|
def _run(argv):
    """Create a gist from a file argument or stdin and print the result.

    :param argv: full argument vector as in ``sys.argv``
    :return: 0 on success (may also terminate via ``sys.exit`` or
        ``parser.error``, both of which raise ``SystemExit``)
    :raises Exception: if the content to upload contains NUL bytes
        (binary data cannot be uploaded)
    """
    conf = None
    parser, args, other = argparser(argv)

    api_credentials_given = (args.apikey and args.apihost)
    if args.save_config:
        if not api_credentials_given:
            # Bug fix: parser.error() raises SystemExit itself and never
            # returns, so the former 'raise' in front of it was dead code.
            parser.error('--save-config requires --apikey and --apihost')
        conf = RcConf(config_location=args.config,
                      autocreate=True, config={'apikey': args.apikey,
                                               'apihost': args.apihost})
        sys.exit()

    if not conf:
        conf = RcConf(config_location=args.config, autoload=True)
        if not conf:
            if not api_credentials_given:
                parser.error('Could not find config file and missing '
                             '--apikey or --apihost in params')

    apikey = args.apikey or conf['apikey']
    host = args.apihost or conf['apihost']
    DEFAULT_FILENAME = 'gistfile1.txt'
    if other:
        # skip multifiles for now
        filename = other[0]
        if filename == '-':
            filename = DEFAULT_FILENAME
            gist_content = ''
            for line in fileinput.input('-'):
                gist_content += line
        else:
            with open(filename, 'rb') as f:
                gist_content = f.read()

    else:
        filename = DEFAULT_FILENAME
        gist_content = None
        # little bit hacky but cross platform check where the
        # stdin comes from we skip the terminal case it can be handled by '-'
        mode = os.fstat(0).st_mode
        if stat.S_ISFIFO(mode):
            # "stdin is piped"
            gist_content = sys.stdin.read()
        elif stat.S_ISREG(mode):
            # "stdin is redirected"
            gist_content = sys.stdin.read()
        else:
            # "stdin is terminal"
            pass

    # make sure we don't upload binary stuff
    if gist_content:
        # Bug fix: content read from a file is bytes, so the old check
        # `'\0' in gist_content` raised TypeError on Python 3; test with a
        # NUL of the matching type instead.
        nul = b'\0' if isinstance(gist_content, bytes) else '\0'
        if nul in gist_content:
            raise Exception('Error: binary files upload is not possible')
        if isinstance(gist_content, bytes):
            # The JSON API payload requires text, not bytes.
            gist_content = gist_content.decode('utf-8')

    filename = os.path.basename(args.filename or filename)
    if gist_content:
        files = {
            filename: {
                'content': gist_content,
                'lexer': None
            }
        }

        margs = dict(
            lifetime=args.lifetime,
            description=args.description,
            gist_type='private' if args.private else 'public',
            files=files
        )

        json_data = api_call(apikey, host, 'create_gist', **margs)['result']
        if args.format == FORMAT_JSON:
            print(json.dumps(json_data))
        elif args.format == FORMAT_PRETTY:
            print(json_data)
            print('Created %s gist %s' % (json_data['gist']['type'],
                                          json_data['gist']['url']))
    return 0
|
156 | 154 | |
|
157 | 155 | |
|
def main(argv=None):
    """
    Main execution function for cli

    :param argv: argument vector; defaults to ``sys.argv``
    :return: 0 on success, 1 if the gist run raised any exception
    """
    arg_vector = sys.argv if argv is None else argv

    try:
        return _run(arg_vector)
    except Exception as err:
        # Report the failure on stdout and signal it via the exit code.
        print(err)
        return 1


if __name__ == '__main__':
    sys.exit(main(sys.argv))
@@ -1,261 +1,259 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.bin.ldap_sync |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | LDAP sync script |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Mar 06, 2013 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | from __future__ import print_function | |
|
29 | ||
|
30 | 28 | import urllib.request |
|
31 | 29 | import uuid |
|
32 | 30 | from configparser import ConfigParser |
|
33 | 31 | |
|
34 | 32 | import ldap |
|
35 | 33 | |
|
36 | 34 | from kallithea.lib import ext_json |
|
37 | 35 | from kallithea.lib.utils2 import ascii_bytes |
|
38 | 36 | |
|
39 | 37 | |
|
# Module-level configuration for the sync script, read from
# 'ldap_sync.conf' in the current working directory (a missing file
# silently yields an empty config; lookups then raise at access time).
config = ConfigParser()
config.read('ldap_sync.conf')
|
42 | 40 | |
|
43 | 41 | |
|
class InvalidResponseIDError(Exception):
    """Raised when a response UUID does not match the request's UUID."""


class ResponseError(Exception):
    """Raised when the API response reports an error during request execution."""


class UserAlreadyInGroupError(Exception):
    """Raised when adding a user who is already a member of the target group."""


class UserNotInGroupError(Exception):
    """Raised when removing a user who is not a member of the target group."""
|
58 | 56 | |
|
59 | 57 | |
|
class API(object):
    """Small JSON-RPC client wrapper around the Kallithea API."""

    def __init__(self, url, key):
        self.url = url  # API endpoint URL
        self.key = key  # API access key sent with every request

    def get_api_data(self, uid, method, args):
        """Prepare dict for API post."""
        return {
            "id": uid,
            "api_key": self.key,
            "method": method,
            "args": args
        }

    def post(self, method, args):
        """Send a generic API post to Kallithea.

        Generates a UUID so the response can be validated against the
        request, raises on transport-level or API-level errors, and
        returns the call result.
        """
        request_id = str(uuid.uuid1())
        payload = ascii_bytes(ext_json.dumps(
            self.get_api_data(request_id, method, args)))

        request = urllib.request.Request(
            self.url, payload, {'content-type': 'text/plain'})
        response = ext_json.load(urllib.request.urlopen(request))

        # Guard against mixed-up responses: the id must round-trip.
        if request_id != response["id"]:
            raise InvalidResponseIDError("UUID does not match.")

        if response["error"] is not None:
            raise ResponseError(response["error"])

        return response["result"]

    def create_group(self, name, active=True):
        """Create the Kallithea user group."""
        self.post("create_user_group", {
            "group_name": name,
            "active": str(active)
        })

    def add_membership(self, group, username):
        """Add specific user to a group."""
        result = self.post("add_user_to_user_group", {
            "usersgroupid": group,
            "userid": username
        })
        if not result["success"]:
            raise UserAlreadyInGroupError("User %s already in group %s." %
                                          (username, group))

    def remove_membership(self, group, username):
        """Remove specific user from a group."""
        result = self.post("remove_user_from_user_group", {
            "usersgroupid": group,
            "userid": username
        })
        if not result["success"]:
            raise UserNotInGroupError("User %s not in group %s." %
                                      (username, group))

    def get_group_members(self, name):
        """Get the list of member usernames from a user group."""
        members = self.post("get_user_group", {"usersgroupid": name})['members']
        return [member["username"] for member in members]

    def get_group(self, name):
        """Return group info."""
        return self.post("get_user_group", {"usersgroupid": name})

    def get_user(self, username):
        """Return user info."""
        return self.post("get_user", {"userid": username})
|
147 | 145 | |
|
148 | 146 | |
|
class LdapClient(object):
    """Minimal LDAP wrapper for reading groups and their memberships."""

    def __init__(self, uri, user, key, base_dn):
        self.client = ldap.initialize(uri, trace_level=0)
        self.client.set_option(ldap.OPT_REFERRALS, 0)
        self.client.simple_bind(user, key)
        self.base_dn = base_dn

    def close(self):
        """Unbind from the LDAP server."""
        self.client.unbind()

    def get_groups(self):
        """Get all the groups in form of dict {group_name: group_info,...}."""
        search_filter = "objectClass=groupOfUniqueNames"
        entries = self.client.search_s(self.base_dn, ldap.SCOPE_SUBTREE,
                                       search_filter)
        # Each entry is a (dn, attributes) pair; key the result by the
        # group's first 'cn' attribute value.
        return dict((attrs['cn'][0], attrs) for _dn, attrs in entries)

    def get_group_users(self, groups, group):
        """Returns all the users belonging to a single group.

        Based on the list of groups and memberships, returns all the
        users belonging to a single group, searching recursively.
        """
        users = []
        for raw_member in groups[group]["uniqueMember"]:
            parts = self.parse_member_string(raw_member)
            if parts[0] == "uid":
                users.append(parts[1])
            elif parts[0] == "cn":
                # Nested group: recurse into its membership.
                users += self.get_group_users(groups, parts[1])

        return users

    def parse_member_string(self, member):
        """Parse a uniqueMember string into a [type, name] pair.

        A unique member is either a user or a group: users carry a 'uid'
        prefix, groups a 'cn' prefix.
        """
        first_rdn = member.split(",")[0]
        return first_rdn.split('=')
|
196 | 194 | |
|
197 | 195 | |
|
class LdapSync(object):
    """Synchronize LDAP groups and group memberships into Kallithea."""

    def __init__(self):
        # Both endpoints are configured via the module-level 'config'.
        self.ldap_client = LdapClient(config.get("default", "ldap_uri"),
                                      config.get("default", "ldap_user"),
                                      config.get("default", "ldap_key"),
                                      config.get("default", "base_dn"))
        self.kallithea_api = API(config.get("default", "api_url"),
                                 config.get("default", "api_key"))

    def update_groups_from_ldap(self):
        """Add all the groups from LDAP to Kallithea.

        :return: tuple of (groups added, groups that already existed)
        """
        added = existing = 0
        for group_name in self.ldap_client.get_groups():
            try:
                self.kallithea_api.create_group(group_name)
                added += 1
            except Exception:
                # create_user_group fails for groups that already exist
                existing += 1

        return added, existing

    def update_memberships_from_ldap(self, group):
        """Update memberships based on the LDAP groups."""
        groups = self.ldap_client.get_groups()
        ldap_users = self.ldap_client.get_group_users(groups, group)

        # Delete memberships first from each group which are not part
        # of the group any more.
        for member in self.kallithea_api.get_group_members(group):
            if member not in ldap_users:
                try:
                    self.kallithea_api.remove_membership(group, member)
                except UserNotInGroupError:
                    pass

        # Add memberships.
        for member in ldap_users:
            try:
                self.kallithea_api.add_membership(group, member)
            except UserAlreadyInGroupError:
                # TODO: handle somehow maybe..
                pass

    def close(self):
        """Release the underlying LDAP connection."""
        self.ldap_client.close()
|
247 | 245 | |
|
248 | 246 | |
|
if __name__ == '__main__':
    sync = LdapSync()
    # Report (added, existing) counts from the group sync.
    print(sync.update_groups_from_ldap())

    for group_name in sync.ldap_client.get_groups():
        # TODO: exception when user does not exist during add membership...
        # How should we handle this.. Either sync users as well at this step,
        # or just ignore those who don't exist. If we want the second case,
        # we need to find a way to recognize the right exception (we always get
        # ResponseError with no error code so maybe by return msg (?)
        sync.update_memberships_from_ldap(group_name)

    sync.close()
@@ -1,422 +1,420 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.db_manage |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Database creation, and setup module for Kallithea. Used for creation |
|
19 | 19 | of database as well as for migration operations |
|
20 | 20 | |
|
21 | 21 | This file was forked by the Kallithea project in July 2014. |
|
22 | 22 | Original author and date, and relevant copyright and licensing information is below: |
|
23 | 23 | :created_on: Apr 10, 2010 |
|
24 | 24 | :author: marcink |
|
25 | 25 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
26 | 26 | :license: GPLv3, see LICENSE.md for more details. |
|
27 | 27 | """ |
|
28 | 28 | |
|
29 | from __future__ import print_function | |
|
30 | ||
|
31 | 29 | import logging |
|
32 | 30 | import os |
|
33 | 31 | import sys |
|
34 | 32 | import uuid |
|
35 | 33 | |
|
36 | 34 | import alembic.command |
|
37 | 35 | import alembic.config |
|
38 | 36 | import sqlalchemy |
|
39 | 37 | from sqlalchemy.engine import create_engine |
|
40 | 38 | |
|
41 | 39 | from kallithea.model.base import init_model |
|
42 | 40 | from kallithea.model.db import Permission, RepoGroup, Repository, Setting, Ui, User, UserRepoGroupToPerm, UserToPerm |
|
43 | 41 | #from kallithea.model import meta |
|
44 | 42 | from kallithea.model.meta import Base, Session |
|
45 | 43 | from kallithea.model.permission import PermissionModel |
|
46 | 44 | from kallithea.model.repo_group import RepoGroupModel |
|
47 | 45 | from kallithea.model.user import UserModel |
|
48 | 46 | |
|
49 | 47 | |
|
50 | 48 | log = logging.getLogger(__name__) |
|
51 | 49 | |
|
52 | 50 | |
|
class DbManage(object):
    """Database creation and setup helper for Kallithea.

    Creates the schema (stamped with the latest Alembic revision) and
    populates default settings, permissions and users.
    """

    def __init__(self, dbconf, root, tests=False, SESSION=None, cli_args=None):
        """
        :param dbconf: SQLAlchemy database URI; its last path segment is used as the db name
        :param root: application root path
        :param tests: run non-interactively with test-suite defaults when True
        :param SESSION: optional existing SQLAlchemy session to reuse
        :param cli_args: dict of command line options (force_ask, username,
            password, email, repos_location, public_access, ...)
        """
        self.dbname = dbconf.split('/')[-1]
        self.tests = tests
        self.root = root
        self.dburi = dbconf
        self.db_exists = False
        self.cli_args = cli_args or {}
        self.init_db(SESSION=SESSION)

    def _ask_ok(self, msg):
        """Invoke ask_ok unless the force_ask option provides the answer"""
        force_ask = self.cli_args.get('force_ask')
        if force_ask is not None:
            return force_ask
        from kallithea.lib.utils2 import ask_ok
        return ask_ok(msg)

    def init_db(self, SESSION=None):
        """Bind self.sa to the given session, or to a new one built from self.dburi."""
        if SESSION:
            self.sa = SESSION
        else:
            # init new sessions
            engine = create_engine(self.dburi)
            init_model(engine)
            self.sa = Session()

    def create_tables(self, override=False):
        """
        Create the database tables, destroying any existing database first
        (after confirmation, unless running tests or force_ask is set).

        :param override: when True, create tables unconditionally instead of
            only when missing (passed as ``checkfirst=not override``)
        """

        log.info("Any existing database is going to be destroyed")
        if self.tests:
            destroy = True
        else:
            destroy = self._ask_ok('Are you sure to destroy old database ? [y/n]')
        if not destroy:
            print('Nothing done.')
            sys.exit(0)
        if destroy:
            # drop and re-create old schemas

            url = sqlalchemy.engine.url.make_url(self.dburi)
            database = url.database

            # Some databases enforce foreign key constraints and Base.metadata.drop_all() doesn't work
            if url.drivername == 'mysql':
                url.database = None  # don't connect to the database (it might not exist)
                engine = sqlalchemy.create_engine(url)
                with engine.connect() as conn:
                    conn.execute('DROP DATABASE IF EXISTS ' + database)
                    conn.execute('CREATE DATABASE ' + database)
            elif url.drivername == 'postgresql':
                from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
                url.database = 'postgres'  # connect to the system database (as the real one might not exist)
                engine = sqlalchemy.create_engine(url)
                with engine.connect() as conn:
                    # DROP/CREATE DATABASE cannot run inside a transaction block
                    conn.connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
                    conn.execute('DROP DATABASE IF EXISTS ' + database)
                    conn.execute('CREATE DATABASE ' + database)
            else:
                # known to work on SQLite - possibly not on other databases with strong referential integrity
                Base.metadata.drop_all()

        checkfirst = not override
        Base.metadata.create_all(checkfirst=checkfirst)

        # Create an Alembic configuration and generate the version table,
        # "stamping" it with the most recent Alembic migration revision, to
        # tell Alembic that all the schema upgrades are already in effect.
        alembic_cfg = alembic.config.Config()
        alembic_cfg.set_main_option('script_location', 'kallithea:alembic')
        alembic_cfg.set_main_option('sqlalchemy.url', self.dburi)
        # This command will give an error in an Alembic multi-head scenario,
        # but in practice, such a scenario should not come up during database
        # creation, even during development.
        alembic.command.stamp(alembic_cfg, 'head')

        log.info('Created tables for %s', self.dbname)

    def fix_repo_paths(self):
        """
        Fixes a old kallithea version path into new one without a '*'
        """

        paths = Ui.query() \
            .filter(Ui.ui_key == '/') \
            .scalar()

        paths.ui_value = paths.ui_value.replace('*', '')

        self.sa.commit()

    def fix_default_user(self):
        """
        Fixes a old default user with some 'nicer' default values,
        used mostly for anonymous access
        """
        def_user = User.query().filter_by(is_default_user=True).one()

        def_user.name = 'Anonymous'
        def_user.lastname = 'User'
        def_user.email = 'anonymous@kallithea-scm.org'

        self.sa.commit()

    def fix_settings(self):
        """
        Fixes kallithea settings adds ga_code key for google analytics
        """

        hgsettings3 = Setting('ga_code', '')

        self.sa.add(hgsettings3)
        self.sa.commit()

    def admin_prompt(self, second=False):
        """
        Create the admin user, taking username/password/email from cli_args
        and prompting for whatever is missing; in test mode create the
        standard test admin and regular users instead.
        """
        # NOTE(review): the 'second' parameter is never used in this body -
        # presumably kept for interface compatibility; confirm with callers.
        if not self.tests:
            import getpass

            username = self.cli_args.get('username')
            password = self.cli_args.get('password')
            email = self.cli_args.get('email')

            def get_password():
                # Returns the confirmed password, or False on mismatch /
                # too-short input.
                password = getpass.getpass('Specify admin password '
                                           '(min 6 chars):')
                confirm = getpass.getpass('Confirm password:')

                if password != confirm:
                    log.error('passwords mismatch')
                    return False
                if len(password) < 6:
                    log.error('password is to short use at least 6 characters')
                    return False

                return password
            if username is None:
                username = input('Specify admin username:')
            if password is None:
                password = get_password()
                if not password:
                    # second try
                    password = get_password()
                    if not password:
                        sys.exit()
            if email is None:
                email = input('Specify admin email:')
            self.create_user(username, password, email, True)
        else:
            log.info('creating admin and regular test users')
            from kallithea.tests.base import TEST_USER_ADMIN_LOGIN, \
                TEST_USER_ADMIN_PASS, TEST_USER_ADMIN_EMAIL, \
                TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS, \
                TEST_USER_REGULAR_EMAIL, TEST_USER_REGULAR2_LOGIN, \
                TEST_USER_REGULAR2_PASS, TEST_USER_REGULAR2_EMAIL

            self.create_user(TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS,
                             TEST_USER_ADMIN_EMAIL, True)

            self.create_user(TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS,
                             TEST_USER_REGULAR_EMAIL, False)

            self.create_user(TEST_USER_REGULAR2_LOGIN, TEST_USER_REGULAR2_PASS,
                             TEST_USER_REGULAR2_EMAIL, False)

    def create_auth_plugin_options(self, skip_existing=False):
        """
        Create default auth plugin settings, and make it active

        :param skip_existing: do not overwrite settings that already exist
        """

        for k, v, t in [('auth_plugins', 'kallithea.lib.auth_modules.auth_internal', 'list'),
                        ('auth_internal_enabled', 'True', 'bool')]:
            if skip_existing and Setting.get_by_name(k) is not None:
                log.debug('Skipping option %s', k)
                continue
            setting = Setting(k, v, t)
            self.sa.add(setting)

    def create_default_options(self, skip_existing=False):
        """Creates default settings"""

        for k, v, t in [
            ('default_repo_enable_downloads', False, 'bool'),
            ('default_repo_enable_statistics', False, 'bool'),
            ('default_repo_private', False, 'bool'),
            ('default_repo_type', 'hg', 'unicode')
        ]:
            if skip_existing and Setting.get_by_name(k) is not None:
                log.debug('Skipping option %s', k)
                continue
            setting = Setting(k, v, t)
            self.sa.add(setting)

    def fixup_groups(self):
        """Normalize repo group names and ensure each group has a default
        permission entry for the default user."""
        def_usr = User.get_default_user()
        for g in RepoGroup.query().all():
            g.group_name = g.get_new_name(g.name)
            # get default perm
            default = UserRepoGroupToPerm.query() \
                .filter(UserRepoGroupToPerm.group == g) \
                .filter(UserRepoGroupToPerm.user == def_usr) \
                .scalar()

            if default is None:
                log.debug('missing default permission for group %s adding', g)
                RepoGroupModel()._create_default_perms(g)

    def reset_permissions(self, username):
        """
        Resets permissions to default state, useful when old systems had
        bad permissions, we must clean them up

        :param username: name of the user whose permissions are reset
        :returns: True when permissions were recreated, None when the user
            does not exist, False when nothing needed fixing
        """
        default_user = User.get_by_username(username)
        if not default_user:
            return

        u2p = UserToPerm.query() \
            .filter(UserToPerm.user == default_user).all()
        fixed = False
        if len(u2p) != len(Permission.DEFAULT_USER_PERMISSIONS):
            for p in u2p:
                Session().delete(p)
            fixed = True
            self.populate_default_permissions()
        return fixed

    def update_repo_info(self):
        """Refresh the cached changeset information of every repository."""
        for repo in Repository.query():
            repo.update_changeset_cache()

    def prompt_repo_root_path(self, test_repo_path='', retries=3):
        """
        Return a validated, normalized absolute path for storing repositories.

        Uses the 'repos_location' cli argument when given, otherwise prompts
        the user (up to ``retries`` times) or falls back to ``test_repo_path``.
        Exits the process on an invalid configured path or when retries run
        out.
        """
        _path = self.cli_args.get('repos_location')
        if retries == 3:
            log.info('Setting up repositories config')

        if _path is not None:
            path = _path
        elif not self.tests and not test_repo_path:
            path = input(
                'Enter a valid absolute path to store repositories. '
                'All repositories in that path will be added automatically:'
            )
        else:
            path = test_repo_path
        path_ok = True

        # check proper dir
        if not os.path.isdir(path):
            path_ok = False
            log.error('Given path %s is not a valid directory', path)

        elif not os.path.isabs(path):
            path_ok = False
            log.error('Given path %s is not an absolute path', path)

        # check if path is at least readable.
        if not os.access(path, os.R_OK):
            path_ok = False
            log.error('Given path %s is not readable', path)

        # check write access, warn user about non writeable paths
        elif not os.access(path, os.W_OK) and path_ok:
            log.warning('No write permission to given path %s', path)
            if not self._ask_ok('Given path %s is not writeable, do you want to '
                                'continue with read only mode ? [y/n]' % (path,)):
                log.error('Canceled by user')
                sys.exit(-1)

        if retries == 0:
            sys.exit('max retries reached')
        if not path_ok:
            if _path is not None:
                sys.exit('Invalid repo path: %s' % _path)
            retries -= 1
            return self.prompt_repo_root_path(test_repo_path, retries)  # recursing!!!

        real_path = os.path.normpath(os.path.realpath(path))

        if real_path != os.path.normpath(path):
            log.warning('Using normalized path %s instead of %s', real_path, path)

        return real_path

    def create_settings(self, repo_root_path):
        """Populate the Ui and Setting tables with default configuration,
        rooted at ``repo_root_path``."""
        ui_config = [
            ('paths', '/', repo_root_path, True),
            #('phases', 'publish', 'false', False)
            ('hooks', Ui.HOOK_UPDATE, 'hg update >&2', False),
            ('hooks', Ui.HOOK_REPO_SIZE, 'python:kallithea.lib.hooks.repo_size', True),
            ('extensions', 'largefiles', '', True),
            ('largefiles', 'usercache', os.path.join(repo_root_path, '.cache', 'largefiles'), True),
            ('extensions', 'hgsubversion', '', False),
            ('extensions', 'hggit', '', False),
        ]
        for ui_section, ui_key, ui_value, ui_active in ui_config:
            ui_conf = Ui(
                ui_section=ui_section,
                ui_key=ui_key,
                ui_value=ui_value,
                ui_active=ui_active)
            self.sa.add(ui_conf)

        settings = [
            ('realm', 'Kallithea', 'unicode'),
            ('title', '', 'unicode'),
            ('ga_code', '', 'unicode'),
            ('show_public_icon', True, 'bool'),
            ('show_private_icon', True, 'bool'),
            ('stylify_metalabels', False, 'bool'),
            ('dashboard_items', 100, 'int'),  # TODO: call it page_size
            ('admin_grid_items', 25, 'int'),
            ('show_version', True, 'bool'),
            ('use_gravatar', True, 'bool'),
            ('gravatar_url', User.DEFAULT_GRAVATAR_URL, 'unicode'),
            ('clone_uri_tmpl', Repository.DEFAULT_CLONE_URI, 'unicode'),
            ('clone_ssh_tmpl', Repository.DEFAULT_CLONE_SSH, 'unicode'),
        ]
        for key, val, type_ in settings:
            sett = Setting(key, val, type_)
            self.sa.add(sett)

        self.create_auth_plugin_options()
        self.create_default_options()

        log.info('Populated Ui and Settings defaults')

    def create_user(self, username, password, email='', admin=False):
        """Create (or update) a user with the given credentials.

        Note: first/last name are always set to 'Kallithea'/'Admin'.
        """
        log.info('creating user %s', username)
        UserModel().create_or_update(username, password, email,
                                     firstname='Kallithea', lastname='Admin',
                                     active=True, admin=admin,
                                     extern_type=User.DEFAULT_AUTH_TYPE)

    def create_default_user(self):
        """Create the special default user that carries default permissions
        and whose active flag controls anonymous access."""
        log.info('creating default user')
        # create default user for handling default permissions.
        user = UserModel().create_or_update(username=User.DEFAULT_USER,
                                            password=str(uuid.uuid1())[:20],
                                            email='anonymous@kallithea-scm.org',
                                            firstname='Anonymous',
                                            lastname='User')
        # based on configuration options activate/deactivate this user which
        # controls anonymous access
        if self.cli_args.get('public_access') is False:
            log.info('Public access disabled')
            user.active = False
            Session().commit()

    def create_permissions(self):
        """
        Creates all permissions defined in the system
        """
        # module.(access|create|change|delete)_[name]
        # module.(none|read|write|admin)
        log.info('creating permissions')
        PermissionModel().create_permissions()

    def populate_default_permissions(self):
        """
        Populate default permissions. It will create only the default
        permissions that are missing, and not alter already defined ones
        """
        log.info('creating default user permissions')
        PermissionModel().create_default_permissions(user=User.DEFAULT_USER)
@@ -1,142 +1,140 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | |
|
15 | from __future__ import print_function | |
|
16 | ||
|
17 | 15 | import errno |
|
18 | 16 | import os |
|
19 | 17 | from multiprocessing.util import Finalize |
|
20 | 18 | |
|
21 | 19 | from kallithea.lib.compat import kill |
|
22 | 20 | |
|
23 | 21 | |
|
class LockHeld(Exception):
    """Raised when the pid/lock file is already held by a running process."""
    pass
|
26 | 24 | |
|
27 | 25 | |
|
class DaemonLock(object):
    """PID-file based daemon locking.

    USAGE:
    try:
        l = DaemonLock('/path/tolockfile',desc='test lock')
        main()
        l.release()
    except LockHeld:
        sys.exit(1)
    """

    def __init__(self, file_, callbackfn=None,
                 desc='daemon lock', debug=False):
        """
        :param file_: path of the pid/lock file
        :param callbackfn: optional callable invoked just before releasing
        :param desc: human readable description of the lock
        :param debug: when True, print diagnostic messages
        """
        self.pidfile = file_
        self.callbackfn = callbackfn
        self.desc = desc
        self.debug = debug
        self.held = False
        # run the lock automatically!
        self.lock()
        # ensure the lock is released when the process exits
        self._finalize = Finalize(self, DaemonLock._on_finalize,
                                  args=(self, debug), exitpriority=10)

    @staticmethod
    def _on_finalize(lock, debug):
        """Exit hook: release the lock if it is still held."""
        if lock.held:
            if debug:
                print('lock held finalizing and running lock.release()')
            lock.release()

    def lock(self):
        """
        locking function, if lock is present it
        will raise LockHeld exception
        """
        lockname = str(os.getpid())
        if self.debug:
            print('running lock')
        self.trylock()
        self.makelock(lockname, self.pidfile)
        return True

    def trylock(self):
        """
        Raise LockHeld when another live process owns the pid file; remove
        stale pid files left behind by dead processes.
        """
        running_pid = False
        if self.debug:
            print('checking for already running process')
        try:
            with open(self.pidfile, 'r') as f:
                try:
                    running_pid = int(f.readline())
                except ValueError:
                    # unparsable content still counts as a (stale) lock
                    running_pid = -1

            if self.debug:
                print('lock file present running_pid: %s, '
                      'checking for execution' % (running_pid,))
            # Now we check the PID from lock file matches to the current
            # process PID
            if running_pid:
                try:
                    kill(running_pid, 0)
                except OSError as exc:
                    # process gone (ESRCH) or not signalable (EPERM):
                    # treat the lock file as stale and clean it up
                    if exc.errno in (errno.ESRCH, errno.EPERM):
                        print("Lock File is there but"
                              " the program is not running")
                        print("Removing lock file for the: %s" % running_pid)
                        self.release()
                    else:
                        raise
                else:
                    print("You already have an instance of the program running")
                    print("It is running as process %s" % running_pid)
                    raise LockHeld()

        except IOError as e:
            # a missing pid file simply means the lock is free
            # (was the magic number 2 - use the symbolic errno instead)
            if e.errno != errno.ENOENT:
                raise

    def release(self):
        """releases the pid by removing the pidfile
        """
        if self.debug:
            print('trying to release the pidlock')

        if self.callbackfn:
            # execute callback function on release
            if self.debug:
                print('executing callback function %s' % self.callbackfn)
            self.callbackfn()
        try:
            if self.debug:
                print('removing pidfile %s' % self.pidfile)
            os.remove(self.pidfile)
            self.held = False
        except OSError as e:
            # best effort: a pid file that already vanished is not fatal
            if self.debug:
                print('removing pidfile failed %s' % e)

    def makelock(self, lockname, pidfile):
        """
        this function will make an actual lock

        :param lockname: actual pid of file
        :param pidfile: the file to write the pid in
        """
        if self.debug:
            print('creating a file %s and pid: %s' % (pidfile, lockname))

        dir_, file_ = os.path.split(pidfile)
        if not os.path.isdir(dir_):
            os.makedirs(dir_)
        with open(self.pidfile, 'w') as f:
            f.write(lockname)
        self.held = True
@@ -1,613 +1,611 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.lib.utils2 |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Some simple helper functions. |
|
19 | 19 | Note: all these functions should be independent of Kallithea classes, i.e. |
|
20 | 20 | models, controllers, etc. to prevent import cycles. |
|
21 | 21 | |
|
22 | 22 | This file was forked by the Kallithea project in July 2014. |
|
23 | 23 | Original author and date, and relevant copyright and licensing information is below: |
|
24 | 24 | :created_on: Jan 5, 2011 |
|
25 | 25 | :author: marcink |
|
26 | 26 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
27 | 27 | :license: GPLv3, see LICENSE.md for more details. |
|
28 | 28 | """ |
|
29 | 29 | |
|
30 | from __future__ import print_function | |
|
31 | ||
|
32 | 30 | import binascii |
|
33 | 31 | import datetime |
|
34 | 32 | import json |
|
35 | 33 | import os |
|
36 | 34 | import pwd |
|
37 | 35 | import re |
|
38 | 36 | import time |
|
39 | 37 | import urllib.parse |
|
40 | 38 | |
|
41 | 39 | import urlobject |
|
42 | 40 | from tg.i18n import ugettext as _ |
|
43 | 41 | from tg.i18n import ungettext |
|
44 | 42 | from webhelpers2.text import collapse, remove_formatting, strip_tags |
|
45 | 43 | |
|
46 | 44 | from kallithea.lib.vcs.utils import ascii_bytes, ascii_str, safe_bytes, safe_str # re-export |
|
47 | 45 | from kallithea.lib.vcs.utils.lazy import LazyProperty |
|
48 | 46 | |
|
49 | 47 | |
|
def str2bool(_str):
    """
    Translate the given value into a boolean.

    None maps to False, real booleans pass through unchanged, and anything
    else is stringified and matched against the common "true" spellings.

    :param _str: string value to translate into boolean
    :rtype: boolean
    :returns: boolean from given string
    """
    if _str is None:
        return False
    if _str in (True, False):
        return _str
    normalized = str(_str).strip().lower()
    return normalized in ('t', 'true', 'y', 'yes', 'on', '1')
|
65 | 63 | |
|
66 | 64 | |
|
67 | 65 | def aslist(obj, sep=None, strip=True): |
|
68 | 66 | """ |
|
69 | 67 | Returns given string separated by sep as list |
|
70 | 68 | |
|
71 | 69 | :param obj: |
|
72 | 70 | :param sep: |
|
73 | 71 | :param strip: |
|
74 | 72 | """ |
|
75 | 73 | if isinstance(obj, (str)): |
|
76 | 74 | lst = obj.split(sep) |
|
77 | 75 | if strip: |
|
78 | 76 | lst = [v.strip() for v in lst] |
|
79 | 77 | return lst |
|
80 | 78 | elif isinstance(obj, (list, tuple)): |
|
81 | 79 | return obj |
|
82 | 80 | elif obj is None: |
|
83 | 81 | return [] |
|
84 | 82 | else: |
|
85 | 83 | return [obj] |
|
86 | 84 | |
|
87 | 85 | |
|
88 | 86 | def convert_line_endings(line, mode): |
|
89 | 87 | """ |
|
90 | 88 | Converts a given line "line end" according to given mode |
|
91 | 89 | |
|
92 | 90 | Available modes are:: |
|
93 | 91 | 0 - Unix |
|
94 | 92 | 1 - Mac |
|
95 | 93 | 2 - DOS |
|
96 | 94 | |
|
97 | 95 | :param line: given line to convert |
|
98 | 96 | :param mode: mode to convert to |
|
99 | 97 | :rtype: str |
|
100 | 98 | :return: converted line according to mode |
|
101 | 99 | """ |
|
102 | 100 | if mode == 0: |
|
103 | 101 | line = line.replace('\r\n', '\n') |
|
104 | 102 | line = line.replace('\r', '\n') |
|
105 | 103 | elif mode == 1: |
|
106 | 104 | line = line.replace('\r\n', '\r') |
|
107 | 105 | line = line.replace('\n', '\r') |
|
108 | 106 | elif mode == 2: |
|
109 | 107 | line = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", line) |
|
110 | 108 | return line |
|
111 | 109 | |
|
112 | 110 | |
|
113 | 111 | def detect_mode(line, default): |
|
114 | 112 | """ |
|
115 | 113 | Detects line break for given line, if line break couldn't be found |
|
116 | 114 | given default value is returned |
|
117 | 115 | |
|
118 | 116 | :param line: str line |
|
119 | 117 | :param default: default |
|
120 | 118 | :rtype: int |
|
121 | 119 | :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS |
|
122 | 120 | """ |
|
123 | 121 | if line.endswith('\r\n'): |
|
124 | 122 | return 2 |
|
125 | 123 | elif line.endswith('\n'): |
|
126 | 124 | return 0 |
|
127 | 125 | elif line.endswith('\r'): |
|
128 | 126 | return 1 |
|
129 | 127 | else: |
|
130 | 128 | return default |
|
131 | 129 | |
|
132 | 130 | |
|
133 | 131 | def generate_api_key(): |
|
134 | 132 | """ |
|
135 | 133 | Generates a random (presumably unique) API key. |
|
136 | 134 | |
|
137 | 135 | This value is used in URLs and "Bearer" HTTP Authorization headers, |
|
138 | 136 | which in practice means it should only contain URL-safe characters |
|
139 | 137 | (RFC 3986): |
|
140 | 138 | |
|
141 | 139 | unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" |
|
142 | 140 | """ |
|
143 | 141 | # Hexadecimal certainly qualifies as URL-safe. |
|
144 | 142 | return ascii_str(binascii.hexlify(os.urandom(20))) |
|
145 | 143 | |
|
146 | 144 | |
|
147 | 145 | def safe_int(val, default=None): |
|
148 | 146 | """ |
|
149 | 147 | Returns int() of val if val is not convertable to int use default |
|
150 | 148 | instead |
|
151 | 149 | |
|
152 | 150 | :param val: |
|
153 | 151 | :param default: |
|
154 | 152 | """ |
|
155 | 153 | try: |
|
156 | 154 | val = int(val) |
|
157 | 155 | except (ValueError, TypeError): |
|
158 | 156 | val = default |
|
159 | 157 | return val |
|
160 | 158 | |
|
161 | 159 | |
|
162 | 160 | def remove_suffix(s, suffix): |
|
163 | 161 | if s.endswith(suffix): |
|
164 | 162 | s = s[:-1 * len(suffix)] |
|
165 | 163 | return s |
|
166 | 164 | |
|
167 | 165 | |
|
168 | 166 | def remove_prefix(s, prefix): |
|
169 | 167 | if s.startswith(prefix): |
|
170 | 168 | s = s[len(prefix):] |
|
171 | 169 | return s |
|
172 | 170 | |
|
173 | 171 | |
|
174 | 172 | def age(prevdate, show_short_version=False, now=None): |
|
175 | 173 | """ |
|
176 | 174 | turns a datetime into an age string. |
|
177 | 175 | If show_short_version is True, then it will generate a not so accurate but shorter string, |
|
178 | 176 | example: 2days ago, instead of 2 days and 23 hours ago. |
|
179 | 177 | |
|
180 | 178 | :param prevdate: datetime object |
|
181 | 179 | :param show_short_version: if it should approximate the date and return a shorter string |
|
182 | 180 | :rtype: str |
|
183 | 181 | :returns: str words describing age |
|
184 | 182 | """ |
|
185 | 183 | now = now or datetime.datetime.now() |
|
186 | 184 | order = ['year', 'month', 'day', 'hour', 'minute', 'second'] |
|
187 | 185 | deltas = {} |
|
188 | 186 | future = False |
|
189 | 187 | |
|
190 | 188 | if prevdate > now: |
|
191 | 189 | now, prevdate = prevdate, now |
|
192 | 190 | future = True |
|
193 | 191 | if future: |
|
194 | 192 | prevdate = prevdate.replace(microsecond=0) |
|
195 | 193 | # Get date parts deltas |
|
196 | 194 | from dateutil import relativedelta |
|
197 | 195 | for part in order: |
|
198 | 196 | d = relativedelta.relativedelta(now, prevdate) |
|
199 | 197 | deltas[part] = getattr(d, part + 's') |
|
200 | 198 | |
|
201 | 199 | # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00, |
|
202 | 200 | # not 1 hour, -59 minutes and -59 seconds) |
|
203 | 201 | for num, length in [(5, 60), (4, 60), (3, 24)]: # seconds, minutes, hours |
|
204 | 202 | part = order[num] |
|
205 | 203 | carry_part = order[num - 1] |
|
206 | 204 | |
|
207 | 205 | if deltas[part] < 0: |
|
208 | 206 | deltas[part] += length |
|
209 | 207 | deltas[carry_part] -= 1 |
|
210 | 208 | |
|
211 | 209 | # Same thing for days except that the increment depends on the (variable) |
|
212 | 210 | # number of days in the month |
|
213 | 211 | month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] |
|
214 | 212 | if deltas['day'] < 0: |
|
215 | 213 | if prevdate.month == 2 and (prevdate.year % 4 == 0 and |
|
216 | 214 | (prevdate.year % 100 != 0 or prevdate.year % 400 == 0) |
|
217 | 215 | ): |
|
218 | 216 | deltas['day'] += 29 |
|
219 | 217 | else: |
|
220 | 218 | deltas['day'] += month_lengths[prevdate.month - 1] |
|
221 | 219 | |
|
222 | 220 | deltas['month'] -= 1 |
|
223 | 221 | |
|
224 | 222 | if deltas['month'] < 0: |
|
225 | 223 | deltas['month'] += 12 |
|
226 | 224 | deltas['year'] -= 1 |
|
227 | 225 | |
|
228 | 226 | # In short version, we want nicer handling of ages of more than a year |
|
229 | 227 | if show_short_version: |
|
230 | 228 | if deltas['year'] == 1: |
|
231 | 229 | # ages between 1 and 2 years: show as months |
|
232 | 230 | deltas['month'] += 12 |
|
233 | 231 | deltas['year'] = 0 |
|
234 | 232 | if deltas['year'] >= 2: |
|
235 | 233 | # ages 2+ years: round |
|
236 | 234 | if deltas['month'] > 6: |
|
237 | 235 | deltas['year'] += 1 |
|
238 | 236 | deltas['month'] = 0 |
|
239 | 237 | |
|
240 | 238 | # Format the result |
|
241 | 239 | fmt_funcs = { |
|
242 | 240 | 'year': lambda d: ungettext('%d year', '%d years', d) % d, |
|
243 | 241 | 'month': lambda d: ungettext('%d month', '%d months', d) % d, |
|
244 | 242 | 'day': lambda d: ungettext('%d day', '%d days', d) % d, |
|
245 | 243 | 'hour': lambda d: ungettext('%d hour', '%d hours', d) % d, |
|
246 | 244 | 'minute': lambda d: ungettext('%d minute', '%d minutes', d) % d, |
|
247 | 245 | 'second': lambda d: ungettext('%d second', '%d seconds', d) % d, |
|
248 | 246 | } |
|
249 | 247 | |
|
250 | 248 | for i, part in enumerate(order): |
|
251 | 249 | value = deltas[part] |
|
252 | 250 | if value == 0: |
|
253 | 251 | continue |
|
254 | 252 | |
|
255 | 253 | if i < 5: |
|
256 | 254 | sub_part = order[i + 1] |
|
257 | 255 | sub_value = deltas[sub_part] |
|
258 | 256 | else: |
|
259 | 257 | sub_value = 0 |
|
260 | 258 | |
|
261 | 259 | if sub_value == 0 or show_short_version: |
|
262 | 260 | if future: |
|
263 | 261 | return _('in %s') % fmt_funcs[part](value) |
|
264 | 262 | else: |
|
265 | 263 | return _('%s ago') % fmt_funcs[part](value) |
|
266 | 264 | if future: |
|
267 | 265 | return _('in %s and %s') % (fmt_funcs[part](value), |
|
268 | 266 | fmt_funcs[sub_part](sub_value)) |
|
269 | 267 | else: |
|
270 | 268 | return _('%s and %s ago') % (fmt_funcs[part](value), |
|
271 | 269 | fmt_funcs[sub_part](sub_value)) |
|
272 | 270 | |
|
273 | 271 | return _('just now') |
|
274 | 272 | |
|
275 | 273 | |
|
276 | 274 | def uri_filter(uri): |
|
277 | 275 | """ |
|
278 | 276 | Removes user:password from given url string |
|
279 | 277 | |
|
280 | 278 | :param uri: |
|
281 | 279 | :rtype: str |
|
282 | 280 | :returns: filtered list of strings |
|
283 | 281 | """ |
|
284 | 282 | if not uri: |
|
285 | 283 | return [] |
|
286 | 284 | |
|
287 | 285 | proto = '' |
|
288 | 286 | |
|
289 | 287 | for pat in ('https://', 'http://', 'git://'): |
|
290 | 288 | if uri.startswith(pat): |
|
291 | 289 | uri = uri[len(pat):] |
|
292 | 290 | proto = pat |
|
293 | 291 | break |
|
294 | 292 | |
|
295 | 293 | # remove passwords and username |
|
296 | 294 | uri = uri[uri.find('@') + 1:] |
|
297 | 295 | |
|
298 | 296 | # get the port |
|
299 | 297 | cred_pos = uri.find(':') |
|
300 | 298 | if cred_pos == -1: |
|
301 | 299 | host, port = uri, None |
|
302 | 300 | else: |
|
303 | 301 | host, port = uri[:cred_pos], uri[cred_pos + 1:] |
|
304 | 302 | |
|
305 | 303 | return [_f for _f in [proto, host, port] if _f] |
|
306 | 304 | |
|
307 | 305 | |
|
308 | 306 | def credentials_filter(uri): |
|
309 | 307 | """ |
|
310 | 308 | Returns a url with removed credentials |
|
311 | 309 | |
|
312 | 310 | :param uri: |
|
313 | 311 | """ |
|
314 | 312 | |
|
315 | 313 | uri = uri_filter(uri) |
|
316 | 314 | # check if we have port |
|
317 | 315 | if len(uri) > 2 and uri[2]: |
|
318 | 316 | uri[2] = ':' + uri[2] |
|
319 | 317 | |
|
320 | 318 | return ''.join(uri) |
|
321 | 319 | |
|
322 | 320 | |
|
323 | 321 | def get_clone_url(clone_uri_tmpl, prefix_url, repo_name, repo_id, username=None): |
|
324 | 322 | parsed_url = urlobject.URLObject(prefix_url) |
|
325 | 323 | prefix = urllib.parse.unquote(parsed_url.path.rstrip('/')) |
|
326 | 324 | try: |
|
327 | 325 | system_user = pwd.getpwuid(os.getuid()).pw_name |
|
328 | 326 | except Exception: # TODO: support all systems - especially Windows |
|
329 | 327 | system_user = 'kallithea' # hardcoded default value ... |
|
330 | 328 | args = { |
|
331 | 329 | 'scheme': parsed_url.scheme, |
|
332 | 330 | 'user': urllib.parse.quote(username or ''), |
|
333 | 331 | 'netloc': parsed_url.netloc + prefix, # like "hostname:port/prefix" (with optional ":port" and "/prefix") |
|
334 | 332 | 'prefix': prefix, # undocumented, empty or starting with / |
|
335 | 333 | 'repo': repo_name, |
|
336 | 334 | 'repoid': str(repo_id), |
|
337 | 335 | 'system_user': system_user, |
|
338 | 336 | 'hostname': parsed_url.hostname, |
|
339 | 337 | } |
|
340 | 338 | url = re.sub('{([^{}]+)}', lambda m: args.get(m.group(1), m.group(0)), clone_uri_tmpl) |
|
341 | 339 | |
|
342 | 340 | # remove leading @ sign if it's present. Case of empty user |
|
343 | 341 | url_obj = urlobject.URLObject(url) |
|
344 | 342 | if not url_obj.username: |
|
345 | 343 | url_obj = url_obj.with_username(None) |
|
346 | 344 | |
|
347 | 345 | return str(url_obj) |
|
348 | 346 | |
|
349 | 347 | |
|
350 | 348 | def get_changeset_safe(repo, rev): |
|
351 | 349 | """ |
|
352 | 350 | Safe version of get_changeset if this changeset doesn't exists for a |
|
353 | 351 | repo it returns a Dummy one instead |
|
354 | 352 | |
|
355 | 353 | :param repo: |
|
356 | 354 | :param rev: |
|
357 | 355 | """ |
|
358 | 356 | from kallithea.lib.vcs.backends.base import BaseRepository |
|
359 | 357 | from kallithea.lib.vcs.exceptions import RepositoryError |
|
360 | 358 | from kallithea.lib.vcs.backends.base import EmptyChangeset |
|
361 | 359 | if not isinstance(repo, BaseRepository): |
|
362 | 360 | raise Exception('You must pass an Repository ' |
|
363 | 361 | 'object as first argument got %s' % type(repo)) |
|
364 | 362 | |
|
365 | 363 | try: |
|
366 | 364 | cs = repo.get_changeset(rev) |
|
367 | 365 | except (RepositoryError, LookupError): |
|
368 | 366 | cs = EmptyChangeset(requested_revision=rev) |
|
369 | 367 | return cs |
|
370 | 368 | |
|
371 | 369 | |
|
372 | 370 | def datetime_to_time(dt): |
|
373 | 371 | if dt: |
|
374 | 372 | return time.mktime(dt.timetuple()) |
|
375 | 373 | |
|
376 | 374 | |
|
377 | 375 | def time_to_datetime(tm): |
|
378 | 376 | if tm: |
|
379 | 377 | if isinstance(tm, str): |
|
380 | 378 | try: |
|
381 | 379 | tm = float(tm) |
|
382 | 380 | except ValueError: |
|
383 | 381 | return |
|
384 | 382 | return datetime.datetime.fromtimestamp(tm) |
|
385 | 383 | |
|
386 | 384 | |
|
387 | 385 | # Must match regexp in kallithea/public/js/base.js MentionsAutoComplete() |
|
388 | 386 | # Check char before @ - it must not look like we are in an email addresses. |
|
389 | 387 | # Matching is greedy so we don't have to look beyond the end. |
|
390 | 388 | MENTIONS_REGEX = re.compile(r'(?:^|(?<=[^a-zA-Z0-9]))@([a-zA-Z0-9][-_.a-zA-Z0-9]*[a-zA-Z0-9])') |
|
391 | 389 | |
|
392 | 390 | |
|
393 | 391 | def extract_mentioned_usernames(text): |
|
394 | 392 | r""" |
|
395 | 393 | Returns list of (possible) usernames @mentioned in given text. |
|
396 | 394 | |
|
397 | 395 | >>> extract_mentioned_usernames('@1-2.a_X,@1234 not@not @ddd@not @n @ee @ff @gg, @gg;@hh @n\n@zz,') |
|
398 | 396 | ['1-2.a_X', '1234', 'ddd', 'ee', 'ff', 'gg', 'gg', 'hh', 'zz'] |
|
399 | 397 | """ |
|
400 | 398 | return MENTIONS_REGEX.findall(text) |
|
401 | 399 | |
|
402 | 400 | |
|
403 | 401 | def extract_mentioned_users(text): |
|
404 | 402 | """ Returns set of actual database Users @mentioned in given text. """ |
|
405 | 403 | from kallithea.model.db import User |
|
406 | 404 | result = set() |
|
407 | 405 | for name in extract_mentioned_usernames(text): |
|
408 | 406 | user = User.get_by_username(name, case_insensitive=True) |
|
409 | 407 | if user is not None and not user.is_default_user: |
|
410 | 408 | result.add(user) |
|
411 | 409 | return result |
|
412 | 410 | |
|
413 | 411 | |
|
414 | 412 | class AttributeDict(dict): |
|
415 | 413 | def __getattr__(self, attr): |
|
416 | 414 | return self.get(attr, None) |
|
417 | 415 | __setattr__ = dict.__setitem__ |
|
418 | 416 | __delattr__ = dict.__delitem__ |
|
419 | 417 | |
|
420 | 418 | |
|
421 | 419 | def obfuscate_url_pw(engine): |
|
422 | 420 | from sqlalchemy.engine import url as sa_url |
|
423 | 421 | from sqlalchemy.exc import ArgumentError |
|
424 | 422 | try: |
|
425 | 423 | _url = sa_url.make_url(engine or '') |
|
426 | 424 | except ArgumentError: |
|
427 | 425 | return engine |
|
428 | 426 | if _url.password: |
|
429 | 427 | _url.password = 'XXXXX' |
|
430 | 428 | return str(_url) |
|
431 | 429 | |
|
432 | 430 | |
|
433 | 431 | class HookEnvironmentError(Exception): pass |
|
434 | 432 | |
|
435 | 433 | |
|
436 | 434 | def get_hook_environment(): |
|
437 | 435 | """ |
|
438 | 436 | Get hook context by deserializing the global KALLITHEA_EXTRAS environment |
|
439 | 437 | variable. |
|
440 | 438 | |
|
441 | 439 | Called early in Git out-of-process hooks to get .ini config path so the |
|
442 | 440 | basic environment can be configured properly. Also used in all hooks to get |
|
443 | 441 | information about the action that triggered it. |
|
444 | 442 | """ |
|
445 | 443 | |
|
446 | 444 | try: |
|
447 | 445 | kallithea_extras = os.environ['KALLITHEA_EXTRAS'] |
|
448 | 446 | except KeyError: |
|
449 | 447 | raise HookEnvironmentError("Environment variable KALLITHEA_EXTRAS not found") |
|
450 | 448 | |
|
451 | 449 | extras = json.loads(kallithea_extras) |
|
452 | 450 | for k in ['username', 'repository', 'scm', 'action', 'ip', 'config']: |
|
453 | 451 | try: |
|
454 | 452 | extras[k] |
|
455 | 453 | except KeyError: |
|
456 | 454 | raise HookEnvironmentError('Missing key %s in KALLITHEA_EXTRAS %s' % (k, extras)) |
|
457 | 455 | |
|
458 | 456 | return AttributeDict(extras) |
|
459 | 457 | |
|
460 | 458 | |
|
461 | 459 | def set_hook_environment(username, ip_addr, repo_name, repo_alias, action=None): |
|
462 | 460 | """Prepare global context for running hooks by serializing data in the |
|
463 | 461 | global KALLITHEA_EXTRAS environment variable. |
|
464 | 462 | |
|
465 | 463 | Most importantly, this allow Git hooks to do proper logging and updating of |
|
466 | 464 | caches after pushes. |
|
467 | 465 | |
|
468 | 466 | Must always be called before anything with hooks are invoked. |
|
469 | 467 | """ |
|
470 | 468 | from kallithea import CONFIG |
|
471 | 469 | extras = { |
|
472 | 470 | 'ip': ip_addr, # used in log_push/pull_action action_logger |
|
473 | 471 | 'username': username, |
|
474 | 472 | 'action': action or 'push_local', # used in log_push_action_raw_ids action_logger |
|
475 | 473 | 'repository': repo_name, |
|
476 | 474 | 'scm': repo_alias, # used to pick hack in log_push_action_raw_ids |
|
477 | 475 | 'config': CONFIG['__file__'], # used by git hook to read config |
|
478 | 476 | } |
|
479 | 477 | os.environ['KALLITHEA_EXTRAS'] = json.dumps(extras) |
|
480 | 478 | |
|
481 | 479 | |
|
482 | 480 | def get_current_authuser(): |
|
483 | 481 | """ |
|
484 | 482 | Gets kallithea user from threadlocal tmpl_context variable if it's |
|
485 | 483 | defined, else returns None. |
|
486 | 484 | """ |
|
487 | 485 | from tg import tmpl_context |
|
488 | 486 | try: |
|
489 | 487 | return getattr(tmpl_context, 'authuser', None) |
|
490 | 488 | except TypeError: # No object (name: context) has been registered for this thread |
|
491 | 489 | return None |
|
492 | 490 | |
|
493 | 491 | |
|
494 | 492 | class OptionalAttr(object): |
|
495 | 493 | """ |
|
496 | 494 | Special Optional Option that defines other attribute. Example:: |
|
497 | 495 | |
|
498 | 496 | def test(apiuser, userid=Optional(OAttr('apiuser')): |
|
499 | 497 | user = Optional.extract(userid) |
|
500 | 498 | # calls |
|
501 | 499 | |
|
502 | 500 | """ |
|
503 | 501 | |
|
504 | 502 | def __init__(self, attr_name): |
|
505 | 503 | self.attr_name = attr_name |
|
506 | 504 | |
|
507 | 505 | def __repr__(self): |
|
508 | 506 | return '<OptionalAttr:%s>' % self.attr_name |
|
509 | 507 | |
|
510 | 508 | def __call__(self): |
|
511 | 509 | return self |
|
512 | 510 | |
|
513 | 511 | |
|
514 | 512 | # alias |
|
515 | 513 | OAttr = OptionalAttr |
|
516 | 514 | |
|
517 | 515 | |
|
518 | 516 | class Optional(object): |
|
519 | 517 | """ |
|
520 | 518 | Defines an optional parameter:: |
|
521 | 519 | |
|
522 | 520 | param = param.getval() if isinstance(param, Optional) else param |
|
523 | 521 | param = param() if isinstance(param, Optional) else param |
|
524 | 522 | |
|
525 | 523 | is equivalent of:: |
|
526 | 524 | |
|
527 | 525 | param = Optional.extract(param) |
|
528 | 526 | |
|
529 | 527 | """ |
|
530 | 528 | |
|
531 | 529 | def __init__(self, type_): |
|
532 | 530 | self.type_ = type_ |
|
533 | 531 | |
|
534 | 532 | def __repr__(self): |
|
535 | 533 | return '<Optional:%s>' % self.type_.__repr__() |
|
536 | 534 | |
|
537 | 535 | def __call__(self): |
|
538 | 536 | return self.getval() |
|
539 | 537 | |
|
540 | 538 | def getval(self): |
|
541 | 539 | """ |
|
542 | 540 | returns value from this Optional instance |
|
543 | 541 | """ |
|
544 | 542 | if isinstance(self.type_, OAttr): |
|
545 | 543 | # use params name |
|
546 | 544 | return self.type_.attr_name |
|
547 | 545 | return self.type_ |
|
548 | 546 | |
|
549 | 547 | @classmethod |
|
550 | 548 | def extract(cls, val): |
|
551 | 549 | """ |
|
552 | 550 | Extracts value from Optional() instance |
|
553 | 551 | |
|
554 | 552 | :param val: |
|
555 | 553 | :return: original value if it's not Optional instance else |
|
556 | 554 | value of instance |
|
557 | 555 | """ |
|
558 | 556 | if isinstance(val, cls): |
|
559 | 557 | return val.getval() |
|
560 | 558 | return val |
|
561 | 559 | |
|
562 | 560 | |
|
563 | 561 | def urlreadable(s, _cleanstringsub=re.compile('[^-a-zA-Z0-9./]+').sub): |
|
564 | 562 | return _cleanstringsub('_', s).rstrip('_') |
|
565 | 563 | |
|
566 | 564 | |
|
567 | 565 | def recursive_replace(str_, replace=' '): |
|
568 | 566 | """ |
|
569 | 567 | Recursive replace of given sign to just one instance |
|
570 | 568 | |
|
571 | 569 | :param str_: given string |
|
572 | 570 | :param replace: char to find and replace multiple instances |
|
573 | 571 | |
|
574 | 572 | Examples:: |
|
575 | 573 | >>> recursive_replace("Mighty---Mighty-Bo--sstones",'-') |
|
576 | 574 | 'Mighty-Mighty-Bo-sstones' |
|
577 | 575 | """ |
|
578 | 576 | |
|
579 | 577 | if str_.find(replace * 2) == -1: |
|
580 | 578 | return str_ |
|
581 | 579 | else: |
|
582 | 580 | str_ = str_.replace(replace * 2, replace) |
|
583 | 581 | return recursive_replace(str_, replace) |
|
584 | 582 | |
|
585 | 583 | |
|
586 | 584 | def repo_name_slug(value): |
|
587 | 585 | """ |
|
588 | 586 | Return slug of name of repository |
|
589 | 587 | This function is called on each creation/modification |
|
590 | 588 | of repository to prevent bad names in repo |
|
591 | 589 | """ |
|
592 | 590 | |
|
593 | 591 | slug = remove_formatting(value) |
|
594 | 592 | slug = strip_tags(slug) |
|
595 | 593 | |
|
596 | 594 | for c in r"""`?=[]\;'"<>,/~!@#$%^&*()+{}|: """: |
|
597 | 595 | slug = slug.replace(c, '-') |
|
598 | 596 | slug = recursive_replace(slug, '-') |
|
599 | 597 | slug = collapse(slug, '-') |
|
600 | 598 | return slug |
|
601 | 599 | |
|
602 | 600 | |
|
603 | 601 | def ask_ok(prompt, retries=4, complaint='Yes or no please!'): |
|
604 | 602 | while True: |
|
605 | 603 | ok = input(prompt) |
|
606 | 604 | if ok in ('y', 'ye', 'yes'): |
|
607 | 605 | return True |
|
608 | 606 | if ok in ('n', 'no', 'nop', 'nope'): |
|
609 | 607 | return False |
|
610 | 608 | retries = retries - 1 |
|
611 | 609 | if retries < 0: |
|
612 | 610 | raise IOError |
|
613 | 611 | print(complaint) |
@@ -1,220 +1,219 b'' | |||
|
1 | 1 | """ |
|
2 | 2 | Utilities aimed to help achieve mostly basic tasks. |
|
3 | 3 | """ |
|
4 | from __future__ import division | |
|
5 | 4 | |
|
6 | 5 | import datetime |
|
7 | 6 | import os |
|
8 | 7 | import re |
|
9 | 8 | import time |
|
10 | 9 | |
|
11 | 10 | from kallithea.lib.vcs.exceptions import RepositoryError, VCSError |
|
12 | 11 | from kallithea.lib.vcs.utils.paths import abspath |
|
13 | 12 | |
|
14 | 13 | |
|
15 | 14 | ALIASES = ['hg', 'git'] |
|
16 | 15 | |
|
17 | 16 | |
|
18 | 17 | def get_scm(path, search_up=False, explicit_alias=None): |
|
19 | 18 | """ |
|
20 | 19 | Returns one of alias from ``ALIASES`` (in order of precedence same as |
|
21 | 20 | shortcuts given in ``ALIASES``) and top working dir path for the given |
|
22 | 21 | argument. If no scm-specific directory is found or more than one scm is |
|
23 | 22 | found at that directory, ``VCSError`` is raised. |
|
24 | 23 | |
|
25 | 24 | :param search_up: if set to ``True``, this function would try to |
|
26 | 25 | move up to parent directory every time no scm is recognized for the |
|
27 | 26 | currently checked path. Default: ``False``. |
|
28 | 27 | :param explicit_alias: can be one of available backend aliases, when given |
|
29 | 28 | it will return given explicit alias in repositories under more than one |
|
30 | 29 | version control, if explicit_alias is different than found it will raise |
|
31 | 30 | VCSError |
|
32 | 31 | """ |
|
33 | 32 | if not os.path.isdir(path): |
|
34 | 33 | raise VCSError("Given path %s is not a directory" % path) |
|
35 | 34 | |
|
36 | 35 | while True: |
|
37 | 36 | found_scms = [(scm, path) for scm in get_scms_for_path(path)] |
|
38 | 37 | if found_scms or not search_up: |
|
39 | 38 | break |
|
40 | 39 | newpath = abspath(path, '..') |
|
41 | 40 | if newpath == path: |
|
42 | 41 | break |
|
43 | 42 | path = newpath |
|
44 | 43 | |
|
45 | 44 | if len(found_scms) > 1: |
|
46 | 45 | for scm in found_scms: |
|
47 | 46 | if scm[0] == explicit_alias: |
|
48 | 47 | return scm |
|
49 | 48 | raise VCSError('More than one [%s] scm found at given path %s' |
|
50 | 49 | % (', '.join((x[0] for x in found_scms)), path)) |
|
51 | 50 | |
|
52 | 51 | if len(found_scms) == 0: |
|
53 | 52 | raise VCSError('No scm found at given path %s' % path) |
|
54 | 53 | |
|
55 | 54 | return found_scms[0] |
|
56 | 55 | |
|
57 | 56 | |
|
58 | 57 | def get_scms_for_path(path): |
|
59 | 58 | """ |
|
60 | 59 | Returns all scm's found at the given path. If no scm is recognized |
|
61 | 60 | - empty list is returned. |
|
62 | 61 | |
|
63 | 62 | :param path: path to directory which should be checked. May be callable. |
|
64 | 63 | |
|
65 | 64 | :raises VCSError: if given ``path`` is not a directory |
|
66 | 65 | """ |
|
67 | 66 | from kallithea.lib.vcs.backends import get_backend |
|
68 | 67 | if hasattr(path, '__call__'): |
|
69 | 68 | path = path() |
|
70 | 69 | if not os.path.isdir(path): |
|
71 | 70 | raise VCSError("Given path %r is not a directory" % path) |
|
72 | 71 | |
|
73 | 72 | result = [] |
|
74 | 73 | for key in ALIASES: |
|
75 | 74 | # find .hg / .git |
|
76 | 75 | dirname = os.path.join(path, '.' + key) |
|
77 | 76 | if os.path.isdir(dirname): |
|
78 | 77 | result.append(key) |
|
79 | 78 | continue |
|
80 | 79 | # find rm__.hg / rm__.git too - left overs from old method for deleting |
|
81 | 80 | dirname = os.path.join(path, 'rm__.' + key) |
|
82 | 81 | if os.path.isdir(dirname): |
|
83 | 82 | return result |
|
84 | 83 | # We still need to check if it's not bare repository as |
|
85 | 84 | # bare repos don't have working directories |
|
86 | 85 | try: |
|
87 | 86 | get_backend(key)(path) |
|
88 | 87 | result.append(key) |
|
89 | 88 | continue |
|
90 | 89 | except RepositoryError: |
|
91 | 90 | # Wrong backend |
|
92 | 91 | pass |
|
93 | 92 | except VCSError: |
|
94 | 93 | # No backend at all |
|
95 | 94 | pass |
|
96 | 95 | return result |
|
97 | 96 | |
|
98 | 97 | |
|
99 | 98 | def get_highlighted_code(name, code, type='terminal'): |
|
100 | 99 | """ |
|
101 | 100 | If pygments are available on the system |
|
102 | 101 | then returned output is colored. Otherwise |
|
103 | 102 | unchanged content is returned. |
|
104 | 103 | """ |
|
105 | 104 | import logging |
|
106 | 105 | try: |
|
107 | 106 | import pygments |
|
108 | 107 | pygments |
|
109 | 108 | except ImportError: |
|
110 | 109 | return code |
|
111 | 110 | from pygments import highlight |
|
112 | 111 | from pygments.lexers import guess_lexer_for_filename, ClassNotFound |
|
113 | 112 | from pygments.formatters import TerminalFormatter |
|
114 | 113 | |
|
115 | 114 | try: |
|
116 | 115 | lexer = guess_lexer_for_filename(name, code) |
|
117 | 116 | formatter = TerminalFormatter() |
|
118 | 117 | content = highlight(code, lexer, formatter) |
|
119 | 118 | except ClassNotFound: |
|
120 | 119 | logging.debug("Couldn't guess Lexer, will not use pygments.") |
|
121 | 120 | content = code |
|
122 | 121 | return content |
|
123 | 122 | |
|
124 | 123 | |
|
125 | 124 | def parse_changesets(text): |
|
126 | 125 | """ |
|
127 | 126 | Returns dictionary with *start*, *main* and *end* ids. |
|
128 | 127 | |
|
129 | 128 | Examples:: |
|
130 | 129 | |
|
131 | 130 | >>> parse_changesets('aaabbb') |
|
132 | 131 | {'start': None, 'main': 'aaabbb', 'end': None} |
|
133 | 132 | >>> parse_changesets('aaabbb..cccddd') |
|
134 | 133 | {'start': 'aaabbb', 'end': 'cccddd', 'main': None} |
|
135 | 134 | |
|
136 | 135 | """ |
|
137 | 136 | text = text.strip() |
|
138 | 137 | CID_RE = r'[a-zA-Z0-9]+' |
|
139 | 138 | if '..' not in text: |
|
140 | 139 | m = re.match(r'^(?P<cid>%s)$' % CID_RE, text) |
|
141 | 140 | if m: |
|
142 | 141 | return { |
|
143 | 142 | 'start': None, |
|
144 | 143 | 'main': text, |
|
145 | 144 | 'end': None, |
|
146 | 145 | } |
|
147 | 146 | else: |
|
148 | 147 | RE = r'^(?P<start>%s)?\.{2,3}(?P<end>%s)?$' % (CID_RE, CID_RE) |
|
149 | 148 | m = re.match(RE, text) |
|
150 | 149 | if m: |
|
151 | 150 | result = m.groupdict() |
|
152 | 151 | result['main'] = None |
|
153 | 152 | return result |
|
154 | 153 | raise ValueError("IDs not recognized") |
|
155 | 154 | |
|
156 | 155 | |
|
157 | 156 | def parse_datetime(text): |
|
158 | 157 | """ |
|
159 | 158 | Parses given text and returns ``datetime.datetime`` instance or raises |
|
160 | 159 | ``ValueError``. |
|
161 | 160 | |
|
162 | 161 | :param text: string of desired date/datetime or something more verbose, |
|
163 | 162 | like *yesterday*, *2weeks 3days*, etc. |
|
164 | 163 | """ |
|
165 | 164 | |
|
166 | 165 | text = text.strip().lower() |
|
167 | 166 | |
|
168 | 167 | INPUT_FORMATS = ( |
|
169 | 168 | '%Y-%m-%d %H:%M:%S', |
|
170 | 169 | '%Y-%m-%d %H:%M', |
|
171 | 170 | '%Y-%m-%d', |
|
172 | 171 | '%m/%d/%Y %H:%M:%S', |
|
173 | 172 | '%m/%d/%Y %H:%M', |
|
174 | 173 | '%m/%d/%Y', |
|
175 | 174 | '%m/%d/%y %H:%M:%S', |
|
176 | 175 | '%m/%d/%y %H:%M', |
|
177 | 176 | '%m/%d/%y', |
|
178 | 177 | ) |
|
179 | 178 | for format in INPUT_FORMATS: |
|
180 | 179 | try: |
|
181 | 180 | return datetime.datetime(*time.strptime(text, format)[:6]) |
|
182 | 181 | except ValueError: |
|
183 | 182 | pass |
|
184 | 183 | |
|
185 | 184 | # Try descriptive texts |
|
186 | 185 | if text == 'tomorrow': |
|
187 | 186 | future = datetime.datetime.now() + datetime.timedelta(days=1) |
|
188 | 187 | args = future.timetuple()[:3] + (23, 59, 59) |
|
189 | 188 | return datetime.datetime(*args) |
|
190 | 189 | elif text == 'today': |
|
191 | 190 | return datetime.datetime(*datetime.datetime.today().timetuple()[:3]) |
|
192 | 191 | elif text == 'now': |
|
193 | 192 | return datetime.datetime.now() |
|
194 | 193 | elif text == 'yesterday': |
|
195 | 194 | past = datetime.datetime.now() - datetime.timedelta(days=1) |
|
196 | 195 | return datetime.datetime(*past.timetuple()[:3]) |
|
197 | 196 | else: |
|
198 | 197 | days = 0 |
|
199 | 198 | matched = re.match( |
|
200 | 199 | r'^((?P<weeks>\d+) ?w(eeks?)?)? ?((?P<days>\d+) ?d(ays?)?)?$', text) |
|
201 | 200 | if matched: |
|
202 | 201 | groupdict = matched.groupdict() |
|
203 | 202 | if groupdict['days']: |
|
204 | 203 | days += int(matched.groupdict()['days']) |
|
205 | 204 | if groupdict['weeks']: |
|
206 | 205 | days += int(matched.groupdict()['weeks']) * 7 |
|
207 | 206 | past = datetime.datetime.now() - datetime.timedelta(days=days) |
|
208 | 207 | return datetime.datetime(*past.timetuple()[:3]) |
|
209 | 208 | |
|
210 | 209 | raise ValueError('Wrong date: "%s"' % text) |
|
211 | 210 | |
|
212 | 211 | |
|
213 | 212 | def get_dict_for_attrs(obj, attrs): |
|
214 | 213 | """ |
|
215 | 214 | Returns dictionary for each attribute from given ``obj``. |
|
216 | 215 | """ |
|
217 | 216 | data = {} |
|
218 | 217 | for attr in attrs: |
|
219 | 218 | data[attr] = getattr(obj, attr) |
|
220 | 219 | return data |
@@ -1,424 +1,422 b'' | |||
|
1 | 1 | # encoding: UTF-8 |
|
2 | 2 | |
|
3 | from __future__ import print_function | |
|
4 | ||
|
5 | 3 | import datetime |
|
6 | 4 | import string |
|
7 | 5 | import sys |
|
8 | 6 | |
|
9 | 7 | from kallithea.lib.vcs.utils.filesize import filesizeformat |
|
10 | 8 | |
|
11 | 9 | |
|
12 | 10 | class ProgressBarError(Exception): |
|
13 | 11 | pass |
|
14 | 12 | |
|
15 | 13 | |
|
16 | 14 | class AlreadyFinishedError(ProgressBarError): |
|
17 | 15 | pass |
|
18 | 16 | |
|
19 | 17 | |
|
20 | 18 | class ProgressBar(object): |
|
21 | 19 | |
|
22 | 20 | default_elements = ['percentage', 'bar', 'steps'] |
|
23 | 21 | |
|
24 | 22 | def __init__(self, steps=100, stream=None, elements=None): |
|
25 | 23 | self.step = 0 |
|
26 | 24 | self.steps = steps |
|
27 | 25 | self.stream = stream or sys.stderr |
|
28 | 26 | self.bar_char = '=' |
|
29 | 27 | self.width = 50 |
|
30 | 28 | self.separator = ' | ' |
|
31 | 29 | self.elements = elements or self.default_elements |
|
32 | 30 | self.started = None |
|
33 | 31 | self.finished = False |
|
34 | 32 | self.steps_label = 'Step' |
|
35 | 33 | self.time_label = 'Time' |
|
36 | 34 | self.eta_label = 'ETA' |
|
37 | 35 | self.speed_label = 'Speed' |
|
38 | 36 | self.transfer_label = 'Transfer' |
|
39 | 37 | |
|
40 | 38 | def __str__(self): |
|
41 | 39 | return self.get_line() |
|
42 | 40 | |
|
43 | 41 | def __iter__(self): |
|
44 | 42 | start = self.step |
|
45 | 43 | end = self.steps + 1 |
|
46 | 44 | for x in range(start, end): |
|
47 | 45 | self.render(x) |
|
48 | 46 | yield x |
|
49 | 47 | |
|
50 | 48 | def get_separator(self): |
|
51 | 49 | return self.separator |
|
52 | 50 | |
|
53 | 51 | def get_bar_char(self): |
|
54 | 52 | return self.bar_char |
|
55 | 53 | |
|
56 | 54 | def get_bar(self): |
|
57 | 55 | char = self.get_bar_char() |
|
58 | 56 | perc = self.get_percentage() |
|
59 | 57 | length = int(self.width * perc / 100) |
|
60 | 58 | bar = char * length |
|
61 | 59 | bar = bar.ljust(self.width) |
|
62 | 60 | return bar |
|
63 | 61 | |
|
64 | 62 | def get_elements(self): |
|
65 | 63 | return self.elements |
|
66 | 64 | |
|
67 | 65 | def get_template(self): |
|
68 | 66 | separator = self.get_separator() |
|
69 | 67 | elements = self.get_elements() |
|
70 | 68 | return string.Template(separator.join((('$%s' % e) for e in elements))) |
|
71 | 69 | |
|
72 | 70 | def get_total_time(self, current_time=None): |
|
73 | 71 | if current_time is None: |
|
74 | 72 | current_time = datetime.datetime.now() |
|
75 | 73 | if not self.started: |
|
76 | 74 | return datetime.timedelta() |
|
77 | 75 | return current_time - self.started |
|
78 | 76 | |
|
79 | 77 | def get_rendered_total_time(self): |
|
80 | 78 | delta = self.get_total_time() |
|
81 | 79 | if not delta: |
|
82 | 80 | ttime = '-' |
|
83 | 81 | else: |
|
84 | 82 | ttime = str(delta) |
|
85 | 83 | return '%s %s' % (self.time_label, ttime) |
|
86 | 84 | |
|
87 | 85 | def get_eta(self, current_time=None): |
|
88 | 86 | if current_time is None: |
|
89 | 87 | current_time = datetime.datetime.now() |
|
90 | 88 | if self.step == 0: |
|
91 | 89 | return datetime.timedelta() |
|
92 | 90 | total_seconds = self.get_total_time().total_seconds() |
|
93 | 91 | eta_seconds = total_seconds * self.steps / self.step - total_seconds |
|
94 | 92 | return datetime.timedelta(seconds=int(eta_seconds)) |
|
95 | 93 | |
|
96 | 94 | def get_rendered_eta(self): |
|
97 | 95 | eta = self.get_eta() |
|
98 | 96 | if not eta: |
|
99 | 97 | eta = '--:--:--' |
|
100 | 98 | else: |
|
101 | 99 | eta = str(eta).rjust(8) |
|
102 | 100 | return '%s: %s' % (self.eta_label, eta) |
|
103 | 101 | |
|
104 | 102 | def get_percentage(self): |
|
105 | 103 | return float(self.step) / self.steps * 100 |
|
106 | 104 | |
|
107 | 105 | def get_rendered_percentage(self): |
|
108 | 106 | perc = self.get_percentage() |
|
109 | 107 | return ('%s%%' % (int(perc))).rjust(5) |
|
110 | 108 | |
|
111 | 109 | def get_rendered_steps(self): |
|
112 | 110 | return '%s: %s/%s' % (self.steps_label, self.step, self.steps) |
|
113 | 111 | |
|
114 | 112 | def get_rendered_speed(self, step=None, total_seconds=None): |
|
115 | 113 | if step is None: |
|
116 | 114 | step = self.step |
|
117 | 115 | if total_seconds is None: |
|
118 | 116 | total_seconds = self.get_total_time().total_seconds() |
|
119 | 117 | if step <= 0 or total_seconds <= 0: |
|
120 | 118 | speed = '-' |
|
121 | 119 | else: |
|
122 | 120 | speed = filesizeformat(float(step) / total_seconds) |
|
123 | 121 | return '%s: %s/s' % (self.speed_label, speed) |
|
124 | 122 | |
|
125 | 123 | def get_rendered_transfer(self, step=None, steps=None): |
|
126 | 124 | if step is None: |
|
127 | 125 | step = self.step |
|
128 | 126 | if steps is None: |
|
129 | 127 | steps = self.steps |
|
130 | 128 | |
|
131 | 129 | if steps <= 0: |
|
132 | 130 | return '%s: -' % self.transfer_label |
|
133 | 131 | total = filesizeformat(float(steps)) |
|
134 | 132 | if step <= 0: |
|
135 | 133 | transferred = '-' |
|
136 | 134 | else: |
|
137 | 135 | transferred = filesizeformat(float(step)) |
|
138 | 136 | return '%s: %s / %s' % (self.transfer_label, transferred, total) |
|
139 | 137 | |
|
140 | 138 | def get_context(self): |
|
141 | 139 | return { |
|
142 | 140 | 'percentage': self.get_rendered_percentage(), |
|
143 | 141 | 'bar': self.get_bar(), |
|
144 | 142 | 'steps': self.get_rendered_steps(), |
|
145 | 143 | 'time': self.get_rendered_total_time(), |
|
146 | 144 | 'eta': self.get_rendered_eta(), |
|
147 | 145 | 'speed': self.get_rendered_speed(), |
|
148 | 146 | 'transfer': self.get_rendered_transfer(), |
|
149 | 147 | } |
|
150 | 148 | |
|
151 | 149 | def get_line(self): |
|
152 | 150 | template = self.get_template() |
|
153 | 151 | context = self.get_context() |
|
154 | 152 | return template.safe_substitute(**context) |
|
155 | 153 | |
|
156 | 154 | def write(self, data): |
|
157 | 155 | self.stream.write(data) |
|
158 | 156 | |
|
159 | 157 | def render(self, step): |
|
160 | 158 | if not self.started: |
|
161 | 159 | self.started = datetime.datetime.now() |
|
162 | 160 | if self.finished: |
|
163 | 161 | raise AlreadyFinishedError |
|
164 | 162 | self.step = step |
|
165 | 163 | self.write('\r%s' % self) |
|
166 | 164 | if step == self.steps: |
|
167 | 165 | self.finished = True |
|
168 | 166 | if step == self.steps: |
|
169 | 167 | self.write('\n') |
|
170 | 168 | |
|
171 | 169 | |
|
172 | 170 | """ |
|
173 | 171 | termcolors.py |
|
174 | 172 | |
|
175 | 173 | Grabbed from Django (http://www.djangoproject.com) |
|
176 | 174 | """ |
|
177 | 175 | |
|
178 | 176 | color_names = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white') |
|
179 | 177 | foreground = dict([(color_names[x], '3%s' % x) for x in range(8)]) |
|
180 | 178 | background = dict([(color_names[x], '4%s' % x) for x in range(8)]) |
|
181 | 179 | |
|
182 | 180 | RESET = '0' |
|
183 | 181 | opt_dict = {'bold': '1', 'underscore': '4', 'blink': '5', 'reverse': '7', 'conceal': '8'} |
|
184 | 182 | |
|
185 | 183 | |
|
186 | 184 | def colorize(text='', opts=(), **kwargs): |
|
187 | 185 | """ |
|
188 | 186 | Returns your text, enclosed in ANSI graphics codes. |
|
189 | 187 | |
|
190 | 188 | Depends on the keyword arguments 'fg' and 'bg', and the contents of |
|
191 | 189 | the opts tuple/list. |
|
192 | 190 | |
|
193 | 191 | Returns the RESET code if no parameters are given. |
|
194 | 192 | |
|
195 | 193 | Valid colors: |
|
196 | 194 | 'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white' |
|
197 | 195 | |
|
198 | 196 | Valid options: |
|
199 | 197 | 'bold' |
|
200 | 198 | 'underscore' |
|
201 | 199 | 'blink' |
|
202 | 200 | 'reverse' |
|
203 | 201 | 'conceal' |
|
204 | 202 | 'noreset' - string will not be auto-terminated with the RESET code |
|
205 | 203 | |
|
206 | 204 | Examples: |
|
207 | 205 | colorize('hello', fg='red', bg='blue', opts=('blink',)) |
|
208 | 206 | colorize() |
|
209 | 207 | colorize('goodbye', opts=('underscore',)) |
|
210 | 208 | print colorize('first line', fg='red', opts=('noreset',)) |
|
211 | 209 | print 'this should be red too' |
|
212 | 210 | print colorize('and so should this') |
|
213 | 211 | print 'this should not be red' |
|
214 | 212 | """ |
|
215 | 213 | code_list = [] |
|
216 | 214 | if text == '' and len(opts) == 1 and opts[0] == 'reset': |
|
217 | 215 | return '\x1b[%sm' % RESET |
|
218 | 216 | for k, v in kwargs.items(): |
|
219 | 217 | if k == 'fg': |
|
220 | 218 | code_list.append(foreground[v]) |
|
221 | 219 | elif k == 'bg': |
|
222 | 220 | code_list.append(background[v]) |
|
223 | 221 | for o in opts: |
|
224 | 222 | if o in opt_dict: |
|
225 | 223 | code_list.append(opt_dict[o]) |
|
226 | 224 | if 'noreset' not in opts: |
|
227 | 225 | text = text + '\x1b[%sm' % RESET |
|
228 | 226 | return ('\x1b[%sm' % ';'.join(code_list)) + text |
|
229 | 227 | |
|
230 | 228 | |
|
231 | 229 | def make_style(opts=(), **kwargs): |
|
232 | 230 | """ |
|
233 | 231 | Returns a function with default parameters for colorize() |
|
234 | 232 | |
|
235 | 233 | Example: |
|
236 | 234 | bold_red = make_style(opts=('bold',), fg='red') |
|
237 | 235 | print bold_red('hello') |
|
238 | 236 | KEYWORD = make_style(fg='yellow') |
|
239 | 237 | COMMENT = make_style(fg='blue', opts=('bold',)) |
|
240 | 238 | """ |
|
241 | 239 | return lambda text: colorize(text, opts, **kwargs) |
|
242 | 240 | |
|
243 | 241 | |
|
244 | 242 | NOCOLOR_PALETTE = 'nocolor' |
|
245 | 243 | DARK_PALETTE = 'dark' |
|
246 | 244 | LIGHT_PALETTE = 'light' |
|
247 | 245 | |
|
248 | 246 | PALETTES = { |
|
249 | 247 | NOCOLOR_PALETTE: { |
|
250 | 248 | 'ERROR': {}, |
|
251 | 249 | 'NOTICE': {}, |
|
252 | 250 | 'SQL_FIELD': {}, |
|
253 | 251 | 'SQL_COLTYPE': {}, |
|
254 | 252 | 'SQL_KEYWORD': {}, |
|
255 | 253 | 'SQL_TABLE': {}, |
|
256 | 254 | 'HTTP_INFO': {}, |
|
257 | 255 | 'HTTP_SUCCESS': {}, |
|
258 | 256 | 'HTTP_REDIRECT': {}, |
|
259 | 257 | 'HTTP_NOT_MODIFIED': {}, |
|
260 | 258 | 'HTTP_BAD_REQUEST': {}, |
|
261 | 259 | 'HTTP_NOT_FOUND': {}, |
|
262 | 260 | 'HTTP_SERVER_ERROR': {}, |
|
263 | 261 | }, |
|
264 | 262 | DARK_PALETTE: { |
|
265 | 263 | 'ERROR': { 'fg': 'red', 'opts': ('bold',) }, |
|
266 | 264 | 'NOTICE': { 'fg': 'red' }, |
|
267 | 265 | 'SQL_FIELD': { 'fg': 'green', 'opts': ('bold',) }, |
|
268 | 266 | 'SQL_COLTYPE': { 'fg': 'green' }, |
|
269 | 267 | 'SQL_KEYWORD': { 'fg': 'yellow' }, |
|
270 | 268 | 'SQL_TABLE': { 'opts': ('bold',) }, |
|
271 | 269 | 'HTTP_INFO': { 'opts': ('bold',) }, |
|
272 | 270 | 'HTTP_SUCCESS': { }, |
|
273 | 271 | 'HTTP_REDIRECT': { 'fg': 'green' }, |
|
274 | 272 | 'HTTP_NOT_MODIFIED': { 'fg': 'cyan' }, |
|
275 | 273 | 'HTTP_BAD_REQUEST': { 'fg': 'red', 'opts': ('bold',) }, |
|
276 | 274 | 'HTTP_NOT_FOUND': { 'fg': 'yellow' }, |
|
277 | 275 | 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) }, |
|
278 | 276 | }, |
|
279 | 277 | LIGHT_PALETTE: { |
|
280 | 278 | 'ERROR': { 'fg': 'red', 'opts': ('bold',) }, |
|
281 | 279 | 'NOTICE': { 'fg': 'red' }, |
|
282 | 280 | 'SQL_FIELD': { 'fg': 'green', 'opts': ('bold',) }, |
|
283 | 281 | 'SQL_COLTYPE': { 'fg': 'green' }, |
|
284 | 282 | 'SQL_KEYWORD': { 'fg': 'blue' }, |
|
285 | 283 | 'SQL_TABLE': { 'opts': ('bold',) }, |
|
286 | 284 | 'HTTP_INFO': { 'opts': ('bold',) }, |
|
287 | 285 | 'HTTP_SUCCESS': { }, |
|
288 | 286 | 'HTTP_REDIRECT': { 'fg': 'green', 'opts': ('bold',) }, |
|
289 | 287 | 'HTTP_NOT_MODIFIED': { 'fg': 'green' }, |
|
290 | 288 | 'HTTP_BAD_REQUEST': { 'fg': 'red', 'opts': ('bold',) }, |
|
291 | 289 | 'HTTP_NOT_FOUND': { 'fg': 'red' }, |
|
292 | 290 | 'HTTP_SERVER_ERROR': { 'fg': 'magenta', 'opts': ('bold',) }, |
|
293 | 291 | } |
|
294 | 292 | } |
|
295 | 293 | DEFAULT_PALETTE = DARK_PALETTE |
|
296 | 294 | |
|
297 | 295 | # ---------------------------- # |
|
298 | 296 | # --- End of termcolors.py --- # |
|
299 | 297 | # ---------------------------- # |
|
300 | 298 | |
|
301 | 299 | |
|
302 | 300 | class ColoredProgressBar(ProgressBar): |
|
303 | 301 | |
|
304 | 302 | BAR_COLORS = ( |
|
305 | 303 | (10, 'red'), |
|
306 | 304 | (30, 'magenta'), |
|
307 | 305 | (50, 'yellow'), |
|
308 | 306 | (99, 'green'), |
|
309 | 307 | (100, 'blue'), |
|
310 | 308 | ) |
|
311 | 309 | |
|
312 | 310 | def get_line(self): |
|
313 | 311 | line = super(ColoredProgressBar, self).get_line() |
|
314 | 312 | perc = self.get_percentage() |
|
315 | 313 | if perc > 100: |
|
316 | 314 | color = 'blue' |
|
317 | 315 | for max_perc, color in self.BAR_COLORS: |
|
318 | 316 | if perc <= max_perc: |
|
319 | 317 | break |
|
320 | 318 | return colorize(line, fg=color) |
|
321 | 319 | |
|
322 | 320 | |
|
323 | 321 | class AnimatedProgressBar(ProgressBar): |
|
324 | 322 | |
|
325 | 323 | def get_bar_char(self): |
|
326 | 324 | chars = '-/|\\' |
|
327 | 325 | if self.step >= self.steps: |
|
328 | 326 | return '=' |
|
329 | 327 | return chars[self.step % len(chars)] |
|
330 | 328 | |
|
331 | 329 | |
|
332 | 330 | class BarOnlyProgressBar(ProgressBar): |
|
333 | 331 | |
|
334 | 332 | default_elements = ['bar', 'steps'] |
|
335 | 333 | |
|
336 | 334 | def get_bar(self): |
|
337 | 335 | bar = super(BarOnlyProgressBar, self).get_bar() |
|
338 | 336 | perc = self.get_percentage() |
|
339 | 337 | perc_text = '%s%%' % int(perc) |
|
340 | 338 | text = (' %s%% ' % (perc_text)).center(self.width, '=') |
|
341 | 339 | L = text.find(' ') |
|
342 | 340 | R = text.rfind(' ') |
|
343 | 341 | bar = ' '.join((bar[:L], perc_text, bar[R:])) |
|
344 | 342 | return bar |
|
345 | 343 | |
|
346 | 344 | |
|
347 | 345 | class AnimatedColoredProgressBar(AnimatedProgressBar, |
|
348 | 346 | ColoredProgressBar): |
|
349 | 347 | pass |
|
350 | 348 | |
|
351 | 349 | |
|
352 | 350 | class BarOnlyColoredProgressBar(ColoredProgressBar, |
|
353 | 351 | BarOnlyProgressBar): |
|
354 | 352 | pass |
|
355 | 353 | |
|
356 | 354 | |
|
357 | 355 | def main(): |
|
358 | 356 | import time |
|
359 | 357 | |
|
360 | 358 | print("Standard progress bar...") |
|
361 | 359 | bar = ProgressBar(30) |
|
362 | 360 | for x in range(1, 31): |
|
363 | 361 | bar.render(x) |
|
364 | 362 | time.sleep(0.02) |
|
365 | 363 | bar.stream.write('\n') |
|
366 | 364 | print() |
|
367 | 365 | |
|
368 | 366 | print("Empty bar...") |
|
369 | 367 | bar = ProgressBar(50) |
|
370 | 368 | bar.render(0) |
|
371 | 369 | print() |
|
372 | 370 | print() |
|
373 | 371 | |
|
374 | 372 | print("Colored bar...") |
|
375 | 373 | bar = ColoredProgressBar(20) |
|
376 | 374 | for x in bar: |
|
377 | 375 | time.sleep(0.01) |
|
378 | 376 | print() |
|
379 | 377 | |
|
380 | 378 | print("Animated char bar...") |
|
381 | 379 | bar = AnimatedProgressBar(20) |
|
382 | 380 | for x in bar: |
|
383 | 381 | time.sleep(0.01) |
|
384 | 382 | print() |
|
385 | 383 | |
|
386 | 384 | print("Animated + colored char bar...") |
|
387 | 385 | bar = AnimatedColoredProgressBar(20) |
|
388 | 386 | for x in bar: |
|
389 | 387 | time.sleep(0.01) |
|
390 | 388 | print() |
|
391 | 389 | |
|
392 | 390 | print("Bar only ...") |
|
393 | 391 | bar = BarOnlyProgressBar(20) |
|
394 | 392 | for x in bar: |
|
395 | 393 | time.sleep(0.01) |
|
396 | 394 | print() |
|
397 | 395 | |
|
398 | 396 | print("Colored, longer bar-only, eta, total time ...") |
|
399 | 397 | bar = BarOnlyColoredProgressBar(40) |
|
400 | 398 | bar.width = 60 |
|
401 | 399 | bar.elements += ['time', 'eta'] |
|
402 | 400 | for x in bar: |
|
403 | 401 | time.sleep(0.01) |
|
404 | 402 | print() |
|
405 | 403 | print() |
|
406 | 404 | |
|
407 | 405 | print("File transfer bar, breaks after 2 seconds ...") |
|
408 | 406 | total_bytes = 1024 * 1024 * 2 |
|
409 | 407 | bar = ProgressBar(total_bytes) |
|
410 | 408 | bar.width = 50 |
|
411 | 409 | bar.elements.remove('steps') |
|
412 | 410 | bar.elements += ['transfer', 'time', 'eta', 'speed'] |
|
413 | 411 | for x in range(0, bar.steps, 1024): |
|
414 | 412 | bar.render(x) |
|
415 | 413 | time.sleep(0.01) |
|
416 | 414 | now = datetime.datetime.now() |
|
417 | 415 | if now - bar.started >= datetime.timedelta(seconds=2): |
|
418 | 416 | break |
|
419 | 417 | print() |
|
420 | 418 | print() |
|
421 | 419 | |
|
422 | 420 | |
|
423 | 421 | if __name__ == '__main__': |
|
424 | 422 | main() |
@@ -1,652 +1,650 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | Test suite for vcs push/pull operations. |
|
16 | 16 | |
|
17 | 17 | The tests need Git > 1.8.1. |
|
18 | 18 | |
|
19 | 19 | This file was forked by the Kallithea project in July 2014. |
|
20 | 20 | Original author and date, and relevant copyright and licensing information is below: |
|
21 | 21 | :created_on: Dec 30, 2010 |
|
22 | 22 | :author: marcink |
|
23 | 23 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
24 | 24 | :license: GPLv3, see LICENSE.md for more details. |
|
25 | 25 | |
|
26 | 26 | """ |
|
27 | 27 | |
|
28 | from __future__ import print_function | |
|
29 | ||
|
30 | 28 | import json |
|
31 | 29 | import os |
|
32 | 30 | import re |
|
33 | 31 | import tempfile |
|
34 | 32 | import time |
|
35 | 33 | import urllib.request |
|
36 | 34 | from subprocess import PIPE, Popen |
|
37 | 35 | from tempfile import _RandomNameSequence |
|
38 | 36 | |
|
39 | 37 | import pytest |
|
40 | 38 | |
|
41 | 39 | from kallithea import CONFIG |
|
42 | 40 | from kallithea.lib.utils2 import ascii_bytes, safe_str |
|
43 | 41 | from kallithea.model.db import CacheInvalidation, Repository, Ui, User, UserIpMap, UserLog |
|
44 | 42 | from kallithea.model.meta import Session |
|
45 | 43 | from kallithea.model.ssh_key import SshKeyModel |
|
46 | 44 | from kallithea.model.user import UserModel |
|
47 | 45 | from kallithea.tests import base |
|
48 | 46 | from kallithea.tests.fixture import Fixture |
|
49 | 47 | |
|
50 | 48 | |
|
51 | 49 | DEBUG = True |
|
52 | 50 | HOST = '127.0.0.1:4999' # test host |
|
53 | 51 | |
|
54 | 52 | fixture = Fixture() |
|
55 | 53 | |
|
56 | 54 | |
|
57 | 55 | # Parameterize different kinds of VCS testing - both the kind of VCS and the |
|
58 | 56 | # access method (HTTP/SSH) |
|
59 | 57 | |
|
60 | 58 | # Mixin for using HTTP and SSH URLs |
|
61 | 59 | class HttpVcsTest(object): |
|
62 | 60 | @staticmethod |
|
63 | 61 | def repo_url_param(webserver, repo_name, **kwargs): |
|
64 | 62 | return webserver.repo_url(repo_name, **kwargs) |
|
65 | 63 | |
|
66 | 64 | class SshVcsTest(object): |
|
67 | 65 | public_keys = { |
|
68 | 66 | base.TEST_USER_REGULAR_LOGIN: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUQ== kallithea@localhost', |
|
69 | 67 | base.TEST_USER_ADMIN_LOGIN: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQC6Ycnc2oUZHQnQwuqgZqTTdMDZD7ataf3JM7oG2Fw8JR6cdmz4QZLe5mfDwaFwG2pWHLRpVqzfrD/Pn3rIO++bgCJH5ydczrl1WScfryV1hYMJ/4EzLGM657J1/q5EI+b9SntKjf4ax+KP322L0TNQGbZUHLbfG2MwHMrYBQpHUq== kallithea@localhost', |
|
70 | 68 | } |
|
71 | 69 | |
|
72 | 70 | @classmethod |
|
73 | 71 | def repo_url_param(cls, webserver, repo_name, username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS, client_ip=base.IP_ADDR): |
|
74 | 72 | user = User.get_by_username(username) |
|
75 | 73 | if user.ssh_keys: |
|
76 | 74 | ssh_key = user.ssh_keys[0] |
|
77 | 75 | else: |
|
78 | 76 | sshkeymodel = SshKeyModel() |
|
79 | 77 | ssh_key = sshkeymodel.create(user, 'test key', cls.public_keys[user.username]) |
|
80 | 78 | Session().commit() |
|
81 | 79 | |
|
82 | 80 | return cls._ssh_param(repo_name, user, ssh_key, client_ip) |
|
83 | 81 | |
|
84 | 82 | # Mixins for using Mercurial and Git |
|
85 | 83 | class HgVcsTest(object): |
|
86 | 84 | repo_type = 'hg' |
|
87 | 85 | repo_name = base.HG_REPO |
|
88 | 86 | |
|
89 | 87 | class GitVcsTest(object): |
|
90 | 88 | repo_type = 'git' |
|
91 | 89 | repo_name = base.GIT_REPO |
|
92 | 90 | |
|
93 | 91 | # Combine mixins to give the combinations we want to parameterize tests with |
|
94 | 92 | class HgHttpVcsTest(HgVcsTest, HttpVcsTest): |
|
95 | 93 | pass |
|
96 | 94 | |
|
97 | 95 | class GitHttpVcsTest(GitVcsTest, HttpVcsTest): |
|
98 | 96 | pass |
|
99 | 97 | |
|
100 | 98 | class HgSshVcsTest(HgVcsTest, SshVcsTest): |
|
101 | 99 | @staticmethod |
|
102 | 100 | def _ssh_param(repo_name, user, ssh_key, client_ip): |
|
103 | 101 | # Specify a custom ssh command on the command line |
|
104 | 102 | return r"""--config ui.ssh="bash -c 'SSH_ORIGINAL_COMMAND=\"\$2\" SSH_CONNECTION=\"%s 1024 127.0.0.1 22\" kallithea-cli ssh-serve -c %s %s %s' --" ssh://someuser@somehost/%s""" % ( |
|
105 | 103 | client_ip, |
|
106 | 104 | CONFIG['__file__'], |
|
107 | 105 | user.user_id, |
|
108 | 106 | ssh_key.user_ssh_key_id, |
|
109 | 107 | repo_name) |
|
110 | 108 | |
|
111 | 109 | class GitSshVcsTest(GitVcsTest, SshVcsTest): |
|
112 | 110 | @staticmethod |
|
113 | 111 | def _ssh_param(repo_name, user, ssh_key, client_ip): |
|
114 | 112 | # Set a custom ssh command in the global environment |
|
115 | 113 | os.environ['GIT_SSH_COMMAND'] = r"""bash -c 'SSH_ORIGINAL_COMMAND="$2" SSH_CONNECTION="%s 1024 127.0.0.1 22" kallithea-cli ssh-serve -c %s %s %s' --""" % ( |
|
116 | 114 | client_ip, |
|
117 | 115 | CONFIG['__file__'], |
|
118 | 116 | user.user_id, |
|
119 | 117 | ssh_key.user_ssh_key_id) |
|
120 | 118 | return "ssh://someuser@somehost/%s""" % repo_name |
|
121 | 119 | |
|
122 | 120 | parametrize_vcs_test = base.parametrize('vt', [ |
|
123 | 121 | HgHttpVcsTest, |
|
124 | 122 | GitHttpVcsTest, |
|
125 | 123 | HgSshVcsTest, |
|
126 | 124 | GitSshVcsTest, |
|
127 | 125 | ]) |
|
128 | 126 | parametrize_vcs_test_hg = base.parametrize('vt', [ |
|
129 | 127 | HgHttpVcsTest, |
|
130 | 128 | HgSshVcsTest, |
|
131 | 129 | ]) |
|
132 | 130 | parametrize_vcs_test_http = base.parametrize('vt', [ |
|
133 | 131 | HgHttpVcsTest, |
|
134 | 132 | GitHttpVcsTest, |
|
135 | 133 | ]) |
|
136 | 134 | |
|
137 | 135 | class Command(object): |
|
138 | 136 | |
|
139 | 137 | def __init__(self, cwd): |
|
140 | 138 | self.cwd = cwd |
|
141 | 139 | |
|
142 | 140 | def execute(self, *args, **environ): |
|
143 | 141 | """ |
|
144 | 142 | Runs command on the system with given ``args`` using simple space |
|
145 | 143 | join without safe quoting. |
|
146 | 144 | """ |
|
147 | 145 | command = ' '.join(args) |
|
148 | 146 | ignoreReturnCode = environ.pop('ignoreReturnCode', False) |
|
149 | 147 | if DEBUG: |
|
150 | 148 | print('*** CMD %s ***' % command) |
|
151 | 149 | testenv = dict(os.environ) |
|
152 | 150 | testenv['LANG'] = 'en_US.UTF-8' |
|
153 | 151 | testenv['LANGUAGE'] = 'en_US:en' |
|
154 | 152 | testenv['HGPLAIN'] = '' |
|
155 | 153 | testenv['HGRCPATH'] = '' |
|
156 | 154 | testenv.update(environ) |
|
157 | 155 | p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd, env=testenv) |
|
158 | 156 | stdout, stderr = p.communicate() |
|
159 | 157 | if DEBUG: |
|
160 | 158 | if stdout: |
|
161 | 159 | print('stdout:', stdout) |
|
162 | 160 | if stderr: |
|
163 | 161 | print('stderr:', stderr) |
|
164 | 162 | if not ignoreReturnCode: |
|
165 | 163 | assert p.returncode == 0 |
|
166 | 164 | return safe_str(stdout), safe_str(stderr) |
|
167 | 165 | |
|
168 | 166 | |
|
169 | 167 | def _get_tmp_dir(prefix='vcs_operations-', suffix=''): |
|
170 | 168 | return tempfile.mkdtemp(dir=base.TESTS_TMP_PATH, prefix=prefix, suffix=suffix) |
|
171 | 169 | |
|
172 | 170 | |
|
173 | 171 | def _add_files(vcs, dest_dir, files_no=3): |
|
174 | 172 | """ |
|
175 | 173 | Generate some files, add it to dest_dir repo and push back |
|
176 | 174 | vcs is git or hg and defines what VCS we want to make those files for |
|
177 | 175 | |
|
178 | 176 | :param vcs: |
|
179 | 177 | :param dest_dir: |
|
180 | 178 | """ |
|
181 | 179 | added_file = '%ssetup.py' % next(_RandomNameSequence()) |
|
182 | 180 | open(os.path.join(dest_dir, added_file), 'a').close() |
|
183 | 181 | Command(dest_dir).execute(vcs, 'add', added_file) |
|
184 | 182 | |
|
185 | 183 | email = 'me@example.com' |
|
186 | 184 | if os.name == 'nt': |
|
187 | 185 | author_str = 'User <%s>' % email |
|
188 | 186 | else: |
|
189 | 187 | author_str = 'User ΗΙ―Ια΄ <%s>' % email |
|
190 | 188 | for i in range(files_no): |
|
191 | 189 | cmd = """echo "added_line%s" >> %s""" % (i, added_file) |
|
192 | 190 | Command(dest_dir).execute(cmd) |
|
193 | 191 | if vcs == 'hg': |
|
194 | 192 | cmd = """hg commit -m "committed new %s" -u "%s" "%s" """ % ( |
|
195 | 193 | i, author_str, added_file |
|
196 | 194 | ) |
|
197 | 195 | elif vcs == 'git': |
|
198 | 196 | cmd = """git commit -m "committed new %s" --author "%s" "%s" """ % ( |
|
199 | 197 | i, author_str, added_file |
|
200 | 198 | ) |
|
201 | 199 | # git commit needs EMAIL on some machines |
|
202 | 200 | Command(dest_dir).execute(cmd, EMAIL=email) |
|
203 | 201 | |
|
204 | 202 | def _add_files_and_push(webserver, vt, dest_dir, clone_url, ignoreReturnCode=False, files_no=3): |
|
205 | 203 | _add_files(vt.repo_type, dest_dir, files_no=files_no) |
|
206 | 204 | # PUSH it back |
|
207 | 205 | stdout = stderr = None |
|
208 | 206 | if vt.repo_type == 'hg': |
|
209 | 207 | stdout, stderr = Command(dest_dir).execute('hg push -f --verbose', clone_url, ignoreReturnCode=ignoreReturnCode) |
|
210 | 208 | elif vt.repo_type == 'git': |
|
211 | 209 | stdout, stderr = Command(dest_dir).execute('git push -f --verbose', clone_url, "master", ignoreReturnCode=ignoreReturnCode) |
|
212 | 210 | |
|
213 | 211 | return stdout, stderr |
|
214 | 212 | |
|
215 | 213 | |
|
216 | 214 | def _check_outgoing(vcs, cwd, clone_url): |
|
217 | 215 | if vcs == 'hg': |
|
218 | 216 | # hg removes the password from default URLs, so we have to provide it here via the clone_url |
|
219 | 217 | return Command(cwd).execute('hg -q outgoing', clone_url, ignoreReturnCode=True) |
|
220 | 218 | elif vcs == 'git': |
|
221 | 219 | Command(cwd).execute('git remote update') |
|
222 | 220 | return Command(cwd).execute('git log origin/master..master') |
|
223 | 221 | |
|
224 | 222 | |
|
225 | 223 | def set_anonymous_access(enable=True): |
|
226 | 224 | user = User.get_default_user() |
|
227 | 225 | user.active = enable |
|
228 | 226 | Session().commit() |
|
229 | 227 | if enable != User.get_default_user().active: |
|
230 | 228 | raise Exception('Cannot set anonymous access') |
|
231 | 229 | |
|
232 | 230 | |
|
233 | 231 | #============================================================================== |
|
234 | 232 | # TESTS |
|
235 | 233 | #============================================================================== |
|
236 | 234 | |
|
237 | 235 | |
|
238 | 236 | def _check_proper_git_push(stdout, stderr): |
|
239 | 237 | assert 'fatal' not in stderr |
|
240 | 238 | assert 'rejected' not in stderr |
|
241 | 239 | assert 'Pushing to' in stderr |
|
242 | 240 | assert 'master -> master' in stderr |
|
243 | 241 | |
|
244 | 242 | |
|
245 | 243 | @pytest.mark.usefixtures("test_context_fixture") |
|
246 | 244 | class TestVCSOperations(base.TestController): |
|
247 | 245 | |
|
248 | 246 | @classmethod |
|
249 | 247 | def setup_class(cls): |
|
250 | 248 | # DISABLE ANONYMOUS ACCESS |
|
251 | 249 | set_anonymous_access(False) |
|
252 | 250 | |
|
253 | 251 | @pytest.fixture() |
|
254 | 252 | def testhook_cleanup(self): |
|
255 | 253 | yield |
|
256 | 254 | # remove hook |
|
257 | 255 | for hook in ['prechangegroup', 'pretxnchangegroup', 'preoutgoing', 'changegroup', 'outgoing', 'incoming']: |
|
258 | 256 | entry = Ui.get_by_key('hooks', '%s.testhook' % hook) |
|
259 | 257 | if entry: |
|
260 | 258 | Session().delete(entry) |
|
261 | 259 | Session().commit() |
|
262 | 260 | |
|
263 | 261 | @pytest.fixture(scope="module") |
|
264 | 262 | def testfork(self): |
|
265 | 263 | # create fork so the repo stays untouched |
|
266 | 264 | git_fork_name = '%s_fork%s' % (base.GIT_REPO, next(_RandomNameSequence())) |
|
267 | 265 | fixture.create_fork(base.GIT_REPO, git_fork_name) |
|
268 | 266 | hg_fork_name = '%s_fork%s' % (base.HG_REPO, next(_RandomNameSequence())) |
|
269 | 267 | fixture.create_fork(base.HG_REPO, hg_fork_name) |
|
270 | 268 | return {'git': git_fork_name, 'hg': hg_fork_name} |
|
271 | 269 | |
|
272 | 270 | @parametrize_vcs_test |
|
273 | 271 | def test_clone_repo_by_admin(self, webserver, vt): |
|
274 | 272 | clone_url = vt.repo_url_param(webserver, vt.repo_name) |
|
275 | 273 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir()) |
|
276 | 274 | |
|
277 | 275 | if vt.repo_type == 'git': |
|
278 | 276 | assert 'Cloning into' in stdout + stderr |
|
279 | 277 | assert stderr == '' or stdout == '' |
|
280 | 278 | elif vt.repo_type == 'hg': |
|
281 | 279 | assert 'requesting all changes' in stdout |
|
282 | 280 | assert 'adding changesets' in stdout |
|
283 | 281 | assert 'adding manifests' in stdout |
|
284 | 282 | assert 'adding file changes' in stdout |
|
285 | 283 | assert stderr == '' |
|
286 | 284 | |
|
287 | 285 | @parametrize_vcs_test_http |
|
288 | 286 | def test_clone_wrong_credentials(self, webserver, vt): |
|
289 | 287 | clone_url = vt.repo_url_param(webserver, vt.repo_name, password='bad!') |
|
290 | 288 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True) |
|
291 | 289 | if vt.repo_type == 'git': |
|
292 | 290 | assert 'fatal: Authentication failed' in stderr |
|
293 | 291 | elif vt.repo_type == 'hg': |
|
294 | 292 | assert 'abort: authorization failed' in stderr |
|
295 | 293 | |
|
296 | 294 | def test_clone_git_dir_as_hg(self, webserver): |
|
297 | 295 | clone_url = HgHttpVcsTest.repo_url_param(webserver, base.GIT_REPO) |
|
298 | 296 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute('hg clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True) |
|
299 | 297 | assert 'HTTP Error 404: Not Found' in stderr or "not a valid repository" in stdout and 'abort:' in stderr |
|
300 | 298 | |
|
301 | 299 | def test_clone_hg_repo_as_git(self, webserver): |
|
302 | 300 | clone_url = GitHttpVcsTest.repo_url_param(webserver, base.HG_REPO) |
|
303 | 301 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute('git clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True) |
|
304 | 302 | assert 'not found' in stderr |
|
305 | 303 | |
|
306 | 304 | @parametrize_vcs_test |
|
307 | 305 | def test_clone_non_existing_path(self, webserver, vt): |
|
308 | 306 | clone_url = vt.repo_url_param(webserver, 'trololo') |
|
309 | 307 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True) |
|
310 | 308 | if vt.repo_type == 'git': |
|
311 | 309 | assert 'not found' in stderr or 'abort: Access to %r denied' % 'trololo' in stderr |
|
312 | 310 | elif vt.repo_type == 'hg': |
|
313 | 311 | assert 'HTTP Error 404: Not Found' in stderr or 'abort: no suitable response from remote hg' in stderr and 'remote: abort: Access to %r denied' % 'trololo' in stdout |
|
314 | 312 | |
|
315 | 313 | @parametrize_vcs_test |
|
316 | 314 | def test_push_new_repo(self, webserver, vt): |
|
317 | 315 | # Clear the log so we know what is added |
|
318 | 316 | UserLog.query().delete() |
|
319 | 317 | Session().commit() |
|
320 | 318 | |
|
321 | 319 | # Create an empty server repo using the API |
|
322 | 320 | repo_name = 'new_%s_%s' % (vt.repo_type, next(_RandomNameSequence())) |
|
323 | 321 | usr = User.get_by_username(base.TEST_USER_ADMIN_LOGIN) |
|
324 | 322 | params = { |
|
325 | 323 | "id": 7, |
|
326 | 324 | "api_key": usr.api_key, |
|
327 | 325 | "method": 'create_repo', |
|
328 | 326 | "args": dict(repo_name=repo_name, |
|
329 | 327 | owner=base.TEST_USER_ADMIN_LOGIN, |
|
330 | 328 | repo_type=vt.repo_type), |
|
331 | 329 | } |
|
332 | 330 | req = urllib.request.Request( |
|
333 | 331 | 'http://%s:%s/_admin/api' % webserver.server_address, |
|
334 | 332 | data=ascii_bytes(json.dumps(params)), |
|
335 | 333 | headers={'content-type': 'application/json'}) |
|
336 | 334 | response = urllib.request.urlopen(req) |
|
337 | 335 | result = json.loads(response.read()) |
|
338 | 336 | # Expect something like: |
|
339 | 337 | # {u'result': {u'msg': u'Created new repository `new_XXX`', u'task': None, u'success': True}, u'id': 7, u'error': None} |
|
340 | 338 | assert result['result']['success'] |
|
341 | 339 | |
|
342 | 340 | # Create local clone of the empty server repo |
|
343 | 341 | local_clone_dir = _get_tmp_dir() |
|
344 | 342 | clone_url = vt.repo_url_param(webserver, repo_name) |
|
345 | 343 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, local_clone_dir) |
|
346 | 344 | |
|
347 | 345 | # Make 3 commits and push to the empty server repo. |
|
348 | 346 | # The server repo doesn't have any other heads than the |
|
349 | 347 | # refs/heads/master we are pushing, but the `git log` in the push hook |
|
350 | 348 | # should still list the 3 commits. |
|
351 | 349 | stdout, stderr = _add_files_and_push(webserver, vt, local_clone_dir, clone_url=clone_url) |
|
352 | 350 | if vt.repo_type == 'git': |
|
353 | 351 | _check_proper_git_push(stdout, stderr) |
|
354 | 352 | elif vt.repo_type == 'hg': |
|
355 | 353 | assert 'pushing to ' in stdout |
|
356 | 354 | assert 'remote: added ' in stdout |
|
357 | 355 | |
|
358 | 356 | # Verify that we got the right events in UserLog. Expect something like: |
|
359 | 357 | # <UserLog('id:new_git_XXX:started_following_repo')> |
|
360 | 358 | # <UserLog('id:new_git_XXX:user_created_repo')> |
|
361 | 359 | # <UserLog('id:new_git_XXX:pull')> |
|
362 | 360 | # <UserLog('id:new_git_XXX:push:aed9d4c1732a1927da3be42c47eb9afdc200d427,d38b083a07af10a9f44193486959a96a23db78da,4841ff9a2b385bec995f4679ef649adb3f437622')> |
|
363 | 361 | action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)] |
|
364 | 362 | assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == ([ |
|
365 | 363 | ('started_following_repo', 0), |
|
366 | 364 | ('user_created_repo', 0), |
|
367 | 365 | ('pull', 0), |
|
368 | 366 | ('push', 3)] |
|
369 | 367 | if vt.repo_type == 'git' else [ |
|
370 | 368 | ('started_following_repo', 0), |
|
371 | 369 | ('user_created_repo', 0), |
|
372 | 370 | # (u'pull', 0), # Mercurial outgoing hook is not called for empty clones |
|
373 | 371 | ('push', 3)]) |
|
374 | 372 | |
|
375 | 373 | @parametrize_vcs_test |
|
376 | 374 | def test_push_new_file(self, webserver, testfork, vt): |
|
377 | 375 | UserLog.query().delete() |
|
378 | 376 | Session().commit() |
|
379 | 377 | |
|
380 | 378 | dest_dir = _get_tmp_dir() |
|
381 | 379 | clone_url = vt.repo_url_param(webserver, vt.repo_name) |
|
382 | 380 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir) |
|
383 | 381 | |
|
384 | 382 | clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type]) |
|
385 | 383 | stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, clone_url=clone_url) |
|
386 | 384 | |
|
387 | 385 | if vt.repo_type == 'git': |
|
388 | 386 | _check_proper_git_push(stdout, stderr) |
|
389 | 387 | elif vt.repo_type == 'hg': |
|
390 | 388 | assert 'pushing to' in stdout |
|
391 | 389 | assert 'Repository size' in stdout |
|
392 | 390 | assert 'Last revision is now' in stdout |
|
393 | 391 | |
|
394 | 392 | action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)] |
|
395 | 393 | assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \ |
|
396 | 394 | [('pull', 0), ('push', 3)] |
|
397 | 395 | |
|
398 | 396 | @parametrize_vcs_test |
|
399 | 397 | def test_pull(self, webserver, testfork, vt): |
|
400 | 398 | UserLog.query().delete() |
|
401 | 399 | Session().commit() |
|
402 | 400 | |
|
403 | 401 | dest_dir = _get_tmp_dir() |
|
404 | 402 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'init', dest_dir) |
|
405 | 403 | |
|
406 | 404 | clone_url = vt.repo_url_param(webserver, vt.repo_name) |
|
407 | 405 | stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url) |
|
408 | 406 | |
|
409 | 407 | if vt.repo_type == 'git': |
|
410 | 408 | assert 'FETCH_HEAD' in stderr |
|
411 | 409 | elif vt.repo_type == 'hg': |
|
412 | 410 | assert 'new changesets' in stdout |
|
413 | 411 | |
|
414 | 412 | action_parts = [ul.action for ul in UserLog.query().order_by(UserLog.user_log_id)] |
|
415 | 413 | assert action_parts == ['pull'] |
|
416 | 414 | |
|
417 | 415 | # Test handling of URLs with extra '/' around repo_name |
|
418 | 416 | stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url.replace('/' + vt.repo_name, '/./%s/' % vt.repo_name), ignoreReturnCode=True) |
|
419 | 417 | if issubclass(vt, HttpVcsTest): |
|
420 | 418 | if vt.repo_type == 'git': |
|
421 | 419 | # NOTE: when pulling from http://hostname/./vcs_test_git/ , the git client will normalize that and issue an HTTP request to /vcs_test_git/info/refs |
|
422 | 420 | assert 'Already up to date.' in stdout |
|
423 | 421 | else: |
|
424 | 422 | assert vt.repo_type == 'hg' |
|
425 | 423 | assert "abort: HTTP Error 404: Not Found" in stderr |
|
426 | 424 | else: |
|
427 | 425 | assert issubclass(vt, SshVcsTest) |
|
428 | 426 | if vt.repo_type == 'git': |
|
429 | 427 | assert "abort: Access to './%s' denied" % vt.repo_name in stderr |
|
430 | 428 | else: |
|
431 | 429 | assert "abort: Access to './%s' denied" % vt.repo_name in stdout |
|
432 | 430 | |
|
433 | 431 | stdout, stderr = Command(dest_dir).execute(vt.repo_type, 'pull', clone_url.replace('/' + vt.repo_name, '/%s/' % vt.repo_name), ignoreReturnCode=True) |
|
434 | 432 | if vt.repo_type == 'git': |
|
435 | 433 | assert 'Already up to date.' in stdout |
|
436 | 434 | else: |
|
437 | 435 | assert vt.repo_type == 'hg' |
|
438 | 436 | assert "no changes found" in stdout |
|
439 | 437 | assert "denied" not in stderr |
|
440 | 438 | assert "denied" not in stdout |
|
441 | 439 | assert "404" not in stdout |
|
442 | 440 | |
|
443 | 441 | @parametrize_vcs_test |
|
444 | 442 | def test_push_invalidates_cache(self, webserver, testfork, vt): |
|
445 | 443 | pre_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == testfork[vt.repo_type])] |
|
446 | 444 | |
|
447 | 445 | key = CacheInvalidation.query().filter(CacheInvalidation.cache_key |
|
448 | 446 | == testfork[vt.repo_type]).scalar() |
|
449 | 447 | if not key: |
|
450 | 448 | key = CacheInvalidation(testfork[vt.repo_type], testfork[vt.repo_type]) |
|
451 | 449 | Session().add(key) |
|
452 | 450 | |
|
453 | 451 | key.cache_active = True |
|
454 | 452 | Session().commit() |
|
455 | 453 | |
|
456 | 454 | dest_dir = _get_tmp_dir() |
|
457 | 455 | clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type]) |
|
458 | 456 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir) |
|
459 | 457 | |
|
460 | 458 | stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, files_no=1, clone_url=clone_url) |
|
461 | 459 | |
|
462 | 460 | if vt.repo_type == 'git': |
|
463 | 461 | _check_proper_git_push(stdout, stderr) |
|
464 | 462 | |
|
465 | 463 | post_cached_tip = [repo.get_api_data()['last_changeset']['short_id'] for repo in Repository.query().filter(Repository.repo_name == testfork[vt.repo_type])] |
|
466 | 464 | assert pre_cached_tip != post_cached_tip |
|
467 | 465 | |
|
468 | 466 | key = CacheInvalidation.query().filter(CacheInvalidation.cache_key |
|
469 | 467 | == testfork[vt.repo_type]).all() |
|
470 | 468 | assert key == [] |
|
471 | 469 | |
|
472 | 470 | @parametrize_vcs_test_http |
|
473 | 471 | def test_push_wrong_credentials(self, webserver, vt): |
|
474 | 472 | dest_dir = _get_tmp_dir() |
|
475 | 473 | clone_url = vt.repo_url_param(webserver, vt.repo_name) |
|
476 | 474 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir) |
|
477 | 475 | |
|
478 | 476 | clone_url = webserver.repo_url(vt.repo_name, username='bad', password='name') |
|
479 | 477 | stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, |
|
480 | 478 | clone_url=clone_url, ignoreReturnCode=True) |
|
481 | 479 | |
|
482 | 480 | if vt.repo_type == 'git': |
|
483 | 481 | assert 'fatal: Authentication failed' in stderr |
|
484 | 482 | elif vt.repo_type == 'hg': |
|
485 | 483 | assert 'abort: authorization failed' in stderr |
|
486 | 484 | |
|
487 | 485 | @parametrize_vcs_test |
|
488 | 486 | def test_push_with_readonly_credentials(self, webserver, vt): |
|
489 | 487 | UserLog.query().delete() |
|
490 | 488 | Session().commit() |
|
491 | 489 | |
|
492 | 490 | dest_dir = _get_tmp_dir() |
|
493 | 491 | clone_url = vt.repo_url_param(webserver, vt.repo_name, username=base.TEST_USER_REGULAR_LOGIN, password=base.TEST_USER_REGULAR_PASS) |
|
494 | 492 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir) |
|
495 | 493 | |
|
496 | 494 | stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, ignoreReturnCode=True, clone_url=clone_url) |
|
497 | 495 | |
|
498 | 496 | if vt.repo_type == 'git': |
|
499 | 497 | assert 'The requested URL returned error: 403' in stderr or 'abort: Push access to %r denied' % str(vt.repo_name) in stderr |
|
500 | 498 | elif vt.repo_type == 'hg': |
|
501 | 499 | assert 'abort: HTTP Error 403: Forbidden' in stderr or 'abort: push failed on remote' in stderr and 'remote: Push access to %r denied' % str(vt.repo_name) in stdout |
|
502 | 500 | |
|
503 | 501 | action_parts = [ul.action.split(':', 1) for ul in UserLog.query().order_by(UserLog.user_log_id)] |
|
504 | 502 | assert [(t[0], (t[1].count(',') + 1) if len(t) == 2 else 0) for t in action_parts] == \ |
|
505 | 503 | [('pull', 0)] |
|
506 | 504 | |
|
507 | 505 | @parametrize_vcs_test |
|
508 | 506 | def test_push_back_to_wrong_url(self, webserver, vt): |
|
509 | 507 | dest_dir = _get_tmp_dir() |
|
510 | 508 | clone_url = vt.repo_url_param(webserver, vt.repo_name) |
|
511 | 509 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir) |
|
512 | 510 | |
|
513 | 511 | stdout, stderr = _add_files_and_push( |
|
514 | 512 | webserver, vt, dest_dir, clone_url='http://%s:%s/tmp' % ( |
|
515 | 513 | webserver.server_address[0], webserver.server_address[1]), |
|
516 | 514 | ignoreReturnCode=True) |
|
517 | 515 | |
|
518 | 516 | if vt.repo_type == 'git': |
|
519 | 517 | assert 'not found' in stderr |
|
520 | 518 | elif vt.repo_type == 'hg': |
|
521 | 519 | assert 'HTTP Error 404: Not Found' in stderr |
|
522 | 520 | |
|
523 | 521 | @parametrize_vcs_test |
|
524 | 522 | def test_ip_restriction(self, webserver, vt): |
|
525 | 523 | user_model = UserModel() |
|
526 | 524 | try: |
|
527 | 525 | # Add IP constraint that excludes the test context: |
|
528 | 526 | user_model.add_extra_ip(base.TEST_USER_ADMIN_LOGIN, '10.10.10.10/32') |
|
529 | 527 | Session().commit() |
|
530 | 528 | # IP permissions are cached, need to wait for the cache in the server process to expire |
|
531 | 529 | time.sleep(1.5) |
|
532 | 530 | clone_url = vt.repo_url_param(webserver, vt.repo_name) |
|
533 | 531 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir(), ignoreReturnCode=True) |
|
534 | 532 | if vt.repo_type == 'git': |
|
535 | 533 | # The message apparently changed in Git 1.8.3, so match it loosely. |
|
536 | 534 | assert re.search(r'\b403\b', stderr) or 'abort: User test_admin from 127.0.0.127 cannot be authorized' in stderr |
|
537 | 535 | elif vt.repo_type == 'hg': |
|
538 | 536 | assert 'abort: HTTP Error 403: Forbidden' in stderr or 'remote: abort: User test_admin from 127.0.0.127 cannot be authorized' in stdout |
|
539 | 537 | finally: |
|
540 | 538 | # release IP restrictions |
|
541 | 539 | for ip in UserIpMap.query(): |
|
542 | 540 | UserIpMap.delete(ip.ip_id) |
|
543 | 541 | Session().commit() |
|
544 | 542 | # IP permissions are cached, need to wait for the cache in the server process to expire |
|
545 | 543 | time.sleep(1.5) |
|
546 | 544 | |
|
547 | 545 | clone_url = vt.repo_url_param(webserver, vt.repo_name) |
|
548 | 546 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, _get_tmp_dir()) |
|
549 | 547 | |
|
550 | 548 | if vt.repo_type == 'git': |
|
551 | 549 | assert 'Cloning into' in stdout + stderr |
|
552 | 550 | assert stderr == '' or stdout == '' |
|
553 | 551 | elif vt.repo_type == 'hg': |
|
554 | 552 | assert 'requesting all changes' in stdout |
|
555 | 553 | assert 'adding changesets' in stdout |
|
556 | 554 | assert 'adding manifests' in stdout |
|
557 | 555 | assert 'adding file changes' in stdout |
|
558 | 556 | |
|
559 | 557 | assert stderr == '' |
|
560 | 558 | |
|
561 | 559 | @parametrize_vcs_test_hg # git hooks doesn't work like hg hooks |
|
562 | 560 | def test_custom_hooks_preoutgoing(self, testhook_cleanup, webserver, testfork, vt): |
|
563 | 561 | # set prechangegroup to failing hook (returns True) |
|
564 | 562 | Ui.create_or_update_hook('preoutgoing.testhook', 'python:kallithea.tests.fixture.failing_test_hook') |
|
565 | 563 | Session().commit() |
|
566 | 564 | # clone repo |
|
567 | 565 | clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS) |
|
568 | 566 | dest_dir = _get_tmp_dir() |
|
569 | 567 | stdout, stderr = Command(base.TESTS_TMP_PATH) \ |
|
570 | 568 | .execute(vt.repo_type, 'clone', clone_url, dest_dir, ignoreReturnCode=True) |
|
571 | 569 | if vt.repo_type == 'hg': |
|
572 | 570 | assert 'preoutgoing.testhook hook failed' in stdout |
|
573 | 571 | elif vt.repo_type == 'git': |
|
574 | 572 | assert 'error: 406' in stderr |
|
575 | 573 | |
|
576 | 574 | @parametrize_vcs_test_hg # git hooks doesn't work like hg hooks |
|
577 | 575 | def test_custom_hooks_prechangegroup(self, testhook_cleanup, webserver, testfork, vt): |
|
578 | 576 | # set prechangegroup to failing hook (returns exit code 1) |
|
579 | 577 | Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.failing_test_hook') |
|
580 | 578 | Session().commit() |
|
581 | 579 | # clone repo |
|
582 | 580 | clone_url = vt.repo_url_param(webserver, testfork[vt.repo_type], username=base.TEST_USER_ADMIN_LOGIN, password=base.TEST_USER_ADMIN_PASS) |
|
583 | 581 | dest_dir = _get_tmp_dir() |
|
584 | 582 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute(vt.repo_type, 'clone', clone_url, dest_dir) |
|
585 | 583 | |
|
586 | 584 | stdout, stderr = _add_files_and_push(webserver, vt, dest_dir, clone_url, |
|
587 | 585 | ignoreReturnCode=True) |
|
588 | 586 | assert 'failing_test_hook failed' in stdout + stderr |
|
589 | 587 | assert 'Traceback' not in stdout + stderr |
|
590 | 588 | assert 'prechangegroup.testhook hook failed' in stdout + stderr |
|
591 | 589 | # there are still outgoing changesets |
|
592 | 590 | stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url) |
|
593 | 591 | assert stdout != '' |
|
594 | 592 | |
|
595 | 593 | # set prechangegroup hook to exception throwing method |
|
596 | 594 | Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.exception_test_hook') |
|
597 | 595 | Session().commit() |
|
598 | 596 | # re-try to push |
|
599 | 597 | stdout, stderr = Command(dest_dir).execute('%s push' % vt.repo_type, clone_url, ignoreReturnCode=True) |
|
600 | 598 | if vt is HgHttpVcsTest: |
|
601 | 599 | # like with 'hg serve...' 'HTTP Error 500: INTERNAL SERVER ERROR' should be returned |
|
602 | 600 | assert 'HTTP Error 500: INTERNAL SERVER ERROR' in stderr |
|
603 | 601 | elif vt is HgSshVcsTest: |
|
604 | 602 | assert 'remote: Exception: exception_test_hook threw an exception' in stdout |
|
605 | 603 | else: assert False |
|
606 | 604 | # there are still outgoing changesets |
|
607 | 605 | stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url) |
|
608 | 606 | assert stdout != '' |
|
609 | 607 | |
|
610 | 608 | # set prechangegroup hook to method that returns False |
|
611 | 609 | Ui.create_or_update_hook('prechangegroup.testhook', 'python:kallithea.tests.fixture.passing_test_hook') |
|
612 | 610 | Session().commit() |
|
613 | 611 | # re-try to push |
|
614 | 612 | stdout, stderr = Command(dest_dir).execute('%s push' % vt.repo_type, clone_url, ignoreReturnCode=True) |
|
615 | 613 | assert 'passing_test_hook succeeded' in stdout + stderr |
|
616 | 614 | assert 'Traceback' not in stdout + stderr |
|
617 | 615 | assert 'prechangegroup.testhook hook failed' not in stdout + stderr |
|
618 | 616 | # no more outgoing changesets |
|
619 | 617 | stdout, stderr = _check_outgoing(vt.repo_type, dest_dir, clone_url) |
|
620 | 618 | assert stdout == '' |
|
621 | 619 | assert stderr == '' |
|
622 | 620 | |
|
623 | 621 | def test_add_submodule_git(self, webserver, testfork): |
|
624 | 622 | dest_dir = _get_tmp_dir() |
|
625 | 623 | clone_url = GitHttpVcsTest.repo_url_param(webserver, base.GIT_REPO) |
|
626 | 624 | |
|
627 | 625 | fork_url = GitHttpVcsTest.repo_url_param(webserver, testfork['git']) |
|
628 | 626 | |
|
629 | 627 | # add submodule |
|
630 | 628 | stdout, stderr = Command(base.TESTS_TMP_PATH).execute('git clone', fork_url, dest_dir) |
|
631 | 629 | stdout, stderr = Command(dest_dir).execute('git submodule add', clone_url, 'testsubmodule') |
|
632 | 630 | stdout, stderr = Command(dest_dir).execute('git commit -am "added testsubmodule pointing to', clone_url, '"', EMAIL=base.TEST_USER_ADMIN_EMAIL) |
|
633 | 631 | stdout, stderr = Command(dest_dir).execute('git push', fork_url, 'master') |
|
634 | 632 | |
|
635 | 633 | # check for testsubmodule link in files page |
|
636 | 634 | self.log_user() |
|
637 | 635 | response = self.app.get(base.url(controller='files', action='index', |
|
638 | 636 | repo_name=testfork['git'], |
|
639 | 637 | revision='tip', |
|
640 | 638 | f_path='/')) |
|
641 | 639 | # check _repo_files_url that will be used to reload as AJAX |
|
642 | 640 | response.mustcontain('var _repo_files_url = ("/%s/files/");' % testfork['git']) |
|
643 | 641 | |
|
644 | 642 | response.mustcontain('<a class="submodule-dir" href="%s" target="_blank"><i class="icon-file-submodule"></i><span>testsubmodule @ ' % clone_url) |
|
645 | 643 | |
|
646 | 644 | # check that following a submodule link actually works - and redirects |
|
647 | 645 | response = self.app.get(base.url(controller='files', action='index', |
|
648 | 646 | repo_name=testfork['git'], |
|
649 | 647 | revision='tip', |
|
650 | 648 | f_path='/testsubmodule'), |
|
651 | 649 | status=302) |
|
652 | 650 | assert response.location == clone_url |
@@ -1,215 +1,213 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | # This program is free software: you can redistribute it and/or modify |
|
3 | 3 | # it under the terms of the GNU General Public License as published by |
|
4 | 4 | # the Free Software Foundation, either version 3 of the License, or |
|
5 | 5 | # (at your option) any later version. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | """ |
|
15 | 15 | kallithea.tests.scripts.manual_test_concurrency |
|
16 | 16 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
17 | 17 | |
|
18 | 18 | Test suite for making push/pull operations |
|
19 | 19 | |
|
20 | 20 | This file was forked by the Kallithea project in July 2014. |
|
21 | 21 | Original author and date, and relevant copyright and licensing information is below: |
|
22 | 22 | :created_on: Dec 30, 2010 |
|
23 | 23 | :author: marcink |
|
24 | 24 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
25 | 25 | :license: GPLv3, see LICENSE.md for more details. |
|
26 | 26 | |
|
27 | 27 | """ |
|
28 | 28 | |
|
29 | from __future__ import print_function | |
|
30 | ||
|
31 | 29 | import logging |
|
32 | 30 | import os |
|
33 | 31 | import shutil |
|
34 | 32 | import sys |
|
35 | 33 | import tempfile |
|
36 | 34 | from os.path import dirname |
|
37 | 35 | from subprocess import PIPE, Popen |
|
38 | 36 | |
|
39 | 37 | from paste.deploy import appconfig |
|
40 | 38 | from sqlalchemy import engine_from_config |
|
41 | 39 | |
|
42 | 40 | from kallithea.config.environment import load_environment |
|
43 | 41 | from kallithea.lib.auth import get_crypt_password |
|
44 | 42 | from kallithea.model import meta |
|
45 | 43 | from kallithea.model.base import init_model |
|
46 | 44 | from kallithea.model.db import Repository, Ui, User |
|
47 | 45 | from kallithea.tests.base import HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS |
|
48 | 46 | |
|
49 | 47 | |
|
50 | 48 | rel_path = dirname(dirname(dirname(dirname(os.path.abspath(__file__))))) |
|
51 | 49 | conf = appconfig('config:development.ini', relative_to=rel_path) |
|
52 | 50 | load_environment(conf.global_conf, conf.local_conf) |
|
53 | 51 | |
|
54 | 52 | USER = TEST_USER_ADMIN_LOGIN |
|
55 | 53 | PASS = TEST_USER_ADMIN_PASS |
|
56 | 54 | HOST = 'server.local' |
|
57 | 55 | METHOD = 'pull' |
|
58 | 56 | DEBUG = True |
|
59 | 57 | log = logging.getLogger(__name__) |
|
60 | 58 | |
|
61 | 59 | |
|
62 | 60 | class Command(object): |
|
63 | 61 | |
|
64 | 62 | def __init__(self, cwd): |
|
65 | 63 | self.cwd = cwd |
|
66 | 64 | |
|
67 | 65 | def execute(self, cmd, *args): |
|
68 | 66 | """Runs command on the system with given ``args``. |
|
69 | 67 | """ |
|
70 | 68 | |
|
71 | 69 | command = cmd + ' ' + ' '.join(args) |
|
72 | 70 | log.debug('Executing %s', command) |
|
73 | 71 | if DEBUG: |
|
74 | 72 | print(command) |
|
75 | 73 | p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE, cwd=self.cwd) |
|
76 | 74 | stdout, stderr = p.communicate() |
|
77 | 75 | if DEBUG: |
|
78 | 76 | print(stdout, stderr) |
|
79 | 77 | return stdout, stderr |
|
80 | 78 | |
|
81 | 79 | |
|
82 | 80 | def get_session(): |
|
83 | 81 | engine = engine_from_config(conf, 'sqlalchemy.') |
|
84 | 82 | init_model(engine) |
|
85 | 83 | sa = meta.Session |
|
86 | 84 | return sa |
|
87 | 85 | |
|
88 | 86 | |
|
89 | 87 | def create_test_user(force=True): |
|
90 | 88 | print('creating test user') |
|
91 | 89 | sa = get_session() |
|
92 | 90 | |
|
93 | 91 | user = sa.query(User).filter(User.username == USER).scalar() |
|
94 | 92 | |
|
95 | 93 | if force and user is not None: |
|
96 | 94 | print('removing current user') |
|
97 | 95 | for repo in sa.query(Repository).filter(Repository.user == user).all(): |
|
98 | 96 | sa.delete(repo) |
|
99 | 97 | sa.delete(user) |
|
100 | 98 | sa.commit() |
|
101 | 99 | |
|
102 | 100 | if user is None or force: |
|
103 | 101 | print('creating new one') |
|
104 | 102 | new_usr = User() |
|
105 | 103 | new_usr.username = USER |
|
106 | 104 | new_usr.password = get_crypt_password(PASS) |
|
107 | 105 | new_usr.email = 'mail@example.com' |
|
108 | 106 | new_usr.name = 'test' |
|
109 | 107 | new_usr.lastname = 'lasttestname' |
|
110 | 108 | new_usr.active = True |
|
111 | 109 | new_usr.admin = True |
|
112 | 110 | sa.add(new_usr) |
|
113 | 111 | sa.commit() |
|
114 | 112 | |
|
115 | 113 | print('done') |
|
116 | 114 | |
|
117 | 115 | |
|
118 | 116 | def create_test_repo(force=True): |
|
119 | 117 | print('creating test repo') |
|
120 | 118 | from kallithea.model.repo import RepoModel |
|
121 | 119 | sa = get_session() |
|
122 | 120 | |
|
123 | 121 | user = sa.query(User).filter(User.username == USER).scalar() |
|
124 | 122 | if user is None: |
|
125 | 123 | raise Exception('user not found') |
|
126 | 124 | |
|
127 | 125 | repo = sa.query(Repository).filter(Repository.repo_name == HG_REPO).scalar() |
|
128 | 126 | |
|
129 | 127 | if repo is None: |
|
130 | 128 | print('repo not found creating') |
|
131 | 129 | |
|
132 | 130 | form_data = {'repo_name': HG_REPO, |
|
133 | 131 | 'repo_type': 'hg', |
|
134 | 132 | 'private': False, |
|
135 | 133 | 'clone_uri': ''} |
|
136 | 134 | rm = RepoModel() |
|
137 | 135 | rm.base_path = '/home/hg' |
|
138 | 136 | rm.create(form_data, user) |
|
139 | 137 | |
|
140 | 138 | print('done') |
|
141 | 139 | |
|
142 | 140 | |
|
143 | 141 | def set_anonymous_access(enable=True): |
|
144 | 142 | sa = get_session() |
|
145 | 143 | user = sa.query(User).filter(User.username == 'default').one() |
|
146 | 144 | user.active = enable |
|
147 | 145 | sa.add(user) |
|
148 | 146 | sa.commit() |
|
149 | 147 | |
|
150 | 148 | |
|
151 | 149 | def get_anonymous_access(): |
|
152 | 150 | sa = get_session() |
|
153 | 151 | return sa.query(User).filter(User.username == 'default').one().active |
|
154 | 152 | |
|
155 | 153 | |
|
156 | 154 | #============================================================================== |
|
157 | 155 | # TESTS |
|
158 | 156 | #============================================================================== |
|
159 | 157 | def test_clone_with_credentials(no_errors=False, repo=HG_REPO, method=METHOD, |
|
160 | 158 | backend='hg'): |
|
161 | 159 | cwd = path = os.path.join(Ui.get_by_key('paths', '/').ui_value, repo) |
|
162 | 160 | |
|
163 | 161 | try: |
|
164 | 162 | shutil.rmtree(path, ignore_errors=True) |
|
165 | 163 | os.makedirs(path) |
|
166 | 164 | #print 'made dirs %s' % os.path.join(path) |
|
167 | 165 | except OSError: |
|
168 | 166 | raise |
|
169 | 167 | |
|
170 | 168 | clone_url = 'http://%(user)s:%(pass)s@%(host)s/%(cloned_repo)s' % \ |
|
171 | 169 | {'user': USER, |
|
172 | 170 | 'pass': PASS, |
|
173 | 171 | 'host': HOST, |
|
174 | 172 | 'cloned_repo': repo, } |
|
175 | 173 | |
|
176 | 174 | dest = tempfile.mktemp(dir=path, prefix='dest-') |
|
177 | 175 | if method == 'pull': |
|
178 | 176 | stdout, stderr = Command(cwd).execute(backend, method, '--cwd', dest, clone_url) |
|
179 | 177 | else: |
|
180 | 178 | stdout, stderr = Command(cwd).execute(backend, method, clone_url, dest) |
|
181 | 179 | if not no_errors: |
|
182 | 180 | if backend == 'hg': |
|
183 | 181 | assert """adding file changes""" in stdout, 'no messages about cloning' |
|
184 | 182 | assert """abort""" not in stderr, 'got error from clone' |
|
185 | 183 | elif backend == 'git': |
|
186 | 184 | assert """Cloning into""" in stdout, 'no messages about cloning' |
|
187 | 185 | |
|
188 | 186 | |
|
189 | 187 | if __name__ == '__main__': |
|
190 | 188 | try: |
|
191 | 189 | create_test_user(force=False) |
|
192 | 190 | import time |
|
193 | 191 | |
|
194 | 192 | try: |
|
195 | 193 | METHOD = sys.argv[3] |
|
196 | 194 | except IndexError: |
|
197 | 195 | pass |
|
198 | 196 | |
|
199 | 197 | try: |
|
200 | 198 | backend = sys.argv[4] |
|
201 | 199 | except IndexError: |
|
202 | 200 | backend = 'hg' |
|
203 | 201 | |
|
204 | 202 | if METHOD == 'pull': |
|
205 | 203 | seq = next(tempfile._RandomNameSequence()) |
|
206 | 204 | test_clone_with_credentials(repo=sys.argv[1], method='clone', |
|
207 | 205 | backend=backend) |
|
208 | 206 | s = time.time() |
|
209 | 207 | for i in range(1, int(sys.argv[2]) + 1): |
|
210 | 208 | print('take', i) |
|
211 | 209 | test_clone_with_credentials(repo=sys.argv[1], method=METHOD, |
|
212 | 210 | backend=backend) |
|
213 | 211 | print('time taken %.3f' % (time.time() - s)) |
|
214 | 212 | except Exception as e: |
|
215 | 213 | sys.exit('stop on %s' % e) |
@@ -1,193 +1,191 b'' | |||
|
1 | 1 | #!/usr/bin/env python3 |
|
2 | 2 | # -*- coding: utf-8 -*- |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU General Public License as published by |
|
5 | 5 | # the Free Software Foundation, either version 3 of the License, or |
|
6 | 6 | # (at your option) any later version. |
|
7 | 7 | # |
|
8 | 8 | # This program is distributed in the hope that it will be useful, |
|
9 | 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | 11 | # GNU General Public License for more details. |
|
12 | 12 | # |
|
13 | 13 | # You should have received a copy of the GNU General Public License |
|
14 | 14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | 15 | """ |
|
16 | 16 | kallithea.tests.scripts.manual_test_crawler |
|
17 | 17 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
|
18 | 18 | |
|
19 | 19 | Test for crawling a project for memory usage |
|
20 | 20 | This should be runned just as regular script together |
|
21 | 21 | with a watch script that will show memory usage. |
|
22 | 22 | |
|
23 | 23 | watch -n1 ./kallithea/tests/mem_watch |
|
24 | 24 | |
|
25 | 25 | This file was forked by the Kallithea project in July 2014. |
|
26 | 26 | Original author and date, and relevant copyright and licensing information is below: |
|
27 | 27 | :created_on: Apr 21, 2010 |
|
28 | 28 | :author: marcink |
|
29 | 29 | :copyright: (c) 2013 RhodeCode GmbH, and others. |
|
30 | 30 | :license: GPLv3, see LICENSE.md for more details. |
|
31 | 31 | """ |
|
32 | 32 | |
|
33 | from __future__ import print_function | |
|
34 | ||
|
35 | 33 | import http.cookiejar |
|
36 | 34 | import os |
|
37 | 35 | import sys |
|
38 | 36 | import tempfile |
|
39 | 37 | import time |
|
40 | 38 | import urllib.parse |
|
41 | 39 | import urllib.request |
|
42 | 40 | from os.path import dirname |
|
43 | 41 | |
|
44 | 42 | from kallithea.lib import vcs |
|
45 | 43 | from kallithea.lib.compat import OrderedSet |
|
46 | 44 | from kallithea.lib.vcs.exceptions import RepositoryError |
|
47 | 45 | |
|
48 | 46 | |
|
49 | 47 | __here__ = os.path.abspath(__file__) |
|
50 | 48 | __root__ = dirname(dirname(dirname(__here__))) |
|
51 | 49 | sys.path.append(__root__) |
|
52 | 50 | |
|
53 | 51 | |
|
54 | 52 | PASES = 3 |
|
55 | 53 | HOST = 'http://127.0.0.1' |
|
56 | 54 | PORT = 5000 |
|
57 | 55 | BASE_URI = '%s:%s/' % (HOST, PORT) |
|
58 | 56 | |
|
59 | 57 | if len(sys.argv) == 2: |
|
60 | 58 | BASE_URI = sys.argv[1] |
|
61 | 59 | |
|
62 | 60 | if not BASE_URI.endswith('/'): |
|
63 | 61 | BASE_URI += '/' |
|
64 | 62 | |
|
65 | 63 | print('Crawling @ %s' % BASE_URI) |
|
66 | 64 | BASE_URI += '%s' |
|
67 | 65 | PROJECT_PATH = os.path.join('/', 'home', 'username', 'repos') |
|
68 | 66 | PROJECTS = [ |
|
69 | 67 | # 'linux-magx-pbranch', |
|
70 | 68 | 'CPython', |
|
71 | 69 | 'kallithea', |
|
72 | 70 | ] |
|
73 | 71 | |
|
74 | 72 | |
|
75 | 73 | cj = http.cookiejar.FileCookieJar(os.path.join(tempfile.gettempdir(), 'rc_test_cookie.txt')) |
|
76 | 74 | o = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj)) |
|
77 | 75 | o.addheaders = [ |
|
78 | 76 | ('User-agent', 'kallithea-crawler'), |
|
79 | 77 | ('Accept-Language', 'en - us, en;q = 0.5') |
|
80 | 78 | ] |
|
81 | 79 | |
|
82 | 80 | urllib.request.install_opener(o) |
|
83 | 81 | |
|
84 | 82 | |
|
85 | 83 | def _get_repo(proj): |
|
86 | 84 | if isinstance(proj, str): |
|
87 | 85 | repo = vcs.get_repo(os.path.join(PROJECT_PATH, proj)) |
|
88 | 86 | proj = proj |
|
89 | 87 | else: |
|
90 | 88 | repo = proj |
|
91 | 89 | proj = repo.name |
|
92 | 90 | |
|
93 | 91 | return repo, proj |
|
94 | 92 | |
|
95 | 93 | |
|
96 | 94 | def test_changelog_walk(proj, pages=100): |
|
97 | 95 | repo, proj = _get_repo(proj) |
|
98 | 96 | |
|
99 | 97 | total_time = 0 |
|
100 | 98 | for i in range(1, pages): |
|
101 | 99 | |
|
102 | 100 | page = '/'.join((proj, 'changelog',)) |
|
103 | 101 | |
|
104 | 102 | full_uri = (BASE_URI % page) + '?' + urllib.parse.urlencode({'page': i}) |
|
105 | 103 | s = time.time() |
|
106 | 104 | f = o.open(full_uri) |
|
107 | 105 | |
|
108 | 106 | assert f.url == full_uri, 'URL:%s does not match %s' % (f.url, full_uri) |
|
109 | 107 | |
|
110 | 108 | size = len(f.read()) |
|
111 | 109 | e = time.time() - s |
|
112 | 110 | total_time += e |
|
113 | 111 | print('visited %s size:%s req:%s ms' % (full_uri, size, e)) |
|
114 | 112 | |
|
115 | 113 | print('total_time', total_time) |
|
116 | 114 | print('average on req', total_time / float(pages)) |
|
117 | 115 | |
|
118 | 116 | |
|
119 | 117 | def test_changeset_walk(proj, limit=None): |
|
120 | 118 | repo, proj = _get_repo(proj) |
|
121 | 119 | |
|
122 | 120 | print('processing', os.path.join(PROJECT_PATH, proj)) |
|
123 | 121 | total_time = 0 |
|
124 | 122 | |
|
125 | 123 | cnt = 0 |
|
126 | 124 | for i in repo: |
|
127 | 125 | cnt += 1 |
|
128 | 126 | raw_cs = '/'.join((proj, 'changeset', i.raw_id)) |
|
129 | 127 | if limit and limit == cnt: |
|
130 | 128 | break |
|
131 | 129 | |
|
132 | 130 | full_uri = (BASE_URI % raw_cs) |
|
133 | 131 | print('%s visiting %s/%s' % (cnt, full_uri, i)) |
|
134 | 132 | s = time.time() |
|
135 | 133 | f = o.open(full_uri) |
|
136 | 134 | size = len(f.read()) |
|
137 | 135 | e = time.time() - s |
|
138 | 136 | total_time += e |
|
139 | 137 | print('%s visited %s/%s size:%s req:%s ms' % (cnt, full_uri, i, size, e)) |
|
140 | 138 | |
|
141 | 139 | print('total_time', total_time) |
|
142 | 140 | print('average on req', total_time / float(cnt)) |
|
143 | 141 | |
|
144 | 142 | |
|
145 | 143 | def test_files_walk(proj, limit=100): |
|
146 | 144 | repo, proj = _get_repo(proj) |
|
147 | 145 | |
|
148 | 146 | print('processing', os.path.join(PROJECT_PATH, proj)) |
|
149 | 147 | total_time = 0 |
|
150 | 148 | |
|
151 | 149 | paths_ = OrderedSet(['']) |
|
152 | 150 | try: |
|
153 | 151 | tip = repo.get_changeset('tip') |
|
154 | 152 | for topnode, dirs, files in tip.walk('/'): |
|
155 | 153 | |
|
156 | 154 | for dir in dirs: |
|
157 | 155 | paths_.add(dir.path) |
|
158 | 156 | for f in dir: |
|
159 | 157 | paths_.add(f.path) |
|
160 | 158 | |
|
161 | 159 | for f in files: |
|
162 | 160 | paths_.add(f.path) |
|
163 | 161 | |
|
164 | 162 | except RepositoryError as e: |
|
165 | 163 | pass |
|
166 | 164 | |
|
167 | 165 | cnt = 0 |
|
168 | 166 | for f in paths_: |
|
169 | 167 | cnt += 1 |
|
170 | 168 | if limit and limit == cnt: |
|
171 | 169 | break |
|
172 | 170 | |
|
173 | 171 | file_path = '/'.join((proj, 'files', 'tip', f)) |
|
174 | 172 | full_uri = (BASE_URI % file_path) |
|
175 | 173 | print('%s visiting %s' % (cnt, full_uri)) |
|
176 | 174 | s = time.time() |
|
177 | 175 | f = o.open(full_uri) |
|
178 | 176 | size = len(f.read()) |
|
179 | 177 | e = time.time() - s |
|
180 | 178 | total_time += e |
|
181 | 179 | print('%s visited OK size:%s req:%s ms' % (cnt, size, e)) |
|
182 | 180 | |
|
183 | 181 | print('total_time', total_time) |
|
184 | 182 | print('average on req', total_time / float(cnt)) |
|
185 | 183 | |
|
186 | 184 | if __name__ == '__main__': |
|
187 | 185 | for path in PROJECTS: |
|
188 | 186 | repo = vcs.get_repo(os.path.join(PROJECT_PATH, path)) |
|
189 | 187 | for i in range(PASES): |
|
190 | 188 | print('PASS %s/%s' % (i, PASES)) |
|
191 | 189 | test_changelog_walk(repo, pages=80) |
|
192 | 190 | test_changeset_walk(repo, limit=100) |
|
193 | 191 | test_files_walk(repo, limit=100) |
@@ -1,84 +1,82 b'' | |||
|
1 | 1 | #!/usr/bin/env python3 |
|
2 | 2 | |
|
3 | 3 | """ |
|
4 | 4 | Consistent formatting of rst section titles |
|
5 | 5 | """ |
|
6 | 6 | |
|
7 | from __future__ import print_function | |
|
8 | ||
|
9 | 7 | import re |
|
10 | 8 | import subprocess |
|
11 | 9 | |
|
12 | 10 | |
|
13 | 11 | spaces = [ |
|
14 | 12 | (0, 1), # we assume this is a over-and-underlined header |
|
15 | 13 | (2, 1), |
|
16 | 14 | (1, 1), |
|
17 | 15 | (1, 0), |
|
18 | 16 | (1, 0), |
|
19 | 17 | ] |
|
20 | 18 | |
|
21 | 19 | # http://sphinx-doc.org/rest.html : |
|
22 | 20 | # for the Python documentation, this convention is used which you may follow: |
|
23 | 21 | # # with overline, for parts |
|
24 | 22 | # * with overline, for chapters |
|
25 | 23 | # =, for sections |
|
26 | 24 | # -, for subsections |
|
27 | 25 | # ^, for subsubsections |
|
28 | 26 | # ", for paragraphs |
|
29 | 27 | pystyles = ['#', '*', '=', '-', '^', '"'] |
|
30 | 28 | |
|
31 | 29 | # match on a header line underlined with one of the valid characters |
|
32 | 30 | headermatch = re.compile(r'''\n*(.+)\n([][!"#$%&'()*+,./:;<=>?@\\^_`{|}~-])\2{2,}\n+''', flags=re.MULTILINE) |
|
33 | 31 | |
|
34 | 32 | |
|
35 | 33 | def main(): |
|
36 | 34 | filenames = subprocess.check_output(['hg', 'loc', 'set:**.rst+kallithea/i18n/how_to']).splitlines() |
|
37 | 35 | for fn in filenames: |
|
38 | 36 | fn = fn.decode() |
|
39 | 37 | print('processing %s' % fn) |
|
40 | 38 | s = open(fn).read() |
|
41 | 39 | |
|
42 | 40 | # find levels and their styles |
|
43 | 41 | lastpos = 0 |
|
44 | 42 | styles = [] |
|
45 | 43 | for markup in headermatch.findall(s): |
|
46 | 44 | style = markup[1] |
|
47 | 45 | if style in styles: |
|
48 | 46 | stylepos = styles.index(style) |
|
49 | 47 | if stylepos > lastpos + 1: |
|
50 | 48 | print('bad style %r with level %s - was at %s' % (style, stylepos, lastpos)) |
|
51 | 49 | else: |
|
52 | 50 | stylepos = len(styles) |
|
53 | 51 | if stylepos > lastpos + 1: |
|
54 | 52 | print('bad new style %r - expected %r' % (style, styles[lastpos + 1])) |
|
55 | 53 | else: |
|
56 | 54 | styles.append(style) |
|
57 | 55 | lastpos = stylepos |
|
58 | 56 | |
|
59 | 57 | # remove superfluous spacing (may however be restored by header spacing) |
|
60 | 58 | s = re.sub(r'''(\n\n)\n*''', r'\1', s, flags=re.MULTILINE) |
|
61 | 59 | |
|
62 | 60 | if styles: |
|
63 | 61 | newstyles = pystyles[pystyles.index(styles[0]):] |
|
64 | 62 | |
|
65 | 63 | def subf(m): |
|
66 | 64 | title, style = m.groups() |
|
67 | 65 | level = styles.index(style) |
|
68 | 66 | before, after = spaces[level] |
|
69 | 67 | newstyle = newstyles[level] |
|
70 | 68 | return '\n' * (before + 1) + title + '\n' + newstyle * len(title) + '\n' * (after + 1) |
|
71 | 69 | s = headermatch.sub(subf, s) |
|
72 | 70 | |
|
73 | 71 | # remove superfluous spacing when headers are adjacent |
|
74 | 72 | s = re.sub(r'''(\n.+\n([][!"#$%&'()*+,./:;<=>?@\\^_`{|}~-])\2{2,}\n\n\n)\n*''', r'\1', s, flags=re.MULTILINE) |
|
75 | 73 | # fix trailing space and spacing before link sections |
|
76 | 74 | s = s.strip() + '\n' |
|
77 | 75 | s = re.sub(r'''\n+((?:\.\. _[^\n]*\n)+)$''', r'\n\n\n\1', s) |
|
78 | 76 | |
|
79 | 77 | open(fn, 'w').write(s) |
|
80 | 78 | |
|
81 | 79 | print(subprocess.check_output(['hg', 'diff'] + filenames)) |
|
82 | 80 | |
|
83 | 81 | if __name__ == '__main__': |
|
84 | 82 | main() |
@@ -1,72 +1,70 b'' | |||
|
1 | 1 | #!/usr/bin/env python3 |
|
2 | 2 | """ |
|
3 | 3 | Based on kallithea/lib/paster_commands/template.ini.mako, generate development.ini |
|
4 | 4 | """ |
|
5 | 5 | |
|
6 | from __future__ import print_function | |
|
7 | ||
|
8 | 6 | import re |
|
9 | 7 | |
|
10 | 8 | from kallithea.lib import inifile |
|
11 | 9 | |
|
12 | 10 | |
|
13 | 11 | # files to be generated from the mako template |
|
14 | 12 | ini_files = [ |
|
15 | 13 | ('development.ini', |
|
16 | 14 | { |
|
17 | 15 | '[server:main]': { |
|
18 | 16 | 'host': '0.0.0.0', |
|
19 | 17 | }, |
|
20 | 18 | '[app:main]': { |
|
21 | 19 | 'debug': 'true', |
|
22 | 20 | 'app_instance_uuid': 'development-not-secret', |
|
23 | 21 | 'session.secret': 'development-not-secret', |
|
24 | 22 | }, |
|
25 | 23 | '[logger_root]': { |
|
26 | 24 | 'handlers': 'console_color', |
|
27 | 25 | }, |
|
28 | 26 | '[logger_routes]': { |
|
29 | 27 | 'level': 'DEBUG', |
|
30 | 28 | }, |
|
31 | 29 | '[logger_beaker]': { |
|
32 | 30 | 'level': 'DEBUG', |
|
33 | 31 | }, |
|
34 | 32 | '[logger_templates]': { |
|
35 | 33 | 'level': 'INFO', |
|
36 | 34 | }, |
|
37 | 35 | '[logger_kallithea]': { |
|
38 | 36 | 'level': 'DEBUG', |
|
39 | 37 | }, |
|
40 | 38 | '[logger_tg]': { |
|
41 | 39 | 'level': 'DEBUG', |
|
42 | 40 | }, |
|
43 | 41 | '[logger_gearbox]': { |
|
44 | 42 | 'level': 'DEBUG', |
|
45 | 43 | }, |
|
46 | 44 | '[logger_whoosh_indexer]': { |
|
47 | 45 | 'level': 'DEBUG', |
|
48 | 46 | }, |
|
49 | 47 | }, |
|
50 | 48 | ), |
|
51 | 49 | ] |
|
52 | 50 | |
|
53 | 51 | |
|
54 | 52 | def main(): |
|
55 | 53 | # make sure all mako lines starting with '#' (the '##' comments) are marked up as <text> |
|
56 | 54 | makofile = inifile.template_file |
|
57 | 55 | print('reading:', makofile) |
|
58 | 56 | mako_org = open(makofile).read() |
|
59 | 57 | mako_no_text_markup = re.sub(r'</?%text>', '', mako_org) |
|
60 | 58 | mako_marked_up = re.sub(r'\n(##.*)', r'\n<%text>\1</%text>', mako_no_text_markup, flags=re.MULTILINE) |
|
61 | 59 | if mako_marked_up != mako_org: |
|
62 | 60 | print('writing:', makofile) |
|
63 | 61 | open(makofile, 'w').write(mako_marked_up) |
|
64 | 62 | |
|
65 | 63 | # create ini files |
|
66 | 64 | for fn, settings in ini_files: |
|
67 | 65 | print('updating:', fn) |
|
68 | 66 | inifile.create(fn, None, settings) |
|
69 | 67 | |
|
70 | 68 | |
|
71 | 69 | if __name__ == '__main__': |
|
72 | 70 | main() |
@@ -1,49 +1,47 b'' | |||
|
1 | 1 | #!/usr/bin/env python3 |
|
2 | 2 | |
|
3 | from __future__ import print_function | |
|
4 | ||
|
5 | 3 | import re |
|
6 | 4 | import sys |
|
7 | 5 | |
|
8 | 6 | |
|
9 | 7 | logre = r''' |
|
10 | 8 | (log\.(?:error|info|warning|debug) |
|
11 | 9 | [(][ \n]* |
|
12 | 10 | ) |
|
13 | 11 | %s |
|
14 | 12 | ( |
|
15 | 13 | [ \n]*[)] |
|
16 | 14 | ) |
|
17 | 15 | ''' |
|
18 | 16 | |
|
19 | 17 | |
|
20 | 18 | res = [ |
|
21 | 19 | # handle % () - keeping spaces around the old % |
|
22 | 20 | (re.compile(logre % r'''("[^"]*"|'[^']*') ([\n ]*) % ([\n ]*) \( ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) \) ''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'), |
|
23 | 21 | # handle % without () - keeping spaces around the old % |
|
24 | 22 | (re.compile(logre % r'''("[^"]*"|'[^']*') ([\n ]*) % ([\n ]*) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) ''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'), |
|
25 | 23 | # remove extra space if it is on next line |
|
26 | 24 | (re.compile(logre % r'''("[^"]*"|'[^']*') , (\n [ ]) ([ ][\n ]*) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) ''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'), |
|
27 | 25 | # remove extra space if it is on same line |
|
28 | 26 | (re.compile(logre % r'''("[^"]*"|'[^']*') , [ ]+ () ( [\n ]+) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* ) ''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'), |
|
29 | 27 | # remove trailing , and space |
|
30 | 28 | (re.compile(logre % r'''("[^"]*"|'[^']*') , () ( [\n ]*) ( (?:[^()]|\n)* (?: \( (?:[^()]|\n)* \) (?:[^()]|\n)* )* [^(), \n] ) [ ,]*''', flags=re.MULTILINE | re.VERBOSE), r'\1\2,\3\4\5\6'), |
|
31 | 29 | ] |
|
32 | 30 | |
|
33 | 31 | |
|
34 | 32 | def rewrite(f): |
|
35 | 33 | s = open(f).read() |
|
36 | 34 | for r, t in res: |
|
37 | 35 | s = r.sub(t, s) |
|
38 | 36 | open(f, 'w').write(s) |
|
39 | 37 | |
|
40 | 38 | |
|
41 | 39 | if __name__ == '__main__': |
|
42 | 40 | if len(sys.argv) < 2: |
|
43 | 41 | print('Cleanup of superfluous % formatting of log statements.') |
|
44 | 42 | print('Usage:') |
|
45 | 43 | print(''' hg revert `hg loc '*.py'|grep -v logformat.py` && scripts/logformat.py `hg loc '*.py'` && hg diff''') |
|
46 | 44 | raise SystemExit(1) |
|
47 | 45 | |
|
48 | 46 | for f in sys.argv[1:]: |
|
49 | 47 | rewrite(f) |
General Comments 0
You need to be logged in to leave comments.
Login now