@@ -1,238 +1,238 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import re |
|
23 | 23 | import logging |
|
24 | 24 | import datetime |
|
25 |    |
|
   | 25 | import configparser |
|
26 | 26 | |
|
27 | 27 | from rhodecode.model.db import Session, User, UserSshKeys |
|
28 | 28 | from rhodecode.model.scm import ScmModel |
|
29 | 29 | |
|
30 | 30 | from .hg import MercurialServer |
|
31 | 31 | from .git import GitServer |
|
32 | 32 | from .svn import SubversionServer |
|
33 | 33 | log = logging.getLogger(__name__) |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | class SshWrapper(object): |
|
37 | 37 | hg_cmd_pat = re.compile(r'^hg\s+\-R\s+(\S+)\s+serve\s+\-\-stdio$') |
|
38 | 38 | git_cmd_pat = re.compile(r'^git-(receive-pack|upload-pack)\s\'[/]?(\S+?)(|\.git)\'$') |
|
39 | 39 | svn_cmd_pat = re.compile(r'^svnserve -t') |
|
40 | 40 | |
|
41 | 41 | def __init__(self, command, connection_info, mode, |
|
42 | 42 | user, user_id, key_id, shell, ini_path, env): |
|
43 | 43 | self.command = command |
|
44 | 44 | self.connection_info = connection_info |
|
45 | 45 | self.mode = mode |
|
46 | 46 | self.user = user |
|
47 | 47 | self.user_id = user_id |
|
48 | 48 | self.key_id = key_id |
|
49 | 49 | self.shell = shell |
|
50 | 50 | self.ini_path = ini_path |
|
51 | 51 | self.env = env |
|
52 | 52 | |
|
53 | 53 | self.config = self.parse_config(ini_path) |
|
54 | 54 | self.server_impl = None |
|
55 | 55 | |
|
56 | 56 | def parse_config(self, config_path): |
|
57 | 57 | parser = configparser.ConfigParser() |
|
58 | 58 | parser.read(config_path) |
|
59 | 59 | return parser |
|
60 | 60 | |
|
61 | 61 | def update_key_access_time(self, key_id): |
|
62 | 62 | key = UserSshKeys().query().filter( |
|
63 | 63 | UserSshKeys.ssh_key_id == key_id).scalar() |
|
64 | 64 | if key: |
|
65 | 65 | key.accessed_on = datetime.datetime.utcnow() |
|
66 | 66 | Session().add(key) |
|
67 | 67 | Session().commit() |
|
68 | 68 | log.debug('Update key id:`%s` fingerprint:`%s` access time', |
|
69 | 69 | key_id, key.ssh_key_fingerprint) |
|
70 | 70 | |
|
71 | 71 | def get_connection_info(self): |
|
72 | 72 | """ |
|
73 | 73 | connection_info |
|
74 | 74 | |
|
75 | 75 | Identifies the client and server ends of the connection. |
|
76 | 76 | The variable contains four space-separated values: client IP address, |
|
77 | 77 | client port number, server IP address, and server port number. |
|
78 | 78 | """ |
|
79 | 79 | conn = dict( |
|
80 | 80 | client_ip=None, |
|
81 | 81 | client_port=None, |
|
82 | 82 | server_ip=None, |
|
83 | 83 | server_port=None, |
|
84 | 84 | ) |
|
85 | 85 | |
|
86 | 86 | info = self.connection_info.split(' ') |
|
87 | 87 | if len(info) == 4: |
|
88 | 88 | conn['client_ip'] = info[0] |
|
89 | 89 | conn['client_port'] = info[1] |
|
90 | 90 | conn['server_ip'] = info[2] |
|
91 | 91 | conn['server_port'] = info[3] |
|
92 | 92 | |
|
93 | 93 | return conn |
|
94 | 94 | |
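For context, the four space-separated values parsed here come from the SSH_CONNECTION variable that sshd sets for each session. A standalone sketch of the same parsing, for illustration only:

    # sshd sets SSH_CONNECTION to "client_ip client_port server_ip server_port"
    def parse_connection_info(connection_info):
        conn = dict(client_ip=None, client_port=None,
                    server_ip=None, server_port=None)
        info = (connection_info or '').split(' ')
        if len(info) == 4:
            conn.update(zip(
                ['client_ip', 'client_port', 'server_ip', 'server_port'], info))
        return conn

    assert parse_connection_info('127.0.0.1 22 10.0.0.1 443')['server_port'] == '443'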
|
95 | 95 | def maybe_translate_repo_uid(self, repo_name): |
|
96 | 96 | _org_name = repo_name |
|
97 | 97 | if _org_name.startswith('_'): |
|
98 | 98 | # remove format of _ID/subrepo |
|
99 | 99 | _org_name = _org_name.split('/', 1)[0] |
|
100 | 100 | |
|
101 | 101 | if repo_name.startswith('_'): |
|
102 | 102 | from rhodecode.model.repo import RepoModel |
|
103 | 103 | org_repo_name = repo_name |
|
104 | 104 | log.debug('translating UID repo %s', org_repo_name) |
|
105 | 105 | by_id_match = RepoModel().get_repo_by_id(repo_name) |
|
106 | 106 | if by_id_match: |
|
107 | 107 | repo_name = by_id_match.repo_name |
|
108 | 108 | log.debug('translation of UID repo %s got `%s`', org_repo_name, repo_name) |
|
109 | 109 | |
|
110 | 110 | return repo_name, _org_name |
|
111 | 111 | |
|
112 | 112 | def get_repo_details(self, mode): |
|
113 | 113 | vcs_type = mode if mode in ['svn', 'hg', 'git'] else None |
|
114 | 114 | repo_name = None |
|
115 | 115 | |
|
116 | 116 | hg_match = self.hg_cmd_pat.match(self.command) |
|
117 | 117 | if hg_match is not None: |
|
118 | 118 | vcs_type = 'hg' |
|
119 | 119 | repo_id = hg_match.group(1).strip('/') |
|
120 | 120 | repo_name, org_name = self.maybe_translate_repo_uid(repo_id) |
|
121 | 121 | return vcs_type, repo_name, mode |
|
122 | 122 | |
|
123 | 123 | git_match = self.git_cmd_pat.match(self.command) |
|
124 | 124 | if git_match is not None: |
|
125 | 125 | mode = git_match.group(1) |
|
126 | 126 | vcs_type = 'git' |
|
127 | 127 | repo_id = git_match.group(2).strip('/') |
|
128 | 128 | repo_name, org_name = self.maybe_translate_repo_uid(repo_id) |
|
129 | 129 | return vcs_type, repo_name, mode |
|
130 | 130 | |
|
131 | 131 | svn_match = self.svn_cmd_pat.match(self.command) |
|
132 | 132 | if svn_match is not None: |
|
133 | 133 | vcs_type = 'svn' |
|
134 | 134 | # Repo name should be extracted from the input stream, we're unable to |
|
135 | 135 | # extract it at this point in execution |
|
136 | 136 | return vcs_type, repo_name, mode |
|
137 | 137 | |
|
138 | 138 | return vcs_type, repo_name, mode |
|
139 | 139 | |
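The three class-level patterns above can be exercised directly; a quick illustration (the repository paths are made up):

    import re

    hg_cmd_pat = re.compile(r'^hg\s+\-R\s+(\S+)\s+serve\s+\-\-stdio$')
    git_cmd_pat = re.compile(r'^git-(receive-pack|upload-pack)\s\'[/]?(\S+?)(|\.git)\'$')
    svn_cmd_pat = re.compile(r'^svnserve -t')

    assert hg_cmd_pat.match('hg -R my/repo serve --stdio').group(1) == 'my/repo'
    m = git_cmd_pat.match("git-upload-pack '/my/repo.git'")
    assert (m.group(1), m.group(2)) == ('upload-pack', 'my/repo')
    assert svn_cmd_pat.match('svnserve -t') is not None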
|
140 | 140 | def serve(self, vcs, repo, mode, user, permissions, branch_permissions): |
|
141 | 141 | store = ScmModel().repos_path |
|
142 | 142 | |
|
143 | 143 | check_branch_perms = False |
|
144 | 144 | detect_force_push = False |
|
145 | 145 | |
|
146 | 146 | if branch_permissions: |
|
147 | 147 | check_branch_perms = True |
|
148 | 148 | detect_force_push = True |
|
149 | 149 | |
|
150 | 150 | log.debug( |
|
151 | 151 | 'VCS detected:`%s` mode: `%s` repo_name: %s, branch_permission_checks:%s', |
|
152 | 152 | vcs, mode, repo, check_branch_perms) |
|
153 | 153 | |
|
154 | 154 | # detect if we have to check branch permissions |
|
155 | 155 | extras = { |
|
156 | 156 | 'detect_force_push': detect_force_push, |
|
157 | 157 | 'check_branch_perms': check_branch_perms, |
|
158 | 158 | } |
|
159 | 159 | |
|
160 | 160 | if vcs == 'hg': |
|
161 | 161 | server = MercurialServer( |
|
162 | 162 | store=store, ini_path=self.ini_path, |
|
163 | 163 | repo_name=repo, user=user, |
|
164 | 164 | user_permissions=permissions, config=self.config, env=self.env) |
|
165 | 165 | self.server_impl = server |
|
166 | 166 | return server.run(tunnel_extras=extras) |
|
167 | 167 | |
|
168 | 168 | elif vcs == 'git': |
|
169 | 169 | server = GitServer( |
|
170 | 170 | store=store, ini_path=self.ini_path, |
|
171 | 171 | repo_name=repo, repo_mode=mode, user=user, |
|
172 | 172 | user_permissions=permissions, config=self.config, env=self.env) |
|
173 | 173 | self.server_impl = server |
|
174 | 174 | return server.run(tunnel_extras=extras) |
|
175 | 175 | |
|
176 | 176 | elif vcs == 'svn': |
|
177 | 177 | server = SubversionServer( |
|
178 | 178 | store=store, ini_path=self.ini_path, |
|
179 | 179 | repo_name=None, user=user, |
|
180 | 180 | user_permissions=permissions, config=self.config, env=self.env) |
|
181 | 181 | self.server_impl = server |
|
182 | 182 | return server.run(tunnel_extras=extras) |
|
183 | 183 | |
|
184 | 184 | else: |
|
185 | 185 | raise Exception('Unrecognised VCS: {}'.format(vcs)) |
|
186 | 186 | |
|
187 | 187 | def wrap(self): |
|
188 | 188 | mode = self.mode |
|
189 | 189 | user = self.user |
|
190 | 190 | user_id = self.user_id |
|
191 | 191 | key_id = self.key_id |
|
192 | 192 | shell = self.shell |
|
193 | 193 | |
|
194 | 194 | scm_detected, scm_repo, scm_mode = self.get_repo_details(mode) |
|
195 | 195 | |
|
196 | 196 | log.debug( |
|
197 | 197 | 'Mode: `%s` User: `%s:%s` Shell: `%s` SSH Command: `\"%s\"` ' |
|
198 | 198 | 'SCM_DETECTED: `%s` SCM Mode: `%s` SCM Repo: `%s`', |
|
199 | 199 | mode, user, user_id, shell, self.command, |
|
200 | 200 | scm_detected, scm_mode, scm_repo) |
|
201 | 201 | |
|
202 | 202 | # update last access time for this key |
|
203 | 203 | self.update_key_access_time(key_id) |
|
204 | 204 | |
|
205 | 205 | log.debug('SSH Connection info %s', self.get_connection_info()) |
|
206 | 206 | |
|
207 | 207 | if shell and self.command is None: |
|
208 | 208 | log.info('Dropping to shell, no command given and shell is allowed') |
|
209 | 209 | os.execl('/bin/bash', '-l') |
|
210 | 210 | exit_code = 1 |
|
211 | 211 | |
|
212 | 212 | elif scm_detected: |
|
213 | 213 | user = User.get(user_id) |
|
214 | 214 | if not user: |
|
215 | 215 | log.warning('User with id %s not found', user_id) |
|
216 | 216 | exit_code = -1 |
|
217 | 217 | return exit_code |
|
218 | 218 | |
|
219 | 219 | auth_user = user.AuthUser() |
|
220 | 220 | permissions = auth_user.permissions['repositories'] |
|
221 | 221 | repo_branch_permissions = auth_user.get_branch_permissions(scm_repo) |
|
222 | 222 | try: |
|
223 | 223 | exit_code, is_updated = self.serve( |
|
224 | 224 | scm_detected, scm_repo, scm_mode, user, permissions, |
|
225 | 225 | repo_branch_permissions) |
|
226 | 226 | except Exception: |
|
227 | 227 | log.exception('Error occurred during execution of SshWrapper') |
|
228 | 228 | exit_code = -1 |
|
229 | 229 | |
|
230 | 230 | elif self.command is None and shell is False: |
|
231 | 231 | log.error('No Command given.') |
|
232 | 232 | exit_code = -1 |
|
233 | 233 | |
|
234 | 234 | else: |
|
235 | 235 | log.error('Unhandled Command: "%s" Aborting.', self.command) |
|
236 | 236 | exit_code = -1 |
|
237 | 237 | |
|
238 | 238 | return exit_code |
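A note on maybe_translate_repo_uid above: repositories may be addressed by a _<id> UID, and the method first strips an optional /subrepo suffix before the database lookup. The suffix handling in isolation (the RepoModel lookup itself is omitted here):

    def split_repo_uid(repo_name):
        # '_23/docs' -> '_23'; plain names pass through unchanged
        if repo_name.startswith('_'):
            return repo_name.split('/', 1)[0]
        return repo_name

    assert split_repo_uid('_23/docs') == '_23'
    assert split_repo_uid('group/repo') == 'group/repo'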
@@ -1,70 +1,70 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import pytest |
|
23 |    |
|
   | 23 | import configparser |
|
24 | 24 | |
|
25 | 25 | from rhodecode.apps.ssh_support.lib.ssh_wrapper import SshWrapper |
|
26 | 26 | from rhodecode.lib.utils2 import AttributeDict |
|
27 | 27 | |
|
28 | 28 | |
|
29 | 29 | @pytest.fixture() |
|
30 | 30 | def dummy_conf_file(tmpdir): |
|
31 | 31 | conf = configparser.ConfigParser() |
|
32 | 32 | conf.add_section('app:main') |
|
33 | 33 | conf.set('app:main', 'ssh.executable.hg', '/usr/bin/hg') |
|
34 | 34 | conf.set('app:main', 'ssh.executable.git', '/usr/bin/git') |
|
35 | 35 | conf.set('app:main', 'ssh.executable.svn', '/usr/bin/svnserve') |
|
36 | 36 | |
|
37 | 37 | f_path = os.path.join(str(tmpdir), 'ssh_wrapper_test.ini') |
|
38 | 38 | with open(f_path, 'w') as f: |
|
39 | 39 | conf.write(f) |
|
40 | 40 | |
|
41 | 41 | return f_path |
|
42 | 42 | |
|
43 | 43 | |
|
44 | 44 | def plain_dummy_env(): |
|
45 | 45 | return { |
|
46 | 46 | 'request': |
|
47 | 47 | AttributeDict(host_url='http://localhost', script_name='/') |
|
48 | 48 | } |
|
49 | 49 | |
|
50 | 50 | |
|
51 | 51 | @pytest.fixture() |
|
52 | 52 | def dummy_env(): |
|
53 | 53 | return plain_dummy_env() |
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | def plain_dummy_user(): |
|
57 | 57 | return AttributeDict(username='test_user') |
|
58 | 58 | |
|
59 | 59 | |
|
60 | 60 | @pytest.fixture() |
|
61 | 61 | def dummy_user(): |
|
62 | 62 | return plain_dummy_user() |
|
63 | 63 | |
|
64 | 64 | |
|
65 | 65 | @pytest.fixture() |
|
66 | 66 | def ssh_wrapper(app, dummy_conf_file, dummy_env): |
|
67 | 67 | conn_info = '127.0.0.1 22 10.0.0.1 443' |
|
68 | 68 | return SshWrapper( |
|
69 | 69 | 'random command', conn_info, 'auto', 'admin', '1', key_id='1', |
|
70 | 70 | shell=False, ini_path=dummy_conf_file, env=dummy_env) |
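One caveat in this fixture under Python 3: ConfigParser.write() requires a file opened in text mode, which is why the fixture opens with 'w' above. A minimal round-trip, with an illustrative key:

    import configparser
    import os
    import tempfile

    conf = configparser.ConfigParser()
    conf.add_section('app:main')
    conf.set('app:main', 'ssh.executable.hg', '/usr/bin/hg')

    path = os.path.join(tempfile.mkdtemp(), 'example.ini')
    with open(path, 'w') as f:  # text mode; 'wb' raises TypeError on Python 3
        conf.write(f)

    check = configparser.ConfigParser()
    check.read(path)
    assert check.get('app:main', 'ssh.executable.hg') == '/usr/bin/hg'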
@@ -1,141 +1,141 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import json |
|
23 | 23 | import logging |
|
24 | 24 | import datetime |
|
25 | 25 | import time |
|
26 | 26 | |
|
27 | 27 | from functools import partial |
|
28 | 28 | |
|
29 |    |
|
   | 29 | import configparser |
|
30 | 30 | from celery.result import AsyncResult |
|
31 | 31 | import celery.loaders.base |
|
32 | 32 | import celery.schedules |
|
33 | 33 | |
|
34 | 34 | log = logging.getLogger(__name__) |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | def get_task_id(task): |
|
38 | 38 | task_id = None |
|
39 | 39 | if isinstance(task, AsyncResult): |
|
40 | 40 | task_id = task.task_id |
|
41 | 41 | |
|
42 | 42 | return task_id |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | def crontab(value): |
|
46 | 46 | return celery.schedules.crontab(**value) |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | def timedelta(value): |
|
50 | 50 | return datetime.timedelta(**value) |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | def safe_json(get, section, key): |
|
54 | 54 | value = '' |
|
55 | 55 | try: |
|
56 | 56 | value = get(key) |
|
57 | 57 | json_value = json.loads(value) |
|
58 | 58 | except ValueError: |
|
59 | 59 | msg = 'The %s=%s is not valid json in section %s' % ( |
|
60 | 60 | key, value, section |
|
61 | 61 | ) |
|
62 | 62 | raise ValueError(msg) |
|
63 | 63 | |
|
64 | 64 | return json_value |
|
65 | 65 | |
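safe_json above turns a malformed ini value into a ValueError naming the key, value and section. Its inline equivalent, sketched with an invented section name:

    import configparser
    import json
    from functools import partial

    parser = configparser.ConfigParser()
    parser.read_string('[celerybeat:example]\nschedule = {"minutes": 5}\n')
    get = partial(parser.get, 'celerybeat:example')

    # equivalent of safe_json(get, 'celerybeat:example', 'schedule'); a value
    # that is not valid JSON would raise ValueError instead
    value = json.loads(get('schedule'))
    assert value == {'minutes': 5}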
|
66 | 66 | |
|
67 | 67 | def raw_2_schedule(schedule_value, schedule_type): |
|
68 | 68 | schedule_type_map = { |
|
69 | 69 | 'crontab': crontab, |
|
70 | 70 | 'timedelta': timedelta, |
|
71 | 71 | 'integer': int |
|
72 | 72 | } |
|
73 | 73 | scheduler_cls = schedule_type_map.get(schedule_type) |
|
74 | 74 | |
|
75 | 75 | if scheduler_cls is None: |
|
76 | 76 | raise ValueError( |
|
77 | 77 | 'schedule type %s in section is invalid' % ( |
|
78 | 78 | schedule_type, |
|
79 | 79 | ) |
|
80 | 80 | ) |
|
81 | 81 | try: |
|
82 | 82 | schedule = scheduler_cls(schedule_value) |
|
83 | 83 | except TypeError: |
|
84 | 84 | log.exception('Failed to compose a schedule from value: %r', schedule_value) |
|
85 | 85 | schedule = None |
|
86 | 86 | return schedule |
|
87 | 87 | |
|
88 | 88 | |
|
89 | 89 | def get_beat_config(parser, section): |
|
90 | 90 | |
|
91 | 91 | get = partial(parser.get, section) |
|
92 | 92 | has_option = partial(parser.has_option, section) |
|
93 | 93 | |
|
94 | 94 | schedule_type = get('type') |
|
95 | 95 | schedule_value = safe_json(get, section, 'schedule') |
|
96 | 96 | |
|
97 | 97 | config = { |
|
98 | 98 | 'schedule_type': schedule_type, |
|
99 | 99 | 'schedule_value': schedule_value, |
|
100 | 100 | 'task': get('task'), |
|
101 | 101 | } |
|
102 | 102 | schedule = raw_2_schedule(schedule_value, schedule_type) |
|
103 | 103 | if schedule: |
|
104 | 104 | config['schedule'] = schedule |
|
105 | 105 | |
|
106 | 106 | if has_option('args'): |
|
107 | 107 | config['args'] = safe_json(get, section, 'args') |
|
108 | 108 | |
|
109 | 109 | if has_option('kwargs'): |
|
110 | 110 | config['kwargs'] = safe_json(get, section, 'kwargs') |
|
111 | 111 | |
|
112 | 112 | if has_option('force_update'): |
|
113 | 113 | config['force_update'] = get('force_update') |
|
114 | 114 | |
|
115 | 115 | return config |
|
116 | 116 | |
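For illustration, the ini shape that get_beat_config consumes: a [celerybeat:...] section carrying a task, a type, and a JSON-encoded schedule. The section and task names below are invented:

    import configparser
    import datetime
    import json

    ini = (
        '[celerybeat:sample_task]\n'
        'task = rhodecode.sample.task\n'
        'type = timedelta\n'
        'schedule = {"minutes": 30}\n'
    )
    parser = configparser.ConfigParser()
    parser.read_string(ini)
    raw = json.loads(parser.get('celerybeat:sample_task', 'schedule'))
    # the same composition raw_2_schedule performs for type 'timedelta'
    assert datetime.timedelta(**raw) == datetime.timedelta(minutes=30)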
|
117 | 117 | |
|
118 | 118 | def parse_ini_vars(ini_vars): |
|
119 | 119 | options = {} |
|
120 | 120 | for pairs in ini_vars.split(','): |
|
121 | 121 | key, value = pairs.split('=') |
|
122 | 122 | options[key] = value |
|
123 | 123 | return options |
|
124 | 124 | |
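parse_ini_vars above handles a flat comma-separated key=value string; because it relies on plain split(), neither keys nor values may themselves contain '=' or ','. Assuming the function is in scope, with illustrative input:

    opts = parse_ini_vars('vcs.start_server=true,instance_id=prod-1')
    assert opts == {'vcs.start_server': 'true', 'instance_id': 'prod-1'}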
|
125 | 125 | |
|
126 | 126 | def ping_db(): |
|
127 | 127 | from rhodecode.model import meta |
|
128 | 128 | from rhodecode.model.db import DbMigrateVersion |
|
129 | 129 | log.info('Testing DB connection...') |
|
130 | 130 | |
|
131 | 131 | for test in range(10): |
|
132 | 132 | try: |
|
133 | 133 | scalar = DbMigrateVersion.query().scalar() |
|
134 | 134 | log.debug('DB PING %s@%s', scalar, scalar.version) |
|
135 | 135 | break |
|
136 | 136 | except Exception: |
|
137 | 137 | retry = 1 |
|
138 | 138 | log.debug('DB not ready, next try in %ss', retry) |
|
139 | 139 | time.sleep(retry) |
|
140 | 140 | finally: |
|
141 | 141 | meta.Session.remove() |
@@ -1,27 +1,27 @@
|
1 | 1 | """ |
|
2 | 2 | Configuration parser module. |
|
3 | 3 | """ |
|
4 | 4 | |
|
5 |    |
|
   | 5 | import configparser |
|
6 | 6 | |
|
7 | 7 | from rhodecode.lib.dbmigrate.migrate.versioning.config import * |
|
8 | 8 | from rhodecode.lib.dbmigrate.migrate.versioning import pathed |
|
9 | 9 | |
|
10 | 10 | |
|
11 | 11 | class Parser(ConfigParser): |
|
12 | 12 | """A project configuration file.""" |
|
13 | 13 | |
|
14 | 14 | def to_dict(self, sections=None): |
|
15 | 15 | """It's easier to access config values like dictionaries""" |
|
16 | 16 | return self._sections |
|
17 | 17 | |
|
18 | 18 | |
|
19 | 19 | class Config(pathed.Pathed, Parser): |
|
20 | 20 | """Configuration class.""" |
|
21 | 21 | |
|
22 | 22 | def __init__(self, path, *p, **k): |
|
23 | 23 | """Confirm the config file exists; read it.""" |
|
24 | 24 | self.require_found(path) |
|
25 | 25 | pathed.Pathed.__init__(self, path) |
|
26 | 26 | Parser.__init__(self, *p, **k) |
|
27 | 27 | self.read(path) |
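to_dict above returns ConfigParser's private _sections attribute; a sketch of an equivalent built only on the public API (the helper name is ours):

    import configparser

    def to_dict_public(parser, sections=None):
        # {section: {key: value}}, without touching parser internals
        return {s: dict(parser.items(s)) for s in (sections or parser.sections())}

    p = configparser.ConfigParser()
    p.read_string('[db_settings]\nrepository_id = example\n')
    assert to_dict_public(p) == {'db_settings': {'repository_id': 'example'}}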
@@ -1,58 +1,58 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 |    |
|
   | 22 | import configparser |
|
23 | 23 | from pyramid.paster import bootstrap as pyramid_bootstrap, setup_logging # pragma: no cover |
|
24 | 24 | |
|
25 | 25 | from rhodecode.lib.request import Request |
|
26 | 26 | |
|
27 | 27 | |
|
28 | 28 | def get_config(ini_path, **kwargs): |
|
29 | 29 | parser = configparser.ConfigParser(**kwargs) |
|
30 | 30 | parser.read(ini_path) |
|
31 | 31 | return parser |
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | def get_app_config(ini_path): |
|
35 | 35 | from paste.deploy.loadwsgi import appconfig |
|
36 | 36 | return appconfig('config:{}'.format(ini_path), relative_to=os.getcwd()) |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | def bootstrap(config_uri, options=None, env=None): |
|
40 | 40 | from rhodecode.lib.utils2 import AttributeDict |
|
41 | 41 | |
|
42 | 42 | if env: |
|
43 | 43 | os.environ.update(env) |
|
44 | 44 | |
|
45 | 45 | config = get_config(config_uri) |
|
46 | 46 | base_url = 'http://rhodecode.local' |
|
47 | 47 | try: |
|
48 | 48 | base_url = config.get('app:main', 'app.base_url') |
|
49 | 49 | except (configparser.NoSectionError, configparser.NoOptionError): |
|
50 | 50 | pass |
|
51 | 51 | |
|
52 | 52 | request = Request.blank('/', base_url=base_url) |
|
53 | 53 | # fake inject a running user for bootstrap request ! |
|
54 | 54 | request.user = AttributeDict({'username': 'bootstrap-user', |
|
55 | 55 | 'user_id': 1, |
|
56 | 56 | 'ip_addr': '127.0.0.1'}) |
|
57 | 57 | return pyramid_bootstrap(config_uri, request=request, options=options) |
|
58 | 58 |
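get_config hands back a plain ConfigParser, so optional keys need the explicit fallback handling that bootstrap applies to app.base_url. The same pattern in isolation (the ini content is a stand-in):

    import configparser

    parser = configparser.ConfigParser()
    parser.read_string('[app:main]\nuse = egg:rhodecode-enterprise-ce\n')

    base_url = 'http://rhodecode.local'
    try:
        base_url = parser.get('app:main', 'app.base_url')
    except (configparser.NoSectionError, configparser.NoOptionError):
        pass  # option absent, keep the default
    assert base_url == 'http://rhodecode.local'

Python 3's configparser also accepts parser.get(section, option, fallback=default), which collapses the try/except into a single call.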
@@ -1,853 +1,853 @@
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2017-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import os |
|
23 | 23 | import sys |
|
24 | 24 | import time |
|
25 | 25 | import platform |
|
26 | 26 | import collections |
|
27 | 27 | from functools import wraps |
|
28 | 28 | |
|
29 | 29 | import pkg_resources |
|
30 | 30 | import logging |
|
31 | 31 | import resource |
|
32 | 32 | |
|
33 |    |
|
   | 33 | import configparser |
|
34 | 34 | |
|
35 | 35 | log = logging.getLogger(__name__) |
|
36 | 36 | |
|
37 | 37 | |
|
38 | 38 | psutil = None |
|
39 | 39 | |
|
40 | 40 | try: |
|
41 | 41 | # cygwin cannot have yet psutil support. |
|
42 | 42 | import psutil as psutil |
|
43 | 43 | except ImportError: |
|
44 | 44 | pass |
|
45 | 45 | |
|
46 | 46 | |
|
47 | 47 | _NA = 'NOT AVAILABLE' |
|
48 | 48 | |
|
49 | 49 | STATE_OK = 'ok' |
|
50 | 50 | STATE_ERR = 'error' |
|
51 | 51 | STATE_WARN = 'warning' |
|
52 | 52 | |
|
53 | 53 | STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK} |
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | registered_helpers = {} |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | def register_sysinfo(func): |
|
60 | 60 | """ |
|
61 | 61 | @register_helper |
|
62 | 62 | def db_check(): |
|
63 | 63 | pass |
|
64 | 64 | |
|
65 | 65 | db_check == registered_helpers['db_check'] |
|
66 | 66 | """ |
|
67 | 67 | global registered_helpers |
|
68 | 68 | registered_helpers[func.__name__] = func |
|
69 | 69 | |
|
70 | 70 | @wraps(func) |
|
71 | 71 | def _wrapper(*args, **kwargs): |
|
72 | 72 | return func(*args, **kwargs) |
|
73 | 73 | return _wrapper |
|
74 | 74 | |
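register_sysinfo fills the registry at import time, exactly as its docstring sketches; a self-contained version of the pattern:

    from functools import wraps

    registered_helpers = {}

    def register_sysinfo(func):
        registered_helpers[func.__name__] = func

        @wraps(func)
        def _wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        return _wrapper

    @register_sysinfo
    def db_check():
        return 'ok'

    assert registered_helpers['db_check']() == 'ok'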
|
75 | 75 | |
|
76 | 76 | # HELPERS |
|
77 | 77 | def percentage(part, whole): |
|
78 | 78 | whole = float(whole) |
|
79 | 79 | if whole > 0: |
|
80 | 80 | return round(100 * float(part) / whole, 1) |
|
81 | 81 | return 0.0 |
|
82 | 82 | |
|
83 | 83 | |
|
84 | 84 | def get_storage_size(storage_path): |
|
85 | 85 | sizes = [] |
|
86 | 86 | for file_ in os.listdir(storage_path): |
|
87 | 87 | storage_file = os.path.join(storage_path, file_) |
|
88 | 88 | if os.path.isfile(storage_file): |
|
89 | 89 | try: |
|
90 | 90 | sizes.append(os.path.getsize(storage_file)) |
|
91 | 91 | except OSError: |
|
92 | 92 | log.exception('Failed to get size of storage file %s', storage_file) |
|
93 | 93 | pass |
|
94 | 94 | |
|
95 | 95 | return sum(sizes) |
|
96 | 96 | |
|
97 | 97 | |
|
98 | 98 | def get_resource(resource_type): |
|
99 | 99 | try: |
|
100 | 100 | return resource.getrlimit(resource_type) |
|
101 | 101 | except Exception: |
|
102 | 102 | return 'NOT_SUPPORTED' |
|
103 | 103 | |
|
104 | 104 | |
|
105 | 105 | def get_cert_path(ini_path): |
|
106 | 106 | default = '/etc/ssl/certs/ca-certificates.crt' |
|
107 | 107 | control_ca_bundle = os.path.join( |
|
108 | 108 | os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))), |
|
109 | 109 | '.rccontrol-profile/etc/ca-bundle.crt') |
|
110 | 110 | if os.path.isfile(control_ca_bundle): |
|
111 | 111 | default = control_ca_bundle |
|
112 | 112 | |
|
113 | 113 | return default |
|
114 | 114 | |
|
115 | 115 | |
|
116 | 116 | class SysInfoRes(object): |
|
117 | 117 | def __init__(self, value, state=None, human_value=None): |
|
118 | 118 | self.value = value |
|
119 | 119 | self.state = state or STATE_OK_DEFAULT |
|
120 | 120 | self.human_value = human_value or value |
|
121 | 121 | |
|
122 | 122 | def __json__(self): |
|
123 | 123 | return { |
|
124 | 124 | 'value': self.value, |
|
125 | 125 | 'state': self.state, |
|
126 | 126 | 'human_value': self.human_value, |
|
127 | 127 | } |
|
128 | 128 | |
|
129 | 129 | def get_value(self): |
|
130 | 130 | return self.__json__() |
|
131 | 131 | |
|
132 | 132 | def __str__(self): |
|
133 | 133 | return '<SysInfoRes({})>'.format(self.__json__()) |
|
134 | 134 | |
|
135 | 135 | |
|
136 | 136 | class SysInfo(object): |
|
137 | 137 | |
|
138 | 138 | def __init__(self, func_name, **kwargs): |
|
139 | 139 | self.func_name = func_name |
|
140 | 140 | self.value = _NA |
|
141 | 141 | self.state = None |
|
142 | 142 | self.kwargs = kwargs or {} |
|
143 | 143 | |
|
144 | 144 | def __call__(self): |
|
145 | 145 | computed = self.compute(**self.kwargs) |
|
146 | 146 | if not isinstance(computed, SysInfoRes): |
|
147 | 147 | raise ValueError( |
|
148 | 148 | 'computed value for {} is not instance of ' |
|
149 | 149 | '{}, got {} instead'.format( |
|
150 | 150 | self.func_name, SysInfoRes, type(computed))) |
|
151 | 151 | return computed.__json__() |
|
152 | 152 | |
|
153 | 153 | def __str__(self): |
|
154 | 154 | return '<SysInfo({})>'.format(self.func_name) |
|
155 | 155 | |
|
156 | 156 | def compute(self, **kwargs): |
|
157 | 157 | return self.func_name(**kwargs) |
|
158 | 158 | |
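A helper must hand back a SysInfoRes, otherwise SysInfo.__call__ raises ValueError. Minimal wiring, assuming the classes above are in scope and using a made-up helper:

    def toy_metric():
        # any helper: compute a value and wrap it in SysInfoRes
        return SysInfoRes(value={'answer': 42}, human_value='answer: 42')

    result = SysInfo(toy_metric)()  # runs compute() and serializes via __json__()
    assert result['human_value'] == 'answer: 42'
    assert result['state'] == STATE_OK_DEFAULT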
|
159 | 159 | |
|
160 | 160 | # SysInfo functions |
|
161 | 161 | @register_sysinfo |
|
162 | 162 | def python_info(): |
|
163 | 163 | value = dict(version=' '.join(platform._sys_version()), |
|
164 | 164 | executable=sys.executable) |
|
165 | 165 | return SysInfoRes(value=value) |
|
166 | 166 | |
|
167 | 167 | |
|
168 | 168 | @register_sysinfo |
|
169 | 169 | def py_modules(): |
|
170 | 170 | mods = dict([(p.project_name, {'version': p.version, 'location': p.location}) |
|
171 | 171 | for p in pkg_resources.working_set]) |
|
172 | 172 | |
|
173 | 173 | value = sorted(mods.items(), key=lambda k: k[0].lower()) |
|
174 | 174 | return SysInfoRes(value=value) |
|
175 | 175 | |
|
176 | 176 | |
|
177 | 177 | @register_sysinfo |
|
178 | 178 | def platform_type(): |
|
179 | 179 | from rhodecode.lib.utils import safe_unicode, generate_platform_uuid |
|
180 | 180 | |
|
181 | 181 | value = dict( |
|
182 | 182 | name=safe_unicode(platform.platform()), |
|
183 | 183 | uuid=generate_platform_uuid() |
|
184 | 184 | ) |
|
185 | 185 | return SysInfoRes(value=value) |
|
186 | 186 | |
|
187 | 187 | |
|
188 | 188 | @register_sysinfo |
|
189 | 189 | def locale_info(): |
|
190 | 190 | import locale |
|
191 | 191 | |
|
192 | 192 | value = dict( |
|
193 | 193 | locale_default=locale.getdefaultlocale(), |
|
194 | 194 | locale_lc_all=locale.getlocale(locale.LC_ALL), |
|
195 | 195 | lang_env=os.environ.get('LANG'), |
|
196 | 196 | lc_all_env=os.environ.get('LC_ALL'), |
|
197 | 197 | local_archive_env=os.environ.get('LOCALE_ARCHIVE'), |
|
198 | 198 | ) |
|
199 | 199 | human_value = 'LANG: {}, locale LC_ALL: {}, Default locales: {}'.format( |
|
200 | 200 | value['lang_env'], value['locale_lc_all'], value['locale_default']) |
|
201 | 201 | return SysInfoRes(value=value, human_value=human_value) |
|
202 | 202 | |
|
203 | 203 | |
|
204 | 204 | @register_sysinfo |
|
205 | 205 | def ulimit_info(): |
|
206 | 206 | data = collections.OrderedDict([ |
|
207 | 207 | ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)), |
|
208 | 208 | ('file size', get_resource(resource.RLIMIT_FSIZE)), |
|
209 | 209 | ('stack size', get_resource(resource.RLIMIT_STACK)), |
|
210 | 210 | ('core file size', get_resource(resource.RLIMIT_CORE)), |
|
211 | 211 | ('address space size', get_resource(resource.RLIMIT_AS)), |
|
212 | 212 | ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)), |
|
213 | 213 | ('heap size', get_resource(resource.RLIMIT_DATA)), |
|
214 | 214 | ('rss size', get_resource(resource.RLIMIT_RSS)), |
|
215 | 215 | ('number of processes', get_resource(resource.RLIMIT_NPROC)), |
|
216 | 216 | ('open files', get_resource(resource.RLIMIT_NOFILE)), |
|
217 | 217 | ]) |
|
218 | 218 | |
|
219 | 219 | text = ', '.join('{}:{}'.format(k, v) for k, v in data.items()) |
|
220 | 220 | |
|
221 | 221 | value = { |
|
222 | 222 | 'limits': data, |
|
223 | 223 | 'text': text, |
|
224 | 224 | } |
|
225 | 225 | return SysInfoRes(value=value) |
|
226 | 226 | |
|
227 | 227 | |
|
228 | 228 | @register_sysinfo |
|
229 | 229 | def uptime(): |
|
230 | 230 | from rhodecode.lib.helpers import age, time_to_datetime |
|
231 | 231 | from rhodecode.translation import TranslationString |
|
232 | 232 | |
|
233 | 233 | value = dict(boot_time=0, uptime=0, text='') |
|
234 | 234 | state = STATE_OK_DEFAULT |
|
235 | 235 | if not psutil: |
|
236 | 236 | return SysInfoRes(value=value, state=state) |
|
237 | 237 | |
|
238 | 238 | boot_time = psutil.boot_time() |
|
239 | 239 | value['boot_time'] = boot_time |
|
240 | 240 | value['uptime'] = time.time() - boot_time |
|
241 | 241 | |
|
242 | 242 | date_or_age = age(time_to_datetime(boot_time)) |
|
243 | 243 | if isinstance(date_or_age, TranslationString): |
|
244 | 244 | date_or_age = date_or_age.interpolate() |
|
245 | 245 | |
|
246 | 246 | human_value = value.copy() |
|
247 | 247 | human_value['boot_time'] = time_to_datetime(boot_time) |
|
248 | 248 | human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False) |
|
249 | 249 | |
|
250 | 250 | human_value['text'] = u'Server started {}'.format(date_or_age) |
|
251 | 251 | return SysInfoRes(value=value, human_value=human_value) |
|
252 | 252 | |
|
253 | 253 | |
|
254 | 254 | @register_sysinfo |
|
255 | 255 | def memory(): |
|
256 | 256 | from rhodecode.lib.helpers import format_byte_size_binary |
|
257 | 257 | value = dict(available=0, used=0, used_real=0, cached=0, percent=0, |
|
258 | 258 | percent_used=0, free=0, inactive=0, active=0, shared=0, |
|
259 | 259 | total=0, buffers=0, text='') |
|
260 | 260 | |
|
261 | 261 | state = STATE_OK_DEFAULT |
|
262 | 262 | if not psutil: |
|
263 | 263 | return SysInfoRes(value=value, state=state) |
|
264 | 264 | |
|
265 | 265 | value.update(dict(psutil.virtual_memory()._asdict())) |
|
266 | 266 | value['used_real'] = value['total'] - value['available'] |
|
267 | 267 | value['percent_used'] = psutil._common.usage_percent( |
|
268 | 268 | value['used_real'], value['total'], 1) |
|
269 | 269 | |
|
270 | 270 | human_value = value.copy() |
|
271 | 271 | human_value['text'] = '%s/%s, %s%% used' % ( |
|
272 | 272 | format_byte_size_binary(value['used_real']), |
|
273 | 273 | format_byte_size_binary(value['total']), |
|
274 | 274 | value['percent_used'],) |
|
275 | 275 | |
|
276 | 276 | keys = list(value.keys()) |
|
277 | 277 | keys.remove('percent') |
|
278 | 278 | keys.remove('percent_used') |
|
279 | 279 | keys.remove('text') |
|
280 | 280 | for k in keys: |
|
281 | 281 | human_value[k] = format_byte_size_binary(value[k]) |
|
282 | 282 | |
|
283 | 283 | if state['type'] == STATE_OK and value['percent_used'] > 90: |
|
284 | 284 | msg = 'Critical: your available RAM memory is very low.' |
|
285 | 285 | state = {'message': msg, 'type': STATE_ERR} |
|
286 | 286 | |
|
287 | 287 | elif state['type'] == STATE_OK and value['percent_used'] > 70: |
|
288 | 288 | msg = 'Warning: your available RAM memory is running low.' |
|
289 | 289 | state = {'message': msg, 'type': STATE_WARN} |
|
290 | 290 | |
|
291 | 291 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
292 | 292 | |
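The list() conversion above is required on Python 3, where dict.keys() returns a view object that supports neither slicing nor pop(). For example:

    value = {'percent': 1, 'percent_used': 2, 'text': '', 'total': 3}
    keys = list(value.keys())  # a py2-style value.keys()[::] copy would fail here
    for k in ('percent', 'percent_used', 'text'):
        keys.remove(k)
    assert keys == ['total']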
|
293 | 293 | |
|
294 | 294 | @register_sysinfo |
|
295 | 295 | def machine_load(): |
|
296 | 296 | value = {'1_min': _NA, '5_min': _NA, '15_min': _NA, 'text': ''} |
|
297 | 297 | state = STATE_OK_DEFAULT |
|
298 | 298 | if not psutil: |
|
299 | 299 | return SysInfoRes(value=value, state=state) |
|
300 | 300 | |
|
301 | 301 | # load averages |
|
302 | 302 | if hasattr(psutil.os, 'getloadavg'): |
|
303 | 303 | value.update(dict( |
|
304 | 304 | zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg()))) |
|
305 | 305 | |
|
306 | 306 | human_value = value.copy() |
|
307 | 307 | human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format( |
|
308 | 308 | value['1_min'], value['5_min'], value['15_min']) |
|
309 | 309 | |
|
310 | 310 | if state['type'] == STATE_OK and value['15_min'] > 5: |
|
311 | 311 | msg = 'Warning: your machine load is very high.' |
|
312 | 312 | state = {'message': msg, 'type': STATE_WARN} |
|
313 | 313 | |
|
314 | 314 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
315 | 315 | |
|
316 | 316 | |
|
317 | 317 | @register_sysinfo |
|
318 | 318 | def cpu(): |
|
319 | 319 | value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []} |
|
320 | 320 | state = STATE_OK_DEFAULT |
|
321 | 321 | |
|
322 | 322 | if not psutil: |
|
323 | 323 | return SysInfoRes(value=value, state=state) |
|
324 | 324 | |
|
325 | 325 | value['cpu'] = psutil.cpu_percent(0.5) |
|
326 | 326 | value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True) |
|
327 | 327 | value['cpu_count'] = psutil.cpu_count() |
|
328 | 328 | |
|
329 | 329 | human_value = value.copy() |
|
330 | 330 | human_value['text'] = '{} cores at {} %'.format( |
|
331 | 331 | value['cpu_count'], value['cpu']) |
|
332 | 332 | |
|
333 | 333 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
334 | 334 | |
|
335 | 335 | |
|
336 | 336 | @register_sysinfo |
|
337 | 337 | def storage(): |
|
338 | 338 | from rhodecode.lib.helpers import format_byte_size_binary |
|
339 | 339 | from rhodecode.model.settings import VcsSettingsModel |
|
340 | 340 | path = VcsSettingsModel().get_repos_location() |
|
341 | 341 | |
|
342 | 342 | value = dict(percent=0, used=0, total=0, path=path, text='') |
|
343 | 343 | state = STATE_OK_DEFAULT |
|
344 | 344 | if not psutil: |
|
345 | 345 | return SysInfoRes(value=value, state=state) |
|
346 | 346 | |
|
347 | 347 | try: |
|
348 | 348 | value.update(dict(psutil.disk_usage(path)._asdict())) |
|
349 | 349 | except Exception as e: |
|
350 | 350 | log.exception('Failed to fetch disk info') |
|
351 | 351 | state = {'message': str(e), 'type': STATE_ERR} |
|
352 | 352 | |
|
353 | 353 | human_value = value.copy() |
|
354 | 354 | human_value['used'] = format_byte_size_binary(value['used']) |
|
355 | 355 | human_value['total'] = format_byte_size_binary(value['total']) |
|
356 | 356 | human_value['text'] = "{}/{}, {}% used".format( |
|
357 | 357 | format_byte_size_binary(value['used']), |
|
358 | 358 | format_byte_size_binary(value['total']), |
|
359 | 359 | value['percent']) |
|
360 | 360 | |
|
361 | 361 | if state['type'] == STATE_OK and value['percent'] > 90: |
|
362 | 362 | msg = 'Critical: your disk space is very low.' |
|
363 | 363 | state = {'message': msg, 'type': STATE_ERR} |
|
364 | 364 | |
|
365 | 365 | elif state['type'] == STATE_OK and value['percent'] > 70: |
|
366 | 366 | msg = 'Warning: your disk space is running low.' |
|
367 | 367 | state = {'message': msg, 'type': STATE_WARN} |
|
368 | 368 | |
|
369 | 369 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
370 | 370 | |
|
371 | 371 | |
|
372 | 372 | @register_sysinfo |
|
373 | 373 | def storage_inodes(): |
|
374 | 374 | from rhodecode.model.settings import VcsSettingsModel |
|
375 | 375 | path = VcsSettingsModel().get_repos_location() |
|
376 | 376 | |
|
377 | 377 | value = dict(percent=0, free=0, used=0, total=0, path=path, text='') |
|
378 | 378 | state = STATE_OK_DEFAULT |
|
379 | 379 | if not psutil: |
|
380 | 380 | return SysInfoRes(value=value, state=state) |
|
381 | 381 | |
|
382 | 382 | try: |
|
383 | 383 | i_stat = os.statvfs(path) |
|
384 | 384 | value['free'] = i_stat.f_ffree |
|
385 | 385 | value['used'] = i_stat.f_files-i_stat.f_favail |
|
386 | 386 | value['total'] = i_stat.f_files |
|
387 | 387 | value['percent'] = percentage(value['used'], value['total']) |
|
388 | 388 | except Exception as e: |
|
389 | 389 | log.exception('Failed to fetch disk inodes info') |
|
390 | 390 | state = {'message': str(e), 'type': STATE_ERR} |
|
391 | 391 | |
|
392 | 392 | human_value = value.copy() |
|
393 | 393 | human_value['text'] = "{}/{}, {}% used".format( |
|
394 | 394 | value['used'], value['total'], value['percent']) |
|
395 | 395 | |
|
396 | 396 | if state['type'] == STATE_OK and value['percent'] > 90: |
|
397 | 397 | msg = 'Critical: your disk free inodes are very low.' |
|
398 | 398 | state = {'message': msg, 'type': STATE_ERR} |
|
399 | 399 | |
|
400 | 400 | elif state['type'] == STATE_OK and value['percent'] > 70: |
|
401 | 401 | msg = 'Warning: your disk free inodes are running low.' |
|
402 | 402 | state = {'message': msg, 'type': STATE_WARN} |
|
403 | 403 | |
|
404 | 404 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
405 | 405 | |
|
406 | 406 | |
|
407 | 407 | @register_sysinfo |
|
408 | 408 | def storage_archives(): |
|
409 | 409 | import rhodecode |
|
410 | 410 | from rhodecode.lib.utils import safe_str |
|
411 | 411 | from rhodecode.lib.helpers import format_byte_size_binary |
|
412 | 412 | |
|
413 | 413 | msg = 'Enable this by setting ' \ |
|
414 | 414 | 'archive_cache_dir=/path/to/cache option in the .ini file' |
|
415 | 415 | path = safe_str(rhodecode.CONFIG.get('archive_cache_dir', msg)) |
|
416 | 416 | |
|
417 | 417 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') |
|
418 | 418 | state = STATE_OK_DEFAULT |
|
419 | 419 | try: |
|
420 | 420 | items_count = 0 |
|
421 | 421 | used = 0 |
|
422 | 422 | for root, dirs, files in os.walk(path): |
|
423 | 423 | if root == path: |
|
424 | 424 | items_count = len(files) |
|
425 | 425 | |
|
426 | 426 | for f in files: |
|
427 | 427 | try: |
|
428 | 428 | used += os.path.getsize(os.path.join(root, f)) |
|
429 | 429 | except OSError: |
|
430 | 430 | pass |
|
431 | 431 | value.update({ |
|
432 | 432 | 'percent': 100, |
|
433 | 433 | 'used': used, |
|
434 | 434 | 'total': used, |
|
435 | 435 | 'items': items_count |
|
436 | 436 | }) |
|
437 | 437 | |
|
438 | 438 | except Exception as e: |
|
439 | 439 | log.exception('failed to fetch archive cache storage') |
|
440 | 440 | state = {'message': str(e), 'type': STATE_ERR} |
|
441 | 441 | |
|
442 | 442 | human_value = value.copy() |
|
443 | 443 | human_value['used'] = format_byte_size_binary(value['used']) |
|
444 | 444 | human_value['total'] = format_byte_size_binary(value['total']) |
|
445 | 445 | human_value['text'] = "{} ({} items)".format( |
|
446 | 446 | human_value['used'], value['items']) |
|
447 | 447 | |
|
448 | 448 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
449 | 449 | |
|
450 | 450 | |
|
451 | 451 | @register_sysinfo |
|
452 | 452 | def storage_gist(): |
|
453 | 453 | from rhodecode.model.gist import GIST_STORE_LOC |
|
454 | 454 | from rhodecode.model.settings import VcsSettingsModel |
|
455 | 455 | from rhodecode.lib.utils import safe_str |
|
456 | 456 | from rhodecode.lib.helpers import format_byte_size_binary |
|
457 | 457 | path = safe_str(os.path.join( |
|
458 | 458 | VcsSettingsModel().get_repos_location(), GIST_STORE_LOC)) |
|
459 | 459 | |
|
460 | 460 | # gist storage |
|
461 | 461 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') |
|
462 | 462 | state = STATE_OK_DEFAULT |
|
463 | 463 | |
|
464 | 464 | try: |
|
465 | 465 | items_count = 0 |
|
466 | 466 | used = 0 |
|
467 | 467 | for root, dirs, files in os.walk(path): |
|
468 | 468 | if root == path: |
|
469 | 469 | items_count = len(dirs) |
|
470 | 470 | |
|
471 | 471 | for f in files: |
|
472 | 472 | try: |
|
473 | 473 | used += os.path.getsize(os.path.join(root, f)) |
|
474 | 474 | except OSError: |
|
475 | 475 | pass |
|
476 | 476 | value.update({ |
|
477 | 477 | 'percent': 100, |
|
478 | 478 | 'used': used, |
|
479 | 479 | 'total': used, |
|
480 | 480 | 'items': items_count |
|
481 | 481 | }) |
|
482 | 482 | except Exception as e: |
|
483 | 483 | log.exception('failed to fetch gist storage items') |
|
484 | 484 | state = {'message': str(e), 'type': STATE_ERR} |
|
485 | 485 | |
|
486 | 486 | human_value = value.copy() |
|
487 | 487 | human_value['used'] = format_byte_size_binary(value['used']) |
|
488 | 488 | human_value['total'] = format_byte_size_binary(value['total']) |
|
489 | 489 | human_value['text'] = "{} ({} items)".format( |
|
490 | 490 | human_value['used'], value['items']) |
|
491 | 491 | |
|
492 | 492 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
493 | 493 | |
|
494 | 494 | |
|
495 | 495 | @register_sysinfo |
|
496 | 496 | def storage_temp(): |
|
497 | 497 | import tempfile |
|
498 | 498 | from rhodecode.lib.helpers import format_byte_size_binary |
|
499 | 499 | |
|
500 | 500 | path = tempfile.gettempdir() |
|
501 | 501 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') |
|
502 | 502 | state = STATE_OK_DEFAULT |
|
503 | 503 | |
|
504 | 504 | if not psutil: |
|
505 | 505 | return SysInfoRes(value=value, state=state) |
|
506 | 506 | |
|
507 | 507 | try: |
|
508 | 508 | value.update(dict(psutil.disk_usage(path)._asdict())) |
|
509 | 509 | except Exception as e: |
|
510 | 510 | log.exception('Failed to fetch temp dir info') |
|
511 | 511 | state = {'message': str(e), 'type': STATE_ERR} |
|
512 | 512 | |
|
513 | 513 | human_value = value.copy() |
|
514 | 514 | human_value['used'] = format_byte_size_binary(value['used']) |
|
515 | 515 | human_value['total'] = format_byte_size_binary(value['total']) |
|
516 | 516 | human_value['text'] = "{}/{}, {}% used".format( |
|
517 | 517 | format_byte_size_binary(value['used']), |
|
518 | 518 | format_byte_size_binary(value['total']), |
|
519 | 519 | value['percent']) |
|
520 | 520 | |
|
521 | 521 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
522 | 522 | |
|
523 | 523 | |
|
524 | 524 | @register_sysinfo |
|
525 | 525 | def search_info(): |
|
526 | 526 | import rhodecode |
|
527 | 527 | from rhodecode.lib.index import searcher_from_config |
|
528 | 528 | |
|
529 | 529 | backend = rhodecode.CONFIG.get('search.module', '') |
|
530 | 530 | location = rhodecode.CONFIG.get('search.location', '') |
|
531 | 531 | |
|
532 | 532 | try: |
|
533 | 533 | searcher = searcher_from_config(rhodecode.CONFIG) |
|
534 | 534 | searcher = searcher.__class__.__name__ |
|
535 | 535 | except Exception: |
|
536 | 536 | searcher = None |
|
537 | 537 | |
|
538 | 538 | value = dict( |
|
539 | 539 | backend=backend, searcher=searcher, location=location, text='') |
|
540 | 540 | state = STATE_OK_DEFAULT |
|
541 | 541 | |
|
542 | 542 | human_value = value.copy() |
|
543 | 543 | human_value['text'] = "backend:`{}`".format(human_value['backend']) |
|
544 | 544 | |
|
545 | 545 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
546 | 546 | |
|
547 | 547 | |
|
548 | 548 | @register_sysinfo |
|
549 | 549 | def git_info(): |
|
550 | 550 | from rhodecode.lib.vcs.backends import git |
|
551 | 551 | state = STATE_OK_DEFAULT |
|
552 | 552 | value = human_value = '' |
|
553 | 553 | try: |
|
554 | 554 | value = git.discover_git_version(raise_on_exc=True) |
|
555 | 555 | human_value = 'version reported from VCSServer: {}'.format(value) |
|
556 | 556 | except Exception as e: |
|
557 | 557 | state = {'message': str(e), 'type': STATE_ERR} |
|
558 | 558 | |
|
559 | 559 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
560 | 560 | |
|
561 | 561 | |
|
562 | 562 | @register_sysinfo |
|
563 | 563 | def hg_info(): |
|
564 | 564 | from rhodecode.lib.vcs.backends import hg |
|
565 | 565 | state = STATE_OK_DEFAULT |
|
566 | 566 | value = human_value = '' |
|
567 | 567 | try: |
|
568 | 568 | value = hg.discover_hg_version(raise_on_exc=True) |
|
569 | 569 | human_value = 'version reported from VCSServer: {}'.format(value) |
|
570 | 570 | except Exception as e: |
|
571 | 571 | state = {'message': str(e), 'type': STATE_ERR} |
|
572 | 572 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
573 | 573 | |
|
574 | 574 | |
|
575 | 575 | @register_sysinfo |
|
576 | 576 | def svn_info(): |
|
577 | 577 | from rhodecode.lib.vcs.backends import svn |
|
578 | 578 | state = STATE_OK_DEFAULT |
|
579 | 579 | value = human_value = '' |
|
580 | 580 | try: |
|
581 | 581 | value = svn.discover_svn_version(raise_on_exc=True) |
|
582 | 582 | human_value = 'version reported from VCSServer: {}'.format(value) |
|
583 | 583 | except Exception as e: |
|
584 | 584 | state = {'message': str(e), 'type': STATE_ERR} |
|
585 | 585 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
586 | 586 | |
|
587 | 587 | |
|
588 | 588 | @register_sysinfo |
|
589 | 589 | def vcs_backends(): |
|
590 | 590 | import rhodecode |
|
591 | 591 | value = rhodecode.CONFIG.get('vcs.backends') |
|
592 | 592 | human_value = 'Enabled backends in order: {}'.format(','.join(value)) |
|
593 | 593 | return SysInfoRes(value=value, human_value=human_value) |
|
594 | 594 | |
|
595 | 595 | |
|
596 | 596 | @register_sysinfo |
|
597 | 597 | def vcs_server(): |
|
598 | 598 | import rhodecode |
|
599 | 599 | from rhodecode.lib.vcs.backends import get_vcsserver_service_data |
|
600 | 600 | |
|
601 | 601 | server_url = rhodecode.CONFIG.get('vcs.server') |
|
602 | 602 | enabled = rhodecode.CONFIG.get('vcs.server.enable') |
|
603 | 603 | protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http' |
|
604 | 604 | state = STATE_OK_DEFAULT |
|
605 | 605 | version = None |
|
606 | 606 | workers = 0 |
|
607 | 607 | |
|
608 | 608 | try: |
|
609 | 609 | data = get_vcsserver_service_data() |
|
610 | 610 | if data and 'version' in data: |
|
611 | 611 | version = data['version'] |
|
612 | 612 | |
|
613 | 613 | if data and 'config' in data: |
|
614 | 614 | conf = data['config'] |
|
615 | 615 | workers = conf.get('workers', 'NOT AVAILABLE') |
|
616 | 616 | |
|
617 | 617 | connection = 'connected' |
|
618 | 618 | except Exception as e: |
|
619 | 619 | connection = 'failed' |
|
620 | 620 | state = {'message': str(e), 'type': STATE_ERR} |
|
621 | 621 | |
|
622 | 622 | value = dict( |
|
623 | 623 | url=server_url, |
|
624 | 624 | enabled=enabled, |
|
625 | 625 | protocol=protocol, |
|
626 | 626 | connection=connection, |
|
627 | 627 | version=version, |
|
628 | 628 | text='', |
|
629 | 629 | ) |
|
630 | 630 | |
|
631 | 631 | human_value = value.copy() |
|
632 | 632 | human_value['text'] = \ |
|
633 | 633 | '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format( |
|
634 | 634 | url=server_url, ver=version, workers=workers, mode=protocol, |
|
635 | 635 | conn=connection) |
|
636 | 636 | |
|
637 | 637 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
638 | 638 | |
|
639 | 639 | |
|
640 | 640 | @register_sysinfo |
|
641 | 641 | def vcs_server_config(): |
|
642 | 642 | from rhodecode.lib.vcs.backends import get_vcsserver_service_data |
|
643 | 643 | state = STATE_OK_DEFAULT |
|
644 | 644 | |
|
645 | 645 | value = {} |
|
646 | 646 | try: |
|
647 | 647 | data = get_vcsserver_service_data() |
|
648 | 648 | value = data['app_config'] |
|
649 | 649 | except Exception as e: |
|
650 | 650 | state = {'message': str(e), 'type': STATE_ERR} |
|
651 | 651 | |
|
652 | 652 | human_value = value.copy() |
|
653 | 653 | human_value['text'] = 'VCS Server config' |
|
654 | 654 | |
|
655 | 655 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
656 | 656 | |
|
657 | 657 | |
|
658 | 658 | @register_sysinfo |
|
659 | 659 | def rhodecode_app_info(): |
|
660 | 660 | import rhodecode |
|
661 | 661 | edition = rhodecode.CONFIG.get('rhodecode.edition') |
|
662 | 662 | |
|
663 | 663 | value = dict( |
|
664 | 664 | rhodecode_version=rhodecode.__version__, |
|
665 | 665 | rhodecode_lib_path=os.path.abspath(rhodecode.__file__), |
|
666 | 666 | text='' |
|
667 | 667 | ) |
|
668 | 668 | human_value = value.copy() |
|
669 | 669 | human_value['text'] = 'RhodeCode {edition}, version {ver}'.format( |
|
670 | 670 | edition=edition, ver=value['rhodecode_version'] |
|
671 | 671 | ) |
|
672 | 672 | return SysInfoRes(value=value, human_value=human_value) |
|
673 | 673 | |
|
674 | 674 | |
|
675 | 675 | @register_sysinfo |
|
676 | 676 | def rhodecode_config(): |
|
677 | 677 | import rhodecode |
|
678 | 678 | path = rhodecode.CONFIG.get('__file__') |
|
679 | 679 | rhodecode_ini_safe = rhodecode.CONFIG.copy() |
|
680 | 680 | cert_path = get_cert_path(path) |
|
681 | 681 | |
|
682 | 682 | try: |
|
683 | 683 | config = configparser.ConfigParser() |
|
684 | 684 | config.read(path) |
|
685 | 685 | parsed_ini = config |
|
686 | 686 | if parsed_ini.has_section('server:main'): |
|
687 | 687 | parsed_ini = dict(parsed_ini.items('server:main')) |
|
688 | 688 | except Exception: |
|
689 | 689 | log.exception('Failed to read .ini file for display') |
|
690 | 690 | parsed_ini = {} |
|
691 | 691 | |
|
692 | 692 | rhodecode_ini_safe['server:main'] = parsed_ini |
|
693 | 693 | |
|
694 | 694 | blacklist = [ |
|
695 | 695 | 'rhodecode_license_key', |
|
696 | 696 | 'routes.map', |
|
697 | 697 | 'sqlalchemy.db1.url', |
|
698 | 698 | 'channelstream.secret', |
|
699 | 699 | 'beaker.session.secret', |
|
700 | 700 | 'rhodecode.encrypted_values.secret', |
|
701 | 701 | 'rhodecode_auth_github_consumer_key', |
|
702 | 702 | 'rhodecode_auth_github_consumer_secret', |
|
703 | 703 | 'rhodecode_auth_google_consumer_key', |
|
704 | 704 | 'rhodecode_auth_google_consumer_secret', |
|
705 | 705 | 'rhodecode_auth_bitbucket_consumer_secret', |
|
706 | 706 | 'rhodecode_auth_bitbucket_consumer_key', |
|
707 | 707 | 'rhodecode_auth_twitter_consumer_secret', |
|
708 | 708 | 'rhodecode_auth_twitter_consumer_key', |
|
709 | 709 | |
|
710 | 710 | 'rhodecode_auth_twitter_secret', |
|
711 | 711 | 'rhodecode_auth_github_secret', |
|
712 | 712 | 'rhodecode_auth_google_secret', |
|
713 | 713 | 'rhodecode_auth_bitbucket_secret', |
|
714 | 714 | |
|
715 | 715 | 'appenlight.api_key', |
|
716 | 716 | ('app_conf', 'sqlalchemy.db1.url') |
|
717 | 717 | ] |
|
718 | 718 | for k in blacklist: |
|
719 | 719 | if isinstance(k, tuple): |
|
720 | 720 | section, key = k |
|
721 | 721 | if section in rhodecode_ini_safe: |
|
722 | 722 | rhodecode_ini_safe[section] = '**OBFUSCATED**' |
|
723 | 723 | else: |
|
724 | 724 | rhodecode_ini_safe.pop(k, None) |
|
725 | 725 | |
|
726 | 726 | # TODO: maybe put some CONFIG checks here ? |
|
727 | 727 | return SysInfoRes(value={'config': rhodecode_ini_safe, |
|
728 | 728 | 'path': path, 'cert_path': cert_path}) |
|
729 | 729 | |
|
730 | 730 | |
|
731 | 731 | @register_sysinfo |
|
732 | 732 | def database_info(): |
|
733 | 733 | import rhodecode |
|
734 | 734 | from sqlalchemy.engine import url as engine_url |
|
735 | 735 | from rhodecode.model.meta import Base as sql_base, Session |
|
736 | 736 | from rhodecode.model.db import DbMigrateVersion |
|
737 | 737 | |
|
738 | 738 | state = STATE_OK_DEFAULT |
|
739 | 739 | |
|
740 | 740 | db_migrate = DbMigrateVersion.query().filter( |
|
741 | 741 | DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one() |
|
742 | 742 | |
|
743 | 743 | db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url']) |
|
744 | 744 | |
|
745 | 745 | try: |
|
746 | 746 | engine = sql_base.metadata.bind |
|
747 | 747 | db_server_info = engine.dialect._get_server_version_info( |
|
748 | 748 | Session.connection(bind=engine)) |
|
749 | 749 | db_version = '.'.join(map(str, db_server_info)) |
|
750 | 750 | except Exception: |
|
751 | 751 | log.exception('failed to fetch db version') |
|
752 | 752 | db_version = 'UNKNOWN' |
|
753 | 753 | |
|
754 | 754 | db_info = dict( |
|
755 | 755 | migrate_version=db_migrate.version, |
|
756 | 756 | type=db_url_obj.get_backend_name(), |
|
757 | 757 | version=db_version, |
|
758 | 758 | url=repr(db_url_obj) |
|
759 | 759 | ) |
|
760 | 760 | current_version = db_migrate.version |
|
761 | 761 | expected_version = rhodecode.__dbversion__ |
|
762 | 762 | if state['type'] == STATE_OK and current_version != expected_version: |
|
763 | 763 | msg = 'Critical: database schema mismatch, ' \ |
|
764 | 764 | 'expected version {}, got {}. ' \ |
|
765 | 765 | 'Please run migrations on your database.'.format( |
|
766 | 766 | expected_version, current_version) |
|
767 | 767 | state = {'message': msg, 'type': STATE_ERR} |
|
768 | 768 | |
|
769 | 769 | human_value = db_info.copy() |
|
770 | 770 | human_value['url'] = "{} @ migration version: {}".format( |
|
771 | 771 | db_info['url'], db_info['migrate_version']) |
|
772 | 772 | human_value['version'] = "{} {}".format(db_info['type'], db_info['version']) |
|
773 | 773 | return SysInfoRes(value=db_info, state=state, human_value=human_value) |
|
774 | 774 | |
|
775 | 775 | |
|
776 | 776 | @register_sysinfo |
|
777 | 777 | def server_info(environ): |
|
778 | 778 | import rhodecode |
|
779 | 779 | from rhodecode.lib.base import get_server_ip_addr, get_server_port |
|
780 | 780 | |
|
781 | 781 | value = { |
|
782 | 782 | 'server_ip': '%s:%s' % ( |
|
783 | 783 | get_server_ip_addr(environ, log_errors=False), |
|
784 | 784 | get_server_port(environ) |
|
785 | 785 | ), |
|
786 | 786 | 'server_id': rhodecode.CONFIG.get('instance_id'), |
|
787 | 787 | } |
|
788 | 788 | return SysInfoRes(value=value) |
|
789 | 789 | |
|
790 | 790 | |
|
791 | 791 | @register_sysinfo |
|
792 | 792 | def usage_info(): |
|
793 | 793 | from rhodecode.model.db import User, Repository |
|
794 | 794 | value = { |
|
795 | 795 | 'users': User.query().count(), |
|
796 | 796 | 'users_active': User.query().filter(User.active == True).count(), |
|
797 | 797 | 'repositories': Repository.query().count(), |
|
798 | 798 | 'repository_types': { |
|
799 | 799 | 'hg': Repository.query().filter( |
|
800 | 800 | Repository.repo_type == 'hg').count(), |
|
801 | 801 | 'git': Repository.query().filter( |
|
802 | 802 | Repository.repo_type == 'git').count(), |
|
803 | 803 | 'svn': Repository.query().filter( |
|
804 | 804 | Repository.repo_type == 'svn').count(), |
|
805 | 805 | }, |
|
806 | 806 | } |
|
807 | 807 | return SysInfoRes(value=value) |
|
808 | 808 | |
|
809 | 809 | |
|
810 | 810 | def get_system_info(environ): |
|
811 | 811 | environ = environ or {} |
|
812 | 812 | return { |
|
813 | 813 | 'rhodecode_app': SysInfo(rhodecode_app_info)(), |
|
814 | 814 | 'rhodecode_config': SysInfo(rhodecode_config)(), |
|
815 | 815 | 'rhodecode_usage': SysInfo(usage_info)(), |
|
816 | 816 | 'python': SysInfo(python_info)(), |
|
817 | 817 | 'py_modules': SysInfo(py_modules)(), |
|
818 | 818 | |
|
819 | 819 | 'platform': SysInfo(platform_type)(), |
|
820 | 820 | 'locale': SysInfo(locale_info)(), |
|
821 | 821 | 'server': SysInfo(server_info, environ=environ)(), |
|
822 | 822 | 'database': SysInfo(database_info)(), |
|
823 | 823 | 'ulimit': SysInfo(ulimit_info)(), |
|
824 | 824 | 'storage': SysInfo(storage)(), |
|
825 | 825 | 'storage_inodes': SysInfo(storage_inodes)(), |
|
826 | 826 | 'storage_archive': SysInfo(storage_archives)(), |
|
827 | 827 | 'storage_gist': SysInfo(storage_gist)(), |
|
828 | 828 | 'storage_temp': SysInfo(storage_temp)(), |
|
829 | 829 | |
|
830 | 830 | 'search': SysInfo(search_info)(), |
|
831 | 831 | |
|
832 | 832 | 'uptime': SysInfo(uptime)(), |
|
833 | 833 | 'load': SysInfo(machine_load)(), |
|
834 | 834 | 'cpu': SysInfo(cpu)(), |
|
835 | 835 | 'memory': SysInfo(memory)(), |
|
836 | 836 | |
|
837 | 837 | 'vcs_backends': SysInfo(vcs_backends)(), |
|
838 | 838 | 'vcs_server': SysInfo(vcs_server)(), |
|
839 | 839 | |
|
840 | 840 | 'vcs_server_config': SysInfo(vcs_server_config)(), |
|
841 | 841 | |
|
842 | 842 | 'git': SysInfo(git_info)(), |
|
843 | 843 | 'hg': SysInfo(hg_info)(), |
|
844 | 844 | 'svn': SysInfo(svn_info)(), |
|
845 | 845 | } |
|
846 | 846 | |
|
847 | 847 | |
|
848 | 848 | def load_system_info(key): |
|
849 | 849 | """ |
|
850 | 850 | load_system_info('vcs_server')

851 | 851 | load_system_info('database')
|
852 | 852 | """ |
|
853 | 853 | return SysInfo(registered_helpers[key])() |
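 | | # Rough usage sketch: load_system_info('database') looks up the registered

 | | # helper by key and returns a fresh SysInfoRes, as in the examples above.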
@@ -1,496 +1,496 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | GIT commit module |
|
23 | 23 | """ |
|
24 | 24 | |
|
25 | 25 | import re |
|
26 | 26 | import stat |
|
27 | import configparser | |
|
27 | 28 | from itertools import chain |
|
28 | 29 | from io import StringIO |
|
29 | 30 | |
|
30 | 31 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
31 | 32 | |
|
32 | 33 | from rhodecode.lib.datelib import utcdate_fromtimestamp |
|
33 | 34 | from rhodecode.lib.utils import safe_unicode, safe_str |
|
34 | 35 | from rhodecode.lib.utils2 import safe_int |
|
35 | 36 | from rhodecode.lib.vcs.conf import settings |
|
36 | 37 | from rhodecode.lib.vcs.backends import base |
|
37 | 38 | from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError |
|
38 | 39 | from rhodecode.lib.vcs.nodes import ( |
|
39 | 40 | FileNode, DirNode, NodeKind, RootNode, SubModuleNode, |
|
40 | 41 | ChangedFileNodesGenerator, AddedFileNodesGenerator, |
|
41 | 42 | RemovedFileNodesGenerator, LargeFileNode) |
|
42 | from rhodecode.lib.vcs.compat import configparser | |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | class GitCommit(base.BaseCommit): |
|
46 | 46 | """ |
|
47 | 47 | Represents state of the repository at single commit id. |
|
48 | 48 | """ |
|
49 | 49 | |
|
50 | 50 | _filter_pre_load = [ |
|
51 | 51 | # done through a more complex tree walk on parents |
|
52 | 52 | "affected_files", |
|
53 | 53 | # done through subprocess not remote call |
|
54 | 54 | "children", |
|
55 | 55 | # done through a more complex tree walk on parents |
|
56 | 56 | "status", |
|
57 | 57 | # mercurial specific property not supported here |
|
58 | 58 | "_file_paths", |
|
59 | 59 | # mercurial specific property not supported here |
|
60 | 60 | 'obsolete', |
|
61 | 61 | # mercurial specific property not supported here |
|
62 | 62 | 'phase', |
|
63 | 63 | # mercurial specific property not supported here |
|
64 | 64 | 'hidden' |
|
65 | 65 | ] |
|
66 | 66 | |
|
67 | 67 | def __init__(self, repository, raw_id, idx, pre_load=None): |
|
68 | 68 | self.repository = repository |
|
69 | 69 | self._remote = repository._remote |
|
70 | 70 | # TODO: johbo: Tweak of raw_id should not be necessary |
|
71 | 71 | self.raw_id = safe_str(raw_id) |
|
72 | 72 | self.idx = idx |
|
73 | 73 | |
|
74 | 74 | self._set_bulk_properties(pre_load) |
|
75 | 75 | |
|
76 | 76 | # caches |
|
77 | 77 | self._stat_modes = {} # stat info for paths |
|
78 | 78 | self._paths = {} # path processed with parse_tree |
|
79 | 79 | self.nodes = {} |
|
80 | 80 | self._submodules = None |
|
81 | 81 | |
|
82 | 82 | def _set_bulk_properties(self, pre_load): |
|
83 | 83 | |
|
84 | 84 | if not pre_load: |
|
85 | 85 | return |
|
86 | 86 | pre_load = [entry for entry in pre_load |
|
87 | 87 | if entry not in self._filter_pre_load] |
|
88 | 88 | if not pre_load: |
|
89 | 89 | return |
|
90 | 90 | |
|
91 | 91 | result = self._remote.bulk_request(self.raw_id, pre_load) |
|
92 | 92 | for attr, value in result.items(): |
|
93 | 93 | if attr in ["author", "message"]: |
|
94 | 94 | if value: |
|
95 | 95 | value = safe_unicode(value) |
|
96 | 96 | elif attr == "date": |
|
97 | 97 | value = utcdate_fromtimestamp(*value) |
|
98 | 98 | elif attr == "parents": |
|
99 | 99 | value = self._make_commits(value) |
|
100 | 100 | elif attr == "branch": |
|
101 | 101 | value = self._set_branch(value) |
|
102 | 102 | self.__dict__[attr] = value |
|
103 | 103 | |
|
104 | 104 | @LazyProperty |
|
105 | 105 | def _commit(self): |
|
106 | 106 | return self._remote[self.raw_id] |
|
107 | 107 | |
|
108 | 108 | @LazyProperty |
|
109 | 109 | def _tree_id(self): |
|
110 | 110 | return self._remote[self._commit['tree']]['id'] |
|
111 | 111 | |
|
112 | 112 | @LazyProperty |
|
113 | 113 | def id(self): |
|
114 | 114 | return self.raw_id |
|
115 | 115 | |
|
116 | 116 | @LazyProperty |
|
117 | 117 | def short_id(self): |
|
118 | 118 | return self.raw_id[:12] |
|
119 | 119 | |
|
120 | 120 | @LazyProperty |
|
121 | 121 | def message(self): |
|
122 | 122 | return safe_unicode(self._remote.message(self.id)) |
|
123 | 123 | |
|
124 | 124 | @LazyProperty |
|
125 | 125 | def committer(self): |
|
126 | 126 | return safe_unicode(self._remote.author(self.id)) |
|
127 | 127 | |
|
128 | 128 | @LazyProperty |
|
129 | 129 | def author(self): |
|
130 | 130 | return safe_unicode(self._remote.author(self.id)) |
|
131 | 131 | |
|
132 | 132 | @LazyProperty |
|
133 | 133 | def date(self): |
|
134 | 134 | unix_ts, tz = self._remote.date(self.raw_id) |
|
135 | 135 | return utcdate_fromtimestamp(unix_ts, tz) |
|
136 | 136 | |
|
137 | 137 | @LazyProperty |
|
138 | 138 | def status(self): |
|
139 | 139 | """ |
|
140 | 140 | Returns modified, added and removed files for the current commit
|
141 | 141 | """ |
|
142 | 142 | return self.changed, self.added, self.removed |
|
143 | 143 | |
|
144 | 144 | @LazyProperty |
|
145 | 145 | def tags(self): |
|
146 | 146 | tags = [safe_unicode(name) for name, |
|
147 | 147 | commit_id in self.repository.tags.iteritems() |
|
148 | 148 | if commit_id == self.raw_id] |
|
149 | 149 | return tags |
|
150 | 150 | |
|
151 | 151 | @LazyProperty |
|
152 | 152 | def commit_branches(self): |
|
153 | 153 | branches = [] |
|
154 | 154 | for name, commit_id in self.repository.branches.iteritems(): |
|
155 | 155 | if commit_id == self.raw_id: |
|
156 | 156 | branches.append(name) |
|
157 | 157 | return branches |
|
158 | 158 | |
|
159 | 159 | def _set_branch(self, branches): |
|
160 | 160 | if branches: |
|
161 | 161 | # actually commit can have multiple branches in git |
|
162 | 162 | return safe_unicode(branches[0]) |
|
163 | 163 | |
|
164 | 164 | @LazyProperty |
|
165 | 165 | def branch(self): |
|
166 | 166 | branches = self._remote.branch(self.raw_id) |
|
167 | 167 | return self._set_branch(branches) |
|
168 | 168 | |
|
169 | 169 | def _get_tree_id_for_path(self, path): |
|
170 | 170 | path = safe_str(path) |
|
171 | 171 | if path in self._paths: |
|
172 | 172 | return self._paths[path] |
|
173 | 173 | |
|
174 | 174 | tree_id = self._tree_id |
|
175 | 175 | |
|
176 | 176 | path = path.strip('/') |
|
177 | 177 | if path == '': |
|
178 | 178 | data = [tree_id, "tree"] |
|
179 | 179 | self._paths[''] = data |
|
180 | 180 | return data |
|
181 | 181 | |
|
182 | 182 | tree_id, tree_type, tree_mode = \ |
|
183 | 183 | self._remote.tree_and_type_for_path(self.raw_id, path) |
|
184 | 184 | if tree_id is None: |
|
185 | 185 | raise self.no_node_at_path(path) |
|
186 | 186 | |
|
187 | 187 | self._paths[path] = [tree_id, tree_type] |
|
188 | 188 | self._stat_modes[path] = tree_mode |
|
189 | 189 | |
|
190 | 190 | if path not in self._paths: |
|
191 | 191 | raise self.no_node_at_path(path) |
|
192 | 192 | |
|
193 | 193 | return self._paths[path] |
|
194 | 194 | |
|
195 | 195 | def _get_kind(self, path): |
|
196 | 196 | tree_id, type_ = self._get_tree_id_for_path(path) |
|
197 | 197 | if type_ == 'blob': |
|
198 | 198 | return NodeKind.FILE |
|
199 | 199 | elif type_ == 'tree': |
|
200 | 200 | return NodeKind.DIR |
|
201 | 201 | elif type_ == 'link': |
|
202 | 202 | return NodeKind.SUBMODULE |
|
203 | 203 | return None |
|
204 | 204 | |
|
205 | 205 | def _get_filectx(self, path): |
|
206 | 206 | path = self._fix_path(path) |
|
207 | 207 | if self._get_kind(path) != NodeKind.FILE: |
|
208 | 208 | raise CommitError( |
|
209 | 209 | "File does not exist for commit %s at '%s'" % (self.raw_id, path)) |
|
210 | 210 | return path |
|
211 | 211 | |
|
212 | 212 | def _get_file_nodes(self): |
|
213 | 213 | return chain(*(t[2] for t in self.walk())) |
|
214 | 214 | |
|
215 | 215 | @LazyProperty |
|
216 | 216 | def parents(self): |
|
217 | 217 | """ |
|
218 | 218 | Returns list of parent commits. |
|
219 | 219 | """ |
|
220 | 220 | parent_ids = self._remote.parents(self.id) |
|
221 | 221 | return self._make_commits(parent_ids) |
|
222 | 222 | |
|
223 | 223 | @LazyProperty |
|
224 | 224 | def children(self): |
|
225 | 225 | """ |
|
226 | 226 | Returns list of child commits. |
|
227 | 227 | """ |
|
228 | 228 | |
|
229 | 229 | children = self._remote.children(self.raw_id) |
|
230 | 230 | return self._make_commits(children) |
|
231 | 231 | |
|
232 | 232 | def _make_commits(self, commit_ids): |
|
233 | 233 | def commit_maker(_commit_id): |
|
234 | 234 | return self.repository.get_commit(commit_id=commit_id) |
|
235 | 235 | |
|
236 | 236 | return [commit_maker(commit_id) for commit_id in commit_ids] |
|
237 | 237 | |
|
238 | 238 | def get_file_mode(self, path): |
|
239 | 239 | """ |
|
240 | 240 | Returns stat mode of the file at the given `path`. |
|
241 | 241 | """ |
|
242 | 242 | path = safe_str(path) |
|
243 | 243 | # ensure path is traversed |
|
244 | 244 | self._get_tree_id_for_path(path) |
|
245 | 245 | return self._stat_modes[path] |
|
246 | 246 | |
|
247 | 247 | def is_link(self, path): |
|
248 | 248 | return stat.S_ISLNK(self.get_file_mode(path)) |
|
249 | 249 | |
|
250 | 250 | def is_node_binary(self, path): |
|
251 | 251 | tree_id, _ = self._get_tree_id_for_path(path) |
|
252 | 252 | return self._remote.is_binary(tree_id) |
|
253 | 253 | |
|
254 | 254 | def get_file_content(self, path): |
|
255 | 255 | """ |
|
256 | 256 | Returns content of the file at given `path`. |
|
257 | 257 | """ |
|
258 | 258 | tree_id, _ = self._get_tree_id_for_path(path) |
|
259 | 259 | return self._remote.blob_as_pretty_string(tree_id) |
|
260 | 260 | |
|
261 | 261 | def get_file_content_streamed(self, path): |
|
262 | 262 | tree_id, _ = self._get_tree_id_for_path(path) |
|
263 | 263 | stream_method = getattr(self._remote, 'stream:blob_as_pretty_string') |
|
264 | 264 | return stream_method(tree_id) |
|
265 | 265 | |
|
266 | 266 | def get_file_size(self, path): |
|
267 | 267 | """ |
|
268 | 268 | Returns size of the file at given `path`. |
|
269 | 269 | """ |
|
270 | 270 | tree_id, _ = self._get_tree_id_for_path(path) |
|
271 | 271 | return self._remote.blob_raw_length(tree_id) |
|
272 | 272 | |
|
273 | 273 | def get_path_history(self, path, limit=None, pre_load=None): |
|
274 | 274 | """ |
|
275 | 275 | Returns history of file as reversed list of `GitCommit` objects for |
|
276 | 276 | which file at given `path` has been modified. |
|
277 | 277 | """ |
|
278 | 278 | |
|
279 | 279 | path = self._get_filectx(path) |
|
280 | 280 | hist = self._remote.node_history(self.raw_id, path, limit) |
|
281 | 281 | return [ |
|
282 | 282 | self.repository.get_commit(commit_id=commit_id, pre_load=pre_load) |
|
283 | 283 | for commit_id in hist] |
|
284 | 284 | |
|
285 | 285 | def get_file_annotate(self, path, pre_load=None): |
|
286 | 286 | """ |
|
287 | 287 | Returns a generator of four-element tuples with
|
288 | 288 | lineno, commit_id, commit lazy loader and line |
|
289 | 289 | """ |
|
290 | 290 | |
|
291 | 291 | result = self._remote.node_annotate(self.raw_id, path) |
|
292 | 292 | |
|
293 | 293 | for ln_no, commit_id, content in result: |
|
294 | 294 | yield ( |
|
295 | 295 | ln_no, commit_id, |
|
296 | 296 | lambda commit_id=commit_id: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
|
297 | 297 | content) |
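 | | # Hedged usage sketch (the path is illustrative):

 | | #   for line_no, sha, loader, line in commit.get_file_annotate('README.rst'):

 | | #       blame_commit = loader()  # lazily resolves the full commit object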
|
298 | 298 | |
|
299 | 299 | def get_nodes(self, path): |
|
300 | 300 | |
|
301 | 301 | if self._get_kind(path) != NodeKind.DIR: |
|
302 | 302 | raise CommitError( |
|
303 | 303 | "Directory does not exist for commit %s at '%s'" % (self.raw_id, path)) |
|
304 | 304 | path = self._fix_path(path) |
|
305 | 305 | |
|
306 | 306 | tree_id, _ = self._get_tree_id_for_path(path) |
|
307 | 307 | |
|
308 | 308 | dirnodes = [] |
|
309 | 309 | filenodes = [] |
|
310 | 310 | |
|
311 | 311 | # extracted tree ID gives us our files... |
|
312 | 312 | bytes_path = safe_str(path) # libgit operates on bytes |
|
313 | 313 | for name, stat_, id_, type_ in self._remote.tree_items(tree_id): |
|
314 | 314 | if type_ == 'link': |
|
315 | 315 | url = self._get_submodule_url('/'.join((bytes_path, name))) |
|
316 | 316 | dirnodes.append(SubModuleNode( |
|
317 | 317 | name, url=url, commit=id_, alias=self.repository.alias)) |
|
318 | 318 | continue |
|
319 | 319 | |
|
320 | 320 | if bytes_path != '': |
|
321 | 321 | obj_path = '/'.join((bytes_path, name)) |
|
322 | 322 | else: |
|
323 | 323 | obj_path = name |
|
324 | 324 | if obj_path not in self._stat_modes: |
|
325 | 325 | self._stat_modes[obj_path] = stat_ |
|
326 | 326 | |
|
327 | 327 | if type_ == 'tree': |
|
328 | 328 | dirnodes.append(DirNode(obj_path, commit=self)) |
|
329 | 329 | elif type_ == 'blob': |
|
330 | 330 | filenodes.append(FileNode(obj_path, commit=self, mode=stat_)) |
|
331 | 331 | else: |
|
332 | 332 | raise CommitError( |
|
333 | 333 | "Requested object should be Tree or Blob, is %s", type_) |
|
334 | 334 | |
|
335 | 335 | nodes = dirnodes + filenodes |
|
336 | 336 | for node in nodes: |
|
337 | 337 | if node.path not in self.nodes: |
|
338 | 338 | self.nodes[node.path] = node |
|
339 | 339 | nodes.sort() |
|
340 | 340 | return nodes |
|
341 | 341 | |
|
342 | 342 | def get_node(self, path, pre_load=None): |
|
343 | 343 | if isinstance(path, bytes):

344 | 344 | path = path.decode('utf-8')
|
345 | 345 | path = self._fix_path(path) |
|
346 | 346 | if path not in self.nodes: |
|
347 | 347 | try: |
|
348 | 348 | tree_id, type_ = self._get_tree_id_for_path(path) |
|
349 | 349 | except CommitError: |
|
350 | 350 | raise NodeDoesNotExistError( |
|
351 | 351 | "Cannot find one of parents' directories for a given " |
|
352 | 352 | "path: %s" % path) |
|
353 | 353 | |
|
354 | 354 | if type_ in ['link', 'commit']: |
|
355 | 355 | url = self._get_submodule_url(path) |
|
356 | 356 | node = SubModuleNode(path, url=url, commit=tree_id, |
|
357 | 357 | alias=self.repository.alias) |
|
358 | 358 | elif type_ == 'tree': |
|
359 | 359 | if path == '': |
|
360 | 360 | node = RootNode(commit=self) |
|
361 | 361 | else: |
|
362 | 362 | node = DirNode(path, commit=self) |
|
363 | 363 | elif type_ == 'blob': |
|
364 | 364 | node = FileNode(path, commit=self, pre_load=pre_load) |
|
365 | 365 | self._stat_modes[path] = node.mode |
|
366 | 366 | else: |
|
367 | 367 | raise self.no_node_at_path(path) |
|
368 | 368 | |
|
369 | 369 | # cache node |
|
370 | 370 | self.nodes[path] = node |
|
371 | 371 | |
|
372 | 372 | return self.nodes[path] |
|
373 | 373 | |
|
374 | 374 | def get_largefile_node(self, path): |
|
375 | 375 | tree_id, _ = self._get_tree_id_for_path(path) |
|
376 | 376 | pointer_spec = self._remote.is_large_file(tree_id) |
|
377 | 377 | |
|
378 | 378 | if pointer_spec: |
|
379 | 379 | # the content of that regular FileNode is the hash of the largefile
|
380 | 380 | file_id = pointer_spec.get('oid_hash') |
|
381 | 381 | if self._remote.in_largefiles_store(file_id): |
|
382 | 382 | lf_path = self._remote.store_path(file_id) |
|
383 | 383 | return LargeFileNode(lf_path, commit=self, org_path=path) |
|
384 | 384 | |
|
385 | 385 | @LazyProperty |
|
386 | 386 | def affected_files(self): |
|
387 | 387 | """ |
|
388 | 388 | Gets fast-accessible file changes for the given commit
|
389 | 389 | """ |
|
390 | 390 | added, modified, deleted = self._changes_cache |
|
391 | 391 | return list(added.union(modified).union(deleted)) |
|
392 | 392 | |
|
393 | 393 | @LazyProperty |
|
394 | 394 | def _changes_cache(self): |
|
395 | 395 | added = set() |
|
396 | 396 | modified = set() |
|
397 | 397 | deleted = set() |
|
398 | 398 | _r = self._remote |
|
399 | 399 | |
|
400 | 400 | parents = self.parents |
|
401 | 401 | if not self.parents: |
|
402 | 402 | parents = [base.EmptyCommit()] |
|
403 | 403 | for parent in parents: |
|
404 | 404 | if isinstance(parent, base.EmptyCommit): |
|
405 | 405 | oid = None |
|
406 | 406 | else: |
|
407 | 407 | oid = parent.raw_id |
|
408 | 408 | changes = _r.tree_changes(oid, self.raw_id) |
|
409 | 409 | for (oldpath, newpath), (_, _), (_, _) in changes: |
|
410 | 410 | if newpath and oldpath: |
|
411 | 411 | modified.add(newpath) |
|
412 | 412 | elif newpath and not oldpath: |
|
413 | 413 | added.add(newpath) |
|
414 | 414 | elif not newpath and oldpath: |
|
415 | 415 | deleted.add(oldpath) |
|
416 | 416 | return added, modified, deleted |
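 | | # tree_changes presumably yields dulwich-style triples of

 | | # ((old_path, new_path), (old_mode, new_mode), (old_sha, new_sha)); a missing

 | | # old or new path marks an add or delete, both present means modified.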
|
417 | 417 | |
|
418 | 418 | def _get_paths_for_status(self, status): |
|
419 | 419 | """ |
|
420 | 420 | Returns sorted list of paths for given ``status``. |
|
421 | 421 | |
|
422 | 422 | :param status: one of: *added*, *modified* or *deleted* |
|
423 | 423 | """ |
|
424 | 424 | added, modified, deleted = self._changes_cache |
|
425 | 425 | return sorted({ |
|
426 | 426 | 'added': list(added), |
|
427 | 427 | 'modified': list(modified), |
|
428 | 428 | 'deleted': list(deleted)}[status] |
|
429 | 429 | ) |
|
430 | 430 | |
|
431 | 431 | @LazyProperty |
|
432 | 432 | def added(self): |
|
433 | 433 | """ |
|
434 | 434 | Returns list of added ``FileNode`` objects. |
|
435 | 435 | """ |
|
436 | 436 | if not self.parents: |
|
437 | 437 | return list(self._get_file_nodes()) |
|
438 | 438 | return AddedFileNodesGenerator(self.added_paths, self) |
|
439 | 439 | |
|
440 | 440 | @LazyProperty |
|
441 | 441 | def added_paths(self): |
|
442 | 442 | return [n for n in self._get_paths_for_status('added')] |
|
443 | 443 | |
|
444 | 444 | @LazyProperty |
|
445 | 445 | def changed(self): |
|
446 | 446 | """ |
|
447 | 447 | Returns list of modified ``FileNode`` objects. |
|
448 | 448 | """ |
|
449 | 449 | if not self.parents: |
|
450 | 450 | return [] |
|
451 | 451 | return ChangedFileNodesGenerator(self.changed_paths, self) |
|
452 | 452 | |
|
453 | 453 | @LazyProperty |
|
454 | 454 | def changed_paths(self): |
|
455 | 455 | return [n for n in self._get_paths_for_status('modified')] |
|
456 | 456 | |
|
457 | 457 | @LazyProperty |
|
458 | 458 | def removed(self): |
|
459 | 459 | """ |
|
460 | 460 | Returns list of removed ``FileNode`` objects. |
|
461 | 461 | """ |
|
462 | 462 | if not self.parents: |
|
463 | 463 | return [] |
|
464 | 464 | return RemovedFileNodesGenerator(self.removed_paths, self) |
|
465 | 465 | |
|
466 | 466 | @LazyProperty |
|
467 | 467 | def removed_paths(self): |
|
468 | 468 | return [n for n in self._get_paths_for_status('deleted')] |
|
469 | 469 | |
|
470 | 470 | def _get_submodule_url(self, submodule_path): |
|
471 | 471 | git_modules_path = '.gitmodules' |
|
472 | 472 | |
|
473 | 473 | if self._submodules is None: |
|
474 | 474 | self._submodules = {} |
|
475 | 475 | |
|
476 | 476 | try: |
|
477 | 477 | submodules_node = self.get_node(git_modules_path) |
|
478 | 478 | except NodeDoesNotExistError: |
|
479 | 479 | return None |
|
480 | 480 | |
|
481 | 481 | # ConfigParser fails if there is leading whitespace, and it needs an iterable,

482 | 482 | # file-like content
|
483 | 483 | def iter_content(_content): |
|
484 | 484 | for line in _content.splitlines(): |
|
485 | 485 | yield line |
|
486 | 486 | |
|
487 | 487 | parser = configparser.RawConfigParser() |
|
488 | 488 | parser.read_file(iter_content(submodules_node.content)) |
|
489 | 489 | |
|
490 | 490 | for section in parser.sections(): |
|
491 | 491 | path = parser.get(section, 'path') |
|
492 | 492 | url = parser.get(section, 'url') |
|
493 | 493 | if path and url: |
|
494 | 494 | self._submodules[path.strip('/')] = url |
|
495 | 495 | |
|
496 | 496 | return self._submodules.get(submodule_path.strip('/')) |
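 | | # Illustrative .gitmodules content this parser consumes:

 | | #   [submodule "libs/foo"]

 | | #       path = libs/foo

 | | #       url = https://example.com/foo.git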
@@ -1,1012 +1,1012 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2014-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | """ |
|
22 | 22 | HG repository module |
|
23 | 23 | """ |
|
24 | 24 | import os |
|
25 | 25 | import logging |
|
26 | 26 | import binascii |
|
27 | import configparser | |
|
27 | 28 | import urllib.request, urllib.parse, urllib.error |
|
28 | 29 | |
|
29 | 30 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
30 | 31 | |
|
31 | 32 | from rhodecode.lib.compat import OrderedDict |
|
32 | 33 | from rhodecode.lib.datelib import ( |
|
33 | 34 | date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate) |
|
34 | 35 | from rhodecode.lib.utils import safe_unicode, safe_str |
|
35 | 36 | from rhodecode.lib.utils2 import CachedProperty |
|
36 | 37 | from rhodecode.lib.vcs import connection, exceptions |
|
37 | 38 | from rhodecode.lib.vcs.backends.base import ( |
|
38 | 39 | BaseRepository, CollectionGenerator, Config, MergeResponse, |
|
39 | 40 | MergeFailureReason, Reference, BasePathPermissionChecker) |
|
40 | 41 | from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit |
|
41 | 42 | from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff |
|
42 | 43 | from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit |
|
43 | 44 | from rhodecode.lib.vcs.exceptions import ( |
|
44 | 45 | EmptyRepositoryError, RepositoryError, TagAlreadyExistError, |
|
45 | 46 | TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo) |
|
46 | from rhodecode.lib.vcs.compat import configparser | |
|
47 | 47 | |
|
48 | 48 | hexlify = binascii.hexlify |
|
49 | 49 | nullid = "\0" * 20 |
|
50 | 50 | |
|
51 | 51 | log = logging.getLogger(__name__) |
|
52 | 52 | |
|
53 | 53 | |
|
54 | 54 | class MercurialRepository(BaseRepository): |
|
55 | 55 | """ |
|
56 | 56 | Mercurial repository backend |
|
57 | 57 | """ |
|
58 | 58 | DEFAULT_BRANCH_NAME = 'default' |
|
59 | 59 | |
|
60 | 60 | def __init__(self, repo_path, config=None, create=False, src_url=None, |
|
61 | 61 | do_workspace_checkout=False, with_wire=None, bare=False): |
|
62 | 62 | """ |
|
63 | 63 | Raises RepositoryError if the repository could not be found at the given
|
64 | 64 | ``repo_path``. |
|
65 | 65 | |
|
66 | 66 | :param repo_path: local path of the repository |
|
67 | 67 | :param config: config object containing the repo configuration |
|
68 | 68 | :param create=False: if set to True, would try to create repository if |
|
69 | 69 | it does not exist rather than raising exception |
|
70 | 70 | :param src_url=None: would try to clone repository from given location |
|
71 | 71 | :param do_workspace_checkout=False: sets update of working copy after |
|
72 | 72 | making a clone |
|
73 | 73 | :param bare: not used, compatible with other VCS |
|
74 | 74 | """ |
|
75 | 75 | |
|
76 | 76 | self.path = safe_str(os.path.abspath(repo_path)) |
|
77 | 77 | # mercurial since 4.4.X requires certain configuration to be present |
|
78 | 78 | # because sometimes we init the repos with config we need to meet |
|
79 | 79 | # special requirements |
|
80 | 80 | self.config = config if config else self.get_default_config( |
|
81 | 81 | default=[('extensions', 'largefiles', '1')]) |
|
82 | 82 | self.with_wire = with_wire or {"cache": False} # default should not use cache |
|
83 | 83 | |
|
84 | 84 | self._init_repo(create, src_url, do_workspace_checkout) |
|
85 | 85 | |
|
86 | 86 | # caches |
|
87 | 87 | self._commit_ids = {} |
|
88 | 88 | |
|
89 | 89 | @LazyProperty |
|
90 | 90 | def _remote(self): |
|
91 | 91 | repo_id = self.path |
|
92 | 92 | return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire) |
|
93 | 93 | |
|
94 | 94 | @CachedProperty |
|
95 | 95 | def commit_ids(self): |
|
96 | 96 | """ |
|
97 | 97 | Returns list of commit ids, in ascending order. Being lazy |
|
98 | 98 | attribute allows external tools to inject shas from cache. |
|
99 | 99 | """ |
|
100 | 100 | commit_ids = self._get_all_commit_ids() |
|
101 | 101 | self._rebuild_cache(commit_ids) |
|
102 | 102 | return commit_ids |
|
103 | 103 | |
|
104 | 104 | def _rebuild_cache(self, commit_ids): |
|
105 | 105 | self._commit_ids = dict((commit_id, index) |
|
106 | 106 | for index, commit_id in enumerate(commit_ids)) |
|
107 | 107 | |
|
108 | 108 | @CachedProperty |
|
109 | 109 | def branches(self): |
|
110 | 110 | return self._get_branches() |
|
111 | 111 | |
|
112 | 112 | @CachedProperty |
|
113 | 113 | def branches_closed(self): |
|
114 | 114 | return self._get_branches(active=False, closed=True) |
|
115 | 115 | |
|
116 | 116 | @CachedProperty |
|
117 | 117 | def branches_all(self): |
|
118 | 118 | all_branches = {} |
|
119 | 119 | all_branches.update(self.branches) |
|
120 | 120 | all_branches.update(self.branches_closed) |
|
121 | 121 | return all_branches |
|
122 | 122 | |
|
123 | 123 | def _get_branches(self, active=True, closed=False): |
|
124 | 124 | """ |
|
125 | 125 | Gets branches for this repository |
|
126 | 126 | Returns only active, non-closed branches by default
|
127 | 127 | |
|
128 | 128 | :param active: return also active branches |
|
129 | 129 | :param closed: return also closed branches |
|
130 | 130 | |
|
131 | 131 | """ |
|
132 | 132 | if self.is_empty(): |
|
133 | 133 | return {} |
|
134 | 134 | |
|
135 | 135 | def get_name(ctx): |
|
136 | 136 | return ctx[0] |
|
137 | 137 | |
|
138 | 138 | _branches = [(safe_unicode(n), hexlify(h),) for n, h in |
|
139 | 139 | self._remote.branches(active, closed).items()] |
|
140 | 140 | |
|
141 | 141 | return OrderedDict(sorted(_branches, key=get_name, reverse=False)) |
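 | | # The result maps branch name -> hex commit id, e.g. (values are made up):

 | | #   OrderedDict([(u'default', '1d0f3a...'), (u'stable', '9c2e4b...')])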
|
142 | 142 | |
|
143 | 143 | @CachedProperty |
|
144 | 144 | def tags(self): |
|
145 | 145 | """ |
|
146 | 146 | Gets tags for this repository |
|
147 | 147 | """ |
|
148 | 148 | return self._get_tags() |
|
149 | 149 | |
|
150 | 150 | def _get_tags(self): |
|
151 | 151 | if self.is_empty(): |
|
152 | 152 | return {} |
|
153 | 153 | |
|
154 | 154 | def get_name(ctx): |
|
155 | 155 | return ctx[0] |
|
156 | 156 | |
|
157 | 157 | _tags = [(safe_unicode(n), hexlify(h),) for n, h in |
|
158 | 158 | self._remote.tags().items()] |
|
159 | 159 | |
|
160 | 160 | return OrderedDict(sorted(_tags, key=get_name, reverse=True)) |
|
161 | 161 | |
|
162 | 162 | def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs): |
|
163 | 163 | """ |
|
164 | 164 | Creates and returns a tag for the given ``commit_id``. |
|
165 | 165 | |
|
166 | 166 | :param name: name for new tag |
|
167 | 167 | :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
|
168 | 168 | :param commit_id: commit id for which new tag would be created |
|
169 | 169 | :param message: message of the tag's commit |
|
170 | 170 | :param date: date of tag's commit |
|
171 | 171 | |
|
172 | 172 | :raises TagAlreadyExistError: if tag with same name already exists |
|
173 | 173 | """ |
|
174 | 174 | if name in self.tags: |
|
175 | 175 | raise TagAlreadyExistError("Tag %s already exists" % name) |
|
176 | 176 | |
|
177 | 177 | commit = self.get_commit(commit_id=commit_id) |
|
178 | 178 | local = kwargs.setdefault('local', False) |
|
179 | 179 | |
|
180 | 180 | if message is None: |
|
181 | 181 | message = "Added tag %s for commit %s" % (name, commit.short_id) |
|
182 | 182 | |
|
183 | 183 | date, tz = date_to_timestamp_plus_offset(date) |
|
184 | 184 | |
|
185 | 185 | self._remote.tag(name, commit.raw_id, message, local, user, date, tz) |
|
186 | 186 | self._remote.invalidate_vcs_cache() |
|
187 | 187 | |
|
188 | 188 | # Reinitialize tags |
|
189 | 189 | self._invalidate_prop_cache('tags') |
|
190 | 190 | tag_id = self.tags[name] |
|
191 | 191 | |
|
192 | 192 | return self.get_commit(commit_id=tag_id) |
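 | | # Hedged usage sketch (name, user and commit id are illustrative):

 | | #   commit = repo.tag('v1.0.0', 'Joe Doe <joe.doe@example.com>',

 | | #                     commit_id='deadbeef' * 5, message='Release 1.0.0')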
|
193 | 193 | |
|
194 | 194 | def remove_tag(self, name, user, message=None, date=None): |
|
195 | 195 | """ |
|
196 | 196 | Removes tag with the given `name`. |
|
197 | 197 | |
|
198 | 198 | :param name: name of the tag to be removed |
|
199 | 199 | :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
|
200 | 200 | :param message: message of the tag's removal commit |
|
201 | 201 | :param date: date of tag's removal commit |
|
202 | 202 | |
|
203 | 203 | :raises TagDoesNotExistError: if tag with given name does not exist
|
204 | 204 | """ |
|
205 | 205 | if name not in self.tags: |
|
206 | 206 | raise TagDoesNotExistError("Tag %s does not exist" % name) |
|
207 | 207 | |
|
208 | 208 | if message is None: |
|
209 | 209 | message = "Removed tag %s" % name |
|
210 | 210 | local = False |
|
211 | 211 | |
|
212 | 212 | date, tz = date_to_timestamp_plus_offset(date) |
|
213 | 213 | |
|
214 | 214 | self._remote.tag(name, nullid, message, local, user, date, tz) |
|
215 | 215 | self._remote.invalidate_vcs_cache() |
|
216 | 216 | self._invalidate_prop_cache('tags') |
|
217 | 217 | |
|
218 | 218 | @LazyProperty |
|
219 | 219 | def bookmarks(self): |
|
220 | 220 | """ |
|
221 | 221 | Gets bookmarks for this repository |
|
222 | 222 | """ |
|
223 | 223 | return self._get_bookmarks() |
|
224 | 224 | |
|
225 | 225 | def _get_bookmarks(self): |
|
226 | 226 | if self.is_empty(): |
|
227 | 227 | return {} |
|
228 | 228 | |
|
229 | 229 | def get_name(ctx): |
|
230 | 230 | return ctx[0] |
|
231 | 231 | |
|
232 | 232 | _bookmarks = [ |
|
233 | 233 | (safe_unicode(n), hexlify(h)) for n, h in |
|
234 | 234 | self._remote.bookmarks().items()] |
|
235 | 235 | |
|
236 | 236 | return OrderedDict(sorted(_bookmarks, key=get_name)) |
|
237 | 237 | |
|
238 | 238 | def _get_all_commit_ids(self): |
|
239 | 239 | return self._remote.get_all_commit_ids('visible') |
|
240 | 240 | |
|
241 | 241 | def get_diff( |
|
242 | 242 | self, commit1, commit2, path='', ignore_whitespace=False, |
|
243 | 243 | context=3, path1=None): |
|
244 | 244 | """ |
|
245 | 245 | Returns (git like) *diff*, as plain text. Shows changes introduced by |
|
246 | 246 | `commit2` since `commit1`. |
|
247 | 247 | |
|
248 | 248 | :param commit1: Entry point from which diff is shown. Can be |
|
249 | 249 | ``self.EMPTY_COMMIT`` - in this case, patch showing all |
|
250 | 250 | the changes since empty state of the repository until `commit2` |
|
251 | 251 | :param commit2: Until which commit changes should be shown. |
|
252 | 252 | :param ignore_whitespace: If set to ``True``, would not show whitespace |
|
253 | 253 | changes. Defaults to ``False``. |
|
254 | 254 | :param context: How many lines before/after changed lines should be |
|
255 | 255 | shown. Defaults to ``3``. |
|
256 | 256 | """ |
|
257 | 257 | self._validate_diff_commits(commit1, commit2) |
|
258 | 258 | if path1 is not None and path1 != path: |
|
259 | 259 | raise ValueError("Diff of two different paths not supported.") |
|
260 | 260 | |
|
261 | 261 | if path: |
|
262 | 262 | file_filter = [self.path, path] |
|
263 | 263 | else: |
|
264 | 264 | file_filter = None |
|
265 | 265 | |
|
266 | 266 | diff = self._remote.diff( |
|
267 | 267 | commit1.raw_id, commit2.raw_id, file_filter=file_filter, |
|
268 | 268 | opt_git=True, opt_ignorews=ignore_whitespace, |
|
269 | 269 | context=context) |
|
270 | 270 | return MercurialDiff(diff) |
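 | | # Rough usage sketch (the commit objects are illustrative):

 | | #   diff = repo.get_diff(commit1, commit2, path='setup.py',

 | | #                        ignore_whitespace=True, context=5)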
|
271 | 271 | |
|
272 | 272 | def strip(self, commit_id, branch=None): |
|
273 | 273 | self._remote.strip(commit_id, update=False, backup="none") |
|
274 | 274 | |
|
275 | 275 | self._remote.invalidate_vcs_cache() |
|
276 | 276 | # clear cache |
|
277 | 277 | self._invalidate_prop_cache('commit_ids') |
|
278 | 278 | |
|
279 | 279 | return len(self.commit_ids) |
|
280 | 280 | |
|
281 | 281 | def verify(self): |
|
282 | 282 | verify = self._remote.verify() |
|
283 | 283 | |
|
284 | 284 | self._remote.invalidate_vcs_cache() |
|
285 | 285 | return verify |
|
286 | 286 | |
|
287 | 287 | def hg_update_cache(self): |
|
288 | 288 | update_cache = self._remote.hg_update_cache() |
|
289 | 289 | |
|
290 | 290 | self._remote.invalidate_vcs_cache() |
|
291 | 291 | return update_cache |
|
292 | 292 | |
|
293 | 293 | def hg_rebuild_fn_cache(self): |
|
294 | 294 | update_cache = self._remote.hg_rebuild_fn_cache() |
|
295 | 295 | |
|
296 | 296 | self._remote.invalidate_vcs_cache() |
|
297 | 297 | return update_cache |
|
298 | 298 | |
|
299 | 299 | def get_common_ancestor(self, commit_id1, commit_id2, repo2): |
|
300 | 300 | log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s', |
|
301 | 301 | self, commit_id1, repo2, commit_id2) |
|
302 | 302 | |
|
303 | 303 | if commit_id1 == commit_id2: |
|
304 | 304 | return commit_id1 |
|
305 | 305 | |
|
306 | 306 | ancestors = self._remote.revs_from_revspec( |
|
307 | 307 | "ancestor(id(%s), id(%s))", commit_id1, commit_id2, |
|
308 | 308 | other_path=repo2.path) |
|
309 | 309 | |
|
310 | 310 | ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None |
|
311 | 311 | |
|
312 | 312 | log.debug('Found common ancestor with sha: %s', ancestor_id) |
|
313 | 313 | return ancestor_id |
|
314 | 314 | |
|
315 | 315 | def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None): |
|
316 | 316 | if commit_id1 == commit_id2: |
|
317 | 317 | commits = [] |
|
318 | 318 | else: |
|
319 | 319 | if merge: |
|
320 | 320 | indexes = self._remote.revs_from_revspec( |
|
321 | 321 | "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)", |
|
322 | 322 | commit_id2, commit_id1, commit_id1, other_path=repo2.path) |
|
323 | 323 | else: |
|
324 | 324 | indexes = self._remote.revs_from_revspec( |
|
325 | 325 | "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2, |
|
326 | 326 | commit_id1, other_path=repo2.path) |
|
327 | 327 | |
|
328 | 328 | commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load) |
|
329 | 329 | for idx in indexes] |
|
330 | 330 | |
|
331 | 331 | return commits |
|
332 | 332 | |
|
333 | 333 | @staticmethod |
|
334 | 334 | def check_url(url, config): |
|
335 | 335 | """ |
|
336 | 336 | Function will check the given url and try to verify that it is a valid

337 | 337 | link. Sometimes it may happen that mercurial will issue a basic

338 | 338 | auth request that can cause the whole API to hang when used from python

339 | 339 | or other external calls.

340 | 340 | 

341 | 341 | On failure it raises urllib.error.HTTPError; an exception is also raised

342 | 342 | when the return code is not 200
|
343 | 343 | """ |
|
344 | 344 | # check first if it's not a local url
|
345 | 345 | if os.path.isdir(url) or url.startswith('file:'): |
|
346 | 346 | return True |
|
347 | 347 | |
|
348 | 348 | # Request the _remote to verify the url |
|
349 | 349 | return connection.Hg.check_url(url, config.serialize()) |
|
350 | 350 | |
|
351 | 351 | @staticmethod |
|
352 | 352 | def is_valid_repository(path): |
|
353 | 353 | return os.path.isdir(os.path.join(path, '.hg')) |
|
354 | 354 | |
|
355 | 355 | def _init_repo(self, create, src_url=None, do_workspace_checkout=False): |
|
356 | 356 | """ |
|
357 | 357 | Function will check for mercurial repository in given path. If there |
|
358 | 358 | is no repository in that path it will raise an exception unless |
|
359 | 359 | `create` parameter is set to True - in that case repository would |
|
360 | 360 | be created. |
|
361 | 361 | |
|
362 | 362 | If `src_url` is given, it would try to clone the repository from the

363 | 363 | location at the given clone_point. Additionally it will update the

364 | 364 | working copy according to the `do_workspace_checkout` flag.
|
365 | 365 | """ |
|
366 | 366 | if create and os.path.exists(self.path): |
|
367 | 367 | raise RepositoryError( |
|
368 | 368 | "Cannot create repository at %s, location already exist" |
|
369 | 369 | % self.path) |
|
370 | 370 | |
|
371 | 371 | if src_url: |
|
372 | 372 | url = str(self._get_url(src_url)) |
|
373 | 373 | MercurialRepository.check_url(url, self.config) |
|
374 | 374 | |
|
375 | 375 | self._remote.clone(url, self.path, do_workspace_checkout) |
|
376 | 376 | |
|
377 | 377 | # Don't try to create if we've already cloned repo |
|
378 | 378 | create = False |
|
379 | 379 | |
|
380 | 380 | if create: |
|
381 | 381 | os.makedirs(self.path, mode=0o755) |
|
382 | 382 | self._remote.localrepository(create) |
|
383 | 383 | |
|
384 | 384 | @LazyProperty |
|
385 | 385 | def in_memory_commit(self): |
|
386 | 386 | return MercurialInMemoryCommit(self) |
|
387 | 387 | |
|
388 | 388 | @LazyProperty |
|
389 | 389 | def description(self): |
|
390 | 390 | description = self._remote.get_config_value( |
|
391 | 391 | 'web', 'description', untrusted=True) |
|
392 | 392 | return safe_unicode(description or self.DEFAULT_DESCRIPTION) |
|
393 | 393 | |
|
394 | 394 | @LazyProperty |
|
395 | 395 | def contact(self): |
|
396 | 396 | contact = ( |
|
397 | 397 | self._remote.get_config_value("web", "contact") or |
|
398 | 398 | self._remote.get_config_value("ui", "username")) |
|
399 | 399 | return safe_unicode(contact or self.DEFAULT_CONTACT) |
|
400 | 400 | |
|
401 | 401 | @LazyProperty |
|
402 | 402 | def last_change(self): |
|
403 | 403 | """ |
|
404 | 404 | Returns last change made on this repository as |
|
405 | 405 | `datetime.datetime` object. |
|
406 | 406 | """ |
|
407 | 407 | try: |
|
408 | 408 | return self.get_commit().date |
|
409 | 409 | except RepositoryError: |
|
410 | 410 | tzoffset = makedate()[1] |
|
411 | 411 | return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset) |
|
412 | 412 | |
|
413 | 413 | def _get_fs_mtime(self): |
|
414 | 414 | # fallback to filesystem |
|
415 | 415 | cl_path = os.path.join(self.path, '.hg', "00changelog.i") |
|
416 | 416 | st_path = os.path.join(self.path, '.hg', "store") |
|
417 | 417 | if os.path.exists(cl_path): |
|
418 | 418 | return os.stat(cl_path).st_mtime |
|
419 | 419 | else: |
|
420 | 420 | return os.stat(st_path).st_mtime |
|
421 | 421 | |
|
422 | 422 | def _get_url(self, url): |
|
423 | 423 | """ |
|
424 | 424 | Returns a normalized url. If no scheme is given, it falls back

425 | 425 | to the filesystem

426 | 426 | (``file:///``) scheme.
|
427 | 427 | """ |
|
428 | 428 | url = safe_str(url)
|
429 | 429 | if url != 'default' and '://' not in url: |
|
430 | 430 | url = "file:" + urllib.request.pathname2url(url) |
|
431 | 431 | return url |
|
432 | 432 | |
|
433 | 433 | def get_hook_location(self): |
|
434 | 434 | """ |
|
435 | 435 | returns absolute path to location where hooks are stored |
|
436 | 436 | """ |
|
437 | 437 | return os.path.join(self.path, '.hg', '.hgrc') |
|
438 | 438 | |
|
439 | 439 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, |
|
440 | 440 | translate_tag=None, maybe_unreachable=False, reference_obj=None): |
|
441 | 441 | """ |
|
442 | 442 | Returns ``MercurialCommit`` object representing repository's |
|
443 | 443 | commit at the given `commit_id` or `commit_idx`. |
|
444 | 444 | """ |
|
445 | 445 | if self.is_empty(): |
|
446 | 446 | raise EmptyRepositoryError("There are no commits yet") |
|
447 | 447 | |
|
448 | 448 | if commit_id is not None: |
|
449 | 449 | self._validate_commit_id(commit_id) |
|
450 | 450 | try: |
|
451 | 451 | # we have cached idx, use it without contacting the remote |
|
452 | 452 | idx = self._commit_ids[commit_id] |
|
453 | 453 | return MercurialCommit(self, commit_id, idx, pre_load=pre_load) |
|
454 | 454 | except KeyError: |
|
455 | 455 | pass |
|
456 | 456 | |
|
457 | 457 | elif commit_idx is not None: |
|
458 | 458 | self._validate_commit_idx(commit_idx) |
|
459 | 459 | try: |
|
460 | 460 | _commit_id = self.commit_ids[commit_idx] |
|
461 | 461 | if commit_idx < 0: |
|
462 | 462 | commit_idx = self.commit_ids.index(_commit_id) |
|
463 | 463 | |
|
464 | 464 | return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load) |
|
465 | 465 | except IndexError: |
|
466 | 466 | commit_id = commit_idx |
|
467 | 467 | else: |
|
468 | 468 | commit_id = "tip" |
|
469 | 469 | |
|
470 | 470 | if isinstance(commit_id, str):
|
471 | 471 | commit_id = safe_str(commit_id) |
|
472 | 472 | |
|
473 | 473 | try: |
|
474 | 474 | raw_id, idx = self._remote.lookup(commit_id, both=True) |
|
475 | 475 | except CommitDoesNotExistError: |
|
476 | 476 | msg = "Commit {} does not exist for `{}`".format( |
|
477 | 477 | *map(safe_str, [commit_id, self.name])) |
|
478 | 478 | raise CommitDoesNotExistError(msg) |
|
479 | 479 | |
|
480 | 480 | return MercurialCommit(self, raw_id, idx, pre_load=pre_load) |
|
481 | 481 | |
|
482 | 482 | def get_commits( |
|
483 | 483 | self, start_id=None, end_id=None, start_date=None, end_date=None, |
|
484 | 484 | branch_name=None, show_hidden=False, pre_load=None, translate_tags=None): |
|
485 | 485 | """ |
|
486 | 486 | Returns generator of ``MercurialCommit`` objects from start to end |
|
487 | 487 | (both are inclusive) |
|
488 | 488 | |
|
489 | 489 | :param start_id: None, str(commit_id) |
|
490 | 490 | :param end_id: None, str(commit_id) |
|
491 | 491 | :param start_date: if specified, commits with commit date less than |
|
492 | 492 | ``start_date`` would be filtered out from returned set |
|
493 | 493 | :param end_date: if specified, commits with commit date greater than |
|
494 | 494 | ``end_date`` would be filtered out from returned set |
|
495 | 495 | :param branch_name: if specified, commits not reachable from given |
|
496 | 496 | branch would be filtered out from returned set |
|
497 | 497 | :param show_hidden: Show hidden commits, such as commits made obsolete

498 | 498 | or hidden by Mercurial evolve
|
499 | 499 | :raise BranchDoesNotExistError: If given ``branch_name`` does not |
|
500 | 500 | exist. |
|
501 | 501 | :raise CommitDoesNotExistError: If commit for given ``start`` or |
|
502 | 502 | ``end`` could not be found. |
|
503 | 503 | """ |
|
504 | 504 | # actually we should check now if it's not an empty repo |
|
505 | 505 | if self.is_empty(): |
|
506 | 506 | raise EmptyRepositoryError("There are no commits yet") |
|
507 | 507 | self._validate_branch_name(branch_name) |
|
508 | 508 | |
|
509 | 509 | branch_ancestors = False |
|
510 | 510 | if start_id is not None: |
|
511 | 511 | self._validate_commit_id(start_id) |
|
512 | 512 | c_start = self.get_commit(commit_id=start_id) |
|
513 | 513 | start_pos = self._commit_ids[c_start.raw_id] |
|
514 | 514 | else: |
|
515 | 515 | start_pos = None |
|
516 | 516 | |
|
517 | 517 | if end_id is not None: |
|
518 | 518 | self._validate_commit_id(end_id) |
|
519 | 519 | c_end = self.get_commit(commit_id=end_id) |
|
520 | 520 | end_pos = max(0, self._commit_ids[c_end.raw_id]) |
|
521 | 521 | else: |
|
522 | 522 | end_pos = None |
|
523 | 523 | |
|
524 | 524 | if None not in [start_id, end_id] and start_pos > end_pos: |
|
525 | 525 | raise RepositoryError( |
|
526 | 526 | "Start commit '%s' cannot be after end commit '%s'" % |
|
527 | 527 | (start_id, end_id)) |
|
528 | 528 | |
|
529 | 529 | if end_pos is not None: |
|
530 | 530 | end_pos += 1 |
|
531 | 531 | |
|
532 | 532 | commit_filter = [] |
|
533 | 533 | |
|
534 | 534 | if branch_name and not branch_ancestors: |
|
535 | 535 | commit_filter.append('branch("%s")' % (branch_name,)) |
|
536 | 536 | elif branch_name and branch_ancestors: |
|
537 | 537 | commit_filter.append('ancestors(branch("%s"))' % (branch_name,)) |
|
538 | 538 | |
|
539 | 539 | if start_date and not end_date: |
|
540 | 540 | commit_filter.append('date(">%s")' % (start_date,)) |
|
541 | 541 | if end_date and not start_date: |
|
542 | 542 | commit_filter.append('date("<%s")' % (end_date,)) |
|
543 | 543 | if start_date and end_date: |
|
544 | 544 | commit_filter.append( |
|
545 | 545 | 'date(">%s") and date("<%s")' % (start_date, end_date)) |
|
546 | 546 | |
|
547 | 547 | if not show_hidden: |
|
548 | 548 | commit_filter.append('not obsolete()') |
|
549 | 549 | commit_filter.append('not hidden()') |
|
550 | 550 | |
|
551 | 551 | # TODO: johbo: Figure out a simpler way for this solution |
|
552 | 552 | collection_generator = CollectionGenerator |
|
553 | 553 | if commit_filter: |
|
554 | 554 | commit_filter = ' and '.join(map(safe_str, commit_filter)) |
|
555 | 555 | revisions = self._remote.rev_range([commit_filter]) |
|
556 | 556 | collection_generator = MercurialIndexBasedCollectionGenerator |
|
557 | 557 | else: |
|
558 | 558 | revisions = self.commit_ids |
|
559 | 559 | |
|
560 | 560 | if start_pos or end_pos: |
|
561 | 561 | revisions = revisions[start_pos:end_pos] |
|
562 | 562 | |
|
563 | 563 | return collection_generator(self, revisions, pre_load=pre_load) |
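 | | # Illustrative revset built above for branch_name='default' with both dates

 | | # set and show_hidden=False:

 | | #   branch("default") and date(">2020-01-01") and date("<2020-12-31")

 | | #   and not obsolete() and not hidden()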
|
564 | 564 | |
|
565 | 565 | def pull(self, url, commit_ids=None): |
|
566 | 566 | """ |
|
567 | 567 | Pull changes from external location. |
|
568 | 568 | |
|
569 | 569 | :param commit_ids: Optional. Can be set to a list of commit ids |
|
570 | 570 | which shall be pulled from the other repository. |
|
571 | 571 | """ |
|
572 | 572 | url = self._get_url(url) |
|
573 | 573 | self._remote.pull(url, commit_ids=commit_ids) |
|
574 | 574 | self._remote.invalidate_vcs_cache() |
|
575 | 575 | |
|
576 | 576 | def fetch(self, url, commit_ids=None): |
|
577 | 577 | """ |
|
578 | 578 | Backward compatibility with GIT fetch==pull |
|
579 | 579 | """ |
|
580 | 580 | return self.pull(url, commit_ids=commit_ids) |
|
581 | 581 | |
|
582 | 582 | def push(self, url): |
|
583 | 583 | url = self._get_url(url) |
|
584 | 584 | self._remote.sync_push(url) |
|
585 | 585 | |
|
586 | 586 | def _local_clone(self, clone_path): |
|
587 | 587 | """ |
|
588 | 588 | Create a local clone of the current repo. |
|
589 | 589 | """ |
|
590 | 590 | self._remote.clone(self.path, clone_path, update_after_clone=True, |
|
591 | 591 | hooks=False) |
|
592 | 592 | |
|
593 | 593 | def _update(self, revision, clean=False): |
|
594 | 594 | """ |
|
595 | 595 | Update the working copy to the specified revision. |
|
596 | 596 | """ |
|
597 | 597 | log.debug('Doing checkout to commit: `%s` for %s', revision, self) |
|
598 | 598 | self._remote.update(revision, clean=clean) |
|
599 | 599 | |
|
600 | 600 | def _identify(self): |
|
601 | 601 | """ |
|
602 | 602 | Return the current state of the working directory. |
|
603 | 603 | """ |
|
604 | 604 | return self._remote.identify().strip().rstrip('+') |
|
605 | 605 | |
|
606 | 606 | def _heads(self, branch=None): |
|
607 | 607 | """ |
|
608 | 608 | Return the commit ids of the repository heads. |
|
609 | 609 | """ |
|
610 | 610 | return self._remote.heads(branch=branch).strip().split(' ') |
|
611 | 611 | |
|
612 | 612 | def _ancestor(self, revision1, revision2): |
|
613 | 613 | """ |
|
614 | 614 | Return the common ancestor of the two revisions. |
|
615 | 615 | """ |
|
616 | 616 | return self._remote.ancestor(revision1, revision2) |
|
617 | 617 | |
|
618 | 618 | def _local_push( |
|
619 | 619 | self, revision, repository_path, push_branches=False, |
|
620 | 620 | enable_hooks=False): |
|
621 | 621 | """ |
|
622 | 622 | Push the given revision to the specified repository. |
|
623 | 623 | |
|
624 | 624 | :param push_branches: allow to create branches in the target repo. |
|
625 | 625 | """ |
|
626 | 626 | self._remote.push( |
|
627 | 627 | [revision], repository_path, hooks=enable_hooks, |
|
628 | 628 | push_branches=push_branches) |
|
629 | 629 | |
|
630 | 630 | def _local_merge(self, target_ref, merge_message, user_name, user_email, |
|
631 | 631 | source_ref, use_rebase=False, close_commit_id=None, dry_run=False): |
|
632 | 632 | """ |
|
633 | 633 | Merge the given source_revision into the checked out revision. |
|
634 | 634 | |
|
635 | 635 | Returns the commit id of the merge and a boolean indicating if the |
|
636 | 636 | commit needs to be pushed. |
|
637 | 637 | """ |
|
638 | 638 | source_ref_commit_id = source_ref.commit_id |
|
639 | 639 | target_ref_commit_id = target_ref.commit_id |
|
640 | 640 | |
|
641 | 641 | # update our workdir to target ref, for proper merge |
|
642 | 642 | self._update(target_ref_commit_id, clean=True) |
|
643 | 643 | |
|
644 | 644 | ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id) |
|
645 | 645 | is_the_same_branch = self._is_the_same_branch(target_ref, source_ref) |
|
646 | 646 | |
|
647 | 647 | if close_commit_id: |
|
648 | 648 | # NOTE(marcink): if we get the close commit, this is our new source |
|
649 | 649 | # which will include the close commit itself. |
|
650 | 650 | source_ref_commit_id = close_commit_id |
|
651 | 651 | |
|
652 | 652 | if ancestor == source_ref_commit_id: |
|
653 | 653 | # Nothing to do, the changes were already integrated |
|
654 | 654 | return target_ref_commit_id, False |
|
655 | 655 | |
|
656 | 656 | elif ancestor == target_ref_commit_id and is_the_same_branch: |
|
657 | 657 | # In this case we should force a commit message |
|
658 | 658 | return source_ref_commit_id, True |
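 | | # Neither side already contains the other, so an actual rebase or merge

 | | # commit has to be produced below.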
|
659 | 659 | |
|
660 | 660 | unresolved = None |
|
661 | 661 | if use_rebase: |
|
662 | 662 | try: |
|
663 | 663 | bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id) |
|
664 | 664 | self.bookmark(bookmark_name, revision=source_ref.commit_id) |
|
665 | 665 | self._remote.rebase( |
|
666 | 666 | source=source_ref_commit_id, dest=target_ref_commit_id) |
|
667 | 667 | self._remote.invalidate_vcs_cache() |
|
668 | 668 | self._update(bookmark_name, clean=True) |
|
669 | 669 | return self._identify(), True |
|
670 | 670 | except RepositoryError as e: |
|
671 | 671 | # The rebase-abort may raise another exception which 'hides' |
|
672 | 672 | # the original one, therefore we log it here. |
|
673 | 673 | log.exception('Error while rebasing shadow repo during merge.') |
|
674 | 674 | if 'unresolved conflicts' in safe_str(e): |
|
675 | 675 | unresolved = self._remote.get_unresolved_files() |
|
676 | 676 | log.debug('unresolved files: %s', unresolved) |
|
677 | 677 | |
|
678 | 678 | # Cleanup any rebase leftovers |
|
679 | 679 | self._remote.invalidate_vcs_cache() |
|
680 | 680 | self._remote.rebase(abort=True) |
|
681 | 681 | self._remote.invalidate_vcs_cache() |
|
682 | 682 | self._remote.update(clean=True) |
|
683 | 683 | if unresolved: |
|
684 | 684 | raise UnresolvedFilesInRepo(unresolved) |
|
685 | 685 | else: |
|
686 | 686 | raise |
|
687 | 687 | else: |
|
688 | 688 | try: |
|
689 | 689 | self._remote.merge(source_ref_commit_id) |
|
690 | 690 | self._remote.invalidate_vcs_cache() |
|
691 | 691 | self._remote.commit( |
|
692 | 692 | message=safe_str(merge_message), |
|
693 | 693 | username=safe_str('%s <%s>' % (user_name, user_email))) |
|
694 | 694 | self._remote.invalidate_vcs_cache() |
|
695 | 695 | return self._identify(), True |
|
696 | 696 | except RepositoryError as e: |
|
697 | 697 | # The merge-abort may raise another exception which 'hides' |
|
698 | 698 | # the original one, therefore we log it here. |
|
699 | 699 | log.exception('Error while merging shadow repo during merge.') |
|
700 | 700 | if 'unresolved merge conflicts' in safe_str(e): |
|
701 | 701 | unresolved = self._remote.get_unresolved_files() |
|
702 | 702 | log.debug('unresolved files: %s', unresolved) |
|
703 | 703 | |
|
704 | 704 | # Cleanup any merge leftovers |
|
705 | 705 | self._remote.update(clean=True) |
|
706 | 706 | if unresolved: |
|
707 | 707 | raise UnresolvedFilesInRepo(unresolved) |
|
708 | 708 | else: |
|
709 | 709 | raise |
|
710 | 710 | |
|
711 | 711 | def _local_close(self, target_ref, user_name, user_email, |
|
712 | 712 | source_ref, close_message=''): |
|
713 | 713 | """ |
|
714 | 714 | Close the branch of the given source_revision |
|
715 | 715 | |
|
716 | 716 | Returns the commit id of the close and a boolean indicating if the |
|
717 | 717 | commit needs to be pushed. |
|
718 | 718 | """ |
|
719 | 719 | self._update(source_ref.commit_id) |
|
720 | 720 | message = close_message or "Closing branch: `{}`".format(source_ref.name) |
|
721 | 721 | try: |
|
722 | 722 | self._remote.commit( |
|
723 | 723 | message=safe_str(message), |
|
724 | 724 | username=safe_str('%s <%s>' % (user_name, user_email)), |
|
725 | 725 | close_branch=True) |
|
726 | 726 | self._remote.invalidate_vcs_cache() |
|
727 | 727 | return self._identify(), True |
|
728 | 728 | except RepositoryError: |
|
729 | 729 | # Cleanup any commit leftovers |
|
730 | 730 | self._remote.update(clean=True) |
|
731 | 731 | raise |
|
732 | 732 | |
|
733 | 733 | def _is_the_same_branch(self, target_ref, source_ref): |
|
734 | 734 | return ( |
|
735 | 735 | self._get_branch_name(target_ref) == |
|
736 | 736 | self._get_branch_name(source_ref)) |
|
737 | 737 | |
|
738 | 738 | def _get_branch_name(self, ref): |
|
739 | 739 | if ref.type == 'branch': |
|
740 | 740 | return ref.name |
|
741 | 741 | return self._remote.ctx_branch(ref.commit_id) |
|
742 | 742 | |
|
743 | 743 | def _maybe_prepare_merge_workspace( |
|
744 | 744 | self, repo_id, workspace_id, unused_target_ref, unused_source_ref): |
|
745 | 745 | shadow_repository_path = self._get_shadow_repository_path( |
|
746 | 746 | self.path, repo_id, workspace_id) |
|
747 | 747 | if not os.path.exists(shadow_repository_path): |
|
748 | 748 | self._local_clone(shadow_repository_path) |
|
749 | 749 | log.debug( |
|
750 | 750 | 'Prepared shadow repository in %s', shadow_repository_path) |
|
751 | 751 | |
|
752 | 752 | return shadow_repository_path |
|
753 | 753 | |
|
754 | 754 | def _merge_repo(self, repo_id, workspace_id, target_ref, |
|
755 | 755 | source_repo, source_ref, merge_message, |
|
756 | 756 | merger_name, merger_email, dry_run=False, |
|
757 | 757 | use_rebase=False, close_branch=False): |
|
758 | 758 | |
|
759 | 759 | log.debug('Executing merge_repo with %s strategy, dry_run mode:%s', |
|
760 | 760 | 'rebase' if use_rebase else 'merge', dry_run) |
|
761 | 761 | if target_ref.commit_id not in self._heads(): |
|
762 | 762 | return MergeResponse( |
|
763 | 763 | False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD, |
|
764 | 764 | metadata={'target_ref': target_ref}) |
|
765 | 765 | |
|
766 | 766 | try: |
|
767 | 767 | if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1: |
|
768 | 768 | heads_all = self._heads(target_ref.name) |
|
769 | 769 | max_heads = 10 |
|
770 | 770 | if len(heads_all) > max_heads: |
|
771 | 771 | heads = ',\n'.join( 
|
772 | 772 | heads_all[:max_heads] + |
|
773 | 773 | ['and {} more.'.format(len(heads_all)-max_heads)]) |
|
774 | 774 | else: |
|
775 | 775 | heads = ',\n'.join(heads_all) 
|
776 | 776 | metadata = { |
|
777 | 777 | 'target_ref': target_ref, |
|
778 | 778 | 'source_ref': source_ref, |
|
779 | 779 | 'heads': heads |
|
780 | 780 | } |
|
781 | 781 | return MergeResponse( |
|
782 | 782 | False, False, None, |
|
783 | 783 | MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS, |
|
784 | 784 | metadata=metadata) |
|
785 | 785 | except CommitDoesNotExistError: |
|
786 | 786 | log.exception('Failure when looking up branch heads on hg target') |
|
787 | 787 | return MergeResponse( |
|
788 | 788 | False, False, None, MergeFailureReason.MISSING_TARGET_REF, |
|
789 | 789 | metadata={'target_ref': target_ref}) |
|
790 | 790 | |
|
791 | 791 | shadow_repository_path = self._maybe_prepare_merge_workspace( |
|
792 | 792 | repo_id, workspace_id, target_ref, source_ref) |
|
793 | 793 | shadow_repo = self.get_shadow_instance(shadow_repository_path) |
|
794 | 794 | |
|
795 | 795 | log.debug('Pulling in target reference %s', target_ref) |
|
796 | 796 | self._validate_pull_reference(target_ref) |
|
797 | 797 | shadow_repo._local_pull(self.path, target_ref) |
|
798 | 798 | |
|
799 | 799 | try: |
|
800 | 800 | log.debug('Pulling in source reference %s', source_ref) |
|
801 | 801 | source_repo._validate_pull_reference(source_ref) |
|
802 | 802 | shadow_repo._local_pull(source_repo.path, source_ref) |
|
803 | 803 | except CommitDoesNotExistError: |
|
804 | 804 | log.exception('Failure when doing local pull on hg shadow repo') |
|
805 | 805 | return MergeResponse( |
|
806 | 806 | False, False, None, MergeFailureReason.MISSING_SOURCE_REF, |
|
807 | 807 | metadata={'source_ref': source_ref}) |
|
808 | 808 | |
|
809 | 809 | merge_ref = None |
|
810 | 810 | merge_commit_id = None |
|
811 | 811 | close_commit_id = None |
|
812 | 812 | merge_failure_reason = MergeFailureReason.NONE |
|
813 | 813 | metadata = {} |
|
814 | 814 | |
|
815 | 815 | # enforce that close branch is used only when we source from 

816 | 816 | # an actual branch 
|
817 | 817 | close_branch = close_branch and source_ref.type == 'branch' |
|
818 | 818 | |
|
819 | 819 | # don't allow closing the branch if source and target are the same 
|
820 | 820 | close_branch = close_branch and source_ref.name != target_ref.name |
|
821 | 821 | |
|
822 | 822 | needs_push_on_close = False |
|
823 | 823 | if close_branch and not use_rebase and not dry_run: |
|
824 | 824 | try: |
|
825 | 825 | close_commit_id, needs_push_on_close = shadow_repo._local_close( |
|
826 | 826 | target_ref, merger_name, merger_email, source_ref) |
|
827 | 827 | merge_possible = True |
|
828 | 828 | except RepositoryError: |
|
829 | 829 | log.exception('Failure when doing close branch on ' |
|
830 | 830 | 'shadow repo: %s', shadow_repo) |
|
831 | 831 | merge_possible = False |
|
832 | 832 | merge_failure_reason = MergeFailureReason.MERGE_FAILED |
|
833 | 833 | else: |
|
834 | 834 | merge_possible = True |
|
835 | 835 | |
|
836 | 836 | needs_push = False |
|
837 | 837 | if merge_possible: |
|
838 | 838 | |
|
839 | 839 | try: |
|
840 | 840 | merge_commit_id, needs_push = shadow_repo._local_merge( |
|
841 | 841 | target_ref, merge_message, merger_name, merger_email, |
|
842 | 842 | source_ref, use_rebase=use_rebase, |
|
843 | 843 | close_commit_id=close_commit_id, dry_run=dry_run) |
|
844 | 844 | merge_possible = True |
|
845 | 845 | |
|
846 | 846 | # read the state of the close action, since it 

847 | 847 | # may have required a push 
|
848 | 848 | needs_push = needs_push or needs_push_on_close |
|
849 | 849 | |
|
850 | 850 | # Set a bookmark pointing to the merge commit. This bookmark |
|
851 | 851 | # may be used to easily identify the last successful merge |
|
852 | 852 | # commit in the shadow repository. |
|
853 | 853 | shadow_repo.bookmark('pr-merge', revision=merge_commit_id) |
|
854 | 854 | merge_ref = Reference('book', 'pr-merge', merge_commit_id) |
|
855 | 855 | except SubrepoMergeError: |
|
856 | 856 | log.exception( |
|
857 | 857 | 'Subrepo merge error during local merge on hg shadow repo.') |
|
858 | 858 | merge_possible = False |
|
859 | 859 | merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED |
|
860 | 860 | needs_push = False |
|
861 | 861 | except RepositoryError as e: |
|
862 | 862 | log.exception('Failure when doing local merge on hg shadow repo') |
|
863 | 863 | if isinstance(e, UnresolvedFilesInRepo): |
|
864 | 864 | all_conflicts = list(e.args[0]) |
|
865 | 865 | max_conflicts = 20 |
|
866 | 866 | if len(all_conflicts) > max_conflicts: |
|
867 | 867 | conflicts = all_conflicts[:max_conflicts] \ |
|
868 | 868 | + ['and {} more.'.format(len(all_conflicts)-max_conflicts)] |
|
869 | 869 | else: |
|
870 | 870 | conflicts = all_conflicts |
|
871 | 871 | metadata['unresolved_files'] = \ |
|
872 | 872 | '\n* conflict: ' + \ |
|
873 | 873 | ('\n * conflict: '.join(conflicts)) |
|
874 | 874 | |
|
875 | 875 | merge_possible = False |
|
876 | 876 | merge_failure_reason = MergeFailureReason.MERGE_FAILED |
|
877 | 877 | needs_push = False |
|
878 | 878 | |
|
879 | 879 | if merge_possible and not dry_run: |
|
880 | 880 | if needs_push: |
|
881 | 881 | # In case the target is a bookmark, update it, so after pushing |
|
882 | 882 | # the bookmark is also updated in the target. 
|
883 | 883 | if target_ref.type == 'book': |
|
884 | 884 | shadow_repo.bookmark( |
|
885 | 885 | target_ref.name, revision=merge_commit_id) |
|
886 | 886 | try: |
|
887 | 887 | shadow_repo_with_hooks = self.get_shadow_instance( |
|
888 | 888 | shadow_repository_path, |
|
889 | 889 | enable_hooks=True) |
|
890 | 890 | # This is the actual merge action, we push from shadow |
|
891 | 891 | # into origin. |
|
892 | 892 | # Note: the push_branches option will push any new branch |
|
893 | 893 | # defined in the source repository to the target. This may |
|
894 | 894 | # be dangerous as branches are permanent in Mercurial. |
|
895 | 895 | # This feature was requested in issue #441. |
|
896 | 896 | shadow_repo_with_hooks._local_push( |
|
897 | 897 | merge_commit_id, self.path, push_branches=True, |
|
898 | 898 | enable_hooks=True) |
|
899 | 899 | |
|
900 | 900 | # maybe we also need to push the close_commit_id |
|
901 | 901 | if close_commit_id: |
|
902 | 902 | shadow_repo_with_hooks._local_push( |
|
903 | 903 | close_commit_id, self.path, push_branches=True, |
|
904 | 904 | enable_hooks=True) |
|
905 | 905 | merge_succeeded = True |
|
906 | 906 | except RepositoryError: |
|
907 | 907 | log.exception( |
|
908 | 908 | 'Failure when doing local push from the shadow ' |
|
909 | 909 | 'repository to the target repository at %s.', self.path) |
|
910 | 910 | merge_succeeded = False |
|
911 | 911 | merge_failure_reason = MergeFailureReason.PUSH_FAILED |
|
912 | 912 | metadata['target'] = 'hg shadow repo' |
|
913 | 913 | metadata['merge_commit'] = merge_commit_id |
|
914 | 914 | else: |
|
915 | 915 | merge_succeeded = True |
|
916 | 916 | else: |
|
917 | 917 | merge_succeeded = False |
|
918 | 918 | |
|
919 | 919 | return MergeResponse( |
|
920 | 920 | merge_possible, merge_succeeded, merge_ref, merge_failure_reason, |
|
921 | 921 | metadata=metadata) |
|
922 | 922 | |
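A hypothetical caller of the method above, shown only to illustrate the shape of the result. The attribute names are assumptions read off the positional MergeResponse(...) constructor calls; all other names are placeholders from the surrounding flow:

    # dry_run=True exercises the whole pull/merge path in the shadow
    # repo without pushing anything back to the target.
    response = repo._merge_repo(
        repo_id, workspace_id, target_ref, source_repo, source_ref,
        merge_message, merger_name, merger_email, dry_run=True)

    if not response.possible:                  # merge_possible
        print('blocked:', response.failure_reason, response.metadata)
    elif response.executed:                    # merge_succeeded
        print('merged as', response.merge_ref)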
|
923 | 923 | def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False): |
|
924 | 924 | config = self.config.copy() |
|
925 | 925 | if not enable_hooks: |
|
926 | 926 | config.clear_section('hooks') |
|
927 | 927 | return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache}) |
|
928 | 928 | |
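Clearing the 'hooks' section on the copied config is what keeps shadow-repo operations from firing repository hooks. A stdlib configparser analogue of that idea, with section contents invented for the demo:

    import configparser

    config = configparser.ConfigParser()
    config.read_string(
        "[hooks]\n"
        "pretxnchangegroup = python:myhooks.check\n"
        "[ui]\n"
        "username = shadow\n")

    # dropping the whole [hooks] section disables every hook at once
    config.remove_section('hooks')
    print(config.sections())   # ['ui']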
|
929 | 929 | def _validate_pull_reference(self, reference): |
|
930 | 930 | if not (reference.name in self.bookmarks or |
|
931 | 931 | reference.name in self.branches or |
|
932 | 932 | self.get_commit(reference.commit_id)): |
|
933 | 933 | raise CommitDoesNotExistError( |
|
934 | 934 | 'Unknown branch, bookmark or commit id') |
|
935 | 935 | |
|
936 | 936 | def _local_pull(self, repository_path, reference): |
|
937 | 937 | """ |
|
938 | 938 | Fetch a branch, bookmark or commit from a local repository. |
|
939 | 939 | """ |
|
940 | 940 | repository_path = os.path.abspath(repository_path) |
|
941 | 941 | if repository_path == self.path: |
|
942 | 942 | raise ValueError('Cannot pull from the same repository') |
|
943 | 943 | |
|
944 | 944 | reference_type_to_option_name = { |
|
945 | 945 | 'book': 'bookmark', |
|
946 | 946 | 'branch': 'branch', |
|
947 | 947 | } |
|
948 | 948 | option_name = reference_type_to_option_name.get( |
|
949 | 949 | reference.type, 'revision') |
|
950 | 950 | |
|
951 | 951 | if option_name == 'revision': |
|
952 | 952 | ref = reference.commit_id |
|
953 | 953 | else: |
|
954 | 954 | ref = reference.name |
|
955 | 955 | |
|
956 | 956 | options = {option_name: [ref]} |
|
957 | 957 | self._remote.pull_cmd(repository_path, hooks=False, **options) |
|
958 | 958 | self._remote.invalidate_vcs_cache() |
|
959 | 959 | |
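The ref-type dispatch above boils down to a small mapping; a standalone rendering of just that logic:

    # 'book' and 'branch' refs pull by name; anything else (e.g. a tag
    # or a raw commit) falls back to pulling by revision id.
    reference_type_to_option_name = {'book': 'bookmark', 'branch': 'branch'}

    def pull_options(ref_type, name, commit_id):
        option = reference_type_to_option_name.get(ref_type, 'revision')
        value = commit_id if option == 'revision' else name
        return {option: [value]}

    print(pull_options('book', 'pr-merge', 'deadbeef'))  # {'bookmark': ['pr-merge']}
    print(pull_options('tag', 'v1.0', 'deadbeef'))       # {'revision': ['deadbeef']}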
|
960 | 960 | def bookmark(self, bookmark, revision=None): |
|
961 | 961 | if isinstance(bookmark, str): 
|
962 | 962 | bookmark = safe_str(bookmark) |
|
963 | 963 | self._remote.bookmark(bookmark, revision=revision) |
|
964 | 964 | self._remote.invalidate_vcs_cache() |
|
965 | 965 | |
|
966 | 966 | def get_path_permissions(self, username): |
|
967 | 967 | hgacl_file = os.path.join(self.path, '.hg/hgacl') |
|
968 | 968 | |
|
969 | 969 | def read_patterns(suffix): |
|
970 | 970 | svalue = None |
|
971 | 971 | for section, option in [ |
|
972 | 972 | ('narrowacl', username + suffix), |
|
973 | 973 | ('narrowacl', 'default' + suffix), |
|
974 | 974 | ('narrowhgacl', username + suffix), |
|
975 | 975 | ('narrowhgacl', 'default' + suffix) |
|
976 | 976 | ]: |
|
977 | 977 | try: |
|
978 | 978 | svalue = hgacl.get(section, option) |
|
979 | 979 | break # stop at the first value we find |
|
980 | 980 | except configparser.NoOptionError: |
|
981 | 981 | pass |
|
982 | 982 | if not svalue: |
|
983 | 983 | return None |
|
984 | 984 | result = ['/'] |
|
985 | 985 | for pattern in svalue.split(): |
|
986 | 986 | result.append(pattern) |
|
987 | 987 | if '*' not in pattern and '?' not in pattern: |
|
988 | 988 | result.append(pattern + '/*') |
|
989 | 989 | return result |
|
990 | 990 | |
|
991 | 991 | if os.path.exists(hgacl_file): |
|
992 | 992 | try: |
|
993 | 993 | hgacl = configparser.RawConfigParser() |
|
994 | 994 | hgacl.read(hgacl_file) |
|
995 | 995 | |
|
996 | 996 | includes = read_patterns('.includes') |
|
997 | 997 | excludes = read_patterns('.excludes') |
|
998 | 998 | return BasePathPermissionChecker.create_from_patterns( |
|
999 | 999 | includes, excludes) |
|
1000 | 1000 | except BaseException as e: |
|
1001 | 1001 | msg = 'Cannot read ACL settings from {} on {}: {}'.format( |
|
1002 | 1002 | hgacl_file, self.name, e) |
|
1003 | 1003 | raise exceptions.RepositoryRequirementError(msg) |
|
1004 | 1004 | else: |
|
1005 | 1005 | return None |
|
1006 | 1006 | |
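To make read_patterns concrete, a self-contained run against a made-up .hg/hgacl. The section and option names follow the lookup order above; the paths are invented:

    import configparser

    hgacl = configparser.RawConfigParser()
    hgacl.read_string(
        "[narrowhgacl]\n"
        "john.includes = docs src/api\n")

    svalue = hgacl.get('narrowhgacl', 'john.includes')
    result = ['/']
    for pattern in svalue.split():
        result.append(pattern)
        # wildcard-free entries also match everything below them
        if '*' not in pattern and '?' not in pattern:
            result.append(pattern + '/*')
    print(result)   # ['/', 'docs', 'docs/*', 'src/api', 'src/api/*']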
|
1007 | 1007 | |
|
1008 | 1008 | class MercurialIndexBasedCollectionGenerator(CollectionGenerator): |
|
1009 | 1009 | |
|
1010 | 1010 | def _commit_factory(self, commit_id): |
|
1011 | 1011 | return self.repo.get_commit( |
|
1012 | 1012 | commit_idx=commit_id, pre_load=self.pre_load) |
@@ -1,200 +1,200 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2020 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | |
|
22 | 22 | import os |
|
23 | 23 | import time |
|
24 | 24 | import tempfile |
|
25 | 25 | import pytest |
|
26 | 26 | import subprocess |
|
27 | 27 | import configobj |
|
28 | 28 | import logging |
|
29 | 29 | from urllib.request import urlopen |
|
30 | 30 | from urllib.error import URLError |
|
31 |

31 | import configparser | 
|
32 | 32 | |
|
33 | 33 | |
|
34 | 34 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS |
|
35 | 35 | from rhodecode.tests.utils import is_url_reachable |
|
36 | 36 | |
|
37 | 37 | log = logging.getLogger(__name__) |
|
38 | 38 | |
|
39 | 39 | |
|
40 | 40 | def get_port(pyramid_config): |
|
41 | 41 | config = configparser.ConfigParser() |
|
42 | 42 | config.read(pyramid_config) |
|
43 | 43 | return config.get('server:main', 'port') |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | def get_host_url(pyramid_config): |
|
47 | 47 | """Construct the host url using the port in the test configuration.""" |
|
48 | 48 | return '127.0.0.1:%s' % get_port(pyramid_config) |
|
49 | 49 | |
|
50 | 50 | |
|
51 | 51 | def assert_no_running_instance(url): |
|
52 | 52 | if is_url_reachable(url): |
|
53 | 53 | print("Hint: Usually this means another instance of server " |
|
54 | 54 | "is running in the background at %s." % url) |
|
55 | 55 | pytest.fail( |
|
56 | 56 | "Port is not free at %s, cannot start server at" % url) |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | class ServerBase(object): |
|
60 | 60 | _args = [] |
|
61 | 61 | log_file_name = 'NOT_DEFINED.log' |
|
62 | 62 | status_url_tmpl = 'http://{host}:{port}' |
|
63 | 63 | |
|
64 | 64 | def __init__(self, config_file, log_file): |
|
65 | 65 | self.config_file = config_file |
|
66 | 66 | config_data = configobj.ConfigObj(config_file) |
|
67 | 67 | self._config = config_data['server:main'] |
|
68 | 68 | |
|
69 | 69 | self._args = [] |
|
70 | 70 | self.log_file = log_file or os.path.join( |
|
71 | 71 | tempfile.gettempdir(), self.log_file_name) |
|
72 | 72 | self.process = None |
|
73 | 73 | self.server_out = None |
|
74 | 74 | log.info("Using the {} configuration:{}".format( |
|
75 | 75 | self.__class__.__name__, config_file)) |
|
76 | 76 | |
|
77 | 77 | if not os.path.isfile(config_file): |
|
78 | 78 | raise RuntimeError('Failed to get config at {}'.format(config_file)) |
|
79 | 79 | |
|
80 | 80 | @property |
|
81 | 81 | def command(self): |
|
82 | 82 | return ' '.join(self._args) |
|
83 | 83 | |
|
84 | 84 | @property |
|
85 | 85 | def http_url(self): |
|
86 | 86 | template = 'http://{host}:{port}/' |
|
87 | 87 | return template.format(**self._config) |
|
88 | 88 | |
|
89 | 89 | def host_url(self): |
|
90 | 90 | return 'http://' + get_host_url(self.config_file) |
|
91 | 91 | |
|
92 | 92 | def get_rc_log(self): |
|
93 | 93 | with open(self.log_file) as f: |
|
94 | 94 | return f.read() |
|
95 | 95 | |
|
96 | 96 | def wait_until_ready(self, timeout=30): |
|
97 | 97 | host = self._config['host'] |
|
98 | 98 | port = self._config['port'] |
|
99 | 99 | status_url = self.status_url_tmpl.format(host=host, port=port) |
|
100 | 100 | start = time.time() |
|
101 | 101 | |
|
102 | 102 | while time.time() - start < timeout: |
|
103 | 103 | try: |
|
104 | 104 | urlopen(status_url) |
|
105 | 105 | break |
|
106 | 106 | except URLError: |
|
107 | 107 | time.sleep(0.2) |
|
108 | 108 | else: |
|
109 | 109 | pytest.fail( |
|
110 | 110 | "Starting the {} failed or took more than {} " |
|
111 | 111 | "seconds. cmd: `{}`".format( |
|
112 | 112 | self.__class__.__name__, timeout, self.command)) |
|
113 | 113 | |
|
114 | 114 | log.info('Server of {} ready at url {}'.format( |
|
115 | 115 | self.__class__.__name__, status_url)) |
|
116 | 116 | |
|
117 | 117 | def shutdown(self): |
|
118 | 118 | self.process.kill() |
|
119 | 119 | self.server_out.flush() |
|
120 | 120 | self.server_out.close() |
|
121 | 121 | |
|
122 | 122 | def get_log_file_with_port(self): |
|
123 | 123 | log_file = list(self.log_file.partition('.log')) |
|
124 | 124 | log_file.insert(1, get_port(self.config_file)) |
|
125 | 125 | log_file = ''.join(log_file) |
|
126 | 126 | return log_file |
|
127 | 127 | |
|
128 | 128 | |
|
129 | 129 | class RcVCSServer(ServerBase): |
|
130 | 130 | """ |
|
131 | 131 | Represents a running VCSServer instance. |
|
132 | 132 | """ |
|
133 | 133 | |
|
134 | 134 | log_file_name = 'rc-vcsserver.log' |
|
135 | 135 | status_url_tmpl = 'http://{host}:{port}/status' |
|
136 | 136 | |
|
137 | 137 | def __init__(self, config_file, log_file=None): |
|
138 | 138 | super(RcVCSServer, self).__init__(config_file, log_file) |
|
139 | 139 | self._args = ['gunicorn', '--paste', self.config_file] |
|
140 | 140 | |
|
141 | 141 | def start(self): |
|
142 | 142 | env = os.environ.copy() |
|
143 | 143 | |
|
144 | 144 | self.log_file = self.get_log_file_with_port() |
|
145 | 145 | self.server_out = open(self.log_file, 'w') |
|
146 | 146 | |
|
147 | 147 | host_url = self.host_url() |
|
148 | 148 | assert_no_running_instance(host_url) |
|
149 | 149 | |
|
150 | 150 | log.info('rhodecode-vcsserver start command: {}'.format(' '.join(self._args))) |
|
151 | 151 | log.info('rhodecode-vcsserver starting at: {}'.format(host_url)) |
|
152 | 152 | log.info('rhodecode-vcsserver command: {}'.format(self.command)) |
|
153 | 153 | log.info('rhodecode-vcsserver logfile: {}'.format(self.log_file)) |
|
154 | 154 | |
|
155 | 155 | self.process = subprocess.Popen( |
|
156 | 156 | self._args, bufsize=0, env=env, |
|
157 | 157 | stdout=self.server_out, stderr=self.server_out) |
|
158 | 158 | |
|
159 | 159 | |
|
160 | 160 | class RcWebServer(ServerBase): |
|
161 | 161 | """ |
|
162 | 162 | Represents a running RCE web server used as a test fixture. |
|
163 | 163 | """ |
|
164 | 164 | |
|
165 | 165 | log_file_name = 'rc-web.log' |
|
166 | 166 | status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping' |
|
167 | 167 | |
|
168 | 168 | def __init__(self, config_file, log_file=None): |
|
169 | 169 | super(RcWebServer, self).__init__(config_file, log_file) |
|
170 | 170 | self._args = [ |
|
171 | 171 | 'gunicorn', '--worker-class', 'gevent', '--paste', config_file] |
|
172 | 172 | |
|
173 | 173 | def start(self): |
|
174 | 174 | env = os.environ.copy() |
|
175 | 175 | env['RC_NO_TMP_PATH'] = '1' |
|
176 | 176 | |
|
177 | 177 | self.log_file = self.get_log_file_with_port() |
|
178 | 178 | self.server_out = open(self.log_file, 'w') |
|
179 | 179 | |
|
180 | 180 | host_url = self.host_url() |
|
181 | 181 | assert_no_running_instance(host_url) |
|
182 | 182 | |
|
183 | 183 | log.info('rhodecode-web starting at: {}'.format(host_url)) |
|
184 | 184 | log.info('rhodecode-web command: {}'.format(self.command)) |
|
185 | 185 | log.info('rhodecode-web logfile: {}'.format(self.log_file)) |
|
186 | 186 | |
|
187 | 187 | self.process = subprocess.Popen( |
|
188 | 188 | self._args, bufsize=0, env=env, |
|
189 | 189 | stdout=self.server_out, stderr=self.server_out) |
|
190 | 190 | |
|
191 | 191 | def repo_clone_url(self, repo_name, **kwargs): |
|
192 | 192 | params = { |
|
193 | 193 | 'user': TEST_USER_ADMIN_LOGIN, |
|
194 | 194 | 'passwd': TEST_USER_ADMIN_PASS, |
|
195 | 195 | 'host': get_host_url(self.config_file), |
|
196 | 196 | 'cloned_repo': repo_name, |
|
197 | 197 | } |
|
198 | 198 | params.update(**kwargs) |
|
199 | 199 | _url = 'http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s' % params |
|
200 | 200 | return _url |
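repo_clone_url interpolates the admin test credentials into a clone URL. Illustratively, with placeholder ini path and without spelling out the real test constants:

    server = RcWebServer('/path/to/rc-web.ini')
    url = server.repo_clone_url('vcs_test_hg')
    # -> 'http://<admin-login>:<admin-password>@127.0.0.1:<port>/vcs_test_hg'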