@@ -1,238 +1,238 @@

# -*- coding: utf-8 -*-

# Copyright (C) 2016-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import os
import re
import logging
import datetime
import configparser

from rhodecode.model.db import Session, User, UserSshKeys
from rhodecode.model.scm import ScmModel

from .hg import MercurialServer
from .git import GitServer
from .svn import SubversionServer

log = logging.getLogger(__name__)


class SshWrapper(object):
    hg_cmd_pat = re.compile(r'^hg\s+\-R\s+(\S+)\s+serve\s+\-\-stdio$')
    git_cmd_pat = re.compile(r'^git-(receive-pack|upload-pack)\s\'[/]?(\S+?)(|\.git)\'$')
    svn_cmd_pat = re.compile(r'^svnserve -t')

    def __init__(self, command, connection_info, mode,
                 user, user_id, key_id, shell, ini_path, env):
        self.command = command
        self.connection_info = connection_info
        self.mode = mode
        self.user = user
        self.user_id = user_id
        self.key_id = key_id
        self.shell = shell
        self.ini_path = ini_path
        self.env = env

        self.config = self.parse_config(ini_path)
        self.server_impl = None

    def parse_config(self, config_path):
        parser = configparser.ConfigParser()
        parser.read(config_path)
        return parser

    def update_key_access_time(self, key_id):
        key = UserSshKeys().query().filter(
            UserSshKeys.ssh_key_id == key_id).scalar()
        if key:
            key.accessed_on = datetime.datetime.utcnow()
            Session().add(key)
            Session().commit()
            log.debug('Update key id:`%s` fingerprint:`%s` access time',
                      key_id, key.ssh_key_fingerprint)

    def get_connection_info(self):
        """
        connection_info

        Identifies the client and server ends of the connection.
        The variable contains four space-separated values: client IP address,
        client port number, server IP address, and server port number.
        """
        conn = dict(
            client_ip=None,
            client_port=None,
            server_ip=None,
            server_port=None,
        )

        info = self.connection_info.split(' ')
        if len(info) == 4:
            conn['client_ip'] = info[0]
            conn['client_port'] = info[1]
            conn['server_ip'] = info[2]
            conn['server_port'] = info[3]

        return conn

    def maybe_translate_repo_uid(self, repo_name):
        _org_name = repo_name
        if _org_name.startswith('_'):
            # strip the _ID/subrepo format down to the plain _ID part
            _org_name = _org_name.split('/', 1)[0]

        if repo_name.startswith('_'):
            from rhodecode.model.repo import RepoModel
            org_repo_name = repo_name
            log.debug('translating UID repo %s', org_repo_name)
            by_id_match = RepoModel().get_repo_by_id(repo_name)
            if by_id_match:
                repo_name = by_id_match.repo_name
                log.debug('translation of UID repo %s got `%s`', org_repo_name, repo_name)

        return repo_name, _org_name

    def get_repo_details(self, mode):
        vcs_type = mode if mode in ['svn', 'hg', 'git'] else None
        repo_name = None

        hg_match = self.hg_cmd_pat.match(self.command)
        if hg_match is not None:
            vcs_type = 'hg'
            repo_id = hg_match.group(1).strip('/')
            repo_name, org_name = self.maybe_translate_repo_uid(repo_id)
            return vcs_type, repo_name, mode

        git_match = self.git_cmd_pat.match(self.command)
        if git_match is not None:
            mode = git_match.group(1)
            vcs_type = 'git'
            repo_id = git_match.group(2).strip('/')
            repo_name, org_name = self.maybe_translate_repo_uid(repo_id)
            return vcs_type, repo_name, mode

        svn_match = self.svn_cmd_pat.match(self.command)
        if svn_match is not None:
            vcs_type = 'svn'
            # The repo name has to be extracted from the input stream; we're
            # unable to extract it at this point in execution.
            return vcs_type, repo_name, mode

        return vcs_type, repo_name, mode

    def serve(self, vcs, repo, mode, user, permissions, branch_permissions):
        store = ScmModel().repos_path

        check_branch_perms = False
        detect_force_push = False

        if branch_permissions:
            check_branch_perms = True
            detect_force_push = True

        log.debug(
            'VCS detected:`%s` mode: `%s` repo_name: %s, branch_permission_checks:%s',
            vcs, mode, repo, check_branch_perms)

        # detect if we have to check branch permissions
        extras = {
            'detect_force_push': detect_force_push,
            'check_branch_perms': check_branch_perms,
        }

        if vcs == 'hg':
            server = MercurialServer(
                store=store, ini_path=self.ini_path,
                repo_name=repo, user=user,
                user_permissions=permissions, config=self.config, env=self.env)
            self.server_impl = server
            return server.run(tunnel_extras=extras)

        elif vcs == 'git':
            server = GitServer(
                store=store, ini_path=self.ini_path,
                repo_name=repo, repo_mode=mode, user=user,
                user_permissions=permissions, config=self.config, env=self.env)
            self.server_impl = server
            return server.run(tunnel_extras=extras)

        elif vcs == 'svn':
            server = SubversionServer(
                store=store, ini_path=self.ini_path,
                repo_name=None, user=user,
                user_permissions=permissions, config=self.config, env=self.env)
            self.server_impl = server
            return server.run(tunnel_extras=extras)

        else:
            raise Exception('Unrecognised VCS: {}'.format(vcs))

    def wrap(self):
        mode = self.mode
        user = self.user
        user_id = self.user_id
        key_id = self.key_id
        shell = self.shell

        scm_detected, scm_repo, scm_mode = self.get_repo_details(mode)

        log.debug(
            'Mode: `%s` User: `%s:%s` Shell: `%s` SSH Command: `"%s"` '
            'SCM_DETECTED: `%s` SCM Mode: `%s` SCM Repo: `%s`',
            mode, user, user_id, shell, self.command,
            scm_detected, scm_mode, scm_repo)

        # update last access time for this key
        self.update_key_access_time(key_id)

        log.debug('SSH Connection info %s', self.get_connection_info())

        if shell and self.command is None:
            log.info('Dropping to shell, no command given and shell is allowed')
            os.execl('/bin/bash', '-l')
            exit_code = 1

        elif scm_detected:
            user = User.get(user_id)
            if not user:
                log.warning('User with id %s not found', user_id)
                exit_code = -1
                return exit_code

            auth_user = user.AuthUser()
            permissions = auth_user.permissions['repositories']
            repo_branch_permissions = auth_user.get_branch_permissions(scm_repo)
            try:
                exit_code, is_updated = self.serve(
                    scm_detected, scm_repo, scm_mode, user, permissions,
                    repo_branch_permissions)
            except Exception:
                log.exception('Error occurred during execution of SshWrapper')
                exit_code = -1

        elif self.command is None and shell is False:
            log.error('No Command given.')
            exit_code = -1

        else:
            log.error('Unhandled Command: "%s" Aborting.', self.command)
            exit_code = -1

        return exit_code
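
The three class-level patterns above are the whole dispatch mechanism: the
command line passed over SSH is matched against them to pick the VCS backend.
A standalone sketch of how they classify typical commands (the sample command
strings are illustrative, not taken from the changeset):

    import re

    hg_cmd_pat = re.compile(r'^hg\s+\-R\s+(\S+)\s+serve\s+\-\-stdio$')
    git_cmd_pat = re.compile(r'^git-(receive-pack|upload-pack)\s\'[/]?(\S+?)(|\.git)\'$')

    m = hg_cmd_pat.match('hg -R my-repo serve --stdio')
    print(m.group(1))              # 'my-repo' -> repo_id in get_repo_details()

    m = git_cmd_pat.match("git-upload-pack '/my-repo.git'")
    print(m.group(1), m.group(2))  # 'upload-pack' 'my-repo' (.git suffix split off)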
@@ -1,70 +1,70 @@

# -*- coding: utf-8 -*-

# Copyright (C) 2016-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import os
import pytest
import configparser

from rhodecode.apps.ssh_support.lib.ssh_wrapper import SshWrapper
from rhodecode.lib.utils2 import AttributeDict


@pytest.fixture()
def dummy_conf_file(tmpdir):
    conf = configparser.ConfigParser()
    conf.add_section('app:main')
    conf.set('app:main', 'ssh.executable.hg', '/usr/bin/hg')
    conf.set('app:main', 'ssh.executable.git', '/usr/bin/git')
    conf.set('app:main', 'ssh.executable.svn', '/usr/bin/svnserve')

    f_path = os.path.join(str(tmpdir), 'ssh_wrapper_test.ini')
    with open(f_path, 'w') as f:
        # ConfigParser.write() emits str, so the file must be opened in
        # text mode, not 'wb'.
        conf.write(f)

    return f_path


def plain_dummy_env():
    return {
        'request':
            AttributeDict(host_url='http://localhost', script_name='/')
    }


@pytest.fixture()
def dummy_env():
    return plain_dummy_env()


def plain_dummy_user():
    return AttributeDict(username='test_user')


@pytest.fixture()
def dummy_user():
    return plain_dummy_user()


@pytest.fixture()
def ssh_wrapper(app, dummy_conf_file, dummy_env):
    conn_info = '127.0.0.1 22 10.0.0.1 443'
    return SshWrapper(
        'random command', conn_info, 'auto', 'admin', '1', key_id='1',
        shell=False, ini_path=dummy_conf_file, env=dummy_env)
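
The dummy_conf_file fixture writes a plain ini file that SshWrapper.parse_config()
later reads back with the same stdlib parser. A minimal sketch of that round trip
through an in-memory text stream, which is also why the fixture must open the
file in text mode:

    import configparser
    import io

    conf = configparser.ConfigParser()
    conf.add_section('app:main')
    conf.set('app:main', 'ssh.executable.hg', '/usr/bin/hg')

    buf = io.StringIO()
    conf.write(buf)                     # write() emits str, hence a text stream

    parsed = configparser.ConfigParser()
    parsed.read_string(buf.getvalue())
    assert parsed.get('app:main', 'ssh.executable.hg') == '/usr/bin/hg'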
@@ -1,141 +1,141 @@

# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import os
import json
import logging
import datetime
import time

from functools import partial

import configparser
from celery.result import AsyncResult
import celery.loaders.base
import celery.schedules

log = logging.getLogger(__name__)


def get_task_id(task):
    task_id = None
    if isinstance(task, AsyncResult):
        task_id = task.task_id

    return task_id


def crontab(value):
    return celery.schedules.crontab(**value)


def timedelta(value):
    return datetime.timedelta(**value)


def safe_json(get, section, key):
    value = ''
    try:
        value = get(key)
        json_value = json.loads(value)
    except ValueError:
        msg = 'The %s=%s is not valid json in section %s' % (
            key, value, section
        )
        raise ValueError(msg)

    return json_value


def raw_2_schedule(schedule_value, schedule_type):
    schedule_type_map = {
        'crontab': crontab,
        'timedelta': timedelta,
        'integer': int
    }
    scheduler_cls = schedule_type_map.get(schedule_type)

    if scheduler_cls is None:
        raise ValueError(
            'schedule type %s in section is invalid' % (
                schedule_type,
            )
        )
    try:
        schedule = scheduler_cls(schedule_value)
    except TypeError:
        log.exception('Failed to compose a schedule from value: %r', schedule_value)
        schedule = None
    return schedule


def get_beat_config(parser, section):

    get = partial(parser.get, section)
    has_option = partial(parser.has_option, section)

    schedule_type = get('type')
    schedule_value = safe_json(get, section, 'schedule')

    config = {
        'schedule_type': schedule_type,
        'schedule_value': schedule_value,
        'task': get('task'),
    }
    schedule = raw_2_schedule(schedule_value, schedule_type)
    if schedule:
        config['schedule'] = schedule

    if has_option('args'):
        config['args'] = safe_json(get, section, 'args')

    if has_option('kwargs'):
        config['kwargs'] = safe_json(get, section, 'kwargs')

    if has_option('force_update'):
        config['force_update'] = get('force_update')

    return config


def parse_ini_vars(ini_vars):
    options = {}
    for pairs in ini_vars.split(','):
        key, value = pairs.split('=')
        options[key] = value
    return options


def ping_db():
    from rhodecode.model import meta
    from rhodecode.model.db import DbMigrateVersion
    log.info('Testing DB connection...')

    for test in range(10):
        try:
            scalar = DbMigrateVersion.query().scalar()
            log.debug('DB PING %s@%s', scalar, scalar.version)
            break
        except Exception:
            retry = 1
            log.debug('DB not ready, next try in %ss', retry)
            time.sleep(retry)
        finally:
            meta.Session.remove()
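
get_beat_config() expects each beat section to declare a task, a schedule type,
and a JSON-encoded schedule value that raw_2_schedule() hands to the matching
factory (crontab, timedelta, or int). A sketch under stated assumptions: the
section name and task path are made up, and get_beat_config from the module
above is assumed to be in scope:

    import configparser
    import textwrap

    beat_ini = textwrap.dedent("""\
        [celerybeat:send_email]
        task = rhodecode.lib.celerylib.tasks.send_email
        type = timedelta
        schedule = {"minutes": 30}
        """)

    parser = configparser.ConfigParser()
    parser.read_string(beat_ini)

    config = get_beat_config(parser, 'celerybeat:send_email')
    # config['schedule'] is datetime.timedelta(minutes=30), produced by
    # raw_2_schedule({'minutes': 30}, 'timedelta')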
@@ -1,27 +1,27 @@

"""
Configuration parser module.
"""

import configparser

from rhodecode.lib.dbmigrate.migrate.versioning.config import *
from rhodecode.lib.dbmigrate.migrate.versioning import pathed


class Parser(configparser.ConfigParser):
    """A project configuration file."""

    def to_dict(self, sections=None):
        """It's easier to access config values like dictionaries"""
        return self._sections


class Config(pathed.Pathed, Parser):
    """Configuration class."""

    def __init__(self, path, *p, **k):
        """Confirm the config file exists; read it."""
        self.require_found(path)
        pathed.Pathed.__init__(self, path)
        Parser.__init__(self, *p, **k)
        self.read(path)
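
to_dict() simply hands back ConfigParser's private _sections mapping, so
callers can treat a parsed file like a nested dict. A throwaway illustration
of that behavior against the stdlib class (standalone, independent of the
migrate imports above):

    import configparser

    class DictParser(configparser.ConfigParser):
        def to_dict(self):
            return self._sections

    p = DictParser()
    p.read_string('[db]\nurl = sqlite:///test.db\n')
    print(dict(p.to_dict()['db']))   # {'url': 'sqlite:///test.db'}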
@@ -1,58 +1,58 @@

# -*- coding: utf-8 -*-

# Copyright (C) 2016-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import os
import configparser
from pyramid.paster import bootstrap as pyramid_bootstrap, setup_logging  # pragma: no cover

from rhodecode.lib.request import Request


def get_config(ini_path, **kwargs):
    parser = configparser.ConfigParser(**kwargs)
    parser.read(ini_path)
    return parser


def get_app_config(ini_path):
    from paste.deploy.loadwsgi import appconfig
    return appconfig('config:{}'.format(ini_path), relative_to=os.getcwd())


def bootstrap(config_uri, options=None, env=None):
    from rhodecode.lib.utils2 import AttributeDict

    if env:
        os.environ.update(env)

    config = get_config(config_uri)
    base_url = 'http://rhodecode.local'
    try:
        base_url = config.get('app:main', 'app.base_url')
    except (configparser.NoSectionError, configparser.NoOptionError):
        pass

    request = Request.blank('/', base_url=base_url)
    # fake-inject a running user for the bootstrap request!
    request.user = AttributeDict({'username': 'bootstrap-user',
                                  'user_id': 1,
                                  'ip_addr': '127.0.0.1'})
    return pyramid_bootstrap(config_uri, request=request, options=options)
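
A sketch of how bootstrap() is typically driven from a maintenance script. The
import path and the ini path are assumptions; the returned environment has the
shape documented for pyramid.paster.bootstrap (app, registry, request, root,
closer):

    # module and ini paths are assumed, adjust to the actual deployment
    from rhodecode.lib.pyramid_utils import bootstrap

    env = bootstrap('/path/to/rhodecode.ini')
    try:
        request = env['request']    # carries the injected 'bootstrap-user'
        registry = env['registry']
        # ... scripted work against the fully configured app ...
    finally:
        env['closer']()             # tear down the bootstrap environment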
@@ -1,853 +1,853 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2017-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2017-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | import os |
|
22 | import os | |
23 | import sys |
|
23 | import sys | |
24 | import time |
|
24 | import time | |
25 | import platform |
|
25 | import platform | |
26 | import collections |
|
26 | import collections | |
27 | from functools import wraps |
|
27 | from functools import wraps | |
28 |
|
28 | |||
29 | import pkg_resources |
|
29 | import pkg_resources | |
30 | import logging |
|
30 | import logging | |
31 | import resource |
|
31 | import resource | |
32 |
|
32 | |||
33 |
|
|
33 | import configparser | |
34 |
|
34 | |||
35 | log = logging.getLogger(__name__) |
|
35 | log = logging.getLogger(__name__) | |
36 |
|
36 | |||
37 |
|
37 | |||
38 | psutil = None |
|
38 | psutil = None | |
39 |
|
39 | |||
40 | try: |
|
40 | try: | |
41 | # cygwin cannot have yet psutil support. |
|
41 | # cygwin cannot have yet psutil support. | |
42 | import psutil as psutil |
|
42 | import psutil as psutil | |
43 | except ImportError: |
|
43 | except ImportError: | |
44 | pass |
|
44 | pass | |
45 |
|
45 | |||
46 |
|
46 | |||
47 | _NA = 'NOT AVAILABLE' |
|
47 | _NA = 'NOT AVAILABLE' | |
48 |
|
48 | |||
49 | STATE_OK = 'ok' |
|
49 | STATE_OK = 'ok' | |
50 | STATE_ERR = 'error' |
|
50 | STATE_ERR = 'error' | |
51 | STATE_WARN = 'warning' |
|
51 | STATE_WARN = 'warning' | |
52 |
|
52 | |||
53 | STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK} |
|
53 | STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK} | |
54 |
|
54 | |||
55 |
|
55 | |||
56 | registered_helpers = {} |
|
56 | registered_helpers = {} | |
57 |
|
57 | |||
58 |
|
58 | |||
59 | def register_sysinfo(func): |
|
59 | def register_sysinfo(func): | |
60 | """ |
|
60 | """ | |
61 | @register_helper |
|
61 | @register_helper | |
62 | def db_check(): |
|
62 | def db_check(): | |
63 | pass |
|
63 | pass | |
64 |
|
64 | |||
65 | db_check == registered_helpers['db_check'] |
|
65 | db_check == registered_helpers['db_check'] | |
66 | """ |
|
66 | """ | |
67 | global registered_helpers |
|
67 | global registered_helpers | |
68 | registered_helpers[func.__name__] = func |
|
68 | registered_helpers[func.__name__] = func | |
69 |
|
69 | |||
70 | @wraps(func) |
|
70 | @wraps(func) | |
71 | def _wrapper(*args, **kwargs): |
|
71 | def _wrapper(*args, **kwargs): | |
72 | return func(*args, **kwargs) |
|
72 | return func(*args, **kwargs) | |
73 | return _wrapper |
|
73 | return _wrapper | |
74 |
|
74 | |||
75 |
|
75 | |||
76 | # HELPERS |
|
76 | # HELPERS | |
77 | def percentage(part, whole): |
|
77 | def percentage(part, whole): | |
78 | whole = float(whole) |
|
78 | whole = float(whole) | |
79 | if whole > 0: |
|
79 | if whole > 0: | |
80 | return round(100 * float(part) / whole, 1) |
|
80 | return round(100 * float(part) / whole, 1) | |
81 | return 0.0 |
|
81 | return 0.0 | |
82 |
|
82 | |||
83 |
|
83 | |||
84 | def get_storage_size(storage_path): |
|
84 | def get_storage_size(storage_path): | |
85 | sizes = [] |
|
85 | sizes = [] | |
86 | for file_ in os.listdir(storage_path): |
|
86 | for file_ in os.listdir(storage_path): | |
87 | storage_file = os.path.join(storage_path, file_) |
|
87 | storage_file = os.path.join(storage_path, file_) | |
88 | if os.path.isfile(storage_file): |
|
88 | if os.path.isfile(storage_file): | |
89 | try: |
|
89 | try: | |
90 | sizes.append(os.path.getsize(storage_file)) |
|
90 | sizes.append(os.path.getsize(storage_file)) | |
91 | except OSError: |
|
91 | except OSError: | |
92 | log.exception('Failed to get size of storage file %s', storage_file) |
|
92 | log.exception('Failed to get size of storage file %s', storage_file) | |
93 | pass |
|
93 | pass | |
94 |
|
94 | |||
95 | return sum(sizes) |
|
95 | return sum(sizes) | |
96 |
|
96 | |||
97 |
|
97 | |||
98 | def get_resource(resource_type): |
|
98 | def get_resource(resource_type): | |
99 | try: |
|
99 | try: | |
100 | return resource.getrlimit(resource_type) |
|
100 | return resource.getrlimit(resource_type) | |
101 | except Exception: |
|
101 | except Exception: | |
102 | return 'NOT_SUPPORTED' |
|
102 | return 'NOT_SUPPORTED' | |
103 |
|
103 | |||
104 |
|
104 | |||
105 | def get_cert_path(ini_path): |
|
105 | def get_cert_path(ini_path): | |
106 | default = '/etc/ssl/certs/ca-certificates.crt' |
|
106 | default = '/etc/ssl/certs/ca-certificates.crt' | |
107 | control_ca_bundle = os.path.join( |
|
107 | control_ca_bundle = os.path.join( | |
108 | os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))), |
|
108 | os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))), | |
109 | '.rccontrol-profile/etc/ca-bundle.crt') |
|
109 | '.rccontrol-profile/etc/ca-bundle.crt') | |
110 | if os.path.isfile(control_ca_bundle): |
|
110 | if os.path.isfile(control_ca_bundle): | |
111 | default = control_ca_bundle |
|
111 | default = control_ca_bundle | |
112 |
|
112 | |||
113 | return default |
|
113 | return default | |
114 |
|
114 | |||
115 |
|
115 | |||
116 | class SysInfoRes(object): |
|
116 | class SysInfoRes(object): | |
117 | def __init__(self, value, state=None, human_value=None): |
|
117 | def __init__(self, value, state=None, human_value=None): | |
118 | self.value = value |
|
118 | self.value = value | |
119 | self.state = state or STATE_OK_DEFAULT |
|
119 | self.state = state or STATE_OK_DEFAULT | |
120 | self.human_value = human_value or value |
|
120 | self.human_value = human_value or value | |
121 |
|
121 | |||
122 | def __json__(self): |
|
122 | def __json__(self): | |
123 | return { |
|
123 | return { | |
124 | 'value': self.value, |
|
124 | 'value': self.value, | |
125 | 'state': self.state, |
|
125 | 'state': self.state, | |
126 | 'human_value': self.human_value, |
|
126 | 'human_value': self.human_value, | |
127 | } |
|
127 | } | |
128 |
|
128 | |||
129 | def get_value(self): |
|
129 | def get_value(self): | |
130 | return self.__json__() |
|
130 | return self.__json__() | |
131 |
|
131 | |||
132 | def __str__(self): |
|
132 | def __str__(self): | |
133 | return '<SysInfoRes({})>'.format(self.__json__()) |
|
133 | return '<SysInfoRes({})>'.format(self.__json__()) | |
134 |
|
134 | |||
135 |
|
135 | |||
136 | class SysInfo(object): |
|
136 | class SysInfo(object): | |
137 |
|
137 | |||
138 | def __init__(self, func_name, **kwargs): |
|
138 | def __init__(self, func_name, **kwargs): | |
139 | self.func_name = func_name |
|
139 | self.func_name = func_name | |
140 | self.value = _NA |
|
140 | self.value = _NA | |
141 | self.state = None |
|
141 | self.state = None | |
142 | self.kwargs = kwargs or {} |
|
142 | self.kwargs = kwargs or {} | |
143 |
|
143 | |||
144 | def __call__(self): |
|
144 | def __call__(self): | |
145 | computed = self.compute(**self.kwargs) |
|
145 | computed = self.compute(**self.kwargs) | |
146 | if not isinstance(computed, SysInfoRes): |
|
146 | if not isinstance(computed, SysInfoRes): | |
147 | raise ValueError( |
|
147 | raise ValueError( | |
148 | 'computed value for {} is not instance of ' |
|
148 | 'computed value for {} is not instance of ' | |
149 | '{}, got {} instead'.format( |
|
149 | '{}, got {} instead'.format( | |
150 | self.func_name, SysInfoRes, type(computed))) |
|
150 | self.func_name, SysInfoRes, type(computed))) | |
151 | return computed.__json__() |
|
151 | return computed.__json__() | |
152 |
|
152 | |||
153 | def __str__(self): |
|
153 | def __str__(self): | |
154 | return '<SysInfo({})>'.format(self.func_name) |
|
154 | return '<SysInfo({})>'.format(self.func_name) | |
155 |
|
155 | |||
156 | def compute(self, **kwargs): |
|
156 | def compute(self, **kwargs): | |
157 | return self.func_name(**kwargs) |
|
157 | return self.func_name(**kwargs) | |
158 |
|
158 | |||
159 |
|
159 | |||
160 | # SysInfo functions |
|
160 | # SysInfo functions | |
161 | @register_sysinfo |
|
161 | @register_sysinfo | |
162 | def python_info(): |
|
162 | def python_info(): | |
163 | value = dict(version=' '.join(platform._sys_version()), |
|
163 | value = dict(version=' '.join(platform._sys_version()), | |
164 | executable=sys.executable) |
|
164 | executable=sys.executable) | |
165 | return SysInfoRes(value=value) |
|
165 | return SysInfoRes(value=value) | |
166 |
|
166 | |||
167 |
|
167 | |||
168 | @register_sysinfo |
|
168 | @register_sysinfo | |
169 | def py_modules(): |
|
169 | def py_modules(): | |
170 | mods = dict([(p.project_name, {'version': p.version, 'location': p.location}) |
|
170 | mods = dict([(p.project_name, {'version': p.version, 'location': p.location}) | |
171 | for p in pkg_resources.working_set]) |
|
171 | for p in pkg_resources.working_set]) | |
172 |
|
172 | |||
173 | value = sorted(mods.items(), key=lambda k: k[0].lower()) |
|
173 | value = sorted(mods.items(), key=lambda k: k[0].lower()) | |
174 | return SysInfoRes(value=value) |
|
174 | return SysInfoRes(value=value) | |
175 |
|
175 | |||
176 |
|
176 | |||
177 | @register_sysinfo |
|
177 | @register_sysinfo | |
178 | def platform_type(): |
|
178 | def platform_type(): | |
179 | from rhodecode.lib.utils import safe_unicode, generate_platform_uuid |
|
179 | from rhodecode.lib.utils import safe_unicode, generate_platform_uuid | |
180 |
|
180 | |||
181 | value = dict( |
|
181 | value = dict( | |
182 | name=safe_unicode(platform.platform()), |
|
182 | name=safe_unicode(platform.platform()), | |
183 | uuid=generate_platform_uuid() |
|
183 | uuid=generate_platform_uuid() | |
184 | ) |
|
184 | ) | |
185 | return SysInfoRes(value=value) |
|
185 | return SysInfoRes(value=value) | |
186 |
|
186 | |||
187 |
|
187 | |||
188 | @register_sysinfo |
|
188 | @register_sysinfo | |
189 | def locale_info(): |
|
189 | def locale_info(): | |
190 | import locale |
|
190 | import locale | |
191 |
|
191 | |||
192 | value = dict( |
|
192 | value = dict( | |
193 | locale_default=locale.getdefaultlocale(), |
|
193 | locale_default=locale.getdefaultlocale(), | |
194 | locale_lc_all=locale.getlocale(locale.LC_ALL), |
|
194 | locale_lc_all=locale.getlocale(locale.LC_ALL), | |
195 | lang_env=os.environ.get('LANG'), |
|
195 | lang_env=os.environ.get('LANG'), | |
196 | lc_all_env=os.environ.get('LC_ALL'), |
|
196 | lc_all_env=os.environ.get('LC_ALL'), | |
197 | local_archive_env=os.environ.get('LOCALE_ARCHIVE'), |
|
197 | local_archive_env=os.environ.get('LOCALE_ARCHIVE'), | |
198 | ) |
|
198 | ) | |
199 | human_value = 'LANG: {}, locale LC_ALL: {}, Default locales: {}'.format( |
|
199 | human_value = 'LANG: {}, locale LC_ALL: {}, Default locales: {}'.format( | |
200 | value['lang_env'], value['locale_lc_all'], value['locale_default']) |
|
200 | value['lang_env'], value['locale_lc_all'], value['locale_default']) | |
201 | return SysInfoRes(value=value, human_value=human_value) |
|
201 | return SysInfoRes(value=value, human_value=human_value) | |
202 |
|
202 | |||
203 |
|
203 | |||
204 | @register_sysinfo |
|
204 | @register_sysinfo | |
205 | def ulimit_info(): |
|
205 | def ulimit_info(): | |
206 | data = collections.OrderedDict([ |
|
206 | data = collections.OrderedDict([ | |
207 | ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)), |
|
207 | ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)), | |
208 | ('file size', get_resource(resource.RLIMIT_FSIZE)), |
|
208 | ('file size', get_resource(resource.RLIMIT_FSIZE)), | |
209 | ('stack size', get_resource(resource.RLIMIT_STACK)), |
|
209 | ('stack size', get_resource(resource.RLIMIT_STACK)), | |
210 | ('core file size', get_resource(resource.RLIMIT_CORE)), |
|
210 | ('core file size', get_resource(resource.RLIMIT_CORE)), | |
211 | ('address space size', get_resource(resource.RLIMIT_AS)), |
|
211 | ('address space size', get_resource(resource.RLIMIT_AS)), | |
212 | ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)), |
|
212 | ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)), | |
213 | ('heap size', get_resource(resource.RLIMIT_DATA)), |
|
213 | ('heap size', get_resource(resource.RLIMIT_DATA)), | |
214 | ('rss size', get_resource(resource.RLIMIT_RSS)), |
|
214 | ('rss size', get_resource(resource.RLIMIT_RSS)), | |
215 | ('number of processes', get_resource(resource.RLIMIT_NPROC)), |
|
215 | ('number of processes', get_resource(resource.RLIMIT_NPROC)), | |
216 | ('open files', get_resource(resource.RLIMIT_NOFILE)), |
|
216 | ('open files', get_resource(resource.RLIMIT_NOFILE)), | |
217 | ]) |
|
217 | ]) | |
218 |
|
218 | |||
219 | text = ', '.join('{}:{}'.format(k, v) for k, v in data.items()) |
|
219 | text = ', '.join('{}:{}'.format(k, v) for k, v in data.items()) | |
220 |
|
220 | |||
221 | value = { |
|
221 | value = { | |
222 | 'limits': data, |
|
222 | 'limits': data, | |
223 | 'text': text, |
|
223 | 'text': text, | |
224 | } |
|
224 | } | |
225 | return SysInfoRes(value=value) |
|
225 | return SysInfoRes(value=value) | |
226 |
|
226 | |||
227 |
|
227 | |||
228 | @register_sysinfo |
|
228 | @register_sysinfo | |
229 | def uptime(): |
|
229 | def uptime(): | |
230 | from rhodecode.lib.helpers import age, time_to_datetime |
|
230 | from rhodecode.lib.helpers import age, time_to_datetime | |
231 | from rhodecode.translation import TranslationString |
|
231 | from rhodecode.translation import TranslationString | |
232 |
|
232 | |||
233 | value = dict(boot_time=0, uptime=0, text='') |
|
233 | value = dict(boot_time=0, uptime=0, text='') | |
234 | state = STATE_OK_DEFAULT |
|
234 | state = STATE_OK_DEFAULT | |
235 | if not psutil: |
|
235 | if not psutil: | |
236 | return SysInfoRes(value=value, state=state) |
|
236 | return SysInfoRes(value=value, state=state) | |
237 |
|
237 | |||
238 | boot_time = psutil.boot_time() |
|
238 | boot_time = psutil.boot_time() | |
239 | value['boot_time'] = boot_time |
|
239 | value['boot_time'] = boot_time | |
240 | value['uptime'] = time.time() - boot_time |
|
240 | value['uptime'] = time.time() - boot_time | |
241 |
|
241 | |||
242 | date_or_age = age(time_to_datetime(boot_time)) |
|
242 | date_or_age = age(time_to_datetime(boot_time)) | |
243 | if isinstance(date_or_age, TranslationString): |
|
243 | if isinstance(date_or_age, TranslationString): | |
244 | date_or_age = date_or_age.interpolate() |
|
244 | date_or_age = date_or_age.interpolate() | |
245 |
|
245 | |||
246 | human_value = value.copy() |
|
246 | human_value = value.copy() | |
247 | human_value['boot_time'] = time_to_datetime(boot_time) |
|
247 | human_value['boot_time'] = time_to_datetime(boot_time) | |
248 | human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False) |
|
248 | human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False) | |
249 |
|
249 | |||
250 | human_value['text'] = u'Server started {}'.format(date_or_age) |
|
250 | human_value['text'] = u'Server started {}'.format(date_or_age) | |
251 | return SysInfoRes(value=value, human_value=human_value) |
|
251 | return SysInfoRes(value=value, human_value=human_value) | |
252 |
|
252 | |||
253 |
|
253 | |||
254 | @register_sysinfo |
|
254 | @register_sysinfo | |
255 | def memory(): |
|
255 | def memory(): | |
256 | from rhodecode.lib.helpers import format_byte_size_binary |
|
256 | from rhodecode.lib.helpers import format_byte_size_binary | |
257 | value = dict(available=0, used=0, used_real=0, cached=0, percent=0, |
|
257 | value = dict(available=0, used=0, used_real=0, cached=0, percent=0, | |
258 | percent_used=0, free=0, inactive=0, active=0, shared=0, |
|
258 | percent_used=0, free=0, inactive=0, active=0, shared=0, | |
259 | total=0, buffers=0, text='') |
|
259 | total=0, buffers=0, text='') | |
260 |
|
260 | |||
261 | state = STATE_OK_DEFAULT |
|
261 | state = STATE_OK_DEFAULT | |
262 | if not psutil: |
|
262 | if not psutil: | |
263 | return SysInfoRes(value=value, state=state) |
|
263 | return SysInfoRes(value=value, state=state) | |
264 |
|
264 | |||
265 | value.update(dict(psutil.virtual_memory()._asdict())) |
|
265 | value.update(dict(psutil.virtual_memory()._asdict())) | |
266 | value['used_real'] = value['total'] - value['available'] |
|
266 | value['used_real'] = value['total'] - value['available'] | |
267 | value['percent_used'] = psutil._common.usage_percent( |
|
267 | value['percent_used'] = psutil._common.usage_percent( | |
268 | value['used_real'], value['total'], 1) |
|
268 | value['used_real'], value['total'], 1) | |
269 |
|
269 | |||
270 | human_value = value.copy() |
|
270 | human_value = value.copy() | |
271 | human_value['text'] = '%s/%s, %s%% used' % ( |
|
271 | human_value['text'] = '%s/%s, %s%% used' % ( | |
272 | format_byte_size_binary(value['used_real']), |
|
272 | format_byte_size_binary(value['used_real']), | |
273 | format_byte_size_binary(value['total']), |
|
273 | format_byte_size_binary(value['total']), | |
274 | value['percent_used'],) |
|
274 | value['percent_used'],) | |
275 |
|
275 | |||
276 | keys = list(value.keys()) |
|
276 | keys = list(value.keys()) | |
277 | keys.pop(keys.index('percent')) |
|
277 | keys.pop(keys.index('percent')) | |
278 | keys.pop(keys.index('percent_used')) |
|
278 | keys.pop(keys.index('percent_used')) | |
279 | keys.pop(keys.index('text')) |
|
279 | keys.pop(keys.index('text')) | |
280 | for k in keys: |
|
280 | for k in keys: | |
281 | human_value[k] = format_byte_size_binary(value[k]) |
|
281 | human_value[k] = format_byte_size_binary(value[k]) | |
282 |
|
282 | |||
283 | if state['type'] == STATE_OK and value['percent_used'] > 90: |
|
283 | if state['type'] == STATE_OK and value['percent_used'] > 90: | |
284 | msg = 'Critical: your available RAM memory is very low.' |
|
284 | msg = 'Critical: your available RAM memory is very low.' | |
285 | state = {'message': msg, 'type': STATE_ERR} |
|
285 | state = {'message': msg, 'type': STATE_ERR} | |
286 |
|
286 | |||
287 | elif state['type'] == STATE_OK and value['percent_used'] > 70: |
|
287 | elif state['type'] == STATE_OK and value['percent_used'] > 70: | |
288 | msg = 'Warning: your available RAM memory is running low.' |
|
288 | msg = 'Warning: your available RAM memory is running low.' | |
289 | state = {'message': msg, 'type': STATE_WARN} |
|
289 | state = {'message': msg, 'type': STATE_WARN} | |
290 |
|
290 | |||
291 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
291 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
292 |
|
292 | |||
293 |
|
293 | |||
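A note on the key-filtering step in `memory()` above: under Python 3, `dict.keys()` returns a view object that supports neither slicing nor `list.pop()`, so the keys must be materialized with `list()` before entries can be removed. A standalone sketch of the difference (illustrative values only):

    value = {'total': 8, 'used': 4, 'percent': 50.0, 'text': ''}

    # Python 2: value.keys() was a plain list, so keys[::] made a copy.
    # Python 3: dict.keys() is a view; slicing it raises TypeError.
    keys = list(value.keys())   # explicit copy, works on both versions

    # list methods such as index()/pop() now behave as memory() expects
    keys.pop(keys.index('percent'))
    keys.pop(keys.index('text'))
    print(keys)                 # ['total', 'used']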
294 | @register_sysinfo |
|
294 | @register_sysinfo | |
295 | def machine_load(): |
|
295 | def machine_load(): | |
296 | value = {'1_min': _NA, '5_min': _NA, '15_min': _NA, 'text': ''} |
|
296 | value = {'1_min': _NA, '5_min': _NA, '15_min': _NA, 'text': ''} | |
297 | state = STATE_OK_DEFAULT |
|
297 | state = STATE_OK_DEFAULT | |
298 | if not psutil: |
|
298 | if not psutil: | |
299 | return SysInfoRes(value=value, state=state) |
|
299 | return SysInfoRes(value=value, state=state) | |
300 |
|
300 | |||
301 | # load averages |
|
301 | # load averages | |
302 | if hasattr(psutil.os, 'getloadavg'): |
|
302 | if hasattr(psutil.os, 'getloadavg'): | |
303 | value.update(dict( |
|
303 | value.update(dict( | |
304 | zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg()))) |
|
304 | zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg()))) | |
305 |
|
305 | |||
306 | human_value = value.copy() |
|
306 | human_value = value.copy() | |
307 | human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format( |
|
307 | human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format( | |
308 | value['1_min'], value['5_min'], value['15_min']) |
|
308 | value['1_min'], value['5_min'], value['15_min']) | |
309 |
|
309 | |||
310 | if state['type'] == STATE_OK and value['15_min'] != _NA and value['15_min'] > 5: |
|
310 | if state['type'] == STATE_OK and value['15_min'] != _NA and value['15_min'] > 5: | |
311 | msg = 'Warning: your machine load is very high.' |
|
311 | msg = 'Warning: your machine load is very high.' | |
312 | state = {'message': msg, 'type': STATE_WARN} |
|
312 | state = {'message': msg, 'type': STATE_WARN} | |
313 |
|
313 | |||
314 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
314 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
315 |
|
315 | |||
316 |
|
316 | |||
317 | @register_sysinfo |
|
317 | @register_sysinfo | |
318 | def cpu(): |
|
318 | def cpu(): | |
319 | value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []} |
|
319 | value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []} | |
320 | state = STATE_OK_DEFAULT |
|
320 | state = STATE_OK_DEFAULT | |
321 |
|
321 | |||
322 | if not psutil: |
|
322 | if not psutil: | |
323 | return SysInfoRes(value=value, state=state) |
|
323 | return SysInfoRes(value=value, state=state) | |
324 |
|
324 | |||
325 | value['cpu'] = psutil.cpu_percent(0.5) |
|
325 | value['cpu'] = psutil.cpu_percent(0.5) | |
326 | value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True) |
|
326 | value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True) | |
327 | value['cpu_count'] = psutil.cpu_count() |
|
327 | value['cpu_count'] = psutil.cpu_count() | |
328 |
|
328 | |||
329 | human_value = value.copy() |
|
329 | human_value = value.copy() | |
330 | human_value['text'] = '{} cores at {} %'.format( |
|
330 | human_value['text'] = '{} cores at {} %'.format( | |
331 | value['cpu_count'], value['cpu']) |
|
331 | value['cpu_count'], value['cpu']) | |
332 |
|
332 | |||
333 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
333 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
334 |
|
334 | |||
335 |
|
335 | |||
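One behavioral detail in `cpu()` above: `psutil.cpu_percent(0.5)` blocks for the sampling interval, so the two consecutive calls add roughly a full second to every system-info request. A non-blocking alternative (a sketch, not the code used here) compares against the counters from the previous call instead:

    import psutil

    # interval=None returns immediately, using the delta since the last
    # call; the very first such call returns a meaningless 0.0 and
    # should be discarded.
    psutil.cpu_percent(interval=None)             # prime the counters
    # ... on a later poll:
    overall = psutil.cpu_percent(interval=None)   # non-blocking
    per_core = psutil.cpu_percent(interval=None, percpu=True)
    print(overall, per_core)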
336 | @register_sysinfo |
|
336 | @register_sysinfo | |
337 | def storage(): |
|
337 | def storage(): | |
338 | from rhodecode.lib.helpers import format_byte_size_binary |
|
338 | from rhodecode.lib.helpers import format_byte_size_binary | |
339 | from rhodecode.model.settings import VcsSettingsModel |
|
339 | from rhodecode.model.settings import VcsSettingsModel | |
340 | path = VcsSettingsModel().get_repos_location() |
|
340 | path = VcsSettingsModel().get_repos_location() | |
341 |
|
341 | |||
342 | value = dict(percent=0, used=0, total=0, path=path, text='') |
|
342 | value = dict(percent=0, used=0, total=0, path=path, text='') | |
343 | state = STATE_OK_DEFAULT |
|
343 | state = STATE_OK_DEFAULT | |
344 | if not psutil: |
|
344 | if not psutil: | |
345 | return SysInfoRes(value=value, state=state) |
|
345 | return SysInfoRes(value=value, state=state) | |
346 |
|
346 | |||
347 | try: |
|
347 | try: | |
348 | value.update(dict(psutil.disk_usage(path)._asdict())) |
|
348 | value.update(dict(psutil.disk_usage(path)._asdict())) | |
349 | except Exception as e: |
|
349 | except Exception as e: | |
350 | log.exception('Failed to fetch disk info') |
|
350 | log.exception('Failed to fetch disk info') | |
351 | state = {'message': str(e), 'type': STATE_ERR} |
|
351 | state = {'message': str(e), 'type': STATE_ERR} | |
352 |
|
352 | |||
353 | human_value = value.copy() |
|
353 | human_value = value.copy() | |
354 | human_value['used'] = format_byte_size_binary(value['used']) |
|
354 | human_value['used'] = format_byte_size_binary(value['used']) | |
355 | human_value['total'] = format_byte_size_binary(value['total']) |
|
355 | human_value['total'] = format_byte_size_binary(value['total']) | |
356 | human_value['text'] = "{}/{}, {}% used".format( |
|
356 | human_value['text'] = "{}/{}, {}% used".format( | |
357 | format_byte_size_binary(value['used']), |
|
357 | format_byte_size_binary(value['used']), | |
358 | format_byte_size_binary(value['total']), |
|
358 | format_byte_size_binary(value['total']), | |
359 | value['percent']) |
|
359 | value['percent']) | |
360 |
|
360 | |||
361 | if state['type'] == STATE_OK and value['percent'] > 90: |
|
361 | if state['type'] == STATE_OK and value['percent'] > 90: | |
362 | msg = 'Critical: your disk space is very low.' |
|
362 | msg = 'Critical: your disk space is very low.' | |
363 | state = {'message': msg, 'type': STATE_ERR} |
|
363 | state = {'message': msg, 'type': STATE_ERR} | |
364 |
|
364 | |||
365 | elif state['type'] == STATE_OK and value['percent'] > 70: |
|
365 | elif state['type'] == STATE_OK and value['percent'] > 70: | |
366 | msg = 'Warning: your disk space is running low.' |
|
366 | msg = 'Warning: your disk space is running low.' | |
367 | state = {'message': msg, 'type': STATE_WARN} |
|
367 | state = {'message': msg, 'type': STATE_WARN} | |
368 |
|
368 | |||
369 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
369 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
370 |
|
370 | |||
371 |
|
371 | |||
372 | @register_sysinfo |
|
372 | @register_sysinfo | |
373 | def storage_inodes(): |
|
373 | def storage_inodes(): | |
374 | from rhodecode.model.settings import VcsSettingsModel |
|
374 | from rhodecode.model.settings import VcsSettingsModel | |
375 | path = VcsSettingsModel().get_repos_location() |
|
375 | path = VcsSettingsModel().get_repos_location() | |
376 |
|
376 | |||
377 | value = dict(percent=0, free=0, used=0, total=0, path=path, text='') |
|
377 | value = dict(percent=0, free=0, used=0, total=0, path=path, text='') | |
378 | state = STATE_OK_DEFAULT |
|
378 | state = STATE_OK_DEFAULT | |
379 | if not psutil: |
|
379 | if not psutil: | |
380 | return SysInfoRes(value=value, state=state) |
|
380 | return SysInfoRes(value=value, state=state) | |
381 |
|
381 | |||
382 | try: |
|
382 | try: | |
383 | i_stat = os.statvfs(path) |
|
383 | i_stat = os.statvfs(path) | |
384 | value['free'] = i_stat.f_ffree |
|
384 | value['free'] = i_stat.f_ffree | |
385 | value['used'] = i_stat.f_files-i_stat.f_favail |
|
385 | value['used'] = i_stat.f_files-i_stat.f_favail | |
386 | value['total'] = i_stat.f_files |
|
386 | value['total'] = i_stat.f_files | |
387 | value['percent'] = percentage(value['used'], value['total']) |
|
387 | value['percent'] = percentage(value['used'], value['total']) | |
388 | except Exception as e: |
|
388 | except Exception as e: | |
389 | log.exception('Failed to fetch disk inodes info') |
|
389 | log.exception('Failed to fetch disk inodes info') | |
390 | state = {'message': str(e), 'type': STATE_ERR} |
|
390 | state = {'message': str(e), 'type': STATE_ERR} | |
391 |
|
391 | |||
392 | human_value = value.copy() |
|
392 | human_value = value.copy() | |
393 | human_value['text'] = "{}/{}, {}% used".format( |
|
393 | human_value['text'] = "{}/{}, {}% used".format( | |
394 | value['used'], value['total'], value['percent']) |
|
394 | value['used'], value['total'], value['percent']) | |
395 |
|
395 | |||
396 | if state['type'] == STATE_OK and value['percent'] > 90: |
|
396 | if state['type'] == STATE_OK and value['percent'] > 90: | |
397 | msg = 'Critical: your disk free inodes are very low.' |
|
397 | msg = 'Critical: your disk free inodes are very low.' | |
398 | state = {'message': msg, 'type': STATE_ERR} |
|
398 | state = {'message': msg, 'type': STATE_ERR} | |
399 |
|
399 | |||
400 | elif state['type'] == STATE_OK and value['percent'] > 70: |
|
400 | elif state['type'] == STATE_OK and value['percent'] > 70: | |
401 | msg = 'Warning: your disk free inodes are running low.' |
|
401 | msg = 'Warning: your disk free inodes are running low.' | |
402 | state = {'message': msg, 'type': STATE_WARN} |
|
402 | state = {'message': msg, 'type': STATE_WARN} | |
403 |
|
403 | |||
404 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
404 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
405 |
|
405 | |||
406 |
|
406 | |||
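The inode arithmetic in `storage_inodes()` above is easy to misread: `f_files` is the total inode count, `f_ffree` the free count, and `f_favail` the count available to unprivileged users, so `f_files - f_favail` treats root-reserved inodes as used. A POSIX-only standalone sketch; the inline `percentage()` helper is an assumed stand-in for the one defined earlier in this module:

    import os

    def percentage(part, whole):
        # assumed equivalent of the module-level helper
        whole = max(whole, 1)
        return round(100.0 * part / whole, 1)

    st = os.statvfs('/')
    total = st.f_files                  # all inodes on the filesystem
    free = st.f_ffree                   # free, including root-reserved
    used = st.f_files - st.f_favail     # used, from a regular user's view
    print('{}/{} inodes, {}% used'.format(used, total, percentage(used, total)))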
407 | @register_sysinfo |
|
407 | @register_sysinfo | |
408 | def storage_archives(): |
|
408 | def storage_archives(): | |
409 | import rhodecode |
|
409 | import rhodecode | |
410 | from rhodecode.lib.utils import safe_str |
|
410 | from rhodecode.lib.utils import safe_str | |
411 | from rhodecode.lib.helpers import format_byte_size_binary |
|
411 | from rhodecode.lib.helpers import format_byte_size_binary | |
412 |
|
412 | |||
413 | msg = 'Enable this by setting ' \ |
|
413 | msg = 'Enable this by setting ' \ | |
414 | 'archive_cache_dir=/path/to/cache option in the .ini file' |
|
414 | 'archive_cache_dir=/path/to/cache option in the .ini file' | |
415 | path = safe_str(rhodecode.CONFIG.get('archive_cache_dir', msg)) |
|
415 | path = safe_str(rhodecode.CONFIG.get('archive_cache_dir', msg)) | |
416 |
|
416 | |||
417 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') |
|
417 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') | |
418 | state = STATE_OK_DEFAULT |
|
418 | state = STATE_OK_DEFAULT | |
419 | try: |
|
419 | try: | |
420 | items_count = 0 |
|
420 | items_count = 0 | |
421 | used = 0 |
|
421 | used = 0 | |
422 | for root, dirs, files in os.walk(path): |
|
422 | for root, dirs, files in os.walk(path): | |
423 | if root == path: |
|
423 | if root == path: | |
424 | items_count = len(files) |
|
424 | items_count = len(files) | |
425 |
|
425 | |||
426 | for f in files: |
|
426 | for f in files: | |
427 | try: |
|
427 | try: | |
428 | used += os.path.getsize(os.path.join(root, f)) |
|
428 | used += os.path.getsize(os.path.join(root, f)) | |
429 | except OSError: |
|
429 | except OSError: | |
430 | pass |
|
430 | pass | |
431 | value.update({ |
|
431 | value.update({ | |
432 | 'percent': 100, |
|
432 | 'percent': 100, | |
433 | 'used': used, |
|
433 | 'used': used, | |
434 | 'total': used, |
|
434 | 'total': used, | |
435 | 'items': items_count |
|
435 | 'items': items_count | |
436 | }) |
|
436 | }) | |
437 |
|
437 | |||
438 | except Exception as e: |
|
438 | except Exception as e: | |
439 | log.exception('failed to fetch archive cache storage') |
|
439 | log.exception('failed to fetch archive cache storage') | |
440 | state = {'message': str(e), 'type': STATE_ERR} |
|
440 | state = {'message': str(e), 'type': STATE_ERR} | |
441 |
|
441 | |||
442 | human_value = value.copy() |
|
442 | human_value = value.copy() | |
443 | human_value['used'] = format_byte_size_binary(value['used']) |
|
443 | human_value['used'] = format_byte_size_binary(value['used']) | |
444 | human_value['total'] = format_byte_size_binary(value['total']) |
|
444 | human_value['total'] = format_byte_size_binary(value['total']) | |
445 | human_value['text'] = "{} ({} items)".format( |
|
445 | human_value['text'] = "{} ({} items)".format( | |
446 | human_value['used'], value['items']) |
|
446 | human_value['used'], value['items']) | |
447 |
|
447 | |||
448 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
448 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
449 |
|
449 | |||
450 |
|
450 | |||
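`storage_archives()` above and `storage_gist()` below share the same walk-and-sum pattern: only the top-level entries count as 'items', file sizes are accumulated across the whole tree, and a per-file `OSError` (a file deleted mid-walk, a dangling symlink) is swallowed so one bad entry cannot abort the scan. The pattern extracted into a hypothetical helper:

    import os

    def dir_usage(path, count_dirs=False):
        """Return (item_count, total_bytes) under `path`.

        Archives count top-level files; gists count top-level dirs.
        """
        items, used = 0, 0
        for root, dirs, files in os.walk(path):
            if root == path:
                items = len(dirs) if count_dirs else len(files)
            for f in files:
                try:
                    used += os.path.getsize(os.path.join(root, f))
                except OSError:  # vanished mid-walk or broken symlink
                    pass
        return items, used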
451 | @register_sysinfo |
|
451 | @register_sysinfo | |
452 | def storage_gist(): |
|
452 | def storage_gist(): | |
453 | from rhodecode.model.gist import GIST_STORE_LOC |
|
453 | from rhodecode.model.gist import GIST_STORE_LOC | |
454 | from rhodecode.model.settings import VcsSettingsModel |
|
454 | from rhodecode.model.settings import VcsSettingsModel | |
455 | from rhodecode.lib.utils import safe_str |
|
455 | from rhodecode.lib.utils import safe_str | |
456 | from rhodecode.lib.helpers import format_byte_size_binary |
|
456 | from rhodecode.lib.helpers import format_byte_size_binary | |
457 | path = safe_str(os.path.join( |
|
457 | path = safe_str(os.path.join( | |
458 | VcsSettingsModel().get_repos_location(), GIST_STORE_LOC)) |
|
458 | VcsSettingsModel().get_repos_location(), GIST_STORE_LOC)) | |
459 |
|
459 | |||
460 | # gist storage |
|
460 | # gist storage | |
461 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') |
|
461 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') | |
462 | state = STATE_OK_DEFAULT |
|
462 | state = STATE_OK_DEFAULT | |
463 |
|
463 | |||
464 | try: |
|
464 | try: | |
465 | items_count = 0 |
|
465 | items_count = 0 | |
466 | used = 0 |
|
466 | used = 0 | |
467 | for root, dirs, files in os.walk(path): |
|
467 | for root, dirs, files in os.walk(path): | |
468 | if root == path: |
|
468 | if root == path: | |
469 | items_count = len(dirs) |
|
469 | items_count = len(dirs) | |
470 |
|
470 | |||
471 | for f in files: |
|
471 | for f in files: | |
472 | try: |
|
472 | try: | |
473 | used += os.path.getsize(os.path.join(root, f)) |
|
473 | used += os.path.getsize(os.path.join(root, f)) | |
474 | except OSError: |
|
474 | except OSError: | |
475 | pass |
|
475 | pass | |
476 | value.update({ |
|
476 | value.update({ | |
477 | 'percent': 100, |
|
477 | 'percent': 100, | |
478 | 'used': used, |
|
478 | 'used': used, | |
479 | 'total': used, |
|
479 | 'total': used, | |
480 | 'items': items_count |
|
480 | 'items': items_count | |
481 | }) |
|
481 | }) | |
482 | except Exception as e: |
|
482 | except Exception as e: | |
483 | log.exception('failed to fetch gist storage items') |
|
483 | log.exception('failed to fetch gist storage items') | |
484 | state = {'message': str(e), 'type': STATE_ERR} |
|
484 | state = {'message': str(e), 'type': STATE_ERR} | |
485 |
|
485 | |||
486 | human_value = value.copy() |
|
486 | human_value = value.copy() | |
487 | human_value['used'] = format_byte_size_binary(value['used']) |
|
487 | human_value['used'] = format_byte_size_binary(value['used']) | |
488 | human_value['total'] = format_byte_size_binary(value['total']) |
|
488 | human_value['total'] = format_byte_size_binary(value['total']) | |
489 | human_value['text'] = "{} ({} items)".format( |
|
489 | human_value['text'] = "{} ({} items)".format( | |
490 | human_value['used'], value['items']) |
|
490 | human_value['used'], value['items']) | |
491 |
|
491 | |||
492 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
492 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
493 |
|
493 | |||
494 |
|
494 | |||
495 | @register_sysinfo |
|
495 | @register_sysinfo | |
496 | def storage_temp(): |
|
496 | def storage_temp(): | |
497 | import tempfile |
|
497 | import tempfile | |
498 | from rhodecode.lib.helpers import format_byte_size_binary |
|
498 | from rhodecode.lib.helpers import format_byte_size_binary | |
499 |
|
499 | |||
500 | path = tempfile.gettempdir() |
|
500 | path = tempfile.gettempdir() | |
501 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') |
|
501 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') | |
502 | state = STATE_OK_DEFAULT |
|
502 | state = STATE_OK_DEFAULT | |
503 |
|
503 | |||
504 | if not psutil: |
|
504 | if not psutil: | |
505 | return SysInfoRes(value=value, state=state) |
|
505 | return SysInfoRes(value=value, state=state) | |
506 |
|
506 | |||
507 | try: |
|
507 | try: | |
508 | value.update(dict(psutil.disk_usage(path)._asdict())) |
|
508 | value.update(dict(psutil.disk_usage(path)._asdict())) | |
509 | except Exception as e: |
|
509 | except Exception as e: | |
510 | log.exception('Failed to fetch temp dir info') |
|
510 | log.exception('Failed to fetch temp dir info') | |
511 | state = {'message': str(e), 'type': STATE_ERR} |
|
511 | state = {'message': str(e), 'type': STATE_ERR} | |
512 |
|
512 | |||
513 | human_value = value.copy() |
|
513 | human_value = value.copy() | |
514 | human_value['used'] = format_byte_size_binary(value['used']) |
|
514 | human_value['used'] = format_byte_size_binary(value['used']) | |
515 | human_value['total'] = format_byte_size_binary(value['total']) |
|
515 | human_value['total'] = format_byte_size_binary(value['total']) | |
516 | human_value['text'] = "{}/{}, {}% used".format( |
|
516 | human_value['text'] = "{}/{}, {}% used".format( | |
517 | format_byte_size_binary(value['used']), |
|
517 | format_byte_size_binary(value['used']), | |
518 | format_byte_size_binary(value['total']), |
|
518 | format_byte_size_binary(value['total']), | |
519 | value['percent']) |
|
519 | value['percent']) | |
520 |
|
520 | |||
521 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
521 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
522 |
|
522 | |||
523 |
|
523 | |||
524 | @register_sysinfo |
|
524 | @register_sysinfo | |
525 | def search_info(): |
|
525 | def search_info(): | |
526 | import rhodecode |
|
526 | import rhodecode | |
527 | from rhodecode.lib.index import searcher_from_config |
|
527 | from rhodecode.lib.index import searcher_from_config | |
528 |
|
528 | |||
529 | backend = rhodecode.CONFIG.get('search.module', '') |
|
529 | backend = rhodecode.CONFIG.get('search.module', '') | |
530 | location = rhodecode.CONFIG.get('search.location', '') |
|
530 | location = rhodecode.CONFIG.get('search.location', '') | |
531 |
|
531 | |||
532 | try: |
|
532 | try: | |
533 | searcher = searcher_from_config(rhodecode.CONFIG) |
|
533 | searcher = searcher_from_config(rhodecode.CONFIG) | |
534 | searcher = searcher.__class__.__name__ |
|
534 | searcher = searcher.__class__.__name__ | |
535 | except Exception: |
|
535 | except Exception: | |
536 | searcher = None |
|
536 | searcher = None | |
537 |
|
537 | |||
538 | value = dict( |
|
538 | value = dict( | |
539 | backend=backend, searcher=searcher, location=location, text='') |
|
539 | backend=backend, searcher=searcher, location=location, text='') | |
540 | state = STATE_OK_DEFAULT |
|
540 | state = STATE_OK_DEFAULT | |
541 |
|
541 | |||
542 | human_value = value.copy() |
|
542 | human_value = value.copy() | |
543 | human_value['text'] = "backend:`{}`".format(human_value['backend']) |
|
543 | human_value['text'] = "backend:`{}`".format(human_value['backend']) | |
544 |
|
544 | |||
545 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
545 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
546 |
|
546 | |||
547 |
|
547 | |||
548 | @register_sysinfo |
|
548 | @register_sysinfo | |
549 | def git_info(): |
|
549 | def git_info(): | |
550 | from rhodecode.lib.vcs.backends import git |
|
550 | from rhodecode.lib.vcs.backends import git | |
551 | state = STATE_OK_DEFAULT |
|
551 | state = STATE_OK_DEFAULT | |
552 | value = human_value = '' |
|
552 | value = human_value = '' | |
553 | try: |
|
553 | try: | |
554 | value = git.discover_git_version(raise_on_exc=True) |
|
554 | value = git.discover_git_version(raise_on_exc=True) | |
555 | human_value = 'version reported from VCSServer: {}'.format(value) |
|
555 | human_value = 'version reported from VCSServer: {}'.format(value) | |
556 | except Exception as e: |
|
556 | except Exception as e: | |
557 | state = {'message': str(e), 'type': STATE_ERR} |
|
557 | state = {'message': str(e), 'type': STATE_ERR} | |
558 |
|
558 | |||
559 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
559 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
560 |
|
560 | |||
561 |
|
561 | |||
562 | @register_sysinfo |
|
562 | @register_sysinfo | |
563 | def hg_info(): |
|
563 | def hg_info(): | |
564 | from rhodecode.lib.vcs.backends import hg |
|
564 | from rhodecode.lib.vcs.backends import hg | |
565 | state = STATE_OK_DEFAULT |
|
565 | state = STATE_OK_DEFAULT | |
566 | value = human_value = '' |
|
566 | value = human_value = '' | |
567 | try: |
|
567 | try: | |
568 | value = hg.discover_hg_version(raise_on_exc=True) |
|
568 | value = hg.discover_hg_version(raise_on_exc=True) | |
569 | human_value = 'version reported from VCSServer: {}'.format(value) |
|
569 | human_value = 'version reported from VCSServer: {}'.format(value) | |
570 | except Exception as e: |
|
570 | except Exception as e: | |
571 | state = {'message': str(e), 'type': STATE_ERR} |
|
571 | state = {'message': str(e), 'type': STATE_ERR} | |
572 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
572 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
573 |
|
573 | |||
574 |
|
574 | |||
575 | @register_sysinfo |
|
575 | @register_sysinfo | |
576 | def svn_info(): |
|
576 | def svn_info(): | |
577 | from rhodecode.lib.vcs.backends import svn |
|
577 | from rhodecode.lib.vcs.backends import svn | |
578 | state = STATE_OK_DEFAULT |
|
578 | state = STATE_OK_DEFAULT | |
579 | value = human_value = '' |
|
579 | value = human_value = '' | |
580 | try: |
|
580 | try: | |
581 | value = svn.discover_svn_version(raise_on_exc=True) |
|
581 | value = svn.discover_svn_version(raise_on_exc=True) | |
582 | human_value = 'version reported from VCSServer: {}'.format(value) |
|
582 | human_value = 'version reported from VCSServer: {}'.format(value) | |
583 | except Exception as e: |
|
583 | except Exception as e: | |
584 | state = {'message': str(e), 'type': STATE_ERR} |
|
584 | state = {'message': str(e), 'type': STATE_ERR} | |
585 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
585 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
586 |
|
586 | |||
587 |
|
587 | |||
588 | @register_sysinfo |
|
588 | @register_sysinfo | |
589 | def vcs_backends(): |
|
589 | def vcs_backends(): | |
590 | import rhodecode |
|
590 | import rhodecode | |
591 | value = rhodecode.CONFIG.get('vcs.backends') |
|
591 | value = rhodecode.CONFIG.get('vcs.backends') | |
592 | human_value = 'Enabled backends in order: {}'.format(','.join(value)) |
|
592 | human_value = 'Enabled backends in order: {}'.format(','.join(value)) | |
593 | return SysInfoRes(value=value, human_value=human_value) |
|
593 | return SysInfoRes(value=value, human_value=human_value) | |
594 |
|
594 | |||
595 |
|
595 | |||
596 | @register_sysinfo |
|
596 | @register_sysinfo | |
597 | def vcs_server(): |
|
597 | def vcs_server(): | |
598 | import rhodecode |
|
598 | import rhodecode | |
599 | from rhodecode.lib.vcs.backends import get_vcsserver_service_data |
|
599 | from rhodecode.lib.vcs.backends import get_vcsserver_service_data | |
600 |
|
600 | |||
601 | server_url = rhodecode.CONFIG.get('vcs.server') |
|
601 | server_url = rhodecode.CONFIG.get('vcs.server') | |
602 | enabled = rhodecode.CONFIG.get('vcs.server.enable') |
|
602 | enabled = rhodecode.CONFIG.get('vcs.server.enable') | |
603 | protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http' |
|
603 | protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http' | |
604 | state = STATE_OK_DEFAULT |
|
604 | state = STATE_OK_DEFAULT | |
605 | version = None |
|
605 | version = None | |
606 | workers = 0 |
|
606 | workers = 0 | |
607 |
|
607 | |||
608 | try: |
|
608 | try: | |
609 | data = get_vcsserver_service_data() |
|
609 | data = get_vcsserver_service_data() | |
610 | if data and 'version' in data: |
|
610 | if data and 'version' in data: | |
611 | version = data['version'] |
|
611 | version = data['version'] | |
612 |
|
612 | |||
613 | if data and 'config' in data: |
|
613 | if data and 'config' in data: | |
614 | conf = data['config'] |
|
614 | conf = data['config'] | |
615 | workers = conf.get('workers', 'NOT AVAILABLE') |
|
615 | workers = conf.get('workers', 'NOT AVAILABLE') | |
616 |
|
616 | |||
617 | connection = 'connected' |
|
617 | connection = 'connected' | |
618 | except Exception as e: |
|
618 | except Exception as e: | |
619 | connection = 'failed' |
|
619 | connection = 'failed' | |
620 | state = {'message': str(e), 'type': STATE_ERR} |
|
620 | state = {'message': str(e), 'type': STATE_ERR} | |
621 |
|
621 | |||
622 | value = dict( |
|
622 | value = dict( | |
623 | url=server_url, |
|
623 | url=server_url, | |
624 | enabled=enabled, |
|
624 | enabled=enabled, | |
625 | protocol=protocol, |
|
625 | protocol=protocol, | |
626 | connection=connection, |
|
626 | connection=connection, | |
627 | version=version, |
|
627 | version=version, | |
628 | text='', |
|
628 | text='', | |
629 | ) |
|
629 | ) | |
630 |
|
630 | |||
631 | human_value = value.copy() |
|
631 | human_value = value.copy() | |
632 | human_value['text'] = \ |
|
632 | human_value['text'] = \ | |
633 | '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format( |
|
633 | '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format( | |
634 | url=server_url, ver=version, workers=workers, mode=protocol, |
|
634 | url=server_url, ver=version, workers=workers, mode=protocol, | |
635 | conn=connection) |
|
635 | conn=connection) | |
636 |
|
636 | |||
637 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
637 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
638 |
|
638 | |||
639 |
|
639 | |||
640 | @register_sysinfo |
|
640 | @register_sysinfo | |
641 | def vcs_server_config(): |
|
641 | def vcs_server_config(): | |
642 | from rhodecode.lib.vcs.backends import get_vcsserver_service_data |
|
642 | from rhodecode.lib.vcs.backends import get_vcsserver_service_data | |
643 | state = STATE_OK_DEFAULT |
|
643 | state = STATE_OK_DEFAULT | |
644 |
|
644 | |||
645 | value = {} |
|
645 | value = {} | |
646 | try: |
|
646 | try: | |
647 | data = get_vcsserver_service_data() |
|
647 | data = get_vcsserver_service_data() | |
648 | value = data['app_config'] |
|
648 | value = data['app_config'] | |
649 | except Exception as e: |
|
649 | except Exception as e: | |
650 | state = {'message': str(e), 'type': STATE_ERR} |
|
650 | state = {'message': str(e), 'type': STATE_ERR} | |
651 |
|
651 | |||
652 | human_value = value.copy() |
|
652 | human_value = value.copy() | |
653 | human_value['text'] = 'VCS Server config' |
|
653 | human_value['text'] = 'VCS Server config' | |
654 |
|
654 | |||
655 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
655 | return SysInfoRes(value=value, state=state, human_value=human_value) | |
656 |
|
656 | |||
657 |
|
657 | |||
658 | @register_sysinfo |
|
658 | @register_sysinfo | |
659 | def rhodecode_app_info(): |
|
659 | def rhodecode_app_info(): | |
660 | import rhodecode |
|
660 | import rhodecode | |
661 | edition = rhodecode.CONFIG.get('rhodecode.edition') |
|
661 | edition = rhodecode.CONFIG.get('rhodecode.edition') | |
662 |
|
662 | |||
663 | value = dict( |
|
663 | value = dict( | |
664 | rhodecode_version=rhodecode.__version__, |
|
664 | rhodecode_version=rhodecode.__version__, | |
665 | rhodecode_lib_path=os.path.abspath(rhodecode.__file__), |
|
665 | rhodecode_lib_path=os.path.abspath(rhodecode.__file__), | |
666 | text='' |
|
666 | text='' | |
667 | ) |
|
667 | ) | |
668 | human_value = value.copy() |
|
668 | human_value = value.copy() | |
669 | human_value['text'] = 'RhodeCode {edition}, version {ver}'.format( |
|
669 | human_value['text'] = 'RhodeCode {edition}, version {ver}'.format( | |
670 | edition=edition, ver=value['rhodecode_version'] |
|
670 | edition=edition, ver=value['rhodecode_version'] | |
671 | ) |
|
671 | ) | |
672 | return SysInfoRes(value=value, human_value=human_value) |
|
672 | return SysInfoRes(value=value, human_value=human_value) | |
673 |
|
673 | |||
674 |
|
674 | |||
675 | @register_sysinfo |
|
675 | @register_sysinfo | |
676 | def rhodecode_config(): |
|
676 | def rhodecode_config(): | |
677 | import rhodecode |
|
677 | import rhodecode | |
678 | path = rhodecode.CONFIG.get('__file__') |
|
678 | path = rhodecode.CONFIG.get('__file__') | |
679 | rhodecode_ini_safe = rhodecode.CONFIG.copy() |
|
679 | rhodecode_ini_safe = rhodecode.CONFIG.copy() | |
680 | cert_path = get_cert_path(path) |
|
680 | cert_path = get_cert_path(path) | |
681 |
|
681 | |||
682 | try: |
|
682 | try: | |
683 | config = configparser.ConfigParser() |
|
683 | config = configparser.ConfigParser() | |
684 | config.read(path) |
|
684 | config.read(path) | |
685 | parsed_ini = config |
|
685 | parsed_ini = config | |
686 | if parsed_ini.has_section('server:main'): |
|
686 | if parsed_ini.has_section('server:main'): | |
687 | parsed_ini = dict(parsed_ini.items('server:main')) |
|
687 | parsed_ini = dict(parsed_ini.items('server:main')) | |
688 | except Exception: |
|
688 | except Exception: | |
689 | log.exception('Failed to read .ini file for display') |
|
689 | log.exception('Failed to read .ini file for display') | |
690 | parsed_ini = {} |
|
690 | parsed_ini = {} | |
691 |
|
691 | |||
692 | rhodecode_ini_safe['server:main'] = parsed_ini |
|
692 | rhodecode_ini_safe['server:main'] = parsed_ini | |
693 |
|
693 | |||
694 | blacklist = [ |
|
694 | blacklist = [ | |
695 | 'rhodecode_license_key', |
|
695 | 'rhodecode_license_key', | |
696 | 'routes.map', |
|
696 | 'routes.map', | |
697 | 'sqlalchemy.db1.url', |
|
697 | 'sqlalchemy.db1.url', | |
698 | 'channelstream.secret', |
|
698 | 'channelstream.secret', | |
699 | 'beaker.session.secret', |
|
699 | 'beaker.session.secret', | |
700 | 'rhodecode.encrypted_values.secret', |
|
700 | 'rhodecode.encrypted_values.secret', | |
701 | 'rhodecode_auth_github_consumer_key', |
|
701 | 'rhodecode_auth_github_consumer_key', | |
702 | 'rhodecode_auth_github_consumer_secret', |
|
702 | 'rhodecode_auth_github_consumer_secret', | |
703 | 'rhodecode_auth_google_consumer_key', |
|
703 | 'rhodecode_auth_google_consumer_key', | |
704 | 'rhodecode_auth_google_consumer_secret', |
|
704 | 'rhodecode_auth_google_consumer_secret', | |
705 | 'rhodecode_auth_bitbucket_consumer_secret', |
|
705 | 'rhodecode_auth_bitbucket_consumer_secret', | |
706 | 'rhodecode_auth_bitbucket_consumer_key', |
|
706 | 'rhodecode_auth_bitbucket_consumer_key', | |
707 | 'rhodecode_auth_twitter_consumer_secret', |
|
707 | 'rhodecode_auth_twitter_consumer_secret', | |
708 | 'rhodecode_auth_twitter_consumer_key', |
|
708 | 'rhodecode_auth_twitter_consumer_key', | |
709 |
|
709 | |||
710 | 'rhodecode_auth_twitter_secret', |
|
710 | 'rhodecode_auth_twitter_secret', | |
711 | 'rhodecode_auth_github_secret', |
|
711 | 'rhodecode_auth_github_secret', | |
712 | 'rhodecode_auth_google_secret', |
|
712 | 'rhodecode_auth_google_secret', | |
713 | 'rhodecode_auth_bitbucket_secret', |
|
713 | 'rhodecode_auth_bitbucket_secret', | |
714 |
|
714 | |||
715 | 'appenlight.api_key', |
|
715 | 'appenlight.api_key', | |
716 | ('app_conf', 'sqlalchemy.db1.url') |
|
716 | ('app_conf', 'sqlalchemy.db1.url') | |
717 | ] |
|
717 | ] | |
718 | for k in blacklist: |
|
718 | for k in blacklist: | |
719 | if isinstance(k, tuple): |
|
719 | if isinstance(k, tuple): | |
720 | section, key = k |
|
720 | section, key = k | |
721 | if section in rhodecode_ini_safe: |
|
721 | if section in rhodecode_ini_safe: | |
722 | rhodecode_ini_safe[section] = '**OBFUSCATED**' |
|
722 | rhodecode_ini_safe[section] = '**OBFUSCATED**' | |
723 | else: |
|
723 | else: | |
724 | rhodecode_ini_safe.pop(k, None) |
|
724 | rhodecode_ini_safe.pop(k, None) | |
725 |
|
725 | |||
726 | # TODO: maybe put some CONFIG checks here ? |
|
726 | # TODO: maybe put some CONFIG checks here ? | |
727 | return SysInfoRes(value={'config': rhodecode_ini_safe, |
|
727 | return SysInfoRes(value={'config': rhodecode_ini_safe, | |
728 | 'path': path, 'cert_path': cert_path}) |
|
728 | 'path': path, 'cert_path': cert_path}) | |
729 |
|
729 | |||
730 |
|
730 | |||
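One subtlety in the blacklist loop above: for tuple entries the `key` half is unpacked but never used, so the whole named section is replaced with `'**OBFUSCATED**'` rather than the single key inside it, while plain-string entries are removed from the top-level dict outright. A condensed sketch of that behavior, with made-up values:

    config = {
        'beaker.session.secret': 'xyz',
        'app_conf': {'sqlalchemy.db1.url': 'postgresql://u:p@host/db'},
        'use': 'egg:rhodecode-enterprise-ce',
    }
    blacklist = ['beaker.session.secret', ('app_conf', 'sqlalchemy.db1.url')]

    for k in blacklist:
        if isinstance(k, tuple):
            section, key = k        # `key` is unpacked but unused:
            if section in config:   # the entire section gets masked
                config[section] = '**OBFUSCATED**'
        else:
            config.pop(k, None)     # scalar secrets are dropped entirely

    print(config)
    # {'app_conf': '**OBFUSCATED**', 'use': 'egg:rhodecode-enterprise-ce'}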
731 | @register_sysinfo |
|
731 | @register_sysinfo | |
732 | def database_info(): |
|
732 | def database_info(): | |
733 | import rhodecode |
|
733 | import rhodecode | |
734 | from sqlalchemy.engine import url as engine_url |
|
734 | from sqlalchemy.engine import url as engine_url | |
735 | from rhodecode.model.meta import Base as sql_base, Session |
|
735 | from rhodecode.model.meta import Base as sql_base, Session | |
736 | from rhodecode.model.db import DbMigrateVersion |
|
736 | from rhodecode.model.db import DbMigrateVersion | |
737 |
|
737 | |||
738 | state = STATE_OK_DEFAULT |
|
738 | state = STATE_OK_DEFAULT | |
739 |
|
739 | |||
740 | db_migrate = DbMigrateVersion.query().filter( |
|
740 | db_migrate = DbMigrateVersion.query().filter( | |
741 | DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one() |
|
741 | DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one() | |
742 |
|
742 | |||
743 | db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url']) |
|
743 | db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url']) | |
744 |
|
744 | |||
745 | try: |
|
745 | try: | |
746 | engine = sql_base.metadata.bind |
|
746 | engine = sql_base.metadata.bind | |
747 | db_server_info = engine.dialect._get_server_version_info( |
|
747 | db_server_info = engine.dialect._get_server_version_info( | |
748 | Session.connection(bind=engine)) |
|
748 | Session.connection(bind=engine)) | |
749 | db_version = '.'.join(map(str, db_server_info)) |
|
749 | db_version = '.'.join(map(str, db_server_info)) | |
750 | except Exception: |
|
750 | except Exception: | |
751 | log.exception('failed to fetch db version') |
|
751 | log.exception('failed to fetch db version') | |
752 | db_version = 'UNKNOWN' |
|
752 | db_version = 'UNKNOWN' | |
753 |
|
753 | |||
754 | db_info = dict( |
|
754 | db_info = dict( | |
755 | migrate_version=db_migrate.version, |
|
755 | migrate_version=db_migrate.version, | |
756 | type=db_url_obj.get_backend_name(), |
|
756 | type=db_url_obj.get_backend_name(), | |
757 | version=db_version, |
|
757 | version=db_version, | |
758 | url=repr(db_url_obj) |
|
758 | url=repr(db_url_obj) | |
759 | ) |
|
759 | ) | |
760 | current_version = db_migrate.version |
|
760 | current_version = db_migrate.version | |
761 | expected_version = rhodecode.__dbversion__ |
|
761 | expected_version = rhodecode.__dbversion__ | |
762 | if state['type'] == STATE_OK and current_version != expected_version: |
|
762 | if state['type'] == STATE_OK and current_version != expected_version: | |
763 | msg = 'Critical: database schema mismatch, ' \ |
|
763 | msg = 'Critical: database schema mismatch, ' \ | |
764 | 'expected version {}, got {}. ' \ |
|
764 | 'expected version {}, got {}. ' \ | |
765 | 'Please run migrations on your database.'.format( |
|
765 | 'Please run migrations on your database.'.format( | |
766 | expected_version, current_version) |
|
766 | expected_version, current_version) | |
767 | state = {'message': msg, 'type': STATE_ERR} |
|
767 | state = {'message': msg, 'type': STATE_ERR} | |
768 |
|
768 | |||
769 | human_value = db_info.copy() |
|
769 | human_value = db_info.copy() | |
770 | human_value['url'] = "{} @ migration version: {}".format( |
|
770 | human_value['url'] = "{} @ migration version: {}".format( | |
771 | db_info['url'], db_info['migrate_version']) |
|
771 | db_info['url'], db_info['migrate_version']) | |
772 | human_value['version'] = "{} {}".format(db_info['type'], db_info['version']) |
|
772 | human_value['version'] = "{} {}".format(db_info['type'], db_info['version']) | |
773 | return SysInfoRes(value=db_info, state=state, human_value=human_value) |
|
773 | return SysInfoRes(value=db_info, state=state, human_value=human_value) | |
774 |
|
774 | |||
775 |
|
775 | |||
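`database_info()` above stores `repr(db_url_obj)` rather than the raw config string because SQLAlchemy's URL object hides the password when rendered that way, keeping credentials off the system-info page. A quick illustration (the exact masked rendering can vary between SQLAlchemy versions):

    from sqlalchemy.engine import url as engine_url

    db_url = engine_url.make_url(
        'postgresql://rhodecode:s3cret@localhost/rhodecode')
    print(repr(db_url))               # postgresql://rhodecode:***@localhost/rhodecode
    print(db_url.get_backend_name())  # postgresql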
776 | @register_sysinfo |
|
776 | @register_sysinfo | |
777 | def server_info(environ): |
|
777 | def server_info(environ): | |
778 | import rhodecode |
|
778 | import rhodecode | |
779 | from rhodecode.lib.base import get_server_ip_addr, get_server_port |
|
779 | from rhodecode.lib.base import get_server_ip_addr, get_server_port | |
780 |
|
780 | |||
781 | value = { |
|
781 | value = { | |
782 | 'server_ip': '%s:%s' % ( |
|
782 | 'server_ip': '%s:%s' % ( | |
783 | get_server_ip_addr(environ, log_errors=False), |
|
783 | get_server_ip_addr(environ, log_errors=False), | |
784 | get_server_port(environ) |
|
784 | get_server_port(environ) | |
785 | ), |
|
785 | ), | |
786 | 'server_id': rhodecode.CONFIG.get('instance_id'), |
|
786 | 'server_id': rhodecode.CONFIG.get('instance_id'), | |
787 | } |
|
787 | } | |
788 | return SysInfoRes(value=value) |
|
788 | return SysInfoRes(value=value) | |
789 |
|
789 | |||
790 |
|
790 | |||
791 | @register_sysinfo |
|
791 | @register_sysinfo | |
792 | def usage_info(): |
|
792 | def usage_info(): | |
793 | from rhodecode.model.db import User, Repository |
|
793 | from rhodecode.model.db import User, Repository | |
794 | value = { |
|
794 | value = { | |
795 | 'users': User.query().count(), |
|
795 | 'users': User.query().count(), | |
796 | 'users_active': User.query().filter(User.active == True).count(), |
|
796 | 'users_active': User.query().filter(User.active == True).count(), | |
797 | 'repositories': Repository.query().count(), |
|
797 | 'repositories': Repository.query().count(), | |
798 | 'repository_types': { |
|
798 | 'repository_types': { | |
799 | 'hg': Repository.query().filter( |
|
799 | 'hg': Repository.query().filter( | |
800 | Repository.repo_type == 'hg').count(), |
|
800 | Repository.repo_type == 'hg').count(), | |
801 | 'git': Repository.query().filter( |
|
801 | 'git': Repository.query().filter( | |
802 | Repository.repo_type == 'git').count(), |
|
802 | Repository.repo_type == 'git').count(), | |
803 | 'svn': Repository.query().filter( |
|
803 | 'svn': Repository.query().filter( | |
804 | Repository.repo_type == 'svn').count(), |
|
804 | Repository.repo_type == 'svn').count(), | |
805 | }, |
|
805 | }, | |
806 | } |
|
806 | } | |
807 | return SysInfoRes(value=value) |
|
807 | return SysInfoRes(value=value) | |
808 |
|
808 | |||
809 |
|
809 | |||
810 | def get_system_info(environ): |
|
810 | def get_system_info(environ): | |
811 | environ = environ or {} |
|
811 | environ = environ or {} | |
812 | return { |
|
812 | return { | |
813 | 'rhodecode_app': SysInfo(rhodecode_app_info)(), |
|
813 | 'rhodecode_app': SysInfo(rhodecode_app_info)(), | |
814 | 'rhodecode_config': SysInfo(rhodecode_config)(), |
|
814 | 'rhodecode_config': SysInfo(rhodecode_config)(), | |
815 | 'rhodecode_usage': SysInfo(usage_info)(), |
|
815 | 'rhodecode_usage': SysInfo(usage_info)(), | |
816 | 'python': SysInfo(python_info)(), |
|
816 | 'python': SysInfo(python_info)(), | |
817 | 'py_modules': SysInfo(py_modules)(), |
|
817 | 'py_modules': SysInfo(py_modules)(), | |
818 |
|
818 | |||
819 | 'platform': SysInfo(platform_type)(), |
|
819 | 'platform': SysInfo(platform_type)(), | |
820 | 'locale': SysInfo(locale_info)(), |
|
820 | 'locale': SysInfo(locale_info)(), | |
821 | 'server': SysInfo(server_info, environ=environ)(), |
|
821 | 'server': SysInfo(server_info, environ=environ)(), | |
822 | 'database': SysInfo(database_info)(), |
|
822 | 'database': SysInfo(database_info)(), | |
823 | 'ulimit': SysInfo(ulimit_info)(), |
|
823 | 'ulimit': SysInfo(ulimit_info)(), | |
824 | 'storage': SysInfo(storage)(), |
|
824 | 'storage': SysInfo(storage)(), | |
825 | 'storage_inodes': SysInfo(storage_inodes)(), |
|
825 | 'storage_inodes': SysInfo(storage_inodes)(), | |
826 | 'storage_archive': SysInfo(storage_archives)(), |
|
826 | 'storage_archive': SysInfo(storage_archives)(), | |
827 | 'storage_gist': SysInfo(storage_gist)(), |
|
827 | 'storage_gist': SysInfo(storage_gist)(), | |
828 | 'storage_temp': SysInfo(storage_temp)(), |
|
828 | 'storage_temp': SysInfo(storage_temp)(), | |
829 |
|
829 | |||
830 | 'search': SysInfo(search_info)(), |
|
830 | 'search': SysInfo(search_info)(), | |
831 |
|
831 | |||
832 | 'uptime': SysInfo(uptime)(), |
|
832 | 'uptime': SysInfo(uptime)(), | |
833 | 'load': SysInfo(machine_load)(), |
|
833 | 'load': SysInfo(machine_load)(), | |
834 | 'cpu': SysInfo(cpu)(), |
|
834 | 'cpu': SysInfo(cpu)(), | |
835 | 'memory': SysInfo(memory)(), |
|
835 | 'memory': SysInfo(memory)(), | |
836 |
|
836 | |||
837 | 'vcs_backends': SysInfo(vcs_backends)(), |
|
837 | 'vcs_backends': SysInfo(vcs_backends)(), | |
838 | 'vcs_server': SysInfo(vcs_server)(), |
|
838 | 'vcs_server': SysInfo(vcs_server)(), | |
839 |
|
839 | |||
840 | 'vcs_server_config': SysInfo(vcs_server_config)(), |
|
840 | 'vcs_server_config': SysInfo(vcs_server_config)(), | |
841 |
|
841 | |||
842 | 'git': SysInfo(git_info)(), |
|
842 | 'git': SysInfo(git_info)(), | |
843 | 'hg': SysInfo(hg_info)(), |
|
843 | 'hg': SysInfo(hg_info)(), | |
844 | 'svn': SysInfo(svn_info)(), |
|
844 | 'svn': SysInfo(svn_info)(), | |
845 | } |
|
845 | } | |
846 |
|
846 | |||
847 |
|
847 | |||
848 | def load_system_info(key): |
|
848 | def load_system_info(key): | |
849 | """ |
|
849 | """ | |
850 | Loads a single registered system-info helper by key, e.g.: |
|
850 | Loads a single registered system-info helper by key, e.g.: | |
851 | get_sys_info('database') |
|
851 | get_sys_info('database') | |
852 | """ |
|
852 | """ | |
853 | return SysInfo(registered_helpers[key])() |
|
853 | return SysInfo(registered_helpers[key])() |
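Taken together, the first file is a small plugin registry: every `@register_sysinfo` function lands in `registered_helpers`, `SysInfo` wraps each helper so one failing probe cannot take down the whole report, and `get_system_info()` just fans out over the wrappers. The shape of that pattern in miniature (a sketch, not the actual `SysInfo` implementation):

    registered_helpers = {}

    def register_sysinfo(func):
        registered_helpers[func.__name__] = func
        return func

    class SysInfo(object):
        def __init__(self, func, **kwargs):
            self.func, self.kwargs = func, kwargs

        def __call__(self):
            try:
                return self.func(**self.kwargs)
            except Exception as e:
                # isolate a failing probe instead of failing the report
                return {'error': str(e)}

    @register_sysinfo
    def uptime():
        return {'uptime': 12345}

    print(SysInfo(registered_helpers['uptime'])())  # {'uptime': 12345}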
@@ -1,496 +1,496 b'' | |||||
1 | # -*- coding: utf-8 -*- |
|
1 | # -*- coding: utf-8 -*- | |
2 |
|
2 | |||
3 | # Copyright (C) 2014-2020 RhodeCode GmbH |
|
3 | # Copyright (C) 2014-2020 RhodeCode GmbH | |
4 | # |
|
4 | # | |
5 | # This program is free software: you can redistribute it and/or modify |
|
5 | # This program is free software: you can redistribute it and/or modify | |
6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | # it under the terms of the GNU Affero General Public License, version 3 | |
7 | # (only), as published by the Free Software Foundation. |
|
7 | # (only), as published by the Free Software Foundation. | |
8 | # |
|
8 | # | |
9 | # This program is distributed in the hope that it will be useful, |
|
9 | # This program is distributed in the hope that it will be useful, | |
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
12 | # GNU General Public License for more details. |
|
12 | # GNU General Public License for more details. | |
13 | # |
|
13 | # | |
14 | # You should have received a copy of the GNU Affero General Public License |
|
14 | # You should have received a copy of the GNU Affero General Public License | |
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. | |
16 | # |
|
16 | # | |
17 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | # This program is dual-licensed. If you wish to learn more about the | |
18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | # RhodeCode Enterprise Edition, including its added features, Support services, | |
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ | |
20 |
|
20 | |||
21 | """ |
|
21 | """ | |
22 | GIT commit module |
|
22 | GIT commit module | |
23 | """ |
|
23 | """ | |
24 |
|
24 | |||
25 | import re |
|
25 | import re | |
26 | import stat |
|
26 | import stat | |
|
27 | import configparser | |||
27 | from itertools import chain |
|
28 | from itertools import chain | |
28 | from io import StringIO |
|
29 | from io import StringIO | |
29 |
|
30 | |||
30 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
31 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
31 |
|
32 | |||
32 | from rhodecode.lib.datelib import utcdate_fromtimestamp |
|
33 | from rhodecode.lib.datelib import utcdate_fromtimestamp | |
33 | from rhodecode.lib.utils import safe_unicode, safe_str |
|
34 | from rhodecode.lib.utils import safe_unicode, safe_str | |
34 | from rhodecode.lib.utils2 import safe_int |
|
35 | from rhodecode.lib.utils2 import safe_int | |
35 | from rhodecode.lib.vcs.conf import settings |
|
36 | from rhodecode.lib.vcs.conf import settings | |
36 | from rhodecode.lib.vcs.backends import base |
|
37 | from rhodecode.lib.vcs.backends import base | |
37 | from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError |
|
38 | from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError | |
38 | from rhodecode.lib.vcs.nodes import ( |
|
39 | from rhodecode.lib.vcs.nodes import ( | |
39 | FileNode, DirNode, NodeKind, RootNode, SubModuleNode, |
|
40 | FileNode, DirNode, NodeKind, RootNode, SubModuleNode, | |
40 | ChangedFileNodesGenerator, AddedFileNodesGenerator, |
|
41 | ChangedFileNodesGenerator, AddedFileNodesGenerator, | |
41 | RemovedFileNodesGenerator, LargeFileNode) |
|
42 | RemovedFileNodesGenerator, LargeFileNode) | |
42 | from rhodecode.lib.vcs.compat import configparser |
|
|||
43 |
|
43 | |||
44 |
|
44 | |||
45 | class GitCommit(base.BaseCommit): |
|
45 | class GitCommit(base.BaseCommit): | |
46 | """ |
|
46 | """ | |
47 | Represents state of the repository at single commit id. |
|
47 | Represents state of the repository at single commit id. | |
48 | """ |
|
48 | """ | |
49 |
|
49 | |||
50 | _filter_pre_load = [ |
|
50 | _filter_pre_load = [ | |
51 | # done through a more complex tree walk on parents |
|
51 | # done through a more complex tree walk on parents | |
52 | "affected_files", |
|
52 | "affected_files", | |
53 | # done through subprocess not remote call |
|
53 | # done through subprocess not remote call | |
54 | "children", |
|
54 | "children", | |
55 | # done through a more complex tree walk on parents |
|
55 | # done through a more complex tree walk on parents | |
56 | "status", |
|
56 | "status", | |
57 | # mercurial specific property not supported here |
|
57 | # mercurial specific property not supported here | |
58 | "_file_paths", |
|
58 | "_file_paths", | |
59 | # mercurial specific property not supported here |
|
59 | # mercurial specific property not supported here | |
60 | 'obsolete', |
|
60 | 'obsolete', | |
61 | # mercurial specific property not supported here |
|
61 | # mercurial specific property not supported here | |
62 | 'phase', |
|
62 | 'phase', | |
63 | # mercurial specific property not supported here |
|
63 | # mercurial specific property not supported here | |
64 | 'hidden' |
|
64 | 'hidden' | |
65 | ] |
|
65 | ] | |
66 |
|
66 | |||
67 | def __init__(self, repository, raw_id, idx, pre_load=None): |
|
67 | def __init__(self, repository, raw_id, idx, pre_load=None): | |
68 | self.repository = repository |
|
68 | self.repository = repository | |
69 | self._remote = repository._remote |
|
69 | self._remote = repository._remote | |
70 | # TODO: johbo: Tweak of raw_id should not be necessary |
|
70 | # TODO: johbo: Tweak of raw_id should not be necessary | |
71 | self.raw_id = safe_str(raw_id) |
|
71 | self.raw_id = safe_str(raw_id) | |
72 | self.idx = idx |
|
72 | self.idx = idx | |
73 |
|
73 | |||
74 | self._set_bulk_properties(pre_load) |
|
74 | self._set_bulk_properties(pre_load) | |
75 |
|
75 | |||
76 | # caches |
|
76 | # caches | |
77 | self._stat_modes = {} # stat info for paths |
|
77 | self._stat_modes = {} # stat info for paths | |
78 | self._paths = {} # path processed with parse_tree |
|
78 | self._paths = {} # path processed with parse_tree | |
79 | self.nodes = {} |
|
79 | self.nodes = {} | |
80 | self._submodules = None |
|
80 | self._submodules = None | |
81 |
|
81 | |||
82 | def _set_bulk_properties(self, pre_load): |
|
82 | def _set_bulk_properties(self, pre_load): | |
83 |
|
83 | |||
84 | if not pre_load: |
|
84 | if not pre_load: | |
85 | return |
|
85 | return | |
86 | pre_load = [entry for entry in pre_load |
|
86 | pre_load = [entry for entry in pre_load | |
87 | if entry not in self._filter_pre_load] |
|
87 | if entry not in self._filter_pre_load] | |
88 | if not pre_load: |
|
88 | if not pre_load: | |
89 | return |
|
89 | return | |
90 |
|
90 | |||
91 | result = self._remote.bulk_request(self.raw_id, pre_load) |
|
91 | result = self._remote.bulk_request(self.raw_id, pre_load) | |
92 | for attr, value in result.items(): |
|
92 | for attr, value in result.items(): | |
93 | if attr in ["author", "message"]: |
|
93 | if attr in ["author", "message"]: | |
94 | if value: |
|
94 | if value: | |
95 | value = safe_unicode(value) |
|
95 | value = safe_unicode(value) | |
96 | elif attr == "date": |
|
96 | elif attr == "date": | |
97 | value = utcdate_fromtimestamp(*value) |
|
97 | value = utcdate_fromtimestamp(*value) | |
98 | elif attr == "parents": |
|
98 | elif attr == "parents": | |
99 | value = self._make_commits(value) |
|
99 | value = self._make_commits(value) | |
100 | elif attr == "branch": |
|
100 | elif attr == "branch": | |
101 | value = self._set_branch(value) |
|
101 | value = self._set_branch(value) | |
102 | self.__dict__[attr] = value |
|
102 | self.__dict__[attr] = value | |
103 |
|
103 | |||
104 | @LazyProperty |
|
104 | @LazyProperty | |
105 | def _commit(self): |
|
105 | def _commit(self): | |
106 | return self._remote[self.raw_id] |
|
106 | return self._remote[self.raw_id] | |
107 |
|
107 | |||
108 | @LazyProperty |
|
108 | @LazyProperty | |
109 | def _tree_id(self): |
|
109 | def _tree_id(self): | |
110 | return self._remote[self._commit['tree']]['id'] |
|
110 | return self._remote[self._commit['tree']]['id'] | |
111 |
|
111 | |||
112 | @LazyProperty |
|
112 | @LazyProperty | |
113 | def id(self): |
|
113 | def id(self): | |
114 | return self.raw_id |
|
114 | return self.raw_id | |
115 |
|
115 | |||
116 | @LazyProperty |
|
116 | @LazyProperty | |
117 | def short_id(self): |
|
117 | def short_id(self): | |
118 | return self.raw_id[:12] |
|
118 | return self.raw_id[:12] | |
119 |
|
119 | |||
120 | @LazyProperty |
|
120 | @LazyProperty | |
121 | def message(self): |
|
121 | def message(self): | |
122 | return safe_unicode(self._remote.message(self.id)) |
|
122 | return safe_unicode(self._remote.message(self.id)) | |
123 |
|
123 | |||
124 | @LazyProperty |
|
124 | @LazyProperty | |
125 | def committer(self): |
|
125 | def committer(self): | |
126 | return safe_unicode(self._remote.author(self.id)) |
|
126 | return safe_unicode(self._remote.author(self.id)) | |
127 |
|
127 | |||
128 | @LazyProperty |
|
128 | @LazyProperty | |
129 | def author(self): |
|
129 | def author(self): | |
130 | return safe_unicode(self._remote.author(self.id)) |
|
130 | return safe_unicode(self._remote.author(self.id)) | |
131 |
|
131 | |||
132 | @LazyProperty |
|
132 | @LazyProperty | |
133 | def date(self): |
|
133 | def date(self): | |
134 | unix_ts, tz = self._remote.date(self.raw_id) |
|
134 | unix_ts, tz = self._remote.date(self.raw_id) | |
135 | return utcdate_fromtimestamp(unix_ts, tz) |
|
135 | return utcdate_fromtimestamp(unix_ts, tz) | |
136 |
|
136 | |||
137 | @LazyProperty |
|
137 | @LazyProperty | |
138 | def status(self): |
|
138 | def status(self): | |
139 | """ |
|
139 | """ | |
140 | Returns a tuple of changed, added and removed files for this commit |
|
140 | Returns a tuple of changed, added and removed files for this commit | |
141 | """ |
|
141 | """ | |
142 | return self.changed, self.added, self.removed |
|
142 | return self.changed, self.added, self.removed | |
143 |
|
143 | |||
144 | @LazyProperty |
|
144 | @LazyProperty | |
145 | def tags(self): |
|
145 | def tags(self): | |
146 | tags = [safe_unicode(name) for name, |
|
146 | tags = [safe_unicode(name) for name, | |
147 | commit_id in self.repository.tags.items() |
|
147 | commit_id in self.repository.tags.items() | |
148 | if commit_id == self.raw_id] |
|
148 | if commit_id == self.raw_id] | |
149 | return tags |
|
149 | return tags | |
150 |
|
150 | |||
151 | @LazyProperty |
|
151 | @LazyProperty | |
152 | def commit_branches(self): |
|
152 | def commit_branches(self): | |
153 | branches = [] |
|
153 | branches = [] | |
154 | for name, commit_id in self.repository.branches.items(): |
|
154 | for name, commit_id in self.repository.branches.items(): | |
155 | if commit_id == self.raw_id: |
|
155 | if commit_id == self.raw_id: | |
156 | branches.append(name) |
|
156 | branches.append(name) | |
157 | return branches |
|
157 | return branches | |
158 |
|
158 | |||
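Both `tags` and `commit_branches` above iterate over repository dicts; note that Python 2's `dict.iteritems()` is gone in Python 3, where `dict.items()` already returns a lazy view, making it the drop-in replacement in ports like this changeset. A tiny illustration:

    branches = {'master': 'abc123', 'develop': 'def456'}

    # Python 2 only -- raises AttributeError on Python 3:
    #   for name, commit_id in branches.iteritems(): ...

    # Python 3 (items() is a lazy view, not a copied list):
    matched = [name for name, commit_id in branches.items()
               if commit_id == 'abc123']
    print(matched)  # ['master']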
159 | def _set_branch(self, branches): |
|
159 | def _set_branch(self, branches): | |
160 | if branches: |
|
160 | if branches: | |
161 | # a commit can actually point to multiple branches in git |
|
161 | # a commit can actually point to multiple branches in git | |
162 | return safe_unicode(branches[0]) |
|
162 | return safe_unicode(branches[0]) | |
163 |
|
163 | |||
164 | @LazyProperty |
|
164 | @LazyProperty | |
165 | def branch(self): |
|
165 | def branch(self): | |
166 | branches = self._remote.branch(self.raw_id) |
|
166 | branches = self._remote.branch(self.raw_id) | |
167 | return self._set_branch(branches) |
|
167 | return self._set_branch(branches) | |

    def _get_tree_id_for_path(self, path):
        path = safe_str(path)
        if path in self._paths:
            return self._paths[path]

        tree_id = self._tree_id

        path = path.strip('/')
        if path == '':
            data = [tree_id, "tree"]
            self._paths[''] = data
            return data

        tree_id, tree_type, tree_mode = \
            self._remote.tree_and_type_for_path(self.raw_id, path)
        if tree_id is None:
            raise self.no_node_at_path(path)

        self._paths[path] = [tree_id, tree_type]
        self._stat_modes[path] = tree_mode

        if path not in self._paths:
            raise self.no_node_at_path(path)

        return self._paths[path]

    def _get_kind(self, path):
        tree_id, type_ = self._get_tree_id_for_path(path)
        if type_ == 'blob':
            return NodeKind.FILE
        elif type_ == 'tree':
            return NodeKind.DIR
        elif type_ == 'link':
            return NodeKind.SUBMODULE
        return None

    def _get_filectx(self, path):
        path = self._fix_path(path)
        if self._get_kind(path) != NodeKind.FILE:
            raise CommitError(
                "File does not exist for commit %s at '%s'" % (self.raw_id, path))
        return path

    def _get_file_nodes(self):
        return chain(*(t[2] for t in self.walk()))

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        parent_ids = self._remote.parents(self.id)
        return self._make_commits(parent_ids)

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """

        children = self._remote.children(self.raw_id)
        return self._make_commits(children)
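
    # Graph-walking sketch (hypothetical `commit` object): both properties
    # return lazily built lists of GitCommit instances.
    #
    #   >>> [p.raw_id for p in commit.parents]
    #   >>> [c.raw_id for c in commit.children]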

    def _make_commits(self, commit_ids):
        def commit_maker(_commit_id):
            # use the bound `_commit_id` argument; the outer comprehension
            # variable is not visible here and would raise NameError on Python 3
            return self.repository.get_commit(commit_id=_commit_id)

        return [commit_maker(commit_id) for commit_id in commit_ids]

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at the given `path`.
        """
        path = safe_str(path)
        # ensure path is traversed
        self._get_tree_id_for_path(path)
        return self._stat_modes[path]

    def is_link(self, path):
        return stat.S_ISLNK(self.get_file_mode(path))

    def is_node_binary(self, path):
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.is_binary(tree_id)
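
    # Sketch (assumed paths, illustration only): modes are raw git stat
    # modes, so the standard `stat` helpers apply to them.
    #
    #   >>> oct(commit.get_file_mode('docs/Makefile'))   # e.g. '0o100644'
    #   >>> commit.is_link('config/current')             # True for symlinks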

    def get_file_content(self, path):
        """
        Returns content of the file at given `path`.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.blob_as_pretty_string(tree_id)

    def get_file_content_streamed(self, path):
        tree_id, _ = self._get_tree_id_for_path(path)
        stream_method = getattr(self._remote, 'stream:blob_as_pretty_string')
        return stream_method(tree_id)

    def get_file_size(self, path):
        """
        Returns size of the file at given `path`.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.blob_raw_length(tree_id)
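
    # Sketch (hypothetical path and consumer): the streamed variant avoids
    # materializing large blobs in memory.
    #
    #   >>> commit.get_file_size('data/dump.sql')              # size in bytes
    #   >>> for chunk in commit.get_file_content_streamed('data/dump.sql'):
    #   ...     handle(chunk)                                  # made-up consumer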

    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of `GitCommit` objects for
        which file at given `path` has been modified.
        """

        path = self._get_filectx(path)
        hist = self._remote.node_history(self.raw_id, path, limit)
        return [
            self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in hist]

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, commit_id, commit lazy loader and line
        """

        result = self._remote.node_annotate(self.raw_id, path)

        for ln_no, commit_id, content in result:
            yield (
                ln_no, commit_id,
                # bind commit_id as a default argument so each loader keeps
                # its own commit instead of the loop's last value
                lambda commit_id=commit_id: self.repository.get_commit(
                    commit_id=commit_id, pre_load=pre_load),
                content)
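
    # Usage sketch (hypothetical file): the third tuple element is a
    # zero-argument callable that resolves the full commit on demand.
    #
    #   >>> for ln_no, sha, loader, line in commit.get_file_annotate('setup.py'):
    #   ...     author = loader().author   # triggers the lazy commit load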

    def get_nodes(self, path):

        if self._get_kind(path) != NodeKind.DIR:
            raise CommitError(
                "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
        path = self._fix_path(path)

        tree_id, _ = self._get_tree_id_for_path(path)

        dirnodes = []
        filenodes = []

        # extracted tree ID gives us our files...
        bytes_path = safe_str(path)  # libgit operates on bytes
        for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
            if type_ == 'link':
                url = self._get_submodule_url('/'.join((bytes_path, name)))
                dirnodes.append(SubModuleNode(
                    name, url=url, commit=id_, alias=self.repository.alias))
                continue

            if bytes_path != '':
                obj_path = '/'.join((bytes_path, name))
            else:
                obj_path = name
            if obj_path not in self._stat_modes:
                self._stat_modes[obj_path] = stat_

            if type_ == 'tree':
                dirnodes.append(DirNode(obj_path, commit=self))
            elif type_ == 'blob':
                filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
            else:
                raise CommitError(
                    "Requested object should be Tree or Blob, is %s" % type_)

        nodes = dirnodes + filenodes
        for node in nodes:
            if node.path not in self.nodes:
                self.nodes[node.path] = node
        nodes.sort()
        return nodes

    def get_node(self, path, pre_load=None):
        # normalize to str; the old `unicode` check is gone on Python 3 and
        # `safe_str` handles the conversion consistently
        path = safe_str(path)
        path = self._fix_path(path)
        if path not in self.nodes:
            try:
                tree_id, type_ = self._get_tree_id_for_path(path)
            except CommitError:
                raise NodeDoesNotExistError(
                    "Cannot find one of parents' directories for a given "
                    "path: %s" % path)

            if type_ in ['link', 'commit']:
                url = self._get_submodule_url(path)
                node = SubModuleNode(path, url=url, commit=tree_id,
                                     alias=self.repository.alias)
            elif type_ == 'tree':
                if path == '':
                    node = RootNode(commit=self)
                else:
                    node = DirNode(path, commit=self)
            elif type_ == 'blob':
                node = FileNode(path, commit=self, pre_load=pre_load)
                self._stat_modes[path] = node.mode
            else:
                raise self.no_node_at_path(path)

            # cache node
            self.nodes[path] = node

        return self.nodes[path]
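
    # Dispatch sketch (made-up paths): `get_node` maps git object types to
    # node classes and caches the result per commit.
    #
    #   >>> commit.get_node('')            # RootNode of the tree
    #   >>> commit.get_node('docs')        # DirNode
    #   >>> commit.get_node('setup.py')    # FileNode (stat mode cached too)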

    def get_largefile_node(self, path):
        tree_id, _ = self._get_tree_id_for_path(path)
        pointer_spec = self._remote.is_large_file(tree_id)

        if pointer_spec:
            # the content of the regular FileNode is just the pointer/hash
            # of the actual largefile
            file_id = pointer_spec.get('oid_hash')
            if self._remote.in_largefiles_store(file_id):
                lf_path = self._remote.store_path(file_id)
                return LargeFileNode(lf_path, commit=self, org_path=path)

    @LazyProperty
    def affected_files(self):
        """
        Gets fast-accessible file changes for the given commit
        """
        added, modified, deleted = self._changes_cache
        return list(added.union(modified).union(deleted))

    @LazyProperty
    def _changes_cache(self):
        added = set()
        modified = set()
        deleted = set()
        _r = self._remote

        parents = self.parents
        if not self.parents:
            parents = [base.EmptyCommit()]
        for parent in parents:
            if isinstance(parent, base.EmptyCommit):
                oid = None
            else:
                oid = parent.raw_id
            changes = _r.tree_changes(oid, self.raw_id)
            for (oldpath, newpath), (_, _), (_, _) in changes:
                if newpath and oldpath:
                    modified.add(newpath)
                elif newpath and not oldpath:
                    added.add(newpath)
                elif not newpath and oldpath:
                    deleted.add(oldpath)
        return added, modified, deleted
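
    # Worked example of the classification above (values illustrative):
    # each change is ((oldpath, newpath), (oldmode, newmode), (oldsha, newsha)),
    # and the path pair alone determines the kind of change:
    #
    #   ('a.py', 'a.py')  -> modified
    #   (None,   'b.py')  -> added
    #   ('c.py', None)    -> deleted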

    def _get_paths_for_status(self, status):
        """
        Returns sorted list of paths for given ``status``.

        :param status: one of: *added*, *modified* or *deleted*
        """
        added, modified, deleted = self._changes_cache
        return sorted({
            'added': list(added),
            'modified': list(modified),
            'deleted': list(deleted)}[status]
        )

    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        if not self.parents:
            return list(self._get_file_nodes())
        return AddedFileNodesGenerator(self.added_paths, self)

    @LazyProperty
    def added_paths(self):
        return self._get_paths_for_status('added')

    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        if not self.parents:
            return []
        return ChangedFileNodesGenerator(self.changed_paths, self)

    @LazyProperty
    def changed_paths(self):
        return self._get_paths_for_status('modified')

    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        if not self.parents:
            return []
        return RemovedFileNodesGenerator(self.removed_paths, self)

    @LazyProperty
    def removed_paths(self):
        return self._get_paths_for_status('deleted')

    def _get_submodule_url(self, submodule_path):
        git_modules_path = '.gitmodules'

        if self._submodules is None:
            self._submodules = {}

            try:
                submodules_node = self.get_node(git_modules_path)
            except NodeDoesNotExistError:
                return None

            # ConfigParser fails if there is whitespace; it also needs an
            # iterable of lines, like a file object
            def iter_content(_content):
                for line in _content.splitlines():
                    yield line

            parser = configparser.RawConfigParser()
            parser.read_file(iter_content(submodules_node.content))

            for section in parser.sections():
                path = parser.get(section, 'path')
                url = parser.get(section, 'url')
                if path and url:
                    self._submodules[path.strip('/')] = url

        return self._submodules.get(submodule_path.strip('/'))
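
    # Standalone sketch of the `.gitmodules` parsing used above. The section
    # name, path and url below are made up for illustration:
    #
    #   import configparser
    #
    #   content = (
    #       '[submodule "vendor/lib"]\n'
    #       '\tpath = vendor/lib\n'
    #       '\turl = https://example.com/vendor/lib.git\n'
    #   )
    #   parser = configparser.RawConfigParser()
    #   parser.read_file(iter(content.splitlines()))
    #   mapping = {parser.get(s, 'path').strip('/'): parser.get(s, 'url')
    #              for s in parser.sections()}
    #   # -> {'vendor/lib': 'https://example.com/vendor/lib.git'}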
@@ -1,1012 +1,1012 b''
# -*- coding: utf-8 -*-

# Copyright (C) 2014-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
HG repository module
"""
import os
import logging
import binascii
import configparser  # replaces the old `from rhodecode.lib.vcs.compat import configparser`
import urllib.request, urllib.parse, urllib.error

from zope.cachedescriptors.property import Lazy as LazyProperty

from rhodecode.lib.compat import OrderedDict
from rhodecode.lib.datelib import (
    date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
from rhodecode.lib.utils import safe_unicode, safe_str
from rhodecode.lib.utils2 import CachedProperty
from rhodecode.lib.vcs import connection, exceptions
from rhodecode.lib.vcs.backends.base import (
    BaseRepository, CollectionGenerator, Config, MergeResponse,
    MergeFailureReason, Reference, BasePathPermissionChecker)
from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
from rhodecode.lib.vcs.exceptions import (
    EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
    TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)

hexlify = binascii.hexlify
nullid = "\0" * 20

log = logging.getLogger(__name__)


class MercurialRepository(BaseRepository):
    """
    Mercurial repository backend
    """
    DEFAULT_BRANCH_NAME = 'default'

    def __init__(self, repo_path, config=None, create=False, src_url=None,
                 do_workspace_checkout=False, with_wire=None, bare=False):
        """
        Raises RepositoryError if repository could not be found at the
        given ``repo_path``.

        :param repo_path: local path of the repository
        :param config: config object containing the repo configuration
        :param create=False: if set to True, would try to create repository if
           it does not exist rather than raising exception
        :param src_url=None: would try to clone repository from given location
        :param do_workspace_checkout=False: sets update of working copy after
           making a clone
        :param bare: not used, compatible with other VCS
        """

        self.path = safe_str(os.path.abspath(repo_path))
        # mercurial since 4.4.X requires certain configuration to be present
        # because sometimes we init the repos with config we need to meet
        # special requirements
        self.config = config if config else self.get_default_config(
            default=[('extensions', 'largefiles', '1')])
        self.with_wire = with_wire or {"cache": False}  # default should not use cache

        self._init_repo(create, src_url, do_workspace_checkout)

        # caches
        self._commit_ids = {}
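
    # Instantiation sketch (hypothetical paths/URLs):
    #
    #   >>> repo = MercurialRepository('/srv/repos/project')
    #   >>> mirror = MercurialRepository(
    #   ...     '/srv/repos/mirror', create=True,
    #   ...     src_url='https://code.example.com/project')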

    @LazyProperty
    def _remote(self):
        repo_id = self.path
        return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)

    @CachedProperty
    def commit_ids(self):
        """
        Returns list of commit ids, in ascending order. Being lazy
        attribute allows external tools to inject shas from cache.
        """
        commit_ids = self._get_all_commit_ids()
        self._rebuild_cache(commit_ids)
        return commit_ids

    def _rebuild_cache(self, commit_ids):
        self._commit_ids = {commit_id: index
                            for index, commit_id in enumerate(commit_ids)}

    @CachedProperty
    def branches(self):
        return self._get_branches()

    @CachedProperty
    def branches_closed(self):
        return self._get_branches(active=False, closed=True)

    @CachedProperty
    def branches_all(self):
        all_branches = {}
        all_branches.update(self.branches)
        all_branches.update(self.branches_closed)
        return all_branches

    def _get_branches(self, active=True, closed=False):
        """
        Gets branches for this repository.
        Returns only open, active branches by default.

        :param active: return active branches
        :param closed: return also closed branches

        """
        if self.is_empty():
            return {}

        def get_name(ctx):
            return ctx[0]

        _branches = [(safe_unicode(n), hexlify(h),) for n, h in
                     self._remote.branches(active, closed).items()]

        return OrderedDict(sorted(_branches, key=get_name, reverse=False))

    @CachedProperty
    def tags(self):
        """
        Gets tags for this repository
        """
        return self._get_tags()

    def _get_tags(self):
        if self.is_empty():
            return {}

        def get_name(ctx):
            return ctx[0]

        _tags = [(safe_unicode(n), hexlify(h),) for n, h in
                 self._remote.tags().items()]

        return OrderedDict(sorted(_tags, key=get_name, reverse=True))

    def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        if name in self.tags:
            raise TagAlreadyExistError("Tag %s already exists" % name)

        commit = self.get_commit(commit_id=commit_id)
        local = kwargs.setdefault('local', False)

        if message is None:
            message = "Added tag %s for commit %s" % (name, commit.short_id)

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()

        # Reinitialize tags
        self._invalidate_prop_cache('tags')
        tag_id = self.tags[name]

        return self.get_commit(commit_id=tag_id)
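
    # Usage sketch (made-up values): tag the current tip and get back the
    # commit the new tag points at.
    #
    #   >>> tagged = repo.tag('v1.2.0', u'Joe Doe <joe.doe@example.com>',
    #   ...                   commit_id=repo.commit_ids[-1])
    #   >>> 'v1.2.0' in repo.tags   # True afterwards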

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given `name`.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exist
        """
        if name not in self.tags:
            raise TagDoesNotExistError("Tag %s does not exist" % name)

        if message is None:
            message = "Removed tag %s" % name
        local = False

        date, tz = date_to_timestamp_plus_offset(date)

        self._remote.tag(name, nullid, message, local, user, date, tz)
        self._remote.invalidate_vcs_cache()
        self._invalidate_prop_cache('tags')

    @LazyProperty
    def bookmarks(self):
        """
        Gets bookmarks for this repository
        """
        return self._get_bookmarks()

    def _get_bookmarks(self):
        if self.is_empty():
            return {}

        def get_name(ctx):
            return ctx[0]

        _bookmarks = [
            (safe_unicode(n), hexlify(h)) for n, h in
            self._remote.bookmarks().items()]

        return OrderedDict(sorted(_bookmarks, key=get_name))

    def _get_all_commit_ids(self):
        return self._remote.get_all_commit_ids('visible')

    def get_diff(
            self, commit1, commit2, path='', ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        """
        self._validate_diff_commits(commit1, commit2)
        if path1 is not None and path1 != path:
            raise ValueError("Diff of two different paths not supported.")

        if path:
            file_filter = [self.path, path]
        else:
            file_filter = None

        diff = self._remote.diff(
            commit1.raw_id, commit2.raw_id, file_filter=file_filter,
            opt_git=True, opt_ignorews=ignore_whitespace,
            context=context)
        return MercurialDiff(diff)
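
    # Diff sketch (hypothetical commits): changes introduced by c2 since c1,
    # optionally scoped to a single path.
    #
    #   >>> c1 = repo.get_commit(commit_idx=10)
    #   >>> c2 = repo.get_commit(commit_idx=12)
    #   >>> diff = repo.get_diff(c1, c2, path='setup.py', context=5)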

    def strip(self, commit_id, branch=None):
        self._remote.strip(commit_id, update=False, backup="none")

        self._remote.invalidate_vcs_cache()
        # clear cache
        self._invalidate_prop_cache('commit_ids')

        return len(self.commit_ids)

    def verify(self):
        verify = self._remote.verify()

        self._remote.invalidate_vcs_cache()
        return verify

    def hg_update_cache(self):
        update_cache = self._remote.hg_update_cache()

        self._remote.invalidate_vcs_cache()
        return update_cache

    def hg_rebuild_fn_cache(self):
        update_cache = self._remote.hg_rebuild_fn_cache()

        self._remote.invalidate_vcs_cache()
        return update_cache

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
                  self, commit_id1, repo2, commit_id2)

        if commit_id1 == commit_id2:
            return commit_id1

        ancestors = self._remote.revs_from_revspec(
            "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
            other_path=repo2.path)

        ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None

        log.debug('Found common ancestor with sha: %s', ancestor_id)
        return ancestor_id

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        if commit_id1 == commit_id2:
            commits = []
        else:
            if merge:
                indexes = self._remote.revs_from_revspec(
                    "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
                    commit_id2, commit_id1, commit_id1, other_path=repo2.path)
            else:
                indexes = self._remote.revs_from_revspec(
                    "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
                    commit_id1, other_path=repo2.path)

            commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
                       for idx in indexes]

        return commits

    @staticmethod
    def check_url(url, config):
        """
        Checks the given url and tries to verify that it points to a valid
        repository. Sometimes Mercurial issues a basic auth request, which
        can cause the whole API to hang when used from Python or other
        external callers.

        On failure it raises urllib.error.HTTPError; the exception is also
        raised when the return code is not 200.
        """
        # check first if it's not a local url
        if os.path.isdir(url) or url.startswith('file:'):
            return True

        # Request the _remote to verify the url
        return connection.Hg.check_url(url, config.serialize())

    @staticmethod
    def is_valid_repository(path):
        return os.path.isdir(os.path.join(path, '.hg'))

    def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
        """
        Checks for a mercurial repository in the given path. If there
        is no repository in that path it will raise an exception unless
        the `create` parameter is set to True - in that case the repository
        would be created.

        If `src_url` is given, it will try to clone the repository from that
        location. Additionally, the working copy is updated according to the
        `do_workspace_checkout` flag.
        """
        if create and os.path.exists(self.path):
            raise RepositoryError(
                "Cannot create repository at %s, location already exists"
                % self.path)

        if src_url:
            url = str(self._get_url(src_url))
            MercurialRepository.check_url(url, self.config)

            self._remote.clone(url, self.path, do_workspace_checkout)

            # Don't try to create if we've already cloned repo
            create = False

        if create:
            os.makedirs(self.path, mode=0o755)
            self._remote.localrepository(create)
|
383 | |||
384 | @LazyProperty |
|
384 | @LazyProperty | |
385 | def in_memory_commit(self): |
|
385 | def in_memory_commit(self): | |
386 | return MercurialInMemoryCommit(self) |
|
386 | return MercurialInMemoryCommit(self) | |
387 |
|
387 | |||
388 | @LazyProperty |
|
388 | @LazyProperty | |
389 | def description(self): |
|
389 | def description(self): | |
390 | description = self._remote.get_config_value( |
|
390 | description = self._remote.get_config_value( | |
391 | 'web', 'description', untrusted=True) |
|
391 | 'web', 'description', untrusted=True) | |
392 | return safe_unicode(description or self.DEFAULT_DESCRIPTION) |
|
392 | return safe_unicode(description or self.DEFAULT_DESCRIPTION) | |
393 |
|
393 | |||
394 | @LazyProperty |
|
394 | @LazyProperty | |
395 | def contact(self): |
|
395 | def contact(self): | |
396 | contact = ( |
|
396 | contact = ( | |
397 | self._remote.get_config_value("web", "contact") or |
|
397 | self._remote.get_config_value("web", "contact") or | |
398 | self._remote.get_config_value("ui", "username")) |
|
398 | self._remote.get_config_value("ui", "username")) | |
399 | return safe_unicode(contact or self.DEFAULT_CONTACT) |
|
399 | return safe_unicode(contact or self.DEFAULT_CONTACT) | |

    @LazyProperty
    def last_change(self):
        """
        Returns last change made on this repository as
        `datetime.datetime` object.
        """
        try:
            return self.get_commit().date
        except RepositoryError:
            tzoffset = makedate()[1]
            return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)

    def _get_fs_mtime(self):
        # fallback to filesystem
        cl_path = os.path.join(self.path, '.hg', "00changelog.i")
        st_path = os.path.join(self.path, '.hg', "store")
        if os.path.exists(cl_path):
            return os.stat(cl_path).st_mtime
        else:
            return os.stat(st_path).st_mtime

    def _get_url(self, url):
        """
        Returns normalized url. If a scheme is not given, falls back to the
        filesystem (``file:///``) scheme.
        """
        # keep a str; utf-8 encoding to bytes would break the checks below
        # on Python 3
        url = safe_str(url)
        if url != 'default' and '://' not in url:
            url = "file:" + urllib.request.pathname2url(url)
        return url

    def get_hook_location(self):
        """
        returns absolute path to location where hooks are stored
        """
        return os.path.join(self.path, '.hg', '.hgrc')

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False, reference_obj=None):
        """
        Returns ``MercurialCommit`` object representing repository's
        commit at the given `commit_id` or `commit_idx`.
        """
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")

        if commit_id is not None:
            self._validate_commit_id(commit_id)
            try:
                # we have cached idx, use it without contacting the remote
                idx = self._commit_ids[commit_id]
                return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
            except KeyError:
                pass

        elif commit_idx is not None:
            self._validate_commit_idx(commit_idx)
            try:
                _commit_id = self.commit_ids[commit_idx]
                if commit_idx < 0:
                    commit_idx = self.commit_ids.index(_commit_id)

                return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
            except IndexError:
                commit_id = commit_idx
        else:
            commit_id = "tip"

        if isinstance(commit_id, str):  # `unicode` is gone on Python 3
            commit_id = safe_str(commit_id)

        try:
            raw_id, idx = self._remote.lookup(commit_id, both=True)
        except CommitDoesNotExistError:
            msg = "Commit {} does not exist for `{}`".format(
                *map(safe_str, [commit_id, self.name]))
            raise CommitDoesNotExistError(msg)

        return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
481 |
|
481 | |||
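A usage sketch for reviewers; ``repo`` stands for any initialized ``MercurialRepository``:

tip = repo.get_commit()                        # no arguments resolves "tip"
first = repo.get_commit(commit_idx=0)          # positional index into commit_ids
again = repo.get_commit(commit_id=tip.raw_id)  # full hash, served from the idx cache
assert again.raw_id == tip.raw_id
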
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns a generator of ``MercurialCommit`` objects from start to end
        (both inclusive).

        :param start_id: None or str(commit_id)
        :param end_id: None or str(commit_id)
        :param start_date: if specified, commits with a commit date earlier
            than ``start_date`` are filtered out of the returned set
        :param end_date: if specified, commits with a commit date later than
            ``end_date`` are filtered out of the returned set
        :param branch_name: if specified, commits not reachable from the given
            branch are filtered out of the returned set
        :param show_hidden: show hidden commits, such as commits made obsolete
            or hidden by Mercurial evolve
        :raise BranchDoesNotExistError: if the given ``branch_name`` does not
            exist.
        :raise CommitDoesNotExistError: if a commit for the given ``start`` or
            ``end`` could not be found.
        """
        # first make sure this is not an empty repo
        if self.is_empty():
            raise EmptyRepositoryError("There are no commits yet")
        self._validate_branch_name(branch_name)

        branch_ancestors = False
        if start_id is not None:
            self._validate_commit_id(start_id)
            c_start = self.get_commit(commit_id=start_id)
            start_pos = self._commit_ids[c_start.raw_id]
        else:
            start_pos = None

        if end_id is not None:
            self._validate_commit_id(end_id)
            c_end = self.get_commit(commit_id=end_id)
            end_pos = max(0, self._commit_ids[c_end.raw_id])
        else:
            end_pos = None

        if None not in [start_id, end_id] and start_pos > end_pos:
            raise RepositoryError(
                "Start commit '%s' cannot be after end commit '%s'" %
                (start_id, end_id))

        if end_pos is not None:
            end_pos += 1

        commit_filter = []

        if branch_name and not branch_ancestors:
            commit_filter.append('branch("%s")' % (branch_name,))
        elif branch_name and branch_ancestors:
            commit_filter.append('ancestors(branch("%s"))' % (branch_name,))

        if start_date and not end_date:
            commit_filter.append('date(">%s")' % (start_date,))
        if end_date and not start_date:
            commit_filter.append('date("<%s")' % (end_date,))
        if start_date and end_date:
            commit_filter.append(
                'date(">%s") and date("<%s")' % (start_date, end_date))

        if not show_hidden:
            commit_filter.append('not obsolete()')
            commit_filter.append('not hidden()')

        # TODO: johbo: Figure out a simpler way for this solution
        collection_generator = CollectionGenerator
        if commit_filter:
            commit_filter = ' and '.join(map(safe_str, commit_filter))
            revisions = self._remote.rev_range([commit_filter])
            collection_generator = MercurialIndexBasedCollectionGenerator
        else:
            revisions = self.commit_ids

        if start_pos or end_pos:
            revisions = revisions[start_pos:end_pos]

        return collection_generator(self, revisions, pre_load=pre_load)

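The filter fragments above are Mercurial revset expressions joined with "and". A sketch of what a filtered call produces (dates shown as strings for brevity; ``repo`` assumed):

# With branch_name='default', both dates set and show_hidden=False, the
# revset handed to rev_range() would be:
#   branch("default") and date(">2020-01-01") and date("<2020-06-01")
#   and not obsolete() and not hidden()
for commit in repo.get_commits(branch_name='default',
                               start_date='2020-01-01', end_date='2020-06-01'):
    print(commit.raw_id)  # commits materialize lazily from the generator
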
    def pull(self, url, commit_ids=None):
        """
        Pull changes from an external location.

        :param commit_ids: Optional. Can be set to a list of commit ids
           which shall be pulled from the other repository.
        """
        url = self._get_url(url)
        self._remote.pull(url, commit_ids=commit_ids)
        self._remote.invalidate_vcs_cache()

    def fetch(self, url, commit_ids=None):
        """
        Backward compatibility with Git, where fetch == pull.
        """
        return self.pull(url, commit_ids=commit_ids)

    def push(self, url):
        url = self._get_url(url)
        self._remote.sync_push(url)

    def _local_clone(self, clone_path):
        """
        Create a local clone of the current repo.
        """
        self._remote.clone(self.path, clone_path, update_after_clone=True,
                           hooks=False)

    def _update(self, revision, clean=False):
        """
        Update the working copy to the specified revision.
        """
        log.debug('Doing checkout to commit: `%s` for %s', revision, self)
        self._remote.update(revision, clean=clean)

    def _identify(self):
        """
        Return the current state of the working directory.
        """
        return self._remote.identify().strip().rstrip('+')

    def _heads(self, branch=None):
        """
        Return the commit ids of the repository heads.
        """
        return self._remote.heads(branch=branch).strip().split(' ')

    def _ancestor(self, revision1, revision2):
        """
        Return the common ancestor of the two revisions.
        """
        return self._remote.ancestor(revision1, revision2)

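Because pull() routes through _get_url(), callers may pass either a real url or a bare filesystem path. An illustrative call (path hypothetical):

upstream = '/srv/repos/upstream'   # hypothetical sibling repository on disk
repo.pull(upstream)                # normalized to file:/srv/repos/upstream first
repo.fetch(upstream)               # identical effect; kept for Git-style callers
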
    def _local_push(
            self, revision, repository_path, push_branches=False,
            enable_hooks=False):
        """
        Push the given revision to the specified repository.

        :param push_branches: allow creating new branches in the target repo.
        """
        self._remote.push(
            [revision], repository_path, hooks=enable_hooks,
            push_branches=push_branches)

    def _local_merge(self, target_ref, merge_message, user_name, user_email,
                     source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
        """
        Merge the given source_ref into the checked-out target_ref.

        Returns the commit id of the merge and a boolean indicating if the
        commit needs to be pushed.
        """
        source_ref_commit_id = source_ref.commit_id
        target_ref_commit_id = target_ref.commit_id

        # update our workdir to target ref, for proper merge
        self._update(target_ref_commit_id, clean=True)

        ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
        is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)

        if close_commit_id:
            # NOTE(marcink): if we get the close commit, this is our new source
            # which will include the close commit itself.
            source_ref_commit_id = close_commit_id

        if ancestor == source_ref_commit_id:
            # Nothing to do, the changes were already integrated
            return target_ref_commit_id, False

        elif ancestor == target_ref_commit_id and is_the_same_branch:
            # In this case we should force a commit message
            return source_ref_commit_id, True

        unresolved = None
        if use_rebase:
            try:
                bookmark_name = 'rcbook%s%s' % (source_ref_commit_id, target_ref_commit_id)
                self.bookmark(bookmark_name, revision=source_ref.commit_id)
                self._remote.rebase(
                    source=source_ref_commit_id, dest=target_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._update(bookmark_name, clean=True)
                return self._identify(), True
            except RepositoryError as e:
                # The rebase-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while rebasing shadow repo during merge.')
                if 'unresolved conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any rebase leftovers
                self._remote.invalidate_vcs_cache()
                self._remote.rebase(abort=True)
                self._remote.invalidate_vcs_cache()
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise
        else:
            try:
                self._remote.merge(source_ref_commit_id)
                self._remote.invalidate_vcs_cache()
                self._remote.commit(
                    message=safe_str(merge_message),
                    username=safe_str('%s <%s>' % (user_name, user_email)))
                self._remote.invalidate_vcs_cache()
                return self._identify(), True
            except RepositoryError as e:
                # The merge-abort may raise another exception which 'hides'
                # the original one, therefore we log it here.
                log.exception('Error while merging shadow repo during merge.')
                if 'unresolved merge conflicts' in safe_str(e):
                    unresolved = self._remote.get_unresolved_files()
                    log.debug('unresolved files: %s', unresolved)

                # Cleanup any merge leftovers
                self._remote.update(clean=True)
                if unresolved:
                    raise UnresolvedFilesInRepo(unresolved)
                else:
                    raise

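Both strategies share the cleanup path but differ in mechanics: rebase mode pins the source under a temporary 'rcbook<src><dst>' bookmark, rebases it onto the target, and reports the new head via _identify(). A hypothetical rebase-mode call (all identifiers illustrative):

merge_id, needs_push = shadow_repo._local_merge(
    target_ref, 'Merged feature branch', 'Jane Doe', 'jane@example.com',
    source_ref, use_rebase=True)
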
    def _local_close(self, target_ref, user_name, user_email,
                     source_ref, close_message=''):
        """
        Close the branch of the given source_ref.

        Returns the commit id of the close and a boolean indicating if the
        commit needs to be pushed.
        """
        self._update(source_ref.commit_id)
        message = close_message or "Closing branch: `{}`".format(source_ref.name)
        try:
            self._remote.commit(
                message=safe_str(message),
                username=safe_str('%s <%s>' % (user_name, user_email)),
                close_branch=True)
            self._remote.invalidate_vcs_cache()
            return self._identify(), True
        except RepositoryError:
            # Cleanup any commit leftovers
            self._remote.update(clean=True)
            raise

    def _is_the_same_branch(self, target_ref, source_ref):
        return (
            self._get_branch_name(target_ref) ==
            self._get_branch_name(source_ref))

    def _get_branch_name(self, ref):
        if ref.type == 'branch':
            return ref.name
        return self._remote.ctx_branch(ref.commit_id)

    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
        shadow_repository_path = self._get_shadow_repository_path(
            self.path, repo_id, workspace_id)
        if not os.path.exists(shadow_repository_path):
            self._local_clone(shadow_repository_path)
            log.debug(
                'Prepared shadow repository in %s', shadow_repository_path)

        return shadow_repository_path

    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):

        log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
                  'rebase' if use_rebase else 'merge', dry_run)
        if target_ref.commit_id not in self._heads():
            return MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
                metadata={'target_ref': target_ref})

        try:
            if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
                heads_all = self._heads(target_ref.name)
                max_heads = 10
                if len(heads_all) > max_heads:
                    heads = '\n,'.join(
                        heads_all[:max_heads] +
                        ['and {} more.'.format(len(heads_all)-max_heads)])
                else:
                    heads = '\n,'.join(heads_all)
                metadata = {
                    'target_ref': target_ref,
                    'source_ref': source_ref,
                    'heads': heads
                }
                return MergeResponse(
                    False, False, None,
                    MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
                    metadata=metadata)
        except CommitDoesNotExistError:
            log.exception('Failure when looking up branch heads on hg target')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': target_ref})

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            repo_id, workspace_id, target_ref, source_ref)
        shadow_repo = self.get_shadow_instance(shadow_repository_path)

        log.debug('Pulling in target reference %s', target_ref)
        self._validate_pull_reference(target_ref)
        shadow_repo._local_pull(self.path, target_ref)

        try:
            log.debug('Pulling in source reference %s', source_ref)
            source_repo._validate_pull_reference(source_ref)
            shadow_repo._local_pull(source_repo.path, source_ref)
        except CommitDoesNotExistError:
            log.exception('Failure when doing local pull on hg shadow repo')
            return MergeResponse(
                False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
                metadata={'source_ref': source_ref})

        merge_ref = None
        merge_commit_id = None
        close_commit_id = None
        merge_failure_reason = MergeFailureReason.NONE
        metadata = {}

        # enforce that close branch should be used only in case we source from
        # an actual branch
        close_branch = close_branch and source_ref.type == 'branch'

        # don't allow closing the branch if source and target are the same
        close_branch = close_branch and source_ref.name != target_ref.name

        needs_push_on_close = False
        if close_branch and not use_rebase and not dry_run:
            try:
                close_commit_id, needs_push_on_close = shadow_repo._local_close(
                    target_ref, merger_name, merger_email, source_ref)
                merge_possible = True
            except RepositoryError:
                log.exception('Failure when doing close branch on '
                              'shadow repo: %s', shadow_repo)
                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
        else:
            merge_possible = True

        needs_push = False
        if merge_possible:

            try:
                merge_commit_id, needs_push = shadow_repo._local_merge(
                    target_ref, merge_message, merger_name, merger_email,
                    source_ref, use_rebase=use_rebase,
                    close_commit_id=close_commit_id, dry_run=dry_run)
                merge_possible = True

                # read the state of the close action; it may have
                # required a push
                needs_push = needs_push or needs_push_on_close

                # Set a bookmark pointing to the merge commit. This bookmark
                # may be used to easily identify the last successful merge
                # commit in the shadow repository.
                shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
                merge_ref = Reference('book', 'pr-merge', merge_commit_id)
            except SubrepoMergeError:
                log.exception(
                    'Subrepo merge error during local merge on hg shadow repo.')
                merge_possible = False
                merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
                needs_push = False
            except RepositoryError as e:
                log.exception('Failure when doing local merge on hg shadow repo')
                if isinstance(e, UnresolvedFilesInRepo):
                    all_conflicts = list(e.args[0])
                    max_conflicts = 20
                    if len(all_conflicts) > max_conflicts:
                        conflicts = all_conflicts[:max_conflicts] \
                            + ['and {} more.'.format(len(all_conflicts)-max_conflicts)]
                    else:
                        conflicts = all_conflicts
                    metadata['unresolved_files'] = \
                        '\n* conflict: ' + \
                        ('\n * conflict: '.join(conflicts))

                merge_possible = False
                merge_failure_reason = MergeFailureReason.MERGE_FAILED
                needs_push = False

        if merge_possible and not dry_run:
            if needs_push:
                # In case the target is a bookmark, update it, so after pushing
                # the bookmark is also updated in the target.
                if target_ref.type == 'book':
                    shadow_repo.bookmark(
                        target_ref.name, revision=merge_commit_id)
                try:
                    shadow_repo_with_hooks = self.get_shadow_instance(
                        shadow_repository_path,
                        enable_hooks=True)
                    # This is the actual merge action, we push from shadow
                    # into origin.
                    # Note: the push_branches option will push any new branch
                    # defined in the source repository to the target. This may
                    # be dangerous as branches are permanent in Mercurial.
                    # This feature was requested in issue #441.
                    shadow_repo_with_hooks._local_push(
                        merge_commit_id, self.path, push_branches=True,
                        enable_hooks=True)

                    # maybe we also need to push the close_commit_id
                    if close_commit_id:
                        shadow_repo_with_hooks._local_push(
                            close_commit_id, self.path, push_branches=True,
                            enable_hooks=True)
                    merge_succeeded = True
                except RepositoryError:
                    log.exception(
                        'Failure when doing local push from the shadow '
                        'repository to the target repository at %s.', self.path)
                    merge_succeeded = False
                    merge_failure_reason = MergeFailureReason.PUSH_FAILED
                    metadata['target'] = 'hg shadow repo'
                    metadata['merge_commit'] = merge_commit_id
            else:
                merge_succeeded = True
        else:
            merge_succeeded = False

        return MergeResponse(
            merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
            metadata=metadata)

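A caller-side sketch (arguments hypothetical). Per the construction at the end of this method, the returned MergeResponse carries (merge_possible, merge_succeeded, merge_ref, merge_failure_reason) plus metadata; dry_run exercises the whole flow but skips the final push:

response = repo._merge_repo(
    repo_id=1, workspace_id='pr-1', target_ref=target_ref,
    source_repo=source_repo, source_ref=source_ref,
    merge_message='Test merge', merger_name='Jane Doe',
    merger_email='jane@example.com', dry_run=True)
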
    def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
        config = self.config.copy()
        if not enable_hooks:
            config.clear_section('hooks')
        return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})

    def _validate_pull_reference(self, reference):
        if not (reference.name in self.bookmarks or
                reference.name in self.branches or
                self.get_commit(reference.commit_id)):
            raise CommitDoesNotExistError(
                'Unknown branch, bookmark or commit id')

    def _local_pull(self, repository_path, reference):
        """
        Fetch a branch, bookmark or commit from a local repository.
        """
        repository_path = os.path.abspath(repository_path)
        if repository_path == self.path:
            raise ValueError('Cannot pull from the same repository')

        reference_type_to_option_name = {
            'book': 'bookmark',
            'branch': 'branch',
        }
        option_name = reference_type_to_option_name.get(
            reference.type, 'revision')

        if option_name == 'revision':
            ref = reference.commit_id
        else:
            ref = reference.name

        options = {option_name: [ref]}
        self._remote.pull_cmd(repository_path, hooks=False, **options)
        self._remote.invalidate_vcs_cache()

    def bookmark(self, bookmark, revision=None):
        if isinstance(bookmark, str):
            bookmark = safe_str(bookmark)
        self._remote.bookmark(bookmark, revision=revision)
        self._remote.invalidate_vcs_cache()

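To illustrate the reference-type mapping in _local_pull: each reference type selects the matching pull option, with everything else falling back to a plain revision pull. A runnable stand-in (the namedtuple below is illustrative, not the real vcs Reference class):

from collections import namedtuple

Reference = namedtuple('Reference', ['type', 'name', 'commit_id'])

mapping = {'book': 'bookmark', 'branch': 'branch'}
for ref in [Reference('book', 'pr-merge', 'abc123'),
            Reference('branch', 'default', 'abc123'),
            Reference('tag', 'v1.0', 'abc123')]:
    option = mapping.get(ref.type, 'revision')
    value = ref.commit_id if option == 'revision' else ref.name
    print(option, '->', value)
# bookmark -> pr-merge
# branch   -> default
# revision -> abc123   (fallback for anything else)
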
    def get_path_permissions(self, username):
        hgacl_file = os.path.join(self.path, '.hg/hgacl')

        def read_patterns(suffix):
            svalue = None
            for section, option in [
                    ('narrowacl', username + suffix),
                    ('narrowacl', 'default' + suffix),
                    ('narrowhgacl', username + suffix),
                    ('narrowhgacl', 'default' + suffix)
            ]:
                try:
                    svalue = hgacl.get(section, option)
                    break  # stop at the first value we find
                except configparser.NoOptionError:
                    pass
            if not svalue:
                return None
            result = ['/']
            for pattern in svalue.split():
                result.append(pattern)
                if '*' not in pattern and '?' not in pattern:
                    result.append(pattern + '/*')
            return result

        if os.path.exists(hgacl_file):
            try:
                hgacl = configparser.RawConfigParser()
                hgacl.read(hgacl_file)

                includes = read_patterns('.includes')
                excludes = read_patterns('.excludes')
                return BasePathPermissionChecker.create_from_patterns(
                    includes, excludes)
            except BaseException as e:
                msg = 'Cannot read ACL settings from {} on {}: {}'.format(
                    hgacl_file, self.name, e)
                raise exceptions.RepositoryRequirementError(msg)
        else:
            return None


class MercurialIndexBasedCollectionGenerator(CollectionGenerator):

    def _commit_factory(self, commit_id):
        return self.repo.get_commit(
            commit_idx=commit_id, pre_load=self.pre_load)
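To make the ACL lookup above concrete, here is a sketch of a .hg/hgacl file (contents assumed for illustration) and what get_path_permissions would read from it:

import configparser

sample = """
[narrowacl]
jane.includes = docs src/module
jane.excludes = src/module/secret
default.includes = docs
"""
hgacl = configparser.RawConfigParser()
hgacl.read_string(sample)
# read_patterns('.includes') for username 'jane' finds this first:
print(hgacl.get('narrowacl', 'jane.includes'))  # -> docs src/module
# ...and expands non-glob patterns with a '/*' suffix, yielding
# ['/', 'docs', 'docs/*', 'src/module', 'src/module/*']
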
@@ -1,200 +1,200 b'' | |||||
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2020 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/


import os
import time
import tempfile
import pytest
import subprocess
import configobj
import logging
from urllib.request import urlopen
from urllib.error import URLError
import configparser


from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS
from rhodecode.tests.utils import is_url_reachable

log = logging.getLogger(__name__)

def get_port(pyramid_config):
    config = configparser.ConfigParser()
    config.read(pyramid_config)
    return config.get('server:main', 'port')


def get_host_url(pyramid_config):
    """Construct the host url using the port from the test configuration."""
    return '127.0.0.1:%s' % get_port(pyramid_config)


def assert_no_running_instance(url):
    if is_url_reachable(url):
        print("Hint: Usually this means another instance of the server "
              "is running in the background at %s." % url)
        pytest.fail(
            "Port is not free at %s, cannot start the server." % url)

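A quick check of the get_port() lookup against an in-memory ini (values assumed); note the result is always a string, which is why callers interpolate it with %s:

import configparser

sample_ini = """
[server:main]
host = 127.0.0.1
port = 9900
"""
cfg = configparser.ConfigParser()
cfg.read_string(sample_ini)
assert cfg.get('server:main', 'port') == '9900'
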


class ServerBase(object):
    _args = []
    log_file_name = 'NOT_DEFINED.log'
    status_url_tmpl = 'http://{host}:{port}'

    def __init__(self, config_file, log_file):
        self.config_file = config_file
        config_data = configobj.ConfigObj(config_file)
        self._config = config_data['server:main']

        self._args = []
        self.log_file = log_file or os.path.join(
            tempfile.gettempdir(), self.log_file_name)
        self.process = None
        self.server_out = None
        log.info("Using the {} configuration: {}".format(
            self.__class__.__name__, config_file))

        if not os.path.isfile(config_file):
            raise RuntimeError('Failed to get config at {}'.format(config_file))

    @property
    def command(self):
        return ' '.join(self._args)

    @property
    def http_url(self):
        template = 'http://{host}:{port}/'
        return template.format(**self._config)

    def host_url(self):
        return 'http://' + get_host_url(self.config_file)

    def get_rc_log(self):
        with open(self.log_file) as f:
            return f.read()

    def wait_until_ready(self, timeout=30):
        host = self._config['host']
        port = self._config['port']
        status_url = self.status_url_tmpl.format(host=host, port=port)
        start = time.time()

        while time.time() - start < timeout:
            try:
                urlopen(status_url)
                break
            except URLError:
                time.sleep(0.2)
        else:
            pytest.fail(
                "Starting the {} failed or took more than {} "
                "seconds. cmd: `{}`".format(
                    self.__class__.__name__, timeout, self.command))

        log.info('Server of {} ready at url {}'.format(
            self.__class__.__name__, status_url))

    def shutdown(self):
        self.process.kill()
        self.server_out.flush()
        self.server_out.close()

    def get_log_file_with_port(self):
        log_file = list(self.log_file.partition('.log'))
        log_file.insert(1, get_port(self.config_file))
        log_file = ''.join(log_file)
        return log_file


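A note on the polling loop in wait_until_ready(): Python's while/else runs the else block only when the loop finishes without a break, i.e. when the status url never answered within the timeout. The same idiom as a standalone sketch:

import time

def wait_for(check, timeout=30, interval=0.2):
    start = time.time()
    while time.time() - start < timeout:
        if check():
            return True   # break-equivalent: the condition was met in time
        time.sleep(interval)
    return False          # loop exhausted; caller decides how to fail
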
class RcVCSServer(ServerBase):
    """
    Represents a running VCSServer instance.
    """

    log_file_name = 'rc-vcsserver.log'
    status_url_tmpl = 'http://{host}:{port}/status'

    def __init__(self, config_file, log_file=None):
        super(RcVCSServer, self).__init__(config_file, log_file)
        self._args = ['gunicorn', '--paste', self.config_file]

    def start(self):
        env = os.environ.copy()

        self.log_file = self.get_log_file_with_port()
        self.server_out = open(self.log_file, 'w')

        host_url = self.host_url()
        assert_no_running_instance(host_url)

        log.info('rhodecode-vcsserver start command: {}'.format(' '.join(self._args)))
        log.info('rhodecode-vcsserver starting at: {}'.format(host_url))
        log.info('rhodecode-vcsserver command: {}'.format(self.command))
        log.info('rhodecode-vcsserver logfile: {}'.format(self.log_file))

        self.process = subprocess.Popen(
            self._args, bufsize=0, env=env,
            stdout=self.server_out, stderr=self.server_out)


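Typical fixture flow for either server class (ini path assumed to exist; __init__ raises RuntimeError otherwise): start the process, poll its status endpoint, then tear it down.

server = RcVCSServer('/tmp/test-vcsserver.ini')  # hypothetical config path
server.start()
server.wait_until_ready(timeout=30)
try:
    pass  # run tests against server.http_url here
finally:
    server.shutdown()
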
class RcWebServer(ServerBase):
    """
    Represents a running RCE web server used as a test fixture.
    """

    log_file_name = 'rc-web.log'
    status_url_tmpl = 'http://{host}:{port}/_admin/ops/ping'

    def __init__(self, config_file, log_file=None):
        super(RcWebServer, self).__init__(config_file, log_file)
        self._args = [
            'gunicorn', '--worker-class', 'gevent', '--paste', config_file]

    def start(self):
        env = os.environ.copy()
        env['RC_NO_TMP_PATH'] = '1'

        self.log_file = self.get_log_file_with_port()
        self.server_out = open(self.log_file, 'w')

        host_url = self.host_url()
        assert_no_running_instance(host_url)

        log.info('rhodecode-web starting at: {}'.format(host_url))
        log.info('rhodecode-web command: {}'.format(self.command))
        log.info('rhodecode-web logfile: {}'.format(self.log_file))

        self.process = subprocess.Popen(
            self._args, bufsize=0, env=env,
            stdout=self.server_out, stderr=self.server_out)

    def repo_clone_url(self, repo_name, **kwargs):
        params = {
            'user': TEST_USER_ADMIN_LOGIN,
            'passwd': TEST_USER_ADMIN_PASS,
            'host': get_host_url(self.config_file),
            'cloned_repo': repo_name,
        }
        params.update(**kwargs)
        _url = 'http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s' % params
        return _url
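
repo_clone_url() builds an authenticated clone url from the test credentials and the configured port. A standalone expansion with assumed values (the real login and password come from TEST_USER_ADMIN_LOGIN / TEST_USER_ADMIN_PASS):

params = {
    'user': 'test_admin',        # assumed value of TEST_USER_ADMIN_LOGIN
    'passwd': 'test12',          # assumed value of TEST_USER_ADMIN_PASS
    'host': '127.0.0.1:10020',   # taken from the test ini
    'cloned_repo': 'vcs_test_hg',
}
print('http://%(user)s:%(passwd)s@%(host)s/%(cloned_repo)s' % params)
# -> http://test_admin:test12@127.0.0.1:10020/vcs_test_hg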