@@ -1,430 +1,433 @@
 # Copyright (C) 2016-2023 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 """
 Client for the VCSServer implemented based on HTTP.
 """

 import copy
 import logging
 import threading
 import time
 import urllib.request
 import urllib.error
 import urllib.parse
 import urllib.parse
 import uuid
 import traceback

 import pycurl
 import msgpack
 import requests
 from requests.packages.urllib3.util.retry import Retry

 import rhodecode
 from rhodecode.lib import rc_cache
 from rhodecode.lib.rc_cache.utils import compute_key_from_params
 from rhodecode.lib.system_info import get_cert_path
 from rhodecode.lib.vcs import exceptions, CurlSession
 from rhodecode.lib.utils2 import str2bool

 log = logging.getLogger(__name__)


 # TODO: mikhail: Keep it in sync with vcsserver's
 # HTTPApplication.ALLOWED_EXCEPTIONS
 EXCEPTIONS_MAP = {
     'KeyError': KeyError,
     'URLError': urllib.error.URLError,
 }


 def _remote_call(url, payload, exceptions_map, session, retries=3):

     for attempt in range(retries):
         try:
             response = session.post(url, data=msgpack.packb(payload))
             break
         except pycurl.error as e:
             error_code, error_message = e.args
             if error_code == pycurl.E_RECV_ERROR:
                 log.warning(f'Received a "Connection reset by peer" error. '
                             f'Retrying... ({attempt + 1}/{retries})')
                 continue  # Retry if connection reset error.
             msg = f'{e}. \npycurl traceback: {traceback.format_exc()}'
             raise exceptions.HttpVCSCommunicationError(msg)
         except Exception as e:
             message = getattr(e, 'message', '')
             if 'Failed to connect' in message:
                 # gevent doesn't return proper pycurl errors
                 raise exceptions.HttpVCSCommunicationError(e)
             else:
                 raise

     if response.status_code >= 400:
         content_type = response.content_type
         log.error('Call to %s returned non 200 HTTP code: %s [%s]',
                   url, response.status_code, content_type)
         raise exceptions.HttpVCSCommunicationError(repr(response.content))

     try:
         response = msgpack.unpackb(response.content)
     except Exception:
         log.exception('Failed to decode response from msgpack')
         raise

     error = response.get('error')
     if error:
         type_ = error.get('type', 'Exception')
         exc = exceptions_map.get(type_, Exception)
         exc = exc(error.get('message'))
         try:
             exc._vcs_kind = error['_vcs_kind']
         except KeyError:
             pass

         try:
             exc._vcs_server_traceback = error['traceback']
             exc._vcs_server_org_exc_name = error['org_exc']
             exc._vcs_server_org_exc_tb = error['org_exc_tb']
         except KeyError:
             pass

         exc.add_note(attach_exc_details(error))
         raise exc  # raising the org exception from vcsserver
     return response.get('result')


 def attach_exc_details(error):
     note = '-- EXC NOTE -- :\n'
     note += f'vcs_kind: {error.get("_vcs_kind")}\n'
     note += f'org_exc: {error.get("_vcs_kind")}\n'
     note += f'tb: {error.get("traceback")}\n'
     note += '-- END EXC NOTE --'
     return note


 def _streaming_remote_call(url, payload, exceptions_map, session, chunk_size):
     try:
         headers = {
             'X-RC-Method': payload.get('method'),
             'X-RC-Repo-Name': payload.get('_repo_name')
         }
         response = session.post(url, data=msgpack.packb(payload), headers=headers)
     except pycurl.error as e:
         error_code, error_message = e.args
         msg = f'{e}. \npycurl traceback: {traceback.format_exc()}'
         raise exceptions.HttpVCSCommunicationError(msg)
     except Exception as e:
         message = getattr(e, 'message', '')
         if 'Failed to connect' in message:
             # gevent doesn't return proper pycurl errors
             raise exceptions.HttpVCSCommunicationError(e)
         else:
             raise

     if response.status_code >= 400:
         log.error('Call to %s returned non 200 HTTP code: %s',
                   url, response.status_code)
         raise exceptions.HttpVCSCommunicationError(repr(response.content))

     return response.iter_content(chunk_size=chunk_size)


 class ServiceConnection(object):
     def __init__(self, server_and_port, backend_endpoint, session_factory):
         self.url = urllib.parse.urljoin(f'http://{server_and_port}', backend_endpoint)
         self._session_factory = session_factory

     def __getattr__(self, name):
         def f(*args, **kwargs):
             return self._call(name, *args, **kwargs)
         return f

     @exceptions.map_vcs_exceptions
     def _call(self, name, *args, **kwargs):
         payload = {
             'id': str(uuid.uuid4()),
             'method': name,
             'params': {'args': args, 'kwargs': kwargs}
         }
         return _remote_call(
             self.url, payload, EXCEPTIONS_MAP, self._session_factory())


 class RemoteVCSMaker(object):

     def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory):
         self.url = urllib.parse.urljoin(f'http://{server_and_port}', backend_endpoint)
         self.stream_url = urllib.parse.urljoin(f'http://{server_and_port}', backend_endpoint+'/stream')

         self._session_factory = session_factory
         self.backend_type = backend_type

     @classmethod
     def init_cache_region(cls, repo_id):
         cache_namespace_uid = f'repo.{repo_id}'
         region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
         return region, cache_namespace_uid

     def __call__(self, path, repo_id, config, with_wire=None):
         log.debug('%s RepoMaker call on %s', self.backend_type.upper(), path)
         return RemoteRepo(path, repo_id, config, self, with_wire=with_wire)

     def __getattr__(self, name):
         def remote_attr(*args, **kwargs):
             return self._call(name, *args, **kwargs)
         return remote_attr

     @exceptions.map_vcs_exceptions
     def _call(self, func_name, *args, **kwargs):
         payload = {
             'id': str(uuid.uuid4()),
             'method': func_name,
             'backend': self.backend_type,
             'params': {'args': args, 'kwargs': kwargs}
         }
         url = self.url
         return _remote_call(url, payload, EXCEPTIONS_MAP, self._session_factory())


 class RemoteRepo(object):
     CHUNK_SIZE = 16384

     def __init__(self, path, repo_id, config, remote_maker, with_wire=None):
         self.url = remote_maker.url
         self.stream_url = remote_maker.stream_url
         self._session = remote_maker._session_factory()

         cache_repo_id = self._repo_id_sanitizer(repo_id)
         _repo_name = self._get_repo_name(config, path)
         self._cache_region, self._cache_namespace = \
             remote_maker.init_cache_region(cache_repo_id)

         with_wire = with_wire or {}

         repo_state_uid = with_wire.get('repo_state_uid') or 'state'

         self._wire = {
             "_repo_name": _repo_name,
             "path": path,  # repo path
             "repo_id": repo_id,
             "cache_repo_id": cache_repo_id,
             "config": config,
             "repo_state_uid": repo_state_uid,
             "context": self._create_vcs_cache_context(path, repo_state_uid)
         }

         if with_wire:
             self._wire.update(with_wire)

         # NOTE(johbo): Trading complexity for performance. Avoiding the call to
         # log.debug brings a few percent gain even if it is not active.
         if log.isEnabledFor(logging.DEBUG):
             self._call_with_logging = True

         self.cert_dir = get_cert_path(rhodecode.CONFIG.get('__file__'))

     def _get_repo_name(self, config, path):
         repo_store = config.get('paths', '/')
         return path.split(repo_store)[-1].lstrip('/')

     def _repo_id_sanitizer(self, repo_id):
         pathless = repo_id.replace('/', '__').replace('-', '_')
         return ''.join(char if ord(char) < 128 else '_{}_'.format(ord(char)) for char in pathless)

     def __getattr__(self, name):

         if name.startswith('stream:'):
             def repo_remote_attr(*args, **kwargs):
                 return self._call_stream(name, *args, **kwargs)
         else:
             def repo_remote_attr(*args, **kwargs):
                 return self._call(name, *args, **kwargs)

         return repo_remote_attr

     def _base_call(self, name, *args, **kwargs):
         # TODO: oliver: This is currently necessary pre-call since the
         # config object is being changed for hooking scenarios
         wire = copy.deepcopy(self._wire)
         wire["config"] = wire["config"].serialize()
         wire["config"].append(('vcs', 'ssl_dir', self.cert_dir))

         payload = {
             'id': str(uuid.uuid4()),
             'method': name,
             "_repo_name": wire['_repo_name'],
             'params': {'wire': wire, 'args': args, 'kwargs': kwargs}
         }

         context_uid = wire.get('context')
         return context_uid, payload

     def get_local_cache(self, name, args):
         cache_on = False
         cache_key = ''
         local_cache_on = rhodecode.ConfigGet().get_bool('vcs.methods.cache')

         cache_methods = [
             'branches', 'tags', 'bookmarks',
             'is_large_file', 'is_binary',
             'fctx_size', 'stream:fctx_node_data', 'blob_raw_length',
             'node_history',
             'revision', 'tree_items',
             'ctx_list', 'ctx_branch', 'ctx_description',
             'bulk_request',
             'assert_correct_path',
             'is_path_valid_repository',
         ]

-        if local_cache_on and name in cache_methods:
+        wire_cache = self._wire['cache']
+
+        if local_cache_on and wire_cache and name in cache_methods:
             cache_on = True
             repo_state_uid = self._wire['repo_state_uid']
             call_args = [a for a in args]
             cache_key = compute_key_from_params(repo_state_uid, name, *call_args)

         return cache_on, cache_key

     @exceptions.map_vcs_exceptions
     def _call(self, name, *args, **kwargs):
+
         context_uid, payload = self._base_call(name, *args, **kwargs)
         url = self.url

         start = time.time()
         cache_on, cache_key = self.get_local_cache(name, args)

         @self._cache_region.conditional_cache_on_arguments(
             namespace=self._cache_namespace, condition=cache_on and cache_key)
         def remote_call(_cache_key):
             if self._call_with_logging:
                 args_repr = f'ARG: {str(args):.512}|KW: {str(kwargs):.512}'
                 log.debug('Calling %s@%s with args:%r. wire_context: %s cache_on: %s',
                           url, name, args_repr, context_uid, cache_on)
             return _remote_call(url, payload, EXCEPTIONS_MAP, self._session)

         result = remote_call(cache_key)
         if self._call_with_logging:
             log.debug('Call %s@%s took: %.4fs. wire_context: %s',
                       url, name, time.time()-start, context_uid)
         return result

     @exceptions.map_vcs_exceptions
     def _call_stream(self, name, *args, **kwargs):
         context_uid, payload = self._base_call(name, *args, **kwargs)
         payload['chunk_size'] = self.CHUNK_SIZE
         url = self.stream_url

         start = time.time()
         cache_on, cache_key = self.get_local_cache(name, args)

         # Cache is a problem because this is a stream
         def streaming_remote_call(_cache_key):
             if self._call_with_logging:
                 args_repr = f'ARG: {str(args):.512}|KW: {str(kwargs):.512}'
                 log.debug('Calling %s@%s with args:%r. wire_context: %s cache_on: %s',
                           url, name, args_repr, context_uid, cache_on)
             return _streaming_remote_call(url, payload, EXCEPTIONS_MAP, self._session, self.CHUNK_SIZE)

         result = streaming_remote_call(cache_key)
         if self._call_with_logging:
             log.debug('Call %s@%s took: %.4fs. wire_context: %s',
                       url, name, time.time()-start, context_uid)
         return result

     def __getitem__(self, key):
         return self.revision(key)

     def _create_vcs_cache_context(self, *args):
         """
         Creates a unique string which is passed to the VCSServer on every
         remote call. It is used as cache key in the VCSServer.
         """
         hash_key = '-'.join(map(str, args))
         return str(uuid.uuid5(uuid.NAMESPACE_URL, hash_key))

     def invalidate_vcs_cache(self):
         """
         This invalidates the context which is sent to the VCSServer on every
         call to a remote method. It forces the VCSServer to create a fresh
         repository instance on the next call to a remote method.
         """
         self._wire['context'] = str(uuid.uuid4())


 class VcsHttpProxy(object):

     CHUNK_SIZE = 16384

     def __init__(self, server_and_port, backend_endpoint):
         retries = Retry(total=5, connect=None, read=None, redirect=None)

         adapter = requests.adapters.HTTPAdapter(max_retries=retries)
         self.base_url = urllib.parse.urljoin('http://%s' % server_and_port, backend_endpoint)
         self.session = requests.Session()
         self.session.mount('http://', adapter)

     def handle(self, environment, input_data, *args, **kwargs):
         data = {
             'environment': environment,
             'input_data': input_data,
             'args': args,
             'kwargs': kwargs
         }
         result = self.session.post(
             self.base_url, msgpack.packb(data), stream=True)
         return self._get_result(result)

     def _deserialize_and_raise(self, error):
         exception = Exception(error['message'])
         try:
             exception._vcs_kind = error['_vcs_kind']
         except KeyError:
             pass
         raise exception

     def _iterate(self, result):
         unpacker = msgpack.Unpacker()
         for line in result.iter_content(chunk_size=self.CHUNK_SIZE):
             unpacker.feed(line)
             yield from unpacker

     def _get_result(self, result):
         iterator = self._iterate(result)
         error = next(iterator)
         if error:
             self._deserialize_and_raise(error)

         status = next(iterator)
         headers = next(iterator)

         return iterator, status, headers


 class ThreadlocalSessionFactory(object):
     """
     Creates one CurlSession per thread on demand.
     """

     def __init__(self):
         self._thread_local = threading.local()

     def __call__(self):
         if not hasattr(self._thread_local, 'curl_session'):
             self._thread_local.curl_session = CurlSession()
         return self._thread_local.curl_session
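
The behavioral change in the hunk above is the new `wire_cache` gate in `RemoteRepo.get_local_cache`: besides the global `vcs.methods.cache` setting and the method allow-list, the per-connection wire flag is now consulted, so repositories opened with `with_wire={"cache": False}` (as `repo_scan` does in the second file below) bypass the local dogpile cache entirely. A minimal, self-contained sketch of that three-way gate, using illustrative names (`should_cache`, `CACHE_METHODS`) that are not part of the RhodeCode API:

    # Sketch only: mirrors the gating logic of RemoteRepo.get_local_cache.
    CACHE_METHODS = {'branches', 'tags', 'bookmarks', 'bulk_request'}

    def should_cache(method_name, wire, global_cache_enabled=True):
        # All three gates must agree: the global 'vcs.methods.cache'
        # setting, the per-connection wire flag, and the allow-list
        # of read-mostly methods that are safe to cache locally.
        return bool(global_cache_enabled and wire.get('cache') and method_name in CACHE_METHODS)

    assert should_cache('branches', {'cache': True})        # cached, as before
    assert not should_cache('branches', {'cache': False})   # new: wire flag disables caching
    assert not should_cache('push', {'cache': True})        # not an allow-listed method
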
@@ -1,1053 +1,1051 @@
 # Copyright (C) 2010-2023 RhodeCode GmbH
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License, version 3
 # (only), as published by the Free Software Foundation.
 #
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

 """
 Scm model for RhodeCode
 """

 import os.path
 import traceback
 import logging
 import io

 from sqlalchemy import func
 from zope.cachedescriptors.property import Lazy as LazyProperty

 import rhodecode
 from rhodecode.lib.str_utils import safe_bytes
 from rhodecode.lib.vcs import get_backend
 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
 from rhodecode.lib.vcs.nodes import FileNode
 from rhodecode.lib.vcs.backends.base import EmptyCommit
 from rhodecode.lib import helpers as h, rc_cache
 from rhodecode.lib.auth import (
     HasRepoPermissionAny, HasRepoGroupPermissionAny,
     HasUserGroupPermissionAny)
 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
 from rhodecode.lib import hooks_utils
 from rhodecode.lib.utils import (
     get_filesystem_repos, make_db_config)
 from rhodecode.lib.str_utils import safe_str
 from rhodecode.lib.system_info import get_system_info
 from rhodecode.model import BaseModel
 from rhodecode.model.db import (
     or_, false, null,
     Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
     PullRequest, FileStore)
 from rhodecode.model.settings import VcsSettingsModel
 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl

 log = logging.getLogger(__name__)


 class UserTemp(object):
     def __init__(self, user_id):
         self.user_id = user_id

     def __repr__(self):
         return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)


 class RepoTemp(object):
     def __init__(self, repo_id):
         self.repo_id = repo_id

     def __repr__(self):
         return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)


 class SimpleCachedRepoList(object):
     """
     Lighter version of iteration of repos without the scm initialisation,
     and with cache usage
     """
     def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
         self.db_repo_list = db_repo_list
         self.repos_path = repos_path
         self.order_by = order_by
         self.reversed = (order_by or '').startswith('-')
         if not perm_set:
             perm_set = ['repository.read', 'repository.write',
                         'repository.admin']
         self.perm_set = perm_set

     def __len__(self):
         return len(self.db_repo_list)

     def __repr__(self):
         return '<{} ({})>'.format(self.__class__.__name__, self.__len__())

     def __iter__(self):
         for dbr in self.db_repo_list:
             # check permission at this level
             has_perm = HasRepoPermissionAny(*self.perm_set)(
                 dbr.repo_name, 'SimpleCachedRepoList check')
             if not has_perm:
                 continue

             tmp_d = {
                 'name': dbr.repo_name,
                 'dbrepo': dbr.get_dict(),
                 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
             }
             yield tmp_d


 class _PermCheckIterator(object):

     def __init__(
             self, obj_list, obj_attr, perm_set, perm_checker,
             extra_kwargs=None):
         """
         Creates iterator from given list of objects, additionally
         checking permission for them from perm_set var

         :param obj_list: list of db objects
         :param obj_attr: attribute of object to pass into perm_checker
         :param perm_set: list of permissions to check
         :param perm_checker: callable to check permissions against
         """
         self.obj_list = obj_list
         self.obj_attr = obj_attr
         self.perm_set = perm_set
         self.perm_checker = perm_checker(*self.perm_set)
         self.extra_kwargs = extra_kwargs or {}

     def __len__(self):
         return len(self.obj_list)

     def __repr__(self):
         return '<{} ({})>'.format(self.__class__.__name__, self.__len__())

     def __iter__(self):
         for db_obj in self.obj_list:
             # check permission at this level
             # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
             name = db_obj.__dict__.get(self.obj_attr, None)
             if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
                 continue

             yield db_obj


 class RepoList(_PermCheckIterator):

     def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
         if not perm_set:
             perm_set = ['repository.read', 'repository.write', 'repository.admin']

         super().__init__(
             obj_list=db_repo_list,
             obj_attr='_repo_name', perm_set=perm_set,
             perm_checker=HasRepoPermissionAny,
             extra_kwargs=extra_kwargs)


 class RepoGroupList(_PermCheckIterator):

     def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
         if not perm_set:
             perm_set = ['group.read', 'group.write', 'group.admin']

         super().__init__(
             obj_list=db_repo_group_list,
             obj_attr='_group_name', perm_set=perm_set,
             perm_checker=HasRepoGroupPermissionAny,
             extra_kwargs=extra_kwargs)


 class UserGroupList(_PermCheckIterator):

     def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
         if not perm_set:
             perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']

         super().__init__(
             obj_list=db_user_group_list,
             obj_attr='users_group_name', perm_set=perm_set,
             perm_checker=HasUserGroupPermissionAny,
             extra_kwargs=extra_kwargs)


 class ScmModel(BaseModel):
     """
     Generic Scm Model
     """

     def repo_scan(self, repos_path=None):
         """
         Listing of repositories in given path. This path should not be a
         repository itself. Return a dictionary of repository objects

         :param repos_path: path to directory containing repositories
         """

         if repos_path is None:
             repos_path = self.repos_path

         log.info('scanning for repositories in %s', repos_path)

         config = make_db_config()
         config.set('extensions', 'largefiles', '')
         repos = {}

         for name, path in get_filesystem_repos(repos_path, recursive=True):
             # name needs to be decomposed and put back together using the /
             # since this is the internal storage separator for rhodecode
             name = Repository.normalize_repo_name(name)

             try:
                 if name in repos:
-                    raise RepositoryError('Duplicate repository name %s '
-                                          'found in %s' % (name, path))
+                    raise RepositoryError(f'Duplicate repository name {name} found in {path}')
                 elif path[0] in rhodecode.BACKENDS:
                     backend = get_backend(path[0])
-                    repos[name] = backend(path[1], config=config,
-                                          with_wire={"cache": False})
+                    repos[name] = backend(path[1], config=config, with_wire={"cache": False})
222 | except OSError: |
|
220 | except OSError: | |
223 | continue |
|
221 | continue | |
224 | except RepositoryError: |
|
222 | except RepositoryError: | |
225 | log.exception('Failed to create a repo') |
|
223 | log.exception('Failed to create a repo') | |
226 | continue |
|
224 | continue | |
227 |
|
225 | |||
228 | log.debug('found %s paths with repositories', len(repos)) |
|
226 | log.debug('found %s paths with repositories', len(repos)) | |
229 | return repos |
|
227 | return repos | |
230 |
|
228 | |||
231 | def get_repos(self, all_repos=None, sort_key=None): |
|
229 | def get_repos(self, all_repos=None, sort_key=None): | |
232 | """ |
|
230 | """ | |
233 | Get all repositories from db and for each repo create it's |
|
231 | Get all repositories from db and for each repo create it's | |
234 | backend instance and fill that backed with information from database |
|
232 | backend instance and fill that backed with information from database | |
235 |
|
233 | |||
236 | :param all_repos: list of repository names as strings |
|
234 | :param all_repos: list of repository names as strings | |
237 | give specific repositories list, good for filtering |
|
235 | give specific repositories list, good for filtering | |
238 |
|
236 | |||
239 | :param sort_key: initial sorting of repositories |
|
237 | :param sort_key: initial sorting of repositories | |
240 | """ |
|
238 | """ | |
241 | if all_repos is None: |
|
239 | if all_repos is None: | |
242 | all_repos = self.sa.query(Repository)\ |
|
240 | all_repos = self.sa.query(Repository)\ | |
243 | .filter(Repository.group_id == null())\ |
|
241 | .filter(Repository.group_id == null())\ | |
244 | .order_by(func.lower(Repository.repo_name)).all() |
|
242 | .order_by(func.lower(Repository.repo_name)).all() | |
245 | repo_iter = SimpleCachedRepoList( |
|
243 | repo_iter = SimpleCachedRepoList( | |
246 | all_repos, repos_path=self.repos_path, order_by=sort_key) |
|
244 | all_repos, repos_path=self.repos_path, order_by=sort_key) | |
247 | return repo_iter |
|
245 | return repo_iter | |
248 |
|
246 | |||
249 | @staticmethod |
|
247 | @staticmethod | |
250 | def get_parent_commits(parent_commit, scm_instance): |
|
248 | def get_parent_commits(parent_commit, scm_instance): | |
251 | if not parent_commit: |
|
249 | if not parent_commit: | |
252 | parent_commit = EmptyCommit(alias=scm_instance.alias) |
|
250 | parent_commit = EmptyCommit(alias=scm_instance.alias) | |
253 |
|
251 | |||
254 | if isinstance(parent_commit, EmptyCommit): |
|
252 | if isinstance(parent_commit, EmptyCommit): | |
255 | # EmptyCommit means we're editing empty repository |
|
253 | # EmptyCommit means we're editing empty repository | |
256 | parents = None |
|
254 | parents = None | |
257 | else: |
|
255 | else: | |
258 | parents = [parent_commit] |
|
256 | parents = [parent_commit] | |
259 | return parent_commit, parents |
|
257 | return parent_commit, parents | |
260 |
|
258 | |||
261 | def initialize_inmemory_vars(self, user, repo, message, author): |
|
259 | def initialize_inmemory_vars(self, user, repo, message, author): | |
262 | """ |
|
260 | """ | |
263 | Initialize node specific objects for further usage |
|
261 | Initialize node specific objects for further usage | |
264 | """ |
|
262 | """ | |
265 | user = self._get_user(user) |
|
263 | user = self._get_user(user) | |
266 | scm_instance = repo.scm_instance(cache=False) |
|
264 | scm_instance = repo.scm_instance(cache=False) | |
267 | message = safe_str(message) |
|
265 | message = safe_str(message) | |
268 | commiter = user.full_contact |
|
266 | commiter = user.full_contact | |
269 | author = safe_str(author) if author else commiter |
|
267 | author = safe_str(author) if author else commiter | |
270 | imc = scm_instance.in_memory_commit |
|
268 | imc = scm_instance.in_memory_commit | |
271 |
|
269 | |||
272 | return user, scm_instance, message, commiter, author, imc |
|
270 | return user, scm_instance, message, commiter, author, imc | |
273 |
|
271 | |||
274 | def get_repo_groups(self, all_groups=None): |
|
272 | def get_repo_groups(self, all_groups=None): | |
275 | if all_groups is None: |
|
273 | if all_groups is None: | |
276 | all_groups = RepoGroup.query()\ |
|
274 | all_groups = RepoGroup.query()\ | |
277 | .filter(RepoGroup.group_parent_id == null()).all() |
|
275 | .filter(RepoGroup.group_parent_id == null()).all() | |
278 | return [x for x in RepoGroupList(all_groups)] |
|
276 | return [x for x in RepoGroupList(all_groups)] | |
279 |
|
277 | |||
    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. The `delete` flag
        removes the cache entries instead.

        :param repo_name: the repo_name for which caches should be marked
            invalid or deleted
        :param delete: delete the entry keys instead of setting a bool
            flag on them, and also purge the caches used by dogpile
        """
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=repo.repo_id)
            CacheKey.set_invalidate(repo_namespace_key, delete=delete)

            repo_id = repo.repo_id
            config = repo._config
            config.set('extensions', 'largefiles', '')
            repo.update_commit_cache(config=config, cs_cache=None)
            if delete:
                cache_namespace_uid = f'cache_repo.{repo_id}'
                rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)

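    # Illustrative usage sketch, not part of the original module; the repo
    # name below is a made-up example. After a write operation one would
    # typically call, assuming `model = ScmModel()`:
    #
    #   model.mark_for_invalidation('some-group/some-repo')               # flag stale
    #   model.mark_for_invalidation('some-group/some-repo', delete=True)  # purge
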
    def toggle_following_repo(self, follow_repo_id, user_id):
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def toggle_following_user(self, follow_user_id, user_id):
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

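    # A hedged behavioral note, not in the original source: both toggle_*
    # helpers delete the UserFollowing row when it exists and create it
    # otherwise, so two successive calls cancel out (ids below are examples):
    #
    #   model.toggle_following_repo(follow_repo_id=1, user_id=2)  # follow
    #   model.toggle_following_repo(follow_repo_id=1, user_id=2)  # unfollow
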
    def is_following_repo(self, repo_name, user_id, cache=False):
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def is_following_user(self, username, user_id, cache=False):
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def get_followers(self, repo):
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == repo).count()

    def get_forks(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(Repository)\
            .filter(Repository.fork == repo).count()

    def get_pull_requests(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
            .filter(PullRequest.target_repo == repo)\
            .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()

    def get_artifacts(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(FileStore)\
            .filter(FileStore.repo == repo)\
            .filter(or_(FileStore.hidden == null(), FileStore.hidden == false())).count()

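    # Illustrative only (repo name is a placeholder): the helpers above all
    # resolve their argument via _get_repo() and return plain integer counts:
    #
    #   n_followers = model.get_followers('some-repo')
    #   n_forks = model.get_forks('some-repo')
    #   n_open_prs = model.get_pull_requests('some-repo')  # closed PRs excluded
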
    def mark_as_fork(self, repo, fork, user):
        repo = self._get_repo(repo)
        fork = self._get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")

        if fork and repo.repo_type != fork.repo_type:
            raise RepositoryError(
                "Cannot set repository as fork of repository with other type")

        repo.fork = fork
        self.sa.add(repo)
        return repo

    def pull_changes(self, repo, username, remote_uri=None, validate_uri=True, **kwargs):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls;
            # this is needed because these tasks can be executed via the
            # scheduler without proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.fetch(remote_uri, **kwargs)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise

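    # Sketch of a mirror-pull call (names are examples, not from the
    # original source); remote_uri defaults to the repo's stored clone_uri:
    #
    #   model.pull_changes('some-repo', username='admin')
    #   model.pull_changes('some-repo', username='admin',
    #                      remote_uri='https://example.com/upstream.git')
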
    def push_changes(self, repo, username, remote_uri=None, validate_uri=True, **kwargs):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.push_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a push uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            # NOTE(marcink): add extra validation so we skip invalid urls;
            # this is needed because these tasks can be executed via the
            # scheduler without proper validation of remote_uri
            if validate_uri:
                config = make_db_config(clear_session=False)
                url_validator(remote_uri, dbrepo.repo_type, config)
        except InvalidCloneUrl:
            raise

        try:
            repo.push(remote_uri, **kwargs)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content: bytes, f_path: bytes, branch: str = None):
        """
        Commits a change to a single file.
        """
        user = self._get_user(user)

        # message and author need to be str here; the proper backend then
        # translates them into the required type
        message = safe_str(message)
        author = safe_str(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=branch or commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches; even if the commit fails we want a fresh object
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
        return tip

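    # A minimal usage sketch with assumed placeholder values (repo name,
    # commit object, user id and author are all hypothetical):
    #
    #   tip = model.commit_change(
    #       repo=db_repo.scm_instance(), repo_name='some-repo',
    #       commit=head_commit, user=2, author='Jane <jane@example.com>',
    #       message='fix typo', content=b'new text\n', f_path=b'docs/readme.txt')
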
    def _sanitize_path(self, f_path: bytes):
        if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
            raise NonRelativePathError(b'%b is not a relative path' % f_path)
        if f_path:
            f_path = os.path.normpath(f_path)
        return f_path

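    # Behavioral sketch of _sanitize_path (operates on byte paths):
    #
    #   self._sanitize_path(b'docs/readme.txt')    # -> b'docs/readme.txt'
    #   self._sanitize_path(b'docs/./readme.txt')  # -> b'docs/readme.txt' (normpath)
    #   self._sanitize_path(b'../etc/passwd')      # raises NonRelativePathError
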
    def get_dirnode_metadata(self, request, commit, dir_node):
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # we skip anything that is not a file-node
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data

    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        Recursively walk the root dir and return all paths found there,
        based on the repository walk function.

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return entries as plain paths; if False, return dicts
            with descriptions
        :param extended_info: show additional info such as md5, binary, size etc
        :param content: add node contents to the returned data
        :param max_file_bytes: do not return file contents over this limit
        """
        _files = list()
        _dirs = list()

        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')

            # get RootNode, inject pre-load options before walking
            top_node = commit.get_node(root_path)
            extended_info_pre_load = []
            if extended_info:
                extended_info_pre_load += ['md5']
            top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load

            for __, dirs, files in commit.walk(top_node):

                for f in files:
                    _content = None
                    _data = f_name = f.str_path

                    if not flat:
                        _data = {
                            "name": h.escape(f_name),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            over_size_limit = (max_file_bytes is not None
                                               and f.size > max_file_bytes)
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = f.str_content

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)

                for d in dirs:
                    _data = d_name = d.str_path
                    if not flat:
                        _data = {
                            "name": h.escape(d_name),
                            "type": "dir",
                        }
                        if extended_info:
                            _data.update({
                                "md5": "",
                                "binary": False,
                                "size": 0,
                                "extension": "",
                            })
                        if content:
                            _data.update({
                                "content": None
                            })
                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_nodes")
            raise

        return _dirs, _files

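    # Illustrative call (repo name and commit id are placeholders):
    #
    #   dirs, files = model.get_nodes('some-repo', commit_id='tip',
    #                                 root_path='/', flat=False,
    #                                 extended_info=True)
    #   # files -> [{'name': ..., 'type': 'file', 'md5': ..., 'size': ..., ...}, ...]
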
    def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
        """
        Generate files for quick filter in files view
        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')

            top_node = commit.get_node(root_path)
            top_node.default_pre_load = []

            for __, dirs, files in commit.walk(top_node):
                for f in files:
                    _data = {
                        "name": h.escape(f.str_path),
                        "type": "file",
                    }
                    _files.append(_data)

                for d in dirs:
                    _data = {
                        "name": h.escape(d.str_path),
                        "type": "dir",
                    }
                    _dirs.append(_data)
        except RepositoryError:
            log.exception("Exception in get_quick_filter_nodes")
            raise

        return _dirs, _files

    def get_node(self, repo_name, commit_id, file_path,
                 extended_info=False, content=False, max_file_bytes=None, cache=True):
        """
        retrieve single node from commit
        """
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)

            file_node = commit.get_node(file_path)
            if file_node.is_dir():
                raise RepositoryError('The given path is a directory')

            _content = None
            f_name = file_node.str_path

            file_data = {
                "name": h.escape(f_name),
                "type": "file",
            }

            if extended_info:
                file_data.update({
                    "extension": file_node.extension,
                    "mimetype": file_node.mimetype,
                })

            if cache:
                md5 = file_node.md5
                is_binary = file_node.is_binary
                size = file_node.size
            else:
                is_binary, md5, size, _content = file_node.metadata_uncached()

            file_data.update({
                "md5": md5,
                "binary": is_binary,
                "size": size,
            })

            if content and cache:
                # get content + cache
                size = file_node.size
                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not file_node.is_binary and not over_size_limit:
                    full_content = safe_str(file_node.content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })
            elif content:
                # get content *without* cache
                if _content is None:
                    is_binary, md5, size, _content = file_node.metadata_uncached()

                over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
                full_content = None
                all_lines = 0
                if not is_binary and not over_size_limit:
                    full_content = safe_str(_content)
                    all_lines, empty_lines = file_node.count_lines(full_content)

                file_data.update({
                    "content": full_content,
                    "lines": all_lines
                })

        except RepositoryError:
            log.exception("Exception in get_node")
            raise

        return file_data

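    # Sketch: fetching one file's metadata plus content while bypassing the
    # cache (placeholder names):
    #
    #   file_data = model.get_node('some-repo', 'tip', 'README.rst',
    #                              extended_info=True, content=True, cache=False)
    #   # file_data['content'] stays None for binary or over-limit files
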
    def get_fts_data(self, repo_name, commit_id, root_path='/'):
        """
        Fetch node tree for usage in full text search
        """
        tree_info = list()

        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            top_node = commit.get_node(root_path)
            top_node.default_pre_load = []

            for __, dirs, files in commit.walk(top_node):

                for f in files:
                    is_binary, md5, size, _content = f.metadata_uncached()
                    _data = {
                        "name": f.str_path,
                        "md5": md5,
                        "extension": f.extension,
                        "binary": is_binary,
                        "size": size
                    }

                    tree_info.append(_data)

        except RepositoryError:
            log.exception("Exception in get_fts_data")
            raise

        return tree_info

    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits multiple given nodes into `repo`.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit; can be empty, then it's
            an initial commit
        :param author: author of the commit; can be different from the
            committer, only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new committed commit
        """
        user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
            user, repo, message, author)

        parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)

        upload_file_types = (io.BytesIO, io.BufferedRandom)
        processed_nodes = []
        for filename, content_dict in nodes.items():
            if not isinstance(filename, bytes):
                raise ValueError('filename key in nodes needs to be bytes')
            content = content_dict['content']
            if not isinstance(content, upload_file_types + (bytes,)):
                raise ValueError(f'content key value in nodes needs to be bytes or one of {upload_file_types}')

        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']

            # decoding here will force that we have proper encoded values;
            # in any other case this will throw exceptions and deny commit
            if isinstance(content, bytes):
                pass
            elif isinstance(content, upload_file_types):
                content = content.read()
            else:
                raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
            processed_nodes.append((f_path, content))

        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                hook_type='post_push',
                commit_ids=[tip.raw_id])
        return tip

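    # Hedged example of the `nodes` mapping create_nodes expects: bytes keys,
    # and bytes or file-like content values (names below are invented):
    #
    #   nodes = {b'docs/intro.rst': {'content': b'Intro\n=====\n'}}
    #   tip = model.create_nodes(user=2, repo=db_repo,
    #                            message='add intro docs', nodes=nodes)
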
    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
            user, repo, message, author)

        parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, which can be a rename of the old one; also sanitize
            # the path against relative-path tricks like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']
            file_mode = data.get('mode')
            filenode = FileNode(old_filename, content=content, mode=file_mode)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficiently, needs vcs lib changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content, mode=file_mode))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches; even if the commit fails we want a fresh object
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])

        return tip

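    # The `nodes` mapping for update_nodes carries one operation per entry;
    # a sketch with invented filenames ('op' is one of 'add'/'del'/'mod'):
    #
    #   nodes = {
    #       b'old.txt': {'filename': b'new.txt', 'content': b'...',
    #                    'op': 'mod', 'mode': 0o100644},  # rename + modify
    #       b'junk.txt': {'filename': b'junk.txt', 'content': b'', 'op': 'del'},
    #   }
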
    def update_binary_node(self, user, repo, message, node, parent_commit=None, author=None):
        user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
            user, repo, message, author)

        parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)

        file_path = node.get('file_path')
        if isinstance(raw_content := node.get('content'), (io.BytesIO, io.BufferedRandom)):
            content = raw_content.read()
        else:
            raise Exception("Wrong content was provided")
        file_node = FileNode(file_path, content=content)
        imc.change(file_node)

        try:
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            self.mark_for_invalidation(repo.repo_name)

        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', hook_type='post_push',
            repo_name=repo.repo_name, repo_type=scm_instance.alias,
            commit_ids=[tip.raw_id])
        return tip

    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes multiple given nodes from `repo`.

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit; can be empty, then it's
            an initial commit
        :param author: author of the commit; can be different from the
            committer, only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """
        user, scm_instance, message, commiter, author, imc = self.initialize_inmemory_vars(
            user, repo, message, author)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty; for compatibility this allows the same
            # dict structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((safe_bytes(f_path), content))

        parent_commit, parents = self.get_parent_commits(parent_commit, scm_instance)

        # remove multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local', hook_type='post_push',
                repo_name=repo.repo_name, repo_type=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip

    def strip(self, repo, commit_id, branch):
        scm_instance = repo.scm_instance(cache=False)
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)

    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

    @classmethod
    def backend_landing_ref(cls, repo_type):
        """
        Return a default landing ref based on a repository type.
        """
        landing_ref = {
            'hg': ('branch:default', 'default'),
            'git': ('branch:master', 'master'),
            'svn': ('rev:tip', 'latest tip'),
            'default': ('rev:tip', 'latest tip'),
        }

        return landing_ref.get(repo_type) or landing_ref['default']

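    # For example, backend_landing_ref('hg') returns ('branch:default',
    # 'default'), while unknown repo types fall back to ('rev:tip',
    # 'latest tip').
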
    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select options with tags, branches and bookmarks (the
        latter for hg only), grouped by type.

        :param repo: repository to generate the options for; if not given,
            only the defaults are returned
        """
        from rhodecode.lib.vcs.backends.git import GitRepository

        _ = translator
        repo = self._get_repo(repo)

        if repo:
            repo_type = repo.repo_type
        else:
            repo_type = 'default'

        default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)

        default_ref_options = [
            [default_landing_ref, landing_ref_lbl]
        ]
        default_choices = [
            default_landing_ref
        ]

        if not repo:
            # presented at NEW repo creation
            return default_choices, default_ref_options

        repo = repo.scm_instance()

        ref_options = [(default_landing_ref, landing_ref_lbl)]
        choices = [default_landing_ref]

        # branches
        branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
        if not branch_group:
            # new repo, or a repo without any branches yet
            branch_group = default_ref_options

        branches_group = (branch_group, _("Branches"))
        ref_options.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        # bookmarks for HG
        if repo.alias == 'hg':
            bookmarks_group = (
                [(f'book:{safe_str(b)}', safe_str(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            ref_options.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        # tags
        tags_group = (
            [(f'tag:{safe_str(t)}', safe_str(t))
             for t in repo.tags],
            _("Tags"))
        ref_options.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, ref_options

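    # Shape note (illustrative, not from the original source): `choices` is a
    # flat list of ref ids like 'branch:default' or 'tag:v1.0', while
    # `ref_options` mixes the default (ref, label) pair with
    # ([(ref, label), ...], group_label) tuples for Branches, Bookmarks
    # (hg only) and Tags.
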
    def get_server_info(self, environ=None):
        server_info = get_system_info(environ)
        return server_info