##// END OF EJS Templates
svn: enable hooks and integration framework execution....
marcink -
r2677:25d65914 default
parent child Browse files
Show More
@@ -1,54 +1,51 b''
1 1 # top level files
2 2
3 3 include MANIFEST.in
4 4 include README.rst
5 5 include CHANGES.rst
6 6 include LICENSE.txt
7 7
8 8 include rhodecode/VERSION
9 9
10 10 # docs
11 11 recursive-include docs *
12 12
13 13 # all config files
14 14 recursive-include configs *
15 15
16 16 # translations
17 17 recursive-include rhodecode/i18n *
18 18
19 # hook templates
20 recursive-include rhodecode/config/hook_templates *
21
22 19 # non-python core stuff
23 20 recursive-include rhodecode *.cfg
24 21 recursive-include rhodecode *.json
25 22 recursive-include rhodecode *.ini_tmpl
26 23 recursive-include rhodecode *.sh
27 24 recursive-include rhodecode *.mako
28 25
29 26 # 502 page
30 27 include rhodecode/public/502.html
31 28
32 29
33 30 # images, css
34 31 include rhodecode/public/css/*.css
35 32 include rhodecode/public/images/*.*
36 33 include rhodecode/public/images/ee_features/*.*
37 34
38 35 # sound files
39 36 include rhodecode/public/sounds/*.mp3
40 37 include rhodecode/public/sounds/*.wav
41 38
42 39 # fonts
43 40 recursive-include rhodecode/public/fonts/ProximaNova *
44 41 recursive-include rhodecode/public/fonts/RCIcons *
45 42
46 43 # js
47 44 recursive-include rhodecode/public/js *
48 45
49 46 # templates
50 47 recursive-include rhodecode/templates *
51 48
52 49 # skip any tests files
53 50 recursive-exclude rhodecode/tests *
54 51
@@ -1,140 +1,141 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import pytest
23 23
24 24 from rhodecode.api.tests.utils import build_data, api_call, assert_error
25 25
26 26
27 27 @pytest.mark.usefixtures("testuser_api", "app")
28 28 class TestGetRepoChangeset(object):
29 29 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
30 30 def test_get_repo_changeset(self, details, backend):
31 31 commit = backend.repo.get_commit(commit_idx=0)
32 32 __, params = build_data(
33 33 self.apikey, 'get_repo_changeset',
34 34 repoid=backend.repo_name, revision=commit.raw_id,
35 35 details=details,
36 36 )
37 37 response = api_call(self.app, params)
38 38 result = response.json['result']
39 39 assert result['revision'] == 0
40 40 assert result['raw_id'] == commit.raw_id
41 41
42 42 if details == 'full':
43 43 assert result['refs']['bookmarks'] == getattr(
44 44 commit, 'bookmarks', [])
45 assert result['refs']['branches'] == [commit.branch]
45 branches = [commit.branch] if commit.branch else []
46 assert result['refs']['branches'] == branches
46 47 assert result['refs']['tags'] == commit.tags
47 48
48 49 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
49 50 def test_get_repo_changeset_bad_type(self, details, backend):
50 51 id_, params = build_data(
51 52 self.apikey, 'get_repo_changeset',
52 53 repoid=backend.repo_name, revision=0,
53 54 details=details,
54 55 )
55 56 response = api_call(self.app, params)
56 57 expected = 'commit_id must be a string value'
57 58 assert_error(id_, expected, given=response.body)
58 59
59 60 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
60 61 def test_get_repo_changesets(self, details, backend):
61 62 limit = 2
62 63 commit = backend.repo.get_commit(commit_idx=0)
63 64 __, params = build_data(
64 65 self.apikey, 'get_repo_changesets',
65 66 repoid=backend.repo_name, start_rev=commit.raw_id, limit=limit,
66 67 details=details,
67 68 )
68 69 response = api_call(self.app, params)
69 70 result = response.json['result']
70 71 assert result
71 72 assert len(result) == limit
72 73 for x in xrange(limit):
73 74 assert result[x]['revision'] == x
74 75
75 76 if details == 'full':
76 77 for x in xrange(limit):
77 78 assert 'bookmarks' in result[x]['refs']
78 79 assert 'branches' in result[x]['refs']
79 80 assert 'tags' in result[x]['refs']
80 81
81 82 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
82 83 @pytest.mark.parametrize("start_rev, expected_revision", [
83 84 ("0", 0),
84 85 ("10", 10),
85 86 ("20", 20),
86 87 ])
87 88 @pytest.mark.backends("hg", "git")
88 89 def test_get_repo_changesets_commit_range(
89 90 self, details, backend, start_rev, expected_revision):
90 91 limit = 10
91 92 __, params = build_data(
92 93 self.apikey, 'get_repo_changesets',
93 94 repoid=backend.repo_name, start_rev=start_rev, limit=limit,
94 95 details=details,
95 96 )
96 97 response = api_call(self.app, params)
97 98 result = response.json['result']
98 99 assert result
99 100 assert len(result) == limit
100 101 for i in xrange(limit):
101 102 assert result[i]['revision'] == int(expected_revision) + i
102 103
103 104 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
104 105 @pytest.mark.parametrize("start_rev, expected_revision", [
105 106 ("0", 0),
106 107 ("10", 9),
107 108 ("20", 19),
108 109 ])
109 110 def test_get_repo_changesets_commit_range_svn(
110 111 self, details, backend_svn, start_rev, expected_revision):
111 112
112 113 # TODO: johbo: SVN showed a problem here: The parameter "start_rev"
113 114 # in our API allows to pass in a "Commit ID" as well as a
114 115 # "Commit Index". In the case of Subversion it is not possible to
115 116 # distinguish these cases. As a workaround we implemented this
116 117 # behavior which gives a preference to see it as a "Commit ID".
117 118
118 119 limit = 10
119 120 __, params = build_data(
120 121 self.apikey, 'get_repo_changesets',
121 122 repoid=backend_svn.repo_name, start_rev=start_rev, limit=limit,
122 123 details=details,
123 124 )
124 125 response = api_call(self.app, params)
125 126 result = response.json['result']
126 127 assert result
127 128 assert len(result) == limit
128 129 for i in xrange(limit):
129 130 assert result[i]['revision'] == int(expected_revision) + i
130 131
131 132 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
132 133 def test_get_repo_changesets_bad_type(self, details, backend):
133 134 id_, params = build_data(
134 135 self.apikey, 'get_repo_changesets',
135 136 repoid=backend.repo_name, start_rev=0, limit=2,
136 137 details=details,
137 138 )
138 139 response = api_call(self.app, params)
139 140 expected = 'commit_id must be a string value'
140 141 assert_error(id_, expected, given=response.body)
@@ -1,251 +1,313 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 import json
21 import os
22 import time
22 23 import logging
24 import tempfile
23 25 import traceback
24 26 import threading
27
25 28 from BaseHTTPServer import BaseHTTPRequestHandler
26 29 from SocketServer import TCPServer
27 30
28 31 import rhodecode
29 32 from rhodecode.model import meta
30 33 from rhodecode.lib.base import bootstrap_request, bootstrap_config
31 34 from rhodecode.lib import hooks_base
32 35 from rhodecode.lib.utils2 import AttributeDict
36 from rhodecode.lib.ext_json import json
33 37
34 38
35 39 log = logging.getLogger(__name__)
36 40
37 41
38 42 class HooksHttpHandler(BaseHTTPRequestHandler):
43
39 44 def do_POST(self):
40 45 method, extras = self._read_request()
46 txn_id = getattr(self.server, 'txn_id', None)
47 if txn_id:
48 from rhodecode.lib.caches import compute_key_from_params
49 log.debug('Computing TXN_ID based on `%s`:`%s`',
50 extras['repository'], extras['txn_id'])
51 computed_txn_id = compute_key_from_params(
52 extras['repository'], extras['txn_id'])
53 if txn_id != computed_txn_id:
54 raise Exception(
55 'TXN ID fail: expected {} got {} instead'.format(
56 txn_id, computed_txn_id))
57
41 58 try:
42 59 result = self._call_hook(method, extras)
43 60 except Exception as e:
44 61 exc_tb = traceback.format_exc()
45 62 result = {
46 63 'exception': e.__class__.__name__,
47 64 'exception_traceback': exc_tb,
48 65 'exception_args': e.args
49 66 }
50 67 self._write_response(result)
51 68
52 69 def _read_request(self):
53 70 length = int(self.headers['Content-Length'])
54 71 body = self.rfile.read(length).decode('utf-8')
55 72 data = json.loads(body)
56 73 return data['method'], data['extras']
57 74
58 75 def _write_response(self, result):
59 76 self.send_response(200)
60 77 self.send_header("Content-type", "text/json")
61 78 self.end_headers()
62 79 self.wfile.write(json.dumps(result))
63 80
64 81 def _call_hook(self, method, extras):
65 82 hooks = Hooks()
66 83 try:
67 84 result = getattr(hooks, method)(extras)
68 85 finally:
69 86 meta.Session.remove()
70 87 return result
71 88
72 89 def log_message(self, format, *args):
73 90 """
74 91 This is an overridden method of BaseHTTPRequestHandler which logs using
75 92 logging library instead of writing directly to stderr.
76 93 """
77 94
78 95 message = format % args
79 96
80 # TODO: mikhail: add different log levels support
81 97 log.debug(
82 98 "%s - - [%s] %s", self.client_address[0],
83 99 self.log_date_time_string(), message)
84 100
85 101
86 102 class DummyHooksCallbackDaemon(object):
103 hooks_uri = ''
104
87 105 def __init__(self):
88 106 self.hooks_module = Hooks.__module__
89 107
90 108 def __enter__(self):
91 109 log.debug('Running dummy hooks callback daemon')
92 110 return self
93 111
94 112 def __exit__(self, exc_type, exc_val, exc_tb):
95 113 log.debug('Exiting dummy hooks callback daemon')
96 114
97 115
98 116 class ThreadedHookCallbackDaemon(object):
99 117
100 118 _callback_thread = None
101 119 _daemon = None
102 120 _done = False
103 121
104 def __init__(self):
105 self._prepare()
122 def __init__(self, txn_id=None, port=None):
123 self._prepare(txn_id=txn_id, port=port)
106 124
107 125 def __enter__(self):
108 126 self._run()
109 127 return self
110 128
111 129 def __exit__(self, exc_type, exc_val, exc_tb):
112 130 log.debug('Callback daemon exiting now...')
113 131 self._stop()
114 132
115 def _prepare(self):
133 def _prepare(self, txn_id=None, port=None):
116 134 raise NotImplementedError()
117 135
118 136 def _run(self):
119 137 raise NotImplementedError()
120 138
121 139 def _stop(self):
122 140 raise NotImplementedError()
123 141
124 142
125 143 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
126 144 """
127 145 Context manager which will run a callback daemon in a background thread.
128 146 """
129 147
130 148 hooks_uri = None
131 149
132 150 IP_ADDRESS = '127.0.0.1'
133 151
134 152 # From Python docs: Polling reduces our responsiveness to a shutdown
135 153 # request and wastes cpu at all other times.
136 154 POLL_INTERVAL = 0.01
137 155
138 def _prepare(self):
139 log.debug("Preparing HTTP callback daemon and registering hook object")
140
156 def _prepare(self, txn_id=None, port=None):
141 157 self._done = False
142 self._daemon = TCPServer((self.IP_ADDRESS, 0), HooksHttpHandler)
158 self._daemon = TCPServer((self.IP_ADDRESS, port or 0), HooksHttpHandler)
143 159 _, port = self._daemon.server_address
144 160 self.hooks_uri = '{}:{}'.format(self.IP_ADDRESS, port)
161 self.txn_id = txn_id
162 # inject transaction_id for later verification
163 self._daemon.txn_id = self.txn_id
145 164
146 log.debug("Hooks uri is: %s", self.hooks_uri)
165 log.debug(
166 "Preparing HTTP callback daemon at `%s` and registering hook object",
167 self.hooks_uri)
147 168
148 169 def _run(self):
149 170 log.debug("Running event loop of callback daemon in background thread")
150 171 callback_thread = threading.Thread(
151 172 target=self._daemon.serve_forever,
152 173 kwargs={'poll_interval': self.POLL_INTERVAL})
153 174 callback_thread.daemon = True
154 175 callback_thread.start()
155 176 self._callback_thread = callback_thread
156 177
157 178 def _stop(self):
158 179 log.debug("Waiting for background thread to finish.")
159 180 self._daemon.shutdown()
160 181 self._callback_thread.join()
161 182 self._daemon = None
162 183 self._callback_thread = None
184 if self.txn_id:
185 txn_id_file = get_txn_id_data_path(self.txn_id)
186 log.debug('Cleaning up TXN ID %s', txn_id_file)
187 if os.path.isfile(txn_id_file):
188 os.remove(txn_id_file)
189
163 190 log.debug("Background thread done.")
164 191
165 192
166 def prepare_callback_daemon(extras, protocol, use_direct_calls):
167 callback_daemon = None
193 def get_txn_id_data_path(txn_id):
194 root = tempfile.gettempdir()
195 return os.path.join(root, 'rc_txn_id_{}'.format(txn_id))
196
197
198 def store_txn_id_data(txn_id, data_dict):
199 if not txn_id:
200 log.warning('Cannot store txn_id because it is empty')
201 return
202
203 path = get_txn_id_data_path(txn_id)
204 try:
205 with open(path, 'wb') as f:
206 f.write(json.dumps(data_dict))
207 except Exception:
208 log.exception('Failed to write txn_id metadata')
168 209
210
211 def get_txn_id_from_store(txn_id):
212 """
213 Reads txn_id from store and if present returns the data for callback manager
214 """
215 path = get_txn_id_data_path(txn_id)
216 try:
217 with open(path, 'rb') as f:
218 return json.loads(f.read())
219 except Exception:
220 return {}
221
222
223 def prepare_callback_daemon(extras, protocol, use_direct_calls, txn_id=None):
224 txn_details = get_txn_id_from_store(txn_id)
225 port = txn_details.get('port', 0)
169 226 if use_direct_calls:
170 227 callback_daemon = DummyHooksCallbackDaemon()
171 228 extras['hooks_module'] = callback_daemon.hooks_module
172 229 else:
173 230 if protocol == 'http':
174 callback_daemon = HttpHooksCallbackDaemon()
231 callback_daemon = HttpHooksCallbackDaemon(txn_id=txn_id, port=port)
175 232 else:
176 233 log.error('Unsupported callback daemon protocol "%s"', protocol)
177 234 raise Exception('Unsupported callback daemon protocol.')
178 235
179 extras['hooks_uri'] = callback_daemon.hooks_uri
180 extras['hooks_protocol'] = protocol
236 extras['hooks_uri'] = callback_daemon.hooks_uri
237 extras['hooks_protocol'] = protocol
238 extras['time'] = time.time()
181 239
182 log.debug('Prepared a callback daemon: %s', callback_daemon)
240 # register txn_id
241 extras['txn_id'] = txn_id
242
243 log.debug('Prepared a callback daemon: %s at url `%s`',
244 callback_daemon.__class__.__name__, callback_daemon.hooks_uri)
183 245 return callback_daemon, extras
184 246
185 247
186 248 class Hooks(object):
187 249 """
188 250 Exposes the hooks for remote call backs
189 251 """
190 252
191 253 def repo_size(self, extras):
192 254 log.debug("Called repo_size of %s object", self)
193 255 return self._call_hook(hooks_base.repo_size, extras)
194 256
195 257 def pre_pull(self, extras):
196 258 log.debug("Called pre_pull of %s object", self)
197 259 return self._call_hook(hooks_base.pre_pull, extras)
198 260
199 261 def post_pull(self, extras):
200 262 log.debug("Called post_pull of %s object", self)
201 263 return self._call_hook(hooks_base.post_pull, extras)
202 264
203 265 def pre_push(self, extras):
204 266 log.debug("Called pre_push of %s object", self)
205 267 return self._call_hook(hooks_base.pre_push, extras)
206 268
207 269 def post_push(self, extras):
208 270 log.debug("Called post_push of %s object", self)
209 271 return self._call_hook(hooks_base.post_push, extras)
210 272
211 273 def _call_hook(self, hook, extras):
212 274 extras = AttributeDict(extras)
213 275 server_url = extras['server_url']
214 276 request = bootstrap_request(application_url=server_url)
215 277
216 278 bootstrap_config(request) # inject routes and other interfaces
217 279
218 280 # inject the user for usage in hooks
219 281 request.user = AttributeDict({'username': extras.username,
220 282 'ip_addr': extras.ip,
221 283 'user_id': extras.user_id})
222 284
223 285 extras.request = request
224 286
225 287 try:
226 288 result = hook(extras)
227 289 except Exception as error:
228 290 exc_tb = traceback.format_exc()
229 291 log.exception('Exception when handling hook %s', hook)
230 292 error_args = error.args
231 293 return {
232 294 'status': 128,
233 295 'output': '',
234 296 'exception': type(error).__name__,
235 297 'exception_traceback': exc_tb,
236 298 'exception_args': error_args,
237 299 }
238 300 finally:
239 301 meta.Session.remove()
240 302
241 303 log.debug('Got hook call response %s', result)
242 304 return {
243 305 'status': result.status,
244 306 'output': result.output,
245 307 }
246 308
247 309 def __enter__(self):
248 310 return self
249 311
250 312 def __exit__(self, exc_type, exc_val, exc_tb):
251 313 pass
@@ -1,175 +1,204 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 import base64
21 22 import logging
22 23 import urllib
23 24 from urlparse import urljoin
24 25
25
26 26 import requests
27 27 from webob.exc import HTTPNotAcceptable
28 28
29 from rhodecode.lib import caches
29 30 from rhodecode.lib.middleware import simplevcs
30 31 from rhodecode.lib.utils import is_valid_repo
31 from rhodecode.lib.utils2 import str2bool
32 from rhodecode.lib.utils2 import str2bool, safe_int
33 from rhodecode.lib.ext_json import json
34 from rhodecode.lib.hooks_daemon import store_txn_id_data
35
32 36
33 37 log = logging.getLogger(__name__)
34 38
35 39
36 40 class SimpleSvnApp(object):
37 41 IGNORED_HEADERS = [
38 42 'connection', 'keep-alive', 'content-encoding',
39 43 'transfer-encoding', 'content-length']
40 44 rc_extras = {}
41 45
42
43 46 def __init__(self, config):
44 47 self.config = config
45 48
46 49 def __call__(self, environ, start_response):
47 50 request_headers = self._get_request_headers(environ)
48 51
49 52 data = environ['wsgi.input']
50 53 # johbo: Avoid that we end up with sending the request in chunked
51 54 # transfer encoding (mainly on Gunicorn). If we know the content
52 55 # length, then we should transfer the payload in one request.
53 56 if environ['REQUEST_METHOD'] == 'MKCOL' or 'CONTENT_LENGTH' in environ:
54 57 data = data.read()
58 if data.startswith('(create-txn-with-props'):
59 # store on-the-fly our rc_extra using svn revision properties
60 # those can be read later on in hooks executed so we have a way
61 # to pass in the data into svn hooks
62 rc_data = base64.urlsafe_b64encode(json.dumps(self.rc_extras))
63 rc_data_len = len(rc_data)
64 # header defines data lenght, and serialized data
65 skel = ' rc-scm-extras {} {}'.format(rc_data_len, rc_data)
66 data = data[:-2] + skel + '))'
55 67
56 68 log.debug('Calling: %s method via `%s`', environ['REQUEST_METHOD'],
57 69 self._get_url(environ['PATH_INFO']))
70
58 71 response = requests.request(
59 72 environ['REQUEST_METHOD'], self._get_url(environ['PATH_INFO']),
60 73 data=data, headers=request_headers)
61 74
62 75 if response.status_code not in [200, 401]:
63 76 if response.status_code >= 500:
64 77 log.error('Got SVN response:%s with text:`%s`',
65 78 response, response.text)
66 79 else:
67 80 log.debug('Got SVN response:%s with text:`%s`',
68 81 response, response.text)
69 82 else:
70 83 log.debug('got response code: %s', response.status_code)
71 84
72 85 response_headers = self._get_response_headers(response.headers)
86
87 if response.headers.get('SVN-Txn-name'):
88 svn_tx_id = response.headers.get('SVN-Txn-name')
89 txn_id = caches.compute_key_from_params(
90 self.config['repository'], svn_tx_id)
91 port = safe_int(self.rc_extras['hooks_uri'].split(':')[-1])
92 store_txn_id_data(txn_id, {'port': port})
93
73 94 start_response(
74 95 '{} {}'.format(response.status_code, response.reason),
75 96 response_headers)
76 97 return response.iter_content(chunk_size=1024)
77 98
78 99 def _get_url(self, path):
79 100 url_path = urljoin(
80 101 self.config.get('subversion_http_server_url', ''), path)
81 102 url_path = urllib.quote(url_path, safe="/:=~+!$,;'")
82 103 return url_path
83 104
84 105 def _get_request_headers(self, environ):
85 106 headers = {}
86 107
87 108 for key in environ:
88 109 if not key.startswith('HTTP_'):
89 110 continue
90 111 new_key = key.split('_')
91 112 new_key = [k.capitalize() for k in new_key[1:]]
92 113 new_key = '-'.join(new_key)
93 114 headers[new_key] = environ[key]
94 115
95 116 if 'CONTENT_TYPE' in environ:
96 117 headers['Content-Type'] = environ['CONTENT_TYPE']
97 118
98 119 if 'CONTENT_LENGTH' in environ:
99 120 headers['Content-Length'] = environ['CONTENT_LENGTH']
100 121
101 122 return headers
102 123
103 124 def _get_response_headers(self, headers):
104 125 headers = [
105 126 (h, headers[h])
106 127 for h in headers
107 128 if h.lower() not in self.IGNORED_HEADERS
108 129 ]
109 130
110 131 return headers
111 132
112 133
113 134 class DisabledSimpleSvnApp(object):
114 135 def __init__(self, config):
115 136 self.config = config
116 137
117 138 def __call__(self, environ, start_response):
118 139 reason = 'Cannot handle SVN call because: SVN HTTP Proxy is not enabled'
119 140 log.warning(reason)
120 141 return HTTPNotAcceptable(reason)(environ, start_response)
121 142
122 143
123 144 class SimpleSvn(simplevcs.SimpleVCS):
124 145
125 146 SCM = 'svn'
126 147 READ_ONLY_COMMANDS = ('OPTIONS', 'PROPFIND', 'GET', 'REPORT')
127 148 DEFAULT_HTTP_SERVER = 'http://localhost:8090'
128 149
129 150 def _get_repository_name(self, environ):
130 151 """
131 152 Gets repository name out of PATH_INFO header
132 153
133 154 :param environ: environ where PATH_INFO is stored
134 155 """
135 156 path = environ['PATH_INFO'].split('!')
136 157 repo_name = path[0].strip('/')
137 158
138 159 # SVN includes the whole path in it's requests, including
139 160 # subdirectories inside the repo. Therefore we have to search for
140 161 # the repo root directory.
141 162 if not is_valid_repo(
142 163 repo_name, self.base_path, explicit_scm=self.SCM):
143 164 current_path = ''
144 165 for component in repo_name.split('/'):
145 166 current_path += component
146 167 if is_valid_repo(
147 168 current_path, self.base_path, explicit_scm=self.SCM):
148 169 return current_path
149 170 current_path += '/'
150 171
151 172 return repo_name
152 173
153 174 def _get_action(self, environ):
154 175 return (
155 176 'pull'
156 177 if environ['REQUEST_METHOD'] in self.READ_ONLY_COMMANDS
157 178 else 'push')
158 179
180 def _should_use_callback_daemon(self, extras, environ, action):
181 # only MERGE command triggers hooks, so we don't want to start
182 # hooks server too many times. POST however starts the svn transaction
183 # so we also need to run the init of callback daemon of POST
184 if environ['REQUEST_METHOD'] in ['MERGE', 'POST']:
185 return True
186 return False
187
159 188 def _create_wsgi_app(self, repo_path, repo_name, config):
160 189 if self._is_svn_enabled():
161 190 return SimpleSvnApp(config)
162 191 # we don't have http proxy enabled return dummy request handler
163 192 return DisabledSimpleSvnApp(config)
164 193
165 194 def _is_svn_enabled(self):
166 195 conf = self.repo_vcs_config
167 196 return str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
168 197
169 198 def _create_config(self, extras, repo_name):
170 199 conf = self.repo_vcs_config
171 200 server_url = conf.get('vcs_svn_proxy', 'http_server_url')
172 201 server_url = server_url or self.DEFAULT_HTTP_SERVER
173 202
174 203 extras['subversion_http_server_url'] = server_url
175 204 return extras
@@ -1,648 +1,673 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SimpleVCS middleware for handling protocol request (push/clone etc.)
23 23 It's implemented with basic auth function
24 24 """
25 25
26 26 import os
27 27 import re
28 28 import logging
29 29 import importlib
30 30 from functools import wraps
31 from StringIO import StringIO
32 from lxml import etree
31 33
32 34 import time
33 35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
34 # TODO(marcink): check if we should use webob.exc here ?
36
35 37 from pyramid.httpexceptions import (
36 38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
37 39 from zope.cachedescriptors.property import Lazy as LazyProperty
38 40
39 41 import rhodecode
40 42 from rhodecode.authentication.base import (
41 43 authenticate, get_perms_cache_manager, VCS_TYPE, loadplugin)
42 44 from rhodecode.lib import caches
43 45 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
44 46 from rhodecode.lib.base import (
45 47 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
46 from rhodecode.lib.exceptions import (
47 HTTPLockedRC, HTTPRequirementError, UserCreationError,
48 NotAllowedToCreateUserError)
48 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
49 49 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
50 50 from rhodecode.lib.middleware import appenlight
51 51 from rhodecode.lib.middleware.utils import scm_app_http
52 52 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
53 53 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode
54 54 from rhodecode.lib.vcs.conf import settings as vcs_settings
55 55 from rhodecode.lib.vcs.backends import base
56
56 57 from rhodecode.model import meta
57 58 from rhodecode.model.db import User, Repository, PullRequest
58 59 from rhodecode.model.scm import ScmModel
59 60 from rhodecode.model.pull_request import PullRequestModel
60 61 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
61 62
62 63 log = logging.getLogger(__name__)
63 64
64 65
66 def extract_svn_txn_id(acl_repo_name, data):
67 """
68 Helper method for extraction of svn txn_id from submited XML data during
69 POST operations
70 """
71 try:
72 root = etree.fromstring(data)
73 pat = re.compile(r'/txn/(?P<txn_id>.*)')
74 for el in root:
75 if el.tag == '{DAV:}source':
76 for sub_el in el:
77 if sub_el.tag == '{DAV:}href':
78 match = pat.search(sub_el.text)
79 if match:
80 svn_tx_id = match.groupdict()['txn_id']
81 txn_id = caches.compute_key_from_params(
82 acl_repo_name, svn_tx_id)
83 return txn_id
84 except Exception:
85 log.exception('Failed to extract txn_id')
86
87
65 88 def initialize_generator(factory):
66 89 """
67 90 Initializes the returned generator by draining its first element.
68 91
69 92 This can be used to give a generator an initializer, which is the code
70 93 up to the first yield statement. This decorator enforces that the first
71 94 produced element has the value ``"__init__"`` to make its special
72 95 purpose very explicit in the using code.
73 96 """
74 97
75 98 @wraps(factory)
76 99 def wrapper(*args, **kwargs):
77 100 gen = factory(*args, **kwargs)
78 101 try:
79 102 init = gen.next()
80 103 except StopIteration:
81 104 raise ValueError('Generator must yield at least one element.')
82 105 if init != "__init__":
83 106 raise ValueError('First yielded element must be "__init__".')
84 107 return gen
85 108 return wrapper
86 109
87 110
88 111 class SimpleVCS(object):
89 112 """Common functionality for SCM HTTP handlers."""
90 113
91 114 SCM = 'unknown'
92 115
93 116 acl_repo_name = None
94 117 url_repo_name = None
95 118 vcs_repo_name = None
96 119 rc_extras = {}
97 120
98 121 # We have to handle requests to shadow repositories different than requests
99 122 # to normal repositories. Therefore we have to distinguish them. To do this
100 123 # we use this regex which will match only on URLs pointing to shadow
101 124 # repositories.
102 125 shadow_repo_re = re.compile(
103 126 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
104 127 '(?P<target>{slug_pat})/' # target repo
105 128 'pull-request/(?P<pr_id>\d+)/' # pull request
106 129 'repository$' # shadow repo
107 130 .format(slug_pat=SLUG_RE.pattern))
108 131
109 132 def __init__(self, config, registry):
110 133 self.registry = registry
111 134 self.config = config
112 135 # re-populated by specialized middleware
113 136 self.repo_vcs_config = base.Config()
114 137 self.rhodecode_settings = SettingsModel().get_all_settings(cache=True)
115 138
116 139 registry.rhodecode_settings = self.rhodecode_settings
117 140 # authenticate this VCS request using authfunc
118 141 auth_ret_code_detection = \
119 142 str2bool(self.config.get('auth_ret_code_detection', False))
120 143 self.authenticate = BasicAuth(
121 144 '', authenticate, registry, config.get('auth_ret_code'),
122 145 auth_ret_code_detection)
123 146 self.ip_addr = '0.0.0.0'
124 147
125 148 @LazyProperty
126 149 def global_vcs_config(self):
127 150 try:
128 151 return VcsSettingsModel().get_ui_settings_as_config_obj()
129 152 except Exception:
130 153 return base.Config()
131 154
132 155 @property
133 156 def base_path(self):
134 157 settings_path = self.repo_vcs_config.get(
135 158 *VcsSettingsModel.PATH_SETTING)
136 159
137 160 if not settings_path:
138 161 settings_path = self.global_vcs_config.get(
139 162 *VcsSettingsModel.PATH_SETTING)
140 163
141 164 if not settings_path:
142 165 # try, maybe we passed in explicitly as config option
143 166 settings_path = self.config.get('base_path')
144 167
145 168 if not settings_path:
146 169 raise ValueError('FATAL: base_path is empty')
147 170 return settings_path
148 171
149 172 def set_repo_names(self, environ):
150 173 """
151 174 This will populate the attributes acl_repo_name, url_repo_name,
152 175 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
153 176 shadow) repositories all names are equal. In case of requests to a
154 177 shadow repository the acl-name points to the target repo of the pull
155 178 request and the vcs-name points to the shadow repo file system path.
156 179 The url-name is always the URL used by the vcs client program.
157 180
158 181 Example in case of a shadow repo:
159 182 acl_repo_name = RepoGroup/MyRepo
160 183 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
161 184 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
162 185 """
163 186 # First we set the repo name from URL for all attributes. This is the
164 187 # default if handling normal (non shadow) repo requests.
165 188 self.url_repo_name = self._get_repository_name(environ)
166 189 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
167 190 self.is_shadow_repo = False
168 191
169 192 # Check if this is a request to a shadow repository.
170 193 match = self.shadow_repo_re.match(self.url_repo_name)
171 194 if match:
172 195 match_dict = match.groupdict()
173 196
174 197 # Build acl repo name from regex match.
175 198 acl_repo_name = safe_unicode('{groups}{target}'.format(
176 199 groups=match_dict['groups'] or '',
177 200 target=match_dict['target']))
178 201
179 202 # Retrieve pull request instance by ID from regex match.
180 203 pull_request = PullRequest.get(match_dict['pr_id'])
181 204
182 205 # Only proceed if we got a pull request and if acl repo name from
183 206 # URL equals the target repo name of the pull request.
184 207 if pull_request and (acl_repo_name ==
185 208 pull_request.target_repo.repo_name):
186 209 # Get file system path to shadow repository.
187 210 workspace_id = PullRequestModel()._workspace_id(pull_request)
188 211 target_vcs = pull_request.target_repo.scm_instance()
189 212 vcs_repo_name = target_vcs._get_shadow_repository_path(
190 213 workspace_id)
191 214
192 215 # Store names for later usage.
193 216 self.vcs_repo_name = vcs_repo_name
194 217 self.acl_repo_name = acl_repo_name
195 218 self.is_shadow_repo = True
196 219
197 220 log.debug('Setting all VCS repository names: %s', {
198 221 'acl_repo_name': self.acl_repo_name,
199 222 'url_repo_name': self.url_repo_name,
200 223 'vcs_repo_name': self.vcs_repo_name,
201 224 })
202 225
203 226 @property
204 227 def scm_app(self):
205 228 custom_implementation = self.config['vcs.scm_app_implementation']
206 229 if custom_implementation == 'http':
207 230 log.info('Using HTTP implementation of scm app.')
208 231 scm_app_impl = scm_app_http
209 232 else:
210 233 log.info('Using custom implementation of scm_app: "{}"'.format(
211 234 custom_implementation))
212 235 scm_app_impl = importlib.import_module(custom_implementation)
213 236 return scm_app_impl
214 237
215 238 def _get_by_id(self, repo_name):
216 239 """
217 240 Gets a special pattern _<ID> from clone url and tries to replace it
218 241 with a repository_name for support of _<ID> non changeable urls
219 242 """
220 243
221 244 data = repo_name.split('/')
222 245 if len(data) >= 2:
223 246 from rhodecode.model.repo import RepoModel
224 247 by_id_match = RepoModel().get_repo_by_id(repo_name)
225 248 if by_id_match:
226 249 data[1] = by_id_match.repo_name
227 250
228 251 return safe_str('/'.join(data))
229 252
230 253 def _invalidate_cache(self, repo_name):
231 254 """
232 255 Set's cache for this repository for invalidation on next access
233 256
234 257 :param repo_name: full repo name, also a cache key
235 258 """
236 259 ScmModel().mark_for_invalidation(repo_name)
237 260
238 261 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
239 262 db_repo = Repository.get_by_repo_name(repo_name)
240 263 if not db_repo:
241 264 log.debug('Repository `%s` not found inside the database.',
242 265 repo_name)
243 266 return False
244 267
245 268 if db_repo.repo_type != scm_type:
246 269 log.warning(
247 270 'Repository `%s` have incorrect scm_type, expected %s got %s',
248 271 repo_name, db_repo.repo_type, scm_type)
249 272 return False
250 273
251 274 config = db_repo._config
252 275 config.set('extensions', 'largefiles', '')
253 276 return is_valid_repo(
254 277 repo_name, base_path,
255 278 explicit_scm=scm_type, expect_scm=scm_type, config=config)
256 279
257 280 def valid_and_active_user(self, user):
258 281 """
259 282 Checks if that user is not empty, and if it's actually object it checks
260 283 if he's active.
261 284
262 285 :param user: user object or None
263 286 :return: boolean
264 287 """
265 288 if user is None:
266 289 return False
267 290
268 291 elif user.active:
269 292 return True
270 293
271 294 return False
272 295
273 296 @property
274 297 def is_shadow_repo_dir(self):
275 298 return os.path.isdir(self.vcs_repo_name)
276 299
277 300 def _check_permission(self, action, user, repo_name, ip_addr=None,
278 301 plugin_id='', plugin_cache_active=False, cache_ttl=0):
279 302 """
280 303 Checks permissions using action (push/pull) user and repository
281 304 name. If plugin_cache and ttl is set it will use the plugin which
282 305 authenticated the user to store the cached permissions result for N
283 306 amount of seconds as in cache_ttl
284 307
285 308 :param action: push or pull action
286 309 :param user: user instance
287 310 :param repo_name: repository name
288 311 """
289 312
290 313 # get instance of cache manager configured for a namespace
291 314 cache_manager = get_perms_cache_manager(
292 315 custom_ttl=cache_ttl, suffix=user.user_id)
293 316 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
294 317 plugin_id, plugin_cache_active, cache_ttl)
295 318
296 319 # for environ based password can be empty, but then the validation is
297 320 # on the server that fills in the env data needed for authentication
298 321 _perm_calc_hash = caches.compute_key_from_params(
299 322 plugin_id, action, user.user_id, repo_name, ip_addr)
300 323
301 324 # _authenticate is a wrapper for .auth() method of plugin.
302 325 # it checks if .auth() sends proper data.
303 326 # For RhodeCodeExternalAuthPlugin it also maps users to
304 327 # Database and maps the attributes returned from .auth()
305 328 # to RhodeCode database. If this function returns data
306 329 # then auth is correct.
307 330 start = time.time()
308 331 log.debug('Running plugin `%s` permissions check', plugin_id)
309 332
310 333 def perm_func():
311 334 """
312 335 This function is used internally in Cache of Beaker to calculate
313 336 Results
314 337 """
315 338 log.debug('auth: calculating permission access now...')
316 339 # check IP
317 340 inherit = user.inherit_default_permissions
318 341 ip_allowed = AuthUser.check_ip_allowed(
319 342 user.user_id, ip_addr, inherit_from_default=inherit)
320 343 if ip_allowed:
321 344 log.info('Access for IP:%s allowed', ip_addr)
322 345 else:
323 346 return False
324 347
325 348 if action == 'push':
326 349 perms = ('repository.write', 'repository.admin')
327 350 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
328 351 return False
329 352
330 353 else:
331 354 # any other action need at least read permission
332 355 perms = (
333 356 'repository.read', 'repository.write', 'repository.admin')
334 357 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
335 358 return False
336 359
337 360 return True
338 361
339 362 if plugin_cache_active:
340 363 log.debug('Trying to fetch cached perms by %s', _perm_calc_hash[:6])
341 364 perm_result = cache_manager.get(
342 365 _perm_calc_hash, createfunc=perm_func)
343 366 else:
344 367 perm_result = perm_func()
345 368
346 369 auth_time = time.time() - start
347 370 log.debug('Permissions for plugin `%s` completed in %.3fs, '
348 371 'expiration time of fetched cache %.1fs.',
349 372 plugin_id, auth_time, cache_ttl)
350 373
351 374 return perm_result
352 375
353 376 def _check_ssl(self, environ, start_response):
354 377 """
355 378 Checks the SSL check flag and returns False if SSL is not present
356 379 and required True otherwise
357 380 """
358 381 org_proto = environ['wsgi._org_proto']
359 382 # check if we have SSL required ! if not it's a bad request !
360 383 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
361 384 if require_ssl and org_proto == 'http':
362 385 log.debug(
363 386 'Bad request: detected protocol is `%s` and '
364 387 'SSL/HTTPS is required.', org_proto)
365 388 return False
366 389 return True
367 390
368 391 def _get_default_cache_ttl(self):
369 392 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
370 393 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
371 394 plugin_settings = plugin.get_settings()
372 395 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
373 396 plugin_settings) or (False, 0)
374 397 return plugin_cache_active, cache_ttl
375 398
376 399 def __call__(self, environ, start_response):
377 400 try:
378 401 return self._handle_request(environ, start_response)
379 402 except Exception:
380 403 log.exception("Exception while handling request")
381 404 appenlight.track_exception(environ)
382 405 return HTTPInternalServerError()(environ, start_response)
383 406 finally:
384 407 meta.Session.remove()
385 408
386 409 def _handle_request(self, environ, start_response):
387 410
388 411 if not self._check_ssl(environ, start_response):
389 412 reason = ('SSL required, while RhodeCode was unable '
390 413 'to detect this as SSL request')
391 414 log.debug('User not allowed to proceed, %s', reason)
392 415 return HTTPNotAcceptable(reason)(environ, start_response)
393 416
394 417 if not self.url_repo_name:
395 418 log.warning('Repository name is empty: %s', self.url_repo_name)
396 419 # failed to get repo name, we fail now
397 420 return HTTPNotFound()(environ, start_response)
398 421 log.debug('Extracted repo name is %s', self.url_repo_name)
399 422
400 423 ip_addr = get_ip_addr(environ)
401 424 user_agent = get_user_agent(environ)
402 425 username = None
403 426
404 427 # skip passing error to error controller
405 428 environ['pylons.status_code_redirect'] = True
406 429
407 430 # ======================================================================
408 431 # GET ACTION PULL or PUSH
409 432 # ======================================================================
410 433 action = self._get_action(environ)
411 434
412 435 # ======================================================================
413 436 # Check if this is a request to a shadow repository of a pull request.
414 437 # In this case only pull action is allowed.
415 438 # ======================================================================
416 439 if self.is_shadow_repo and action != 'pull':
417 440 reason = 'Only pull action is allowed for shadow repositories.'
418 441 log.debug('User not allowed to proceed, %s', reason)
419 442 return HTTPNotAcceptable(reason)(environ, start_response)
420 443
421 444 # Check if the shadow repo actually exists, in case someone refers
422 445 # to it, and it has been deleted because of successful merge.
423 446 if self.is_shadow_repo and not self.is_shadow_repo_dir:
424 447 log.debug(
425 448 'Shadow repo detected, and shadow repo dir `%s` is missing',
426 449 self.is_shadow_repo_dir)
427 450 return HTTPNotFound()(environ, start_response)
428 451
429 452 # ======================================================================
430 453 # CHECK ANONYMOUS PERMISSION
431 454 # ======================================================================
432 455 if action in ['pull', 'push']:
433 456 anonymous_user = User.get_default_user()
434 457 username = anonymous_user.username
435 458 if anonymous_user.active:
436 459 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
437 460 # ONLY check permissions if the user is activated
438 461 anonymous_perm = self._check_permission(
439 462 action, anonymous_user, self.acl_repo_name, ip_addr,
440 463 plugin_id='anonymous_access',
441 464 plugin_cache_active=plugin_cache_active,
442 465 cache_ttl=cache_ttl,
443 466 )
444 467 else:
445 468 anonymous_perm = False
446 469
447 470 if not anonymous_user.active or not anonymous_perm:
448 471 if not anonymous_user.active:
449 472 log.debug('Anonymous access is disabled, running '
450 473 'authentication')
451 474
452 475 if not anonymous_perm:
453 476 log.debug('Not enough credentials to access this '
454 477 'repository as anonymous user')
455 478
456 479 username = None
457 480 # ==============================================================
458 481 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
459 482 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
460 483 # ==============================================================
461 484
462 485 # try to auth based on environ, container auth methods
463 486 log.debug('Running PRE-AUTH for container based authentication')
464 487 pre_auth = authenticate(
465 488 '', '', environ, VCS_TYPE, registry=self.registry,
466 489 acl_repo_name=self.acl_repo_name)
467 490 if pre_auth and pre_auth.get('username'):
468 491 username = pre_auth['username']
469 492 log.debug('PRE-AUTH got %s as username', username)
470 493 if pre_auth:
471 494 log.debug('PRE-AUTH successful from %s',
472 495 pre_auth.get('auth_data', {}).get('_plugin'))
473 496
474 497 # If not authenticated by the container, running basic auth
475 498 # before inject the calling repo_name for special scope checks
476 499 self.authenticate.acl_repo_name = self.acl_repo_name
477 500
478 501 plugin_cache_active, cache_ttl = False, 0
479 502 plugin = None
480 503 if not username:
481 504 self.authenticate.realm = self.authenticate.get_rc_realm()
482 505
483 506 try:
484 507 auth_result = self.authenticate(environ)
485 508 except (UserCreationError, NotAllowedToCreateUserError) as e:
486 509 log.error(e)
487 510 reason = safe_str(e)
488 511 return HTTPNotAcceptable(reason)(environ, start_response)
489 512
490 513 if isinstance(auth_result, dict):
491 514 AUTH_TYPE.update(environ, 'basic')
492 515 REMOTE_USER.update(environ, auth_result['username'])
493 516 username = auth_result['username']
494 517 plugin = auth_result.get('auth_data', {}).get('_plugin')
495 518 log.info(
496 519 'MAIN-AUTH successful for user `%s` from %s plugin',
497 520 username, plugin)
498 521
499 522 plugin_cache_active, cache_ttl = auth_result.get(
500 523 'auth_data', {}).get('_ttl_cache') or (False, 0)
501 524 else:
502 525 return auth_result.wsgi_application(
503 526 environ, start_response)
504 527
505 528
506 529 # ==============================================================
507 530 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
508 531 # ==============================================================
509 532 user = User.get_by_username(username)
510 533 if not self.valid_and_active_user(user):
511 534 return HTTPForbidden()(environ, start_response)
512 535 username = user.username
513 536 user.update_lastactivity()
514 537 meta.Session().commit()
515 538
516 539 # check user attributes for password change flag
517 540 user_obj = user
518 541 if user_obj and user_obj.username != User.DEFAULT_USER and \
519 542 user_obj.user_data.get('force_password_change'):
520 543 reason = 'password change required'
521 544 log.debug('User not allowed to authenticate, %s', reason)
522 545 return HTTPNotAcceptable(reason)(environ, start_response)
523 546
524 547 # check permissions for this repository
525 548 perm = self._check_permission(
526 549 action, user, self.acl_repo_name, ip_addr,
527 550 plugin, plugin_cache_active, cache_ttl)
528 551 if not perm:
529 552 return HTTPForbidden()(environ, start_response)
530 553
531 554 # extras are injected into UI object and later available
532 555 # in hooks executed by RhodeCode
533 556 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
534 557 extras = vcs_operation_context(
535 558 environ, repo_name=self.acl_repo_name, username=username,
536 559 action=action, scm=self.SCM, check_locking=check_locking,
537 560 is_shadow_repo=self.is_shadow_repo
538 561 )
539 562
540 563 # ======================================================================
541 564 # REQUEST HANDLING
542 565 # ======================================================================
543 566 repo_path = os.path.join(
544 567 safe_str(self.base_path), safe_str(self.vcs_repo_name))
545 568 log.debug('Repository path is %s', repo_path)
546 569
547 570 fix_PATH()
548 571
549 572 log.info(
550 573 '%s action on %s repo "%s" by "%s" from %s %s',
551 574 action, self.SCM, safe_str(self.url_repo_name),
552 575 safe_str(username), ip_addr, user_agent)
553 576
554 577 return self._generate_vcs_response(
555 578 environ, start_response, repo_path, extras, action)
556 579
557 580 @initialize_generator
558 581 def _generate_vcs_response(
559 582 self, environ, start_response, repo_path, extras, action):
560 583 """
561 584 Returns a generator for the response content.
562 585
563 586 This method is implemented as a generator, so that it can trigger
564 587 the cache validation after all content sent back to the client. It
565 588 also handles the locking exceptions which will be triggered when
566 589 the first chunk is produced by the underlying WSGI application.
567 590 """
568 callback_daemon, extras = self._prepare_callback_daemon(extras)
569 config = self._create_config(extras, self.acl_repo_name)
570 log.debug('HOOKS extras is %s', extras)
571 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
572 app.rc_extras = extras
591 txn_id = ''
592 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
593 # case for SVN, we want to re-use the callback daemon port
594 # so we use the txn_id, for this we peek the body, and still save
595 # it as wsgi.input
596 data = environ['wsgi.input'].read()
597 environ['wsgi.input'] = StringIO(data)
598 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
573 599
574 try:
575 with callback_daemon:
576 try:
577 response = app(environ, start_response)
578 finally:
579 # This statement works together with the decorator
580 # "initialize_generator" above. The decorator ensures that
581 # we hit the first yield statement before the generator is
582 # returned back to the WSGI server. This is needed to
583 # ensure that the call to "app" above triggers the
584 # needed callback to "start_response" before the
585 # generator is actually used.
586 yield "__init__"
600 callback_daemon, extras = self._prepare_callback_daemon(
601 extras, environ, action, txn_id=txn_id)
602 log.debug('HOOKS extras is %s', extras)
603
604 config = self._create_config(extras, self.acl_repo_name)
605 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
606 with callback_daemon:
607 app.rc_extras = extras
587 608
588 for chunk in response:
589 yield chunk
590 except Exception as exc:
591 # TODO: martinb: Exceptions are only raised in case of the Pyro4
592 # backend. Refactor this except block after dropping Pyro4 support.
593 # TODO: johbo: Improve "translating" back the exception.
594 if getattr(exc, '_vcs_kind', None) == 'repo_locked':
595 exc = HTTPLockedRC(*exc.args)
596 _code = rhodecode.CONFIG.get('lock_ret_code')
597 log.debug('Repository LOCKED ret code %s!', (_code,))
598 elif getattr(exc, '_vcs_kind', None) == 'requirement':
599 log.debug(
600 'Repository requires features unknown to this Mercurial')
601 exc = HTTPRequirementError(*exc.args)
602 else:
603 raise
609 try:
610 response = app(environ, start_response)
611 finally:
612 # This statement works together with the decorator
613 # "initialize_generator" above. The decorator ensures that
614 # we hit the first yield statement before the generator is
615 # returned back to the WSGI server. This is needed to
616 # ensure that the call to "app" above triggers the
617 # needed callback to "start_response" before the
618 # generator is actually used.
619 yield "__init__"
604 620
605 for chunk in exc(environ, start_response):
621 # iter content
622 for chunk in response:
606 623 yield chunk
607 finally:
608 # invalidate cache on push
624
609 625 try:
626 # invalidate cache on push
610 627 if action == 'push':
611 628 self._invalidate_cache(self.url_repo_name)
612 629 finally:
613 630 meta.Session.remove()
614 631
615 632 def _get_repository_name(self, environ):
616 633 """Get repository name out of the environmnent
617 634
618 635 :param environ: WSGI environment
619 636 """
620 637 raise NotImplementedError()
621 638
622 639 def _get_action(self, environ):
623 640 """Map request commands into a pull or push command.
624 641
625 642 :param environ: WSGI environment
626 643 """
627 644 raise NotImplementedError()
628 645
629 646 def _create_wsgi_app(self, repo_path, repo_name, config):
630 647 """Return the WSGI app that will finally handle the request."""
631 648 raise NotImplementedError()
632 649
633 650 def _create_config(self, extras, repo_name):
634 651 """Create a safe config representation."""
635 652 raise NotImplementedError()
636 653
637 def _prepare_callback_daemon(self, extras):
654 def _should_use_callback_daemon(self, extras, environ, action):
655 return True
656
657 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
658 direct_calls = vcs_settings.HOOKS_DIRECT_CALLS
659 if not self._should_use_callback_daemon(extras, environ, action):
660 # disable callback daemon for actions that don't require it
661 direct_calls = True
662
638 663 return prepare_callback_daemon(
639 664 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
640 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
665 use_direct_calls=direct_calls, txn_id=txn_id)
641 666
642 667
643 668 def _should_check_locking(query_string):
644 669 # this is kind of hacky, but due to how mercurial handles client-server
645 670 # server see all operation on commit; bookmarks, phases and
646 671 # obsolescence marker in different transaction, we don't want to check
647 672 # locking on those
648 673 return query_string not in ['cmd=listkeys']
@@ -1,781 +1,779 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Utilities library for RhodeCode
23 23 """
24 24
25 25 import datetime
26 26 import decorator
27 27 import json
28 28 import logging
29 29 import os
30 30 import re
31 31 import shutil
32 32 import tempfile
33 33 import traceback
34 34 import tarfile
35 35 import warnings
36 36 import hashlib
37 37 from os.path import join as jn
38 38
39 39 import paste
40 40 import pkg_resources
41 41 from webhelpers.text import collapse, remove_formatting, strip_tags
42 42 from mako import exceptions
43 43 from pyramid.threadlocal import get_current_registry
44 44 from pyramid.request import Request
45 45
46 46 from rhodecode.lib.fakemod import create_module
47 47 from rhodecode.lib.vcs.backends.base import Config
48 48 from rhodecode.lib.vcs.exceptions import VCSError
49 49 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
50 50 from rhodecode.lib.utils2 import (
51 51 safe_str, safe_unicode, get_current_rhodecode_user, md5)
52 52 from rhodecode.model import meta
53 53 from rhodecode.model.db import (
54 54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
55 55 from rhodecode.model.meta import Session
56 56
57 57
58 58 log = logging.getLogger(__name__)
59 59
60 60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
61 61
62 62 # String which contains characters that are not allowed in slug names for
63 63 # repositories or repository groups. It is properly escaped to use it in
64 64 # regular expressions.
65 65 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
66 66
67 67 # Regex that matches forbidden characters in repo/group slugs.
68 68 SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))
69 69
70 70 # Regex that matches allowed characters in repo/group slugs.
71 71 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
72 72
73 73 # Regex that matches whole repo/group slugs.
74 74 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
75 75
76 76 _license_cache = None
77 77
78 78
79 79 def repo_name_slug(value):
80 80 """
81 81 Return slug of name of repository
82 82 This function is called on each creation/modification
83 83 of repository to prevent bad names in repo
84 84 """
85 85 replacement_char = '-'
86 86
87 87 slug = remove_formatting(value)
88 88 slug = SLUG_BAD_CHAR_RE.sub('', slug)
89 89 slug = re.sub('[\s]+', '-', slug)
90 90 slug = collapse(slug, replacement_char)
91 91 return slug
92 92
93 93
94 94 #==============================================================================
95 95 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
96 96 #==============================================================================
97 97 def get_repo_slug(request):
98 98 _repo = ''
99 99
100 100 if hasattr(request, 'db_repo'):
101 101 # if our requests has set db reference use it for name, this
102 102 # translates the example.com/_<id> into proper repo names
103 103 _repo = request.db_repo.repo_name
104 104 elif getattr(request, 'matchdict', None):
105 105 # pyramid
106 106 _repo = request.matchdict.get('repo_name')
107 107
108 108 if _repo:
109 109 _repo = _repo.rstrip('/')
110 110 return _repo
111 111
112 112
113 113 def get_repo_group_slug(request):
114 114 _group = ''
115 115 if hasattr(request, 'db_repo_group'):
116 116 # if our requests has set db reference use it for name, this
117 117 # translates the example.com/_<id> into proper repo group names
118 118 _group = request.db_repo_group.group_name
119 119 elif getattr(request, 'matchdict', None):
120 120 # pyramid
121 121 _group = request.matchdict.get('repo_group_name')
122 122
123 123
124 124 if _group:
125 125 _group = _group.rstrip('/')
126 126 return _group
127 127
128 128
129 129 def get_user_group_slug(request):
130 130 _user_group = ''
131 131
132 132 if hasattr(request, 'db_user_group'):
133 133 _user_group = request.db_user_group.users_group_name
134 134 elif getattr(request, 'matchdict', None):
135 135 # pyramid
136 136 _user_group = request.matchdict.get('user_group_id')
137 137 _user_group_name = request.matchdict.get('user_group_name')
138 138 try:
139 139 if _user_group:
140 140 _user_group = UserGroup.get(_user_group)
141 141 elif _user_group_name:
142 142 _user_group = UserGroup.get_by_group_name(_user_group_name)
143 143
144 144 if _user_group:
145 145 _user_group = _user_group.users_group_name
146 146 except Exception:
147 147 log.exception('Failed to get user group by id and name')
148 148 # catch all failures here
149 149 return None
150 150
151 151 return _user_group
152 152
153 153
154 154 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
155 155 """
156 156 Scans given path for repos and return (name,(type,path)) tuple
157 157
158 158 :param path: path to scan for repositories
159 159 :param recursive: recursive search and return names with subdirs in front
160 160 """
161 161
162 162 # remove ending slash for better results
163 163 path = path.rstrip(os.sep)
164 164 log.debug('now scanning in %s location recursive:%s...', path, recursive)
165 165
166 166 def _get_repos(p):
167 167 dirpaths = _get_dirpaths(p)
168 168 if not _is_dir_writable(p):
169 169 log.warning('repo path without write access: %s', p)
170 170
171 171 for dirpath in dirpaths:
172 172 if os.path.isfile(os.path.join(p, dirpath)):
173 173 continue
174 174 cur_path = os.path.join(p, dirpath)
175 175
176 176 # skip removed repos
177 177 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
178 178 continue
179 179
180 180 #skip .<somethin> dirs
181 181 if dirpath.startswith('.'):
182 182 continue
183 183
184 184 try:
185 185 scm_info = get_scm(cur_path)
186 186 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
187 187 except VCSError:
188 188 if not recursive:
189 189 continue
190 190 #check if this dir containts other repos for recursive scan
191 191 rec_path = os.path.join(p, dirpath)
192 192 if os.path.isdir(rec_path):
193 193 for inner_scm in _get_repos(rec_path):
194 194 yield inner_scm
195 195
196 196 return _get_repos(path)
197 197
198 198
199 199 def _get_dirpaths(p):
200 200 try:
201 201 # OS-independable way of checking if we have at least read-only
202 202 # access or not.
203 203 dirpaths = os.listdir(p)
204 204 except OSError:
205 205 log.warning('ignoring repo path without read access: %s', p)
206 206 return []
207 207
208 208 # os.listpath has a tweak: If a unicode is passed into it, then it tries to
209 209 # decode paths and suddenly returns unicode objects itself. The items it
210 210 # cannot decode are returned as strings and cause issues.
211 211 #
212 212 # Those paths are ignored here until a solid solution for path handling has
213 213 # been built.
214 214 expected_type = type(p)
215 215
216 216 def _has_correct_type(item):
217 217 if type(item) is not expected_type:
218 218 log.error(
219 219 u"Ignoring path %s since it cannot be decoded into unicode.",
220 220 # Using "repr" to make sure that we see the byte value in case
221 221 # of support.
222 222 repr(item))
223 223 return False
224 224 return True
225 225
226 226 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
227 227
228 228 return dirpaths
229 229
230 230
231 231 def _is_dir_writable(path):
232 232 """
233 233 Probe if `path` is writable.
234 234
235 235 Due to trouble on Cygwin / Windows, this is actually probing if it is
236 236 possible to create a file inside of `path`, stat does not produce reliable
237 237 results in this case.
238 238 """
239 239 try:
240 240 with tempfile.TemporaryFile(dir=path):
241 241 pass
242 242 except OSError:
243 243 return False
244 244 return True
245 245
246 246
247 247 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
248 248 """
249 249 Returns True if given path is a valid repository False otherwise.
250 250 If expect_scm param is given also, compare if given scm is the same
251 251 as expected from scm parameter. If explicit_scm is given don't try to
252 252 detect the scm, just use the given one to check if repo is valid
253 253
254 254 :param repo_name:
255 255 :param base_path:
256 256 :param expect_scm:
257 257 :param explicit_scm:
258 258 :param config:
259 259
260 260 :return True: if given path is a valid repository
261 261 """
262 262 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
263 263 log.debug('Checking if `%s` is a valid path for repository. '
264 264 'Explicit type: %s', repo_name, explicit_scm)
265 265
266 266 try:
267 267 if explicit_scm:
268 268 detected_scms = [get_scm_backend(explicit_scm)(
269 269 full_path, config=config).alias]
270 270 else:
271 271 detected_scms = get_scm(full_path)
272 272
273 273 if expect_scm:
274 274 return detected_scms[0] == expect_scm
275 275 log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
276 276 return True
277 277 except VCSError:
278 278 log.debug('path: %s is not a valid repo !', full_path)
279 279 return False
280 280
281 281
282 282 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
283 283 """
284 284 Returns True if given path is a repository group, False otherwise
285 285
286 286 :param repo_name:
287 287 :param base_path:
288 288 """
289 289 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
290 290 log.debug('Checking if `%s` is a valid path for repository group',
291 291 repo_group_name)
292 292
293 293 # check if it's not a repo
294 294 if is_valid_repo(repo_group_name, base_path):
295 295 log.debug('Repo called %s exist, it is not a valid '
296 296 'repo group' % repo_group_name)
297 297 return False
298 298
299 299 try:
300 300 # we need to check bare git repos at higher level
301 301 # since we might match branches/hooks/info/objects or possible
302 302 # other things inside bare git repo
303 303 scm_ = get_scm(os.path.dirname(full_path))
304 304 log.debug('path: %s is a vcs object:%s, not valid '
305 305 'repo group' % (full_path, scm_))
306 306 return False
307 307 except VCSError:
308 308 pass
309 309
310 310 # check if it's a valid path
311 311 if skip_path_check or os.path.isdir(full_path):
312 312 log.debug('path: %s is a valid repo group !', full_path)
313 313 return True
314 314
315 315 log.debug('path: %s is not a valid repo group !', full_path)
316 316 return False
317 317
318 318
319 319 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
320 320 while True:
321 321 ok = raw_input(prompt)
322 322 if ok.lower() in ('y', 'ye', 'yes'):
323 323 return True
324 324 if ok.lower() in ('n', 'no', 'nop', 'nope'):
325 325 return False
326 326 retries = retries - 1
327 327 if retries < 0:
328 328 raise IOError
329 329 print(complaint)
330 330
331 331 # propagated from mercurial documentation
332 332 ui_sections = [
333 333 'alias', 'auth',
334 334 'decode/encode', 'defaults',
335 335 'diff', 'email',
336 336 'extensions', 'format',
337 337 'merge-patterns', 'merge-tools',
338 338 'hooks', 'http_proxy',
339 339 'smtp', 'patch',
340 340 'paths', 'profiling',
341 341 'server', 'trusted',
342 342 'ui', 'web', ]
343 343
344 344
345 345 def config_data_from_db(clear_session=True, repo=None):
346 346 """
347 347 Read the configuration data from the database and return configuration
348 348 tuples.
349 349 """
350 350 from rhodecode.model.settings import VcsSettingsModel
351 351
352 352 config = []
353 353
354 354 sa = meta.Session()
355 355 settings_model = VcsSettingsModel(repo=repo, sa=sa)
356 356
357 357 ui_settings = settings_model.get_ui_settings()
358 358
359 359 ui_data = []
360 360 for setting in ui_settings:
361 361 if setting.active:
362 362 ui_data.append((setting.section, setting.key, setting.value))
363 363 config.append((
364 364 safe_str(setting.section), safe_str(setting.key),
365 365 safe_str(setting.value)))
366 366 if setting.key == 'push_ssl':
367 367 # force set push_ssl requirement to False, rhodecode
368 368 # handles that
369 369 config.append((
370 370 safe_str(setting.section), safe_str(setting.key), False))
371 371 log.debug(
372 372 'settings ui from db: %s',
373 373 ','.join(map(lambda s: '[{}] {}={}'.format(*s), ui_data)))
374 374 if clear_session:
375 375 meta.Session.remove()
376 376
377 377 # TODO: mikhail: probably it makes no sense to re-read hooks information.
378 378 # It's already there and activated/deactivated
379 379 skip_entries = []
380 380 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
381 381 if 'pull' not in enabled_hook_classes:
382 382 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
383 383 if 'push' not in enabled_hook_classes:
384 384 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
385 385 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
386 386 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
387 387
388 388 config = [entry for entry in config if entry[:2] not in skip_entries]
389 389
390 390 return config
391 391
392 392
393 393 def make_db_config(clear_session=True, repo=None):
394 394 """
395 395 Create a :class:`Config` instance based on the values in the database.
396 396 """
397 397 config = Config()
398 398 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
399 399 for section, option, value in config_data:
400 400 config.set(section, option, value)
401 401 return config
402 402
403 403
404 404 def get_enabled_hook_classes(ui_settings):
405 405 """
406 406 Return the enabled hook classes.
407 407
408 408 :param ui_settings: List of ui_settings as returned
409 409 by :meth:`VcsSettingsModel.get_ui_settings`
410 410
411 411 :return: a list with the enabled hook classes. The order is not guaranteed.
412 412 :rtype: list
413 413 """
414 414 enabled_hooks = []
415 415 active_hook_keys = [
416 416 key for section, key, value, active in ui_settings
417 417 if section == 'hooks' and active]
418 418
419 419 hook_names = {
420 420 RhodeCodeUi.HOOK_PUSH: 'push',
421 421 RhodeCodeUi.HOOK_PULL: 'pull',
422 422 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
423 423 }
424 424
425 425 for key in active_hook_keys:
426 426 hook = hook_names.get(key)
427 427 if hook:
428 428 enabled_hooks.append(hook)
429 429
430 430 return enabled_hooks
431 431
432 432
433 433 def set_rhodecode_config(config):
434 434 """
435 435 Updates pyramid config with new settings from database
436 436
437 437 :param config:
438 438 """
439 439 from rhodecode.model.settings import SettingsModel
440 440 app_settings = SettingsModel().get_all_settings()
441 441
442 442 for k, v in app_settings.items():
443 443 config[k] = v
444 444
445 445
446 446 def get_rhodecode_realm():
447 447 """
448 448 Return the rhodecode realm from database.
449 449 """
450 450 from rhodecode.model.settings import SettingsModel
451 451 realm = SettingsModel().get_setting_by_name('realm')
452 452 return safe_str(realm.app_settings_value)
453 453
454 454
455 455 def get_rhodecode_base_path():
456 456 """
457 457 Returns the base path. The base path is the filesystem path which points
458 458 to the repository store.
459 459 """
460 460 from rhodecode.model.settings import SettingsModel
461 461 paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
462 462 return safe_str(paths_ui.ui_value)
463 463
464 464
465 465 def map_groups(path):
466 466 """
467 467 Given a full path to a repository, create all nested groups that this
468 468 repo is inside. This function creates parent-child relationships between
469 469 groups and creates default perms for all new groups.
470 470
471 471 :param paths: full path to repository
472 472 """
473 473 from rhodecode.model.repo_group import RepoGroupModel
474 474 sa = meta.Session()
475 475 groups = path.split(Repository.NAME_SEP)
476 476 parent = None
477 477 group = None
478 478
479 479 # last element is repo in nested groups structure
480 480 groups = groups[:-1]
481 481 rgm = RepoGroupModel(sa)
482 482 owner = User.get_first_super_admin()
483 483 for lvl, group_name in enumerate(groups):
484 484 group_name = '/'.join(groups[:lvl] + [group_name])
485 485 group = RepoGroup.get_by_group_name(group_name)
486 486 desc = '%s group' % group_name
487 487
488 488 # skip folders that are now removed repos
489 489 if REMOVED_REPO_PAT.match(group_name):
490 490 break
491 491
492 492 if group is None:
493 493 log.debug('creating group level: %s group_name: %s',
494 494 lvl, group_name)
495 495 group = RepoGroup(group_name, parent)
496 496 group.group_description = desc
497 497 group.user = owner
498 498 sa.add(group)
499 499 perm_obj = rgm._create_default_perms(group)
500 500 sa.add(perm_obj)
501 501 sa.flush()
502 502
503 503 parent = group
504 504 return group
505 505
506 506
507 507 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
508 508 """
509 509 maps all repos given in initial_repo_list, non existing repositories
510 510 are created, if remove_obsolete is True it also checks for db entries
511 511 that are not in initial_repo_list and removes them.
512 512
513 513 :param initial_repo_list: list of repositories found by scanning methods
514 514 :param remove_obsolete: check for obsolete entries in database
515 515 """
516 516 from rhodecode.model.repo import RepoModel
517 from rhodecode.model.scm import ScmModel
518 517 from rhodecode.model.repo_group import RepoGroupModel
519 518 from rhodecode.model.settings import SettingsModel
520 519
521 520 sa = meta.Session()
522 521 repo_model = RepoModel()
523 522 user = User.get_first_super_admin()
524 523 added = []
525 524
526 525 # creation defaults
527 526 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
528 527 enable_statistics = defs.get('repo_enable_statistics')
529 528 enable_locking = defs.get('repo_enable_locking')
530 529 enable_downloads = defs.get('repo_enable_downloads')
531 530 private = defs.get('repo_private')
532 531
533 532 for name, repo in initial_repo_list.items():
534 533 group = map_groups(name)
535 534 unicode_name = safe_unicode(name)
536 535 db_repo = repo_model.get_by_repo_name(unicode_name)
537 536 # found repo that is on filesystem not in RhodeCode database
538 537 if not db_repo:
539 538 log.info('repository %s not found, creating now', name)
540 539 added.append(name)
541 540 desc = (repo.description
542 541 if repo.description != 'unknown'
543 542 else '%s repository' % name)
544 543
545 544 db_repo = repo_model._create_repo(
546 545 repo_name=name,
547 546 repo_type=repo.alias,
548 547 description=desc,
549 548 repo_group=getattr(group, 'group_id', None),
550 549 owner=user,
551 550 enable_locking=enable_locking,
552 551 enable_downloads=enable_downloads,
553 552 enable_statistics=enable_statistics,
554 553 private=private,
555 554 state=Repository.STATE_CREATED
556 555 )
557 556 sa.commit()
558 557 # we added that repo just now, and make sure we updated server info
559 558 if db_repo.repo_type == 'git':
560 559 git_repo = db_repo.scm_instance()
561 560 # update repository server-info
562 561 log.debug('Running update server info')
563 562 git_repo._update_server_info()
564 563
565 564 db_repo.update_commit_cache()
566 565
567 566 config = db_repo._config
568 567 config.set('extensions', 'largefiles', '')
569 ScmModel().install_hooks(
570 db_repo.scm_instance(config=config),
571 repo_type=db_repo.repo_type)
568 repo = db_repo.scm_instance(config=config)
569 repo.install_hooks()
572 570
573 571 removed = []
574 572 if remove_obsolete:
575 573 # remove from database those repositories that are not in the filesystem
576 574 for repo in sa.query(Repository).all():
577 575 if repo.repo_name not in initial_repo_list.keys():
578 576 log.debug("Removing non-existing repository found in db `%s`",
579 577 repo.repo_name)
580 578 try:
581 579 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
582 580 sa.commit()
583 581 removed.append(repo.repo_name)
584 582 except Exception:
585 583 # don't hold further removals on error
586 584 log.error(traceback.format_exc())
587 585 sa.rollback()
588 586
589 587 def splitter(full_repo_name):
590 588 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
591 589 gr_name = None
592 590 if len(_parts) == 2:
593 591 gr_name = _parts[0]
594 592 return gr_name
595 593
596 594 initial_repo_group_list = [splitter(x) for x in
597 595 initial_repo_list.keys() if splitter(x)]
598 596
599 597 # remove from database those repository groups that are not in the
600 598 # filesystem due to parent child relationships we need to delete them
601 599 # in a specific order of most nested first
602 600 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
603 601 nested_sort = lambda gr: len(gr.split('/'))
604 602 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
605 603 if group_name not in initial_repo_group_list:
606 604 repo_group = RepoGroup.get_by_group_name(group_name)
607 605 if (repo_group.children.all() or
608 606 not RepoGroupModel().check_exist_filesystem(
609 607 group_name=group_name, exc_on_failure=False)):
610 608 continue
611 609
612 610 log.info(
613 611 'Removing non-existing repository group found in db `%s`',
614 612 group_name)
615 613 try:
616 614 RepoGroupModel(sa).delete(group_name, fs_remove=False)
617 615 sa.commit()
618 616 removed.append(group_name)
619 617 except Exception:
620 618 # don't hold further removals on error
621 619 log.exception(
622 620 'Unable to remove repository group `%s`',
623 621 group_name)
624 622 sa.rollback()
625 623 raise
626 624
627 625 return added, removed
628 626
629 627
630 628 def load_rcextensions(root_path):
631 629 import rhodecode
632 630 from rhodecode.config import conf
633 631
634 632 path = os.path.join(root_path, 'rcextensions', '__init__.py')
635 633 if os.path.isfile(path):
636 634 rcext = create_module('rc', path)
637 635 EXT = rhodecode.EXTENSIONS = rcext
638 636 log.debug('Found rcextensions now loading %s...', rcext)
639 637
640 638 # Additional mappings that are not present in the pygments lexers
641 639 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
642 640
643 641 # auto check if the module is not missing any data, set to default if is
644 642 # this will help autoupdate new feature of rcext module
645 643 #from rhodecode.config import rcextensions
646 644 #for k in dir(rcextensions):
647 645 # if not k.startswith('_') and not hasattr(EXT, k):
648 646 # setattr(EXT, k, getattr(rcextensions, k))
649 647
650 648
651 649 def get_custom_lexer(extension):
652 650 """
653 651 returns a custom lexer if it is defined in rcextensions module, or None
654 652 if there's no custom lexer defined
655 653 """
656 654 import rhodecode
657 655 from pygments import lexers
658 656
659 657 # custom override made by RhodeCode
660 658 if extension in ['mako']:
661 659 return lexers.get_lexer_by_name('html+mako')
662 660
663 661 # check if we didn't define this extension as other lexer
664 662 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
665 663 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
666 664 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
667 665 return lexers.get_lexer_by_name(_lexer_name)
668 666
669 667
670 668 #==============================================================================
671 669 # TEST FUNCTIONS AND CREATORS
672 670 #==============================================================================
673 671 def create_test_index(repo_location, config):
674 672 """
675 673 Makes default test index.
676 674 """
677 675 import rc_testdata
678 676
679 677 rc_testdata.extract_search_index(
680 678 'vcs_search_index', os.path.dirname(config['search.location']))
681 679
682 680
683 681 def create_test_directory(test_path):
684 682 """
685 683 Create test directory if it doesn't exist.
686 684 """
687 685 if not os.path.isdir(test_path):
688 686 log.debug('Creating testdir %s', test_path)
689 687 os.makedirs(test_path)
690 688
691 689
692 690 def create_test_database(test_path, config):
693 691 """
694 692 Makes a fresh database.
695 693 """
696 694 from rhodecode.lib.db_manage import DbManage
697 695
698 696 # PART ONE create db
699 697 dbconf = config['sqlalchemy.db1.url']
700 698 log.debug('making test db %s', dbconf)
701 699
702 700 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
703 701 tests=True, cli_args={'force_ask': True})
704 702 dbmanage.create_tables(override=True)
705 703 dbmanage.set_db_version()
706 704 # for tests dynamically set new root paths based on generated content
707 705 dbmanage.create_settings(dbmanage.config_prompt(test_path))
708 706 dbmanage.create_default_user()
709 707 dbmanage.create_test_admin_and_users()
710 708 dbmanage.create_permissions()
711 709 dbmanage.populate_default_permissions()
712 710 Session().commit()
713 711
714 712
715 713 def create_test_repositories(test_path, config):
716 714 """
717 715 Creates test repositories in the temporary directory. Repositories are
718 716 extracted from archives within the rc_testdata package.
719 717 """
720 718 import rc_testdata
721 719 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
722 720
723 721 log.debug('making test vcs repositories')
724 722
725 723 idx_path = config['search.location']
726 724 data_path = config['cache_dir']
727 725
728 726 # clean index and data
729 727 if idx_path and os.path.exists(idx_path):
730 728 log.debug('remove %s', idx_path)
731 729 shutil.rmtree(idx_path)
732 730
733 731 if data_path and os.path.exists(data_path):
734 732 log.debug('remove %s', data_path)
735 733 shutil.rmtree(data_path)
736 734
737 735 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
738 736 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
739 737
740 738 # Note: Subversion is in the process of being integrated with the system,
741 739 # until we have a properly packed version of the test svn repository, this
742 740 # tries to copy over the repo from a package "rc_testdata"
743 741 svn_repo_path = rc_testdata.get_svn_repo_archive()
744 742 with tarfile.open(svn_repo_path) as tar:
745 743 tar.extractall(jn(test_path, SVN_REPO))
746 744
747 745
748 746 def password_changed(auth_user, session):
749 747 # Never report password change in case of default user or anonymous user.
750 748 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
751 749 return False
752 750
753 751 password_hash = md5(auth_user.password) if auth_user.password else None
754 752 rhodecode_user = session.get('rhodecode_user', {})
755 753 session_password_hash = rhodecode_user.get('password', '')
756 754 return password_hash != session_password_hash
757 755
758 756
759 757 def read_opensource_licenses():
760 758 global _license_cache
761 759
762 760 if not _license_cache:
763 761 licenses = pkg_resources.resource_string(
764 762 'rhodecode', 'config/licenses.json')
765 763 _license_cache = json.loads(licenses)
766 764
767 765 return _license_cache
768 766
769 767
770 768 def generate_platform_uuid():
771 769 """
772 770 Generates platform UUID based on it's name
773 771 """
774 772 import platform
775 773
776 774 try:
777 775 uuid_list = [platform.platform()]
778 776 return hashlib.sha256(':'.join(uuid_list)).hexdigest()
779 777 except Exception as e:
780 778 log.error('Failed to generate host uuid: %s' % e)
781 779 return 'UNDEFINED'
@@ -1,1694 +1,1698 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24
25 25 import collections
26 26 import datetime
27 27 import fnmatch
28 28 import itertools
29 29 import logging
30 30 import os
31 31 import re
32 32 import time
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from rhodecode.lib.utils2 import safe_str, safe_unicode
38 38 from rhodecode.lib.vcs import connection
39 39 from rhodecode.lib.vcs.utils import author_name, author_email
40 40 from rhodecode.lib.vcs.conf import settings
41 41 from rhodecode.lib.vcs.exceptions import (
42 42 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
43 43 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
44 44 NodeDoesNotExistError, NodeNotChangedError, VCSError,
45 45 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
46 46 RepositoryError)
47 47
48 48
49 49 log = logging.getLogger(__name__)
50 50
51 51
# Git-style stat modes for file nodes (Python 2 octal literals; 0o... in py3).
FILEMODE_DEFAULT = 0100644
FILEMODE_EXECUTABLE = 0100755

# (type, name, commit_id) triple identifying a ref (branch/tag/bookmark).
Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
# Result of a server-side merge attempt; ``failure_reason`` carries a
# MergeFailureReason value.
MergeResponse = collections.namedtuple(
    'MergeResponse',
    ('possible', 'executed', 'merge_ref', 'failure_reason'))
59 59
60 60
class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.

    Values are returned in ``MergeResponse.failure_reason``.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads. That does not allow to correctly
    # identify the target location. This could only happen for mercurial
    # branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # A involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11
113 113
class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    WRONG_REF_TYPE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5
143 143
class BaseRepository(object):
    """
    Base Repository for final backends

    .. attribute:: DEFAULT_BRANCH_NAME

       name of default branch (i.e. "trunk" for svn, "master" for git etc.

    .. attribute:: commit_ids

       list of all available commit ids, in ascending order

    .. attribute:: path

       absolute path to the repository

    .. attribute:: bookmarks

       Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
       there are no bookmarks or the backend implementation does not support
       bookmarks.

    .. attribute:: tags

       Mapping from name to :term:`Commit ID` of the tag.

    """

    DEFAULT_BRANCH_NAME = None
    DEFAULT_CONTACT = u"Unknown"
    DEFAULT_DESCRIPTION = u"unknown"
    EMPTY_COMMIT_ID = '0' * 40

    path = None
    # Handle to the remote/vcsserver backend; concrete backends are expected
    # to assign this (it is used by ``install_hooks`` below) — confirm per
    # backend implementation.
    _remote = None

    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be find at the given ``repo_path`` or directory at ``repo_path``
        exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
          would be cloned; requires ``create`` parameter to be set to True -
          raises RepositoryError if src_url is set and create evaluates to
          False
        """
        raise NotImplementedError

    def __repr__(self):
        return '<%s at %s>' % (self.__class__.__name__, self.path)

    def __len__(self):
        return self.count()

    def __eq__(self, other):
        same_instance = isinstance(other, self.__class__)
        return same_instance and other.path == self.path

    def __ne__(self, other):
        return not self.__eq__(other)

    @classmethod
    def get_default_config(cls, default=None):
        config = Config()
        if default and isinstance(default, list):
            for section, key, val in default:
                config.set(section, key, val)
        return config

    @LazyProperty
    def EMPTY_COMMIT(self):
        return EmptyCommit(self.EMPTY_COMMIT_ID)

    @LazyProperty
    def alias(self):
        for k, v in settings.BACKENDS.items():
            if v.split('.')[-1] == str(self.__class__.__name__):
                return k

    @LazyProperty
    def name(self):
        return safe_unicode(os.path.basename(self.path))

    @LazyProperty
    def description(self):
        raise NotImplementedError

    def refs(self):
        """
        returns a `dict` with branches, bookmarks, tags, and closed_branches
        for this repository
        """
        return dict(
            branches=self.branches,
            branches_closed=self.branches_closed,
            tags=self.tags,
            bookmarks=self.bookmarks
        )

    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def branches_closed(self):
        """
        A `dict` which maps tags names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps tags names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def tags(self):
        """
        A `dict` which maps tags names to commit ids.
        """
        raise NotImplementedError

    @LazyProperty
    def size(self):
        """
        Returns combined size in bytes for all repository files
        """
        tip = self.get_commit()
        return tip.size

    def size_at_commit(self, commit_id):
        commit = self.get_commit(commit_id)
        return commit.size

    def is_empty(self):
        return not bool(self.commit_ids)

    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link.
        """
        raise NotImplementedError

    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend
        """
        raise NotImplementedError

    # ==========================================================================
    # COMMITS
    # ==========================================================================

    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError

    def __iter__(self):
        for commit_id in self.commit_ids:
            yield self.get_commit(commit_id=commit_id)

    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date:
        :param end_date:
        :param branch_name:
        :param show_hidden:
        :param pre_load:
        """
        raise NotImplementedError

    def __getitem__(self, key):
        """
        Allows index based access to the commit objects of this repository.
        """
        pre_load = ["author", "branch", "date", "message", "parents"]
        if isinstance(key, slice):
            return self._get_range(key, pre_load)
        return self.get_commit(commit_idx=key, pre_load=pre_load)

    def _get_range(self, slice_obj, pre_load):
        for commit_id in self.commit_ids.__getitem__(slice_obj):
            yield self.get_commit(commit_id=commit_id, pre_load=pre_load)

    def count(self):
        return len(self.commit_ids)

    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        raise NotImplementedError

    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exists
        """
        raise NotImplementedError

    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
          file. If `path1` is also set, this value is only associated to
          `commit2`.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
          parameter works only for backends which support diff generation for
          different paths. Other backends will raise a `ValueError` if `path1`
          is set and has a different value than `path`.
        :param file_path: filter this diff by given path pattern
        """
        raise NotImplementedError

    def strip(self, commit_id, branch=None):
        """
        Strip given commit_id from the repository
        """
        raise NotImplementedError

    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return a latest common ancestor commit if one exists for this repo
        `commit_id1` vs `commit_id2` from `repo2`.

        :param commit_id1: Commit it from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        """
        raise NotImplementedError

    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. Doing a normal compare (``merge=False``), ``None``
        will be returned as ancestor.

        :param commit_id1: Commit it from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
          returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def merge(self, target_ref, source_repo, source_ref, workspace_id,
              user_name='', user_email='', message='', dry_run=False,
              use_rebase=False, close_branch=False):
        """
        Merge the revisions specified in `source_ref` from `source_repo`
        onto the `target_ref` of this repository.

        `source_ref` and `target_ref` are named tupls with the following
        fields `type`, `name` and `commit_id`.

        Returns a MergeResponse named tuple with the following fields
        'possible', 'executed', 'source_commit', 'target_commit',
        'merge_commit'.

        :param target_ref: `target_ref` points to the commit on top of which
           the `source_ref` should be merged.
        :param source_repo: The repository that contains the commits to be
           merged.
        :param source_ref: `source_ref` points to the topmost commit from
           the `source_repo` which should be merged.
        :param workspace_id: `workspace_id` unique identifier.
        :param user_name: Merge commit `user_name`.
        :param user_email: Merge commit `user_email`.
        :param message: Merge commit `message`.
        :param dry_run: If `True` the merge will not take place.
        :param use_rebase: If `True` commits from the source will be rebased
           on top of the target instead of being merged.
        :param close_branch: If `True` branch will be close before merging it
        """
        if dry_run:
            message = message or 'dry_run_merge_message'
            user_email = user_email or 'dry-run-merge@rhodecode.com'
            user_name = user_name or 'Dry-Run User'
        else:
            if not user_name:
                raise ValueError('user_name cannot be empty')
            if not user_email:
                raise ValueError('user_email cannot be empty')
            if not message:
                raise ValueError('message cannot be empty')

        shadow_repository_path = self._maybe_prepare_merge_workspace(
            workspace_id, target_ref, source_ref)

        try:
            return self._merge_repo(
                shadow_repository_path, target_ref, source_repo,
                source_ref, message, user_name, user_email, dry_run=dry_run,
                use_rebase=use_rebase, close_branch=close_branch)
        except RepositoryError:
            log.exception(
                'Unexpected failure when running merge, dry-run=%s',
                dry_run)
            return MergeResponse(
                False, False, None, MergeFailureReason.UNKNOWN)

    def _merge_repo(self, shadow_repository_path, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """Internal implementation of merge."""
        raise NotImplementedError

    def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
        """
        Create the merge workspace.

        :param workspace_id: `workspace_id` unique identifier.
        """
        raise NotImplementedError

    def cleanup_merge_workspace(self, workspace_id):
        """
        Remove merge workspace.

        This function MUST not fail in case there is no workspace associated to
        the given `workspace_id`.

        :param workspace_id: `workspace_id` unique identifier.
        """
        raise NotImplementedError

    # ========== #
    # COMMIT API #
    # ========== #

    @LazyProperty
    def in_memory_commit(self):
        """
        Returns :class:`InMemoryCommit` object for this repository.
        """
        raise NotImplementedError

    # ======================== #
    # UTILITIES FOR SUBCLASSES #
    # ======================== #

    def _validate_diff_commits(self, commit1, commit2):
        """
        Validates that the given commits are related to this repository.

        Intended as a utility for sub classes to have a consistent validation
        of input parameters in methods like :meth:`get_diff`.
        """
        self._validate_commit(commit1)
        self._validate_commit(commit2)
        if (isinstance(commit1, EmptyCommit) and
                isinstance(commit2, EmptyCommit)):
            raise ValueError("Cannot compare two empty commits")

    def _validate_commit(self, commit):
        if not isinstance(commit, BaseCommit):
            raise TypeError(
                "%s is not of type BaseCommit" % repr(commit))
        if commit.repository != self and not isinstance(commit, EmptyCommit):
            raise ValueError(
                "Commit %s must be a valid commit from this repository %s, "
                "related to this repository instead %s." %
                (commit, self, commit.repository))

    def _validate_commit_id(self, commit_id):
        if not isinstance(commit_id, basestring):
            raise TypeError("commit_id must be a string value")

    def _validate_commit_idx(self, commit_idx):
        if not isinstance(commit_idx, (int, long)):
            raise TypeError("commit_idx must be a numeric value")

    def _validate_branch_name(self, branch_name):
        if branch_name and branch_name not in self.branches_all:
            msg = ("Branch %s not found in %s" % (branch_name, self))
            raise BranchDoesNotExistError(msg)

    #
    # Supporting deprecated API parts
    # TODO: johbo: consider to move this into a mixin
    #

    @property
    def EMPTY_CHANGESET(self):
        warnings.warn(
            "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
        return self.EMPTY_COMMIT_ID

    @property
    def revisions(self):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        return self.commit_ids

    @revisions.setter
    def revisions(self, value):
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        self.commit_ids = value

    def get_changeset(self, revision=None, pre_load=None):
        warnings.warn("Use get_commit instead", DeprecationWarning)
        commit_id = None
        commit_idx = None
        if isinstance(revision, basestring):
            commit_id = revision
        else:
            commit_idx = revision
        return self.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)

    def get_changesets(
            self, start=None, end=None, start_date=None, end_date=None,
            branch_name=None, pre_load=None):
        warnings.warn("Use get_commits instead", DeprecationWarning)
        start_id = self._revision_to_commit(start)
        end_id = self._revision_to_commit(end)
        return self.get_commits(
            start_id=start_id, end_id=end_id, start_date=start_date,
            end_date=end_date, branch_name=branch_name, pre_load=pre_load)

    def _revision_to_commit(self, revision):
        """
        Translates a revision to a commit_id

        Helps to support the old changeset based API which allows to use
        commit ids and commit indices interchangeable.
        """
        if revision is None:
            return revision

        if isinstance(revision, basestring):
            commit_id = revision
        else:
            commit_id = self.commit_ids[revision]
        return commit_id

    @property
    def in_memory_changeset(self):
        warnings.warn("Use in_memory_commit instead", DeprecationWarning)
        return self.in_memory_commit

    def get_path_permissions(self, username):
        """
        Returns a path permission checker or None if not supported

        :param username: session user name
        :return: an instance of BasePathPermissionChecker or None
        """
        return None

    def install_hooks(self, force=False):
        # Delegate hook installation to the remote backend; ``force`` is
        # passed through as-is (presumably to overwrite already-installed
        # hooks — confirm against the backend implementation).
        return self._remote.install_hooks(force)
651 655
652 656 class BaseCommit(object):
653 657 """
654 658 Each backend should implement it's commit representation.
655 659
656 660 **Attributes**
657 661
658 662 ``repository``
659 663 repository object within which commit exists
660 664
661 665 ``id``
662 666 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
663 667 just ``tip``.
664 668
665 669 ``raw_id``
666 670 raw commit representation (i.e. full 40 length sha for git
667 671 backend)
668 672
669 673 ``short_id``
670 674 shortened (if apply) version of ``raw_id``; it would be simple
671 675 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
672 676 as ``raw_id`` for subversion
673 677
674 678 ``idx``
675 679 commit index
676 680
677 681 ``files``
678 682 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
679 683
680 684 ``dirs``
681 685 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
682 686
683 687 ``nodes``
684 688 combined list of ``Node`` objects
685 689
686 690 ``author``
687 691 author of the commit, as unicode
688 692
689 693 ``message``
690 694 message of the commit, as unicode
691 695
692 696 ``parents``
693 697 list of parent commits
694 698
695 699 """
696 700
    branch = None
    """
    Depending on the backend this should be set to the branch name of the
    commit. Backends not supporting branches on commits should leave this
    value as ``None``.
    """

    # NOTE(review): bytes literal is formatted with .format() further below
    # (_validate_archive_prefix); this relies on Python 2 str/bytes semantics,
    # since py3 bytes has no .format().
    _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
    """
    This template is used to generate a default prefix for repository archives
    if no prefix has been specified.
    """
709 713
710 714 def __str__(self):
711 715 return '<%s at %s:%s>' % (
712 716 self.__class__.__name__, self.idx, self.short_id)
713 717
714 718 def __repr__(self):
715 719 return self.__str__()
716 720
717 721 def __unicode__(self):
718 722 return u'%s:%s' % (self.idx, self.short_id)
719 723
720 724 def __eq__(self, other):
721 725 same_instance = isinstance(other, self.__class__)
722 726 return same_instance and self.raw_id == other.raw_id
723 727
724 728 def __json__(self):
725 729 parents = []
726 730 try:
727 731 for parent in self.parents:
728 732 parents.append({'raw_id': parent.raw_id})
729 733 except NotImplementedError:
730 734 # empty commit doesn't have parents implemented
731 735 pass
732 736
733 737 return {
734 738 'short_id': self.short_id,
735 739 'raw_id': self.raw_id,
736 740 'revision': self.idx,
737 741 'message': self.message,
738 742 'date': self.date,
739 743 'author': self.author,
740 744 'parents': parents,
741 745 'branch': self.branch
742 746 }
743 747
    def _get_refs(self):
        # Refs pointing at this commit; ``branch`` may be unset for some
        # backends, in which case no branch entry is emitted.
        return {
            'branches': [self.branch] if self.branch else [],
            'bookmarks': getattr(self, 'bookmarks', []),
            'tags': self.tags
        }
750 754
    @LazyProperty
    def last(self):
        """
        ``True`` if this is last commit in repository, ``False``
        otherwise; trying to access this attribute while there is no
        commits would raise `EmptyRepositoryError`
        """
        # a detached commit (no repository) cannot be checked for recency
        if self.repository is None:
            raise CommitError("Cannot check if it's most recent commit")
        return self.raw_id == self.repository.commit_ids[-1]
761 765
    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.

        Abstract; concrete backends must implement this.
        """
        raise NotImplementedError
768 772
769 773 @property
770 774 def merge(self):
771 775 """
772 776 Returns boolean if commit is a merge.
773 777 """
774 778 return len(self.parents) > 1
775 779
    @LazyProperty
    def children(self):
        """
        Returns list of child commits.

        Abstract; concrete backends must implement this.
        """
        raise NotImplementedError
782 786
    # --- abstract identity accessors; backends must implement these -------

    @LazyProperty
    def id(self):
        """
        Returns string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def short_id(self):
        """
        Returns shortened version of ``raw_id`` attribute, as string,
        identifying this commit, useful for presentation to users.
        """
        raise NotImplementedError

    @LazyProperty
    def idx(self):
        """
        Returns integer identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def committer(self):
        """
        Returns committer for this commit
        """
        raise NotImplementedError

    # --- concrete helpers deriving name/email from the full author string -

    @LazyProperty
    def committer_name(self):
        """
        Returns committer name for this commit
        """

        return author_name(self.committer)

    @LazyProperty
    def committer_email(self):
        """
        Returns committer email address for this commit
        """

        return author_email(self.committer)

    @LazyProperty
    def author(self):
        """
        Returns author for this commit
        """

        raise NotImplementedError

    @LazyProperty
    def author_name(self):
        """
        Returns author name for this commit
        """

        return author_name(self.author)

    @LazyProperty
    def author_email(self):
        """
        Returns author email address for this commit
        """

        return author_email(self.author)

    # --- abstract per-path accessors --------------------------------------

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at `path`.
        """
        raise NotImplementedError

    def is_link(self, path):
        """
        Returns ``True`` if given `path` is a symlink
        """
        raise NotImplementedError

    def get_file_content(self, path):
        """
        Returns content of the file at the given `path`.
        """
        raise NotImplementedError

    def get_file_size(self, path):
        """
        Returns size of the file at the given `path`.
        """
        raise NotImplementedError
882 886
883 887 def get_file_commit(self, path, pre_load=None):
884 888 """
885 889 Returns last commit of the file at the given `path`.
886 890
887 891 :param pre_load: Optional. List of commit attributes to load.
888 892 """
889 893 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
890 894 if not commits:
891 895 raise RepositoryError(
892 896 'Failed to fetch history for path {}. '
893 897 'Please check if such path exists in your repository'.format(
894 898 path))
895 899 return commits[0]
896 900
    def get_file_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of :class:`BaseCommit`
        objects for which file at given `path` has been modified.

        :param limit: Optional. Allows to limit the size of the returned
           history. This is intended as a hint to the underlying backend, so
           that it can apply optimizations depending on the limit.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, sha, commit lazy loader and line

        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``.

        :raises ``CommitError``: if node at the given ``path`` is not
           instance of ``DirNode``
        """
        raise NotImplementedError

    def get_node(self, path):
        """
        Returns ``Node`` object from the given ``path``.

        :raises ``NodeDoesNotExistError``: if there is no node at the given
           ``path``
        """
        raise NotImplementedError

    def get_largefile_node(self, path):
        """
        Returns the path to largefile from Mercurial/Git-lfs storage.
        or None if it's not a largefile node
        """
        # default: backends without largefile support report "not a largefile"
        return None
943 947
944 948 def archive_repo(self, file_path, kind='tgz', subrepos=None,
945 949 prefix=None, write_metadata=False, mtime=None):
946 950 """
947 951 Creates an archive containing the contents of the repository.
948 952
949 953 :param file_path: path to the file which to create the archive.
950 954 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
951 955 :param prefix: name of root directory in archive.
952 956 Default is repository name and commit's short_id joined with dash:
953 957 ``"{repo_name}-{short_id}"``.
954 958 :param write_metadata: write a metadata file into archive.
955 959 :param mtime: custom modification time for archive creation, defaults
956 960 to time.time() if not given.
957 961
958 962 :raise VCSError: If prefix has a problem.
959 963 """
960 964 allowed_kinds = settings.ARCHIVE_SPECS.keys()
961 965 if kind not in allowed_kinds:
962 966 raise ImproperArchiveTypeError(
963 967 'Archive kind (%s) not supported use one of %s' %
964 968 (kind, allowed_kinds))
965 969
966 970 prefix = self._validate_archive_prefix(prefix)
967 971
968 972 mtime = mtime or time.mktime(self.date.timetuple())
969 973
970 974 file_info = []
971 975 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
972 976 for _r, _d, files in cur_rev.walk('/'):
973 977 for f in files:
974 978 f_path = os.path.join(prefix, f.path)
975 979 file_info.append(
976 980 (f_path, f.mode, f.is_link(), f.raw_bytes))
977 981
978 982 if write_metadata:
979 983 metadata = [
980 984 ('repo_name', self.repository.name),
981 985 ('rev', self.raw_id),
982 986 ('create_time', mtime),
983 987 ('branch', self.branch),
984 988 ('tags', ','.join(self.tags)),
985 989 ]
986 990 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
987 991 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
988 992
989 993 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
990 994
991 995 def _validate_archive_prefix(self, prefix):
992 996 if prefix is None:
993 997 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
994 998 repo_name=safe_str(self.repository.name),
995 999 short_id=self.short_id)
996 1000 elif not isinstance(prefix, str):
997 1001 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
998 1002 elif prefix.startswith('/'):
999 1003 raise VCSError("Prefix cannot start with leading slash")
1000 1004 elif prefix.strip() == '':
1001 1005 raise VCSError("Prefix cannot be empty")
1002 1006 return prefix
1003 1007
1004 1008 @LazyProperty
1005 1009 def root(self):
1006 1010 """
1007 1011 Returns ``RootNode`` object for this commit.
1008 1012 """
1009 1013 return self.get_node('')
1010 1014
1011 1015 def next(self, branch=None):
1012 1016 """
1013 1017 Returns next commit from current, if branch is gives it will return
1014 1018 next commit belonging to this branch
1015 1019
1016 1020 :param branch: show commits within the given named branch
1017 1021 """
1018 1022 indexes = xrange(self.idx + 1, self.repository.count())
1019 1023 return self._find_next(indexes, branch)
1020 1024
1021 1025 def prev(self, branch=None):
1022 1026 """
1023 1027 Returns previous commit from current, if branch is gives it will
1024 1028 return previous commit belonging to this branch
1025 1029
1026 1030 :param branch: show commit within the given named branch
1027 1031 """
1028 1032 indexes = xrange(self.idx - 1, -1, -1)
1029 1033 return self._find_next(indexes, branch)
1030 1034
1031 1035 def _find_next(self, indexes, branch=None):
1032 1036 if branch and self.branch != branch:
1033 1037 raise VCSError('Branch option used on commit not belonging '
1034 1038 'to that branch')
1035 1039
1036 1040 for next_idx in indexes:
1037 1041 commit = self.repository.get_commit(commit_idx=next_idx)
1038 1042 if branch and branch != commit.branch:
1039 1043 continue
1040 1044 return commit
1041 1045 raise CommitDoesNotExistError
1042 1046
1043 1047 def diff(self, ignore_whitespace=True, context=3):
1044 1048 """
1045 1049 Returns a `Diff` object representing the change made by this commit.
1046 1050 """
1047 1051 parent = (
1048 1052 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1049 1053 diff = self.repository.get_diff(
1050 1054 parent, self,
1051 1055 ignore_whitespace=ignore_whitespace,
1052 1056 context=context)
1053 1057 return diff
1054 1058
1055 1059 @LazyProperty
1056 1060 def added(self):
1057 1061 """
1058 1062 Returns list of added ``FileNode`` objects.
1059 1063 """
1060 1064 raise NotImplementedError
1061 1065
1062 1066 @LazyProperty
1063 1067 def changed(self):
1064 1068 """
1065 1069 Returns list of modified ``FileNode`` objects.
1066 1070 """
1067 1071 raise NotImplementedError
1068 1072
1069 1073 @LazyProperty
1070 1074 def removed(self):
1071 1075 """
1072 1076 Returns list of removed ``FileNode`` objects.
1073 1077 """
1074 1078 raise NotImplementedError
1075 1079
1076 1080 @LazyProperty
1077 1081 def size(self):
1078 1082 """
1079 1083 Returns total number of bytes from contents of all filenodes.
1080 1084 """
1081 1085 return sum((node.size for node in self.get_filenodes_generator()))
1082 1086
1083 1087 def walk(self, topurl=''):
1084 1088 """
1085 1089 Similar to os.walk method. Insted of filesystem it walks through
1086 1090 commit starting at given ``topurl``. Returns generator of tuples
1087 1091 (topnode, dirnodes, filenodes).
1088 1092 """
1089 1093 topnode = self.get_node(topurl)
1090 1094 if not topnode.is_dir():
1091 1095 return
1092 1096 yield (topnode, topnode.dirs, topnode.files)
1093 1097 for dirnode in topnode.dirs:
1094 1098 for tup in self.walk(dirnode.path):
1095 1099 yield tup
1096 1100
1097 1101 def get_filenodes_generator(self):
1098 1102 """
1099 1103 Returns generator that yields *all* file nodes.
1100 1104 """
1101 1105 for topnode, dirs, files in self.walk():
1102 1106 for node in files:
1103 1107 yield node
1104 1108
1105 1109 #
1106 1110 # Utilities for sub classes to support consistent behavior
1107 1111 #
1108 1112
1109 1113 def no_node_at_path(self, path):
1110 1114 return NodeDoesNotExistError(
1111 1115 u"There is no file nor directory at the given path: "
1112 1116 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1113 1117
1114 1118 def _fix_path(self, path):
1115 1119 """
1116 1120 Paths are stored without trailing slash so we need to get rid off it if
1117 1121 needed.
1118 1122 """
1119 1123 return path.rstrip('/')
1120 1124
1121 1125 #
1122 1126 # Deprecated API based on changesets
1123 1127 #
1124 1128
1125 1129 @property
1126 1130 def revision(self):
1127 1131 warnings.warn("Use idx instead", DeprecationWarning)
1128 1132 return self.idx
1129 1133
1130 1134 @revision.setter
1131 1135 def revision(self, value):
1132 1136 warnings.warn("Use idx instead", DeprecationWarning)
1133 1137 self.idx = value
1134 1138
1135 1139 def get_file_changeset(self, path):
1136 1140 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1137 1141 return self.get_file_commit(path)
1138 1142
1139 1143
1140 1144 class BaseChangesetClass(type):
1141 1145
1142 1146 def __instancecheck__(self, instance):
1143 1147 return isinstance(instance, BaseCommit)
1144 1148
1145 1149
1146 1150 class BaseChangeset(BaseCommit):
1147 1151
1148 1152 __metaclass__ = BaseChangesetClass
1149 1153
1150 1154 def __new__(cls, *args, **kwargs):
1151 1155 warnings.warn(
1152 1156 "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
1153 1157 return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1154 1158
1155 1159
1156 1160 class BaseInMemoryCommit(object):
1157 1161 """
1158 1162 Represents differences between repository's state (most recent head) and
1159 1163 changes made *in place*.
1160 1164
1161 1165 **Attributes**
1162 1166
1163 1167 ``repository``
1164 1168 repository object for this in-memory-commit
1165 1169
1166 1170 ``added``
1167 1171 list of ``FileNode`` objects marked as *added*
1168 1172
1169 1173 ``changed``
1170 1174 list of ``FileNode`` objects marked as *changed*
1171 1175
1172 1176 ``removed``
1173 1177 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1174 1178 *removed*
1175 1179
1176 1180 ``parents``
1177 1181 list of :class:`BaseCommit` instances representing parents of
1178 1182 in-memory commit. Should always be 2-element sequence.
1179 1183
1180 1184 """
1181 1185
1182 1186 def __init__(self, repository):
1183 1187 self.repository = repository
1184 1188 self.added = []
1185 1189 self.changed = []
1186 1190 self.removed = []
1187 1191 self.parents = []
1188 1192
1189 1193 def add(self, *filenodes):
1190 1194 """
1191 1195 Marks given ``FileNode`` objects as *to be committed*.
1192 1196
1193 1197 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1194 1198 latest commit
1195 1199 :raises ``NodeAlreadyAddedError``: if node with same path is already
1196 1200 marked as *added*
1197 1201 """
1198 1202 # Check if not already marked as *added* first
1199 1203 for node in filenodes:
1200 1204 if node.path in (n.path for n in self.added):
1201 1205 raise NodeAlreadyAddedError(
1202 1206 "Such FileNode %s is already marked for addition"
1203 1207 % node.path)
1204 1208 for node in filenodes:
1205 1209 self.added.append(node)
1206 1210
1207 1211 def change(self, *filenodes):
1208 1212 """
1209 1213 Marks given ``FileNode`` objects to be *changed* in next commit.
1210 1214
1211 1215 :raises ``EmptyRepositoryError``: if there are no commits yet
1212 1216 :raises ``NodeAlreadyExistsError``: if node with same path is already
1213 1217 marked to be *changed*
1214 1218 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1215 1219 marked to be *removed*
1216 1220 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1217 1221 commit
1218 1222 :raises ``NodeNotChangedError``: if node hasn't really be changed
1219 1223 """
1220 1224 for node in filenodes:
1221 1225 if node.path in (n.path for n in self.removed):
1222 1226 raise NodeAlreadyRemovedError(
1223 1227 "Node at %s is already marked as removed" % node.path)
1224 1228 try:
1225 1229 self.repository.get_commit()
1226 1230 except EmptyRepositoryError:
1227 1231 raise EmptyRepositoryError(
1228 1232 "Nothing to change - try to *add* new nodes rather than "
1229 1233 "changing them")
1230 1234 for node in filenodes:
1231 1235 if node.path in (n.path for n in self.changed):
1232 1236 raise NodeAlreadyChangedError(
1233 1237 "Node at '%s' is already marked as changed" % node.path)
1234 1238 self.changed.append(node)
1235 1239
1236 1240 def remove(self, *filenodes):
1237 1241 """
1238 1242 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1239 1243 *removed* in next commit.
1240 1244
1241 1245 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1242 1246 be *removed*
1243 1247 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1244 1248 be *changed*
1245 1249 """
1246 1250 for node in filenodes:
1247 1251 if node.path in (n.path for n in self.removed):
1248 1252 raise NodeAlreadyRemovedError(
1249 1253 "Node is already marked to for removal at %s" % node.path)
1250 1254 if node.path in (n.path for n in self.changed):
1251 1255 raise NodeAlreadyChangedError(
1252 1256 "Node is already marked to be changed at %s" % node.path)
1253 1257 # We only mark node as *removed* - real removal is done by
1254 1258 # commit method
1255 1259 self.removed.append(node)
1256 1260
1257 1261 def reset(self):
1258 1262 """
1259 1263 Resets this instance to initial state (cleans ``added``, ``changed``
1260 1264 and ``removed`` lists).
1261 1265 """
1262 1266 self.added = []
1263 1267 self.changed = []
1264 1268 self.removed = []
1265 1269 self.parents = []
1266 1270
1267 1271 def get_ipaths(self):
1268 1272 """
1269 1273 Returns generator of paths from nodes marked as added, changed or
1270 1274 removed.
1271 1275 """
1272 1276 for node in itertools.chain(self.added, self.changed, self.removed):
1273 1277 yield node.path
1274 1278
1275 1279 def get_paths(self):
1276 1280 """
1277 1281 Returns list of paths from nodes marked as added, changed or removed.
1278 1282 """
1279 1283 return list(self.get_ipaths())
1280 1284
1281 1285 def check_integrity(self, parents=None):
1282 1286 """
1283 1287 Checks in-memory commit's integrity. Also, sets parents if not
1284 1288 already set.
1285 1289
1286 1290 :raises CommitError: if any error occurs (i.e.
1287 1291 ``NodeDoesNotExistError``).
1288 1292 """
1289 1293 if not self.parents:
1290 1294 parents = parents or []
1291 1295 if len(parents) == 0:
1292 1296 try:
1293 1297 parents = [self.repository.get_commit(), None]
1294 1298 except EmptyRepositoryError:
1295 1299 parents = [None, None]
1296 1300 elif len(parents) == 1:
1297 1301 parents += [None]
1298 1302 self.parents = parents
1299 1303
1300 1304 # Local parents, only if not None
1301 1305 parents = [p for p in self.parents if p]
1302 1306
1303 1307 # Check nodes marked as added
1304 1308 for p in parents:
1305 1309 for node in self.added:
1306 1310 try:
1307 1311 p.get_node(node.path)
1308 1312 except NodeDoesNotExistError:
1309 1313 pass
1310 1314 else:
1311 1315 raise NodeAlreadyExistsError(
1312 1316 "Node `%s` already exists at %s" % (node.path, p))
1313 1317
1314 1318 # Check nodes marked as changed
1315 1319 missing = set(self.changed)
1316 1320 not_changed = set(self.changed)
1317 1321 if self.changed and not parents:
1318 1322 raise NodeDoesNotExistError(str(self.changed[0].path))
1319 1323 for p in parents:
1320 1324 for node in self.changed:
1321 1325 try:
1322 1326 old = p.get_node(node.path)
1323 1327 missing.remove(node)
1324 1328 # if content actually changed, remove node from not_changed
1325 1329 if old.content != node.content:
1326 1330 not_changed.remove(node)
1327 1331 except NodeDoesNotExistError:
1328 1332 pass
1329 1333 if self.changed and missing:
1330 1334 raise NodeDoesNotExistError(
1331 1335 "Node `%s` marked as modified but missing in parents: %s"
1332 1336 % (node.path, parents))
1333 1337
1334 1338 if self.changed and not_changed:
1335 1339 raise NodeNotChangedError(
1336 1340 "Node `%s` wasn't actually changed (parents: %s)"
1337 1341 % (not_changed.pop().path, parents))
1338 1342
1339 1343 # Check nodes marked as removed
1340 1344 if self.removed and not parents:
1341 1345 raise NodeDoesNotExistError(
1342 1346 "Cannot remove node at %s as there "
1343 1347 "were no parents specified" % self.removed[0].path)
1344 1348 really_removed = set()
1345 1349 for p in parents:
1346 1350 for node in self.removed:
1347 1351 try:
1348 1352 p.get_node(node.path)
1349 1353 really_removed.add(node)
1350 1354 except CommitError:
1351 1355 pass
1352 1356 not_removed = set(self.removed) - really_removed
1353 1357 if not_removed:
1354 1358 # TODO: johbo: This code branch does not seem to be covered
1355 1359 raise NodeDoesNotExistError(
1356 1360 "Cannot remove node at %s from "
1357 1361 "following parents: %s" % (not_removed, parents))
1358 1362
1359 1363 def commit(
1360 1364 self, message, author, parents=None, branch=None, date=None,
1361 1365 **kwargs):
1362 1366 """
1363 1367 Performs in-memory commit (doesn't check workdir in any way) and
1364 1368 returns newly created :class:`BaseCommit`. Updates repository's
1365 1369 attribute `commits`.
1366 1370
1367 1371 .. note::
1368 1372
1369 1373 While overriding this method each backend's should call
1370 1374 ``self.check_integrity(parents)`` in the first place.
1371 1375
1372 1376 :param message: message of the commit
1373 1377 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1374 1378 :param parents: single parent or sequence of parents from which commit
1375 1379 would be derived
1376 1380 :param date: ``datetime.datetime`` instance. Defaults to
1377 1381 ``datetime.datetime.now()``.
1378 1382 :param branch: branch name, as string. If none given, default backend's
1379 1383 branch would be used.
1380 1384
1381 1385 :raises ``CommitError``: if any error occurs while committing
1382 1386 """
1383 1387 raise NotImplementedError
1384 1388
1385 1389
1386 1390 class BaseInMemoryChangesetClass(type):
1387 1391
1388 1392 def __instancecheck__(self, instance):
1389 1393 return isinstance(instance, BaseInMemoryCommit)
1390 1394
1391 1395
1392 1396 class BaseInMemoryChangeset(BaseInMemoryCommit):
1393 1397
1394 1398 __metaclass__ = BaseInMemoryChangesetClass
1395 1399
1396 1400 def __new__(cls, *args, **kwargs):
1397 1401 warnings.warn(
1398 1402 "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
1399 1403 return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1400 1404
1401 1405
1402 1406 class EmptyCommit(BaseCommit):
1403 1407 """
1404 1408 An dummy empty commit. It's possible to pass hash when creating
1405 1409 an EmptyCommit
1406 1410 """
1407 1411
1408 1412 def __init__(
1409 1413 self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
1410 1414 message='', author='', date=None):
1411 1415 self._empty_commit_id = commit_id
1412 1416 # TODO: johbo: Solve idx parameter, default value does not make
1413 1417 # too much sense
1414 1418 self.idx = idx
1415 1419 self.message = message
1416 1420 self.author = author
1417 1421 self.date = date or datetime.datetime.fromtimestamp(0)
1418 1422 self.repository = repo
1419 1423 self.alias = alias
1420 1424
1421 1425 @LazyProperty
1422 1426 def raw_id(self):
1423 1427 """
1424 1428 Returns raw string identifying this commit, useful for web
1425 1429 representation.
1426 1430 """
1427 1431
1428 1432 return self._empty_commit_id
1429 1433
1430 1434 @LazyProperty
1431 1435 def branch(self):
1432 1436 if self.alias:
1433 1437 from rhodecode.lib.vcs.backends import get_backend
1434 1438 return get_backend(self.alias).DEFAULT_BRANCH_NAME
1435 1439
1436 1440 @LazyProperty
1437 1441 def short_id(self):
1438 1442 return self.raw_id[:12]
1439 1443
1440 1444 @LazyProperty
1441 1445 def id(self):
1442 1446 return self.raw_id
1443 1447
1444 1448 def get_file_commit(self, path):
1445 1449 return self
1446 1450
1447 1451 def get_file_content(self, path):
1448 1452 return u''
1449 1453
1450 1454 def get_file_size(self, path):
1451 1455 return 0
1452 1456
1453 1457
1454 1458 class EmptyChangesetClass(type):
1455 1459
1456 1460 def __instancecheck__(self, instance):
1457 1461 return isinstance(instance, EmptyCommit)
1458 1462
1459 1463
1460 1464 class EmptyChangeset(EmptyCommit):
1461 1465
1462 1466 __metaclass__ = EmptyChangesetClass
1463 1467
1464 1468 def __new__(cls, *args, **kwargs):
1465 1469 warnings.warn(
1466 1470 "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
1467 1471 return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)
1468 1472
1469 1473 def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
1470 1474 alias=None, revision=-1, message='', author='', date=None):
1471 1475 if requested_revision is not None:
1472 1476 warnings.warn(
1473 1477 "Parameter requested_revision not supported anymore",
1474 1478 DeprecationWarning)
1475 1479 super(EmptyChangeset, self).__init__(
1476 1480 commit_id=cs, repo=repo, alias=alias, idx=revision,
1477 1481 message=message, author=author, date=date)
1478 1482
1479 1483 @property
1480 1484 def revision(self):
1481 1485 warnings.warn("Use idx instead", DeprecationWarning)
1482 1486 return self.idx
1483 1487
1484 1488 @revision.setter
1485 1489 def revision(self, value):
1486 1490 warnings.warn("Use idx instead", DeprecationWarning)
1487 1491 self.idx = value
1488 1492
1489 1493
1490 1494 class EmptyRepository(BaseRepository):
1491 1495 def __init__(self, repo_path=None, config=None, create=False, **kwargs):
1492 1496 pass
1493 1497
1494 1498 def get_diff(self, *args, **kwargs):
1495 1499 from rhodecode.lib.vcs.backends.git.diff import GitDiff
1496 1500 return GitDiff('')
1497 1501
1498 1502
1499 1503 class CollectionGenerator(object):
1500 1504
1501 1505 def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
1502 1506 self.repo = repo
1503 1507 self.commit_ids = commit_ids
1504 1508 # TODO: (oliver) this isn't currently hooked up
1505 1509 self.collection_size = None
1506 1510 self.pre_load = pre_load
1507 1511
1508 1512 def __len__(self):
1509 1513 if self.collection_size is not None:
1510 1514 return self.collection_size
1511 1515 return self.commit_ids.__len__()
1512 1516
1513 1517 def __iter__(self):
1514 1518 for commit_id in self.commit_ids:
1515 1519 # TODO: johbo: Mercurial passes in commit indices or commit ids
1516 1520 yield self._commit_factory(commit_id)
1517 1521
1518 1522 def _commit_factory(self, commit_id):
1519 1523 """
1520 1524 Allows backends to override the way commits are generated.
1521 1525 """
1522 1526 return self.repo.get_commit(commit_id=commit_id,
1523 1527 pre_load=self.pre_load)
1524 1528
1525 1529 def __getslice__(self, i, j):
1526 1530 """
1527 1531 Returns an iterator of sliced repository
1528 1532 """
1529 1533 commit_ids = self.commit_ids[i:j]
1530 1534 return self.__class__(
1531 1535 self.repo, commit_ids, pre_load=self.pre_load)
1532 1536
1533 1537 def __repr__(self):
1534 1538 return '<CollectionGenerator[len:%s]>' % (self.__len__())
1535 1539
1536 1540
1537 1541 class Config(object):
1538 1542 """
1539 1543 Represents the configuration for a repository.
1540 1544
1541 1545 The API is inspired by :class:`ConfigParser.ConfigParser` from the
1542 1546 standard library. It implements only the needed subset.
1543 1547 """
1544 1548
1545 1549 def __init__(self):
1546 1550 self._values = {}
1547 1551
1548 1552 def copy(self):
1549 1553 clone = Config()
1550 1554 for section, values in self._values.items():
1551 1555 clone._values[section] = values.copy()
1552 1556 return clone
1553 1557
1554 1558 def __repr__(self):
1555 1559 return '<Config(%s sections) at %s>' % (
1556 1560 len(self._values), hex(id(self)))
1557 1561
1558 1562 def items(self, section):
1559 1563 return self._values.get(section, {}).iteritems()
1560 1564
1561 1565 def get(self, section, option):
1562 1566 return self._values.get(section, {}).get(option)
1563 1567
1564 1568 def set(self, section, option, value):
1565 1569 section_values = self._values.setdefault(section, {})
1566 1570 section_values[option] = value
1567 1571
1568 1572 def clear_section(self, section):
1569 1573 self._values[section] = {}
1570 1574
1571 1575 def serialize(self):
1572 1576 """
1573 1577 Creates a list of three tuples (section, key, value) representing
1574 1578 this config object.
1575 1579 """
1576 1580 items = []
1577 1581 for section in self._values:
1578 1582 for option, value in self._values[section].items():
1579 1583 items.append(
1580 1584 (safe_str(section), safe_str(option), safe_str(value)))
1581 1585 return items
1582 1586
1583 1587
1584 1588 class Diff(object):
1585 1589 """
1586 1590 Represents a diff result from a repository backend.
1587 1591
1588 1592 Subclasses have to provide a backend specific value for
1589 1593 :attr:`_header_re` and :attr:`_meta_re`.
1590 1594 """
1591 1595 _meta_re = None
1592 1596 _header_re = None
1593 1597
1594 1598 def __init__(self, raw_diff):
1595 1599 self.raw = raw_diff
1596 1600
1597 1601 def chunks(self):
1598 1602 """
1599 1603 split the diff in chunks of separate --git a/file b/file chunks
1600 1604 to make diffs consistent we must prepend with \n, and make sure
1601 1605 we can detect last chunk as this was also has special rule
1602 1606 """
1603 1607
1604 1608 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1605 1609 header = diff_parts[0]
1606 1610
1607 1611 if self._meta_re:
1608 1612 match = self._meta_re.match(header)
1609 1613
1610 1614 chunks = diff_parts[1:]
1611 1615 total_chunks = len(chunks)
1612 1616
1613 1617 return (
1614 1618 DiffChunk(chunk, self, cur_chunk == total_chunks)
1615 1619 for cur_chunk, chunk in enumerate(chunks, start=1))
1616 1620
1617 1621
1618 1622 class DiffChunk(object):
1619 1623
1620 1624 def __init__(self, chunk, diff, last_chunk):
1621 1625 self._diff = diff
1622 1626
1623 1627 # since we split by \ndiff --git that part is lost from original diff
1624 1628 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1625 1629 if not last_chunk:
1626 1630 chunk += '\n'
1627 1631
1628 1632 match = self._diff._header_re.match(chunk)
1629 1633 self.header = match.groupdict()
1630 1634 self.diff = chunk[match.end():]
1631 1635 self.raw = chunk
1632 1636
1633 1637
1634 1638 class BasePathPermissionChecker(object):
1635 1639
1636 1640 @staticmethod
1637 1641 def create_from_patterns(includes, excludes):
1638 1642 if includes and '*' in includes and not excludes:
1639 1643 return AllPathPermissionChecker()
1640 1644 elif excludes and '*' in excludes:
1641 1645 return NonePathPermissionChecker()
1642 1646 else:
1643 1647 return PatternPathPermissionChecker(includes, excludes)
1644 1648
1645 1649 @property
1646 1650 def has_full_access(self):
1647 1651 raise NotImplemented()
1648 1652
1649 1653 def has_access(self, path):
1650 1654 raise NotImplemented()
1651 1655
1652 1656
1653 1657 class AllPathPermissionChecker(BasePathPermissionChecker):
1654 1658
1655 1659 @property
1656 1660 def has_full_access(self):
1657 1661 return True
1658 1662
1659 1663 def has_access(self, path):
1660 1664 return True
1661 1665
1662 1666
1663 1667 class NonePathPermissionChecker(BasePathPermissionChecker):
1664 1668
1665 1669 @property
1666 1670 def has_full_access(self):
1667 1671 return False
1668 1672
1669 1673 def has_access(self, path):
1670 1674 return False
1671 1675
1672 1676
1673 1677 class PatternPathPermissionChecker(BasePathPermissionChecker):
1674 1678
1675 1679 def __init__(self, includes, excludes):
1676 1680 self.includes = includes
1677 1681 self.excludes = excludes
1678 1682 self.includes_re = [] if not includes else [
1679 1683 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1680 1684 self.excludes_re = [] if not excludes else [
1681 1685 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1682 1686
1683 1687 @property
1684 1688 def has_full_access(self):
1685 1689 return '*' in self.includes and not self.excludes
1686 1690
1687 1691 def has_access(self, path):
1688 1692 for regex in self.excludes_re:
1689 1693 if regex.match(path):
1690 1694 return False
1691 1695 for regex in self.includes_re:
1692 1696 if regex.match(path):
1693 1697 return True
1694 1698 return False
@@ -1,1036 +1,1036 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import re
23 23 import shutil
24 24 import time
25 25 import logging
26 26 import traceback
27 27 import datetime
28 28
29 29 from pyramid.threadlocal import get_current_request
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from rhodecode import events
33 33 from rhodecode.lib.auth import HasUserGroupPermissionAny
34 34 from rhodecode.lib.caching_query import FromCache
35 35 from rhodecode.lib.exceptions import AttachedForksError
36 36 from rhodecode.lib.hooks_base import log_delete_repository
37 37 from rhodecode.lib.user_log_filter import user_log_filter
38 38 from rhodecode.lib.utils import make_db_config
39 39 from rhodecode.lib.utils2 import (
40 40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
41 41 get_current_rhodecode_user, safe_int, datetime_to_time,
42 42 action_logger_generic)
43 43 from rhodecode.lib.vcs.backends import get_backend
44 44 from rhodecode.model import BaseModel
45 45 from rhodecode.model.db import (
46 46 _hash_key, joinedload, or_, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 49
50 50 from rhodecode.model.settings import VcsSettingsModel
51 51
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 class RepoModel(BaseModel):
57 57
58 58 cls = Repository
59 59
60 60 def _get_user_group(self, users_group):
61 61 return self._get_instance(UserGroup, users_group,
62 62 callback=UserGroup.get_by_group_name)
63 63
64 64 def _get_repo_group(self, repo_group):
65 65 return self._get_instance(RepoGroup, repo_group,
66 66 callback=RepoGroup.get_by_group_name)
67 67
68 68 def _create_default_perms(self, repository, private):
69 69 # create default permission
70 70 default = 'repository.read'
71 71 def_user = User.get_default_user()
72 72 for p in def_user.user_perms:
73 73 if p.permission.permission_name.startswith('repository.'):
74 74 default = p.permission.permission_name
75 75 break
76 76
77 77 default_perm = 'repository.none' if private else default
78 78
79 79 repo_to_perm = UserRepoToPerm()
80 80 repo_to_perm.permission = Permission.get_by_key(default_perm)
81 81
82 82 repo_to_perm.repository = repository
83 83 repo_to_perm.user_id = def_user.user_id
84 84
85 85 return repo_to_perm
86 86
87 87 @LazyProperty
88 88 def repos_path(self):
89 89 """
90 90 Gets the repositories root path from database
91 91 """
92 92 settings_model = VcsSettingsModel(sa=self.sa)
93 93 return settings_model.get_repos_location()
94 94
95 95 def get(self, repo_id, cache=False):
96 96 repo = self.sa.query(Repository) \
97 97 .filter(Repository.repo_id == repo_id)
98 98
99 99 if cache:
100 100 repo = repo.options(
101 101 FromCache("sql_cache_short", "get_repo_%s" % repo_id))
102 102 return repo.scalar()
103 103
104 104 def get_repo(self, repository):
105 105 return self._get_repo(repository)
106 106
107 107 def get_by_repo_name(self, repo_name, cache=False):
108 108 repo = self.sa.query(Repository) \
109 109 .filter(Repository.repo_name == repo_name)
110 110
111 111 if cache:
112 112 name_key = _hash_key(repo_name)
113 113 repo = repo.options(
114 114 FromCache("sql_cache_short", "get_repo_%s" % name_key))
115 115 return repo.scalar()
116 116
117 117 def _extract_id_from_repo_name(self, repo_name):
118 118 if repo_name.startswith('/'):
119 119 repo_name = repo_name.lstrip('/')
120 120 by_id_match = re.match(r'^_(\d{1,})', repo_name)
121 121 if by_id_match:
122 122 return by_id_match.groups()[0]
123 123
124 124 def get_repo_by_id(self, repo_name):
125 125 """
126 126 Extracts repo_name by id from special urls.
127 127 Example url is _11/repo_name
128 128
129 129 :param repo_name:
130 130 :return: repo object if matched else None
131 131 """
132 132
133 133 try:
134 134 _repo_id = self._extract_id_from_repo_name(repo_name)
135 135 if _repo_id:
136 136 return self.get(_repo_id)
137 137 except Exception:
138 138 log.exception('Failed to extract repo_name from URL')
139 139
140 140 return None
141 141
142 142 def get_repos_for_root(self, root, traverse=False):
143 143 if traverse:
144 144 like_expression = u'{}%'.format(safe_unicode(root))
145 145 repos = Repository.query().filter(
146 146 Repository.repo_name.like(like_expression)).all()
147 147 else:
148 148 if root and not isinstance(root, RepoGroup):
149 149 raise ValueError(
150 150 'Root must be an instance '
151 151 'of RepoGroup, got:{} instead'.format(type(root)))
152 152 repos = Repository.query().filter(Repository.group == root).all()
153 153 return repos
154 154
155 155 def get_url(self, repo, request=None, permalink=False):
156 156 if not request:
157 157 request = get_current_request()
158 158
159 159 if not request:
160 160 return
161 161
162 162 if permalink:
163 163 return request.route_url(
164 164 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
165 165 else:
166 166 return request.route_url(
167 167 'repo_summary', repo_name=safe_str(repo.repo_name))
168 168
169 169 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
170 170 if not request:
171 171 request = get_current_request()
172 172
173 173 if not request:
174 174 return
175 175
176 176 if permalink:
177 177 return request.route_url(
178 178 'repo_commit', repo_name=safe_str(repo.repo_id),
179 179 commit_id=commit_id)
180 180
181 181 else:
182 182 return request.route_url(
183 183 'repo_commit', repo_name=safe_str(repo.repo_name),
184 184 commit_id=commit_id)
185 185
186 186 def get_repo_log(self, repo, filter_term):
187 187 repo_log = UserLog.query()\
188 188 .filter(or_(UserLog.repository_id == repo.repo_id,
189 189 UserLog.repository_name == repo.repo_name))\
190 190 .options(joinedload(UserLog.user))\
191 191 .options(joinedload(UserLog.repository))\
192 192 .order_by(UserLog.action_date.desc())
193 193
194 194 repo_log = user_log_filter(repo_log, filter_term)
195 195 return repo_log
196 196
197 197 @classmethod
198 198 def update_repoinfo(cls, repositories=None):
199 199 if not repositories:
200 200 repositories = Repository.getAll()
201 201 for repo in repositories:
202 202 repo.update_commit_cache()
203 203
204 204 def get_repos_as_dict(self, repo_list=None, admin=False,
205 205 super_user_actions=False):
206 206 _render = get_current_request().get_partial_renderer(
207 207 'rhodecode:templates/data_table/_dt_elements.mako')
208 208 c = _render.get_call_context()
209 209
210 210 def quick_menu(repo_name):
211 211 return _render('quick_menu', repo_name)
212 212
213 213 def repo_lnk(name, rtype, rstate, private, fork_of):
214 214 return _render('repo_name', name, rtype, rstate, private, fork_of,
215 215 short_name=not admin, admin=False)
216 216
217 217 def last_change(last_change):
218 218 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
219 219 last_change = last_change + datetime.timedelta(seconds=
220 220 (datetime.datetime.now() - datetime.datetime.utcnow()).seconds)
221 221 return _render("last_change", last_change)
222 222
223 223 def rss_lnk(repo_name):
224 224 return _render("rss", repo_name)
225 225
226 226 def atom_lnk(repo_name):
227 227 return _render("atom", repo_name)
228 228
229 229 def last_rev(repo_name, cs_cache):
230 230 return _render('revision', repo_name, cs_cache.get('revision'),
231 231 cs_cache.get('raw_id'), cs_cache.get('author'),
232 232 cs_cache.get('message'), cs_cache.get('date'))
233 233
234 234 def desc(desc):
235 235 return _render('repo_desc', desc, c.visual.stylify_metatags)
236 236
237 237 def state(repo_state):
238 238 return _render("repo_state", repo_state)
239 239
240 240 def repo_actions(repo_name):
241 241 return _render('repo_actions', repo_name, super_user_actions)
242 242
243 243 def user_profile(username):
244 244 return _render('user_profile', username)
245 245
246 246 repos_data = []
247 247 for repo in repo_list:
248 248 cs_cache = repo.changeset_cache
249 249 row = {
250 250 "menu": quick_menu(repo.repo_name),
251 251
252 252 "name": repo_lnk(repo.repo_name, repo.repo_type,
253 253 repo.repo_state, repo.private, repo.fork),
254 254 "name_raw": repo.repo_name.lower(),
255 255
256 256 "last_change": last_change(repo.last_db_change),
257 257 "last_change_raw": datetime_to_time(repo.last_db_change),
258 258
259 259 "last_changeset": last_rev(repo.repo_name, cs_cache),
260 260 "last_changeset_raw": cs_cache.get('revision'),
261 261
262 262 "desc": desc(repo.description_safe),
263 263 "owner": user_profile(repo.user.username),
264 264
265 265 "state": state(repo.repo_state),
266 266 "rss": rss_lnk(repo.repo_name),
267 267
268 268 "atom": atom_lnk(repo.repo_name),
269 269 }
270 270 if admin:
271 271 row.update({
272 272 "action": repo_actions(repo.repo_name),
273 273 })
274 274 repos_data.append(row)
275 275
276 276 return repos_data
277 277
278 278 def _get_defaults(self, repo_name):
279 279 """
280 280 Gets information about repository, and returns a dict for
281 281 usage in forms
282 282
283 283 :param repo_name:
284 284 """
285 285
286 286 repo_info = Repository.get_by_repo_name(repo_name)
287 287
288 288 if repo_info is None:
289 289 return None
290 290
291 291 defaults = repo_info.get_dict()
292 292 defaults['repo_name'] = repo_info.just_name
293 293
294 294 groups = repo_info.groups_with_parents
295 295 parent_group = groups[-1] if groups else None
296 296
297 297 # we use -1 as this is how in HTML, we mark an empty group
298 298 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
299 299
300 300 keys_to_process = (
301 301 {'k': 'repo_type', 'strip': False},
302 302 {'k': 'repo_enable_downloads', 'strip': True},
303 303 {'k': 'repo_description', 'strip': True},
304 304 {'k': 'repo_enable_locking', 'strip': True},
305 305 {'k': 'repo_landing_rev', 'strip': True},
306 306 {'k': 'clone_uri', 'strip': False},
307 307 {'k': 'push_uri', 'strip': False},
308 308 {'k': 'repo_private', 'strip': True},
309 309 {'k': 'repo_enable_statistics', 'strip': True}
310 310 )
311 311
312 312 for item in keys_to_process:
313 313 attr = item['k']
314 314 if item['strip']:
315 315 attr = remove_prefix(item['k'], 'repo_')
316 316
317 317 val = defaults[attr]
318 318 if item['k'] == 'repo_landing_rev':
319 319 val = ':'.join(defaults[attr])
320 320 defaults[item['k']] = val
321 321 if item['k'] == 'clone_uri':
322 322 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
323 323 if item['k'] == 'push_uri':
324 324 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
325 325
326 326 # fill owner
327 327 if repo_info.user:
328 328 defaults.update({'user': repo_info.user.username})
329 329 else:
330 330 replacement_user = User.get_first_super_admin().username
331 331 defaults.update({'user': replacement_user})
332 332
333 333 return defaults
334 334
335 335 def update(self, repo, **kwargs):
336 336 try:
337 337 cur_repo = self._get_repo(repo)
338 338 source_repo_name = cur_repo.repo_name
339 339 if 'user' in kwargs:
340 340 cur_repo.user = User.get_by_username(kwargs['user'])
341 341
342 342 if 'repo_group' in kwargs:
343 343 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
344 344 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
345 345
346 346 update_keys = [
347 347 (1, 'repo_description'),
348 348 (1, 'repo_landing_rev'),
349 349 (1, 'repo_private'),
350 350 (1, 'repo_enable_downloads'),
351 351 (1, 'repo_enable_locking'),
352 352 (1, 'repo_enable_statistics'),
353 353 (0, 'clone_uri'),
354 354 (0, 'push_uri'),
355 355 (0, 'fork_id')
356 356 ]
357 357 for strip, k in update_keys:
358 358 if k in kwargs:
359 359 val = kwargs[k]
360 360 if strip:
361 361 k = remove_prefix(k, 'repo_')
362 362
363 363 setattr(cur_repo, k, val)
364 364
365 365 new_name = cur_repo.get_new_name(kwargs['repo_name'])
366 366 cur_repo.repo_name = new_name
367 367
368 368 # if private flag is set, reset default permission to NONE
369 369 if kwargs.get('repo_private'):
370 370 EMPTY_PERM = 'repository.none'
371 371 RepoModel().grant_user_permission(
372 372 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
373 373 )
374 374
375 375 # handle extra fields
376 376 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
377 377 kwargs):
378 378 k = RepositoryField.un_prefix_key(field)
379 379 ex_field = RepositoryField.get_by_key_name(
380 380 key=k, repo=cur_repo)
381 381 if ex_field:
382 382 ex_field.field_value = kwargs[field]
383 383 self.sa.add(ex_field)
384 384 cur_repo.updated_on = datetime.datetime.now()
385 385 self.sa.add(cur_repo)
386 386
387 387 if source_repo_name != new_name:
388 388 # rename repository
389 389 self._rename_filesystem_repo(
390 390 old=source_repo_name, new=new_name)
391 391
392 392 return cur_repo
393 393 except Exception:
394 394 log.error(traceback.format_exc())
395 395 raise
396 396
397 397 def _create_repo(self, repo_name, repo_type, description, owner,
398 398 private=False, clone_uri=None, repo_group=None,
399 399 landing_rev='rev:tip', fork_of=None,
400 400 copy_fork_permissions=False, enable_statistics=False,
401 401 enable_locking=False, enable_downloads=False,
402 402 copy_group_permissions=False,
403 403 state=Repository.STATE_PENDING):
404 404 """
405 405 Create repository inside database with PENDING state, this should be
406 406 only executed by create() repo. With exception of importing existing
407 407 repos
408 408 """
409 409 from rhodecode.model.scm import ScmModel
410 410
411 411 owner = self._get_user(owner)
412 412 fork_of = self._get_repo(fork_of)
413 413 repo_group = self._get_repo_group(safe_int(repo_group))
414 414
415 415 try:
416 416 repo_name = safe_unicode(repo_name)
417 417 description = safe_unicode(description)
418 418 # repo name is just a name of repository
419 419 # while repo_name_full is a full qualified name that is combined
420 420 # with name and path of group
421 421 repo_name_full = repo_name
422 422 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
423 423
424 424 new_repo = Repository()
425 425 new_repo.repo_state = state
426 426 new_repo.enable_statistics = False
427 427 new_repo.repo_name = repo_name_full
428 428 new_repo.repo_type = repo_type
429 429 new_repo.user = owner
430 430 new_repo.group = repo_group
431 431 new_repo.description = description or repo_name
432 432 new_repo.private = private
433 433 new_repo.clone_uri = clone_uri
434 434 new_repo.landing_rev = landing_rev
435 435
436 436 new_repo.enable_statistics = enable_statistics
437 437 new_repo.enable_locking = enable_locking
438 438 new_repo.enable_downloads = enable_downloads
439 439
440 440 if repo_group:
441 441 new_repo.enable_locking = repo_group.enable_locking
442 442
443 443 if fork_of:
444 444 parent_repo = fork_of
445 445 new_repo.fork = parent_repo
446 446
447 447 events.trigger(events.RepoPreCreateEvent(new_repo))
448 448
449 449 self.sa.add(new_repo)
450 450
451 451 EMPTY_PERM = 'repository.none'
452 452 if fork_of and copy_fork_permissions:
453 453 repo = fork_of
454 454 user_perms = UserRepoToPerm.query() \
455 455 .filter(UserRepoToPerm.repository == repo).all()
456 456 group_perms = UserGroupRepoToPerm.query() \
457 457 .filter(UserGroupRepoToPerm.repository == repo).all()
458 458
459 459 for perm in user_perms:
460 460 UserRepoToPerm.create(
461 461 perm.user, new_repo, perm.permission)
462 462
463 463 for perm in group_perms:
464 464 UserGroupRepoToPerm.create(
465 465 perm.users_group, new_repo, perm.permission)
466 466 # in case we copy permissions and also set this repo to private
467 467 # override the default user permission to make it a private
468 468 # repo
469 469 if private:
470 470 RepoModel(self.sa).grant_user_permission(
471 471 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
472 472
473 473 elif repo_group and copy_group_permissions:
474 474 user_perms = UserRepoGroupToPerm.query() \
475 475 .filter(UserRepoGroupToPerm.group == repo_group).all()
476 476
477 477 group_perms = UserGroupRepoGroupToPerm.query() \
478 478 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
479 479
480 480 for perm in user_perms:
481 481 perm_name = perm.permission.permission_name.replace(
482 482 'group.', 'repository.')
483 483 perm_obj = Permission.get_by_key(perm_name)
484 484 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
485 485
486 486 for perm in group_perms:
487 487 perm_name = perm.permission.permission_name.replace(
488 488 'group.', 'repository.')
489 489 perm_obj = Permission.get_by_key(perm_name)
490 490 UserGroupRepoToPerm.create(
491 491 perm.users_group, new_repo, perm_obj)
492 492
493 493 if private:
494 494 RepoModel(self.sa).grant_user_permission(
495 495 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
496 496
497 497 else:
498 498 perm_obj = self._create_default_perms(new_repo, private)
499 499 self.sa.add(perm_obj)
500 500
501 501 # now automatically start following this repository as owner
502 502 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
503 503 owner.user_id)
504 504
505 505 # we need to flush here, in order to check if database won't
506 506 # throw any exceptions, create filesystem dirs at the very end
507 507 self.sa.flush()
508 508 events.trigger(events.RepoCreateEvent(new_repo))
509 509 return new_repo
510 510
511 511 except Exception:
512 512 log.error(traceback.format_exc())
513 513 raise
514 514
515 515 def create(self, form_data, cur_user):
516 516 """
517 517 Create repository using celery tasks
518 518
519 519 :param form_data:
520 520 :param cur_user:
521 521 """
522 522 from rhodecode.lib.celerylib import tasks, run_task
523 523 return run_task(tasks.create_repo, form_data, cur_user)
524 524
525 525 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
526 526 perm_deletions=None, check_perms=True,
527 527 cur_user=None):
528 528 if not perm_additions:
529 529 perm_additions = []
530 530 if not perm_updates:
531 531 perm_updates = []
532 532 if not perm_deletions:
533 533 perm_deletions = []
534 534
535 535 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
536 536
537 537 changes = {
538 538 'added': [],
539 539 'updated': [],
540 540 'deleted': []
541 541 }
542 542 # update permissions
543 543 for member_id, perm, member_type in perm_updates:
544 544 member_id = int(member_id)
545 545 if member_type == 'user':
546 546 member_name = User.get(member_id).username
547 547 # this updates also current one if found
548 548 self.grant_user_permission(
549 549 repo=repo, user=member_id, perm=perm)
550 550 else: # set for user group
551 551 # check if we have permissions to alter this usergroup
552 552 member_name = UserGroup.get(member_id).users_group_name
553 553 if not check_perms or HasUserGroupPermissionAny(
554 554 *req_perms)(member_name, user=cur_user):
555 555 self.grant_user_group_permission(
556 556 repo=repo, group_name=member_id, perm=perm)
557 557
558 558 changes['updated'].append({'type': member_type, 'id': member_id,
559 559 'name': member_name, 'new_perm': perm})
560 560
561 561 # set new permissions
562 562 for member_id, perm, member_type in perm_additions:
563 563 member_id = int(member_id)
564 564 if member_type == 'user':
565 565 member_name = User.get(member_id).username
566 566 self.grant_user_permission(
567 567 repo=repo, user=member_id, perm=perm)
568 568 else: # set for user group
569 569 # check if we have permissions to alter this usergroup
570 570 member_name = UserGroup.get(member_id).users_group_name
571 571 if not check_perms or HasUserGroupPermissionAny(
572 572 *req_perms)(member_name, user=cur_user):
573 573 self.grant_user_group_permission(
574 574 repo=repo, group_name=member_id, perm=perm)
575 575 changes['added'].append({'type': member_type, 'id': member_id,
576 576 'name': member_name, 'new_perm': perm})
577 577 # delete permissions
578 578 for member_id, perm, member_type in perm_deletions:
579 579 member_id = int(member_id)
580 580 if member_type == 'user':
581 581 member_name = User.get(member_id).username
582 582 self.revoke_user_permission(repo=repo, user=member_id)
583 583 else: # set for user group
584 584 # check if we have permissions to alter this usergroup
585 585 member_name = UserGroup.get(member_id).users_group_name
586 586 if not check_perms or HasUserGroupPermissionAny(
587 587 *req_perms)(member_name, user=cur_user):
588 588 self.revoke_user_group_permission(
589 589 repo=repo, group_name=member_id)
590 590
591 591 changes['deleted'].append({'type': member_type, 'id': member_id,
592 592 'name': member_name, 'new_perm': perm})
593 593 return changes
594 594
595 595 def create_fork(self, form_data, cur_user):
596 596 """
597 597 Simple wrapper into executing celery task for fork creation
598 598
599 599 :param form_data:
600 600 :param cur_user:
601 601 """
602 602 from rhodecode.lib.celerylib import tasks, run_task
603 603 return run_task(tasks.create_repo_fork, form_data, cur_user)
604 604
605 605 def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
606 606 """
607 607 Delete given repository, forks parameter defines what do do with
608 608 attached forks. Throws AttachedForksError if deleted repo has attached
609 609 forks
610 610
611 611 :param repo:
612 612 :param forks: str 'delete' or 'detach'
613 613 :param fs_remove: remove(archive) repo from filesystem
614 614 """
615 615 if not cur_user:
616 616 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
617 617 repo = self._get_repo(repo)
618 618 if repo:
619 619 if forks == 'detach':
620 620 for r in repo.forks:
621 621 r.fork = None
622 622 self.sa.add(r)
623 623 elif forks == 'delete':
624 624 for r in repo.forks:
625 625 self.delete(r, forks='delete')
626 626 elif [f for f in repo.forks]:
627 627 raise AttachedForksError()
628 628
629 629 old_repo_dict = repo.get_dict()
630 630 events.trigger(events.RepoPreDeleteEvent(repo))
631 631 try:
632 632 self.sa.delete(repo)
633 633 if fs_remove:
634 634 self._delete_filesystem_repo(repo)
635 635 else:
636 636 log.debug('skipping removal from filesystem')
637 637 old_repo_dict.update({
638 638 'deleted_by': cur_user,
639 639 'deleted_on': time.time(),
640 640 })
641 641 log_delete_repository(**old_repo_dict)
642 642 events.trigger(events.RepoDeleteEvent(repo))
643 643 except Exception:
644 644 log.error(traceback.format_exc())
645 645 raise
646 646
647 647 def grant_user_permission(self, repo, user, perm):
648 648 """
649 649 Grant permission for user on given repository, or update existing one
650 650 if found
651 651
652 652 :param repo: Instance of Repository, repository_id, or repository name
653 653 :param user: Instance of User, user_id or username
654 654 :param perm: Instance of Permission, or permission_name
655 655 """
656 656 user = self._get_user(user)
657 657 repo = self._get_repo(repo)
658 658 permission = self._get_perm(perm)
659 659
660 660 # check if we have that permission already
661 661 obj = self.sa.query(UserRepoToPerm) \
662 662 .filter(UserRepoToPerm.user == user) \
663 663 .filter(UserRepoToPerm.repository == repo) \
664 664 .scalar()
665 665 if obj is None:
666 666 # create new !
667 667 obj = UserRepoToPerm()
668 668 obj.repository = repo
669 669 obj.user = user
670 670 obj.permission = permission
671 671 self.sa.add(obj)
672 672 log.debug('Granted perm %s to %s on %s', perm, user, repo)
673 673 action_logger_generic(
674 674 'granted permission: {} to user: {} on repo: {}'.format(
675 675 perm, user, repo), namespace='security.repo')
676 676 return obj
677 677
678 678 def revoke_user_permission(self, repo, user):
679 679 """
680 680 Revoke permission for user on given repository
681 681
682 682 :param repo: Instance of Repository, repository_id, or repository name
683 683 :param user: Instance of User, user_id or username
684 684 """
685 685
686 686 user = self._get_user(user)
687 687 repo = self._get_repo(repo)
688 688
689 689 obj = self.sa.query(UserRepoToPerm) \
690 690 .filter(UserRepoToPerm.repository == repo) \
691 691 .filter(UserRepoToPerm.user == user) \
692 692 .scalar()
693 693 if obj:
694 694 self.sa.delete(obj)
695 695 log.debug('Revoked perm on %s on %s', repo, user)
696 696 action_logger_generic(
697 697 'revoked permission from user: {} on repo: {}'.format(
698 698 user, repo), namespace='security.repo')
699 699
700 700 def grant_user_group_permission(self, repo, group_name, perm):
701 701 """
702 702 Grant permission for user group on given repository, or update
703 703 existing one if found
704 704
705 705 :param repo: Instance of Repository, repository_id, or repository name
706 706 :param group_name: Instance of UserGroup, users_group_id,
707 707 or user group name
708 708 :param perm: Instance of Permission, or permission_name
709 709 """
710 710 repo = self._get_repo(repo)
711 711 group_name = self._get_user_group(group_name)
712 712 permission = self._get_perm(perm)
713 713
714 714 # check if we have that permission already
715 715 obj = self.sa.query(UserGroupRepoToPerm) \
716 716 .filter(UserGroupRepoToPerm.users_group == group_name) \
717 717 .filter(UserGroupRepoToPerm.repository == repo) \
718 718 .scalar()
719 719
720 720 if obj is None:
721 721 # create new
722 722 obj = UserGroupRepoToPerm()
723 723
724 724 obj.repository = repo
725 725 obj.users_group = group_name
726 726 obj.permission = permission
727 727 self.sa.add(obj)
728 728 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
729 729 action_logger_generic(
730 730 'granted permission: {} to usergroup: {} on repo: {}'.format(
731 731 perm, group_name, repo), namespace='security.repo')
732 732
733 733 return obj
734 734
735 735 def revoke_user_group_permission(self, repo, group_name):
736 736 """
737 737 Revoke permission for user group on given repository
738 738
739 739 :param repo: Instance of Repository, repository_id, or repository name
740 740 :param group_name: Instance of UserGroup, users_group_id,
741 741 or user group name
742 742 """
743 743 repo = self._get_repo(repo)
744 744 group_name = self._get_user_group(group_name)
745 745
746 746 obj = self.sa.query(UserGroupRepoToPerm) \
747 747 .filter(UserGroupRepoToPerm.repository == repo) \
748 748 .filter(UserGroupRepoToPerm.users_group == group_name) \
749 749 .scalar()
750 750 if obj:
751 751 self.sa.delete(obj)
752 752 log.debug('Revoked perm to %s on %s', repo, group_name)
753 753 action_logger_generic(
754 754 'revoked permission from usergroup: {} on repo: {}'.format(
755 755 group_name, repo), namespace='security.repo')
756 756
757 757 def delete_stats(self, repo_name):
758 758 """
759 759 removes stats for given repo
760 760
761 761 :param repo_name:
762 762 """
763 763 repo = self._get_repo(repo_name)
764 764 try:
765 765 obj = self.sa.query(Statistics) \
766 766 .filter(Statistics.repository == repo).scalar()
767 767 if obj:
768 768 self.sa.delete(obj)
769 769 except Exception:
770 770 log.error(traceback.format_exc())
771 771 raise
772 772
773 773 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
774 774 field_type='str', field_desc=''):
775 775
776 776 repo = self._get_repo(repo_name)
777 777
778 778 new_field = RepositoryField()
779 779 new_field.repository = repo
780 780 new_field.field_key = field_key
781 781 new_field.field_type = field_type # python type
782 782 new_field.field_value = field_value
783 783 new_field.field_desc = field_desc
784 784 new_field.field_label = field_label
785 785 self.sa.add(new_field)
786 786 return new_field
787 787
788 788 def delete_repo_field(self, repo_name, field_key):
789 789 repo = self._get_repo(repo_name)
790 790 field = RepositoryField.get_by_key_name(field_key, repo)
791 791 if field:
792 792 self.sa.delete(field)
793 793
794 794 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
795 795 clone_uri=None, repo_store_location=None,
796 796 use_global_config=False):
797 797 """
798 798 makes repository on filesystem. It's group aware means it'll create
799 799 a repository within a group, and alter the paths accordingly of
800 800 group location
801 801
802 802 :param repo_name:
803 803 :param alias:
804 804 :param parent:
805 805 :param clone_uri:
806 806 :param repo_store_location:
807 807 """
808 808 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
809 809 from rhodecode.model.scm import ScmModel
810 810
811 811 if Repository.NAME_SEP in repo_name:
812 812 raise ValueError(
813 813 'repo_name must not contain groups got `%s`' % repo_name)
814 814
815 815 if isinstance(repo_group, RepoGroup):
816 816 new_parent_path = os.sep.join(repo_group.full_path_splitted)
817 817 else:
818 818 new_parent_path = repo_group or ''
819 819
820 820 if repo_store_location:
821 821 _paths = [repo_store_location]
822 822 else:
823 823 _paths = [self.repos_path, new_parent_path, repo_name]
824 824 # we need to make it str for mercurial
825 825 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
826 826
827 827 # check if this path is not a repository
828 828 if is_valid_repo(repo_path, self.repos_path):
829 829 raise Exception('This path %s is a valid repository' % repo_path)
830 830
831 831 # check if this path is a group
832 832 if is_valid_repo_group(repo_path, self.repos_path):
833 833 raise Exception('This path %s is a valid group' % repo_path)
834 834
835 835 log.info('creating repo %s in %s from url: `%s`',
836 836 repo_name, safe_unicode(repo_path),
837 837 obfuscate_url_pw(clone_uri))
838 838
839 839 backend = get_backend(repo_type)
840 840
841 841 config_repo = None if use_global_config else repo_name
842 842 if config_repo and new_parent_path:
843 843 config_repo = Repository.NAME_SEP.join(
844 844 (new_parent_path, config_repo))
845 845 config = make_db_config(clear_session=False, repo=config_repo)
846 846 config.set('extensions', 'largefiles', '')
847 847
848 848 # patch and reset hooks section of UI config to not run any
849 849 # hooks on creating remote repo
850 850 config.clear_section('hooks')
851 851
852 852 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
853 853 if repo_type == 'git':
854 854 repo = backend(
855 855 repo_path, config=config, create=True, src_url=clone_uri,
856 856 bare=True)
857 857 else:
858 858 repo = backend(
859 859 repo_path, config=config, create=True, src_url=clone_uri)
860 860
861 ScmModel().install_hooks(repo, repo_type=repo_type)
861 repo.install_hooks()
862 862
863 863 log.debug('Created repo %s with %s backend',
864 864 safe_unicode(repo_name), safe_unicode(repo_type))
865 865 return repo
866 866
867 867 def _rename_filesystem_repo(self, old, new):
868 868 """
869 869 renames repository on filesystem
870 870
871 871 :param old: old name
872 872 :param new: new name
873 873 """
874 874 log.info('renaming repo from %s to %s', old, new)
875 875
876 876 old_path = os.path.join(self.repos_path, old)
877 877 new_path = os.path.join(self.repos_path, new)
878 878 if os.path.isdir(new_path):
879 879 raise Exception(
880 880 'Was trying to rename to already existing dir %s' % new_path
881 881 )
882 882 shutil.move(old_path, new_path)
883 883
884 884 def _delete_filesystem_repo(self, repo):
885 885 """
886 886 removes repo from filesystem, the removal is acctually made by
887 887 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
888 888 repository is no longer valid for rhodecode, can be undeleted later on
889 889 by reverting the renames on this repository
890 890
891 891 :param repo: repo object
892 892 """
893 893 rm_path = os.path.join(self.repos_path, repo.repo_name)
894 894 repo_group = repo.group
895 895 log.info("Removing repository %s", rm_path)
896 896 # disable hg/git internal that it doesn't get detected as repo
897 897 alias = repo.repo_type
898 898
899 899 config = make_db_config(clear_session=False)
900 900 config.set('extensions', 'largefiles', '')
901 901 bare = getattr(repo.scm_instance(config=config), 'bare', False)
902 902
903 903 # skip this for bare git repos
904 904 if not bare:
905 905 # disable VCS repo
906 906 vcs_path = os.path.join(rm_path, '.%s' % alias)
907 907 if os.path.exists(vcs_path):
908 908 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
909 909
910 910 _now = datetime.datetime.now()
911 911 _ms = str(_now.microsecond).rjust(6, '0')
912 912 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
913 913 repo.just_name)
914 914 if repo_group:
915 915 # if repository is in group, prefix the removal path with the group
916 916 args = repo_group.full_path_splitted + [_d]
917 917 _d = os.path.join(*args)
918 918
919 919 if os.path.isdir(rm_path):
920 920 shutil.move(rm_path, os.path.join(self.repos_path, _d))
921 921
922 922
923 923 class ReadmeFinder:
924 924 """
925 925 Utility which knows how to find a readme for a specific commit.
926 926
927 927 The main idea is that this is a configurable algorithm. When creating an
928 928 instance you can define parameters, currently only the `default_renderer`.
929 929 Based on this configuration the method :meth:`search` behaves slightly
930 930 different.
931 931 """
932 932
933 933 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
934 934 path_re = re.compile(r'^docs?', re.IGNORECASE)
935 935
936 936 default_priorities = {
937 937 None: 0,
938 938 '.text': 2,
939 939 '.txt': 3,
940 940 '.rst': 1,
941 941 '.rest': 2,
942 942 '.md': 1,
943 943 '.mkdn': 2,
944 944 '.mdown': 3,
945 945 '.markdown': 4,
946 946 }
947 947
948 948 path_priority = {
949 949 'doc': 0,
950 950 'docs': 1,
951 951 }
952 952
953 953 FALLBACK_PRIORITY = 99
954 954
955 955 RENDERER_TO_EXTENSION = {
956 956 'rst': ['.rst', '.rest'],
957 957 'markdown': ['.md', 'mkdn', '.mdown', '.markdown'],
958 958 }
959 959
960 960 def __init__(self, default_renderer=None):
961 961 self._default_renderer = default_renderer
962 962 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
963 963 default_renderer, [])
964 964
965 965 def search(self, commit, path='/'):
966 966 """
967 967 Find a readme in the given `commit`.
968 968 """
969 969 nodes = commit.get_nodes(path)
970 970 matches = self._match_readmes(nodes)
971 971 matches = self._sort_according_to_priority(matches)
972 972 if matches:
973 973 return matches[0].node
974 974
975 975 paths = self._match_paths(nodes)
976 976 paths = self._sort_paths_according_to_priority(paths)
977 977 for path in paths:
978 978 match = self.search(commit, path=path)
979 979 if match:
980 980 return match
981 981
982 982 return None
983 983
984 984 def _match_readmes(self, nodes):
985 985 for node in nodes:
986 986 if not node.is_file():
987 987 continue
988 988 path = node.path.rsplit('/', 1)[-1]
989 989 match = self.readme_re.match(path)
990 990 if match:
991 991 extension = match.group(1)
992 992 yield ReadmeMatch(node, match, self._priority(extension))
993 993
994 994 def _match_paths(self, nodes):
995 995 for node in nodes:
996 996 if not node.is_dir():
997 997 continue
998 998 match = self.path_re.match(node.path)
999 999 if match:
1000 1000 yield node.path
1001 1001
1002 1002 def _priority(self, extension):
1003 1003 renderer_priority = (
1004 1004 0 if extension in self._renderer_extensions else 1)
1005 1005 extension_priority = self.default_priorities.get(
1006 1006 extension, self.FALLBACK_PRIORITY)
1007 1007 return (renderer_priority, extension_priority)
1008 1008
1009 1009 def _sort_according_to_priority(self, matches):
1010 1010
1011 1011 def priority_and_path(match):
1012 1012 return (match.priority, match.path)
1013 1013
1014 1014 return sorted(matches, key=priority_and_path)
1015 1015
1016 1016 def _sort_paths_according_to_priority(self, paths):
1017 1017
1018 1018 def priority_and_path(path):
1019 1019 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1020 1020
1021 1021 return sorted(paths, key=priority_and_path)
1022 1022
1023 1023
1024 1024 class ReadmeMatch:
1025 1025
1026 1026 def __init__(self, node, match, priority):
1027 1027 self.node = node
1028 1028 self._match = match
1029 1029 self.priority = priority
1030 1030
1031 1031 @property
1032 1032 def path(self):
1033 1033 return self.node.path
1034 1034
1035 1035 def __repr__(self):
1036 1036 return '<ReadmeMatch {} priority={}'.format(self.path, self.priority)
@@ -1,922 +1,812 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import re
27 27 import sys
28 28 import traceback
29 29 import logging
30 30 import cStringIO
31 31 import pkg_resources
32 32
33 33 from sqlalchemy import func
34 34 from zope.cachedescriptors.property import Lazy as LazyProperty
35 35
36 36 import rhodecode
37 37 from rhodecode.lib.vcs import get_backend
38 38 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
39 39 from rhodecode.lib.vcs.nodes import FileNode
40 40 from rhodecode.lib.vcs.backends.base import EmptyCommit
41 41 from rhodecode.lib import helpers as h
42 42 from rhodecode.lib.auth import (
43 43 HasRepoPermissionAny, HasRepoGroupPermissionAny,
44 44 HasUserGroupPermissionAny)
45 45 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
46 46 from rhodecode.lib import hooks_utils, caches
47 47 from rhodecode.lib.utils import (
48 48 get_filesystem_repos, make_db_config)
49 49 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
50 50 from rhodecode.lib.system_info import get_system_info
51 51 from rhodecode.model import BaseModel
52 52 from rhodecode.model.db import (
53 53 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
54 54 PullRequest)
55 55 from rhodecode.model.settings import VcsSettingsModel
56 56
57 57 log = logging.getLogger(__name__)
58 58
59 59
60 60 class UserTemp(object):
61 61 def __init__(self, user_id):
62 62 self.user_id = user_id
63 63
64 64 def __repr__(self):
65 65 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
66 66
67 67
68 68 class RepoTemp(object):
69 69 def __init__(self, repo_id):
70 70 self.repo_id = repo_id
71 71
72 72 def __repr__(self):
73 73 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
74 74
75 75
76 76 class SimpleCachedRepoList(object):
77 77 """
78 78 Lighter version of of iteration of repos without the scm initialisation,
79 79 and with cache usage
80 80 """
81 81 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
82 82 self.db_repo_list = db_repo_list
83 83 self.repos_path = repos_path
84 84 self.order_by = order_by
85 85 self.reversed = (order_by or '').startswith('-')
86 86 if not perm_set:
87 87 perm_set = ['repository.read', 'repository.write',
88 88 'repository.admin']
89 89 self.perm_set = perm_set
90 90
91 91 def __len__(self):
92 92 return len(self.db_repo_list)
93 93
94 94 def __repr__(self):
95 95 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
96 96
97 97 def __iter__(self):
98 98 for dbr in self.db_repo_list:
99 99 # check permission at this level
100 100 has_perm = HasRepoPermissionAny(*self.perm_set)(
101 101 dbr.repo_name, 'SimpleCachedRepoList check')
102 102 if not has_perm:
103 103 continue
104 104
105 105 tmp_d = {
106 106 'name': dbr.repo_name,
107 107 'dbrepo': dbr.get_dict(),
108 108 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
109 109 }
110 110 yield tmp_d
111 111
112 112
113 113 class _PermCheckIterator(object):
114 114
115 115 def __init__(
116 116 self, obj_list, obj_attr, perm_set, perm_checker,
117 117 extra_kwargs=None):
118 118 """
119 119 Creates iterator from given list of objects, additionally
120 120 checking permission for them from perm_set var
121 121
122 122 :param obj_list: list of db objects
123 123 :param obj_attr: attribute of object to pass into perm_checker
124 124 :param perm_set: list of permissions to check
125 125 :param perm_checker: callable to check permissions against
126 126 """
127 127 self.obj_list = obj_list
128 128 self.obj_attr = obj_attr
129 129 self.perm_set = perm_set
130 130 self.perm_checker = perm_checker
131 131 self.extra_kwargs = extra_kwargs or {}
132 132
133 133 def __len__(self):
134 134 return len(self.obj_list)
135 135
136 136 def __repr__(self):
137 137 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
138 138
139 139 def __iter__(self):
140 140 checker = self.perm_checker(*self.perm_set)
141 141 for db_obj in self.obj_list:
142 142 # check permission at this level
143 143 name = getattr(db_obj, self.obj_attr, None)
144 144 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
145 145 continue
146 146
147 147 yield db_obj
148 148
149 149
150 150 class RepoList(_PermCheckIterator):
151 151
152 152 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
153 153 if not perm_set:
154 154 perm_set = [
155 155 'repository.read', 'repository.write', 'repository.admin']
156 156
157 157 super(RepoList, self).__init__(
158 158 obj_list=db_repo_list,
159 159 obj_attr='repo_name', perm_set=perm_set,
160 160 perm_checker=HasRepoPermissionAny,
161 161 extra_kwargs=extra_kwargs)
162 162
163 163
164 164 class RepoGroupList(_PermCheckIterator):
165 165
166 166 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
167 167 if not perm_set:
168 168 perm_set = ['group.read', 'group.write', 'group.admin']
169 169
170 170 super(RepoGroupList, self).__init__(
171 171 obj_list=db_repo_group_list,
172 172 obj_attr='group_name', perm_set=perm_set,
173 173 perm_checker=HasRepoGroupPermissionAny,
174 174 extra_kwargs=extra_kwargs)
175 175
176 176
177 177 class UserGroupList(_PermCheckIterator):
178 178
179 179 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
180 180 if not perm_set:
181 181 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
182 182
183 183 super(UserGroupList, self).__init__(
184 184 obj_list=db_user_group_list,
185 185 obj_attr='users_group_name', perm_set=perm_set,
186 186 perm_checker=HasUserGroupPermissionAny,
187 187 extra_kwargs=extra_kwargs)
188 188
189 189
190 190 class ScmModel(BaseModel):
191 191 """
192 192 Generic Scm Model
193 193 """
194 194
195 195 @LazyProperty
196 196 def repos_path(self):
197 197 """
198 198 Gets the repositories root path from database
199 199 """
200 200
201 201 settings_model = VcsSettingsModel(sa=self.sa)
202 202 return settings_model.get_repos_location()
203 203
204 204 def repo_scan(self, repos_path=None):
205 205 """
206 206 Listing of repositories in given path. This path should not be a
207 207 repository itself. Return a dictionary of repository objects
208 208
209 209 :param repos_path: path to directory containing repositories
210 210 """
211 211
212 212 if repos_path is None:
213 213 repos_path = self.repos_path
214 214
215 215 log.info('scanning for repositories in %s', repos_path)
216 216
217 217 config = make_db_config()
218 218 config.set('extensions', 'largefiles', '')
219 219 repos = {}
220 220
221 221 for name, path in get_filesystem_repos(repos_path, recursive=True):
222 222 # name need to be decomposed and put back together using the /
223 223 # since this is internal storage separator for rhodecode
224 224 name = Repository.normalize_repo_name(name)
225 225
226 226 try:
227 227 if name in repos:
228 228 raise RepositoryError('Duplicate repository name %s '
229 229 'found in %s' % (name, path))
230 230 elif path[0] in rhodecode.BACKENDS:
231 231 klass = get_backend(path[0])
232 232 repos[name] = klass(path[1], config=config)
233 233 except OSError:
234 234 continue
235 235 log.debug('found %s paths with repositories', len(repos))
236 236 return repos
237 237
238 238 def get_repos(self, all_repos=None, sort_key=None):
239 239 """
240 240 Get all repositories from db and for each repo create it's
241 241 backend instance and fill that backed with information from database
242 242
243 243 :param all_repos: list of repository names as strings
244 244 give specific repositories list, good for filtering
245 245
246 246 :param sort_key: initial sorting of repositories
247 247 """
248 248 if all_repos is None:
249 249 all_repos = self.sa.query(Repository)\
250 250 .filter(Repository.group_id == None)\
251 251 .order_by(func.lower(Repository.repo_name)).all()
252 252 repo_iter = SimpleCachedRepoList(
253 253 all_repos, repos_path=self.repos_path, order_by=sort_key)
254 254 return repo_iter
255 255
256 256 def get_repo_groups(self, all_groups=None):
257 257 if all_groups is None:
258 258 all_groups = RepoGroup.query()\
259 259 .filter(RepoGroup.group_parent_id == None).all()
260 260 return [x for x in RepoGroupList(all_groups)]
261 261
262 262 def mark_for_invalidation(self, repo_name, delete=False):
263 263 """
264 264 Mark caches of this repo invalid in the database. `delete` flag
265 265 removes the cache entries
266 266
267 267 :param repo_name: the repo_name for which caches should be marked
268 268 invalid, or deleted
269 269 :param delete: delete the entry keys instead of setting bool
270 270 flag on them
271 271 """
272 272 CacheKey.set_invalidate(repo_name, delete=delete)
273 273 repo = Repository.get_by_repo_name(repo_name)
274 274
275 275 if repo:
276 276 config = repo._config
277 277 config.set('extensions', 'largefiles', '')
278 278 repo.update_commit_cache(config=config, cs_cache=None)
279 279 caches.clear_repo_caches(repo_name)
280 280
281 281 def toggle_following_repo(self, follow_repo_id, user_id):
282 282
283 283 f = self.sa.query(UserFollowing)\
284 284 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
285 285 .filter(UserFollowing.user_id == user_id).scalar()
286 286
287 287 if f is not None:
288 288 try:
289 289 self.sa.delete(f)
290 290 return
291 291 except Exception:
292 292 log.error(traceback.format_exc())
293 293 raise
294 294
295 295 try:
296 296 f = UserFollowing()
297 297 f.user_id = user_id
298 298 f.follows_repo_id = follow_repo_id
299 299 self.sa.add(f)
300 300 except Exception:
301 301 log.error(traceback.format_exc())
302 302 raise
303 303
304 304 def toggle_following_user(self, follow_user_id, user_id):
305 305 f = self.sa.query(UserFollowing)\
306 306 .filter(UserFollowing.follows_user_id == follow_user_id)\
307 307 .filter(UserFollowing.user_id == user_id).scalar()
308 308
309 309 if f is not None:
310 310 try:
311 311 self.sa.delete(f)
312 312 return
313 313 except Exception:
314 314 log.error(traceback.format_exc())
315 315 raise
316 316
317 317 try:
318 318 f = UserFollowing()
319 319 f.user_id = user_id
320 320 f.follows_user_id = follow_user_id
321 321 self.sa.add(f)
322 322 except Exception:
323 323 log.error(traceback.format_exc())
324 324 raise
325 325
326 326 def is_following_repo(self, repo_name, user_id, cache=False):
327 327 r = self.sa.query(Repository)\
328 328 .filter(Repository.repo_name == repo_name).scalar()
329 329
330 330 f = self.sa.query(UserFollowing)\
331 331 .filter(UserFollowing.follows_repository == r)\
332 332 .filter(UserFollowing.user_id == user_id).scalar()
333 333
334 334 return f is not None
335 335
336 336 def is_following_user(self, username, user_id, cache=False):
337 337 u = User.get_by_username(username)
338 338
339 339 f = self.sa.query(UserFollowing)\
340 340 .filter(UserFollowing.follows_user == u)\
341 341 .filter(UserFollowing.user_id == user_id).scalar()
342 342
343 343 return f is not None
344 344
345 345 def get_followers(self, repo):
346 346 repo = self._get_repo(repo)
347 347
348 348 return self.sa.query(UserFollowing)\
349 349 .filter(UserFollowing.follows_repository == repo).count()
350 350
351 351 def get_forks(self, repo):
352 352 repo = self._get_repo(repo)
353 353 return self.sa.query(Repository)\
354 354 .filter(Repository.fork == repo).count()
355 355
356 356 def get_pull_requests(self, repo):
357 357 repo = self._get_repo(repo)
358 358 return self.sa.query(PullRequest)\
359 359 .filter(PullRequest.target_repo == repo)\
360 360 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
361 361
362 362 def mark_as_fork(self, repo, fork, user):
363 363 repo = self._get_repo(repo)
364 364 fork = self._get_repo(fork)
365 365 if fork and repo.repo_id == fork.repo_id:
366 366 raise Exception("Cannot set repository as fork of itself")
367 367
368 368 if fork and repo.repo_type != fork.repo_type:
369 369 raise RepositoryError(
370 370 "Cannot set repository as fork of repository with other type")
371 371
372 372 repo.fork = fork
373 373 self.sa.add(repo)
374 374 return repo
375 375
376 376 def pull_changes(self, repo, username, remote_uri=None):
377 377 dbrepo = self._get_repo(repo)
378 378 remote_uri = remote_uri or dbrepo.clone_uri
379 379 if not remote_uri:
380 380 raise Exception("This repository doesn't have a clone uri")
381 381
382 382 repo = dbrepo.scm_instance(cache=False)
383 383 # TODO: marcink fix this an re-enable since we need common logic
384 384 # for hg/git remove hooks so we don't trigger them on fetching
385 385 # commits from remote
386 386 repo.config.clear_section('hooks')
387 387
388 388 repo_name = dbrepo.repo_name
389 389 try:
390 390 # TODO: we need to make sure those operations call proper hooks !
391 391 repo.pull(remote_uri)
392 392
393 393 self.mark_for_invalidation(repo_name)
394 394 except Exception:
395 395 log.error(traceback.format_exc())
396 396 raise
397 397
398 398 def push_changes(self, repo, username, remote_uri=None):
399 399 dbrepo = self._get_repo(repo)
400 400 remote_uri = remote_uri or dbrepo.push_uri
401 401 if not remote_uri:
402 402 raise Exception("This repository doesn't have a clone uri")
403 403
404 404 repo = dbrepo.scm_instance(cache=False)
405 405 repo.config.clear_section('hooks')
406 406
407 407 try:
408 408 repo.push(remote_uri)
409 409 except Exception:
410 410 log.error(traceback.format_exc())
411 411 raise
412 412
413 413 def commit_change(self, repo, repo_name, commit, user, author, message,
414 414 content, f_path):
415 415 """
416 416 Commits changes
417 417
418 418 :param repo: SCM instance
419 419
420 420 """
421 421 user = self._get_user(user)
422 422
423 423 # decoding here will force that we have proper encoded values
424 424 # in any other case this will throw exceptions and deny commit
425 425 content = safe_str(content)
426 426 path = safe_str(f_path)
427 427 # message and author needs to be unicode
428 428 # proper backend should then translate that into required type
429 429 message = safe_unicode(message)
430 430 author = safe_unicode(author)
431 431 imc = repo.in_memory_commit
432 432 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
433 433 try:
434 434 # TODO: handle pre-push action !
435 435 tip = imc.commit(
436 436 message=message, author=author, parents=[commit],
437 437 branch=commit.branch)
438 438 except Exception as e:
439 439 log.error(traceback.format_exc())
440 440 raise IMCCommitError(str(e))
441 441 finally:
442 442 # always clear caches, if commit fails we want fresh object also
443 443 self.mark_for_invalidation(repo_name)
444 444
445 445 # We trigger the post-push action
446 446 hooks_utils.trigger_post_push_hook(
447 447 username=user.username, action='push_local', repo_name=repo_name,
448 448 repo_alias=repo.alias, commit_ids=[tip.raw_id])
449 449 return tip
450 450
451 451 def _sanitize_path(self, f_path):
452 452 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
453 453 raise NonRelativePathError('%s is not an relative path' % f_path)
454 454 if f_path:
455 455 f_path = os.path.normpath(f_path)
456 456 return f_path
457 457
458 458 def get_dirnode_metadata(self, request, commit, dir_node):
459 459 if not dir_node.is_dir():
460 460 return []
461 461
462 462 data = []
463 463 for node in dir_node:
464 464 if not node.is_file():
465 465 # we skip file-nodes
466 466 continue
467 467
468 468 last_commit = node.last_commit
469 469 last_commit_date = last_commit.date
470 470 data.append({
471 471 'name': node.name,
472 472 'size': h.format_byte_size_binary(node.size),
473 473 'modified_at': h.format_date(last_commit_date),
474 474 'modified_ts': last_commit_date.isoformat(),
475 475 'revision': last_commit.revision,
476 476 'short_id': last_commit.short_id,
477 477 'message': h.escape(last_commit.message),
478 478 'author': h.escape(last_commit.author),
479 479 'user_profile': h.gravatar_with_user(
480 480 request, last_commit.author),
481 481 })
482 482
483 483 return data
484 484
485 485 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
486 486 extended_info=False, content=False, max_file_bytes=None):
487 487 """
488 488 recursive walk in root dir and return a set of all path in that dir
489 489 based on repository walk function
490 490
491 491 :param repo_name: name of repository
492 492 :param commit_id: commit id for which to list nodes
493 493 :param root_path: root path to list
494 494 :param flat: return as a list, if False returns a dict with description
495 495 :param max_file_bytes: will not return file contents over this limit
496 496
497 497 """
498 498 _files = list()
499 499 _dirs = list()
500 500 try:
501 501 _repo = self._get_repo(repo_name)
502 502 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
503 503 root_path = root_path.lstrip('/')
504 504 for __, dirs, files in commit.walk(root_path):
505 505 for f in files:
506 506 _content = None
507 507 _data = f.unicode_path
508 508 over_size_limit = (max_file_bytes is not None
509 509 and f.size > max_file_bytes)
510 510
511 511 if not flat:
512 512 _data = {
513 513 "name": h.escape(f.unicode_path),
514 514 "type": "file",
515 515 }
516 516 if extended_info:
517 517 _data.update({
518 518 "md5": f.md5,
519 519 "binary": f.is_binary,
520 520 "size": f.size,
521 521 "extension": f.extension,
522 522 "mimetype": f.mimetype,
523 523 "lines": f.lines()[0]
524 524 })
525 525
526 526 if content:
527 527 full_content = None
528 528 if not f.is_binary and not over_size_limit:
529 529 full_content = safe_str(f.content)
530 530
531 531 _data.update({
532 532 "content": full_content,
533 533 })
534 534 _files.append(_data)
535 535 for d in dirs:
536 536 _data = d.unicode_path
537 537 if not flat:
538 538 _data = {
539 539 "name": h.escape(d.unicode_path),
540 540 "type": "dir",
541 541 }
542 542 if extended_info:
543 543 _data.update({
544 544 "md5": None,
545 545 "binary": None,
546 546 "size": None,
547 547 "extension": None,
548 548 })
549 549 if content:
550 550 _data.update({
551 551 "content": None
552 552 })
553 553 _dirs.append(_data)
554 554 except RepositoryError:
555 555 log.debug("Exception in get_nodes", exc_info=True)
556 556 raise
557 557
558 558 return _dirs, _files
559 559
560 560 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
561 561 author=None, trigger_push_hook=True):
562 562 """
563 563 Commits given multiple nodes into repo
564 564
565 565 :param user: RhodeCode User object or user_id, the commiter
566 566 :param repo: RhodeCode Repository object
567 567 :param message: commit message
568 568 :param nodes: mapping {filename:{'content':content},...}
569 569 :param parent_commit: parent commit, can be empty than it's
570 570 initial commit
571 571 :param author: author of commit, cna be different that commiter
572 572 only for git
573 573 :param trigger_push_hook: trigger push hooks
574 574
575 575 :returns: new commited commit
576 576 """
577 577
578 578 user = self._get_user(user)
579 579 scm_instance = repo.scm_instance(cache=False)
580 580
581 581 processed_nodes = []
582 582 for f_path in nodes:
583 583 f_path = self._sanitize_path(f_path)
584 584 content = nodes[f_path]['content']
585 585 f_path = safe_str(f_path)
586 586 # decoding here will force that we have proper encoded values
587 587 # in any other case this will throw exceptions and deny commit
588 588 if isinstance(content, (basestring,)):
589 589 content = safe_str(content)
590 590 elif isinstance(content, (file, cStringIO.OutputType,)):
591 591 content = content.read()
592 592 else:
593 593 raise Exception('Content is of unrecognized type %s' % (
594 594 type(content)
595 595 ))
596 596 processed_nodes.append((f_path, content))
597 597
598 598 message = safe_unicode(message)
599 599 commiter = user.full_contact
600 600 author = safe_unicode(author) if author else commiter
601 601
602 602 imc = scm_instance.in_memory_commit
603 603
604 604 if not parent_commit:
605 605 parent_commit = EmptyCommit(alias=scm_instance.alias)
606 606
607 607 if isinstance(parent_commit, EmptyCommit):
608 608 # EmptyCommit means we we're editing empty repository
609 609 parents = None
610 610 else:
611 611 parents = [parent_commit]
612 612 # add multiple nodes
613 613 for path, content in processed_nodes:
614 614 imc.add(FileNode(path, content=content))
615 615 # TODO: handle pre push scenario
616 616 tip = imc.commit(message=message,
617 617 author=author,
618 618 parents=parents,
619 619 branch=parent_commit.branch)
620 620
621 621 self.mark_for_invalidation(repo.repo_name)
622 622 if trigger_push_hook:
623 623 hooks_utils.trigger_post_push_hook(
624 624 username=user.username, action='push_local',
625 625 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
626 626 commit_ids=[tip.raw_id])
627 627 return tip
628 628
629 629 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
630 630 author=None, trigger_push_hook=True):
631 631 user = self._get_user(user)
632 632 scm_instance = repo.scm_instance(cache=False)
633 633
634 634 message = safe_unicode(message)
635 635 commiter = user.full_contact
636 636 author = safe_unicode(author) if author else commiter
637 637
638 638 imc = scm_instance.in_memory_commit
639 639
640 640 if not parent_commit:
641 641 parent_commit = EmptyCommit(alias=scm_instance.alias)
642 642
643 643 if isinstance(parent_commit, EmptyCommit):
644 644 # EmptyCommit means we we're editing empty repository
645 645 parents = None
646 646 else:
647 647 parents = [parent_commit]
648 648
649 649 # add multiple nodes
650 650 for _filename, data in nodes.items():
651 651 # new filename, can be renamed from the old one, also sanitaze
652 652 # the path for any hack around relative paths like ../../ etc.
653 653 filename = self._sanitize_path(data['filename'])
654 654 old_filename = self._sanitize_path(_filename)
655 655 content = data['content']
656 656
657 657 filenode = FileNode(old_filename, content=content)
658 658 op = data['op']
659 659 if op == 'add':
660 660 imc.add(filenode)
661 661 elif op == 'del':
662 662 imc.remove(filenode)
663 663 elif op == 'mod':
664 664 if filename != old_filename:
665 665 # TODO: handle renames more efficient, needs vcs lib
666 666 # changes
667 667 imc.remove(filenode)
668 668 imc.add(FileNode(filename, content=content))
669 669 else:
670 670 imc.change(filenode)
671 671
672 672 try:
673 673 # TODO: handle pre push scenario
674 674 # commit changes
675 675 tip = imc.commit(message=message,
676 676 author=author,
677 677 parents=parents,
678 678 branch=parent_commit.branch)
679 679 except NodeNotChangedError:
680 680 raise
681 681 except Exception as e:
682 682 log.exception("Unexpected exception during call to imc.commit")
683 683 raise IMCCommitError(str(e))
684 684 finally:
685 685 # always clear caches, if commit fails we want fresh object also
686 686 self.mark_for_invalidation(repo.repo_name)
687 687
688 688 if trigger_push_hook:
689 689 hooks_utils.trigger_post_push_hook(
690 690 username=user.username, action='push_local',
691 691 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
692 692 commit_ids=[tip.raw_id])
693 693
694 694 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
695 695 author=None, trigger_push_hook=True):
696 696 """
697 697 Deletes given multiple nodes into `repo`
698 698
699 699 :param user: RhodeCode User object or user_id, the committer
700 700 :param repo: RhodeCode Repository object
701 701 :param message: commit message
702 702 :param nodes: mapping {filename:{'content':content},...}
703 703 :param parent_commit: parent commit, can be empty than it's initial
704 704 commit
705 705 :param author: author of commit, cna be different that commiter only
706 706 for git
707 707 :param trigger_push_hook: trigger push hooks
708 708
709 709 :returns: new commit after deletion
710 710 """
711 711
712 712 user = self._get_user(user)
713 713 scm_instance = repo.scm_instance(cache=False)
714 714
715 715 processed_nodes = []
716 716 for f_path in nodes:
717 717 f_path = self._sanitize_path(f_path)
718 718 # content can be empty but for compatabilty it allows same dicts
719 719 # structure as add_nodes
720 720 content = nodes[f_path].get('content')
721 721 processed_nodes.append((f_path, content))
722 722
723 723 message = safe_unicode(message)
724 724 commiter = user.full_contact
725 725 author = safe_unicode(author) if author else commiter
726 726
727 727 imc = scm_instance.in_memory_commit
728 728
729 729 if not parent_commit:
730 730 parent_commit = EmptyCommit(alias=scm_instance.alias)
731 731
732 732 if isinstance(parent_commit, EmptyCommit):
733 733 # EmptyCommit means we we're editing empty repository
734 734 parents = None
735 735 else:
736 736 parents = [parent_commit]
737 737 # add multiple nodes
738 738 for path, content in processed_nodes:
739 739 imc.remove(FileNode(path, content=content))
740 740
741 741 # TODO: handle pre push scenario
742 742 tip = imc.commit(message=message,
743 743 author=author,
744 744 parents=parents,
745 745 branch=parent_commit.branch)
746 746
747 747 self.mark_for_invalidation(repo.repo_name)
748 748 if trigger_push_hook:
749 749 hooks_utils.trigger_post_push_hook(
750 750 username=user.username, action='push_local',
751 751 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
752 752 commit_ids=[tip.raw_id])
753 753 return tip
754 754
755 755 def strip(self, repo, commit_id, branch):
756 756 scm_instance = repo.scm_instance(cache=False)
757 757 scm_instance.config.clear_section('hooks')
758 758 scm_instance.strip(commit_id, branch)
759 759 self.mark_for_invalidation(repo.repo_name)
760 760
761 761 def get_unread_journal(self):
762 762 return self.sa.query(UserLog).count()
763 763
764 764 def get_repo_landing_revs(self, translator, repo=None):
765 765 """
766 766 Generates select option with tags branches and bookmarks (for hg only)
767 767 grouped by type
768 768
769 769 :param repo:
770 770 """
771 771 _ = translator
772 772 repo = self._get_repo(repo)
773 773
774 774 hist_l = [
775 775 ['rev:tip', _('latest tip')]
776 776 ]
777 777 choices = [
778 778 'rev:tip'
779 779 ]
780 780
781 781 if not repo:
782 782 return choices, hist_l
783 783
784 784 repo = repo.scm_instance()
785 785
786 786 branches_group = (
787 787 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
788 788 for b in repo.branches],
789 789 _("Branches"))
790 790 hist_l.append(branches_group)
791 791 choices.extend([x[0] for x in branches_group[0]])
792 792
793 793 if repo.alias == 'hg':
794 794 bookmarks_group = (
795 795 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
796 796 for b in repo.bookmarks],
797 797 _("Bookmarks"))
798 798 hist_l.append(bookmarks_group)
799 799 choices.extend([x[0] for x in bookmarks_group[0]])
800 800
801 801 tags_group = (
802 802 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
803 803 for t in repo.tags],
804 804 _("Tags"))
805 805 hist_l.append(tags_group)
806 806 choices.extend([x[0] for x in tags_group[0]])
807 807
808 808 return choices, hist_l
809 809
810 def install_git_hook(self, repo, force_create=False):
811 """
812 Creates a rhodecode hook inside a git repository
813
814 :param repo: Instance of VCS repo
815 :param force_create: Create even if same name hook exists
816 """
817
818 loc = os.path.join(repo.path, 'hooks')
819 if not repo.bare:
820 loc = os.path.join(repo.path, '.git', 'hooks')
821 if not os.path.isdir(loc):
822 os.makedirs(loc, mode=0777)
823
824 tmpl_post = pkg_resources.resource_string(
825 'rhodecode', '/'.join(
826 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
827 tmpl_pre = pkg_resources.resource_string(
828 'rhodecode', '/'.join(
829 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
830
831 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
832 _hook_file = os.path.join(loc, '%s-receive' % h_type)
833 log.debug('Installing git hook in repo %s', repo)
834 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
835
836 if _rhodecode_hook or force_create:
837 log.debug('writing %s hook file !', h_type)
838 try:
839 with open(_hook_file, 'wb') as f:
840 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
841 tmpl = tmpl.replace('_ENV_', sys.executable)
842 f.write(tmpl)
843 os.chmod(_hook_file, 0755)
844 except IOError:
845 log.exception('error writing hook file %s', _hook_file)
846 else:
847 log.debug('skipping writing hook file')
848
849 def install_svn_hooks(self, repo, force_create=False):
850 """
851 Creates rhodecode hooks inside a svn repository
852
853 :param repo: Instance of VCS repo
854 :param force_create: Create even if same name hook exists
855 """
856 hooks_path = os.path.join(repo.path, 'hooks')
857 if not os.path.isdir(hooks_path):
858 os.makedirs(hooks_path)
859 post_commit_tmpl = pkg_resources.resource_string(
860 'rhodecode', '/'.join(
861 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
862 pre_commit_template = pkg_resources.resource_string(
863 'rhodecode', '/'.join(
864 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
865 templates = {
866 'post-commit': post_commit_tmpl,
867 'pre-commit': pre_commit_template
868 }
869 for filename in templates:
870 _hook_file = os.path.join(hooks_path, filename)
871 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
872 if _rhodecode_hook or force_create:
873 log.debug('writing %s hook file !', filename)
874 template = templates[filename]
875 try:
876 with open(_hook_file, 'wb') as f:
877 template = template.replace(
878 '_TMPL_', rhodecode.__version__)
879 template = template.replace('_ENV_', sys.executable)
880 f.write(template)
881 os.chmod(_hook_file, 0755)
882 except IOError:
883 log.exception('error writing hook file %s', filename)
884 else:
885 log.debug('skipping writing hook file')
886
887 def install_hooks(self, repo, repo_type):
888 if repo_type == 'git':
889 self.install_git_hook(repo)
890 elif repo_type == 'svn':
891 self.install_svn_hooks(repo)
892
893 810 def get_server_info(self, environ=None):
894 811 server_info = get_system_info(environ)
895 812 return server_info
896
897
898 def _check_rhodecode_hook(hook_path):
899 """
900 Check if the hook was created by RhodeCode
901 """
902 if not os.path.exists(hook_path):
903 return True
904
905 log.debug('hook exists, checking if it is from rhodecode')
906 hook_content = _read_hook(hook_path)
907 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
908 if matches:
909 try:
910 version = matches.groups()[0]
911 log.debug('got %s, it is rhodecode', version)
912 return True
913 except Exception:
914 log.exception("Exception while reading the hook version.")
915
916 return False
917
918
919 def _read_hook(hook_path):
920 with open(hook_path, 'rb') as f:
921 content = f.read()
922 return content
@@ -1,499 +1,472 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import base64
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.lib.utils2 import AttributeDict
27 27 from rhodecode.tests.utils import CustomTestApp
28 28
29 29 from rhodecode.lib.caching_query import FromCache
30 30 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
31 31 from rhodecode.lib.middleware import simplevcs
32 32 from rhodecode.lib.middleware.https_fixup import HttpsFixup
33 33 from rhodecode.lib.middleware.utils import scm_app_http
34 34 from rhodecode.model.db import User, _hash_key
35 35 from rhodecode.model.meta import Session
36 36 from rhodecode.tests import (
37 37 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
38 38 from rhodecode.tests.lib.middleware import mock_scm_app
39 39
40 40
41 41 class StubVCSController(simplevcs.SimpleVCS):
42 42
43 43 SCM = 'hg'
44 44 stub_response_body = tuple()
45 45
46 46 def __init__(self, *args, **kwargs):
47 47 super(StubVCSController, self).__init__(*args, **kwargs)
48 48 self._action = 'pull'
49 49 self._is_shadow_repo_dir = True
50 50 self._name = HG_REPO
51 51 self.set_repo_names(None)
52 52
53 53 @property
54 54 def is_shadow_repo_dir(self):
55 55 return self._is_shadow_repo_dir
56 56
57 57 def _get_repository_name(self, environ):
58 58 return self._name
59 59
60 60 def _get_action(self, environ):
61 61 return self._action
62 62
63 63 def _create_wsgi_app(self, repo_path, repo_name, config):
64 64 def fake_app(environ, start_response):
65 65 headers = [
66 66 ('Http-Accept', 'application/mercurial')
67 67 ]
68 68 start_response('200 OK', headers)
69 69 return self.stub_response_body
70 70 return fake_app
71 71
72 72 def _create_config(self, extras, repo_name):
73 73 return None
74 74
75 75
76 76 @pytest.fixture
77 77 def vcscontroller(baseapp, config_stub, request_stub):
78 78 config_stub.testing_securitypolicy()
79 79 config_stub.include('rhodecode.authentication')
80 80
81 81 controller = StubVCSController(
82 82 baseapp.config.get_settings(), request_stub.registry)
83 83 app = HttpsFixup(controller, baseapp.config.get_settings())
84 84 app = CustomTestApp(app)
85 85
86 86 _remove_default_user_from_query_cache()
87 87
88 88 # Sanity checks that things are set up correctly
89 89 app.get('/' + HG_REPO, status=200)
90 90
91 91 app.controller = controller
92 92 return app
93 93
94 94
95 95 def _remove_default_user_from_query_cache():
96 96 user = User.get_default_user(cache=True)
97 97 query = Session().query(User).filter(User.username == user.username)
98 98 query = query.options(
99 99 FromCache("sql_cache_short", "get_user_%s" % _hash_key(user.username)))
100 100 query.invalidate()
101 101 Session().expire(user)
102 102
103 103
104 104 def test_handles_exceptions_during_permissions_checks(
105 105 vcscontroller, disable_anonymous_user):
106 106 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
107 107 auth_password = base64.encodestring(user_and_pass).strip()
108 108 extra_environ = {
109 109 'AUTH_TYPE': 'Basic',
110 110 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
111 111 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
112 112 }
113 113
114 114 # Verify that things are hooked up correctly
115 115 vcscontroller.get('/', status=200, extra_environ=extra_environ)
116 116
117 117 # Simulate trouble during permission checks
118 118 with mock.patch('rhodecode.model.db.User.get_by_username',
119 119 side_effect=Exception) as get_user:
120 120 # Verify that a correct 500 is returned and check that the expected
121 121 # code path was hit.
122 122 vcscontroller.get('/', status=500, extra_environ=extra_environ)
123 123 assert get_user.called
124 124
125 125
126 126 def test_returns_forbidden_if_no_anonymous_access(
127 127 vcscontroller, disable_anonymous_user):
128 128 vcscontroller.get('/', status=401)
129 129
130 130
131 131 class StubFailVCSController(simplevcs.SimpleVCS):
132 132 def _handle_request(self, environ, start_response):
133 133 raise Exception("BOOM")
134 134
135 135
136 136 @pytest.fixture(scope='module')
137 137 def fail_controller(baseapp):
138 138 controller = StubFailVCSController(
139 139 baseapp.config.get_settings(), baseapp.config)
140 140 controller = HttpsFixup(controller, baseapp.config.get_settings())
141 141 controller = CustomTestApp(controller)
142 142 return controller
143 143
144 144
145 145 def test_handles_exceptions_as_internal_server_error(fail_controller):
146 146 fail_controller.get('/', status=500)
147 147
148 148
149 149 def test_provides_traceback_for_appenlight(fail_controller):
150 150 response = fail_controller.get(
151 151 '/', status=500, extra_environ={'appenlight.client': 'fake'})
152 152 assert 'appenlight.__traceback' in response.request.environ
153 153
154 154
155 155 def test_provides_utils_scm_app_as_scm_app_by_default(baseapp, request_stub):
156 156 controller = StubVCSController(baseapp.config.get_settings(), request_stub.registry)
157 157 assert controller.scm_app is scm_app_http
158 158
159 159
160 160 def test_allows_to_override_scm_app_via_config(baseapp, request_stub):
161 161 config = baseapp.config.get_settings().copy()
162 162 config['vcs.scm_app_implementation'] = (
163 163 'rhodecode.tests.lib.middleware.mock_scm_app')
164 164 controller = StubVCSController(config, request_stub.registry)
165 165 assert controller.scm_app is mock_scm_app
166 166
167 167
168 168 @pytest.mark.parametrize('query_string, expected', [
169 169 ('cmd=stub_command', True),
170 170 ('cmd=listkeys', False),
171 171 ])
172 172 def test_should_check_locking(query_string, expected):
173 173 result = simplevcs._should_check_locking(query_string)
174 174 assert result == expected
175 175
176 176
177 177 class TestShadowRepoRegularExpression(object):
178 178 pr_segment = 'pull-request'
179 179 shadow_segment = 'repository'
180 180
181 181 @pytest.mark.parametrize('url, expected', [
182 182 # repo with/without groups
183 183 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
184 184 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
185 185 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
186 186 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
187 187
188 188 # pull request ID
189 189 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
190 190 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
191 191 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
192 192 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
193 193
194 194 # unicode
195 195 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
196 196 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
197 197
198 198 # trailing/leading slash
199 199 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
200 200 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
201 201 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
202 202
203 203 # misc
204 204 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
205 205 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
206 206 ])
207 207 def test_shadow_repo_regular_expression(self, url, expected):
208 208 from rhodecode.lib.middleware.simplevcs import SimpleVCS
209 209 url = url.format(
210 210 pr_segment=self.pr_segment,
211 211 shadow_segment=self.shadow_segment)
212 212 match_obj = SimpleVCS.shadow_repo_re.match(url)
213 213 assert (match_obj is not None) == expected
214 214
215 215
216 216 @pytest.mark.backends('git', 'hg')
217 217 class TestShadowRepoExposure(object):
218 218
219 219 def test_pull_on_shadow_repo_propagates_to_wsgi_app(
220 220 self, baseapp, request_stub):
221 221 """
222 222 Check that a pull action to a shadow repo is propagated to the
223 223 underlying wsgi app.
224 224 """
225 225 controller = StubVCSController(
226 226 baseapp.config.get_settings(), request_stub.registry)
227 227 controller._check_ssl = mock.Mock()
228 228 controller.is_shadow_repo = True
229 229 controller._action = 'pull'
230 230 controller._is_shadow_repo_dir = True
231 231 controller.stub_response_body = 'dummy body value'
232 232 controller._get_default_cache_ttl = mock.Mock(
233 233 return_value=(False, 0))
234 234
235 235 environ_stub = {
236 236 'HTTP_HOST': 'test.example.com',
237 237 'HTTP_ACCEPT': 'application/mercurial',
238 238 'REQUEST_METHOD': 'GET',
239 239 'wsgi.url_scheme': 'http',
240 240 }
241 241
242 242 response = controller(environ_stub, mock.Mock())
243 243 response_body = ''.join(response)
244 244
245 245 # Assert that we got the response from the wsgi app.
246 246 assert response_body == controller.stub_response_body
247 247
248 248 def test_pull_on_shadow_repo_that_is_missing(self, baseapp, request_stub):
249 249 """
250 250 Check that a pull action to a shadow repo is propagated to the
251 251 underlying wsgi app.
252 252 """
253 253 controller = StubVCSController(
254 254 baseapp.config.get_settings(), request_stub.registry)
255 255 controller._check_ssl = mock.Mock()
256 256 controller.is_shadow_repo = True
257 257 controller._action = 'pull'
258 258 controller._is_shadow_repo_dir = False
259 259 controller.stub_response_body = 'dummy body value'
260 260 environ_stub = {
261 261 'HTTP_HOST': 'test.example.com',
262 262 'HTTP_ACCEPT': 'application/mercurial',
263 263 'REQUEST_METHOD': 'GET',
264 264 'wsgi.url_scheme': 'http',
265 265 }
266 266
267 267 response = controller(environ_stub, mock.Mock())
268 268 response_body = ''.join(response)
269 269
270 270 # Assert that we got the response from the wsgi app.
271 271 assert '404 Not Found' in response_body
272 272
273 273 def test_push_on_shadow_repo_raises(self, baseapp, request_stub):
274 274 """
275 275 Check that a push action to a shadow repo is aborted.
276 276 """
277 277 controller = StubVCSController(
278 278 baseapp.config.get_settings(), request_stub.registry)
279 279 controller._check_ssl = mock.Mock()
280 280 controller.is_shadow_repo = True
281 281 controller._action = 'push'
282 282 controller.stub_response_body = 'dummy body value'
283 283 environ_stub = {
284 284 'HTTP_HOST': 'test.example.com',
285 285 'HTTP_ACCEPT': 'application/mercurial',
286 286 'REQUEST_METHOD': 'GET',
287 287 'wsgi.url_scheme': 'http',
288 288 }
289 289
290 290 response = controller(environ_stub, mock.Mock())
291 291 response_body = ''.join(response)
292 292
293 293 assert response_body != controller.stub_response_body
294 294 # Assert that a 406 error is returned.
295 295 assert '406 Not Acceptable' in response_body
296 296
297 297 def test_set_repo_names_no_shadow(self, baseapp, request_stub):
298 298 """
299 299 Check that the set_repo_names method sets all names to the one returned
300 300 by the _get_repository_name method on a request to a non shadow repo.
301 301 """
302 302 environ_stub = {}
303 303 controller = StubVCSController(
304 304 baseapp.config.get_settings(), request_stub.registry)
305 305 controller._name = 'RepoGroup/MyRepo'
306 306 controller.set_repo_names(environ_stub)
307 307 assert not controller.is_shadow_repo
308 308 assert (controller.url_repo_name ==
309 309 controller.acl_repo_name ==
310 310 controller.vcs_repo_name ==
311 311 controller._get_repository_name(environ_stub))
312 312
313 313 def test_set_repo_names_with_shadow(
314 314 self, baseapp, pr_util, config_stub, request_stub):
315 315 """
316 316 Check that the set_repo_names method sets correct names on a request
317 317 to a shadow repo.
318 318 """
319 319 from rhodecode.model.pull_request import PullRequestModel
320 320
321 321 pull_request = pr_util.create_pull_request()
322 322 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
323 323 target=pull_request.target_repo.repo_name,
324 324 pr_id=pull_request.pull_request_id,
325 325 pr_segment=TestShadowRepoRegularExpression.pr_segment,
326 326 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
327 327 controller = StubVCSController(
328 328 baseapp.config.get_settings(), request_stub.registry)
329 329 controller._name = shadow_url
330 330 controller.set_repo_names({})
331 331
332 332 # Get file system path to shadow repo for assertions.
333 333 workspace_id = PullRequestModel()._workspace_id(pull_request)
334 334 target_vcs = pull_request.target_repo.scm_instance()
335 335 vcs_repo_name = target_vcs._get_shadow_repository_path(
336 336 workspace_id)
337 337
338 338 assert controller.vcs_repo_name == vcs_repo_name
339 339 assert controller.url_repo_name == shadow_url
340 340 assert controller.acl_repo_name == pull_request.target_repo.repo_name
341 341 assert controller.is_shadow_repo
342 342
343 343 def test_set_repo_names_with_shadow_but_missing_pr(
344 344 self, baseapp, pr_util, config_stub, request_stub):
345 345 """
346 346 Checks that the set_repo_names method enforces matching target repos
347 347 and pull request IDs.
348 348 """
349 349 pull_request = pr_util.create_pull_request()
350 350 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
351 351 target=pull_request.target_repo.repo_name,
352 352 pr_id=999999999,
353 353 pr_segment=TestShadowRepoRegularExpression.pr_segment,
354 354 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
355 355 controller = StubVCSController(
356 356 baseapp.config.get_settings(), request_stub.registry)
357 357 controller._name = shadow_url
358 358 controller.set_repo_names({})
359 359
360 360 assert not controller.is_shadow_repo
361 361 assert (controller.url_repo_name ==
362 362 controller.acl_repo_name ==
363 363 controller.vcs_repo_name)
364 364
365 365
366 366 @pytest.mark.usefixtures('baseapp')
367 367 class TestGenerateVcsResponse(object):
368 368
369 369 def test_ensures_that_start_response_is_called_early_enough(self):
370 370 self.call_controller_with_response_body(iter(['a', 'b']))
371 371 assert self.start_response.called
372 372
373 373 def test_invalidates_cache_after_body_is_consumed(self):
374 374 result = self.call_controller_with_response_body(iter(['a', 'b']))
375 375 assert not self.was_cache_invalidated()
376 376 # Consume the result
377 377 list(result)
378 378 assert self.was_cache_invalidated()
379 379
380 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
381 def test_handles_locking_exception(self, http_locked_rc):
382 result = self.call_controller_with_response_body(
383 self.raise_result_iter(vcs_kind='repo_locked'))
384 assert not http_locked_rc.called
385 # Consume the result
386 list(result)
387 assert http_locked_rc.called
388
389 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPRequirementError')
390 def test_handles_requirement_exception(self, http_requirement):
391 result = self.call_controller_with_response_body(
392 self.raise_result_iter(vcs_kind='requirement'))
393 assert not http_requirement.called
394 # Consume the result
395 list(result)
396 assert http_requirement.called
397
398 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
399 def test_handles_locking_exception_in_app_call(self, http_locked_rc):
400 app_factory_patcher = mock.patch.object(
401 StubVCSController, '_create_wsgi_app')
402 with app_factory_patcher as app_factory:
403 app_factory().side_effect = self.vcs_exception()
404 result = self.call_controller_with_response_body(['a'])
405 list(result)
406 assert http_locked_rc.called
407
408 380 def test_raises_unknown_exceptions(self):
409 381 result = self.call_controller_with_response_body(
410 382 self.raise_result_iter(vcs_kind='unknown'))
411 383 with pytest.raises(Exception):
412 384 list(result)
413 385
414 386 def test_prepare_callback_daemon_is_called(self):
415 def side_effect(extras):
387 def side_effect(extras, environ, action, txn_id=None):
416 388 return DummyHooksCallbackDaemon(), extras
417 389
418 390 prepare_patcher = mock.patch.object(
419 391 StubVCSController, '_prepare_callback_daemon')
420 392 with prepare_patcher as prepare_mock:
421 393 prepare_mock.side_effect = side_effect
422 394 self.call_controller_with_response_body(iter(['a', 'b']))
423 395 assert prepare_mock.called
424 396 assert prepare_mock.call_count == 1
425 397
426 398 def call_controller_with_response_body(self, response_body):
427 399 settings = {
428 400 'base_path': 'fake_base_path',
429 401 'vcs.hooks.protocol': 'http',
430 402 'vcs.hooks.direct_calls': False,
431 403 }
432 404 registry = AttributeDict()
433 405 controller = StubVCSController(settings, registry)
434 406 controller._invalidate_cache = mock.Mock()
435 407 controller.stub_response_body = response_body
436 408 self.start_response = mock.Mock()
437 409 result = controller._generate_vcs_response(
438 410 environ={}, start_response=self.start_response,
439 411 repo_path='fake_repo_path',
440 412 extras={}, action='push')
441 413 self.controller = controller
442 414 return result
443 415
444 416 def raise_result_iter(self, vcs_kind='repo_locked'):
445 417 """
446 418 Simulates an exception due to a vcs raised exception if kind vcs_kind
447 419 """
448 420 raise self.vcs_exception(vcs_kind=vcs_kind)
449 421 yield "never_reached"
450 422
451 423 def vcs_exception(self, vcs_kind='repo_locked'):
452 424 locked_exception = Exception('TEST_MESSAGE')
453 425 locked_exception._vcs_kind = vcs_kind
454 426 return locked_exception
455 427
456 428 def was_cache_invalidated(self):
457 429 return self.controller._invalidate_cache.called
458 430
459 431
460 432 class TestInitializeGenerator(object):
461 433
462 434 def test_drains_first_element(self):
463 435 gen = self.factory(['__init__', 1, 2])
464 436 result = list(gen)
465 437 assert result == [1, 2]
466 438
467 439 @pytest.mark.parametrize('values', [
468 440 [],
469 441 [1, 2],
470 442 ])
471 443 def test_raises_value_error(self, values):
472 444 with pytest.raises(ValueError):
473 445 self.factory(values)
474 446
475 447 @simplevcs.initialize_generator
476 448 def factory(self, iterable):
477 449 for elem in iterable:
478 450 yield elem
479 451
480 452
481 453 class TestPrepareHooksDaemon(object):
482 454 def test_calls_imported_prepare_callback_daemon(self, app_settings, request_stub):
483 455 expected_extras = {'extra1': 'value1'}
484 456 daemon = DummyHooksCallbackDaemon()
485 457
486 458 controller = StubVCSController(app_settings, request_stub.registry)
487 459 prepare_patcher = mock.patch.object(
488 460 simplevcs, 'prepare_callback_daemon',
489 461 return_value=(daemon, expected_extras))
490 462 with prepare_patcher as prepare_mock:
491 463 callback_daemon, extras = controller._prepare_callback_daemon(
492 expected_extras.copy())
464 expected_extras.copy(), {}, 'push')
493 465 prepare_mock.assert_called_once_with(
494 466 expected_extras,
495 467 protocol=app_settings['vcs.hooks.protocol'],
468 txn_id=None,
496 469 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
497 470
498 471 assert callback_daemon == daemon
499 472 assert extras == extras
@@ -1,321 +1,329 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import json
22 22 import logging
23 23 from StringIO import StringIO
24 24
25 25 import mock
26 26 import pytest
27 27
28 28 from rhodecode.lib import hooks_daemon
29 29 from rhodecode.tests.utils import assert_message_in_log
30 30
31 31
32 32 class TestDummyHooksCallbackDaemon(object):
33 33 def test_hooks_module_path_set_properly(self):
34 34 daemon = hooks_daemon.DummyHooksCallbackDaemon()
35 35 assert daemon.hooks_module == 'rhodecode.lib.hooks_daemon'
36 36
37 37 def test_logs_entering_the_hook(self):
38 38 daemon = hooks_daemon.DummyHooksCallbackDaemon()
39 39 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
40 40 with daemon as return_value:
41 41 log_mock.assert_called_once_with(
42 42 'Running dummy hooks callback daemon')
43 43 assert return_value == daemon
44 44
45 45 def test_logs_exiting_the_hook(self):
46 46 daemon = hooks_daemon.DummyHooksCallbackDaemon()
47 47 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
48 48 with daemon:
49 49 pass
50 50 log_mock.assert_called_with('Exiting dummy hooks callback daemon')
51 51
52 52
53 53 class TestHooks(object):
54 54 def test_hooks_can_be_used_as_a_context_processor(self):
55 55 hooks = hooks_daemon.Hooks()
56 56 with hooks as return_value:
57 57 pass
58 58 assert hooks == return_value
59 59
60 60
61 61 class TestHooksHttpHandler(object):
62 62 def test_read_request_parses_method_name_and_arguments(self):
63 63 data = {
64 64 'method': 'test',
65 65 'extras': {
66 66 'param1': 1,
67 67 'param2': 'a'
68 68 }
69 69 }
70 70 request = self._generate_post_request(data)
71 71 hooks_patcher = mock.patch.object(
72 72 hooks_daemon.Hooks, data['method'], create=True, return_value=1)
73 73
74 74 with hooks_patcher as hooks_mock:
75 75 MockServer(hooks_daemon.HooksHttpHandler, request)
76 76
77 77 hooks_mock.assert_called_once_with(data['extras'])
78 78
79 79 def test_hooks_serialized_result_is_returned(self):
80 80 request = self._generate_post_request({})
81 81 rpc_method = 'test'
82 82 hook_result = {
83 83 'first': 'one',
84 84 'second': 2
85 85 }
86 86 read_patcher = mock.patch.object(
87 87 hooks_daemon.HooksHttpHandler, '_read_request',
88 88 return_value=(rpc_method, {}))
89 89 hooks_patcher = mock.patch.object(
90 90 hooks_daemon.Hooks, rpc_method, create=True,
91 91 return_value=hook_result)
92 92
93 93 with read_patcher, hooks_patcher:
94 94 server = MockServer(hooks_daemon.HooksHttpHandler, request)
95 95
96 96 expected_result = json.dumps(hook_result)
97 97 assert server.request.output_stream.buflist[-1] == expected_result
98 98
99 99 def test_exception_is_returned_in_response(self):
100 100 request = self._generate_post_request({})
101 101 rpc_method = 'test'
102 102 read_patcher = mock.patch.object(
103 103 hooks_daemon.HooksHttpHandler, '_read_request',
104 104 return_value=(rpc_method, {}))
105 105 hooks_patcher = mock.patch.object(
106 106 hooks_daemon.Hooks, rpc_method, create=True,
107 107 side_effect=Exception('Test exception'))
108 108
109 109 with read_patcher, hooks_patcher:
110 110 server = MockServer(hooks_daemon.HooksHttpHandler, request)
111 111
112 112 org_exc = json.loads(server.request.output_stream.buflist[-1])
113 113 expected_result = {
114 114 'exception': 'Exception',
115 115 'exception_traceback': org_exc['exception_traceback'],
116 116 'exception_args': ['Test exception']
117 117 }
118 118 assert org_exc == expected_result
119 119
120 120 def test_log_message_writes_to_debug_log(self, caplog):
121 121 ip_port = ('0.0.0.0', 8888)
122 122 handler = hooks_daemon.HooksHttpHandler(
123 123 MockRequest('POST /'), ip_port, mock.Mock())
124 124 fake_date = '1/Nov/2015 00:00:00'
125 125 date_patcher = mock.patch.object(
126 126 handler, 'log_date_time_string', return_value=fake_date)
127 127 with date_patcher, caplog.at_level(logging.DEBUG):
128 128 handler.log_message('Some message %d, %s', 123, 'string')
129 129
130 130 expected_message = '{} - - [{}] Some message 123, string'.format(
131 131 ip_port[0], fake_date)
132 132 assert_message_in_log(
133 133 caplog.records, expected_message,
134 134 levelno=logging.DEBUG, module='hooks_daemon')
135 135
136 136 def _generate_post_request(self, data):
137 137 payload = json.dumps(data)
138 138 return 'POST / HTTP/1.0\nContent-Length: {}\n\n{}'.format(
139 139 len(payload), payload)
140 140
141 141
142 142 class ThreadedHookCallbackDaemon(object):
143 143 def test_constructor_calls_prepare(self):
144 144 prepare_daemon_patcher = mock.patch.object(
145 145 hooks_daemon.ThreadedHookCallbackDaemon, '_prepare')
146 146 with prepare_daemon_patcher as prepare_daemon_mock:
147 147 hooks_daemon.ThreadedHookCallbackDaemon()
148 148 prepare_daemon_mock.assert_called_once_with()
149 149
150 150 def test_run_is_called_on_context_start(self):
151 151 patchers = mock.patch.multiple(
152 152 hooks_daemon.ThreadedHookCallbackDaemon,
153 153 _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)
154 154
155 155 with patchers as mocks:
156 156 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
157 157 with daemon as daemon_context:
158 158 pass
159 159 mocks['_run'].assert_called_once_with()
160 160 assert daemon_context == daemon
161 161
162 162 def test_stop_is_called_on_context_exit(self):
163 163 patchers = mock.patch.multiple(
164 164 hooks_daemon.ThreadedHookCallbackDaemon,
165 165 _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)
166 166
167 167 with patchers as mocks:
168 168 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
169 169 with daemon as daemon_context:
170 170 assert mocks['_stop'].call_count == 0
171 171
172 172 mocks['_stop'].assert_called_once_with()
173 173 assert daemon_context == daemon
174 174
175 175
176 176 class TestHttpHooksCallbackDaemon(object):
177 177 def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
178 178 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
179 179 daemon = hooks_daemon.HttpHooksCallbackDaemon()
180 180 assert daemon._daemon == tcp_server
181 181
182 _, port = tcp_server.server_address
183 expected_uri = '{}:{}'.format(daemon.IP_ADDRESS, port)
184 msg = 'Preparing HTTP callback daemon at `{}` and ' \
185 'registering hook object'.format(expected_uri)
182 186 assert_message_in_log(
183 caplog.records,
184 'Preparing HTTP callback daemon and registering hook object',
185 levelno=logging.DEBUG, module='hooks_daemon')
187 caplog.records, msg, levelno=logging.DEBUG, module='hooks_daemon')
186 188
187 189 def test_prepare_inits_hooks_uri_and_logs_it(
188 190 self, tcp_server, caplog):
189 191 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
190 192 daemon = hooks_daemon.HttpHooksCallbackDaemon()
191 193
192 194 _, port = tcp_server.server_address
193 195 expected_uri = '{}:{}'.format(daemon.IP_ADDRESS, port)
194 196 assert daemon.hooks_uri == expected_uri
195 197
198 msg = 'Preparing HTTP callback daemon at `{}` and ' \
199 'registering hook object'.format(expected_uri)
196 200 assert_message_in_log(
197 caplog.records, 'Hooks uri is: {}'.format(expected_uri),
201 caplog.records, msg,
198 202 levelno=logging.DEBUG, module='hooks_daemon')
199 203
200 204 def test_run_creates_a_thread(self, tcp_server):
201 205 thread = mock.Mock()
202 206
203 207 with self._tcp_patcher(tcp_server):
204 208 daemon = hooks_daemon.HttpHooksCallbackDaemon()
205 209
206 210 with self._thread_patcher(thread) as thread_mock:
207 211 daemon._run()
208 212
209 213 thread_mock.assert_called_once_with(
210 214 target=tcp_server.serve_forever,
211 215 kwargs={'poll_interval': daemon.POLL_INTERVAL})
212 216 assert thread.daemon is True
213 217 thread.start.assert_called_once_with()
214 218
215 219 def test_run_logs(self, tcp_server, caplog):
216 220
217 221 with self._tcp_patcher(tcp_server):
218 222 daemon = hooks_daemon.HttpHooksCallbackDaemon()
219 223
220 224 with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
221 225 daemon._run()
222 226
223 227 assert_message_in_log(
224 228 caplog.records,
225 229 'Running event loop of callback daemon in background thread',
226 230 levelno=logging.DEBUG, module='hooks_daemon')
227 231
228 232 def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
229 233 thread = mock.Mock()
230 234
231 235 with self._tcp_patcher(tcp_server):
232 236 daemon = hooks_daemon.HttpHooksCallbackDaemon()
233 237
234 238 with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
235 239 with daemon:
236 240 assert daemon._daemon == tcp_server
237 241 assert daemon._callback_thread == thread
238 242
239 243 assert daemon._daemon is None
240 244 assert daemon._callback_thread is None
241 245 tcp_server.shutdown.assert_called_with()
242 246 thread.join.assert_called_once_with()
243 247
244 248 assert_message_in_log(
245 249 caplog.records, 'Waiting for background thread to finish.',
246 250 levelno=logging.DEBUG, module='hooks_daemon')
247 251
248 252 def _tcp_patcher(self, tcp_server):
249 253 return mock.patch.object(
250 254 hooks_daemon, 'TCPServer', return_value=tcp_server)
251 255
252 256 def _thread_patcher(self, thread):
253 257 return mock.patch.object(
254 258 hooks_daemon.threading, 'Thread', return_value=thread)
255 259
256 260
257 261 class TestPrepareHooksDaemon(object):
258 262 @pytest.mark.parametrize('protocol', ('http',))
259 263 def test_returns_dummy_hooks_callback_daemon_when_using_direct_calls(
260 264 self, protocol):
261 265 expected_extras = {'extra1': 'value1'}
262 266 callback, extras = hooks_daemon.prepare_callback_daemon(
263 267 expected_extras.copy(), protocol=protocol, use_direct_calls=True)
264 268 assert isinstance(callback, hooks_daemon.DummyHooksCallbackDaemon)
265 269 expected_extras['hooks_module'] = 'rhodecode.lib.hooks_daemon'
266 assert extras == expected_extras
270 expected_extras['time'] = extras['time']
271 assert 'extra1' in extras
267 272
268 273 @pytest.mark.parametrize('protocol, expected_class', (
269 274 ('http', hooks_daemon.HttpHooksCallbackDaemon),
270 275 ))
271 276 def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
272 277 self, protocol, expected_class):
273 278 expected_extras = {
274 279 'extra1': 'value1',
280 'txn_id': 'txnid2',
275 281 'hooks_protocol': protocol.lower()
276 282 }
277 283 callback, extras = hooks_daemon.prepare_callback_daemon(
278 expected_extras.copy(), protocol=protocol, use_direct_calls=False)
284 expected_extras.copy(), protocol=protocol, use_direct_calls=False,
285 txn_id='txnid2')
279 286 assert isinstance(callback, expected_class)
280 hooks_uri = extras.pop('hooks_uri')
287 extras.pop('hooks_uri')
288 expected_extras['time'] = extras['time']
281 289 assert extras == expected_extras
282 290
283 291 @pytest.mark.parametrize('protocol', (
284 292 'invalid',
285 293 'Http',
286 294 'HTTP',
287 295 ))
288 296 def test_raises_on_invalid_protocol(self, protocol):
289 297 expected_extras = {
290 298 'extra1': 'value1',
291 299 'hooks_protocol': protocol.lower()
292 300 }
293 301 with pytest.raises(Exception):
294 302 callback, extras = hooks_daemon.prepare_callback_daemon(
295 303 expected_extras.copy(),
296 304 protocol=protocol,
297 305 use_direct_calls=False)
298 306
299 307
300 308 class MockRequest(object):
301 309 def __init__(self, request):
302 310 self.request = request
303 311 self.input_stream = StringIO(b'{}'.format(self.request))
304 312 self.output_stream = StringIO()
305 313
306 314 def makefile(self, mode, *args, **kwargs):
307 315 return self.output_stream if mode == 'wb' else self.input_stream
308 316
309 317
310 318 class MockServer(object):
311 319 def __init__(self, Handler, request):
312 320 ip_port = ('0.0.0.0', 8888)
313 321 self.request = MockRequest(request)
314 322 self.handler = Handler(self.request, ip_port, self)
315 323
316 324
317 325 @pytest.fixture
318 326 def tcp_server():
319 327 server = mock.Mock()
320 328 server.server_address = ('127.0.0.1', 8881)
321 329 return server
@@ -1,456 +1,446 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import json
22 22 import multiprocessing
23 23 import os
24 24
25 25 import mock
26 26 import py
27 27 import pytest
28 28
29 29 from rhodecode.lib import caching_query
30 30 from rhodecode.lib import utils
31 31 from rhodecode.lib.utils2 import md5
32 32 from rhodecode.model import settings
33 33 from rhodecode.model import db
34 34 from rhodecode.model import meta
35 35 from rhodecode.model.repo import RepoModel
36 36 from rhodecode.model.repo_group import RepoGroupModel
37 37 from rhodecode.model.scm import ScmModel
38 38 from rhodecode.model.settings import UiSetting, SettingsModel
39 39 from rhodecode.tests.fixture import Fixture
40 40
41 41
42 42 fixture = Fixture()
43 43
44 44
45 45 def extract_hooks(config):
46 46 """Return a dictionary with the hook entries of the given config."""
47 47 hooks = {}
48 48 config_items = config.serialize()
49 49 for section, name, value in config_items:
50 50 if section != 'hooks':
51 51 continue
52 52 hooks[name] = value
53 53
54 54 return hooks
55 55
56 56
57 57 def disable_hooks(request, hooks):
58 58 """Disables the given hooks from the UI settings."""
59 59 session = meta.Session()
60 60
61 61 model = SettingsModel()
62 62 for hook_key in hooks:
63 63 sett = model.get_ui_by_key(hook_key)
64 64 sett.ui_active = False
65 65 session.add(sett)
66 66
67 67 # Invalidate cache
68 68 ui_settings = session.query(db.RhodeCodeUi).options(
69 69 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
70 70 ui_settings.invalidate()
71 71
72 72 ui_settings = session.query(db.RhodeCodeUi).options(
73 73 caching_query.FromCache(
74 74 'sql_cache_short', 'get_hook_settings', 'get_hook_settings'))
75 75 ui_settings.invalidate()
76 76
77 77 @request.addfinalizer
78 78 def rollback():
79 79 session.rollback()
80 80
81 81
82 82 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
83 83 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
84 84 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
85 85 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
86 86 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
87 87 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
88 88 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
89 89
90 90 HG_HOOKS = frozenset(
91 91 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
92 92 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
93 93
94 94
95 95 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
96 96 ([], HG_HOOKS),
97 97 (HG_HOOKS, []),
98 98
99 99 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
100 100
101 101 # When a pull/push hook is disabled, its pre-pull/push counterpart should
102 102 # be disabled too.
103 103 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
104 104 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
105 105 HOOK_PUSH_KEY]),
106 106 ])
107 107 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
108 108 expected_hooks):
109 109 disable_hooks(request, disabled_hooks)
110 110
111 111 config = utils.make_db_config()
112 112 hooks = extract_hooks(config)
113 113
114 114 assert set(hooks.iterkeys()).intersection(HG_HOOKS) == set(expected_hooks)
115 115
116 116
117 117 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
118 118 ([], ['pull', 'push']),
119 119 ([HOOK_PUSH], ['pull']),
120 120 ([HOOK_PULL], ['push']),
121 121 ([HOOK_PULL, HOOK_PUSH], []),
122 122 ])
123 123 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
124 124 hook_keys = (HOOK_PUSH, HOOK_PULL)
125 125 ui_settings = [
126 126 ('hooks', key, 'some value', key not in disabled_hooks)
127 127 for key in hook_keys]
128 128
129 129 result = utils.get_enabled_hook_classes(ui_settings)
130 130 assert sorted(result) == expected_hooks
131 131
132 132
133 133 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
134 134 _stub_git_repo(tmpdir.ensure('repo', dir=True))
135 135 repos = list(utils.get_filesystem_repos(str(tmpdir)))
136 136 assert repos == [('repo', ('git', tmpdir.join('repo')))]
137 137
138 138
139 139 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
140 140 tmpdir.ensure('not-a-repo', dir=True)
141 141 repos = list(utils.get_filesystem_repos(str(tmpdir)))
142 142 assert repos == []
143 143
144 144
145 145 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
146 146 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
147 147 repos = list(utils.get_filesystem_repos(str(tmpdir)))
148 148 assert repos == []
149 149
150 150
151 151 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
152 152 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
153 153 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
154 154 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
155 155
156 156
157 157 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
158 158 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
159 159 repos = list(utils.get_filesystem_repos(str(tmpdir)))
160 160 assert repos == []
161 161
162 162
163 163 def test_get_filesystem_repos_skips_files(tmpdir):
164 164 tmpdir.ensure('test-file')
165 165 repos = list(utils.get_filesystem_repos(str(tmpdir)))
166 166 assert repos == []
167 167
168 168
169 169 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
170 170 removed_repo_name = 'rm__00000000_000000_000000__.stub'
171 171 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
172 172 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
173 173 repos = list(utils.get_filesystem_repos(str(tmpdir)))
174 174 assert repos == []
175 175
176 176
177 177 def _stub_git_repo(repo_path):
178 178 """
179 179 Make `repo_path` look like a Git repository.
180 180 """
181 181 repo_path.ensure('.git', dir=True)
182 182
183 183
184 184 @pytest.mark.parametrize('str_class', [str, unicode], ids=['str', 'unicode'])
185 185 def test_get_dirpaths_returns_all_paths(tmpdir, str_class):
186 186 tmpdir.ensure('test-file')
187 187 dirpaths = utils._get_dirpaths(str_class(tmpdir))
188 188 assert dirpaths == ['test-file']
189 189
190 190
191 191 def test_get_dirpaths_returns_all_paths_bytes(
192 192 tmpdir, platform_encodes_filenames):
193 193 if platform_encodes_filenames:
194 194 pytest.skip("This platform seems to encode filenames.")
195 195 tmpdir.ensure('repo-a-umlaut-\xe4')
196 196 dirpaths = utils._get_dirpaths(str(tmpdir))
197 197 assert dirpaths == ['repo-a-umlaut-\xe4']
198 198
199 199
200 200 def test_get_dirpaths_skips_paths_it_cannot_decode(
201 201 tmpdir, platform_encodes_filenames):
202 202 if platform_encodes_filenames:
203 203 pytest.skip("This platform seems to encode filenames.")
204 204 path_with_latin1 = 'repo-a-umlaut-\xe4'
205 205 tmpdir.ensure(path_with_latin1)
206 206 dirpaths = utils._get_dirpaths(unicode(tmpdir))
207 207 assert dirpaths == []
208 208
209 209
210 210 @pytest.fixture(scope='session')
211 211 def platform_encodes_filenames():
212 212 """
213 213 Boolean indicator if the current platform changes filename encodings.
214 214 """
215 215 path_with_latin1 = 'repo-a-umlaut-\xe4'
216 216 tmpdir = py.path.local.mkdtemp()
217 217 tmpdir.ensure(path_with_latin1)
218 218 read_path = tmpdir.listdir()[0].basename
219 219 tmpdir.remove()
220 220 return path_with_latin1 != read_path
221 221
222 222
223 223
224 224
225 225 def test_repo2db_mapper_groups(repo_groups):
226 226 session = meta.Session()
227 227 zombie_group, parent_group, child_group = repo_groups
228 228 zombie_path = os.path.join(
229 229 RepoGroupModel().repos_path, zombie_group.full_path)
230 230 os.rmdir(zombie_path)
231 231
232 232 # Avoid removing test repos when calling repo2db_mapper
233 233 repo_list = {
234 234 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
235 235 }
236 236 utils.repo2db_mapper(repo_list, remove_obsolete=True)
237 237
238 238 groups_in_db = session.query(db.RepoGroup).all()
239 239 assert child_group in groups_in_db
240 240 assert parent_group in groups_in_db
241 241 assert zombie_path not in groups_in_db
242 242
243 243
244 244 def test_repo2db_mapper_enables_largefiles(backend):
245 245 repo = backend.create_repo()
246 246 repo_list = {repo.repo_name: 'test'}
247 247 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
248 with mock.patch.multiple('rhodecode.model.scm.ScmModel',
249 install_git_hook=mock.DEFAULT,
250 install_svn_hooks=mock.DEFAULT):
251 utils.repo2db_mapper(repo_list, remove_obsolete=False)
252 _, kwargs = scm_mock.call_args
253 assert kwargs['config'].get('extensions', 'largefiles') == ''
248 utils.repo2db_mapper(repo_list, remove_obsolete=False)
249 _, kwargs = scm_mock.call_args
250 assert kwargs['config'].get('extensions', 'largefiles') == ''
254 251
255 252
256 253 @pytest.mark.backends("git", "svn")
257 254 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
258 255 repo = backend.create_repo()
259 256 repo_list = {repo.repo_name: 'test'}
260 with mock.patch.object(ScmModel, 'install_hooks') as install_hooks_mock:
261 utils.repo2db_mapper(repo_list, remove_obsolete=False)
262 install_hooks_mock.assert_called_once_with(
263 repo.scm_instance(), repo_type=backend.alias)
257 utils.repo2db_mapper(repo_list, remove_obsolete=False)
264 258
265 259
266 260 @pytest.mark.backends("git", "svn")
267 261 def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
268 262 repo = backend.create_repo()
269 263 RepoModel().delete(repo, fs_remove=False)
270 264 meta.Session().commit()
271 265 repo_list = {repo.repo_name: repo.scm_instance()}
272 with mock.patch.object(ScmModel, 'install_hooks') as install_hooks_mock:
273 utils.repo2db_mapper(repo_list, remove_obsolete=False)
274 assert install_hooks_mock.call_count == 1
275 install_hooks_args, _ = install_hooks_mock.call_args
276 assert install_hooks_args[0].name == repo.repo_name
266 utils.repo2db_mapper(repo_list, remove_obsolete=False)
277 267
278 268
279 269 class TestPasswordChanged(object):
280 270 def setup(self):
281 271 self.session = {
282 272 'rhodecode_user': {
283 273 'password': '0cc175b9c0f1b6a831c399e269772661'
284 274 }
285 275 }
286 276 self.auth_user = mock.Mock()
287 277 self.auth_user.userame = 'test'
288 278 self.auth_user.password = 'abc123'
289 279
290 280 def test_returns_false_for_default_user(self):
291 281 self.auth_user.username = db.User.DEFAULT_USER
292 282 result = utils.password_changed(self.auth_user, self.session)
293 283 assert result is False
294 284
295 285 def test_returns_false_if_password_was_not_changed(self):
296 286 self.session['rhodecode_user']['password'] = md5(
297 287 self.auth_user.password)
298 288 result = utils.password_changed(self.auth_user, self.session)
299 289 assert result is False
300 290
301 291 def test_returns_true_if_password_was_changed(self):
302 292 result = utils.password_changed(self.auth_user, self.session)
303 293 assert result is True
304 294
305 295 def test_returns_true_if_auth_user_password_is_empty(self):
306 296 self.auth_user.password = None
307 297 result = utils.password_changed(self.auth_user, self.session)
308 298 assert result is True
309 299
310 300 def test_returns_true_if_session_password_is_empty(self):
311 301 self.session['rhodecode_user'].pop('password')
312 302 result = utils.password_changed(self.auth_user, self.session)
313 303 assert result is True
314 304
315 305
316 306 class TestReadOpensourceLicenses(object):
317 307 def test_success(self):
318 308 utils._license_cache = None
319 309 json_data = '''
320 310 {
321 311 "python2.7-pytest-2.7.1": {"UNKNOWN": null},
322 312 "python2.7-Markdown-2.6.2": {
323 313 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
324 314 }
325 315 }
326 316 '''
327 317 resource_string_patch = mock.patch.object(
328 318 utils.pkg_resources, 'resource_string', return_value=json_data)
329 319 with resource_string_patch:
330 320 result = utils.read_opensource_licenses()
331 321 assert result == json.loads(json_data)
332 322
333 323 def test_caching(self):
334 324 utils._license_cache = {
335 325 "python2.7-pytest-2.7.1": {
336 326 "UNKNOWN": None
337 327 },
338 328 "python2.7-Markdown-2.6.2": {
339 329 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
340 330 }
341 331 }
342 332 resource_patch = mock.patch.object(
343 333 utils.pkg_resources, 'resource_string', side_effect=Exception)
344 334 json_patch = mock.patch.object(
345 335 utils.json, 'loads', side_effect=Exception)
346 336
347 337 with resource_patch as resource_mock, json_patch as json_mock:
348 338 result = utils.read_opensource_licenses()
349 339
350 340 assert resource_mock.call_count == 0
351 341 assert json_mock.call_count == 0
352 342 assert result == utils._license_cache
353 343
354 344 def test_licenses_file_contains_no_unknown_licenses(self):
355 345 utils._license_cache = None
356 346 result = utils.read_opensource_licenses()
357 347 license_names = []
358 348 for licenses in result.values():
359 349 license_names.extend(licenses.keys())
360 350 assert 'UNKNOWN' not in license_names
361 351
362 352
363 353 class TestMakeDbConfig(object):
364 354 def test_data_from_config_data_from_db_returned(self):
365 355 test_data = [
366 356 ('section1', 'option1', 'value1'),
367 357 ('section2', 'option2', 'value2'),
368 358 ('section3', 'option3', 'value3'),
369 359 ]
370 360 with mock.patch.object(utils, 'config_data_from_db') as config_mock:
371 361 config_mock.return_value = test_data
372 362 kwargs = {'clear_session': False, 'repo': 'test_repo'}
373 363 result = utils.make_db_config(**kwargs)
374 364 config_mock.assert_called_once_with(**kwargs)
375 365 for section, option, expected_value in test_data:
376 366 value = result.get(section, option)
377 367 assert value == expected_value
378 368
379 369
380 370 class TestConfigDataFromDb(object):
381 371 def test_config_data_from_db_returns_active_settings(self):
382 372 test_data = [
383 373 UiSetting('section1', 'option1', 'value1', True),
384 374 UiSetting('section2', 'option2', 'value2', True),
385 375 UiSetting('section3', 'option3', 'value3', False),
386 376 ]
387 377 repo_name = 'test_repo'
388 378
389 379 model_patch = mock.patch.object(settings, 'VcsSettingsModel')
390 380 hooks_patch = mock.patch.object(
391 381 utils, 'get_enabled_hook_classes',
392 382 return_value=['pull', 'push', 'repo_size'])
393 383 with model_patch as model_mock, hooks_patch:
394 384 instance_mock = mock.Mock()
395 385 model_mock.return_value = instance_mock
396 386 instance_mock.get_ui_settings.return_value = test_data
397 387 result = utils.config_data_from_db(
398 388 clear_session=False, repo=repo_name)
399 389
400 390 self._assert_repo_name_passed(model_mock, repo_name)
401 391
402 392 expected_result = [
403 393 ('section1', 'option1', 'value1'),
404 394 ('section2', 'option2', 'value2'),
405 395 ]
406 396 assert result == expected_result
407 397
408 398 def _assert_repo_name_passed(self, model_mock, repo_name):
409 399 assert model_mock.call_count == 1
410 400 call_args, call_kwargs = model_mock.call_args
411 401 assert call_kwargs['repo'] == repo_name
412 402
413 403
414 404 class TestIsDirWritable(object):
415 405 def test_returns_false_when_not_writable(self):
416 406 with mock.patch('__builtin__.open', side_effect=OSError):
417 407 assert not utils._is_dir_writable('/stub-path')
418 408
419 409 def test_returns_true_when_writable(self, tmpdir):
420 410 assert utils._is_dir_writable(str(tmpdir))
421 411
422 412 def test_is_safe_against_race_conditions(self, tmpdir):
423 413 workers = multiprocessing.Pool()
424 414 directories = [str(tmpdir)] * 10
425 415 workers.map(utils._is_dir_writable, directories)
426 416
427 417
428 418 class TestGetEnabledHooks(object):
429 419 def test_only_active_hooks_are_enabled(self):
430 420 ui_settings = [
431 421 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
432 422 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
433 423 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False)
434 424 ]
435 425 result = utils.get_enabled_hook_classes(ui_settings)
436 426 assert result == ['push', 'repo_size']
437 427
438 428 def test_all_hooks_are_enabled(self):
439 429 ui_settings = [
440 430 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
441 431 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
442 432 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True)
443 433 ]
444 434 result = utils.get_enabled_hook_classes(ui_settings)
445 435 assert result == ['push', 'repo_size', 'pull']
446 436
447 437 def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
448 438 ui_settings = []
449 439 result = utils.get_enabled_hook_classes(ui_settings)
450 440 assert result == []
451 441
452 442
453 443 def test_obfuscate_url_pw():
454 444 from rhodecode.lib.utils2 import obfuscate_url_pw
455 445 engine = u'/home/repos/malmö'
456 446 assert obfuscate_url_pw(engine) No newline at end of file
@@ -1,171 +1,172 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 import os
22 import mock
23 import pytest
21 24 import tempfile
22 25
23 import mock
24 import pytest
25 26
26 27 from rhodecode.lib.exceptions import AttachedForksError
27 28 from rhodecode.lib.utils import make_db_config
28 29 from rhodecode.model.db import Repository
29 30 from rhodecode.model.meta import Session
30 31 from rhodecode.model.repo import RepoModel
31 32 from rhodecode.model.scm import ScmModel
32 33
33 34
34 35 class TestRepoModel(object):
35 36
36 37 def test_remove_repo(self, backend):
37 38 repo = backend.create_repo()
38 39 Session().commit()
39 40 RepoModel().delete(repo=repo)
40 41 Session().commit()
41 42
42 43 repos = ScmModel().repo_scan()
43 44
44 45 assert Repository.get_by_repo_name(repo_name=backend.repo_name) is None
45 46 assert repo.repo_name not in repos
46 47
47 48 def test_remove_repo_raises_exc_when_attached_forks(self, backend):
48 49 repo = backend.create_repo()
49 50 Session().commit()
50 51 backend.create_fork()
51 52 Session().commit()
52 53
53 54 with pytest.raises(AttachedForksError):
54 55 RepoModel().delete(repo=repo)
55 56
56 57 def test_remove_repo_delete_forks(self, backend):
57 58 repo = backend.create_repo()
58 59 Session().commit()
59 60
60 61 fork = backend.create_fork()
61 62 Session().commit()
62 63
63 64 fork_of_fork = backend.create_fork()
64 65 Session().commit()
65 66
66 67 RepoModel().delete(repo=repo, forks='delete')
67 68 Session().commit()
68 69
69 70 assert Repository.get_by_repo_name(repo_name=repo.repo_name) is None
70 71 assert Repository.get_by_repo_name(repo_name=fork.repo_name) is None
71 72 assert (
72 73 Repository.get_by_repo_name(repo_name=fork_of_fork.repo_name)
73 74 is None)
74 75
75 76 def test_remove_repo_detach_forks(self, backend):
76 77 repo = backend.create_repo()
77 78 Session().commit()
78 79
79 80 fork = backend.create_fork()
80 81 Session().commit()
81 82
82 83 fork_of_fork = backend.create_fork()
83 84 Session().commit()
84 85
85 86 RepoModel().delete(repo=repo, forks='detach')
86 87 Session().commit()
87 88
88 89 assert Repository.get_by_repo_name(repo_name=repo.repo_name) is None
89 90 assert (
90 91 Repository.get_by_repo_name(repo_name=fork.repo_name) is not None)
91 92 assert (
92 93 Repository.get_by_repo_name(repo_name=fork_of_fork.repo_name)
93 94 is not None)
94 95
95 96 @pytest.mark.parametrize("filename, expected", [
96 97 ("README", True),
97 98 ("README.rst", False),
98 99 ])
99 100 def test_filenode_is_link(self, vcsbackend, filename, expected):
100 101 repo = vcsbackend.repo
101 102 assert repo.get_commit().is_link(filename) is expected
102 103
103 104 def test_get_commit(self, backend):
104 105 backend.repo.get_commit()
105 106
106 107 def test_get_changeset_is_deprecated(self, backend):
107 108 repo = backend.repo
108 109 pytest.deprecated_call(repo.get_changeset)
109 110
110 111 def test_clone_url_encrypted_value(self, backend):
111 112 repo = backend.create_repo()
112 113 Session().commit()
113 114
114 115 repo.clone_url = 'https://marcink:qweqwe@code.rhodecode.com'
115 116 Session().add(repo)
116 117 Session().commit()
117 118
118 119 assert repo.clone_url == 'https://marcink:qweqwe@code.rhodecode.com'
119 120
120 121 @pytest.mark.backends("git", "svn")
121 122 def test_create_filesystem_repo_installs_hooks(self, tmpdir, backend):
122 hook_methods = {
123 'git': 'install_git_hook',
124 'svn': 'install_svn_hooks'
125 }
126 123 repo = backend.create_repo()
127 124 repo_name = repo.repo_name
128 125 model = RepoModel()
129 126 repo_location = tempfile.mkdtemp()
130 127 model.repos_path = repo_location
131 method = hook_methods[backend.alias]
132 with mock.patch.object(ScmModel, method) as hooks_mock:
133 model._create_filesystem_repo(
134 repo_name, backend.alias, repo_group='', clone_uri=None)
135 assert hooks_mock.call_count == 1
136 hook_args, hook_kwargs = hooks_mock.call_args
137 assert hook_args[0].name == repo_name
128 repo = model._create_filesystem_repo(
129 repo_name, backend.alias, repo_group='', clone_uri=None)
130
131 hooks = {
132 'svn': ('pre-commit', 'post-commit'),
133 'git': ('pre-receive', 'post-receive'),
134 }
135 for hook in hooks[backend.alias]:
136 with open(os.path.join(repo.path, 'hooks', hook)) as f:
137 data = f.read()
138 assert 'RC_HOOK_VER' in data
138 139
139 140 @pytest.mark.parametrize("use_global_config, repo_name_passed", [
140 141 (True, False),
141 142 (False, True)
142 143 ])
143 144 def test_per_repo_config_is_generated_during_filesystem_repo_creation(
144 145 self, tmpdir, backend, use_global_config, repo_name_passed):
145 146 repo_name = 'test-{}-repo-{}'.format(backend.alias, use_global_config)
146 147 config = make_db_config()
147 148 model = RepoModel()
148 149 with mock.patch('rhodecode.model.repo.make_db_config') as config_mock:
149 150 config_mock.return_value = config
150 151 model._create_filesystem_repo(
151 152 repo_name, backend.alias, repo_group='', clone_uri=None,
152 153 use_global_config=use_global_config)
153 154 expected_repo_name = repo_name if repo_name_passed else None
154 155 expected_call = mock.call(clear_session=False, repo=expected_repo_name)
155 156 assert expected_call in config_mock.call_args_list
156 157
157 158 def test_update_commit_cache_with_config(serf, backend):
158 159 repo = backend.create_repo()
159 160 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm:
160 161 scm_instance = mock.Mock()
161 162 scm_instance.get_commit.return_value = {
162 163 'raw_id': 40*'0',
163 164 'revision': 1
164 165 }
165 166 scm.return_value = scm_instance
166 167 repo.update_commit_cache()
167 168 scm.assert_called_with(cache=False, config=None)
168 169 config = {'test': 'config'}
169 170 repo.update_commit_cache(config=config)
170 171 scm.assert_called_with(
171 172 cache=False, config=config)
@@ -1,336 +1,196 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import stat
23 23 import sys
24 24
25 25 import pytest
26 26 from mock import Mock, patch, DEFAULT
27 27
28 28 import rhodecode
29 29 from rhodecode.model import db, scm
30 30 from rhodecode.tests import no_newline_id_generator
31 31
32 32
def test_scm_instance_config(backend):
    """scm_instance() forwards config/cache args and honours vcs_full_cache."""
    repo = backend.create_repo()
    targets = {'_get_instance': DEFAULT, '_get_instance_cached': DEFAULT}
    with patch.multiple('rhodecode.model.db.Repository', **targets) as mocks:
        # No config given: uncached path with defaults.
        repo.scm_instance()
        mocks['_get_instance'].assert_called_with(
            config=None, cache=False)

        # An explicit config is passed straight through.
        config = {'some': 'value'}
        repo.scm_instance(config=config)
        mocks['_get_instance'].assert_called_with(
            config=config, cache=False)

        # The global full-cache flag routes through the cached variant.
        with patch.dict(rhodecode.CONFIG, {'vcs_full_cache': 'true'}):
            repo.scm_instance(config=config)
            mocks['_get_instance_cached'].assert_called()
50 50
51 51
def test__get_instance_config(backend):
    """_get_instance() instantiates the vcs backend with the expected args."""
    repo = backend.create_repo()
    vcs_class = Mock()
    with patch.multiple('rhodecode.lib.vcs.backends',
                        get_scm=DEFAULT,
                        get_backend=DEFAULT) as mocks:
        mocks['get_scm'].return_value = backend.alias
        mocks['get_backend'].return_value = vcs_class

        # Without an explicit config the repo's own _config is used.
        with patch('rhodecode.model.db.Repository._config') as config_mock:
            repo._get_instance()
            vcs_class.assert_called_with(
                repo_path=repo.repo_full_path, config=config_mock,
                create=False, with_wire={'cache': True})

        # An explicit config overrides the repository default.
        override = {'override': 'old_config'}
        repo._get_instance(config=override)
        vcs_class.assert_called_with(
            repo_path=repo.repo_full_path, config=override, create=False,
            with_wire={'cache': True})
71 71
72 72
def test_mark_for_invalidation_config(backend):
    """Invalidation passes the repo's own config to update_commit_cache."""
    repo = backend.create_repo()
    with patch('rhodecode.model.db.Repository.update_commit_cache') as _mock:
        scm.ScmModel().mark_for_invalidation(repo.repo_name)

    _, kwargs = _mock.call_args
    assert kwargs['config'].__dict__ == repo._config.__dict__
79 79
80 80
def test_mark_for_invalidation_with_delete_updates_last_commit(backend):
    """``delete=True`` refreshes the cached revision to the repo head."""
    repo = backend.create_repo(commits=[{'message': 'A'}, {'message': 'B'}])
    scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
    assert repo.changeset_cache['revision'] == 1
86 86
87 87
def test_mark_for_invalidation_with_delete_updates_last_commit_empty(backend):
    """``delete=True`` on an empty repo leaves the sentinel revision -1."""
    repo = backend.create_repo()
    scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
    assert repo.changeset_cache['revision'] == -1
92 92
93 93
def test_strip_with_multiple_heads(backend_hg):
    """Stripping one head removes only it; the other head stays intact."""
    commits = [
        {'message': 'A'},
        {'message': 'a'},
        {'message': 'b'},
        {'message': 'B', 'parents': ['A']},
        {'message': 'a1'},
    ]
    repo = backend_hg.create_repo(commits=commits)
    commit_ids = backend_hg.commit_ids

    scm.ScmModel().strip(repo, commit_ids['b'], branch=None)

    remaining = [
        commit.raw_id
        for commit in repo.scm_instance().get_changesets()]
    assert len(remaining) == 4
    assert commit_ids['b'] not in remaining
112 112
113 113
def test_strip_with_single_heads(backend_hg):
    """Stripping the single head drops only that commit."""
    commits = [
        {'message': 'A'},
        {'message': 'a'},
        {'message': 'b'},
    ]
    repo = backend_hg.create_repo(commits=commits)
    commit_ids = backend_hg.commit_ids

    scm.ScmModel().strip(repo, commit_ids['b'], branch=None)

    remaining = [
        commit.raw_id
        for commit in repo.scm_instance().get_changesets()]
    assert len(remaining) == 2
    assert commit_ids['b'] not in remaining
129 129 assert commit_ids['b'] not in rest_commit_ids
130 130
131 131
def test_get_nodes_returns_unicode_flat(backend_random):
    """Flat node listing must yield unicode entries only."""
    repo = backend_random.repo
    commit_id = repo.get_commit(commit_idx=0).raw_id
    directories, files = scm.ScmModel().get_nodes(
        repo.repo_name, commit_id, flat=True)
    assert_contains_only_unicode(directories)
    assert_contains_only_unicode(files)
139 139
140 140
def test_get_nodes_returns_unicode_non_flat(backend_random):
    """Non-flat node listing must yield unicode names."""
    repo = backend_random.repo
    commit_id = repo.get_commit(commit_idx=0).raw_id
    directories, files = scm.ScmModel().get_nodes(
        repo.repo_name, commit_id, flat=False)
    # johbo: Checking only the names for now, since that is the critical
    # part.
    assert_contains_only_unicode([entry['name'] for entry in directories])
    assert_contains_only_unicode([entry['name'] for entry in files])
150 150
151 151
def test_get_nodes_max_file_bytes(backend_random):
    """``max_file_bytes`` strips the content of files above the limit."""
    repo = backend_random.repo
    commit_id = repo.get_commit(commit_idx=0).raw_id
    max_file_bytes = 10

    # Without a limit at least one file exceeds the threshold.
    directories, files = scm.ScmModel().get_nodes(
        repo.repo_name, commit_id, content=True,
        extended_info=True, flat=False)
    assert any(entry['content'] and len(entry['content']) > max_file_bytes
               for entry in files)

    # With the limit, oversized files come back without content.
    directories, files = scm.ScmModel().get_nodes(
        repo.repo_name, commit_id, content=True,
        extended_info=True, flat=False, max_file_bytes=max_file_bytes)
    assert all(entry['size'] <= max_file_bytes or entry['content'] is None
               for entry in files)
167 167
168 168
def assert_contains_only_unicode(structure):
    """Assert *structure* is non-empty and every element is ``unicode``."""
    assert structure
    assert all(isinstance(value, unicode) for value in structure)
173 173
174 174
@pytest.mark.backends("hg", "git")
def test_get_non_unicode_reference(backend):
    """Landing-rev choices must survive non-ascii branch/book/tag names."""
    model = scm.ScmModel()
    non_unicode_list = ["Adını".decode("cp1254")]

    def scm_instance():
        return Mock(
            branches=non_unicode_list, bookmarks=non_unicode_list,
            tags=non_unicode_list, alias=backend.alias)

    repo = Mock(__class__=db.Repository, scm_instance=scm_instance)
    choices, __ = model.get_repo_landing_revs(translator=lambda s: s, repo=repo)

    # Bookmarks only exist for mercurial repositories.
    valid_choices = ['rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1']
    if backend.alias == 'hg':
        valid_choices.append(u'book:Ad\xc4\xb1n\xc4\xb1')
    valid_choices.append(u'tag:Ad\xc4\xb1n\xc4\xb1')

    assert choices == valid_choices
197
198
class TestInstallSvnHooks(object):
    """Tests for ScmModel.install_svn_hooks on svn repositories."""

    HOOK_FILES = ('pre-commit', 'post-commit')

    def _hook_paths(self, vcs_repo):
        # Hooks directory plus absolute paths of the managed hook files.
        hooks_dir = os.path.join(vcs_repo.path, 'hooks')
        return hooks_dir, [
            os.path.join(hooks_dir, name) for name in self.HOOK_FILES]

    def test_new_hooks_are_created(self, backend_svn):
        vcs_repo = backend_svn.create_repo().scm_instance()
        scm.ScmModel().install_svn_hooks(vcs_repo)

        hooks_dir, hook_paths = self._hook_paths(vcs_repo)
        assert os.path.isdir(hooks_dir)
        for hook_path in hook_paths:
            self._check_hook_file_mode(hook_path)
            self._check_hook_file_content(hook_path)

    def test_rc_hooks_are_replaced(self, backend_svn):
        vcs_repo = backend_svn.create_repo().scm_instance()
        _, hook_paths = self._hook_paths(vcs_repo)
        for hook_path in hook_paths:
            self._create_fake_hook(
                hook_path, content="RC_HOOK_VER = 'abcde'\n")

        scm.ScmModel().install_svn_hooks(vcs_repo)

        # Versioned RhodeCode hooks are always rewritten.
        for hook_path in hook_paths:
            self._check_hook_file_content(hook_path)

    def test_non_rc_hooks_are_not_replaced_without_force_create(
            self, backend_svn):
        vcs_repo = backend_svn.create_repo().scm_instance()
        _, hook_paths = self._hook_paths(vcs_repo)
        non_rc_content = "exit 0\n"
        for hook_path in hook_paths:
            self._create_fake_hook(hook_path, content=non_rc_content)

        scm.ScmModel().install_svn_hooks(vcs_repo)

        # Foreign hooks are left untouched unless force_create is set.
        for hook_path in hook_paths:
            with open(hook_path, 'rt') as hook_file:
                assert hook_file.read() == non_rc_content

    def test_non_rc_hooks_are_replaced_with_force_create(self, backend_svn):
        vcs_repo = backend_svn.create_repo().scm_instance()
        _, hook_paths = self._hook_paths(vcs_repo)
        non_rc_content = "exit 0\n"
        for hook_path in hook_paths:
            self._create_fake_hook(hook_path, content=non_rc_content)

        scm.ScmModel().install_svn_hooks(vcs_repo, force_create=True)

        for hook_path in hook_paths:
            self._check_hook_file_content(hook_path)

    def _check_hook_file_mode(self, file_path):
        # Installed hooks must exist and be executable (mode 0755).
        assert os.path.exists(file_path)
        file_mode = stat.S_IMODE(os.stat(file_path).st_mode)
        assert file_mode == int('755', 8)

    def _check_hook_file_content(self, file_path):
        with open(file_path, 'rt') as hook_file:
            content = hook_file.read()

        # RhodeCode hooks start with a shebang for the current interpreter
        # and embed the version marker used for upgrade detection.
        expected_env = '#!{}'.format(sys.executable)
        expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(
            rhodecode.__version__)
        assert content.strip().startswith(expected_env)
        assert expected_rc_version in content

    def _create_fake_hook(self, file_path, content):
        # Plant an arbitrary pre-existing hook file.
        with open(file_path, 'w') as hook_file:
            hook_file.write(content)
287
288
class TestCheckRhodecodeHook(object):
    """Tests for scm._check_rhodecode_hook signature detection."""

    @patch('os.path.exists', Mock(return_value=False))
    def test_returns_true_when_no_hook_found(self):
        # A missing hook file means it is safe to (re)install.
        assert scm._check_rhodecode_hook('/tmp/fake_hook_file.py')

    @pytest.mark.parametrize("file_content, expected_result", [
        ("RC_HOOK_VER = '3.3.3'\n", True),
        ("RC_HOOK = '3.3.3'\n", False),
    ], ids=no_newline_id_generator)
    @patch('os.path.exists', Mock(return_value=True))
    def test_signatures(self, file_content, expected_result):
        # Only files carrying the RC_HOOK_VER marker count as our hooks.
        with patch.object(scm, '_read_hook', return_value=file_content):
            result = scm._check_rhodecode_hook('/tmp/fake_hook_file.py')

        assert result is expected_result
308
309
class TestInstallHooks(object):
    """Tests for ScmModel.install_hooks dispatch per repository type."""

    def test_hooks_are_installed_for_git_repo(self, backend_git):
        scm_repo = backend_git.create_repo().scm_instance()
        model = scm.ScmModel()
        with patch.object(model, 'install_git_hook') as hooks_mock:
            model.install_hooks(scm_repo, repo_type='git')
        hooks_mock.assert_called_once_with(scm_repo)

    def test_hooks_are_installed_for_svn_repo(self, backend_svn):
        scm_repo = backend_svn.create_repo().scm_instance()
        model = scm.ScmModel()
        with patch.object(scm.ScmModel, 'install_svn_hooks') as hooks_mock:
            model.install_hooks(scm_repo, repo_type='svn')
        hooks_mock.assert_called_once_with(scm_repo)

    @pytest.mark.parametrize('hook_method', [
        'install_svn_hooks',
        'install_git_hook'])
    def test_mercurial_doesnt_trigger_hooks(self, backend_hg, hook_method):
        # Mercurial hooks run in-process; nothing is written to disk.
        scm_repo = backend_hg.create_repo().scm_instance()
        model = scm.ScmModel()
        with patch.object(scm.ScmModel, hook_method) as hooks_mock:
            model.install_hooks(scm_repo, repo_type='hg')
        assert hooks_mock.call_count == 0
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
General Comments 0
You need to be logged in to leave comments. Login now