##// END OF EJS Templates
svn: enable hooks and integration framework execution....
marcink -
r2677:25d65914 default
parent child Browse files
Show More
@@ -1,54 +1,51 b''
1 # top level files
1 # top level files
2
2
3 include MANIFEST.in
3 include MANIFEST.in
4 include README.rst
4 include README.rst
5 include CHANGES.rst
5 include CHANGES.rst
6 include LICENSE.txt
6 include LICENSE.txt
7
7
8 include rhodecode/VERSION
8 include rhodecode/VERSION
9
9
10 # docs
10 # docs
11 recursive-include docs *
11 recursive-include docs *
12
12
13 # all config files
13 # all config files
14 recursive-include configs *
14 recursive-include configs *
15
15
16 # translations
16 # translations
17 recursive-include rhodecode/i18n *
17 recursive-include rhodecode/i18n *
18
18
19 # hook templates
20 recursive-include rhodecode/config/hook_templates *
21
22 # non-python core stuff
19 # non-python core stuff
23 recursive-include rhodecode *.cfg
20 recursive-include rhodecode *.cfg
24 recursive-include rhodecode *.json
21 recursive-include rhodecode *.json
25 recursive-include rhodecode *.ini_tmpl
22 recursive-include rhodecode *.ini_tmpl
26 recursive-include rhodecode *.sh
23 recursive-include rhodecode *.sh
27 recursive-include rhodecode *.mako
24 recursive-include rhodecode *.mako
28
25
29 # 502 page
26 # 502 page
30 include rhodecode/public/502.html
27 include rhodecode/public/502.html
31
28
32
29
33 # images, css
30 # images, css
34 include rhodecode/public/css/*.css
31 include rhodecode/public/css/*.css
35 include rhodecode/public/images/*.*
32 include rhodecode/public/images/*.*
36 include rhodecode/public/images/ee_features/*.*
33 include rhodecode/public/images/ee_features/*.*
37
34
38 # sound files
35 # sound files
39 include rhodecode/public/sounds/*.mp3
36 include rhodecode/public/sounds/*.mp3
40 include rhodecode/public/sounds/*.wav
37 include rhodecode/public/sounds/*.wav
41
38
42 # fonts
39 # fonts
43 recursive-include rhodecode/public/fonts/ProximaNova *
40 recursive-include rhodecode/public/fonts/ProximaNova *
44 recursive-include rhodecode/public/fonts/RCIcons *
41 recursive-include rhodecode/public/fonts/RCIcons *
45
42
46 # js
43 # js
47 recursive-include rhodecode/public/js *
44 recursive-include rhodecode/public/js *
48
45
49 # templates
46 # templates
50 recursive-include rhodecode/templates *
47 recursive-include rhodecode/templates *
51
48
52 # skip any tests files
49 # skip any tests files
53 recursive-exclude rhodecode/tests *
50 recursive-exclude rhodecode/tests *
54
51
@@ -1,140 +1,141 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 import pytest
22 import pytest
23
23
24 from rhodecode.api.tests.utils import build_data, api_call, assert_error
24 from rhodecode.api.tests.utils import build_data, api_call, assert_error
25
25
26
26
27 @pytest.mark.usefixtures("testuser_api", "app")
27 @pytest.mark.usefixtures("testuser_api", "app")
28 class TestGetRepoChangeset(object):
28 class TestGetRepoChangeset(object):
29 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
29 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
30 def test_get_repo_changeset(self, details, backend):
30 def test_get_repo_changeset(self, details, backend):
31 commit = backend.repo.get_commit(commit_idx=0)
31 commit = backend.repo.get_commit(commit_idx=0)
32 __, params = build_data(
32 __, params = build_data(
33 self.apikey, 'get_repo_changeset',
33 self.apikey, 'get_repo_changeset',
34 repoid=backend.repo_name, revision=commit.raw_id,
34 repoid=backend.repo_name, revision=commit.raw_id,
35 details=details,
35 details=details,
36 )
36 )
37 response = api_call(self.app, params)
37 response = api_call(self.app, params)
38 result = response.json['result']
38 result = response.json['result']
39 assert result['revision'] == 0
39 assert result['revision'] == 0
40 assert result['raw_id'] == commit.raw_id
40 assert result['raw_id'] == commit.raw_id
41
41
42 if details == 'full':
42 if details == 'full':
43 assert result['refs']['bookmarks'] == getattr(
43 assert result['refs']['bookmarks'] == getattr(
44 commit, 'bookmarks', [])
44 commit, 'bookmarks', [])
45 assert result['refs']['branches'] == [commit.branch]
45 branches = [commit.branch] if commit.branch else []
46 assert result['refs']['branches'] == branches
46 assert result['refs']['tags'] == commit.tags
47 assert result['refs']['tags'] == commit.tags
47
48
48 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
49 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
49 def test_get_repo_changeset_bad_type(self, details, backend):
50 def test_get_repo_changeset_bad_type(self, details, backend):
50 id_, params = build_data(
51 id_, params = build_data(
51 self.apikey, 'get_repo_changeset',
52 self.apikey, 'get_repo_changeset',
52 repoid=backend.repo_name, revision=0,
53 repoid=backend.repo_name, revision=0,
53 details=details,
54 details=details,
54 )
55 )
55 response = api_call(self.app, params)
56 response = api_call(self.app, params)
56 expected = 'commit_id must be a string value'
57 expected = 'commit_id must be a string value'
57 assert_error(id_, expected, given=response.body)
58 assert_error(id_, expected, given=response.body)
58
59
59 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
60 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
60 def test_get_repo_changesets(self, details, backend):
61 def test_get_repo_changesets(self, details, backend):
61 limit = 2
62 limit = 2
62 commit = backend.repo.get_commit(commit_idx=0)
63 commit = backend.repo.get_commit(commit_idx=0)
63 __, params = build_data(
64 __, params = build_data(
64 self.apikey, 'get_repo_changesets',
65 self.apikey, 'get_repo_changesets',
65 repoid=backend.repo_name, start_rev=commit.raw_id, limit=limit,
66 repoid=backend.repo_name, start_rev=commit.raw_id, limit=limit,
66 details=details,
67 details=details,
67 )
68 )
68 response = api_call(self.app, params)
69 response = api_call(self.app, params)
69 result = response.json['result']
70 result = response.json['result']
70 assert result
71 assert result
71 assert len(result) == limit
72 assert len(result) == limit
72 for x in xrange(limit):
73 for x in xrange(limit):
73 assert result[x]['revision'] == x
74 assert result[x]['revision'] == x
74
75
75 if details == 'full':
76 if details == 'full':
76 for x in xrange(limit):
77 for x in xrange(limit):
77 assert 'bookmarks' in result[x]['refs']
78 assert 'bookmarks' in result[x]['refs']
78 assert 'branches' in result[x]['refs']
79 assert 'branches' in result[x]['refs']
79 assert 'tags' in result[x]['refs']
80 assert 'tags' in result[x]['refs']
80
81
81 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
82 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
82 @pytest.mark.parametrize("start_rev, expected_revision", [
83 @pytest.mark.parametrize("start_rev, expected_revision", [
83 ("0", 0),
84 ("0", 0),
84 ("10", 10),
85 ("10", 10),
85 ("20", 20),
86 ("20", 20),
86 ])
87 ])
87 @pytest.mark.backends("hg", "git")
88 @pytest.mark.backends("hg", "git")
88 def test_get_repo_changesets_commit_range(
89 def test_get_repo_changesets_commit_range(
89 self, details, backend, start_rev, expected_revision):
90 self, details, backend, start_rev, expected_revision):
90 limit = 10
91 limit = 10
91 __, params = build_data(
92 __, params = build_data(
92 self.apikey, 'get_repo_changesets',
93 self.apikey, 'get_repo_changesets',
93 repoid=backend.repo_name, start_rev=start_rev, limit=limit,
94 repoid=backend.repo_name, start_rev=start_rev, limit=limit,
94 details=details,
95 details=details,
95 )
96 )
96 response = api_call(self.app, params)
97 response = api_call(self.app, params)
97 result = response.json['result']
98 result = response.json['result']
98 assert result
99 assert result
99 assert len(result) == limit
100 assert len(result) == limit
100 for i in xrange(limit):
101 for i in xrange(limit):
101 assert result[i]['revision'] == int(expected_revision) + i
102 assert result[i]['revision'] == int(expected_revision) + i
102
103
103 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
104 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
104 @pytest.mark.parametrize("start_rev, expected_revision", [
105 @pytest.mark.parametrize("start_rev, expected_revision", [
105 ("0", 0),
106 ("0", 0),
106 ("10", 9),
107 ("10", 9),
107 ("20", 19),
108 ("20", 19),
108 ])
109 ])
109 def test_get_repo_changesets_commit_range_svn(
110 def test_get_repo_changesets_commit_range_svn(
110 self, details, backend_svn, start_rev, expected_revision):
111 self, details, backend_svn, start_rev, expected_revision):
111
112
112 # TODO: johbo: SVN showed a problem here: The parameter "start_rev"
113 # TODO: johbo: SVN showed a problem here: The parameter "start_rev"
113 # in our API allows to pass in a "Commit ID" as well as a
114 # in our API allows to pass in a "Commit ID" as well as a
114 # "Commit Index". In the case of Subversion it is not possible to
115 # "Commit Index". In the case of Subversion it is not possible to
115 # distinguish these cases. As a workaround we implemented this
116 # distinguish these cases. As a workaround we implemented this
116 # behavior which gives a preference to see it as a "Commit ID".
117 # behavior which gives a preference to see it as a "Commit ID".
117
118
118 limit = 10
119 limit = 10
119 __, params = build_data(
120 __, params = build_data(
120 self.apikey, 'get_repo_changesets',
121 self.apikey, 'get_repo_changesets',
121 repoid=backend_svn.repo_name, start_rev=start_rev, limit=limit,
122 repoid=backend_svn.repo_name, start_rev=start_rev, limit=limit,
122 details=details,
123 details=details,
123 )
124 )
124 response = api_call(self.app, params)
125 response = api_call(self.app, params)
125 result = response.json['result']
126 result = response.json['result']
126 assert result
127 assert result
127 assert len(result) == limit
128 assert len(result) == limit
128 for i in xrange(limit):
129 for i in xrange(limit):
129 assert result[i]['revision'] == int(expected_revision) + i
130 assert result[i]['revision'] == int(expected_revision) + i
130
131
131 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
132 @pytest.mark.parametrize("details", ['basic', 'extended', 'full'])
132 def test_get_repo_changesets_bad_type(self, details, backend):
133 def test_get_repo_changesets_bad_type(self, details, backend):
133 id_, params = build_data(
134 id_, params = build_data(
134 self.apikey, 'get_repo_changesets',
135 self.apikey, 'get_repo_changesets',
135 repoid=backend.repo_name, start_rev=0, limit=2,
136 repoid=backend.repo_name, start_rev=0, limit=2,
136 details=details,
137 details=details,
137 )
138 )
138 response = api_call(self.app, params)
139 response = api_call(self.app, params)
139 expected = 'commit_id must be a string value'
140 expected = 'commit_id must be a string value'
140 assert_error(id_, expected, given=response.body)
141 assert_error(id_, expected, given=response.body)
@@ -1,251 +1,313 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import os
22 import time
22 import logging
23 import logging
24 import tempfile
23 import traceback
25 import traceback
24 import threading
26 import threading
27
25 from BaseHTTPServer import BaseHTTPRequestHandler
28 from BaseHTTPServer import BaseHTTPRequestHandler
26 from SocketServer import TCPServer
29 from SocketServer import TCPServer
27
30
28 import rhodecode
31 import rhodecode
29 from rhodecode.model import meta
32 from rhodecode.model import meta
30 from rhodecode.lib.base import bootstrap_request, bootstrap_config
33 from rhodecode.lib.base import bootstrap_request, bootstrap_config
31 from rhodecode.lib import hooks_base
34 from rhodecode.lib import hooks_base
32 from rhodecode.lib.utils2 import AttributeDict
35 from rhodecode.lib.utils2 import AttributeDict
36 from rhodecode.lib.ext_json import json
33
37
34
38
35 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
36
40
37
41
38 class HooksHttpHandler(BaseHTTPRequestHandler):
42 class HooksHttpHandler(BaseHTTPRequestHandler):
43
39 def do_POST(self):
44 def do_POST(self):
40 method, extras = self._read_request()
45 method, extras = self._read_request()
46 txn_id = getattr(self.server, 'txn_id', None)
47 if txn_id:
48 from rhodecode.lib.caches import compute_key_from_params
49 log.debug('Computing TXN_ID based on `%s`:`%s`',
50 extras['repository'], extras['txn_id'])
51 computed_txn_id = compute_key_from_params(
52 extras['repository'], extras['txn_id'])
53 if txn_id != computed_txn_id:
54 raise Exception(
55 'TXN ID fail: expected {} got {} instead'.format(
56 txn_id, computed_txn_id))
57
41 try:
58 try:
42 result = self._call_hook(method, extras)
59 result = self._call_hook(method, extras)
43 except Exception as e:
60 except Exception as e:
44 exc_tb = traceback.format_exc()
61 exc_tb = traceback.format_exc()
45 result = {
62 result = {
46 'exception': e.__class__.__name__,
63 'exception': e.__class__.__name__,
47 'exception_traceback': exc_tb,
64 'exception_traceback': exc_tb,
48 'exception_args': e.args
65 'exception_args': e.args
49 }
66 }
50 self._write_response(result)
67 self._write_response(result)
51
68
52 def _read_request(self):
69 def _read_request(self):
53 length = int(self.headers['Content-Length'])
70 length = int(self.headers['Content-Length'])
54 body = self.rfile.read(length).decode('utf-8')
71 body = self.rfile.read(length).decode('utf-8')
55 data = json.loads(body)
72 data = json.loads(body)
56 return data['method'], data['extras']
73 return data['method'], data['extras']
57
74
58 def _write_response(self, result):
75 def _write_response(self, result):
59 self.send_response(200)
76 self.send_response(200)
60 self.send_header("Content-type", "text/json")
77 self.send_header("Content-type", "text/json")
61 self.end_headers()
78 self.end_headers()
62 self.wfile.write(json.dumps(result))
79 self.wfile.write(json.dumps(result))
63
80
64 def _call_hook(self, method, extras):
81 def _call_hook(self, method, extras):
65 hooks = Hooks()
82 hooks = Hooks()
66 try:
83 try:
67 result = getattr(hooks, method)(extras)
84 result = getattr(hooks, method)(extras)
68 finally:
85 finally:
69 meta.Session.remove()
86 meta.Session.remove()
70 return result
87 return result
71
88
72 def log_message(self, format, *args):
89 def log_message(self, format, *args):
73 """
90 """
74 This is an overridden method of BaseHTTPRequestHandler which logs using
91 This is an overridden method of BaseHTTPRequestHandler which logs using
75 logging library instead of writing directly to stderr.
92 logging library instead of writing directly to stderr.
76 """
93 """
77
94
78 message = format % args
95 message = format % args
79
96
80 # TODO: mikhail: add different log levels support
81 log.debug(
97 log.debug(
82 "%s - - [%s] %s", self.client_address[0],
98 "%s - - [%s] %s", self.client_address[0],
83 self.log_date_time_string(), message)
99 self.log_date_time_string(), message)
84
100
85
101
86 class DummyHooksCallbackDaemon(object):
102 class DummyHooksCallbackDaemon(object):
103 hooks_uri = ''
104
87 def __init__(self):
105 def __init__(self):
88 self.hooks_module = Hooks.__module__
106 self.hooks_module = Hooks.__module__
89
107
90 def __enter__(self):
108 def __enter__(self):
91 log.debug('Running dummy hooks callback daemon')
109 log.debug('Running dummy hooks callback daemon')
92 return self
110 return self
93
111
94 def __exit__(self, exc_type, exc_val, exc_tb):
112 def __exit__(self, exc_type, exc_val, exc_tb):
95 log.debug('Exiting dummy hooks callback daemon')
113 log.debug('Exiting dummy hooks callback daemon')
96
114
97
115
98 class ThreadedHookCallbackDaemon(object):
116 class ThreadedHookCallbackDaemon(object):
99
117
100 _callback_thread = None
118 _callback_thread = None
101 _daemon = None
119 _daemon = None
102 _done = False
120 _done = False
103
121
104 def __init__(self):
122 def __init__(self, txn_id=None, port=None):
105 self._prepare()
123 self._prepare(txn_id=txn_id, port=port)
106
124
107 def __enter__(self):
125 def __enter__(self):
108 self._run()
126 self._run()
109 return self
127 return self
110
128
111 def __exit__(self, exc_type, exc_val, exc_tb):
129 def __exit__(self, exc_type, exc_val, exc_tb):
112 log.debug('Callback daemon exiting now...')
130 log.debug('Callback daemon exiting now...')
113 self._stop()
131 self._stop()
114
132
115 def _prepare(self):
133 def _prepare(self, txn_id=None, port=None):
116 raise NotImplementedError()
134 raise NotImplementedError()
117
135
118 def _run(self):
136 def _run(self):
119 raise NotImplementedError()
137 raise NotImplementedError()
120
138
121 def _stop(self):
139 def _stop(self):
122 raise NotImplementedError()
140 raise NotImplementedError()
123
141
124
142
125 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
143 class HttpHooksCallbackDaemon(ThreadedHookCallbackDaemon):
126 """
144 """
127 Context manager which will run a callback daemon in a background thread.
145 Context manager which will run a callback daemon in a background thread.
128 """
146 """
129
147
130 hooks_uri = None
148 hooks_uri = None
131
149
132 IP_ADDRESS = '127.0.0.1'
150 IP_ADDRESS = '127.0.0.1'
133
151
134 # From Python docs: Polling reduces our responsiveness to a shutdown
152 # From Python docs: Polling reduces our responsiveness to a shutdown
135 # request and wastes cpu at all other times.
153 # request and wastes cpu at all other times.
136 POLL_INTERVAL = 0.01
154 POLL_INTERVAL = 0.01
137
155
138 def _prepare(self):
156 def _prepare(self, txn_id=None, port=None):
139 log.debug("Preparing HTTP callback daemon and registering hook object")
140
141 self._done = False
157 self._done = False
142 self._daemon = TCPServer((self.IP_ADDRESS, 0), HooksHttpHandler)
158 self._daemon = TCPServer((self.IP_ADDRESS, port or 0), HooksHttpHandler)
143 _, port = self._daemon.server_address
159 _, port = self._daemon.server_address
144 self.hooks_uri = '{}:{}'.format(self.IP_ADDRESS, port)
160 self.hooks_uri = '{}:{}'.format(self.IP_ADDRESS, port)
161 self.txn_id = txn_id
162 # inject transaction_id for later verification
163 self._daemon.txn_id = self.txn_id
145
164
146 log.debug("Hooks uri is: %s", self.hooks_uri)
165 log.debug(
166 "Preparing HTTP callback daemon at `%s` and registering hook object",
167 self.hooks_uri)
147
168
148 def _run(self):
169 def _run(self):
149 log.debug("Running event loop of callback daemon in background thread")
170 log.debug("Running event loop of callback daemon in background thread")
150 callback_thread = threading.Thread(
171 callback_thread = threading.Thread(
151 target=self._daemon.serve_forever,
172 target=self._daemon.serve_forever,
152 kwargs={'poll_interval': self.POLL_INTERVAL})
173 kwargs={'poll_interval': self.POLL_INTERVAL})
153 callback_thread.daemon = True
174 callback_thread.daemon = True
154 callback_thread.start()
175 callback_thread.start()
155 self._callback_thread = callback_thread
176 self._callback_thread = callback_thread
156
177
157 def _stop(self):
178 def _stop(self):
158 log.debug("Waiting for background thread to finish.")
179 log.debug("Waiting for background thread to finish.")
159 self._daemon.shutdown()
180 self._daemon.shutdown()
160 self._callback_thread.join()
181 self._callback_thread.join()
161 self._daemon = None
182 self._daemon = None
162 self._callback_thread = None
183 self._callback_thread = None
184 if self.txn_id:
185 txn_id_file = get_txn_id_data_path(self.txn_id)
186 log.debug('Cleaning up TXN ID %s', txn_id_file)
187 if os.path.isfile(txn_id_file):
188 os.remove(txn_id_file)
189
163 log.debug("Background thread done.")
190 log.debug("Background thread done.")
164
191
165
192
166 def prepare_callback_daemon(extras, protocol, use_direct_calls):
193 def get_txn_id_data_path(txn_id):
167 callback_daemon = None
194 root = tempfile.gettempdir()
195 return os.path.join(root, 'rc_txn_id_{}'.format(txn_id))
196
197
198 def store_txn_id_data(txn_id, data_dict):
199 if not txn_id:
200 log.warning('Cannot store txn_id because it is empty')
201 return
202
203 path = get_txn_id_data_path(txn_id)
204 try:
205 with open(path, 'wb') as f:
206 f.write(json.dumps(data_dict))
207 except Exception:
208 log.exception('Failed to write txn_id metadata')
168
209
210
211 def get_txn_id_from_store(txn_id):
212 """
213 Reads txn_id from store and if present returns the data for callback manager
214 """
215 path = get_txn_id_data_path(txn_id)
216 try:
217 with open(path, 'rb') as f:
218 return json.loads(f.read())
219 except Exception:
220 return {}
221
222
223 def prepare_callback_daemon(extras, protocol, use_direct_calls, txn_id=None):
224 txn_details = get_txn_id_from_store(txn_id)
225 port = txn_details.get('port', 0)
169 if use_direct_calls:
226 if use_direct_calls:
170 callback_daemon = DummyHooksCallbackDaemon()
227 callback_daemon = DummyHooksCallbackDaemon()
171 extras['hooks_module'] = callback_daemon.hooks_module
228 extras['hooks_module'] = callback_daemon.hooks_module
172 else:
229 else:
173 if protocol == 'http':
230 if protocol == 'http':
174 callback_daemon = HttpHooksCallbackDaemon()
231 callback_daemon = HttpHooksCallbackDaemon(txn_id=txn_id, port=port)
175 else:
232 else:
176 log.error('Unsupported callback daemon protocol "%s"', protocol)
233 log.error('Unsupported callback daemon protocol "%s"', protocol)
177 raise Exception('Unsupported callback daemon protocol.')
234 raise Exception('Unsupported callback daemon protocol.')
178
235
179 extras['hooks_uri'] = callback_daemon.hooks_uri
236 extras['hooks_uri'] = callback_daemon.hooks_uri
180 extras['hooks_protocol'] = protocol
237 extras['hooks_protocol'] = protocol
238 extras['time'] = time.time()
181
239
182 log.debug('Prepared a callback daemon: %s', callback_daemon)
240 # register txn_id
241 extras['txn_id'] = txn_id
242
243 log.debug('Prepared a callback daemon: %s at url `%s`',
244 callback_daemon.__class__.__name__, callback_daemon.hooks_uri)
183 return callback_daemon, extras
245 return callback_daemon, extras
184
246
185
247
186 class Hooks(object):
248 class Hooks(object):
187 """
249 """
188 Exposes the hooks for remote call backs
250 Exposes the hooks for remote call backs
189 """
251 """
190
252
191 def repo_size(self, extras):
253 def repo_size(self, extras):
192 log.debug("Called repo_size of %s object", self)
254 log.debug("Called repo_size of %s object", self)
193 return self._call_hook(hooks_base.repo_size, extras)
255 return self._call_hook(hooks_base.repo_size, extras)
194
256
195 def pre_pull(self, extras):
257 def pre_pull(self, extras):
196 log.debug("Called pre_pull of %s object", self)
258 log.debug("Called pre_pull of %s object", self)
197 return self._call_hook(hooks_base.pre_pull, extras)
259 return self._call_hook(hooks_base.pre_pull, extras)
198
260
199 def post_pull(self, extras):
261 def post_pull(self, extras):
200 log.debug("Called post_pull of %s object", self)
262 log.debug("Called post_pull of %s object", self)
201 return self._call_hook(hooks_base.post_pull, extras)
263 return self._call_hook(hooks_base.post_pull, extras)
202
264
203 def pre_push(self, extras):
265 def pre_push(self, extras):
204 log.debug("Called pre_push of %s object", self)
266 log.debug("Called pre_push of %s object", self)
205 return self._call_hook(hooks_base.pre_push, extras)
267 return self._call_hook(hooks_base.pre_push, extras)
206
268
207 def post_push(self, extras):
269 def post_push(self, extras):
208 log.debug("Called post_push of %s object", self)
270 log.debug("Called post_push of %s object", self)
209 return self._call_hook(hooks_base.post_push, extras)
271 return self._call_hook(hooks_base.post_push, extras)
210
272
211 def _call_hook(self, hook, extras):
273 def _call_hook(self, hook, extras):
212 extras = AttributeDict(extras)
274 extras = AttributeDict(extras)
213 server_url = extras['server_url']
275 server_url = extras['server_url']
214 request = bootstrap_request(application_url=server_url)
276 request = bootstrap_request(application_url=server_url)
215
277
216 bootstrap_config(request) # inject routes and other interfaces
278 bootstrap_config(request) # inject routes and other interfaces
217
279
218 # inject the user for usage in hooks
280 # inject the user for usage in hooks
219 request.user = AttributeDict({'username': extras.username,
281 request.user = AttributeDict({'username': extras.username,
220 'ip_addr': extras.ip,
282 'ip_addr': extras.ip,
221 'user_id': extras.user_id})
283 'user_id': extras.user_id})
222
284
223 extras.request = request
285 extras.request = request
224
286
225 try:
287 try:
226 result = hook(extras)
288 result = hook(extras)
227 except Exception as error:
289 except Exception as error:
228 exc_tb = traceback.format_exc()
290 exc_tb = traceback.format_exc()
229 log.exception('Exception when handling hook %s', hook)
291 log.exception('Exception when handling hook %s', hook)
230 error_args = error.args
292 error_args = error.args
231 return {
293 return {
232 'status': 128,
294 'status': 128,
233 'output': '',
295 'output': '',
234 'exception': type(error).__name__,
296 'exception': type(error).__name__,
235 'exception_traceback': exc_tb,
297 'exception_traceback': exc_tb,
236 'exception_args': error_args,
298 'exception_args': error_args,
237 }
299 }
238 finally:
300 finally:
239 meta.Session.remove()
301 meta.Session.remove()
240
302
241 log.debug('Got hook call response %s', result)
303 log.debug('Got hook call response %s', result)
242 return {
304 return {
243 'status': result.status,
305 'status': result.status,
244 'output': result.output,
306 'output': result.output,
245 }
307 }
246
308
247 def __enter__(self):
309 def __enter__(self):
248 return self
310 return self
249
311
250 def __exit__(self, exc_type, exc_val, exc_tb):
312 def __exit__(self, exc_type, exc_val, exc_tb):
251 pass
313 pass
@@ -1,175 +1,204 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import base64
21 import logging
22 import logging
22 import urllib
23 import urllib
23 from urlparse import urljoin
24 from urlparse import urljoin
24
25
25
26 import requests
26 import requests
27 from webob.exc import HTTPNotAcceptable
27 from webob.exc import HTTPNotAcceptable
28
28
29 from rhodecode.lib import caches
29 from rhodecode.lib.middleware import simplevcs
30 from rhodecode.lib.middleware import simplevcs
30 from rhodecode.lib.utils import is_valid_repo
31 from rhodecode.lib.utils import is_valid_repo
31 from rhodecode.lib.utils2 import str2bool
32 from rhodecode.lib.utils2 import str2bool, safe_int
33 from rhodecode.lib.ext_json import json
34 from rhodecode.lib.hooks_daemon import store_txn_id_data
35
32
36
33 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
34
38
35
39
36 class SimpleSvnApp(object):
40 class SimpleSvnApp(object):
37 IGNORED_HEADERS = [
41 IGNORED_HEADERS = [
38 'connection', 'keep-alive', 'content-encoding',
42 'connection', 'keep-alive', 'content-encoding',
39 'transfer-encoding', 'content-length']
43 'transfer-encoding', 'content-length']
40 rc_extras = {}
44 rc_extras = {}
41
45
42
43 def __init__(self, config):
46 def __init__(self, config):
44 self.config = config
47 self.config = config
45
48
46 def __call__(self, environ, start_response):
49 def __call__(self, environ, start_response):
47 request_headers = self._get_request_headers(environ)
50 request_headers = self._get_request_headers(environ)
48
51
49 data = environ['wsgi.input']
52 data = environ['wsgi.input']
50 # johbo: Avoid that we end up with sending the request in chunked
53 # johbo: Avoid that we end up with sending the request in chunked
51 # transfer encoding (mainly on Gunicorn). If we know the content
54 # transfer encoding (mainly on Gunicorn). If we know the content
52 # length, then we should transfer the payload in one request.
55 # length, then we should transfer the payload in one request.
53 if environ['REQUEST_METHOD'] == 'MKCOL' or 'CONTENT_LENGTH' in environ:
56 if environ['REQUEST_METHOD'] == 'MKCOL' or 'CONTENT_LENGTH' in environ:
54 data = data.read()
57 data = data.read()
58 if data.startswith('(create-txn-with-props'):
59 # store on-the-fly our rc_extra using svn revision properties
60 # those can be read later on in hooks executed so we have a way
61 # to pass in the data into svn hooks
62 rc_data = base64.urlsafe_b64encode(json.dumps(self.rc_extras))
63 rc_data_len = len(rc_data)
64 # header defines data lenght, and serialized data
65 skel = ' rc-scm-extras {} {}'.format(rc_data_len, rc_data)
66 data = data[:-2] + skel + '))'
55
67
56 log.debug('Calling: %s method via `%s`', environ['REQUEST_METHOD'],
68 log.debug('Calling: %s method via `%s`', environ['REQUEST_METHOD'],
57 self._get_url(environ['PATH_INFO']))
69 self._get_url(environ['PATH_INFO']))
70
58 response = requests.request(
71 response = requests.request(
59 environ['REQUEST_METHOD'], self._get_url(environ['PATH_INFO']),
72 environ['REQUEST_METHOD'], self._get_url(environ['PATH_INFO']),
60 data=data, headers=request_headers)
73 data=data, headers=request_headers)
61
74
62 if response.status_code not in [200, 401]:
75 if response.status_code not in [200, 401]:
63 if response.status_code >= 500:
76 if response.status_code >= 500:
64 log.error('Got SVN response:%s with text:`%s`',
77 log.error('Got SVN response:%s with text:`%s`',
65 response, response.text)
78 response, response.text)
66 else:
79 else:
67 log.debug('Got SVN response:%s with text:`%s`',
80 log.debug('Got SVN response:%s with text:`%s`',
68 response, response.text)
81 response, response.text)
69 else:
82 else:
70 log.debug('got response code: %s', response.status_code)
83 log.debug('got response code: %s', response.status_code)
71
84
72 response_headers = self._get_response_headers(response.headers)
85 response_headers = self._get_response_headers(response.headers)
86
87 if response.headers.get('SVN-Txn-name'):
88 svn_tx_id = response.headers.get('SVN-Txn-name')
89 txn_id = caches.compute_key_from_params(
90 self.config['repository'], svn_tx_id)
91 port = safe_int(self.rc_extras['hooks_uri'].split(':')[-1])
92 store_txn_id_data(txn_id, {'port': port})
93
73 start_response(
94 start_response(
74 '{} {}'.format(response.status_code, response.reason),
95 '{} {}'.format(response.status_code, response.reason),
75 response_headers)
96 response_headers)
76 return response.iter_content(chunk_size=1024)
97 return response.iter_content(chunk_size=1024)
77
98
78 def _get_url(self, path):
99 def _get_url(self, path):
79 url_path = urljoin(
100 url_path = urljoin(
80 self.config.get('subversion_http_server_url', ''), path)
101 self.config.get('subversion_http_server_url', ''), path)
81 url_path = urllib.quote(url_path, safe="/:=~+!$,;'")
102 url_path = urllib.quote(url_path, safe="/:=~+!$,;'")
82 return url_path
103 return url_path
83
104
84 def _get_request_headers(self, environ):
105 def _get_request_headers(self, environ):
85 headers = {}
106 headers = {}
86
107
87 for key in environ:
108 for key in environ:
88 if not key.startswith('HTTP_'):
109 if not key.startswith('HTTP_'):
89 continue
110 continue
90 new_key = key.split('_')
111 new_key = key.split('_')
91 new_key = [k.capitalize() for k in new_key[1:]]
112 new_key = [k.capitalize() for k in new_key[1:]]
92 new_key = '-'.join(new_key)
113 new_key = '-'.join(new_key)
93 headers[new_key] = environ[key]
114 headers[new_key] = environ[key]
94
115
95 if 'CONTENT_TYPE' in environ:
116 if 'CONTENT_TYPE' in environ:
96 headers['Content-Type'] = environ['CONTENT_TYPE']
117 headers['Content-Type'] = environ['CONTENT_TYPE']
97
118
98 if 'CONTENT_LENGTH' in environ:
119 if 'CONTENT_LENGTH' in environ:
99 headers['Content-Length'] = environ['CONTENT_LENGTH']
120 headers['Content-Length'] = environ['CONTENT_LENGTH']
100
121
101 return headers
122 return headers
102
123
103 def _get_response_headers(self, headers):
124 def _get_response_headers(self, headers):
104 headers = [
125 headers = [
105 (h, headers[h])
126 (h, headers[h])
106 for h in headers
127 for h in headers
107 if h.lower() not in self.IGNORED_HEADERS
128 if h.lower() not in self.IGNORED_HEADERS
108 ]
129 ]
109
130
110 return headers
131 return headers
111
132
112
133
113 class DisabledSimpleSvnApp(object):
134 class DisabledSimpleSvnApp(object):
114 def __init__(self, config):
135 def __init__(self, config):
115 self.config = config
136 self.config = config
116
137
117 def __call__(self, environ, start_response):
138 def __call__(self, environ, start_response):
118 reason = 'Cannot handle SVN call because: SVN HTTP Proxy is not enabled'
139 reason = 'Cannot handle SVN call because: SVN HTTP Proxy is not enabled'
119 log.warning(reason)
140 log.warning(reason)
120 return HTTPNotAcceptable(reason)(environ, start_response)
141 return HTTPNotAcceptable(reason)(environ, start_response)
121
142
122
143
123 class SimpleSvn(simplevcs.SimpleVCS):
144 class SimpleSvn(simplevcs.SimpleVCS):
124
145
125 SCM = 'svn'
146 SCM = 'svn'
126 READ_ONLY_COMMANDS = ('OPTIONS', 'PROPFIND', 'GET', 'REPORT')
147 READ_ONLY_COMMANDS = ('OPTIONS', 'PROPFIND', 'GET', 'REPORT')
127 DEFAULT_HTTP_SERVER = 'http://localhost:8090'
148 DEFAULT_HTTP_SERVER = 'http://localhost:8090'
128
149
129 def _get_repository_name(self, environ):
150 def _get_repository_name(self, environ):
130 """
151 """
131 Gets repository name out of PATH_INFO header
152 Gets repository name out of PATH_INFO header
132
153
133 :param environ: environ where PATH_INFO is stored
154 :param environ: environ where PATH_INFO is stored
134 """
155 """
135 path = environ['PATH_INFO'].split('!')
156 path = environ['PATH_INFO'].split('!')
136 repo_name = path[0].strip('/')
157 repo_name = path[0].strip('/')
137
158
138 # SVN includes the whole path in it's requests, including
159 # SVN includes the whole path in it's requests, including
139 # subdirectories inside the repo. Therefore we have to search for
160 # subdirectories inside the repo. Therefore we have to search for
140 # the repo root directory.
161 # the repo root directory.
141 if not is_valid_repo(
162 if not is_valid_repo(
142 repo_name, self.base_path, explicit_scm=self.SCM):
163 repo_name, self.base_path, explicit_scm=self.SCM):
143 current_path = ''
164 current_path = ''
144 for component in repo_name.split('/'):
165 for component in repo_name.split('/'):
145 current_path += component
166 current_path += component
146 if is_valid_repo(
167 if is_valid_repo(
147 current_path, self.base_path, explicit_scm=self.SCM):
168 current_path, self.base_path, explicit_scm=self.SCM):
148 return current_path
169 return current_path
149 current_path += '/'
170 current_path += '/'
150
171
151 return repo_name
172 return repo_name
152
173
153 def _get_action(self, environ):
174 def _get_action(self, environ):
154 return (
175 return (
155 'pull'
176 'pull'
156 if environ['REQUEST_METHOD'] in self.READ_ONLY_COMMANDS
177 if environ['REQUEST_METHOD'] in self.READ_ONLY_COMMANDS
157 else 'push')
178 else 'push')
158
179
180 def _should_use_callback_daemon(self, extras, environ, action):
181 # only MERGE command triggers hooks, so we don't want to start
182 # hooks server too many times. POST however starts the svn transaction
183 # so we also need to run the init of callback daemon of POST
184 if environ['REQUEST_METHOD'] in ['MERGE', 'POST']:
185 return True
186 return False
187
159 def _create_wsgi_app(self, repo_path, repo_name, config):
188 def _create_wsgi_app(self, repo_path, repo_name, config):
160 if self._is_svn_enabled():
189 if self._is_svn_enabled():
161 return SimpleSvnApp(config)
190 return SimpleSvnApp(config)
162 # we don't have http proxy enabled return dummy request handler
191 # we don't have http proxy enabled return dummy request handler
163 return DisabledSimpleSvnApp(config)
192 return DisabledSimpleSvnApp(config)
164
193
165 def _is_svn_enabled(self):
194 def _is_svn_enabled(self):
166 conf = self.repo_vcs_config
195 conf = self.repo_vcs_config
167 return str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
196 return str2bool(conf.get('vcs_svn_proxy', 'http_requests_enabled'))
168
197
169 def _create_config(self, extras, repo_name):
198 def _create_config(self, extras, repo_name):
170 conf = self.repo_vcs_config
199 conf = self.repo_vcs_config
171 server_url = conf.get('vcs_svn_proxy', 'http_server_url')
200 server_url = conf.get('vcs_svn_proxy', 'http_server_url')
172 server_url = server_url or self.DEFAULT_HTTP_SERVER
201 server_url = server_url or self.DEFAULT_HTTP_SERVER
173
202
174 extras['subversion_http_server_url'] = server_url
203 extras['subversion_http_server_url'] = server_url
175 return extras
204 return extras
@@ -1,648 +1,673 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SimpleVCS middleware for handling protocol request (push/clone etc.)
22 SimpleVCS middleware for handling protocol request (push/clone etc.)
23 It's implemented with basic auth function
23 It's implemented with basic auth function
24 """
24 """
25
25
26 import os
26 import os
27 import re
27 import re
28 import logging
28 import logging
29 import importlib
29 import importlib
30 from functools import wraps
30 from functools import wraps
31 from StringIO import StringIO
32 from lxml import etree
31
33
32 import time
34 import time
33 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
34 # TODO(marcink): check if we should use webob.exc here ?
36
35 from pyramid.httpexceptions import (
37 from pyramid.httpexceptions import (
36 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
37 from zope.cachedescriptors.property import Lazy as LazyProperty
39 from zope.cachedescriptors.property import Lazy as LazyProperty
38
40
39 import rhodecode
41 import rhodecode
40 from rhodecode.authentication.base import (
42 from rhodecode.authentication.base import (
41 authenticate, get_perms_cache_manager, VCS_TYPE, loadplugin)
43 authenticate, get_perms_cache_manager, VCS_TYPE, loadplugin)
42 from rhodecode.lib import caches
44 from rhodecode.lib import caches
43 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
45 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
44 from rhodecode.lib.base import (
46 from rhodecode.lib.base import (
45 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
47 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
46 from rhodecode.lib.exceptions import (
48 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
47 HTTPLockedRC, HTTPRequirementError, UserCreationError,
48 NotAllowedToCreateUserError)
49 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
49 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
50 from rhodecode.lib.middleware import appenlight
50 from rhodecode.lib.middleware import appenlight
51 from rhodecode.lib.middleware.utils import scm_app_http
51 from rhodecode.lib.middleware.utils import scm_app_http
52 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
52 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
53 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode
53 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode
54 from rhodecode.lib.vcs.conf import settings as vcs_settings
54 from rhodecode.lib.vcs.conf import settings as vcs_settings
55 from rhodecode.lib.vcs.backends import base
55 from rhodecode.lib.vcs.backends import base
56
56 from rhodecode.model import meta
57 from rhodecode.model import meta
57 from rhodecode.model.db import User, Repository, PullRequest
58 from rhodecode.model.db import User, Repository, PullRequest
58 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.pull_request import PullRequestModel
60 from rhodecode.model.pull_request import PullRequestModel
60 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
61 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
61
62
62 log = logging.getLogger(__name__)
63 log = logging.getLogger(__name__)
63
64
64
65
66 def extract_svn_txn_id(acl_repo_name, data):
67 """
68 Helper method for extraction of svn txn_id from submited XML data during
69 POST operations
70 """
71 try:
72 root = etree.fromstring(data)
73 pat = re.compile(r'/txn/(?P<txn_id>.*)')
74 for el in root:
75 if el.tag == '{DAV:}source':
76 for sub_el in el:
77 if sub_el.tag == '{DAV:}href':
78 match = pat.search(sub_el.text)
79 if match:
80 svn_tx_id = match.groupdict()['txn_id']
81 txn_id = caches.compute_key_from_params(
82 acl_repo_name, svn_tx_id)
83 return txn_id
84 except Exception:
85 log.exception('Failed to extract txn_id')
86
87
65 def initialize_generator(factory):
88 def initialize_generator(factory):
66 """
89 """
67 Initializes the returned generator by draining its first element.
90 Initializes the returned generator by draining its first element.
68
91
69 This can be used to give a generator an initializer, which is the code
92 This can be used to give a generator an initializer, which is the code
70 up to the first yield statement. This decorator enforces that the first
93 up to the first yield statement. This decorator enforces that the first
71 produced element has the value ``"__init__"`` to make its special
94 produced element has the value ``"__init__"`` to make its special
72 purpose very explicit in the using code.
95 purpose very explicit in the using code.
73 """
96 """
74
97
75 @wraps(factory)
98 @wraps(factory)
76 def wrapper(*args, **kwargs):
99 def wrapper(*args, **kwargs):
77 gen = factory(*args, **kwargs)
100 gen = factory(*args, **kwargs)
78 try:
101 try:
79 init = gen.next()
102 init = gen.next()
80 except StopIteration:
103 except StopIteration:
81 raise ValueError('Generator must yield at least one element.')
104 raise ValueError('Generator must yield at least one element.')
82 if init != "__init__":
105 if init != "__init__":
83 raise ValueError('First yielded element must be "__init__".')
106 raise ValueError('First yielded element must be "__init__".')
84 return gen
107 return gen
85 return wrapper
108 return wrapper
86
109
87
110
88 class SimpleVCS(object):
111 class SimpleVCS(object):
89 """Common functionality for SCM HTTP handlers."""
112 """Common functionality for SCM HTTP handlers."""
90
113
91 SCM = 'unknown'
114 SCM = 'unknown'
92
115
93 acl_repo_name = None
116 acl_repo_name = None
94 url_repo_name = None
117 url_repo_name = None
95 vcs_repo_name = None
118 vcs_repo_name = None
96 rc_extras = {}
119 rc_extras = {}
97
120
98 # We have to handle requests to shadow repositories different than requests
121 # We have to handle requests to shadow repositories different than requests
99 # to normal repositories. Therefore we have to distinguish them. To do this
122 # to normal repositories. Therefore we have to distinguish them. To do this
100 # we use this regex which will match only on URLs pointing to shadow
123 # we use this regex which will match only on URLs pointing to shadow
101 # repositories.
124 # repositories.
102 shadow_repo_re = re.compile(
125 shadow_repo_re = re.compile(
103 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
126 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
104 '(?P<target>{slug_pat})/' # target repo
127 '(?P<target>{slug_pat})/' # target repo
105 'pull-request/(?P<pr_id>\d+)/' # pull request
128 'pull-request/(?P<pr_id>\d+)/' # pull request
106 'repository$' # shadow repo
129 'repository$' # shadow repo
107 .format(slug_pat=SLUG_RE.pattern))
130 .format(slug_pat=SLUG_RE.pattern))
108
131
109 def __init__(self, config, registry):
132 def __init__(self, config, registry):
110 self.registry = registry
133 self.registry = registry
111 self.config = config
134 self.config = config
112 # re-populated by specialized middleware
135 # re-populated by specialized middleware
113 self.repo_vcs_config = base.Config()
136 self.repo_vcs_config = base.Config()
114 self.rhodecode_settings = SettingsModel().get_all_settings(cache=True)
137 self.rhodecode_settings = SettingsModel().get_all_settings(cache=True)
115
138
116 registry.rhodecode_settings = self.rhodecode_settings
139 registry.rhodecode_settings = self.rhodecode_settings
117 # authenticate this VCS request using authfunc
140 # authenticate this VCS request using authfunc
118 auth_ret_code_detection = \
141 auth_ret_code_detection = \
119 str2bool(self.config.get('auth_ret_code_detection', False))
142 str2bool(self.config.get('auth_ret_code_detection', False))
120 self.authenticate = BasicAuth(
143 self.authenticate = BasicAuth(
121 '', authenticate, registry, config.get('auth_ret_code'),
144 '', authenticate, registry, config.get('auth_ret_code'),
122 auth_ret_code_detection)
145 auth_ret_code_detection)
123 self.ip_addr = '0.0.0.0'
146 self.ip_addr = '0.0.0.0'
124
147
125 @LazyProperty
148 @LazyProperty
126 def global_vcs_config(self):
149 def global_vcs_config(self):
127 try:
150 try:
128 return VcsSettingsModel().get_ui_settings_as_config_obj()
151 return VcsSettingsModel().get_ui_settings_as_config_obj()
129 except Exception:
152 except Exception:
130 return base.Config()
153 return base.Config()
131
154
132 @property
155 @property
133 def base_path(self):
156 def base_path(self):
134 settings_path = self.repo_vcs_config.get(
157 settings_path = self.repo_vcs_config.get(
135 *VcsSettingsModel.PATH_SETTING)
158 *VcsSettingsModel.PATH_SETTING)
136
159
137 if not settings_path:
160 if not settings_path:
138 settings_path = self.global_vcs_config.get(
161 settings_path = self.global_vcs_config.get(
139 *VcsSettingsModel.PATH_SETTING)
162 *VcsSettingsModel.PATH_SETTING)
140
163
141 if not settings_path:
164 if not settings_path:
142 # try, maybe we passed in explicitly as config option
165 # try, maybe we passed in explicitly as config option
143 settings_path = self.config.get('base_path')
166 settings_path = self.config.get('base_path')
144
167
145 if not settings_path:
168 if not settings_path:
146 raise ValueError('FATAL: base_path is empty')
169 raise ValueError('FATAL: base_path is empty')
147 return settings_path
170 return settings_path
148
171
149 def set_repo_names(self, environ):
172 def set_repo_names(self, environ):
150 """
173 """
151 This will populate the attributes acl_repo_name, url_repo_name,
174 This will populate the attributes acl_repo_name, url_repo_name,
152 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
175 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
153 shadow) repositories all names are equal. In case of requests to a
176 shadow) repositories all names are equal. In case of requests to a
154 shadow repository the acl-name points to the target repo of the pull
177 shadow repository the acl-name points to the target repo of the pull
155 request and the vcs-name points to the shadow repo file system path.
178 request and the vcs-name points to the shadow repo file system path.
156 The url-name is always the URL used by the vcs client program.
179 The url-name is always the URL used by the vcs client program.
157
180
158 Example in case of a shadow repo:
181 Example in case of a shadow repo:
159 acl_repo_name = RepoGroup/MyRepo
182 acl_repo_name = RepoGroup/MyRepo
160 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
183 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
161 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
184 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
162 """
185 """
163 # First we set the repo name from URL for all attributes. This is the
186 # First we set the repo name from URL for all attributes. This is the
164 # default if handling normal (non shadow) repo requests.
187 # default if handling normal (non shadow) repo requests.
165 self.url_repo_name = self._get_repository_name(environ)
188 self.url_repo_name = self._get_repository_name(environ)
166 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
189 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
167 self.is_shadow_repo = False
190 self.is_shadow_repo = False
168
191
169 # Check if this is a request to a shadow repository.
192 # Check if this is a request to a shadow repository.
170 match = self.shadow_repo_re.match(self.url_repo_name)
193 match = self.shadow_repo_re.match(self.url_repo_name)
171 if match:
194 if match:
172 match_dict = match.groupdict()
195 match_dict = match.groupdict()
173
196
174 # Build acl repo name from regex match.
197 # Build acl repo name from regex match.
175 acl_repo_name = safe_unicode('{groups}{target}'.format(
198 acl_repo_name = safe_unicode('{groups}{target}'.format(
176 groups=match_dict['groups'] or '',
199 groups=match_dict['groups'] or '',
177 target=match_dict['target']))
200 target=match_dict['target']))
178
201
179 # Retrieve pull request instance by ID from regex match.
202 # Retrieve pull request instance by ID from regex match.
180 pull_request = PullRequest.get(match_dict['pr_id'])
203 pull_request = PullRequest.get(match_dict['pr_id'])
181
204
182 # Only proceed if we got a pull request and if acl repo name from
205 # Only proceed if we got a pull request and if acl repo name from
183 # URL equals the target repo name of the pull request.
206 # URL equals the target repo name of the pull request.
184 if pull_request and (acl_repo_name ==
207 if pull_request and (acl_repo_name ==
185 pull_request.target_repo.repo_name):
208 pull_request.target_repo.repo_name):
186 # Get file system path to shadow repository.
209 # Get file system path to shadow repository.
187 workspace_id = PullRequestModel()._workspace_id(pull_request)
210 workspace_id = PullRequestModel()._workspace_id(pull_request)
188 target_vcs = pull_request.target_repo.scm_instance()
211 target_vcs = pull_request.target_repo.scm_instance()
189 vcs_repo_name = target_vcs._get_shadow_repository_path(
212 vcs_repo_name = target_vcs._get_shadow_repository_path(
190 workspace_id)
213 workspace_id)
191
214
192 # Store names for later usage.
215 # Store names for later usage.
193 self.vcs_repo_name = vcs_repo_name
216 self.vcs_repo_name = vcs_repo_name
194 self.acl_repo_name = acl_repo_name
217 self.acl_repo_name = acl_repo_name
195 self.is_shadow_repo = True
218 self.is_shadow_repo = True
196
219
197 log.debug('Setting all VCS repository names: %s', {
220 log.debug('Setting all VCS repository names: %s', {
198 'acl_repo_name': self.acl_repo_name,
221 'acl_repo_name': self.acl_repo_name,
199 'url_repo_name': self.url_repo_name,
222 'url_repo_name': self.url_repo_name,
200 'vcs_repo_name': self.vcs_repo_name,
223 'vcs_repo_name': self.vcs_repo_name,
201 })
224 })
202
225
203 @property
226 @property
204 def scm_app(self):
227 def scm_app(self):
205 custom_implementation = self.config['vcs.scm_app_implementation']
228 custom_implementation = self.config['vcs.scm_app_implementation']
206 if custom_implementation == 'http':
229 if custom_implementation == 'http':
207 log.info('Using HTTP implementation of scm app.')
230 log.info('Using HTTP implementation of scm app.')
208 scm_app_impl = scm_app_http
231 scm_app_impl = scm_app_http
209 else:
232 else:
210 log.info('Using custom implementation of scm_app: "{}"'.format(
233 log.info('Using custom implementation of scm_app: "{}"'.format(
211 custom_implementation))
234 custom_implementation))
212 scm_app_impl = importlib.import_module(custom_implementation)
235 scm_app_impl = importlib.import_module(custom_implementation)
213 return scm_app_impl
236 return scm_app_impl
214
237
215 def _get_by_id(self, repo_name):
238 def _get_by_id(self, repo_name):
216 """
239 """
217 Gets a special pattern _<ID> from clone url and tries to replace it
240 Gets a special pattern _<ID> from clone url and tries to replace it
218 with a repository_name for support of _<ID> non changeable urls
241 with a repository_name for support of _<ID> non changeable urls
219 """
242 """
220
243
221 data = repo_name.split('/')
244 data = repo_name.split('/')
222 if len(data) >= 2:
245 if len(data) >= 2:
223 from rhodecode.model.repo import RepoModel
246 from rhodecode.model.repo import RepoModel
224 by_id_match = RepoModel().get_repo_by_id(repo_name)
247 by_id_match = RepoModel().get_repo_by_id(repo_name)
225 if by_id_match:
248 if by_id_match:
226 data[1] = by_id_match.repo_name
249 data[1] = by_id_match.repo_name
227
250
228 return safe_str('/'.join(data))
251 return safe_str('/'.join(data))
229
252
230 def _invalidate_cache(self, repo_name):
253 def _invalidate_cache(self, repo_name):
231 """
254 """
232 Set's cache for this repository for invalidation on next access
255 Set's cache for this repository for invalidation on next access
233
256
234 :param repo_name: full repo name, also a cache key
257 :param repo_name: full repo name, also a cache key
235 """
258 """
236 ScmModel().mark_for_invalidation(repo_name)
259 ScmModel().mark_for_invalidation(repo_name)
237
260
238 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
261 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
239 db_repo = Repository.get_by_repo_name(repo_name)
262 db_repo = Repository.get_by_repo_name(repo_name)
240 if not db_repo:
263 if not db_repo:
241 log.debug('Repository `%s` not found inside the database.',
264 log.debug('Repository `%s` not found inside the database.',
242 repo_name)
265 repo_name)
243 return False
266 return False
244
267
245 if db_repo.repo_type != scm_type:
268 if db_repo.repo_type != scm_type:
246 log.warning(
269 log.warning(
247 'Repository `%s` have incorrect scm_type, expected %s got %s',
270 'Repository `%s` have incorrect scm_type, expected %s got %s',
248 repo_name, db_repo.repo_type, scm_type)
271 repo_name, db_repo.repo_type, scm_type)
249 return False
272 return False
250
273
251 config = db_repo._config
274 config = db_repo._config
252 config.set('extensions', 'largefiles', '')
275 config.set('extensions', 'largefiles', '')
253 return is_valid_repo(
276 return is_valid_repo(
254 repo_name, base_path,
277 repo_name, base_path,
255 explicit_scm=scm_type, expect_scm=scm_type, config=config)
278 explicit_scm=scm_type, expect_scm=scm_type, config=config)
256
279
257 def valid_and_active_user(self, user):
280 def valid_and_active_user(self, user):
258 """
281 """
259 Checks if that user is not empty, and if it's actually object it checks
282 Checks if that user is not empty, and if it's actually object it checks
260 if he's active.
283 if he's active.
261
284
262 :param user: user object or None
285 :param user: user object or None
263 :return: boolean
286 :return: boolean
264 """
287 """
265 if user is None:
288 if user is None:
266 return False
289 return False
267
290
268 elif user.active:
291 elif user.active:
269 return True
292 return True
270
293
271 return False
294 return False
272
295
273 @property
296 @property
274 def is_shadow_repo_dir(self):
297 def is_shadow_repo_dir(self):
275 return os.path.isdir(self.vcs_repo_name)
298 return os.path.isdir(self.vcs_repo_name)
276
299
277 def _check_permission(self, action, user, repo_name, ip_addr=None,
300 def _check_permission(self, action, user, repo_name, ip_addr=None,
278 plugin_id='', plugin_cache_active=False, cache_ttl=0):
301 plugin_id='', plugin_cache_active=False, cache_ttl=0):
279 """
302 """
280 Checks permissions using action (push/pull) user and repository
303 Checks permissions using action (push/pull) user and repository
281 name. If plugin_cache and ttl is set it will use the plugin which
304 name. If plugin_cache and ttl is set it will use the plugin which
282 authenticated the user to store the cached permissions result for N
305 authenticated the user to store the cached permissions result for N
283 amount of seconds as in cache_ttl
306 amount of seconds as in cache_ttl
284
307
285 :param action: push or pull action
308 :param action: push or pull action
286 :param user: user instance
309 :param user: user instance
287 :param repo_name: repository name
310 :param repo_name: repository name
288 """
311 """
289
312
290 # get instance of cache manager configured for a namespace
313 # get instance of cache manager configured for a namespace
291 cache_manager = get_perms_cache_manager(
314 cache_manager = get_perms_cache_manager(
292 custom_ttl=cache_ttl, suffix=user.user_id)
315 custom_ttl=cache_ttl, suffix=user.user_id)
293 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
316 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
294 plugin_id, plugin_cache_active, cache_ttl)
317 plugin_id, plugin_cache_active, cache_ttl)
295
318
296 # for environ based password can be empty, but then the validation is
319 # for environ based password can be empty, but then the validation is
297 # on the server that fills in the env data needed for authentication
320 # on the server that fills in the env data needed for authentication
298 _perm_calc_hash = caches.compute_key_from_params(
321 _perm_calc_hash = caches.compute_key_from_params(
299 plugin_id, action, user.user_id, repo_name, ip_addr)
322 plugin_id, action, user.user_id, repo_name, ip_addr)
300
323
301 # _authenticate is a wrapper for .auth() method of plugin.
324 # _authenticate is a wrapper for .auth() method of plugin.
302 # it checks if .auth() sends proper data.
325 # it checks if .auth() sends proper data.
303 # For RhodeCodeExternalAuthPlugin it also maps users to
326 # For RhodeCodeExternalAuthPlugin it also maps users to
304 # Database and maps the attributes returned from .auth()
327 # Database and maps the attributes returned from .auth()
305 # to RhodeCode database. If this function returns data
328 # to RhodeCode database. If this function returns data
306 # then auth is correct.
329 # then auth is correct.
307 start = time.time()
330 start = time.time()
308 log.debug('Running plugin `%s` permissions check', plugin_id)
331 log.debug('Running plugin `%s` permissions check', plugin_id)
309
332
310 def perm_func():
333 def perm_func():
311 """
334 """
312 This function is used internally in Cache of Beaker to calculate
335 This function is used internally in Cache of Beaker to calculate
313 Results
336 Results
314 """
337 """
315 log.debug('auth: calculating permission access now...')
338 log.debug('auth: calculating permission access now...')
316 # check IP
339 # check IP
317 inherit = user.inherit_default_permissions
340 inherit = user.inherit_default_permissions
318 ip_allowed = AuthUser.check_ip_allowed(
341 ip_allowed = AuthUser.check_ip_allowed(
319 user.user_id, ip_addr, inherit_from_default=inherit)
342 user.user_id, ip_addr, inherit_from_default=inherit)
320 if ip_allowed:
343 if ip_allowed:
321 log.info('Access for IP:%s allowed', ip_addr)
344 log.info('Access for IP:%s allowed', ip_addr)
322 else:
345 else:
323 return False
346 return False
324
347
325 if action == 'push':
348 if action == 'push':
326 perms = ('repository.write', 'repository.admin')
349 perms = ('repository.write', 'repository.admin')
327 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
350 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
328 return False
351 return False
329
352
330 else:
353 else:
331 # any other action need at least read permission
354 # any other action need at least read permission
332 perms = (
355 perms = (
333 'repository.read', 'repository.write', 'repository.admin')
356 'repository.read', 'repository.write', 'repository.admin')
334 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
357 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
335 return False
358 return False
336
359
337 return True
360 return True
338
361
339 if plugin_cache_active:
362 if plugin_cache_active:
340 log.debug('Trying to fetch cached perms by %s', _perm_calc_hash[:6])
363 log.debug('Trying to fetch cached perms by %s', _perm_calc_hash[:6])
341 perm_result = cache_manager.get(
364 perm_result = cache_manager.get(
342 _perm_calc_hash, createfunc=perm_func)
365 _perm_calc_hash, createfunc=perm_func)
343 else:
366 else:
344 perm_result = perm_func()
367 perm_result = perm_func()
345
368
346 auth_time = time.time() - start
369 auth_time = time.time() - start
347 log.debug('Permissions for plugin `%s` completed in %.3fs, '
370 log.debug('Permissions for plugin `%s` completed in %.3fs, '
348 'expiration time of fetched cache %.1fs.',
371 'expiration time of fetched cache %.1fs.',
349 plugin_id, auth_time, cache_ttl)
372 plugin_id, auth_time, cache_ttl)
350
373
351 return perm_result
374 return perm_result
352
375
353 def _check_ssl(self, environ, start_response):
376 def _check_ssl(self, environ, start_response):
354 """
377 """
355 Checks the SSL check flag and returns False if SSL is not present
378 Checks the SSL check flag and returns False if SSL is not present
356 and required True otherwise
379 and required True otherwise
357 """
380 """
358 org_proto = environ['wsgi._org_proto']
381 org_proto = environ['wsgi._org_proto']
359 # check if we have SSL required ! if not it's a bad request !
382 # check if we have SSL required ! if not it's a bad request !
360 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
383 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
361 if require_ssl and org_proto == 'http':
384 if require_ssl and org_proto == 'http':
362 log.debug(
385 log.debug(
363 'Bad request: detected protocol is `%s` and '
386 'Bad request: detected protocol is `%s` and '
364 'SSL/HTTPS is required.', org_proto)
387 'SSL/HTTPS is required.', org_proto)
365 return False
388 return False
366 return True
389 return True
367
390
368 def _get_default_cache_ttl(self):
391 def _get_default_cache_ttl(self):
369 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
392 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
370 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
393 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
371 plugin_settings = plugin.get_settings()
394 plugin_settings = plugin.get_settings()
372 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
395 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
373 plugin_settings) or (False, 0)
396 plugin_settings) or (False, 0)
374 return plugin_cache_active, cache_ttl
397 return plugin_cache_active, cache_ttl
375
398
376 def __call__(self, environ, start_response):
399 def __call__(self, environ, start_response):
377 try:
400 try:
378 return self._handle_request(environ, start_response)
401 return self._handle_request(environ, start_response)
379 except Exception:
402 except Exception:
380 log.exception("Exception while handling request")
403 log.exception("Exception while handling request")
381 appenlight.track_exception(environ)
404 appenlight.track_exception(environ)
382 return HTTPInternalServerError()(environ, start_response)
405 return HTTPInternalServerError()(environ, start_response)
383 finally:
406 finally:
384 meta.Session.remove()
407 meta.Session.remove()
385
408
386 def _handle_request(self, environ, start_response):
409 def _handle_request(self, environ, start_response):
387
410
388 if not self._check_ssl(environ, start_response):
411 if not self._check_ssl(environ, start_response):
389 reason = ('SSL required, while RhodeCode was unable '
412 reason = ('SSL required, while RhodeCode was unable '
390 'to detect this as SSL request')
413 'to detect this as SSL request')
391 log.debug('User not allowed to proceed, %s', reason)
414 log.debug('User not allowed to proceed, %s', reason)
392 return HTTPNotAcceptable(reason)(environ, start_response)
415 return HTTPNotAcceptable(reason)(environ, start_response)
393
416
394 if not self.url_repo_name:
417 if not self.url_repo_name:
395 log.warning('Repository name is empty: %s', self.url_repo_name)
418 log.warning('Repository name is empty: %s', self.url_repo_name)
396 # failed to get repo name, we fail now
419 # failed to get repo name, we fail now
397 return HTTPNotFound()(environ, start_response)
420 return HTTPNotFound()(environ, start_response)
398 log.debug('Extracted repo name is %s', self.url_repo_name)
421 log.debug('Extracted repo name is %s', self.url_repo_name)
399
422
400 ip_addr = get_ip_addr(environ)
423 ip_addr = get_ip_addr(environ)
401 user_agent = get_user_agent(environ)
424 user_agent = get_user_agent(environ)
402 username = None
425 username = None
403
426
404 # skip passing error to error controller
427 # skip passing error to error controller
405 environ['pylons.status_code_redirect'] = True
428 environ['pylons.status_code_redirect'] = True
406
429
407 # ======================================================================
430 # ======================================================================
408 # GET ACTION PULL or PUSH
431 # GET ACTION PULL or PUSH
409 # ======================================================================
432 # ======================================================================
410 action = self._get_action(environ)
433 action = self._get_action(environ)
411
434
412 # ======================================================================
435 # ======================================================================
413 # Check if this is a request to a shadow repository of a pull request.
436 # Check if this is a request to a shadow repository of a pull request.
414 # In this case only pull action is allowed.
437 # In this case only pull action is allowed.
415 # ======================================================================
438 # ======================================================================
416 if self.is_shadow_repo and action != 'pull':
439 if self.is_shadow_repo and action != 'pull':
417 reason = 'Only pull action is allowed for shadow repositories.'
440 reason = 'Only pull action is allowed for shadow repositories.'
418 log.debug('User not allowed to proceed, %s', reason)
441 log.debug('User not allowed to proceed, %s', reason)
419 return HTTPNotAcceptable(reason)(environ, start_response)
442 return HTTPNotAcceptable(reason)(environ, start_response)
420
443
421 # Check if the shadow repo actually exists, in case someone refers
444 # Check if the shadow repo actually exists, in case someone refers
422 # to it, and it has been deleted because of successful merge.
445 # to it, and it has been deleted because of successful merge.
423 if self.is_shadow_repo and not self.is_shadow_repo_dir:
446 if self.is_shadow_repo and not self.is_shadow_repo_dir:
424 log.debug(
447 log.debug(
425 'Shadow repo detected, and shadow repo dir `%s` is missing',
448 'Shadow repo detected, and shadow repo dir `%s` is missing',
426 self.is_shadow_repo_dir)
449 self.is_shadow_repo_dir)
427 return HTTPNotFound()(environ, start_response)
450 return HTTPNotFound()(environ, start_response)
428
451
429 # ======================================================================
452 # ======================================================================
430 # CHECK ANONYMOUS PERMISSION
453 # CHECK ANONYMOUS PERMISSION
431 # ======================================================================
454 # ======================================================================
432 if action in ['pull', 'push']:
455 if action in ['pull', 'push']:
433 anonymous_user = User.get_default_user()
456 anonymous_user = User.get_default_user()
434 username = anonymous_user.username
457 username = anonymous_user.username
435 if anonymous_user.active:
458 if anonymous_user.active:
436 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
459 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
437 # ONLY check permissions if the user is activated
460 # ONLY check permissions if the user is activated
438 anonymous_perm = self._check_permission(
461 anonymous_perm = self._check_permission(
439 action, anonymous_user, self.acl_repo_name, ip_addr,
462 action, anonymous_user, self.acl_repo_name, ip_addr,
440 plugin_id='anonymous_access',
463 plugin_id='anonymous_access',
441 plugin_cache_active=plugin_cache_active,
464 plugin_cache_active=plugin_cache_active,
442 cache_ttl=cache_ttl,
465 cache_ttl=cache_ttl,
443 )
466 )
444 else:
467 else:
445 anonymous_perm = False
468 anonymous_perm = False
446
469
447 if not anonymous_user.active or not anonymous_perm:
470 if not anonymous_user.active or not anonymous_perm:
448 if not anonymous_user.active:
471 if not anonymous_user.active:
449 log.debug('Anonymous access is disabled, running '
472 log.debug('Anonymous access is disabled, running '
450 'authentication')
473 'authentication')
451
474
452 if not anonymous_perm:
475 if not anonymous_perm:
453 log.debug('Not enough credentials to access this '
476 log.debug('Not enough credentials to access this '
454 'repository as anonymous user')
477 'repository as anonymous user')
455
478
456 username = None
479 username = None
457 # ==============================================================
480 # ==============================================================
458 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
481 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
459 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
482 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
460 # ==============================================================
483 # ==============================================================
461
484
462 # try to auth based on environ, container auth methods
485 # try to auth based on environ, container auth methods
463 log.debug('Running PRE-AUTH for container based authentication')
486 log.debug('Running PRE-AUTH for container based authentication')
464 pre_auth = authenticate(
487 pre_auth = authenticate(
465 '', '', environ, VCS_TYPE, registry=self.registry,
488 '', '', environ, VCS_TYPE, registry=self.registry,
466 acl_repo_name=self.acl_repo_name)
489 acl_repo_name=self.acl_repo_name)
467 if pre_auth and pre_auth.get('username'):
490 if pre_auth and pre_auth.get('username'):
468 username = pre_auth['username']
491 username = pre_auth['username']
469 log.debug('PRE-AUTH got %s as username', username)
492 log.debug('PRE-AUTH got %s as username', username)
470 if pre_auth:
493 if pre_auth:
471 log.debug('PRE-AUTH successful from %s',
494 log.debug('PRE-AUTH successful from %s',
472 pre_auth.get('auth_data', {}).get('_plugin'))
495 pre_auth.get('auth_data', {}).get('_plugin'))
473
496
474 # If not authenticated by the container, running basic auth
497 # If not authenticated by the container, running basic auth
475 # before inject the calling repo_name for special scope checks
498 # before inject the calling repo_name for special scope checks
476 self.authenticate.acl_repo_name = self.acl_repo_name
499 self.authenticate.acl_repo_name = self.acl_repo_name
477
500
478 plugin_cache_active, cache_ttl = False, 0
501 plugin_cache_active, cache_ttl = False, 0
479 plugin = None
502 plugin = None
480 if not username:
503 if not username:
481 self.authenticate.realm = self.authenticate.get_rc_realm()
504 self.authenticate.realm = self.authenticate.get_rc_realm()
482
505
483 try:
506 try:
484 auth_result = self.authenticate(environ)
507 auth_result = self.authenticate(environ)
485 except (UserCreationError, NotAllowedToCreateUserError) as e:
508 except (UserCreationError, NotAllowedToCreateUserError) as e:
486 log.error(e)
509 log.error(e)
487 reason = safe_str(e)
510 reason = safe_str(e)
488 return HTTPNotAcceptable(reason)(environ, start_response)
511 return HTTPNotAcceptable(reason)(environ, start_response)
489
512
490 if isinstance(auth_result, dict):
513 if isinstance(auth_result, dict):
491 AUTH_TYPE.update(environ, 'basic')
514 AUTH_TYPE.update(environ, 'basic')
492 REMOTE_USER.update(environ, auth_result['username'])
515 REMOTE_USER.update(environ, auth_result['username'])
493 username = auth_result['username']
516 username = auth_result['username']
494 plugin = auth_result.get('auth_data', {}).get('_plugin')
517 plugin = auth_result.get('auth_data', {}).get('_plugin')
495 log.info(
518 log.info(
496 'MAIN-AUTH successful for user `%s` from %s plugin',
519 'MAIN-AUTH successful for user `%s` from %s plugin',
497 username, plugin)
520 username, plugin)
498
521
499 plugin_cache_active, cache_ttl = auth_result.get(
522 plugin_cache_active, cache_ttl = auth_result.get(
500 'auth_data', {}).get('_ttl_cache') or (False, 0)
523 'auth_data', {}).get('_ttl_cache') or (False, 0)
501 else:
524 else:
502 return auth_result.wsgi_application(
525 return auth_result.wsgi_application(
503 environ, start_response)
526 environ, start_response)
504
527
505
528
506 # ==============================================================
529 # ==============================================================
507 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
530 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
508 # ==============================================================
531 # ==============================================================
509 user = User.get_by_username(username)
532 user = User.get_by_username(username)
510 if not self.valid_and_active_user(user):
533 if not self.valid_and_active_user(user):
511 return HTTPForbidden()(environ, start_response)
534 return HTTPForbidden()(environ, start_response)
512 username = user.username
535 username = user.username
513 user.update_lastactivity()
536 user.update_lastactivity()
514 meta.Session().commit()
537 meta.Session().commit()
515
538
516 # check user attributes for password change flag
539 # check user attributes for password change flag
517 user_obj = user
540 user_obj = user
518 if user_obj and user_obj.username != User.DEFAULT_USER and \
541 if user_obj and user_obj.username != User.DEFAULT_USER and \
519 user_obj.user_data.get('force_password_change'):
542 user_obj.user_data.get('force_password_change'):
520 reason = 'password change required'
543 reason = 'password change required'
521 log.debug('User not allowed to authenticate, %s', reason)
544 log.debug('User not allowed to authenticate, %s', reason)
522 return HTTPNotAcceptable(reason)(environ, start_response)
545 return HTTPNotAcceptable(reason)(environ, start_response)
523
546
524 # check permissions for this repository
547 # check permissions for this repository
525 perm = self._check_permission(
548 perm = self._check_permission(
526 action, user, self.acl_repo_name, ip_addr,
549 action, user, self.acl_repo_name, ip_addr,
527 plugin, plugin_cache_active, cache_ttl)
550 plugin, plugin_cache_active, cache_ttl)
528 if not perm:
551 if not perm:
529 return HTTPForbidden()(environ, start_response)
552 return HTTPForbidden()(environ, start_response)
530
553
531 # extras are injected into UI object and later available
554 # extras are injected into UI object and later available
532 # in hooks executed by RhodeCode
555 # in hooks executed by RhodeCode
533 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
556 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
534 extras = vcs_operation_context(
557 extras = vcs_operation_context(
535 environ, repo_name=self.acl_repo_name, username=username,
558 environ, repo_name=self.acl_repo_name, username=username,
536 action=action, scm=self.SCM, check_locking=check_locking,
559 action=action, scm=self.SCM, check_locking=check_locking,
537 is_shadow_repo=self.is_shadow_repo
560 is_shadow_repo=self.is_shadow_repo
538 )
561 )
539
562
540 # ======================================================================
563 # ======================================================================
541 # REQUEST HANDLING
564 # REQUEST HANDLING
542 # ======================================================================
565 # ======================================================================
543 repo_path = os.path.join(
566 repo_path = os.path.join(
544 safe_str(self.base_path), safe_str(self.vcs_repo_name))
567 safe_str(self.base_path), safe_str(self.vcs_repo_name))
545 log.debug('Repository path is %s', repo_path)
568 log.debug('Repository path is %s', repo_path)
546
569
547 fix_PATH()
570 fix_PATH()
548
571
549 log.info(
572 log.info(
550 '%s action on %s repo "%s" by "%s" from %s %s',
573 '%s action on %s repo "%s" by "%s" from %s %s',
551 action, self.SCM, safe_str(self.url_repo_name),
574 action, self.SCM, safe_str(self.url_repo_name),
552 safe_str(username), ip_addr, user_agent)
575 safe_str(username), ip_addr, user_agent)
553
576
554 return self._generate_vcs_response(
577 return self._generate_vcs_response(
555 environ, start_response, repo_path, extras, action)
578 environ, start_response, repo_path, extras, action)
556
579
557 @initialize_generator
580 @initialize_generator
558 def _generate_vcs_response(
581 def _generate_vcs_response(
559 self, environ, start_response, repo_path, extras, action):
582 self, environ, start_response, repo_path, extras, action):
560 """
583 """
561 Returns a generator for the response content.
584 Returns a generator for the response content.
562
585
563 This method is implemented as a generator, so that it can trigger
586 This method is implemented as a generator, so that it can trigger
564 the cache validation after all content sent back to the client. It
587 the cache validation after all content sent back to the client. It
565 also handles the locking exceptions which will be triggered when
588 also handles the locking exceptions which will be triggered when
566 the first chunk is produced by the underlying WSGI application.
589 the first chunk is produced by the underlying WSGI application.
567 """
590 """
568 callback_daemon, extras = self._prepare_callback_daemon(extras)
591 txn_id = ''
569 config = self._create_config(extras, self.acl_repo_name)
592 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
570 log.debug('HOOKS extras is %s', extras)
593 # case for SVN, we want to re-use the callback daemon port
571 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
594 # so we use the txn_id, for this we peek the body, and still save
572 app.rc_extras = extras
595 # it as wsgi.input
596 data = environ['wsgi.input'].read()
597 environ['wsgi.input'] = StringIO(data)
598 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
573
599
574 try:
600 callback_daemon, extras = self._prepare_callback_daemon(
575 with callback_daemon:
601 extras, environ, action, txn_id=txn_id)
576 try:
602 log.debug('HOOKS extras is %s', extras)
577 response = app(environ, start_response)
603
578 finally:
604 config = self._create_config(extras, self.acl_repo_name)
579 # This statement works together with the decorator
605 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
580 # "initialize_generator" above. The decorator ensures that
606 with callback_daemon:
581 # we hit the first yield statement before the generator is
607 app.rc_extras = extras
582 # returned back to the WSGI server. This is needed to
583 # ensure that the call to "app" above triggers the
584 # needed callback to "start_response" before the
585 # generator is actually used.
586 yield "__init__"
587
608
588 for chunk in response:
609 try:
589 yield chunk
610 response = app(environ, start_response)
590 except Exception as exc:
611 finally:
591 # TODO: martinb: Exceptions are only raised in case of the Pyro4
612 # This statement works together with the decorator
592 # backend. Refactor this except block after dropping Pyro4 support.
613 # "initialize_generator" above. The decorator ensures that
593 # TODO: johbo: Improve "translating" back the exception.
614 # we hit the first yield statement before the generator is
594 if getattr(exc, '_vcs_kind', None) == 'repo_locked':
615 # returned back to the WSGI server. This is needed to
595 exc = HTTPLockedRC(*exc.args)
616 # ensure that the call to "app" above triggers the
596 _code = rhodecode.CONFIG.get('lock_ret_code')
617 # needed callback to "start_response" before the
597 log.debug('Repository LOCKED ret code %s!', (_code,))
618 # generator is actually used.
598 elif getattr(exc, '_vcs_kind', None) == 'requirement':
619 yield "__init__"
599 log.debug(
600 'Repository requires features unknown to this Mercurial')
601 exc = HTTPRequirementError(*exc.args)
602 else:
603 raise
604
620
605 for chunk in exc(environ, start_response):
621 # iter content
622 for chunk in response:
606 yield chunk
623 yield chunk
607 finally:
624
608 # invalidate cache on push
609 try:
625 try:
626 # invalidate cache on push
610 if action == 'push':
627 if action == 'push':
611 self._invalidate_cache(self.url_repo_name)
628 self._invalidate_cache(self.url_repo_name)
612 finally:
629 finally:
613 meta.Session.remove()
630 meta.Session.remove()
614
631
615 def _get_repository_name(self, environ):
632 def _get_repository_name(self, environ):
616 """Get repository name out of the environmnent
633 """Get repository name out of the environmnent
617
634
618 :param environ: WSGI environment
635 :param environ: WSGI environment
619 """
636 """
620 raise NotImplementedError()
637 raise NotImplementedError()
621
638
622 def _get_action(self, environ):
639 def _get_action(self, environ):
623 """Map request commands into a pull or push command.
640 """Map request commands into a pull or push command.
624
641
625 :param environ: WSGI environment
642 :param environ: WSGI environment
626 """
643 """
627 raise NotImplementedError()
644 raise NotImplementedError()
628
645
629 def _create_wsgi_app(self, repo_path, repo_name, config):
646 def _create_wsgi_app(self, repo_path, repo_name, config):
630 """Return the WSGI app that will finally handle the request."""
647 """Return the WSGI app that will finally handle the request."""
631 raise NotImplementedError()
648 raise NotImplementedError()
632
649
633 def _create_config(self, extras, repo_name):
650 def _create_config(self, extras, repo_name):
634 """Create a safe config representation."""
651 """Create a safe config representation."""
635 raise NotImplementedError()
652 raise NotImplementedError()
636
653
637 def _prepare_callback_daemon(self, extras):
654 def _should_use_callback_daemon(self, extras, environ, action):
655 return True
656
657 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
658 direct_calls = vcs_settings.HOOKS_DIRECT_CALLS
659 if not self._should_use_callback_daemon(extras, environ, action):
660 # disable callback daemon for actions that don't require it
661 direct_calls = True
662
638 return prepare_callback_daemon(
663 return prepare_callback_daemon(
639 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
664 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
640 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
665 use_direct_calls=direct_calls, txn_id=txn_id)
641
666
642
667
643 def _should_check_locking(query_string):
668 def _should_check_locking(query_string):
644 # this is kind of hacky, but due to how mercurial handles client-server
669 # this is kind of hacky, but due to how mercurial handles client-server
645 # server see all operation on commit; bookmarks, phases and
670 # server see all operation on commit; bookmarks, phases and
646 # obsolescence marker in different transaction, we don't want to check
671 # obsolescence marker in different transaction, we don't want to check
647 # locking on those
672 # locking on those
648 return query_string not in ['cmd=listkeys']
673 return query_string not in ['cmd=listkeys']
@@ -1,781 +1,779 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Utilities library for RhodeCode
22 Utilities library for RhodeCode
23 """
23 """
24
24
25 import datetime
25 import datetime
26 import decorator
26 import decorator
27 import json
27 import json
28 import logging
28 import logging
29 import os
29 import os
30 import re
30 import re
31 import shutil
31 import shutil
32 import tempfile
32 import tempfile
33 import traceback
33 import traceback
34 import tarfile
34 import tarfile
35 import warnings
35 import warnings
36 import hashlib
36 import hashlib
37 from os.path import join as jn
37 from os.path import join as jn
38
38
39 import paste
39 import paste
40 import pkg_resources
40 import pkg_resources
41 from webhelpers.text import collapse, remove_formatting, strip_tags
41 from webhelpers.text import collapse, remove_formatting, strip_tags
42 from mako import exceptions
42 from mako import exceptions
43 from pyramid.threadlocal import get_current_registry
43 from pyramid.threadlocal import get_current_registry
44 from pyramid.request import Request
44 from pyramid.request import Request
45
45
46 from rhodecode.lib.fakemod import create_module
46 from rhodecode.lib.fakemod import create_module
47 from rhodecode.lib.vcs.backends.base import Config
47 from rhodecode.lib.vcs.backends.base import Config
48 from rhodecode.lib.vcs.exceptions import VCSError
48 from rhodecode.lib.vcs.exceptions import VCSError
49 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
49 from rhodecode.lib.vcs.utils.helpers import get_scm, get_scm_backend
50 from rhodecode.lib.utils2 import (
50 from rhodecode.lib.utils2 import (
51 safe_str, safe_unicode, get_current_rhodecode_user, md5)
51 safe_str, safe_unicode, get_current_rhodecode_user, md5)
52 from rhodecode.model import meta
52 from rhodecode.model import meta
53 from rhodecode.model.db import (
53 from rhodecode.model.db import (
54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
54 Repository, User, RhodeCodeUi, UserLog, RepoGroup, UserGroup)
55 from rhodecode.model.meta import Session
55 from rhodecode.model.meta import Session
56
56
57
57
58 log = logging.getLogger(__name__)
58 log = logging.getLogger(__name__)
59
59
60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
60 REMOVED_REPO_PAT = re.compile(r'rm__\d{8}_\d{6}_\d{6}__.*')
61
61
62 # String which contains characters that are not allowed in slug names for
62 # String which contains characters that are not allowed in slug names for
63 # repositories or repository groups. It is properly escaped to use it in
63 # repositories or repository groups. It is properly escaped to use it in
64 # regular expressions.
64 # regular expressions.
65 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
65 SLUG_BAD_CHARS = re.escape('`?=[]\;\'"<>,/~!@#$%^&*()+{}|:')
66
66
67 # Regex that matches forbidden characters in repo/group slugs.
67 # Regex that matches forbidden characters in repo/group slugs.
68 SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))
68 SLUG_BAD_CHAR_RE = re.compile('[{}]'.format(SLUG_BAD_CHARS))
69
69
70 # Regex that matches allowed characters in repo/group slugs.
70 # Regex that matches allowed characters in repo/group slugs.
71 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
71 SLUG_GOOD_CHAR_RE = re.compile('[^{}]'.format(SLUG_BAD_CHARS))
72
72
73 # Regex that matches whole repo/group slugs.
73 # Regex that matches whole repo/group slugs.
74 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
74 SLUG_RE = re.compile('[^{}]+'.format(SLUG_BAD_CHARS))
75
75
76 _license_cache = None
76 _license_cache = None
77
77
78
78
79 def repo_name_slug(value):
79 def repo_name_slug(value):
80 """
80 """
81 Return slug of name of repository
81 Return slug of name of repository
82 This function is called on each creation/modification
82 This function is called on each creation/modification
83 of repository to prevent bad names in repo
83 of repository to prevent bad names in repo
84 """
84 """
85 replacement_char = '-'
85 replacement_char = '-'
86
86
87 slug = remove_formatting(value)
87 slug = remove_formatting(value)
88 slug = SLUG_BAD_CHAR_RE.sub('', slug)
88 slug = SLUG_BAD_CHAR_RE.sub('', slug)
89 slug = re.sub('[\s]+', '-', slug)
89 slug = re.sub('[\s]+', '-', slug)
90 slug = collapse(slug, replacement_char)
90 slug = collapse(slug, replacement_char)
91 return slug
91 return slug
92
92
93
93
94 #==============================================================================
94 #==============================================================================
95 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
95 # PERM DECORATOR HELPERS FOR EXTRACTING NAMES FOR PERM CHECKS
96 #==============================================================================
96 #==============================================================================
97 def get_repo_slug(request):
97 def get_repo_slug(request):
98 _repo = ''
98 _repo = ''
99
99
100 if hasattr(request, 'db_repo'):
100 if hasattr(request, 'db_repo'):
101 # if our requests has set db reference use it for name, this
101 # if our requests has set db reference use it for name, this
102 # translates the example.com/_<id> into proper repo names
102 # translates the example.com/_<id> into proper repo names
103 _repo = request.db_repo.repo_name
103 _repo = request.db_repo.repo_name
104 elif getattr(request, 'matchdict', None):
104 elif getattr(request, 'matchdict', None):
105 # pyramid
105 # pyramid
106 _repo = request.matchdict.get('repo_name')
106 _repo = request.matchdict.get('repo_name')
107
107
108 if _repo:
108 if _repo:
109 _repo = _repo.rstrip('/')
109 _repo = _repo.rstrip('/')
110 return _repo
110 return _repo
111
111
112
112
113 def get_repo_group_slug(request):
113 def get_repo_group_slug(request):
114 _group = ''
114 _group = ''
115 if hasattr(request, 'db_repo_group'):
115 if hasattr(request, 'db_repo_group'):
116 # if our requests has set db reference use it for name, this
116 # if our requests has set db reference use it for name, this
117 # translates the example.com/_<id> into proper repo group names
117 # translates the example.com/_<id> into proper repo group names
118 _group = request.db_repo_group.group_name
118 _group = request.db_repo_group.group_name
119 elif getattr(request, 'matchdict', None):
119 elif getattr(request, 'matchdict', None):
120 # pyramid
120 # pyramid
121 _group = request.matchdict.get('repo_group_name')
121 _group = request.matchdict.get('repo_group_name')
122
122
123
123
124 if _group:
124 if _group:
125 _group = _group.rstrip('/')
125 _group = _group.rstrip('/')
126 return _group
126 return _group
127
127
128
128
129 def get_user_group_slug(request):
129 def get_user_group_slug(request):
130 _user_group = ''
130 _user_group = ''
131
131
132 if hasattr(request, 'db_user_group'):
132 if hasattr(request, 'db_user_group'):
133 _user_group = request.db_user_group.users_group_name
133 _user_group = request.db_user_group.users_group_name
134 elif getattr(request, 'matchdict', None):
134 elif getattr(request, 'matchdict', None):
135 # pyramid
135 # pyramid
136 _user_group = request.matchdict.get('user_group_id')
136 _user_group = request.matchdict.get('user_group_id')
137 _user_group_name = request.matchdict.get('user_group_name')
137 _user_group_name = request.matchdict.get('user_group_name')
138 try:
138 try:
139 if _user_group:
139 if _user_group:
140 _user_group = UserGroup.get(_user_group)
140 _user_group = UserGroup.get(_user_group)
141 elif _user_group_name:
141 elif _user_group_name:
142 _user_group = UserGroup.get_by_group_name(_user_group_name)
142 _user_group = UserGroup.get_by_group_name(_user_group_name)
143
143
144 if _user_group:
144 if _user_group:
145 _user_group = _user_group.users_group_name
145 _user_group = _user_group.users_group_name
146 except Exception:
146 except Exception:
147 log.exception('Failed to get user group by id and name')
147 log.exception('Failed to get user group by id and name')
148 # catch all failures here
148 # catch all failures here
149 return None
149 return None
150
150
151 return _user_group
151 return _user_group
152
152
153
153
154 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
154 def get_filesystem_repos(path, recursive=False, skip_removed_repos=True):
155 """
155 """
156 Scans given path for repos and return (name,(type,path)) tuple
156 Scans given path for repos and return (name,(type,path)) tuple
157
157
158 :param path: path to scan for repositories
158 :param path: path to scan for repositories
159 :param recursive: recursive search and return names with subdirs in front
159 :param recursive: recursive search and return names with subdirs in front
160 """
160 """
161
161
162 # remove ending slash for better results
162 # remove ending slash for better results
163 path = path.rstrip(os.sep)
163 path = path.rstrip(os.sep)
164 log.debug('now scanning in %s location recursive:%s...', path, recursive)
164 log.debug('now scanning in %s location recursive:%s...', path, recursive)
165
165
166 def _get_repos(p):
166 def _get_repos(p):
167 dirpaths = _get_dirpaths(p)
167 dirpaths = _get_dirpaths(p)
168 if not _is_dir_writable(p):
168 if not _is_dir_writable(p):
169 log.warning('repo path without write access: %s', p)
169 log.warning('repo path without write access: %s', p)
170
170
171 for dirpath in dirpaths:
171 for dirpath in dirpaths:
172 if os.path.isfile(os.path.join(p, dirpath)):
172 if os.path.isfile(os.path.join(p, dirpath)):
173 continue
173 continue
174 cur_path = os.path.join(p, dirpath)
174 cur_path = os.path.join(p, dirpath)
175
175
176 # skip removed repos
176 # skip removed repos
177 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
177 if skip_removed_repos and REMOVED_REPO_PAT.match(dirpath):
178 continue
178 continue
179
179
180 #skip .<somethin> dirs
180 #skip .<somethin> dirs
181 if dirpath.startswith('.'):
181 if dirpath.startswith('.'):
182 continue
182 continue
183
183
184 try:
184 try:
185 scm_info = get_scm(cur_path)
185 scm_info = get_scm(cur_path)
186 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
186 yield scm_info[1].split(path, 1)[-1].lstrip(os.sep), scm_info
187 except VCSError:
187 except VCSError:
188 if not recursive:
188 if not recursive:
189 continue
189 continue
190 #check if this dir containts other repos for recursive scan
190 #check if this dir containts other repos for recursive scan
191 rec_path = os.path.join(p, dirpath)
191 rec_path = os.path.join(p, dirpath)
192 if os.path.isdir(rec_path):
192 if os.path.isdir(rec_path):
193 for inner_scm in _get_repos(rec_path):
193 for inner_scm in _get_repos(rec_path):
194 yield inner_scm
194 yield inner_scm
195
195
196 return _get_repos(path)
196 return _get_repos(path)
197
197
198
198
199 def _get_dirpaths(p):
199 def _get_dirpaths(p):
200 try:
200 try:
201 # OS-independable way of checking if we have at least read-only
201 # OS-independable way of checking if we have at least read-only
202 # access or not.
202 # access or not.
203 dirpaths = os.listdir(p)
203 dirpaths = os.listdir(p)
204 except OSError:
204 except OSError:
205 log.warning('ignoring repo path without read access: %s', p)
205 log.warning('ignoring repo path without read access: %s', p)
206 return []
206 return []
207
207
208 # os.listpath has a tweak: If a unicode is passed into it, then it tries to
208 # os.listpath has a tweak: If a unicode is passed into it, then it tries to
209 # decode paths and suddenly returns unicode objects itself. The items it
209 # decode paths and suddenly returns unicode objects itself. The items it
210 # cannot decode are returned as strings and cause issues.
210 # cannot decode are returned as strings and cause issues.
211 #
211 #
212 # Those paths are ignored here until a solid solution for path handling has
212 # Those paths are ignored here until a solid solution for path handling has
213 # been built.
213 # been built.
214 expected_type = type(p)
214 expected_type = type(p)
215
215
216 def _has_correct_type(item):
216 def _has_correct_type(item):
217 if type(item) is not expected_type:
217 if type(item) is not expected_type:
218 log.error(
218 log.error(
219 u"Ignoring path %s since it cannot be decoded into unicode.",
219 u"Ignoring path %s since it cannot be decoded into unicode.",
220 # Using "repr" to make sure that we see the byte value in case
220 # Using "repr" to make sure that we see the byte value in case
221 # of support.
221 # of support.
222 repr(item))
222 repr(item))
223 return False
223 return False
224 return True
224 return True
225
225
226 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
226 dirpaths = [item for item in dirpaths if _has_correct_type(item)]
227
227
228 return dirpaths
228 return dirpaths
229
229
230
230
231 def _is_dir_writable(path):
231 def _is_dir_writable(path):
232 """
232 """
233 Probe if `path` is writable.
233 Probe if `path` is writable.
234
234
235 Due to trouble on Cygwin / Windows, this is actually probing if it is
235 Due to trouble on Cygwin / Windows, this is actually probing if it is
236 possible to create a file inside of `path`, stat does not produce reliable
236 possible to create a file inside of `path`, stat does not produce reliable
237 results in this case.
237 results in this case.
238 """
238 """
239 try:
239 try:
240 with tempfile.TemporaryFile(dir=path):
240 with tempfile.TemporaryFile(dir=path):
241 pass
241 pass
242 except OSError:
242 except OSError:
243 return False
243 return False
244 return True
244 return True
245
245
246
246
247 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
247 def is_valid_repo(repo_name, base_path, expect_scm=None, explicit_scm=None, config=None):
248 """
248 """
249 Returns True if given path is a valid repository False otherwise.
249 Returns True if given path is a valid repository False otherwise.
250 If expect_scm param is given also, compare if given scm is the same
250 If expect_scm param is given also, compare if given scm is the same
251 as expected from scm parameter. If explicit_scm is given don't try to
251 as expected from scm parameter. If explicit_scm is given don't try to
252 detect the scm, just use the given one to check if repo is valid
252 detect the scm, just use the given one to check if repo is valid
253
253
254 :param repo_name:
254 :param repo_name:
255 :param base_path:
255 :param base_path:
256 :param expect_scm:
256 :param expect_scm:
257 :param explicit_scm:
257 :param explicit_scm:
258 :param config:
258 :param config:
259
259
260 :return True: if given path is a valid repository
260 :return True: if given path is a valid repository
261 """
261 """
262 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
262 full_path = os.path.join(safe_str(base_path), safe_str(repo_name))
263 log.debug('Checking if `%s` is a valid path for repository. '
263 log.debug('Checking if `%s` is a valid path for repository. '
264 'Explicit type: %s', repo_name, explicit_scm)
264 'Explicit type: %s', repo_name, explicit_scm)
265
265
266 try:
266 try:
267 if explicit_scm:
267 if explicit_scm:
268 detected_scms = [get_scm_backend(explicit_scm)(
268 detected_scms = [get_scm_backend(explicit_scm)(
269 full_path, config=config).alias]
269 full_path, config=config).alias]
270 else:
270 else:
271 detected_scms = get_scm(full_path)
271 detected_scms = get_scm(full_path)
272
272
273 if expect_scm:
273 if expect_scm:
274 return detected_scms[0] == expect_scm
274 return detected_scms[0] == expect_scm
275 log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
275 log.debug('path: %s is an vcs object:%s', full_path, detected_scms)
276 return True
276 return True
277 except VCSError:
277 except VCSError:
278 log.debug('path: %s is not a valid repo !', full_path)
278 log.debug('path: %s is not a valid repo !', full_path)
279 return False
279 return False
280
280
281
281
282 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
282 def is_valid_repo_group(repo_group_name, base_path, skip_path_check=False):
283 """
283 """
284 Returns True if given path is a repository group, False otherwise
284 Returns True if given path is a repository group, False otherwise
285
285
286 :param repo_name:
286 :param repo_name:
287 :param base_path:
287 :param base_path:
288 """
288 """
289 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
289 full_path = os.path.join(safe_str(base_path), safe_str(repo_group_name))
290 log.debug('Checking if `%s` is a valid path for repository group',
290 log.debug('Checking if `%s` is a valid path for repository group',
291 repo_group_name)
291 repo_group_name)
292
292
293 # check if it's not a repo
293 # check if it's not a repo
294 if is_valid_repo(repo_group_name, base_path):
294 if is_valid_repo(repo_group_name, base_path):
295 log.debug('Repo called %s exist, it is not a valid '
295 log.debug('Repo called %s exist, it is not a valid '
296 'repo group' % repo_group_name)
296 'repo group' % repo_group_name)
297 return False
297 return False
298
298
299 try:
299 try:
300 # we need to check bare git repos at higher level
300 # we need to check bare git repos at higher level
301 # since we might match branches/hooks/info/objects or possible
301 # since we might match branches/hooks/info/objects or possible
302 # other things inside bare git repo
302 # other things inside bare git repo
303 scm_ = get_scm(os.path.dirname(full_path))
303 scm_ = get_scm(os.path.dirname(full_path))
304 log.debug('path: %s is a vcs object:%s, not valid '
304 log.debug('path: %s is a vcs object:%s, not valid '
305 'repo group' % (full_path, scm_))
305 'repo group' % (full_path, scm_))
306 return False
306 return False
307 except VCSError:
307 except VCSError:
308 pass
308 pass
309
309
310 # check if it's a valid path
310 # check if it's a valid path
311 if skip_path_check or os.path.isdir(full_path):
311 if skip_path_check or os.path.isdir(full_path):
312 log.debug('path: %s is a valid repo group !', full_path)
312 log.debug('path: %s is a valid repo group !', full_path)
313 return True
313 return True
314
314
315 log.debug('path: %s is not a valid repo group !', full_path)
315 log.debug('path: %s is not a valid repo group !', full_path)
316 return False
316 return False
317
317
318
318
319 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
319 def ask_ok(prompt, retries=4, complaint='[y]es or [n]o please!'):
320 while True:
320 while True:
321 ok = raw_input(prompt)
321 ok = raw_input(prompt)
322 if ok.lower() in ('y', 'ye', 'yes'):
322 if ok.lower() in ('y', 'ye', 'yes'):
323 return True
323 return True
324 if ok.lower() in ('n', 'no', 'nop', 'nope'):
324 if ok.lower() in ('n', 'no', 'nop', 'nope'):
325 return False
325 return False
326 retries = retries - 1
326 retries = retries - 1
327 if retries < 0:
327 if retries < 0:
328 raise IOError
328 raise IOError
329 print(complaint)
329 print(complaint)
330
330
331 # propagated from mercurial documentation
331 # propagated from mercurial documentation
332 ui_sections = [
332 ui_sections = [
333 'alias', 'auth',
333 'alias', 'auth',
334 'decode/encode', 'defaults',
334 'decode/encode', 'defaults',
335 'diff', 'email',
335 'diff', 'email',
336 'extensions', 'format',
336 'extensions', 'format',
337 'merge-patterns', 'merge-tools',
337 'merge-patterns', 'merge-tools',
338 'hooks', 'http_proxy',
338 'hooks', 'http_proxy',
339 'smtp', 'patch',
339 'smtp', 'patch',
340 'paths', 'profiling',
340 'paths', 'profiling',
341 'server', 'trusted',
341 'server', 'trusted',
342 'ui', 'web', ]
342 'ui', 'web', ]
343
343
344
344
345 def config_data_from_db(clear_session=True, repo=None):
345 def config_data_from_db(clear_session=True, repo=None):
346 """
346 """
347 Read the configuration data from the database and return configuration
347 Read the configuration data from the database and return configuration
348 tuples.
348 tuples.
349 """
349 """
350 from rhodecode.model.settings import VcsSettingsModel
350 from rhodecode.model.settings import VcsSettingsModel
351
351
352 config = []
352 config = []
353
353
354 sa = meta.Session()
354 sa = meta.Session()
355 settings_model = VcsSettingsModel(repo=repo, sa=sa)
355 settings_model = VcsSettingsModel(repo=repo, sa=sa)
356
356
357 ui_settings = settings_model.get_ui_settings()
357 ui_settings = settings_model.get_ui_settings()
358
358
359 ui_data = []
359 ui_data = []
360 for setting in ui_settings:
360 for setting in ui_settings:
361 if setting.active:
361 if setting.active:
362 ui_data.append((setting.section, setting.key, setting.value))
362 ui_data.append((setting.section, setting.key, setting.value))
363 config.append((
363 config.append((
364 safe_str(setting.section), safe_str(setting.key),
364 safe_str(setting.section), safe_str(setting.key),
365 safe_str(setting.value)))
365 safe_str(setting.value)))
366 if setting.key == 'push_ssl':
366 if setting.key == 'push_ssl':
367 # force set push_ssl requirement to False, rhodecode
367 # force set push_ssl requirement to False, rhodecode
368 # handles that
368 # handles that
369 config.append((
369 config.append((
370 safe_str(setting.section), safe_str(setting.key), False))
370 safe_str(setting.section), safe_str(setting.key), False))
371 log.debug(
371 log.debug(
372 'settings ui from db: %s',
372 'settings ui from db: %s',
373 ','.join(map(lambda s: '[{}] {}={}'.format(*s), ui_data)))
373 ','.join(map(lambda s: '[{}] {}={}'.format(*s), ui_data)))
374 if clear_session:
374 if clear_session:
375 meta.Session.remove()
375 meta.Session.remove()
376
376
377 # TODO: mikhail: probably it makes no sense to re-read hooks information.
377 # TODO: mikhail: probably it makes no sense to re-read hooks information.
378 # It's already there and activated/deactivated
378 # It's already there and activated/deactivated
379 skip_entries = []
379 skip_entries = []
380 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
380 enabled_hook_classes = get_enabled_hook_classes(ui_settings)
381 if 'pull' not in enabled_hook_classes:
381 if 'pull' not in enabled_hook_classes:
382 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
382 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PULL))
383 if 'push' not in enabled_hook_classes:
383 if 'push' not in enabled_hook_classes:
384 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
384 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRE_PUSH))
385 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
385 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PRETX_PUSH))
386 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
386 skip_entries.append(('hooks', RhodeCodeUi.HOOK_PUSH_KEY))
387
387
388 config = [entry for entry in config if entry[:2] not in skip_entries]
388 config = [entry for entry in config if entry[:2] not in skip_entries]
389
389
390 return config
390 return config
391
391
392
392
393 def make_db_config(clear_session=True, repo=None):
393 def make_db_config(clear_session=True, repo=None):
394 """
394 """
395 Create a :class:`Config` instance based on the values in the database.
395 Create a :class:`Config` instance based on the values in the database.
396 """
396 """
397 config = Config()
397 config = Config()
398 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
398 config_data = config_data_from_db(clear_session=clear_session, repo=repo)
399 for section, option, value in config_data:
399 for section, option, value in config_data:
400 config.set(section, option, value)
400 config.set(section, option, value)
401 return config
401 return config
402
402
403
403
404 def get_enabled_hook_classes(ui_settings):
404 def get_enabled_hook_classes(ui_settings):
405 """
405 """
406 Return the enabled hook classes.
406 Return the enabled hook classes.
407
407
408 :param ui_settings: List of ui_settings as returned
408 :param ui_settings: List of ui_settings as returned
409 by :meth:`VcsSettingsModel.get_ui_settings`
409 by :meth:`VcsSettingsModel.get_ui_settings`
410
410
411 :return: a list with the enabled hook classes. The order is not guaranteed.
411 :return: a list with the enabled hook classes. The order is not guaranteed.
412 :rtype: list
412 :rtype: list
413 """
413 """
414 enabled_hooks = []
414 enabled_hooks = []
415 active_hook_keys = [
415 active_hook_keys = [
416 key for section, key, value, active in ui_settings
416 key for section, key, value, active in ui_settings
417 if section == 'hooks' and active]
417 if section == 'hooks' and active]
418
418
419 hook_names = {
419 hook_names = {
420 RhodeCodeUi.HOOK_PUSH: 'push',
420 RhodeCodeUi.HOOK_PUSH: 'push',
421 RhodeCodeUi.HOOK_PULL: 'pull',
421 RhodeCodeUi.HOOK_PULL: 'pull',
422 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
422 RhodeCodeUi.HOOK_REPO_SIZE: 'repo_size'
423 }
423 }
424
424
425 for key in active_hook_keys:
425 for key in active_hook_keys:
426 hook = hook_names.get(key)
426 hook = hook_names.get(key)
427 if hook:
427 if hook:
428 enabled_hooks.append(hook)
428 enabled_hooks.append(hook)
429
429
430 return enabled_hooks
430 return enabled_hooks
431
431
432
432
433 def set_rhodecode_config(config):
433 def set_rhodecode_config(config):
434 """
434 """
435 Updates pyramid config with new settings from database
435 Updates pyramid config with new settings from database
436
436
437 :param config:
437 :param config:
438 """
438 """
439 from rhodecode.model.settings import SettingsModel
439 from rhodecode.model.settings import SettingsModel
440 app_settings = SettingsModel().get_all_settings()
440 app_settings = SettingsModel().get_all_settings()
441
441
442 for k, v in app_settings.items():
442 for k, v in app_settings.items():
443 config[k] = v
443 config[k] = v
444
444
445
445
446 def get_rhodecode_realm():
446 def get_rhodecode_realm():
447 """
447 """
448 Return the rhodecode realm from database.
448 Return the rhodecode realm from database.
449 """
449 """
450 from rhodecode.model.settings import SettingsModel
450 from rhodecode.model.settings import SettingsModel
451 realm = SettingsModel().get_setting_by_name('realm')
451 realm = SettingsModel().get_setting_by_name('realm')
452 return safe_str(realm.app_settings_value)
452 return safe_str(realm.app_settings_value)
453
453
454
454
455 def get_rhodecode_base_path():
455 def get_rhodecode_base_path():
456 """
456 """
457 Returns the base path. The base path is the filesystem path which points
457 Returns the base path. The base path is the filesystem path which points
458 to the repository store.
458 to the repository store.
459 """
459 """
460 from rhodecode.model.settings import SettingsModel
460 from rhodecode.model.settings import SettingsModel
461 paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
461 paths_ui = SettingsModel().get_ui_by_section_and_key('paths', '/')
462 return safe_str(paths_ui.ui_value)
462 return safe_str(paths_ui.ui_value)
463
463
464
464
465 def map_groups(path):
465 def map_groups(path):
466 """
466 """
467 Given a full path to a repository, create all nested groups that this
467 Given a full path to a repository, create all nested groups that this
468 repo is inside. This function creates parent-child relationships between
468 repo is inside. This function creates parent-child relationships between
469 groups and creates default perms for all new groups.
469 groups and creates default perms for all new groups.
470
470
471 :param paths: full path to repository
471 :param paths: full path to repository
472 """
472 """
473 from rhodecode.model.repo_group import RepoGroupModel
473 from rhodecode.model.repo_group import RepoGroupModel
474 sa = meta.Session()
474 sa = meta.Session()
475 groups = path.split(Repository.NAME_SEP)
475 groups = path.split(Repository.NAME_SEP)
476 parent = None
476 parent = None
477 group = None
477 group = None
478
478
479 # last element is repo in nested groups structure
479 # last element is repo in nested groups structure
480 groups = groups[:-1]
480 groups = groups[:-1]
481 rgm = RepoGroupModel(sa)
481 rgm = RepoGroupModel(sa)
482 owner = User.get_first_super_admin()
482 owner = User.get_first_super_admin()
483 for lvl, group_name in enumerate(groups):
483 for lvl, group_name in enumerate(groups):
484 group_name = '/'.join(groups[:lvl] + [group_name])
484 group_name = '/'.join(groups[:lvl] + [group_name])
485 group = RepoGroup.get_by_group_name(group_name)
485 group = RepoGroup.get_by_group_name(group_name)
486 desc = '%s group' % group_name
486 desc = '%s group' % group_name
487
487
488 # skip folders that are now removed repos
488 # skip folders that are now removed repos
489 if REMOVED_REPO_PAT.match(group_name):
489 if REMOVED_REPO_PAT.match(group_name):
490 break
490 break
491
491
492 if group is None:
492 if group is None:
493 log.debug('creating group level: %s group_name: %s',
493 log.debug('creating group level: %s group_name: %s',
494 lvl, group_name)
494 lvl, group_name)
495 group = RepoGroup(group_name, parent)
495 group = RepoGroup(group_name, parent)
496 group.group_description = desc
496 group.group_description = desc
497 group.user = owner
497 group.user = owner
498 sa.add(group)
498 sa.add(group)
499 perm_obj = rgm._create_default_perms(group)
499 perm_obj = rgm._create_default_perms(group)
500 sa.add(perm_obj)
500 sa.add(perm_obj)
501 sa.flush()
501 sa.flush()
502
502
503 parent = group
503 parent = group
504 return group
504 return group
505
505
506
506
507 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
507 def repo2db_mapper(initial_repo_list, remove_obsolete=False):
508 """
508 """
509 maps all repos given in initial_repo_list, non existing repositories
509 maps all repos given in initial_repo_list, non existing repositories
510 are created, if remove_obsolete is True it also checks for db entries
510 are created, if remove_obsolete is True it also checks for db entries
511 that are not in initial_repo_list and removes them.
511 that are not in initial_repo_list and removes them.
512
512
513 :param initial_repo_list: list of repositories found by scanning methods
513 :param initial_repo_list: list of repositories found by scanning methods
514 :param remove_obsolete: check for obsolete entries in database
514 :param remove_obsolete: check for obsolete entries in database
515 """
515 """
516 from rhodecode.model.repo import RepoModel
516 from rhodecode.model.repo import RepoModel
517 from rhodecode.model.scm import ScmModel
518 from rhodecode.model.repo_group import RepoGroupModel
517 from rhodecode.model.repo_group import RepoGroupModel
519 from rhodecode.model.settings import SettingsModel
518 from rhodecode.model.settings import SettingsModel
520
519
521 sa = meta.Session()
520 sa = meta.Session()
522 repo_model = RepoModel()
521 repo_model = RepoModel()
523 user = User.get_first_super_admin()
522 user = User.get_first_super_admin()
524 added = []
523 added = []
525
524
526 # creation defaults
525 # creation defaults
527 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
526 defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
528 enable_statistics = defs.get('repo_enable_statistics')
527 enable_statistics = defs.get('repo_enable_statistics')
529 enable_locking = defs.get('repo_enable_locking')
528 enable_locking = defs.get('repo_enable_locking')
530 enable_downloads = defs.get('repo_enable_downloads')
529 enable_downloads = defs.get('repo_enable_downloads')
531 private = defs.get('repo_private')
530 private = defs.get('repo_private')
532
531
533 for name, repo in initial_repo_list.items():
532 for name, repo in initial_repo_list.items():
534 group = map_groups(name)
533 group = map_groups(name)
535 unicode_name = safe_unicode(name)
534 unicode_name = safe_unicode(name)
536 db_repo = repo_model.get_by_repo_name(unicode_name)
535 db_repo = repo_model.get_by_repo_name(unicode_name)
537 # found repo that is on filesystem not in RhodeCode database
536 # found repo that is on filesystem not in RhodeCode database
538 if not db_repo:
537 if not db_repo:
539 log.info('repository %s not found, creating now', name)
538 log.info('repository %s not found, creating now', name)
540 added.append(name)
539 added.append(name)
541 desc = (repo.description
540 desc = (repo.description
542 if repo.description != 'unknown'
541 if repo.description != 'unknown'
543 else '%s repository' % name)
542 else '%s repository' % name)
544
543
545 db_repo = repo_model._create_repo(
544 db_repo = repo_model._create_repo(
546 repo_name=name,
545 repo_name=name,
547 repo_type=repo.alias,
546 repo_type=repo.alias,
548 description=desc,
547 description=desc,
549 repo_group=getattr(group, 'group_id', None),
548 repo_group=getattr(group, 'group_id', None),
550 owner=user,
549 owner=user,
551 enable_locking=enable_locking,
550 enable_locking=enable_locking,
552 enable_downloads=enable_downloads,
551 enable_downloads=enable_downloads,
553 enable_statistics=enable_statistics,
552 enable_statistics=enable_statistics,
554 private=private,
553 private=private,
555 state=Repository.STATE_CREATED
554 state=Repository.STATE_CREATED
556 )
555 )
557 sa.commit()
556 sa.commit()
558 # we added that repo just now, and make sure we updated server info
557 # we added that repo just now, and make sure we updated server info
559 if db_repo.repo_type == 'git':
558 if db_repo.repo_type == 'git':
560 git_repo = db_repo.scm_instance()
559 git_repo = db_repo.scm_instance()
561 # update repository server-info
560 # update repository server-info
562 log.debug('Running update server info')
561 log.debug('Running update server info')
563 git_repo._update_server_info()
562 git_repo._update_server_info()
564
563
565 db_repo.update_commit_cache()
564 db_repo.update_commit_cache()
566
565
567 config = db_repo._config
566 config = db_repo._config
568 config.set('extensions', 'largefiles', '')
567 config.set('extensions', 'largefiles', '')
569 ScmModel().install_hooks(
568 repo = db_repo.scm_instance(config=config)
570 db_repo.scm_instance(config=config),
569 repo.install_hooks()
571 repo_type=db_repo.repo_type)
572
570
573 removed = []
571 removed = []
574 if remove_obsolete:
572 if remove_obsolete:
575 # remove from database those repositories that are not in the filesystem
573 # remove from database those repositories that are not in the filesystem
576 for repo in sa.query(Repository).all():
574 for repo in sa.query(Repository).all():
577 if repo.repo_name not in initial_repo_list.keys():
575 if repo.repo_name not in initial_repo_list.keys():
578 log.debug("Removing non-existing repository found in db `%s`",
576 log.debug("Removing non-existing repository found in db `%s`",
579 repo.repo_name)
577 repo.repo_name)
580 try:
578 try:
581 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
579 RepoModel(sa).delete(repo, forks='detach', fs_remove=False)
582 sa.commit()
580 sa.commit()
583 removed.append(repo.repo_name)
581 removed.append(repo.repo_name)
584 except Exception:
582 except Exception:
585 # don't hold further removals on error
583 # don't hold further removals on error
586 log.error(traceback.format_exc())
584 log.error(traceback.format_exc())
587 sa.rollback()
585 sa.rollback()
588
586
589 def splitter(full_repo_name):
587 def splitter(full_repo_name):
590 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
588 _parts = full_repo_name.rsplit(RepoGroup.url_sep(), 1)
591 gr_name = None
589 gr_name = None
592 if len(_parts) == 2:
590 if len(_parts) == 2:
593 gr_name = _parts[0]
591 gr_name = _parts[0]
594 return gr_name
592 return gr_name
595
593
596 initial_repo_group_list = [splitter(x) for x in
594 initial_repo_group_list = [splitter(x) for x in
597 initial_repo_list.keys() if splitter(x)]
595 initial_repo_list.keys() if splitter(x)]
598
596
599 # remove from database those repository groups that are not in the
597 # remove from database those repository groups that are not in the
600 # filesystem due to parent child relationships we need to delete them
598 # filesystem due to parent child relationships we need to delete them
601 # in a specific order of most nested first
599 # in a specific order of most nested first
602 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
600 all_groups = [x.group_name for x in sa.query(RepoGroup).all()]
603 nested_sort = lambda gr: len(gr.split('/'))
601 nested_sort = lambda gr: len(gr.split('/'))
604 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
602 for group_name in sorted(all_groups, key=nested_sort, reverse=True):
605 if group_name not in initial_repo_group_list:
603 if group_name not in initial_repo_group_list:
606 repo_group = RepoGroup.get_by_group_name(group_name)
604 repo_group = RepoGroup.get_by_group_name(group_name)
607 if (repo_group.children.all() or
605 if (repo_group.children.all() or
608 not RepoGroupModel().check_exist_filesystem(
606 not RepoGroupModel().check_exist_filesystem(
609 group_name=group_name, exc_on_failure=False)):
607 group_name=group_name, exc_on_failure=False)):
610 continue
608 continue
611
609
612 log.info(
610 log.info(
613 'Removing non-existing repository group found in db `%s`',
611 'Removing non-existing repository group found in db `%s`',
614 group_name)
612 group_name)
615 try:
613 try:
616 RepoGroupModel(sa).delete(group_name, fs_remove=False)
614 RepoGroupModel(sa).delete(group_name, fs_remove=False)
617 sa.commit()
615 sa.commit()
618 removed.append(group_name)
616 removed.append(group_name)
619 except Exception:
617 except Exception:
620 # don't hold further removals on error
618 # don't hold further removals on error
621 log.exception(
619 log.exception(
622 'Unable to remove repository group `%s`',
620 'Unable to remove repository group `%s`',
623 group_name)
621 group_name)
624 sa.rollback()
622 sa.rollback()
625 raise
623 raise
626
624
627 return added, removed
625 return added, removed
628
626
629
627
630 def load_rcextensions(root_path):
628 def load_rcextensions(root_path):
631 import rhodecode
629 import rhodecode
632 from rhodecode.config import conf
630 from rhodecode.config import conf
633
631
634 path = os.path.join(root_path, 'rcextensions', '__init__.py')
632 path = os.path.join(root_path, 'rcextensions', '__init__.py')
635 if os.path.isfile(path):
633 if os.path.isfile(path):
636 rcext = create_module('rc', path)
634 rcext = create_module('rc', path)
637 EXT = rhodecode.EXTENSIONS = rcext
635 EXT = rhodecode.EXTENSIONS = rcext
638 log.debug('Found rcextensions now loading %s...', rcext)
636 log.debug('Found rcextensions now loading %s...', rcext)
639
637
640 # Additional mappings that are not present in the pygments lexers
638 # Additional mappings that are not present in the pygments lexers
641 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
639 conf.LANGUAGES_EXTENSIONS_MAP.update(getattr(EXT, 'EXTRA_MAPPINGS', {}))
642
640
643 # auto check if the module is not missing any data, set to default if is
641 # auto check if the module is not missing any data, set to default if is
644 # this will help autoupdate new feature of rcext module
642 # this will help autoupdate new feature of rcext module
645 #from rhodecode.config import rcextensions
643 #from rhodecode.config import rcextensions
646 #for k in dir(rcextensions):
644 #for k in dir(rcextensions):
647 # if not k.startswith('_') and not hasattr(EXT, k):
645 # if not k.startswith('_') and not hasattr(EXT, k):
648 # setattr(EXT, k, getattr(rcextensions, k))
646 # setattr(EXT, k, getattr(rcextensions, k))
649
647
650
648
651 def get_custom_lexer(extension):
649 def get_custom_lexer(extension):
652 """
650 """
653 returns a custom lexer if it is defined in rcextensions module, or None
651 returns a custom lexer if it is defined in rcextensions module, or None
654 if there's no custom lexer defined
652 if there's no custom lexer defined
655 """
653 """
656 import rhodecode
654 import rhodecode
657 from pygments import lexers
655 from pygments import lexers
658
656
659 # custom override made by RhodeCode
657 # custom override made by RhodeCode
660 if extension in ['mako']:
658 if extension in ['mako']:
661 return lexers.get_lexer_by_name('html+mako')
659 return lexers.get_lexer_by_name('html+mako')
662
660
663 # check if we didn't define this extension as other lexer
661 # check if we didn't define this extension as other lexer
664 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
662 extensions = rhodecode.EXTENSIONS and getattr(rhodecode.EXTENSIONS, 'EXTRA_LEXERS', None)
665 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
663 if extensions and extension in rhodecode.EXTENSIONS.EXTRA_LEXERS:
666 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
664 _lexer_name = rhodecode.EXTENSIONS.EXTRA_LEXERS[extension]
667 return lexers.get_lexer_by_name(_lexer_name)
665 return lexers.get_lexer_by_name(_lexer_name)
668
666
669
667
670 #==============================================================================
668 #==============================================================================
671 # TEST FUNCTIONS AND CREATORS
669 # TEST FUNCTIONS AND CREATORS
672 #==============================================================================
670 #==============================================================================
673 def create_test_index(repo_location, config):
671 def create_test_index(repo_location, config):
674 """
672 """
675 Makes default test index.
673 Makes default test index.
676 """
674 """
677 import rc_testdata
675 import rc_testdata
678
676
679 rc_testdata.extract_search_index(
677 rc_testdata.extract_search_index(
680 'vcs_search_index', os.path.dirname(config['search.location']))
678 'vcs_search_index', os.path.dirname(config['search.location']))
681
679
682
680
683 def create_test_directory(test_path):
681 def create_test_directory(test_path):
684 """
682 """
685 Create test directory if it doesn't exist.
683 Create test directory if it doesn't exist.
686 """
684 """
687 if not os.path.isdir(test_path):
685 if not os.path.isdir(test_path):
688 log.debug('Creating testdir %s', test_path)
686 log.debug('Creating testdir %s', test_path)
689 os.makedirs(test_path)
687 os.makedirs(test_path)
690
688
691
689
692 def create_test_database(test_path, config):
690 def create_test_database(test_path, config):
693 """
691 """
694 Makes a fresh database.
692 Makes a fresh database.
695 """
693 """
696 from rhodecode.lib.db_manage import DbManage
694 from rhodecode.lib.db_manage import DbManage
697
695
698 # PART ONE create db
696 # PART ONE create db
699 dbconf = config['sqlalchemy.db1.url']
697 dbconf = config['sqlalchemy.db1.url']
700 log.debug('making test db %s', dbconf)
698 log.debug('making test db %s', dbconf)
701
699
702 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
700 dbmanage = DbManage(log_sql=False, dbconf=dbconf, root=config['here'],
703 tests=True, cli_args={'force_ask': True})
701 tests=True, cli_args={'force_ask': True})
704 dbmanage.create_tables(override=True)
702 dbmanage.create_tables(override=True)
705 dbmanage.set_db_version()
703 dbmanage.set_db_version()
706 # for tests dynamically set new root paths based on generated content
704 # for tests dynamically set new root paths based on generated content
707 dbmanage.create_settings(dbmanage.config_prompt(test_path))
705 dbmanage.create_settings(dbmanage.config_prompt(test_path))
708 dbmanage.create_default_user()
706 dbmanage.create_default_user()
709 dbmanage.create_test_admin_and_users()
707 dbmanage.create_test_admin_and_users()
710 dbmanage.create_permissions()
708 dbmanage.create_permissions()
711 dbmanage.populate_default_permissions()
709 dbmanage.populate_default_permissions()
712 Session().commit()
710 Session().commit()
713
711
714
712
715 def create_test_repositories(test_path, config):
713 def create_test_repositories(test_path, config):
716 """
714 """
717 Creates test repositories in the temporary directory. Repositories are
715 Creates test repositories in the temporary directory. Repositories are
718 extracted from archives within the rc_testdata package.
716 extracted from archives within the rc_testdata package.
719 """
717 """
720 import rc_testdata
718 import rc_testdata
721 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
719 from rhodecode.tests import HG_REPO, GIT_REPO, SVN_REPO
722
720
723 log.debug('making test vcs repositories')
721 log.debug('making test vcs repositories')
724
722
725 idx_path = config['search.location']
723 idx_path = config['search.location']
726 data_path = config['cache_dir']
724 data_path = config['cache_dir']
727
725
728 # clean index and data
726 # clean index and data
729 if idx_path and os.path.exists(idx_path):
727 if idx_path and os.path.exists(idx_path):
730 log.debug('remove %s', idx_path)
728 log.debug('remove %s', idx_path)
731 shutil.rmtree(idx_path)
729 shutil.rmtree(idx_path)
732
730
733 if data_path and os.path.exists(data_path):
731 if data_path and os.path.exists(data_path):
734 log.debug('remove %s', data_path)
732 log.debug('remove %s', data_path)
735 shutil.rmtree(data_path)
733 shutil.rmtree(data_path)
736
734
737 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
735 rc_testdata.extract_hg_dump('vcs_test_hg', jn(test_path, HG_REPO))
738 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
736 rc_testdata.extract_git_dump('vcs_test_git', jn(test_path, GIT_REPO))
739
737
740 # Note: Subversion is in the process of being integrated with the system,
738 # Note: Subversion is in the process of being integrated with the system,
741 # until we have a properly packed version of the test svn repository, this
739 # until we have a properly packed version of the test svn repository, this
742 # tries to copy over the repo from a package "rc_testdata"
740 # tries to copy over the repo from a package "rc_testdata"
743 svn_repo_path = rc_testdata.get_svn_repo_archive()
741 svn_repo_path = rc_testdata.get_svn_repo_archive()
744 with tarfile.open(svn_repo_path) as tar:
742 with tarfile.open(svn_repo_path) as tar:
745 tar.extractall(jn(test_path, SVN_REPO))
743 tar.extractall(jn(test_path, SVN_REPO))
746
744
747
745
748 def password_changed(auth_user, session):
746 def password_changed(auth_user, session):
749 # Never report password change in case of default user or anonymous user.
747 # Never report password change in case of default user or anonymous user.
750 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
748 if auth_user.username == User.DEFAULT_USER or auth_user.user_id is None:
751 return False
749 return False
752
750
753 password_hash = md5(auth_user.password) if auth_user.password else None
751 password_hash = md5(auth_user.password) if auth_user.password else None
754 rhodecode_user = session.get('rhodecode_user', {})
752 rhodecode_user = session.get('rhodecode_user', {})
755 session_password_hash = rhodecode_user.get('password', '')
753 session_password_hash = rhodecode_user.get('password', '')
756 return password_hash != session_password_hash
754 return password_hash != session_password_hash
757
755
758
756
759 def read_opensource_licenses():
757 def read_opensource_licenses():
760 global _license_cache
758 global _license_cache
761
759
762 if not _license_cache:
760 if not _license_cache:
763 licenses = pkg_resources.resource_string(
761 licenses = pkg_resources.resource_string(
764 'rhodecode', 'config/licenses.json')
762 'rhodecode', 'config/licenses.json')
765 _license_cache = json.loads(licenses)
763 _license_cache = json.loads(licenses)
766
764
767 return _license_cache
765 return _license_cache
768
766
769
767
770 def generate_platform_uuid():
768 def generate_platform_uuid():
771 """
769 """
772 Generates platform UUID based on it's name
770 Generates platform UUID based on it's name
773 """
771 """
774 import platform
772 import platform
775
773
776 try:
774 try:
777 uuid_list = [platform.platform()]
775 uuid_list = [platform.platform()]
778 return hashlib.sha256(':'.join(uuid_list)).hexdigest()
776 return hashlib.sha256(':'.join(uuid_list)).hexdigest()
779 except Exception as e:
777 except Exception as e:
780 log.error('Failed to generate host uuid: %s' % e)
778 log.error('Failed to generate host uuid: %s' % e)
781 return 'UNDEFINED'
779 return 'UNDEFINED'
@@ -1,1694 +1,1698 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2018 RhodeCode GmbH
3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24
24
25 import collections
25 import collections
26 import datetime
26 import datetime
27 import fnmatch
27 import fnmatch
28 import itertools
28 import itertools
29 import logging
29 import logging
30 import os
30 import os
31 import re
31 import re
32 import time
32 import time
33 import warnings
33 import warnings
34
34
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 from rhodecode.lib.utils2 import safe_str, safe_unicode
37 from rhodecode.lib.utils2 import safe_str, safe_unicode
38 from rhodecode.lib.vcs import connection
38 from rhodecode.lib.vcs import connection
39 from rhodecode.lib.vcs.utils import author_name, author_email
39 from rhodecode.lib.vcs.utils import author_name, author_email
40 from rhodecode.lib.vcs.conf import settings
40 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.exceptions import (
41 from rhodecode.lib.vcs.exceptions import (
42 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
42 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
43 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
43 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
44 NodeDoesNotExistError, NodeNotChangedError, VCSError,
44 NodeDoesNotExistError, NodeNotChangedError, VCSError,
45 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
45 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
46 RepositoryError)
46 RepositoryError)
47
47
48
48
49 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
50
50
51
51
52 FILEMODE_DEFAULT = 0100644
52 FILEMODE_DEFAULT = 0100644
53 FILEMODE_EXECUTABLE = 0100755
53 FILEMODE_EXECUTABLE = 0100755
54
54
55 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
55 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
56 MergeResponse = collections.namedtuple(
56 MergeResponse = collections.namedtuple(
57 'MergeResponse',
57 'MergeResponse',
58 ('possible', 'executed', 'merge_ref', 'failure_reason'))
58 ('possible', 'executed', 'merge_ref', 'failure_reason'))
59
59
60
60
61 class MergeFailureReason(object):
61 class MergeFailureReason(object):
62 """
62 """
63 Enumeration with all the reasons why the server side merge could fail.
63 Enumeration with all the reasons why the server side merge could fail.
64
64
65 DO NOT change the number of the reasons, as they may be stored in the
65 DO NOT change the number of the reasons, as they may be stored in the
66 database.
66 database.
67
67
68 Changing the name of a reason is acceptable and encouraged to deprecate old
68 Changing the name of a reason is acceptable and encouraged to deprecate old
69 reasons.
69 reasons.
70 """
70 """
71
71
72 # Everything went well.
72 # Everything went well.
73 NONE = 0
73 NONE = 0
74
74
75 # An unexpected exception was raised. Check the logs for more details.
75 # An unexpected exception was raised. Check the logs for more details.
76 UNKNOWN = 1
76 UNKNOWN = 1
77
77
78 # The merge was not successful, there are conflicts.
78 # The merge was not successful, there are conflicts.
79 MERGE_FAILED = 2
79 MERGE_FAILED = 2
80
80
81 # The merge succeeded but we could not push it to the target repository.
81 # The merge succeeded but we could not push it to the target repository.
82 PUSH_FAILED = 3
82 PUSH_FAILED = 3
83
83
84 # The specified target is not a head in the target repository.
84 # The specified target is not a head in the target repository.
85 TARGET_IS_NOT_HEAD = 4
85 TARGET_IS_NOT_HEAD = 4
86
86
87 # The source repository contains more branches than the target. Pushing
87 # The source repository contains more branches than the target. Pushing
88 # the merge will create additional branches in the target.
88 # the merge will create additional branches in the target.
89 HG_SOURCE_HAS_MORE_BRANCHES = 5
89 HG_SOURCE_HAS_MORE_BRANCHES = 5
90
90
91 # The target reference has multiple heads. That does not allow to correctly
91 # The target reference has multiple heads. That does not allow to correctly
92 # identify the target location. This could only happen for mercurial
92 # identify the target location. This could only happen for mercurial
93 # branches.
93 # branches.
94 HG_TARGET_HAS_MULTIPLE_HEADS = 6
94 HG_TARGET_HAS_MULTIPLE_HEADS = 6
95
95
96 # The target repository is locked
96 # The target repository is locked
97 TARGET_IS_LOCKED = 7
97 TARGET_IS_LOCKED = 7
98
98
99 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
99 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
100 # A involved commit could not be found.
100 # A involved commit could not be found.
101 _DEPRECATED_MISSING_COMMIT = 8
101 _DEPRECATED_MISSING_COMMIT = 8
102
102
103 # The target repo reference is missing.
103 # The target repo reference is missing.
104 MISSING_TARGET_REF = 9
104 MISSING_TARGET_REF = 9
105
105
106 # The source repo reference is missing.
106 # The source repo reference is missing.
107 MISSING_SOURCE_REF = 10
107 MISSING_SOURCE_REF = 10
108
108
109 # The merge was not successful, there are conflicts related to sub
109 # The merge was not successful, there are conflicts related to sub
110 # repositories.
110 # repositories.
111 SUBREPO_MERGE_FAILED = 11
111 SUBREPO_MERGE_FAILED = 11
112
112
113
113
114 class UpdateFailureReason(object):
114 class UpdateFailureReason(object):
115 """
115 """
116 Enumeration with all the reasons why the pull request update could fail.
116 Enumeration with all the reasons why the pull request update could fail.
117
117
118 DO NOT change the number of the reasons, as they may be stored in the
118 DO NOT change the number of the reasons, as they may be stored in the
119 database.
119 database.
120
120
121 Changing the name of a reason is acceptable and encouraged to deprecate old
121 Changing the name of a reason is acceptable and encouraged to deprecate old
122 reasons.
122 reasons.
123 """
123 """
124
124
125 # Everything went well.
125 # Everything went well.
126 NONE = 0
126 NONE = 0
127
127
128 # An unexpected exception was raised. Check the logs for more details.
128 # An unexpected exception was raised. Check the logs for more details.
129 UNKNOWN = 1
129 UNKNOWN = 1
130
130
131 # The pull request is up to date.
131 # The pull request is up to date.
132 NO_CHANGE = 2
132 NO_CHANGE = 2
133
133
134 # The pull request has a reference type that is not supported for update.
134 # The pull request has a reference type that is not supported for update.
135 WRONG_REF_TYPE = 3
135 WRONG_REF_TYPE = 3
136
136
137 # Update failed because the target reference is missing.
137 # Update failed because the target reference is missing.
138 MISSING_TARGET_REF = 4
138 MISSING_TARGET_REF = 4
139
139
140 # Update failed because the source reference is missing.
140 # Update failed because the source reference is missing.
141 MISSING_SOURCE_REF = 5
141 MISSING_SOURCE_REF = 5
142
142
143
143
144 class BaseRepository(object):
144 class BaseRepository(object):
145 """
145 """
146 Base Repository for final backends
146 Base Repository for final backends
147
147
148 .. attribute:: DEFAULT_BRANCH_NAME
148 .. attribute:: DEFAULT_BRANCH_NAME
149
149
150 name of default branch (i.e. "trunk" for svn, "master" for git etc.
150 name of default branch (i.e. "trunk" for svn, "master" for git etc.
151
151
152 .. attribute:: commit_ids
152 .. attribute:: commit_ids
153
153
154 list of all available commit ids, in ascending order
154 list of all available commit ids, in ascending order
155
155
156 .. attribute:: path
156 .. attribute:: path
157
157
158 absolute path to the repository
158 absolute path to the repository
159
159
160 .. attribute:: bookmarks
160 .. attribute:: bookmarks
161
161
162 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
162 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
163 there are no bookmarks or the backend implementation does not support
163 there are no bookmarks or the backend implementation does not support
164 bookmarks.
164 bookmarks.
165
165
166 .. attribute:: tags
166 .. attribute:: tags
167
167
168 Mapping from name to :term:`Commit ID` of the tag.
168 Mapping from name to :term:`Commit ID` of the tag.
169
169
170 """
170 """
171
171
172 DEFAULT_BRANCH_NAME = None
172 DEFAULT_BRANCH_NAME = None
173 DEFAULT_CONTACT = u"Unknown"
173 DEFAULT_CONTACT = u"Unknown"
174 DEFAULT_DESCRIPTION = u"unknown"
174 DEFAULT_DESCRIPTION = u"unknown"
175 EMPTY_COMMIT_ID = '0' * 40
175 EMPTY_COMMIT_ID = '0' * 40
176
176
177 path = None
177 path = None
178 _remote = None
178
179
179 def __init__(self, repo_path, config=None, create=False, **kwargs):
180 def __init__(self, repo_path, config=None, create=False, **kwargs):
180 """
181 """
181 Initializes repository. Raises RepositoryError if repository could
182 Initializes repository. Raises RepositoryError if repository could
182 not be find at the given ``repo_path`` or directory at ``repo_path``
183 not be find at the given ``repo_path`` or directory at ``repo_path``
183 exists and ``create`` is set to True.
184 exists and ``create`` is set to True.
184
185
185 :param repo_path: local path of the repository
186 :param repo_path: local path of the repository
186 :param config: repository configuration
187 :param config: repository configuration
187 :param create=False: if set to True, would try to create repository.
188 :param create=False: if set to True, would try to create repository.
188 :param src_url=None: if set, should be proper url from which repository
189 :param src_url=None: if set, should be proper url from which repository
189 would be cloned; requires ``create`` parameter to be set to True -
190 would be cloned; requires ``create`` parameter to be set to True -
190 raises RepositoryError if src_url is set and create evaluates to
191 raises RepositoryError if src_url is set and create evaluates to
191 False
192 False
192 """
193 """
193 raise NotImplementedError
194 raise NotImplementedError
194
195
195 def __repr__(self):
196 def __repr__(self):
196 return '<%s at %s>' % (self.__class__.__name__, self.path)
197 return '<%s at %s>' % (self.__class__.__name__, self.path)
197
198
198 def __len__(self):
199 def __len__(self):
199 return self.count()
200 return self.count()
200
201
201 def __eq__(self, other):
202 def __eq__(self, other):
202 same_instance = isinstance(other, self.__class__)
203 same_instance = isinstance(other, self.__class__)
203 return same_instance and other.path == self.path
204 return same_instance and other.path == self.path
204
205
205 def __ne__(self, other):
206 def __ne__(self, other):
206 return not self.__eq__(other)
207 return not self.__eq__(other)
207
208
208 @classmethod
209 @classmethod
209 def get_default_config(cls, default=None):
210 def get_default_config(cls, default=None):
210 config = Config()
211 config = Config()
211 if default and isinstance(default, list):
212 if default and isinstance(default, list):
212 for section, key, val in default:
213 for section, key, val in default:
213 config.set(section, key, val)
214 config.set(section, key, val)
214 return config
215 return config
215
216
216 @LazyProperty
217 @LazyProperty
217 def EMPTY_COMMIT(self):
218 def EMPTY_COMMIT(self):
218 return EmptyCommit(self.EMPTY_COMMIT_ID)
219 return EmptyCommit(self.EMPTY_COMMIT_ID)
219
220
220 @LazyProperty
221 @LazyProperty
221 def alias(self):
222 def alias(self):
222 for k, v in settings.BACKENDS.items():
223 for k, v in settings.BACKENDS.items():
223 if v.split('.')[-1] == str(self.__class__.__name__):
224 if v.split('.')[-1] == str(self.__class__.__name__):
224 return k
225 return k
225
226
226 @LazyProperty
227 @LazyProperty
227 def name(self):
228 def name(self):
228 return safe_unicode(os.path.basename(self.path))
229 return safe_unicode(os.path.basename(self.path))
229
230
230 @LazyProperty
231 @LazyProperty
231 def description(self):
232 def description(self):
232 raise NotImplementedError
233 raise NotImplementedError
233
234
234 def refs(self):
235 def refs(self):
235 """
236 """
236 returns a `dict` with branches, bookmarks, tags, and closed_branches
237 returns a `dict` with branches, bookmarks, tags, and closed_branches
237 for this repository
238 for this repository
238 """
239 """
239 return dict(
240 return dict(
240 branches=self.branches,
241 branches=self.branches,
241 branches_closed=self.branches_closed,
242 branches_closed=self.branches_closed,
242 tags=self.tags,
243 tags=self.tags,
243 bookmarks=self.bookmarks
244 bookmarks=self.bookmarks
244 )
245 )
245
246
246 @LazyProperty
247 @LazyProperty
247 def branches(self):
248 def branches(self):
248 """
249 """
249 A `dict` which maps branch names to commit ids.
250 A `dict` which maps branch names to commit ids.
250 """
251 """
251 raise NotImplementedError
252 raise NotImplementedError
252
253
253 @LazyProperty
254 @LazyProperty
254 def branches_closed(self):
255 def branches_closed(self):
255 """
256 """
256 A `dict` which maps tags names to commit ids.
257 A `dict` which maps tags names to commit ids.
257 """
258 """
258 raise NotImplementedError
259 raise NotImplementedError
259
260
260 @LazyProperty
261 @LazyProperty
261 def bookmarks(self):
262 def bookmarks(self):
262 """
263 """
263 A `dict` which maps tags names to commit ids.
264 A `dict` which maps tags names to commit ids.
264 """
265 """
265 raise NotImplementedError
266 raise NotImplementedError
266
267
267 @LazyProperty
268 @LazyProperty
268 def tags(self):
269 def tags(self):
269 """
270 """
270 A `dict` which maps tags names to commit ids.
271 A `dict` which maps tags names to commit ids.
271 """
272 """
272 raise NotImplementedError
273 raise NotImplementedError
273
274
274 @LazyProperty
275 @LazyProperty
275 def size(self):
276 def size(self):
276 """
277 """
277 Returns combined size in bytes for all repository files
278 Returns combined size in bytes for all repository files
278 """
279 """
279 tip = self.get_commit()
280 tip = self.get_commit()
280 return tip.size
281 return tip.size
281
282
282 def size_at_commit(self, commit_id):
283 def size_at_commit(self, commit_id):
283 commit = self.get_commit(commit_id)
284 commit = self.get_commit(commit_id)
284 return commit.size
285 return commit.size
285
286
286 def is_empty(self):
287 def is_empty(self):
287 return not bool(self.commit_ids)
288 return not bool(self.commit_ids)
288
289
289 @staticmethod
290 @staticmethod
290 def check_url(url, config):
291 def check_url(url, config):
291 """
292 """
292 Function will check given url and try to verify if it's a valid
293 Function will check given url and try to verify if it's a valid
293 link.
294 link.
294 """
295 """
295 raise NotImplementedError
296 raise NotImplementedError
296
297
297 @staticmethod
298 @staticmethod
298 def is_valid_repository(path):
299 def is_valid_repository(path):
299 """
300 """
300 Check if given `path` contains a valid repository of this backend
301 Check if given `path` contains a valid repository of this backend
301 """
302 """
302 raise NotImplementedError
303 raise NotImplementedError
303
304
304 # ==========================================================================
305 # ==========================================================================
305 # COMMITS
306 # COMMITS
306 # ==========================================================================
307 # ==========================================================================
307
308
308 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
309 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None):
309 """
310 """
310 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
311 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
311 are both None, most recent commit is returned.
312 are both None, most recent commit is returned.
312
313
313 :param pre_load: Optional. List of commit attributes to load.
314 :param pre_load: Optional. List of commit attributes to load.
314
315
315 :raises ``EmptyRepositoryError``: if there are no commits
316 :raises ``EmptyRepositoryError``: if there are no commits
316 """
317 """
317 raise NotImplementedError
318 raise NotImplementedError
318
319
319 def __iter__(self):
320 def __iter__(self):
320 for commit_id in self.commit_ids:
321 for commit_id in self.commit_ids:
321 yield self.get_commit(commit_id=commit_id)
322 yield self.get_commit(commit_id=commit_id)
322
323
323 def get_commits(
324 def get_commits(
324 self, start_id=None, end_id=None, start_date=None, end_date=None,
325 self, start_id=None, end_id=None, start_date=None, end_date=None,
325 branch_name=None, show_hidden=False, pre_load=None):
326 branch_name=None, show_hidden=False, pre_load=None):
326 """
327 """
327 Returns iterator of `BaseCommit` objects from start to end
328 Returns iterator of `BaseCommit` objects from start to end
328 not inclusive. This should behave just like a list, ie. end is not
329 not inclusive. This should behave just like a list, ie. end is not
329 inclusive.
330 inclusive.
330
331
331 :param start_id: None or str, must be a valid commit id
332 :param start_id: None or str, must be a valid commit id
332 :param end_id: None or str, must be a valid commit id
333 :param end_id: None or str, must be a valid commit id
333 :param start_date:
334 :param start_date:
334 :param end_date:
335 :param end_date:
335 :param branch_name:
336 :param branch_name:
336 :param show_hidden:
337 :param show_hidden:
337 :param pre_load:
338 :param pre_load:
338 """
339 """
339 raise NotImplementedError
340 raise NotImplementedError
340
341
341 def __getitem__(self, key):
342 def __getitem__(self, key):
342 """
343 """
343 Allows index based access to the commit objects of this repository.
344 Allows index based access to the commit objects of this repository.
344 """
345 """
345 pre_load = ["author", "branch", "date", "message", "parents"]
346 pre_load = ["author", "branch", "date", "message", "parents"]
346 if isinstance(key, slice):
347 if isinstance(key, slice):
347 return self._get_range(key, pre_load)
348 return self._get_range(key, pre_load)
348 return self.get_commit(commit_idx=key, pre_load=pre_load)
349 return self.get_commit(commit_idx=key, pre_load=pre_load)
349
350
350 def _get_range(self, slice_obj, pre_load):
351 def _get_range(self, slice_obj, pre_load):
351 for commit_id in self.commit_ids.__getitem__(slice_obj):
352 for commit_id in self.commit_ids.__getitem__(slice_obj):
352 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
353 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
353
354
354 def count(self):
355 def count(self):
355 return len(self.commit_ids)
356 return len(self.commit_ids)
356
357
357 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
358 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
358 """
359 """
359 Creates and returns a tag for the given ``commit_id``.
360 Creates and returns a tag for the given ``commit_id``.
360
361
361 :param name: name for new tag
362 :param name: name for new tag
362 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
363 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
363 :param commit_id: commit id for which new tag would be created
364 :param commit_id: commit id for which new tag would be created
364 :param message: message of the tag's commit
365 :param message: message of the tag's commit
365 :param date: date of tag's commit
366 :param date: date of tag's commit
366
367
367 :raises TagAlreadyExistError: if tag with same name already exists
368 :raises TagAlreadyExistError: if tag with same name already exists
368 """
369 """
369 raise NotImplementedError
370 raise NotImplementedError
370
371
371 def remove_tag(self, name, user, message=None, date=None):
372 def remove_tag(self, name, user, message=None, date=None):
372 """
373 """
373 Removes tag with the given ``name``.
374 Removes tag with the given ``name``.
374
375
375 :param name: name of the tag to be removed
376 :param name: name of the tag to be removed
376 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
377 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
377 :param message: message of the tag's removal commit
378 :param message: message of the tag's removal commit
378 :param date: date of tag's removal commit
379 :param date: date of tag's removal commit
379
380
380 :raises TagDoesNotExistError: if tag with given name does not exists
381 :raises TagDoesNotExistError: if tag with given name does not exists
381 """
382 """
382 raise NotImplementedError
383 raise NotImplementedError
383
384
384 def get_diff(
385 def get_diff(
385 self, commit1, commit2, path=None, ignore_whitespace=False,
386 self, commit1, commit2, path=None, ignore_whitespace=False,
386 context=3, path1=None):
387 context=3, path1=None):
387 """
388 """
388 Returns (git like) *diff*, as plain text. Shows changes introduced by
389 Returns (git like) *diff*, as plain text. Shows changes introduced by
389 `commit2` since `commit1`.
390 `commit2` since `commit1`.
390
391
391 :param commit1: Entry point from which diff is shown. Can be
392 :param commit1: Entry point from which diff is shown. Can be
392 ``self.EMPTY_COMMIT`` - in this case, patch showing all
393 ``self.EMPTY_COMMIT`` - in this case, patch showing all
393 the changes since empty state of the repository until `commit2`
394 the changes since empty state of the repository until `commit2`
394 :param commit2: Until which commit changes should be shown.
395 :param commit2: Until which commit changes should be shown.
395 :param path: Can be set to a path of a file to create a diff of that
396 :param path: Can be set to a path of a file to create a diff of that
396 file. If `path1` is also set, this value is only associated to
397 file. If `path1` is also set, this value is only associated to
397 `commit2`.
398 `commit2`.
398 :param ignore_whitespace: If set to ``True``, would not show whitespace
399 :param ignore_whitespace: If set to ``True``, would not show whitespace
399 changes. Defaults to ``False``.
400 changes. Defaults to ``False``.
400 :param context: How many lines before/after changed lines should be
401 :param context: How many lines before/after changed lines should be
401 shown. Defaults to ``3``.
402 shown. Defaults to ``3``.
402 :param path1: Can be set to a path to associate with `commit1`. This
403 :param path1: Can be set to a path to associate with `commit1`. This
403 parameter works only for backends which support diff generation for
404 parameter works only for backends which support diff generation for
404 different paths. Other backends will raise a `ValueError` if `path1`
405 different paths. Other backends will raise a `ValueError` if `path1`
405 is set and has a different value than `path`.
406 is set and has a different value than `path`.
406 :param file_path: filter this diff by given path pattern
407 :param file_path: filter this diff by given path pattern
407 """
408 """
408 raise NotImplementedError
409 raise NotImplementedError
409
410
410 def strip(self, commit_id, branch=None):
411 def strip(self, commit_id, branch=None):
411 """
412 """
412 Strip given commit_id from the repository
413 Strip given commit_id from the repository
413 """
414 """
414 raise NotImplementedError
415 raise NotImplementedError
415
416
416 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
417 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
417 """
418 """
418 Return a latest common ancestor commit if one exists for this repo
419 Return a latest common ancestor commit if one exists for this repo
419 `commit_id1` vs `commit_id2` from `repo2`.
420 `commit_id1` vs `commit_id2` from `repo2`.
420
421
421 :param commit_id1: Commit it from this repository to use as a
422 :param commit_id1: Commit it from this repository to use as a
422 target for the comparison.
423 target for the comparison.
423 :param commit_id2: Source commit id to use for comparison.
424 :param commit_id2: Source commit id to use for comparison.
424 :param repo2: Source repository to use for comparison.
425 :param repo2: Source repository to use for comparison.
425 """
426 """
426 raise NotImplementedError
427 raise NotImplementedError
427
428
428 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
429 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
429 """
430 """
430 Compare this repository's revision `commit_id1` with `commit_id2`.
431 Compare this repository's revision `commit_id1` with `commit_id2`.
431
432
432 Returns a tuple(commits, ancestor) that would be merged from
433 Returns a tuple(commits, ancestor) that would be merged from
433 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
434 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
434 will be returned as ancestor.
435 will be returned as ancestor.
435
436
436 :param commit_id1: Commit it from this repository to use as a
437 :param commit_id1: Commit it from this repository to use as a
437 target for the comparison.
438 target for the comparison.
438 :param commit_id2: Source commit id to use for comparison.
439 :param commit_id2: Source commit id to use for comparison.
439 :param repo2: Source repository to use for comparison.
440 :param repo2: Source repository to use for comparison.
440 :param merge: If set to ``True`` will do a merge compare which also
441 :param merge: If set to ``True`` will do a merge compare which also
441 returns the common ancestor.
442 returns the common ancestor.
442 :param pre_load: Optional. List of commit attributes to load.
443 :param pre_load: Optional. List of commit attributes to load.
443 """
444 """
444 raise NotImplementedError
445 raise NotImplementedError
445
446
446 def merge(self, target_ref, source_repo, source_ref, workspace_id,
447 def merge(self, target_ref, source_repo, source_ref, workspace_id,
447 user_name='', user_email='', message='', dry_run=False,
448 user_name='', user_email='', message='', dry_run=False,
448 use_rebase=False, close_branch=False):
449 use_rebase=False, close_branch=False):
449 """
450 """
450 Merge the revisions specified in `source_ref` from `source_repo`
451 Merge the revisions specified in `source_ref` from `source_repo`
451 onto the `target_ref` of this repository.
452 onto the `target_ref` of this repository.
452
453
453 `source_ref` and `target_ref` are named tupls with the following
454 `source_ref` and `target_ref` are named tupls with the following
454 fields `type`, `name` and `commit_id`.
455 fields `type`, `name` and `commit_id`.
455
456
456 Returns a MergeResponse named tuple with the following fields
457 Returns a MergeResponse named tuple with the following fields
457 'possible', 'executed', 'source_commit', 'target_commit',
458 'possible', 'executed', 'source_commit', 'target_commit',
458 'merge_commit'.
459 'merge_commit'.
459
460
460 :param target_ref: `target_ref` points to the commit on top of which
461 :param target_ref: `target_ref` points to the commit on top of which
461 the `source_ref` should be merged.
462 the `source_ref` should be merged.
462 :param source_repo: The repository that contains the commits to be
463 :param source_repo: The repository that contains the commits to be
463 merged.
464 merged.
464 :param source_ref: `source_ref` points to the topmost commit from
465 :param source_ref: `source_ref` points to the topmost commit from
465 the `source_repo` which should be merged.
466 the `source_repo` which should be merged.
466 :param workspace_id: `workspace_id` unique identifier.
467 :param workspace_id: `workspace_id` unique identifier.
467 :param user_name: Merge commit `user_name`.
468 :param user_name: Merge commit `user_name`.
468 :param user_email: Merge commit `user_email`.
469 :param user_email: Merge commit `user_email`.
469 :param message: Merge commit `message`.
470 :param message: Merge commit `message`.
470 :param dry_run: If `True` the merge will not take place.
471 :param dry_run: If `True` the merge will not take place.
471 :param use_rebase: If `True` commits from the source will be rebased
472 :param use_rebase: If `True` commits from the source will be rebased
472 on top of the target instead of being merged.
473 on top of the target instead of being merged.
473 :param close_branch: If `True` branch will be close before merging it
474 :param close_branch: If `True` branch will be close before merging it
474 """
475 """
475 if dry_run:
476 if dry_run:
476 message = message or 'dry_run_merge_message'
477 message = message or 'dry_run_merge_message'
477 user_email = user_email or 'dry-run-merge@rhodecode.com'
478 user_email = user_email or 'dry-run-merge@rhodecode.com'
478 user_name = user_name or 'Dry-Run User'
479 user_name = user_name or 'Dry-Run User'
479 else:
480 else:
480 if not user_name:
481 if not user_name:
481 raise ValueError('user_name cannot be empty')
482 raise ValueError('user_name cannot be empty')
482 if not user_email:
483 if not user_email:
483 raise ValueError('user_email cannot be empty')
484 raise ValueError('user_email cannot be empty')
484 if not message:
485 if not message:
485 raise ValueError('message cannot be empty')
486 raise ValueError('message cannot be empty')
486
487
487 shadow_repository_path = self._maybe_prepare_merge_workspace(
488 shadow_repository_path = self._maybe_prepare_merge_workspace(
488 workspace_id, target_ref, source_ref)
489 workspace_id, target_ref, source_ref)
489
490
490 try:
491 try:
491 return self._merge_repo(
492 return self._merge_repo(
492 shadow_repository_path, target_ref, source_repo,
493 shadow_repository_path, target_ref, source_repo,
493 source_ref, message, user_name, user_email, dry_run=dry_run,
494 source_ref, message, user_name, user_email, dry_run=dry_run,
494 use_rebase=use_rebase, close_branch=close_branch)
495 use_rebase=use_rebase, close_branch=close_branch)
495 except RepositoryError:
496 except RepositoryError:
496 log.exception(
497 log.exception(
497 'Unexpected failure when running merge, dry-run=%s',
498 'Unexpected failure when running merge, dry-run=%s',
498 dry_run)
499 dry_run)
499 return MergeResponse(
500 return MergeResponse(
500 False, False, None, MergeFailureReason.UNKNOWN)
501 False, False, None, MergeFailureReason.UNKNOWN)
501
502
502 def _merge_repo(self, shadow_repository_path, target_ref,
503 def _merge_repo(self, shadow_repository_path, target_ref,
503 source_repo, source_ref, merge_message,
504 source_repo, source_ref, merge_message,
504 merger_name, merger_email, dry_run=False,
505 merger_name, merger_email, dry_run=False,
505 use_rebase=False, close_branch=False):
506 use_rebase=False, close_branch=False):
506 """Internal implementation of merge."""
507 """Internal implementation of merge."""
507 raise NotImplementedError
508 raise NotImplementedError
508
509
509 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
510 def _maybe_prepare_merge_workspace(self, workspace_id, target_ref, source_ref):
510 """
511 """
511 Create the merge workspace.
512 Create the merge workspace.
512
513
513 :param workspace_id: `workspace_id` unique identifier.
514 :param workspace_id: `workspace_id` unique identifier.
514 """
515 """
515 raise NotImplementedError
516 raise NotImplementedError
516
517
517 def cleanup_merge_workspace(self, workspace_id):
518 def cleanup_merge_workspace(self, workspace_id):
518 """
519 """
519 Remove merge workspace.
520 Remove merge workspace.
520
521
521 This function MUST not fail in case there is no workspace associated to
522 This function MUST not fail in case there is no workspace associated to
522 the given `workspace_id`.
523 the given `workspace_id`.
523
524
524 :param workspace_id: `workspace_id` unique identifier.
525 :param workspace_id: `workspace_id` unique identifier.
525 """
526 """
526 raise NotImplementedError
527 raise NotImplementedError
527
528
528 # ========== #
529 # ========== #
529 # COMMIT API #
530 # COMMIT API #
530 # ========== #
531 # ========== #
531
532
532 @LazyProperty
533 @LazyProperty
533 def in_memory_commit(self):
534 def in_memory_commit(self):
534 """
535 """
535 Returns :class:`InMemoryCommit` object for this repository.
536 Returns :class:`InMemoryCommit` object for this repository.
536 """
537 """
537 raise NotImplementedError
538 raise NotImplementedError
538
539
539 # ======================== #
540 # ======================== #
540 # UTILITIES FOR SUBCLASSES #
541 # UTILITIES FOR SUBCLASSES #
541 # ======================== #
542 # ======================== #
542
543
543 def _validate_diff_commits(self, commit1, commit2):
544 def _validate_diff_commits(self, commit1, commit2):
544 """
545 """
545 Validates that the given commits are related to this repository.
546 Validates that the given commits are related to this repository.
546
547
547 Intended as a utility for sub classes to have a consistent validation
548 Intended as a utility for sub classes to have a consistent validation
548 of input parameters in methods like :meth:`get_diff`.
549 of input parameters in methods like :meth:`get_diff`.
549 """
550 """
550 self._validate_commit(commit1)
551 self._validate_commit(commit1)
551 self._validate_commit(commit2)
552 self._validate_commit(commit2)
552 if (isinstance(commit1, EmptyCommit) and
553 if (isinstance(commit1, EmptyCommit) and
553 isinstance(commit2, EmptyCommit)):
554 isinstance(commit2, EmptyCommit)):
554 raise ValueError("Cannot compare two empty commits")
555 raise ValueError("Cannot compare two empty commits")
555
556
556 def _validate_commit(self, commit):
557 def _validate_commit(self, commit):
557 if not isinstance(commit, BaseCommit):
558 if not isinstance(commit, BaseCommit):
558 raise TypeError(
559 raise TypeError(
559 "%s is not of type BaseCommit" % repr(commit))
560 "%s is not of type BaseCommit" % repr(commit))
560 if commit.repository != self and not isinstance(commit, EmptyCommit):
561 if commit.repository != self and not isinstance(commit, EmptyCommit):
561 raise ValueError(
562 raise ValueError(
562 "Commit %s must be a valid commit from this repository %s, "
563 "Commit %s must be a valid commit from this repository %s, "
563 "related to this repository instead %s." %
564 "related to this repository instead %s." %
564 (commit, self, commit.repository))
565 (commit, self, commit.repository))
565
566
566 def _validate_commit_id(self, commit_id):
567 def _validate_commit_id(self, commit_id):
567 if not isinstance(commit_id, basestring):
568 if not isinstance(commit_id, basestring):
568 raise TypeError("commit_id must be a string value")
569 raise TypeError("commit_id must be a string value")
569
570
570 def _validate_commit_idx(self, commit_idx):
571 def _validate_commit_idx(self, commit_idx):
571 if not isinstance(commit_idx, (int, long)):
572 if not isinstance(commit_idx, (int, long)):
572 raise TypeError("commit_idx must be a numeric value")
573 raise TypeError("commit_idx must be a numeric value")
573
574
574 def _validate_branch_name(self, branch_name):
575 def _validate_branch_name(self, branch_name):
575 if branch_name and branch_name not in self.branches_all:
576 if branch_name and branch_name not in self.branches_all:
576 msg = ("Branch %s not found in %s" % (branch_name, self))
577 msg = ("Branch %s not found in %s" % (branch_name, self))
577 raise BranchDoesNotExistError(msg)
578 raise BranchDoesNotExistError(msg)
578
579
579 #
580 #
580 # Supporting deprecated API parts
581 # Supporting deprecated API parts
581 # TODO: johbo: consider to move this into a mixin
582 # TODO: johbo: consider to move this into a mixin
582 #
583 #
583
584
584 @property
585 @property
585 def EMPTY_CHANGESET(self):
586 def EMPTY_CHANGESET(self):
586 warnings.warn(
587 warnings.warn(
587 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
588 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
588 return self.EMPTY_COMMIT_ID
589 return self.EMPTY_COMMIT_ID
589
590
590 @property
591 @property
591 def revisions(self):
592 def revisions(self):
592 warnings.warn("Use commits attribute instead", DeprecationWarning)
593 warnings.warn("Use commits attribute instead", DeprecationWarning)
593 return self.commit_ids
594 return self.commit_ids
594
595
595 @revisions.setter
596 @revisions.setter
596 def revisions(self, value):
597 def revisions(self, value):
597 warnings.warn("Use commits attribute instead", DeprecationWarning)
598 warnings.warn("Use commits attribute instead", DeprecationWarning)
598 self.commit_ids = value
599 self.commit_ids = value
599
600
600 def get_changeset(self, revision=None, pre_load=None):
601 def get_changeset(self, revision=None, pre_load=None):
601 warnings.warn("Use get_commit instead", DeprecationWarning)
602 warnings.warn("Use get_commit instead", DeprecationWarning)
602 commit_id = None
603 commit_id = None
603 commit_idx = None
604 commit_idx = None
604 if isinstance(revision, basestring):
605 if isinstance(revision, basestring):
605 commit_id = revision
606 commit_id = revision
606 else:
607 else:
607 commit_idx = revision
608 commit_idx = revision
608 return self.get_commit(
609 return self.get_commit(
609 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
610 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
610
611
611 def get_changesets(
612 def get_changesets(
612 self, start=None, end=None, start_date=None, end_date=None,
613 self, start=None, end=None, start_date=None, end_date=None,
613 branch_name=None, pre_load=None):
614 branch_name=None, pre_load=None):
614 warnings.warn("Use get_commits instead", DeprecationWarning)
615 warnings.warn("Use get_commits instead", DeprecationWarning)
615 start_id = self._revision_to_commit(start)
616 start_id = self._revision_to_commit(start)
616 end_id = self._revision_to_commit(end)
617 end_id = self._revision_to_commit(end)
617 return self.get_commits(
618 return self.get_commits(
618 start_id=start_id, end_id=end_id, start_date=start_date,
619 start_id=start_id, end_id=end_id, start_date=start_date,
619 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
620 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
620
621
621 def _revision_to_commit(self, revision):
622 def _revision_to_commit(self, revision):
622 """
623 """
623 Translates a revision to a commit_id
624 Translates a revision to a commit_id
624
625
625 Helps to support the old changeset based API which allows to use
626 Helps to support the old changeset based API which allows to use
626 commit ids and commit indices interchangeable.
627 commit ids and commit indices interchangeable.
627 """
628 """
628 if revision is None:
629 if revision is None:
629 return revision
630 return revision
630
631
631 if isinstance(revision, basestring):
632 if isinstance(revision, basestring):
632 commit_id = revision
633 commit_id = revision
633 else:
634 else:
634 commit_id = self.commit_ids[revision]
635 commit_id = self.commit_ids[revision]
635 return commit_id
636 return commit_id
636
637
637 @property
638 @property
638 def in_memory_changeset(self):
639 def in_memory_changeset(self):
639 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
640 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
640 return self.in_memory_commit
641 return self.in_memory_commit
641
642
642 def get_path_permissions(self, username):
643 def get_path_permissions(self, username):
643 """
644 """
644 Returns a path permission checker or None if not supported
645 Returns a path permission checker or None if not supported
645
646
646 :param username: session user name
647 :param username: session user name
647 :return: an instance of BasePathPermissionChecker or None
648 :return: an instance of BasePathPermissionChecker or None
648 """
649 """
649 return None
650 return None
650
651
652 def install_hooks(self, force=False):
653 return self._remote.install_hooks(force)
654
651
655
652 class BaseCommit(object):
656 class BaseCommit(object):
653 """
657 """
654 Each backend should implement it's commit representation.
658 Each backend should implement it's commit representation.
655
659
656 **Attributes**
660 **Attributes**
657
661
658 ``repository``
662 ``repository``
659 repository object within which commit exists
663 repository object within which commit exists
660
664
661 ``id``
665 ``id``
662 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
666 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
663 just ``tip``.
667 just ``tip``.
664
668
665 ``raw_id``
669 ``raw_id``
666 raw commit representation (i.e. full 40 length sha for git
670 raw commit representation (i.e. full 40 length sha for git
667 backend)
671 backend)
668
672
669 ``short_id``
673 ``short_id``
670 shortened (if apply) version of ``raw_id``; it would be simple
674 shortened (if apply) version of ``raw_id``; it would be simple
671 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
675 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
672 as ``raw_id`` for subversion
676 as ``raw_id`` for subversion
673
677
674 ``idx``
678 ``idx``
675 commit index
679 commit index
676
680
677 ``files``
681 ``files``
678 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
682 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
679
683
680 ``dirs``
684 ``dirs``
681 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
685 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
682
686
683 ``nodes``
687 ``nodes``
684 combined list of ``Node`` objects
688 combined list of ``Node`` objects
685
689
686 ``author``
690 ``author``
687 author of the commit, as unicode
691 author of the commit, as unicode
688
692
689 ``message``
693 ``message``
690 message of the commit, as unicode
694 message of the commit, as unicode
691
695
692 ``parents``
696 ``parents``
693 list of parent commits
697 list of parent commits
694
698
695 """
699 """
696
700
697 branch = None
701 branch = None
698 """
702 """
699 Depending on the backend this should be set to the branch name of the
703 Depending on the backend this should be set to the branch name of the
700 commit. Backends not supporting branches on commits should leave this
704 commit. Backends not supporting branches on commits should leave this
701 value as ``None``.
705 value as ``None``.
702 """
706 """
703
707
704 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
708 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
705 """
709 """
706 This template is used to generate a default prefix for repository archives
710 This template is used to generate a default prefix for repository archives
707 if no prefix has been specified.
711 if no prefix has been specified.
708 """
712 """
709
713
710 def __str__(self):
714 def __str__(self):
711 return '<%s at %s:%s>' % (
715 return '<%s at %s:%s>' % (
712 self.__class__.__name__, self.idx, self.short_id)
716 self.__class__.__name__, self.idx, self.short_id)
713
717
714 def __repr__(self):
718 def __repr__(self):
715 return self.__str__()
719 return self.__str__()
716
720
717 def __unicode__(self):
721 def __unicode__(self):
718 return u'%s:%s' % (self.idx, self.short_id)
722 return u'%s:%s' % (self.idx, self.short_id)
719
723
720 def __eq__(self, other):
724 def __eq__(self, other):
721 same_instance = isinstance(other, self.__class__)
725 same_instance = isinstance(other, self.__class__)
722 return same_instance and self.raw_id == other.raw_id
726 return same_instance and self.raw_id == other.raw_id
723
727
724 def __json__(self):
728 def __json__(self):
725 parents = []
729 parents = []
726 try:
730 try:
727 for parent in self.parents:
731 for parent in self.parents:
728 parents.append({'raw_id': parent.raw_id})
732 parents.append({'raw_id': parent.raw_id})
729 except NotImplementedError:
733 except NotImplementedError:
730 # empty commit doesn't have parents implemented
734 # empty commit doesn't have parents implemented
731 pass
735 pass
732
736
733 return {
737 return {
734 'short_id': self.short_id,
738 'short_id': self.short_id,
735 'raw_id': self.raw_id,
739 'raw_id': self.raw_id,
736 'revision': self.idx,
740 'revision': self.idx,
737 'message': self.message,
741 'message': self.message,
738 'date': self.date,
742 'date': self.date,
739 'author': self.author,
743 'author': self.author,
740 'parents': parents,
744 'parents': parents,
741 'branch': self.branch
745 'branch': self.branch
742 }
746 }
743
747
744 def _get_refs(self):
748 def _get_refs(self):
745 return {
749 return {
746 'branches': [self.branch],
750 'branches': [self.branch] if self.branch else [],
747 'bookmarks': getattr(self, 'bookmarks', []),
751 'bookmarks': getattr(self, 'bookmarks', []),
748 'tags': self.tags
752 'tags': self.tags
749 }
753 }
750
754
751 @LazyProperty
755 @LazyProperty
752 def last(self):
756 def last(self):
753 """
757 """
754 ``True`` if this is last commit in repository, ``False``
758 ``True`` if this is last commit in repository, ``False``
755 otherwise; trying to access this attribute while there is no
759 otherwise; trying to access this attribute while there is no
756 commits would raise `EmptyRepositoryError`
760 commits would raise `EmptyRepositoryError`
757 """
761 """
758 if self.repository is None:
762 if self.repository is None:
759 raise CommitError("Cannot check if it's most recent commit")
763 raise CommitError("Cannot check if it's most recent commit")
760 return self.raw_id == self.repository.commit_ids[-1]
764 return self.raw_id == self.repository.commit_ids[-1]
761
765
762 @LazyProperty
766 @LazyProperty
763 def parents(self):
767 def parents(self):
764 """
768 """
765 Returns list of parent commits.
769 Returns list of parent commits.
766 """
770 """
767 raise NotImplementedError
771 raise NotImplementedError
768
772
769 @property
773 @property
770 def merge(self):
774 def merge(self):
771 """
775 """
772 Returns boolean if commit is a merge.
776 Returns boolean if commit is a merge.
773 """
777 """
774 return len(self.parents) > 1
778 return len(self.parents) > 1
775
779
776 @LazyProperty
780 @LazyProperty
777 def children(self):
781 def children(self):
778 """
782 """
779 Returns list of child commits.
783 Returns list of child commits.
780 """
784 """
781 raise NotImplementedError
785 raise NotImplementedError
782
786
783 @LazyProperty
787 @LazyProperty
784 def id(self):
788 def id(self):
785 """
789 """
786 Returns string identifying this commit.
790 Returns string identifying this commit.
787 """
791 """
788 raise NotImplementedError
792 raise NotImplementedError
789
793
790 @LazyProperty
794 @LazyProperty
791 def raw_id(self):
795 def raw_id(self):
792 """
796 """
793 Returns raw string identifying this commit.
797 Returns raw string identifying this commit.
794 """
798 """
795 raise NotImplementedError
799 raise NotImplementedError
796
800
797 @LazyProperty
801 @LazyProperty
798 def short_id(self):
802 def short_id(self):
799 """
803 """
800 Returns shortened version of ``raw_id`` attribute, as string,
804 Returns shortened version of ``raw_id`` attribute, as string,
801 identifying this commit, useful for presentation to users.
805 identifying this commit, useful for presentation to users.
802 """
806 """
803 raise NotImplementedError
807 raise NotImplementedError
804
808
805 @LazyProperty
809 @LazyProperty
806 def idx(self):
810 def idx(self):
807 """
811 """
808 Returns integer identifying this commit.
812 Returns integer identifying this commit.
809 """
813 """
810 raise NotImplementedError
814 raise NotImplementedError
811
815
812 @LazyProperty
816 @LazyProperty
813 def committer(self):
817 def committer(self):
814 """
818 """
815 Returns committer for this commit
819 Returns committer for this commit
816 """
820 """
817 raise NotImplementedError
821 raise NotImplementedError
818
822
819 @LazyProperty
823 @LazyProperty
820 def committer_name(self):
824 def committer_name(self):
821 """
825 """
822 Returns committer name for this commit
826 Returns committer name for this commit
823 """
827 """
824
828
825 return author_name(self.committer)
829 return author_name(self.committer)
826
830
827 @LazyProperty
831 @LazyProperty
828 def committer_email(self):
832 def committer_email(self):
829 """
833 """
830 Returns committer email address for this commit
834 Returns committer email address for this commit
831 """
835 """
832
836
833 return author_email(self.committer)
837 return author_email(self.committer)
834
838
835 @LazyProperty
839 @LazyProperty
836 def author(self):
840 def author(self):
837 """
841 """
838 Returns author for this commit
842 Returns author for this commit
839 """
843 """
840
844
841 raise NotImplementedError
845 raise NotImplementedError
842
846
843 @LazyProperty
847 @LazyProperty
844 def author_name(self):
848 def author_name(self):
845 """
849 """
846 Returns author name for this commit
850 Returns author name for this commit
847 """
851 """
848
852
849 return author_name(self.author)
853 return author_name(self.author)
850
854
851 @LazyProperty
855 @LazyProperty
852 def author_email(self):
856 def author_email(self):
853 """
857 """
854 Returns author email address for this commit
858 Returns author email address for this commit
855 """
859 """
856
860
857 return author_email(self.author)
861 return author_email(self.author)
858
862
859 def get_file_mode(self, path):
863 def get_file_mode(self, path):
860 """
864 """
861 Returns stat mode of the file at `path`.
865 Returns stat mode of the file at `path`.
862 """
866 """
863 raise NotImplementedError
867 raise NotImplementedError
864
868
865 def is_link(self, path):
869 def is_link(self, path):
866 """
870 """
867 Returns ``True`` if given `path` is a symlink
871 Returns ``True`` if given `path` is a symlink
868 """
872 """
869 raise NotImplementedError
873 raise NotImplementedError
870
874
871 def get_file_content(self, path):
875 def get_file_content(self, path):
872 """
876 """
873 Returns content of the file at the given `path`.
877 Returns content of the file at the given `path`.
874 """
878 """
875 raise NotImplementedError
879 raise NotImplementedError
876
880
877 def get_file_size(self, path):
881 def get_file_size(self, path):
878 """
882 """
879 Returns size of the file at the given `path`.
883 Returns size of the file at the given `path`.
880 """
884 """
881 raise NotImplementedError
885 raise NotImplementedError
882
886
883 def get_file_commit(self, path, pre_load=None):
887 def get_file_commit(self, path, pre_load=None):
884 """
888 """
885 Returns last commit of the file at the given `path`.
889 Returns last commit of the file at the given `path`.
886
890
887 :param pre_load: Optional. List of commit attributes to load.
891 :param pre_load: Optional. List of commit attributes to load.
888 """
892 """
889 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
893 commits = self.get_file_history(path, limit=1, pre_load=pre_load)
890 if not commits:
894 if not commits:
891 raise RepositoryError(
895 raise RepositoryError(
892 'Failed to fetch history for path {}. '
896 'Failed to fetch history for path {}. '
893 'Please check if such path exists in your repository'.format(
897 'Please check if such path exists in your repository'.format(
894 path))
898 path))
895 return commits[0]
899 return commits[0]
896
900
897 def get_file_history(self, path, limit=None, pre_load=None):
901 def get_file_history(self, path, limit=None, pre_load=None):
898 """
902 """
899 Returns history of file as reversed list of :class:`BaseCommit`
903 Returns history of file as reversed list of :class:`BaseCommit`
900 objects for which file at given `path` has been modified.
904 objects for which file at given `path` has been modified.
901
905
902 :param limit: Optional. Allows to limit the size of the returned
906 :param limit: Optional. Allows to limit the size of the returned
903 history. This is intended as a hint to the underlying backend, so
907 history. This is intended as a hint to the underlying backend, so
904 that it can apply optimizations depending on the limit.
908 that it can apply optimizations depending on the limit.
905 :param pre_load: Optional. List of commit attributes to load.
909 :param pre_load: Optional. List of commit attributes to load.
906 """
910 """
907 raise NotImplementedError
911 raise NotImplementedError
908
912
909 def get_file_annotate(self, path, pre_load=None):
913 def get_file_annotate(self, path, pre_load=None):
910 """
914 """
911 Returns a generator of four element tuples with
915 Returns a generator of four element tuples with
912 lineno, sha, commit lazy loader and line
916 lineno, sha, commit lazy loader and line
913
917
914 :param pre_load: Optional. List of commit attributes to load.
918 :param pre_load: Optional. List of commit attributes to load.
915 """
919 """
916 raise NotImplementedError
920 raise NotImplementedError
917
921
918 def get_nodes(self, path):
922 def get_nodes(self, path):
919 """
923 """
920 Returns combined ``DirNode`` and ``FileNode`` objects list representing
924 Returns combined ``DirNode`` and ``FileNode`` objects list representing
921 state of commit at the given ``path``.
925 state of commit at the given ``path``.
922
926
923 :raises ``CommitError``: if node at the given ``path`` is not
927 :raises ``CommitError``: if node at the given ``path`` is not
924 instance of ``DirNode``
928 instance of ``DirNode``
925 """
929 """
926 raise NotImplementedError
930 raise NotImplementedError
927
931
928 def get_node(self, path):
932 def get_node(self, path):
929 """
933 """
930 Returns ``Node`` object from the given ``path``.
934 Returns ``Node`` object from the given ``path``.
931
935
932 :raises ``NodeDoesNotExistError``: if there is no node at the given
936 :raises ``NodeDoesNotExistError``: if there is no node at the given
933 ``path``
937 ``path``
934 """
938 """
935 raise NotImplementedError
939 raise NotImplementedError
936
940
937 def get_largefile_node(self, path):
941 def get_largefile_node(self, path):
938 """
942 """
939 Returns the path to largefile from Mercurial/Git-lfs storage.
943 Returns the path to largefile from Mercurial/Git-lfs storage.
940 or None if it's not a largefile node
944 or None if it's not a largefile node
941 """
945 """
942 return None
946 return None
943
947
944 def archive_repo(self, file_path, kind='tgz', subrepos=None,
948 def archive_repo(self, file_path, kind='tgz', subrepos=None,
945 prefix=None, write_metadata=False, mtime=None):
949 prefix=None, write_metadata=False, mtime=None):
946 """
950 """
947 Creates an archive containing the contents of the repository.
951 Creates an archive containing the contents of the repository.
948
952
949 :param file_path: path to the file which to create the archive.
953 :param file_path: path to the file which to create the archive.
950 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
954 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
951 :param prefix: name of root directory in archive.
955 :param prefix: name of root directory in archive.
952 Default is repository name and commit's short_id joined with dash:
956 Default is repository name and commit's short_id joined with dash:
953 ``"{repo_name}-{short_id}"``.
957 ``"{repo_name}-{short_id}"``.
954 :param write_metadata: write a metadata file into archive.
958 :param write_metadata: write a metadata file into archive.
955 :param mtime: custom modification time for archive creation, defaults
959 :param mtime: custom modification time for archive creation, defaults
956 to time.time() if not given.
960 to time.time() if not given.
957
961
958 :raise VCSError: If prefix has a problem.
962 :raise VCSError: If prefix has a problem.
959 """
963 """
960 allowed_kinds = settings.ARCHIVE_SPECS.keys()
964 allowed_kinds = settings.ARCHIVE_SPECS.keys()
961 if kind not in allowed_kinds:
965 if kind not in allowed_kinds:
962 raise ImproperArchiveTypeError(
966 raise ImproperArchiveTypeError(
963 'Archive kind (%s) not supported use one of %s' %
967 'Archive kind (%s) not supported use one of %s' %
964 (kind, allowed_kinds))
968 (kind, allowed_kinds))
965
969
966 prefix = self._validate_archive_prefix(prefix)
970 prefix = self._validate_archive_prefix(prefix)
967
971
968 mtime = mtime or time.mktime(self.date.timetuple())
972 mtime = mtime or time.mktime(self.date.timetuple())
969
973
970 file_info = []
974 file_info = []
971 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
975 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
972 for _r, _d, files in cur_rev.walk('/'):
976 for _r, _d, files in cur_rev.walk('/'):
973 for f in files:
977 for f in files:
974 f_path = os.path.join(prefix, f.path)
978 f_path = os.path.join(prefix, f.path)
975 file_info.append(
979 file_info.append(
976 (f_path, f.mode, f.is_link(), f.raw_bytes))
980 (f_path, f.mode, f.is_link(), f.raw_bytes))
977
981
978 if write_metadata:
982 if write_metadata:
979 metadata = [
983 metadata = [
980 ('repo_name', self.repository.name),
984 ('repo_name', self.repository.name),
981 ('rev', self.raw_id),
985 ('rev', self.raw_id),
982 ('create_time', mtime),
986 ('create_time', mtime),
983 ('branch', self.branch),
987 ('branch', self.branch),
984 ('tags', ','.join(self.tags)),
988 ('tags', ','.join(self.tags)),
985 ]
989 ]
986 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
990 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
987 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
991 file_info.append(('.archival.txt', 0644, False, '\n'.join(meta)))
988
992
989 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
993 connection.Hg.archive_repo(file_path, mtime, file_info, kind)
990
994
991 def _validate_archive_prefix(self, prefix):
995 def _validate_archive_prefix(self, prefix):
992 if prefix is None:
996 if prefix is None:
993 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
997 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
994 repo_name=safe_str(self.repository.name),
998 repo_name=safe_str(self.repository.name),
995 short_id=self.short_id)
999 short_id=self.short_id)
996 elif not isinstance(prefix, str):
1000 elif not isinstance(prefix, str):
997 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1001 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
998 elif prefix.startswith('/'):
1002 elif prefix.startswith('/'):
999 raise VCSError("Prefix cannot start with leading slash")
1003 raise VCSError("Prefix cannot start with leading slash")
1000 elif prefix.strip() == '':
1004 elif prefix.strip() == '':
1001 raise VCSError("Prefix cannot be empty")
1005 raise VCSError("Prefix cannot be empty")
1002 return prefix
1006 return prefix
1003
1007
1004 @LazyProperty
1008 @LazyProperty
1005 def root(self):
1009 def root(self):
1006 """
1010 """
1007 Returns ``RootNode`` object for this commit.
1011 Returns ``RootNode`` object for this commit.
1008 """
1012 """
1009 return self.get_node('')
1013 return self.get_node('')
1010
1014
1011 def next(self, branch=None):
1015 def next(self, branch=None):
1012 """
1016 """
1013 Returns next commit from current, if branch is gives it will return
1017 Returns next commit from current, if branch is gives it will return
1014 next commit belonging to this branch
1018 next commit belonging to this branch
1015
1019
1016 :param branch: show commits within the given named branch
1020 :param branch: show commits within the given named branch
1017 """
1021 """
1018 indexes = xrange(self.idx + 1, self.repository.count())
1022 indexes = xrange(self.idx + 1, self.repository.count())
1019 return self._find_next(indexes, branch)
1023 return self._find_next(indexes, branch)
1020
1024
1021 def prev(self, branch=None):
1025 def prev(self, branch=None):
1022 """
1026 """
1023 Returns previous commit from current, if branch is gives it will
1027 Returns previous commit from current, if branch is gives it will
1024 return previous commit belonging to this branch
1028 return previous commit belonging to this branch
1025
1029
1026 :param branch: show commit within the given named branch
1030 :param branch: show commit within the given named branch
1027 """
1031 """
1028 indexes = xrange(self.idx - 1, -1, -1)
1032 indexes = xrange(self.idx - 1, -1, -1)
1029 return self._find_next(indexes, branch)
1033 return self._find_next(indexes, branch)
1030
1034
1031 def _find_next(self, indexes, branch=None):
1035 def _find_next(self, indexes, branch=None):
1032 if branch and self.branch != branch:
1036 if branch and self.branch != branch:
1033 raise VCSError('Branch option used on commit not belonging '
1037 raise VCSError('Branch option used on commit not belonging '
1034 'to that branch')
1038 'to that branch')
1035
1039
1036 for next_idx in indexes:
1040 for next_idx in indexes:
1037 commit = self.repository.get_commit(commit_idx=next_idx)
1041 commit = self.repository.get_commit(commit_idx=next_idx)
1038 if branch and branch != commit.branch:
1042 if branch and branch != commit.branch:
1039 continue
1043 continue
1040 return commit
1044 return commit
1041 raise CommitDoesNotExistError
1045 raise CommitDoesNotExistError
1042
1046
1043 def diff(self, ignore_whitespace=True, context=3):
1047 def diff(self, ignore_whitespace=True, context=3):
1044 """
1048 """
1045 Returns a `Diff` object representing the change made by this commit.
1049 Returns a `Diff` object representing the change made by this commit.
1046 """
1050 """
1047 parent = (
1051 parent = (
1048 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1052 self.parents[0] if self.parents else self.repository.EMPTY_COMMIT)
1049 diff = self.repository.get_diff(
1053 diff = self.repository.get_diff(
1050 parent, self,
1054 parent, self,
1051 ignore_whitespace=ignore_whitespace,
1055 ignore_whitespace=ignore_whitespace,
1052 context=context)
1056 context=context)
1053 return diff
1057 return diff
1054
1058
1055 @LazyProperty
1059 @LazyProperty
1056 def added(self):
1060 def added(self):
1057 """
1061 """
1058 Returns list of added ``FileNode`` objects.
1062 Returns list of added ``FileNode`` objects.
1059 """
1063 """
1060 raise NotImplementedError
1064 raise NotImplementedError
1061
1065
1062 @LazyProperty
1066 @LazyProperty
1063 def changed(self):
1067 def changed(self):
1064 """
1068 """
1065 Returns list of modified ``FileNode`` objects.
1069 Returns list of modified ``FileNode`` objects.
1066 """
1070 """
1067 raise NotImplementedError
1071 raise NotImplementedError
1068
1072
1069 @LazyProperty
1073 @LazyProperty
1070 def removed(self):
1074 def removed(self):
1071 """
1075 """
1072 Returns list of removed ``FileNode`` objects.
1076 Returns list of removed ``FileNode`` objects.
1073 """
1077 """
1074 raise NotImplementedError
1078 raise NotImplementedError
1075
1079
1076 @LazyProperty
1080 @LazyProperty
1077 def size(self):
1081 def size(self):
1078 """
1082 """
1079 Returns total number of bytes from contents of all filenodes.
1083 Returns total number of bytes from contents of all filenodes.
1080 """
1084 """
1081 return sum((node.size for node in self.get_filenodes_generator()))
1085 return sum((node.size for node in self.get_filenodes_generator()))
1082
1086
1083 def walk(self, topurl=''):
1087 def walk(self, topurl=''):
1084 """
1088 """
1085 Similar to os.walk method. Insted of filesystem it walks through
1089 Similar to os.walk method. Insted of filesystem it walks through
1086 commit starting at given ``topurl``. Returns generator of tuples
1090 commit starting at given ``topurl``. Returns generator of tuples
1087 (topnode, dirnodes, filenodes).
1091 (topnode, dirnodes, filenodes).
1088 """
1092 """
1089 topnode = self.get_node(topurl)
1093 topnode = self.get_node(topurl)
1090 if not topnode.is_dir():
1094 if not topnode.is_dir():
1091 return
1095 return
1092 yield (topnode, topnode.dirs, topnode.files)
1096 yield (topnode, topnode.dirs, topnode.files)
1093 for dirnode in topnode.dirs:
1097 for dirnode in topnode.dirs:
1094 for tup in self.walk(dirnode.path):
1098 for tup in self.walk(dirnode.path):
1095 yield tup
1099 yield tup
1096
1100
1097 def get_filenodes_generator(self):
1101 def get_filenodes_generator(self):
1098 """
1102 """
1099 Returns generator that yields *all* file nodes.
1103 Returns generator that yields *all* file nodes.
1100 """
1104 """
1101 for topnode, dirs, files in self.walk():
1105 for topnode, dirs, files in self.walk():
1102 for node in files:
1106 for node in files:
1103 yield node
1107 yield node
1104
1108
1105 #
1109 #
1106 # Utilities for sub classes to support consistent behavior
1110 # Utilities for sub classes to support consistent behavior
1107 #
1111 #
1108
1112
1109 def no_node_at_path(self, path):
1113 def no_node_at_path(self, path):
1110 return NodeDoesNotExistError(
1114 return NodeDoesNotExistError(
1111 u"There is no file nor directory at the given path: "
1115 u"There is no file nor directory at the given path: "
1112 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1116 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1113
1117
1114 def _fix_path(self, path):
1118 def _fix_path(self, path):
1115 """
1119 """
1116 Paths are stored without trailing slash so we need to get rid off it if
1120 Paths are stored without trailing slash so we need to get rid off it if
1117 needed.
1121 needed.
1118 """
1122 """
1119 return path.rstrip('/')
1123 return path.rstrip('/')
1120
1124
1121 #
1125 #
1122 # Deprecated API based on changesets
1126 # Deprecated API based on changesets
1123 #
1127 #
1124
1128
1125 @property
1129 @property
1126 def revision(self):
1130 def revision(self):
1127 warnings.warn("Use idx instead", DeprecationWarning)
1131 warnings.warn("Use idx instead", DeprecationWarning)
1128 return self.idx
1132 return self.idx
1129
1133
1130 @revision.setter
1134 @revision.setter
1131 def revision(self, value):
1135 def revision(self, value):
1132 warnings.warn("Use idx instead", DeprecationWarning)
1136 warnings.warn("Use idx instead", DeprecationWarning)
1133 self.idx = value
1137 self.idx = value
1134
1138
1135 def get_file_changeset(self, path):
1139 def get_file_changeset(self, path):
1136 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1140 warnings.warn("Use get_file_commit instead", DeprecationWarning)
1137 return self.get_file_commit(path)
1141 return self.get_file_commit(path)
1138
1142
1139
1143
class BaseChangesetClass(type):
    """
    Metaclass that makes every ``BaseCommit`` instance pass
    ``isinstance`` checks against the deprecated ``BaseChangeset``.
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1144
1148
1145
1149
class BaseChangeset(BaseCommit):
    """
    Deprecated alias of :class:`BaseCommit`; constructing one emits a
    ``DeprecationWarning`` and the metaclass keeps ``isinstance``
    checks working for plain ``BaseCommit`` instances.
    """

    __metaclass__ = BaseChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1154
1158
1155
1159
1156 class BaseInMemoryCommit(object):
1160 class BaseInMemoryCommit(object):
1157 """
1161 """
1158 Represents differences between repository's state (most recent head) and
1162 Represents differences between repository's state (most recent head) and
1159 changes made *in place*.
1163 changes made *in place*.
1160
1164
1161 **Attributes**
1165 **Attributes**
1162
1166
1163 ``repository``
1167 ``repository``
1164 repository object for this in-memory-commit
1168 repository object for this in-memory-commit
1165
1169
1166 ``added``
1170 ``added``
1167 list of ``FileNode`` objects marked as *added*
1171 list of ``FileNode`` objects marked as *added*
1168
1172
1169 ``changed``
1173 ``changed``
1170 list of ``FileNode`` objects marked as *changed*
1174 list of ``FileNode`` objects marked as *changed*
1171
1175
1172 ``removed``
1176 ``removed``
1173 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1177 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1174 *removed*
1178 *removed*
1175
1179
1176 ``parents``
1180 ``parents``
1177 list of :class:`BaseCommit` instances representing parents of
1181 list of :class:`BaseCommit` instances representing parents of
1178 in-memory commit. Should always be 2-element sequence.
1182 in-memory commit. Should always be 2-element sequence.
1179
1183
1180 """
1184 """
1181
1185
1182 def __init__(self, repository):
1186 def __init__(self, repository):
1183 self.repository = repository
1187 self.repository = repository
1184 self.added = []
1188 self.added = []
1185 self.changed = []
1189 self.changed = []
1186 self.removed = []
1190 self.removed = []
1187 self.parents = []
1191 self.parents = []
1188
1192
1189 def add(self, *filenodes):
1193 def add(self, *filenodes):
1190 """
1194 """
1191 Marks given ``FileNode`` objects as *to be committed*.
1195 Marks given ``FileNode`` objects as *to be committed*.
1192
1196
1193 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1197 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1194 latest commit
1198 latest commit
1195 :raises ``NodeAlreadyAddedError``: if node with same path is already
1199 :raises ``NodeAlreadyAddedError``: if node with same path is already
1196 marked as *added*
1200 marked as *added*
1197 """
1201 """
1198 # Check if not already marked as *added* first
1202 # Check if not already marked as *added* first
1199 for node in filenodes:
1203 for node in filenodes:
1200 if node.path in (n.path for n in self.added):
1204 if node.path in (n.path for n in self.added):
1201 raise NodeAlreadyAddedError(
1205 raise NodeAlreadyAddedError(
1202 "Such FileNode %s is already marked for addition"
1206 "Such FileNode %s is already marked for addition"
1203 % node.path)
1207 % node.path)
1204 for node in filenodes:
1208 for node in filenodes:
1205 self.added.append(node)
1209 self.added.append(node)
1206
1210
1207 def change(self, *filenodes):
1211 def change(self, *filenodes):
1208 """
1212 """
1209 Marks given ``FileNode`` objects to be *changed* in next commit.
1213 Marks given ``FileNode`` objects to be *changed* in next commit.
1210
1214
1211 :raises ``EmptyRepositoryError``: if there are no commits yet
1215 :raises ``EmptyRepositoryError``: if there are no commits yet
1212 :raises ``NodeAlreadyExistsError``: if node with same path is already
1216 :raises ``NodeAlreadyExistsError``: if node with same path is already
1213 marked to be *changed*
1217 marked to be *changed*
1214 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1218 :raises ``NodeAlreadyRemovedError``: if node with same path is already
1215 marked to be *removed*
1219 marked to be *removed*
1216 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1220 :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
1217 commit
1221 commit
1218 :raises ``NodeNotChangedError``: if node hasn't really be changed
1222 :raises ``NodeNotChangedError``: if node hasn't really be changed
1219 """
1223 """
1220 for node in filenodes:
1224 for node in filenodes:
1221 if node.path in (n.path for n in self.removed):
1225 if node.path in (n.path for n in self.removed):
1222 raise NodeAlreadyRemovedError(
1226 raise NodeAlreadyRemovedError(
1223 "Node at %s is already marked as removed" % node.path)
1227 "Node at %s is already marked as removed" % node.path)
1224 try:
1228 try:
1225 self.repository.get_commit()
1229 self.repository.get_commit()
1226 except EmptyRepositoryError:
1230 except EmptyRepositoryError:
1227 raise EmptyRepositoryError(
1231 raise EmptyRepositoryError(
1228 "Nothing to change - try to *add* new nodes rather than "
1232 "Nothing to change - try to *add* new nodes rather than "
1229 "changing them")
1233 "changing them")
1230 for node in filenodes:
1234 for node in filenodes:
1231 if node.path in (n.path for n in self.changed):
1235 if node.path in (n.path for n in self.changed):
1232 raise NodeAlreadyChangedError(
1236 raise NodeAlreadyChangedError(
1233 "Node at '%s' is already marked as changed" % node.path)
1237 "Node at '%s' is already marked as changed" % node.path)
1234 self.changed.append(node)
1238 self.changed.append(node)
1235
1239
1236 def remove(self, *filenodes):
1240 def remove(self, *filenodes):
1237 """
1241 """
1238 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1242 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1239 *removed* in next commit.
1243 *removed* in next commit.
1240
1244
1241 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1245 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1242 be *removed*
1246 be *removed*
1243 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1247 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1244 be *changed*
1248 be *changed*
1245 """
1249 """
1246 for node in filenodes:
1250 for node in filenodes:
1247 if node.path in (n.path for n in self.removed):
1251 if node.path in (n.path for n in self.removed):
1248 raise NodeAlreadyRemovedError(
1252 raise NodeAlreadyRemovedError(
1249 "Node is already marked to for removal at %s" % node.path)
1253 "Node is already marked to for removal at %s" % node.path)
1250 if node.path in (n.path for n in self.changed):
1254 if node.path in (n.path for n in self.changed):
1251 raise NodeAlreadyChangedError(
1255 raise NodeAlreadyChangedError(
1252 "Node is already marked to be changed at %s" % node.path)
1256 "Node is already marked to be changed at %s" % node.path)
1253 # We only mark node as *removed* - real removal is done by
1257 # We only mark node as *removed* - real removal is done by
1254 # commit method
1258 # commit method
1255 self.removed.append(node)
1259 self.removed.append(node)
1256
1260
1257 def reset(self):
1261 def reset(self):
1258 """
1262 """
1259 Resets this instance to initial state (cleans ``added``, ``changed``
1263 Resets this instance to initial state (cleans ``added``, ``changed``
1260 and ``removed`` lists).
1264 and ``removed`` lists).
1261 """
1265 """
1262 self.added = []
1266 self.added = []
1263 self.changed = []
1267 self.changed = []
1264 self.removed = []
1268 self.removed = []
1265 self.parents = []
1269 self.parents = []
1266
1270
1267 def get_ipaths(self):
1271 def get_ipaths(self):
1268 """
1272 """
1269 Returns generator of paths from nodes marked as added, changed or
1273 Returns generator of paths from nodes marked as added, changed or
1270 removed.
1274 removed.
1271 """
1275 """
1272 for node in itertools.chain(self.added, self.changed, self.removed):
1276 for node in itertools.chain(self.added, self.changed, self.removed):
1273 yield node.path
1277 yield node.path
1274
1278
1275 def get_paths(self):
1279 def get_paths(self):
1276 """
1280 """
1277 Returns list of paths from nodes marked as added, changed or removed.
1281 Returns list of paths from nodes marked as added, changed or removed.
1278 """
1282 """
1279 return list(self.get_ipaths())
1283 return list(self.get_ipaths())
1280
1284
1281 def check_integrity(self, parents=None):
1285 def check_integrity(self, parents=None):
1282 """
1286 """
1283 Checks in-memory commit's integrity. Also, sets parents if not
1287 Checks in-memory commit's integrity. Also, sets parents if not
1284 already set.
1288 already set.
1285
1289
1286 :raises CommitError: if any error occurs (i.e.
1290 :raises CommitError: if any error occurs (i.e.
1287 ``NodeDoesNotExistError``).
1291 ``NodeDoesNotExistError``).
1288 """
1292 """
1289 if not self.parents:
1293 if not self.parents:
1290 parents = parents or []
1294 parents = parents or []
1291 if len(parents) == 0:
1295 if len(parents) == 0:
1292 try:
1296 try:
1293 parents = [self.repository.get_commit(), None]
1297 parents = [self.repository.get_commit(), None]
1294 except EmptyRepositoryError:
1298 except EmptyRepositoryError:
1295 parents = [None, None]
1299 parents = [None, None]
1296 elif len(parents) == 1:
1300 elif len(parents) == 1:
1297 parents += [None]
1301 parents += [None]
1298 self.parents = parents
1302 self.parents = parents
1299
1303
1300 # Local parents, only if not None
1304 # Local parents, only if not None
1301 parents = [p for p in self.parents if p]
1305 parents = [p for p in self.parents if p]
1302
1306
1303 # Check nodes marked as added
1307 # Check nodes marked as added
1304 for p in parents:
1308 for p in parents:
1305 for node in self.added:
1309 for node in self.added:
1306 try:
1310 try:
1307 p.get_node(node.path)
1311 p.get_node(node.path)
1308 except NodeDoesNotExistError:
1312 except NodeDoesNotExistError:
1309 pass
1313 pass
1310 else:
1314 else:
1311 raise NodeAlreadyExistsError(
1315 raise NodeAlreadyExistsError(
1312 "Node `%s` already exists at %s" % (node.path, p))
1316 "Node `%s` already exists at %s" % (node.path, p))
1313
1317
1314 # Check nodes marked as changed
1318 # Check nodes marked as changed
1315 missing = set(self.changed)
1319 missing = set(self.changed)
1316 not_changed = set(self.changed)
1320 not_changed = set(self.changed)
1317 if self.changed and not parents:
1321 if self.changed and not parents:
1318 raise NodeDoesNotExistError(str(self.changed[0].path))
1322 raise NodeDoesNotExistError(str(self.changed[0].path))
1319 for p in parents:
1323 for p in parents:
1320 for node in self.changed:
1324 for node in self.changed:
1321 try:
1325 try:
1322 old = p.get_node(node.path)
1326 old = p.get_node(node.path)
1323 missing.remove(node)
1327 missing.remove(node)
1324 # if content actually changed, remove node from not_changed
1328 # if content actually changed, remove node from not_changed
1325 if old.content != node.content:
1329 if old.content != node.content:
1326 not_changed.remove(node)
1330 not_changed.remove(node)
1327 except NodeDoesNotExistError:
1331 except NodeDoesNotExistError:
1328 pass
1332 pass
1329 if self.changed and missing:
1333 if self.changed and missing:
1330 raise NodeDoesNotExistError(
1334 raise NodeDoesNotExistError(
1331 "Node `%s` marked as modified but missing in parents: %s"
1335 "Node `%s` marked as modified but missing in parents: %s"
1332 % (node.path, parents))
1336 % (node.path, parents))
1333
1337
1334 if self.changed and not_changed:
1338 if self.changed and not_changed:
1335 raise NodeNotChangedError(
1339 raise NodeNotChangedError(
1336 "Node `%s` wasn't actually changed (parents: %s)"
1340 "Node `%s` wasn't actually changed (parents: %s)"
1337 % (not_changed.pop().path, parents))
1341 % (not_changed.pop().path, parents))
1338
1342
1339 # Check nodes marked as removed
1343 # Check nodes marked as removed
1340 if self.removed and not parents:
1344 if self.removed and not parents:
1341 raise NodeDoesNotExistError(
1345 raise NodeDoesNotExistError(
1342 "Cannot remove node at %s as there "
1346 "Cannot remove node at %s as there "
1343 "were no parents specified" % self.removed[0].path)
1347 "were no parents specified" % self.removed[0].path)
1344 really_removed = set()
1348 really_removed = set()
1345 for p in parents:
1349 for p in parents:
1346 for node in self.removed:
1350 for node in self.removed:
1347 try:
1351 try:
1348 p.get_node(node.path)
1352 p.get_node(node.path)
1349 really_removed.add(node)
1353 really_removed.add(node)
1350 except CommitError:
1354 except CommitError:
1351 pass
1355 pass
1352 not_removed = set(self.removed) - really_removed
1356 not_removed = set(self.removed) - really_removed
1353 if not_removed:
1357 if not_removed:
1354 # TODO: johbo: This code branch does not seem to be covered
1358 # TODO: johbo: This code branch does not seem to be covered
1355 raise NodeDoesNotExistError(
1359 raise NodeDoesNotExistError(
1356 "Cannot remove node at %s from "
1360 "Cannot remove node at %s from "
1357 "following parents: %s" % (not_removed, parents))
1361 "following parents: %s" % (not_removed, parents))
1358
1362
1359 def commit(
1363 def commit(
1360 self, message, author, parents=None, branch=None, date=None,
1364 self, message, author, parents=None, branch=None, date=None,
1361 **kwargs):
1365 **kwargs):
1362 """
1366 """
1363 Performs in-memory commit (doesn't check workdir in any way) and
1367 Performs in-memory commit (doesn't check workdir in any way) and
1364 returns newly created :class:`BaseCommit`. Updates repository's
1368 returns newly created :class:`BaseCommit`. Updates repository's
1365 attribute `commits`.
1369 attribute `commits`.
1366
1370
1367 .. note::
1371 .. note::
1368
1372
1369 While overriding this method each backend's should call
1373 While overriding this method each backend's should call
1370 ``self.check_integrity(parents)`` in the first place.
1374 ``self.check_integrity(parents)`` in the first place.
1371
1375
1372 :param message: message of the commit
1376 :param message: message of the commit
1373 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1377 :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
1374 :param parents: single parent or sequence of parents from which commit
1378 :param parents: single parent or sequence of parents from which commit
1375 would be derived
1379 would be derived
1376 :param date: ``datetime.datetime`` instance. Defaults to
1380 :param date: ``datetime.datetime`` instance. Defaults to
1377 ``datetime.datetime.now()``.
1381 ``datetime.datetime.now()``.
1378 :param branch: branch name, as string. If none given, default backend's
1382 :param branch: branch name, as string. If none given, default backend's
1379 branch would be used.
1383 branch would be used.
1380
1384
1381 :raises ``CommitError``: if any error occurs while committing
1385 :raises ``CommitError``: if any error occurs while committing
1382 """
1386 """
1383 raise NotImplementedError
1387 raise NotImplementedError
1384
1388
1385
1389
class BaseInMemoryChangesetClass(type):
    """
    Metaclass shim: makes ``isinstance(obj, BaseInMemoryChangeset)`` return
    True for any ``BaseInMemoryCommit`` instance, so code still using the
    deprecated name keeps working.
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseInMemoryCommit)
1390
1394
1391
1395
class BaseInMemoryChangeset(BaseInMemoryCommit):
    """
    Deprecated alias of :class:`BaseInMemoryCommit`; instantiation warns
    and delegates to the new class.
    """

    __metaclass__ = BaseInMemoryChangesetClass

    def __new__(cls, *args, **kwargs):
        deprecation_msg = "Use BaseCommit instead of BaseInMemoryCommit"
        warnings.warn(deprecation_msg, DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1400
1404
1401
1405
class EmptyCommit(BaseCommit):
    """
    A dummy commit standing in where no real commit exists (for example in
    an empty repository). The commit id may be supplied by the caller.
    """

    def __init__(
            self, commit_id='0' * 40, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self._empty_commit_id = commit_id
        self.repository = repo
        self.alias = alias
        self.idx = idx
        self.message = message
        self.author = author
        self.date = date or datetime.datetime.fromtimestamp(0)

    @LazyProperty
    def raw_id(self):
        """
        Raw string id of this commit (exactly the id given at construction),
        useful for web representation.
        """
        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        """Default branch name of the backend selected via ``alias``."""
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        """First 12 characters of :attr:`raw_id`."""
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        """Alias of :attr:`raw_id`."""
        return self.raw_id

    def get_file_commit(self, path):
        # this commit is its own "last commit" for every path
        return self

    def get_file_content(self, path):
        # an empty commit carries no file content
        return u''

    def get_file_size(self, path):
        return 0
1452
1456
1453
1457
class EmptyChangesetClass(type):
    """
    Metaclass shim: makes ``isinstance(obj, EmptyChangeset)`` return True
    for any ``EmptyCommit`` instance while the deprecated alias remains.
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, EmptyCommit)
1458
1462
1459
1463
class EmptyChangeset(EmptyCommit):
    """
    Deprecated alias of :class:`EmptyCommit`, kept for backward
    compatibility; instantiation warns and maps legacy arguments through.
    """

    __metaclass__ = EmptyChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # NOTE: was `super(EmptyCommit, cls)`, which started the MRO lookup
        # past EmptyCommit; begin at this class, matching the other
        # deprecation shims (see BaseInMemoryChangeset.__new__).
        return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs='0' * 40, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        """
        Translate legacy constructor arguments onto :class:`EmptyCommit`.

        :param cs: commit id (legacy name for ``commit_id``).
        :param revision: legacy name for ``idx``.
        :param requested_revision: no longer supported; only warns.
        """
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        """Deprecated accessor for :attr:`idx`."""
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1488
1492
1489
1493
class EmptyRepository(BaseRepository):
    """
    Null-object repository: ignores all constructor arguments and produces
    empty diffs.
    """

    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        pass

    def get_diff(self, *args, **kwargs):
        # always an empty diff, regardless of the requested range
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        return GitDiff('')
1497
1501
1498
1502
class CollectionGenerator(object):
    """
    Lazy sequence of commits identified by ``commit_ids`` within ``repo``.

    Commits are instantiated on demand during iteration/slicing through
    :meth:`_commit_factory`, which backends may override.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None):
        """
        :param repo: repository the ids belong to.
        :param commit_ids: sequence of commit identifiers.
        :param collection_size: optional explicit length; when given it
            overrides ``len(commit_ids)`` in :meth:`__len__`.
        :param pre_load: attributes to pre-load on generated commits.
        """
        self.repo = repo
        self.commit_ids = commit_ids
        # honour the explicit size when given (previously discarded with a
        # "isn't currently hooked up" TODO); None falls back to
        # len(commit_ids), preserving the old behavior for all callers that
        # did not pass it.
        self.collection_size = collection_size
        self.pre_load = pre_load

    def __len__(self):
        if self.collection_size is not None:
            return self.collection_size
        return self.commit_ids.__len__()

    def __iter__(self):
        for commit_id in self.commit_ids:
            # TODO: johbo: Mercurial passes in commit indices or commit ids
            yield self._commit_factory(commit_id)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(commit_id=commit_id,
                                    pre_load=self.pre_load)

    def __getslice__(self, i, j):
        """
        Returns an iterator of sliced repository
        """
        commit_ids = self.commit_ids[i:j]
        return self.__class__(
            self.repo, commit_ids, pre_load=self.pre_load)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (self.__len__())
1535
1539
1536
1540
class Config(object):
    """
    Per-repository configuration container.

    Mirrors the subset of :class:`ConfigParser.ConfigParser` that the vcs
    layer needs: sectioned key/value storage with get/set/copy/serialize.
    """

    def __init__(self):
        self._values = {}

    def copy(self):
        """Return a copy with per-section dicts duplicated (values shared)."""
        clone = Config()
        clone._values = dict(
            (section, values.copy())
            for section, values in self._values.items())
        return clone

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        """Iterate ``(option, value)`` pairs of ``section`` (may be empty)."""
        return self._values.get(section, {}).iteritems()

    def get(self, section, option):
        """Return the value stored for ``option`` in ``section``, or None."""
        section_values = self._values.get(section, {})
        return section_values.get(option)

    def set(self, section, option, value):
        """Store ``value`` under ``section``/``option``, creating the section."""
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        """Drop every option stored under ``section``."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        return [
            (safe_str(section), safe_str(option), safe_str(value))
            for section in self._values
            for option, value in self._values[section].items()]
1582
1586
1583
1587
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    _meta_re = None
    _header_re = None

    def __init__(self, raw_diff):
        self.raw = raw_diff

    def chunks(self):
        """
        Split the raw diff into per-file chunks on ``diff --git`` markers.

        A leading newline is prepended so every chunk boundary looks the
        same, and the last chunk is flagged because it lacks the trailing
        newline the split consumed.
        """
        pieces = ('\n' + self.raw).split('\ndiff --git')
        header = pieces[0]

        if self._meta_re:
            # result intentionally unused; kept for behavior parity with
            # the original implementation (possible subclass hook)
            match = self._meta_re.match(header)

        file_chunks = pieces[1:]
        last_index = len(file_chunks)

        return (
            DiffChunk(raw_chunk, self, index == last_index)
            for index, raw_chunk in enumerate(file_chunks, start=1))
1616
1620
1617
1621
class DiffChunk(object):
    """One per-file chunk of a :class:`Diff`, split on ``diff --git``."""

    def __init__(self, chunk, diff, last_chunk):
        self._diff = diff

        # Diff.chunks() consumed the newline before each "diff --git";
        # restore it on every chunk EXCEPT the final one.
        if not last_chunk:
            chunk = chunk + '\n'

        header_match = self._diff._header_re.match(chunk)
        self.header = header_match.groupdict()
        self.diff = chunk[header_match.end():]
        self.raw = chunk
1632
1636
1633
1637
class BasePathPermissionChecker(object):
    """
    Decides whether individual repository paths are accessible.

    Use :meth:`create_from_patterns` to obtain the cheapest checker for a
    given include/exclude fnmatch pattern set.
    """

    @staticmethod
    def create_from_patterns(includes, excludes):
        """
        Return the most specific checker: full access when includes is just
        '*' with no excludes, no access when excludes contains '*',
        otherwise a pattern-based checker.
        """
        if includes and '*' in includes and not excludes:
            return AllPathPermissionChecker()
        elif excludes and '*' in excludes:
            return NonePathPermissionChecker()
        else:
            return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        # raise the exception class, not the `NotImplemented` constant:
        # `raise NotImplemented()` produced a TypeError (not callable)
        # instead of the intended NotImplementedError
        raise NotImplementedError()

    def has_access(self, path):
        raise NotImplementedError()
1651
1655
1652
1656
class AllPathPermissionChecker(BasePathPermissionChecker):
    """Checker granting access to every path."""

    @property
    def has_full_access(self):
        return True

    def has_access(self, path):
        # every path is permitted
        return True
1661
1665
1662
1666
class NonePathPermissionChecker(BasePathPermissionChecker):
    """Checker denying access to every path."""

    @property
    def has_full_access(self):
        return False

    def has_access(self, path):
        # no path is permitted
        return False
1671
1675
1672
1676
class PatternPathPermissionChecker(BasePathPermissionChecker):
    """
    Checker that matches paths against fnmatch include/exclude patterns;
    excludes take precedence over includes.
    """

    def __init__(self, includes, excludes):
        self.includes = includes
        self.excludes = excludes
        self.includes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (includes or [])]
        self.excludes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (excludes or [])]

    @property
    def has_full_access(self):
        return '*' in self.includes and not self.excludes

    def has_access(self, path):
        # an exclude match always wins
        if any(regex.match(path) for regex in self.excludes_re):
            return False
        return any(regex.match(path) for regex in self.includes_re)
@@ -1,1036 +1,1036 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import re
22 import re
23 import shutil
23 import shutil
24 import time
24 import time
25 import logging
25 import logging
26 import traceback
26 import traceback
27 import datetime
27 import datetime
28
28
29 from pyramid.threadlocal import get_current_request
29 from pyramid.threadlocal import get_current_request
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from rhodecode import events
32 from rhodecode import events
33 from rhodecode.lib.auth import HasUserGroupPermissionAny
33 from rhodecode.lib.auth import HasUserGroupPermissionAny
34 from rhodecode.lib.caching_query import FromCache
34 from rhodecode.lib.caching_query import FromCache
35 from rhodecode.lib.exceptions import AttachedForksError
35 from rhodecode.lib.exceptions import AttachedForksError
36 from rhodecode.lib.hooks_base import log_delete_repository
36 from rhodecode.lib.hooks_base import log_delete_repository
37 from rhodecode.lib.user_log_filter import user_log_filter
37 from rhodecode.lib.user_log_filter import user_log_filter
38 from rhodecode.lib.utils import make_db_config
38 from rhodecode.lib.utils import make_db_config
39 from rhodecode.lib.utils2 import (
39 from rhodecode.lib.utils2 import (
40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
41 get_current_rhodecode_user, safe_int, datetime_to_time,
41 get_current_rhodecode_user, safe_int, datetime_to_time,
42 action_logger_generic)
42 action_logger_generic)
43 from rhodecode.lib.vcs.backends import get_backend
43 from rhodecode.lib.vcs.backends import get_backend
44 from rhodecode.model import BaseModel
44 from rhodecode.model import BaseModel
45 from rhodecode.model.db import (
45 from rhodecode.model.db import (
46 _hash_key, joinedload, or_, Repository, UserRepoToPerm, UserGroupRepoToPerm,
46 _hash_key, joinedload, or_, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49
49
50 from rhodecode.model.settings import VcsSettingsModel
50 from rhodecode.model.settings import VcsSettingsModel
51
51
52
52
53 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
54
54
55
55
56 class RepoModel(BaseModel):
56 class RepoModel(BaseModel):
57
57
58 cls = Repository
58 cls = Repository
59
59
60 def _get_user_group(self, users_group):
60 def _get_user_group(self, users_group):
61 return self._get_instance(UserGroup, users_group,
61 return self._get_instance(UserGroup, users_group,
62 callback=UserGroup.get_by_group_name)
62 callback=UserGroup.get_by_group_name)
63
63
64 def _get_repo_group(self, repo_group):
64 def _get_repo_group(self, repo_group):
65 return self._get_instance(RepoGroup, repo_group,
65 return self._get_instance(RepoGroup, repo_group,
66 callback=RepoGroup.get_by_group_name)
66 callback=RepoGroup.get_by_group_name)
67
67
68 def _create_default_perms(self, repository, private):
68 def _create_default_perms(self, repository, private):
69 # create default permission
69 # create default permission
70 default = 'repository.read'
70 default = 'repository.read'
71 def_user = User.get_default_user()
71 def_user = User.get_default_user()
72 for p in def_user.user_perms:
72 for p in def_user.user_perms:
73 if p.permission.permission_name.startswith('repository.'):
73 if p.permission.permission_name.startswith('repository.'):
74 default = p.permission.permission_name
74 default = p.permission.permission_name
75 break
75 break
76
76
77 default_perm = 'repository.none' if private else default
77 default_perm = 'repository.none' if private else default
78
78
79 repo_to_perm = UserRepoToPerm()
79 repo_to_perm = UserRepoToPerm()
80 repo_to_perm.permission = Permission.get_by_key(default_perm)
80 repo_to_perm.permission = Permission.get_by_key(default_perm)
81
81
82 repo_to_perm.repository = repository
82 repo_to_perm.repository = repository
83 repo_to_perm.user_id = def_user.user_id
83 repo_to_perm.user_id = def_user.user_id
84
84
85 return repo_to_perm
85 return repo_to_perm
86
86
87 @LazyProperty
87 @LazyProperty
88 def repos_path(self):
88 def repos_path(self):
89 """
89 """
90 Gets the repositories root path from database
90 Gets the repositories root path from database
91 """
91 """
92 settings_model = VcsSettingsModel(sa=self.sa)
92 settings_model = VcsSettingsModel(sa=self.sa)
93 return settings_model.get_repos_location()
93 return settings_model.get_repos_location()
94
94
95 def get(self, repo_id, cache=False):
95 def get(self, repo_id, cache=False):
96 repo = self.sa.query(Repository) \
96 repo = self.sa.query(Repository) \
97 .filter(Repository.repo_id == repo_id)
97 .filter(Repository.repo_id == repo_id)
98
98
99 if cache:
99 if cache:
100 repo = repo.options(
100 repo = repo.options(
101 FromCache("sql_cache_short", "get_repo_%s" % repo_id))
101 FromCache("sql_cache_short", "get_repo_%s" % repo_id))
102 return repo.scalar()
102 return repo.scalar()
103
103
    def get_repo(self, repository):
        """Resolve ``repository`` (instance, id or name) via ``_get_repo``."""
        return self._get_repo(repository)
106
106
107 def get_by_repo_name(self, repo_name, cache=False):
107 def get_by_repo_name(self, repo_name, cache=False):
108 repo = self.sa.query(Repository) \
108 repo = self.sa.query(Repository) \
109 .filter(Repository.repo_name == repo_name)
109 .filter(Repository.repo_name == repo_name)
110
110
111 if cache:
111 if cache:
112 name_key = _hash_key(repo_name)
112 name_key = _hash_key(repo_name)
113 repo = repo.options(
113 repo = repo.options(
114 FromCache("sql_cache_short", "get_repo_%s" % name_key))
114 FromCache("sql_cache_short", "get_repo_%s" % name_key))
115 return repo.scalar()
115 return repo.scalar()
116
116
117 def _extract_id_from_repo_name(self, repo_name):
117 def _extract_id_from_repo_name(self, repo_name):
118 if repo_name.startswith('/'):
118 if repo_name.startswith('/'):
119 repo_name = repo_name.lstrip('/')
119 repo_name = repo_name.lstrip('/')
120 by_id_match = re.match(r'^_(\d{1,})', repo_name)
120 by_id_match = re.match(r'^_(\d{1,})', repo_name)
121 if by_id_match:
121 if by_id_match:
122 return by_id_match.groups()[0]
122 return by_id_match.groups()[0]
123
123
124 def get_repo_by_id(self, repo_name):
124 def get_repo_by_id(self, repo_name):
125 """
125 """
126 Extracts repo_name by id from special urls.
126 Extracts repo_name by id from special urls.
127 Example url is _11/repo_name
127 Example url is _11/repo_name
128
128
129 :param repo_name:
129 :param repo_name:
130 :return: repo object if matched else None
130 :return: repo object if matched else None
131 """
131 """
132
132
133 try:
133 try:
134 _repo_id = self._extract_id_from_repo_name(repo_name)
134 _repo_id = self._extract_id_from_repo_name(repo_name)
135 if _repo_id:
135 if _repo_id:
136 return self.get(_repo_id)
136 return self.get(_repo_id)
137 except Exception:
137 except Exception:
138 log.exception('Failed to extract repo_name from URL')
138 log.exception('Failed to extract repo_name from URL')
139
139
140 return None
140 return None
141
141
142 def get_repos_for_root(self, root, traverse=False):
142 def get_repos_for_root(self, root, traverse=False):
143 if traverse:
143 if traverse:
144 like_expression = u'{}%'.format(safe_unicode(root))
144 like_expression = u'{}%'.format(safe_unicode(root))
145 repos = Repository.query().filter(
145 repos = Repository.query().filter(
146 Repository.repo_name.like(like_expression)).all()
146 Repository.repo_name.like(like_expression)).all()
147 else:
147 else:
148 if root and not isinstance(root, RepoGroup):
148 if root and not isinstance(root, RepoGroup):
149 raise ValueError(
149 raise ValueError(
150 'Root must be an instance '
150 'Root must be an instance '
151 'of RepoGroup, got:{} instead'.format(type(root)))
151 'of RepoGroup, got:{} instead'.format(type(root)))
152 repos = Repository.query().filter(Repository.group == root).all()
152 repos = Repository.query().filter(Repository.group == root).all()
153 return repos
153 return repos
154
154
155 def get_url(self, repo, request=None, permalink=False):
155 def get_url(self, repo, request=None, permalink=False):
156 if not request:
156 if not request:
157 request = get_current_request()
157 request = get_current_request()
158
158
159 if not request:
159 if not request:
160 return
160 return
161
161
162 if permalink:
162 if permalink:
163 return request.route_url(
163 return request.route_url(
164 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
164 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
165 else:
165 else:
166 return request.route_url(
166 return request.route_url(
167 'repo_summary', repo_name=safe_str(repo.repo_name))
167 'repo_summary', repo_name=safe_str(repo.repo_name))
168
168
169 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
169 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
170 if not request:
170 if not request:
171 request = get_current_request()
171 request = get_current_request()
172
172
173 if not request:
173 if not request:
174 return
174 return
175
175
176 if permalink:
176 if permalink:
177 return request.route_url(
177 return request.route_url(
178 'repo_commit', repo_name=safe_str(repo.repo_id),
178 'repo_commit', repo_name=safe_str(repo.repo_id),
179 commit_id=commit_id)
179 commit_id=commit_id)
180
180
181 else:
181 else:
182 return request.route_url(
182 return request.route_url(
183 'repo_commit', repo_name=safe_str(repo.repo_name),
183 'repo_commit', repo_name=safe_str(repo.repo_name),
184 commit_id=commit_id)
184 commit_id=commit_id)
185
185
186 def get_repo_log(self, repo, filter_term):
186 def get_repo_log(self, repo, filter_term):
187 repo_log = UserLog.query()\
187 repo_log = UserLog.query()\
188 .filter(or_(UserLog.repository_id == repo.repo_id,
188 .filter(or_(UserLog.repository_id == repo.repo_id,
189 UserLog.repository_name == repo.repo_name))\
189 UserLog.repository_name == repo.repo_name))\
190 .options(joinedload(UserLog.user))\
190 .options(joinedload(UserLog.user))\
191 .options(joinedload(UserLog.repository))\
191 .options(joinedload(UserLog.repository))\
192 .order_by(UserLog.action_date.desc())
192 .order_by(UserLog.action_date.desc())
193
193
194 repo_log = user_log_filter(repo_log, filter_term)
194 repo_log = user_log_filter(repo_log, filter_term)
195 return repo_log
195 return repo_log
196
196
197 @classmethod
197 @classmethod
198 def update_repoinfo(cls, repositories=None):
198 def update_repoinfo(cls, repositories=None):
199 if not repositories:
199 if not repositories:
200 repositories = Repository.getAll()
200 repositories = Repository.getAll()
201 for repo in repositories:
201 for repo in repositories:
202 repo.update_commit_cache()
202 repo.update_commit_cache()
203
203
    def get_repos_as_dict(self, repo_list=None, admin=False,
                          super_user_actions=False):
        """
        Render ``repo_list`` into a list of row dicts for the repositories
        data table.

        :param repo_list: iterable of Repository objects to render
        :param admin: render the admin-view variants (full names, actions)
        :param super_user_actions: passed through to the 'repo_actions' cell
        :return: list of dicts, one per repository
        """
        # partial renderer for the individual data-table cell templates
        _render = get_current_request().get_partial_renderer(
            'rhodecode:templates/data_table/_dt_elements.mako')
        c = _render.get_call_context()

        def quick_menu(repo_name):
            # per-repo quick-navigation menu cell
            return _render('quick_menu', repo_name)

        def repo_lnk(name, rtype, rstate, private, fork_of):
            # repository name/link cell; short name outside the admin view
            return _render('repo_name', name, rtype, rstate, private, fork_of,
                           short_name=not admin, admin=False)

        def last_change(last_change):
            # in admin view, shift naive datetimes by the local<->UTC offset
            # NOTE(review): .seconds drops the days/sign component of the
            # delta — presumably fine for sub-day offsets, confirm otherwise
            if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
                last_change = last_change + datetime.timedelta(seconds=
                    (datetime.datetime.now() - datetime.datetime.utcnow()).seconds)
            return _render("last_change", last_change)

        def rss_lnk(repo_name):
            # RSS feed link cell
            return _render("rss", repo_name)

        def atom_lnk(repo_name):
            # Atom feed link cell
            return _render("atom", repo_name)

        def last_rev(repo_name, cs_cache):
            # latest-commit cell, rendered from the cached changeset dict
            return _render('revision', repo_name, cs_cache.get('revision'),
                           cs_cache.get('raw_id'), cs_cache.get('author'),
                           cs_cache.get('message'), cs_cache.get('date'))

        def desc(desc):
            # description cell; optionally stylifies metatags per visual settings
            return _render('repo_desc', desc, c.visual.stylify_metatags)

        def state(repo_state):
            # repository state badge cell
            return _render("repo_state", repo_state)

        def repo_actions(repo_name):
            # admin action buttons cell
            return _render('repo_actions', repo_name, super_user_actions)

        def user_profile(username):
            # owner profile link cell
            return _render('user_profile', username)

        repos_data = []
        for repo in repo_list:
            cs_cache = repo.changeset_cache
            # *_raw keys carry sortable values next to the rendered HTML cells
            row = {
                "menu": quick_menu(repo.repo_name),

                "name": repo_lnk(repo.repo_name, repo.repo_type,
                                 repo.repo_state, repo.private, repo.fork),
                "name_raw": repo.repo_name.lower(),

                "last_change": last_change(repo.last_db_change),
                "last_change_raw": datetime_to_time(repo.last_db_change),

                "last_changeset": last_rev(repo.repo_name, cs_cache),
                "last_changeset_raw": cs_cache.get('revision'),

                "desc": desc(repo.description_safe),
                "owner": user_profile(repo.user.username),

                "state": state(repo.repo_state),
                "rss": rss_lnk(repo.repo_name),

                "atom": atom_lnk(repo.repo_name),
            }
            if admin:
                # the action column is only rendered in admin view
                row.update({
                    "action": repo_actions(repo.repo_name),
                })
            repos_data.append(row)

        return repos_data
277
277
    def _get_defaults(self, repo_name):
        """
        Gets information about repository, and returns a dict for
        usage in forms

        :param repo_name: full repository name to look up
        :return: dict of form default values, or None when repo is missing
        """

        repo_info = Repository.get_by_repo_name(repo_name)

        if repo_info is None:
            return None

        defaults = repo_info.get_dict()
        # forms use the short name, without the group path
        defaults['repo_name'] = repo_info.just_name

        groups = repo_info.groups_with_parents
        parent_group = groups[-1] if groups else None

        # we use -1 as this is how in HTML, we mark an empty group
        defaults['repo_group'] = getattr(parent_group, 'group_id', -1)

        # 'strip': True means the DB attribute lacks the 'repo_' form prefix
        keys_to_process = (
            {'k': 'repo_type', 'strip': False},
            {'k': 'repo_enable_downloads', 'strip': True},
            {'k': 'repo_description', 'strip': True},
            {'k': 'repo_enable_locking', 'strip': True},
            {'k': 'repo_landing_rev', 'strip': True},
            {'k': 'clone_uri', 'strip': False},
            {'k': 'push_uri', 'strip': False},
            {'k': 'repo_private', 'strip': True},
            {'k': 'repo_enable_statistics', 'strip': True}
        )

        for item in keys_to_process:
            attr = item['k']
            if item['strip']:
                attr = remove_prefix(item['k'], 'repo_')

            val = defaults[attr]
            if item['k'] == 'repo_landing_rev':
                # landing rev is stored as a pair; the form expects 'type:ref'
                val = ':'.join(defaults[attr])
            defaults[item['k']] = val
            if item['k'] == 'clone_uri':
                # also expose the credential-hidden variant for display
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
            if item['k'] == 'push_uri':
                defaults['push_uri_hidden'] = repo_info.push_uri_hidden

        # fill owner; fall back to the first super admin for orphaned repos
        if repo_info.user:
            defaults.update({'user': repo_info.user.username})
        else:
            replacement_user = User.get_first_super_admin().username
            defaults.update({'user': replacement_user})

        return defaults
334
334
    def update(self, repo, **kwargs):
        """
        Update repository ``repo`` (instance, id or name) from **kwargs.

        Recognized kwargs: 'user', 'repo_group', the keys listed in
        ``update_keys`` below, 'repo_name' (required — used to compute the
        new full name) and any extra-field keys carrying the
        RepositoryField prefix. Renames the repo on the filesystem when
        the name changed. Re-raises after logging on any failure.
        """
        try:
            cur_repo = self._get_repo(repo)
            # remember the current name to detect a rename at the end
            source_repo_name = cur_repo.repo_name
            if 'user' in kwargs:
                cur_repo.user = User.get_by_username(kwargs['user'])

            if 'repo_group' in kwargs:
                cur_repo.group = RepoGroup.get(kwargs['repo_group'])
            log.debug('Updating repo %s with params:%s', cur_repo, kwargs)

            # (strip_flag, form_key); strip=1 keys map to attributes
            # without the 'repo_' prefix
            update_keys = [
                (1, 'repo_description'),
                (1, 'repo_landing_rev'),
                (1, 'repo_private'),
                (1, 'repo_enable_downloads'),
                (1, 'repo_enable_locking'),
                (1, 'repo_enable_statistics'),
                (0, 'clone_uri'),
                (0, 'push_uri'),
                (0, 'fork_id')
            ]
            for strip, k in update_keys:
                if k in kwargs:
                    val = kwargs[k]
                    if strip:
                        k = remove_prefix(k, 'repo_')

                    setattr(cur_repo, k, val)

            # note: 'repo_name' is mandatory here — KeyError if missing
            new_name = cur_repo.get_new_name(kwargs['repo_name'])
            cur_repo.repo_name = new_name

            # if private flag is set, reset default permission to NONE
            if kwargs.get('repo_private'):
                EMPTY_PERM = 'repository.none'
                RepoModel().grant_user_permission(
                    repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
                )

            # handle extra fields
            for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
                                kwargs):
                k = RepositoryField.un_prefix_key(field)
                ex_field = RepositoryField.get_by_key_name(
                    key=k, repo=cur_repo)
                if ex_field:
                    ex_field.field_value = kwargs[field]
                    self.sa.add(ex_field)
            cur_repo.updated_on = datetime.datetime.now()
            self.sa.add(cur_repo)

            if source_repo_name != new_name:
                # rename repository
                self._rename_filesystem_repo(
                    old=source_repo_name, new=new_name)

            return cur_repo
        except Exception:
            log.error(traceback.format_exc())
            raise
396
396
    def _create_repo(self, repo_name, repo_type, description, owner,
                     private=False, clone_uri=None, repo_group=None,
                     landing_rev='rev:tip', fork_of=None,
                     copy_fork_permissions=False, enable_statistics=False,
                     enable_locking=False, enable_downloads=False,
                     copy_group_permissions=False,
                     state=Repository.STATE_PENDING):
        """
        Create repository inside database with PENDING state, this should be
        only executed by create() repo. With exception of importing existing
        repos

        Permissions are seeded from the fork parent, the parent group, or
        the defaults — in that priority order. Fires RepoPreCreateEvent
        before adding and RepoCreateEvent after a successful flush; any
        failure is logged and re-raised.
        """
        from rhodecode.model.scm import ScmModel

        # resolve loose references (names/ids) into model instances
        owner = self._get_user(owner)
        fork_of = self._get_repo(fork_of)
        repo_group = self._get_repo_group(safe_int(repo_group))

        try:
            repo_name = safe_unicode(repo_name)
            description = safe_unicode(description)
            # repo name is just a name of repository
            # while repo_name_full is a full qualified name that is combined
            # with name and path of group
            repo_name_full = repo_name
            repo_name = repo_name.split(Repository.NAME_SEP)[-1]

            new_repo = Repository()
            new_repo.repo_state = state
            new_repo.enable_statistics = False
            new_repo.repo_name = repo_name_full
            new_repo.repo_type = repo_type
            new_repo.user = owner
            new_repo.group = repo_group
            new_repo.description = description or repo_name
            new_repo.private = private
            new_repo.clone_uri = clone_uri
            new_repo.landing_rev = landing_rev

            new_repo.enable_statistics = enable_statistics
            new_repo.enable_locking = enable_locking
            new_repo.enable_downloads = enable_downloads

            if repo_group:
                # the parent group's locking setting wins over the argument
                new_repo.enable_locking = repo_group.enable_locking

            if fork_of:
                parent_repo = fork_of
                new_repo.fork = parent_repo

            events.trigger(events.RepoPreCreateEvent(new_repo))

            self.sa.add(new_repo)

            EMPTY_PERM = 'repository.none'
            if fork_of and copy_fork_permissions:
                # copy user and user-group permissions from the fork parent
                repo = fork_of
                user_perms = UserRepoToPerm.query() \
                    .filter(UserRepoToPerm.repository == repo).all()
                group_perms = UserGroupRepoToPerm.query() \
                    .filter(UserGroupRepoToPerm.repository == repo).all()

                for perm in user_perms:
                    UserRepoToPerm.create(
                        perm.user, new_repo, perm.permission)

                for perm in group_perms:
                    UserGroupRepoToPerm.create(
                        perm.users_group, new_repo, perm.permission)
                # in case we copy permissions and also set this repo to private
                # override the default user permission to make it a private
                # repo
                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            elif repo_group and copy_group_permissions:
                # translate the parent group's permissions into repo-level ones
                user_perms = UserRepoGroupToPerm.query() \
                    .filter(UserRepoGroupToPerm.group == repo_group).all()

                group_perms = UserGroupRepoGroupToPerm.query() \
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()

                for perm in user_perms:
                    # 'group.*' permission names map to 'repository.*'
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)

                for perm in group_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserGroupRepoToPerm.create(
                        perm.users_group, new_repo, perm_obj)

                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            else:
                # no source to copy from: apply the default permission set
                perm_obj = self._create_default_perms(new_repo, private)
                self.sa.add(perm_obj)

            # now automatically start following this repository as owner
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
                                                    owner.user_id)

            # we need to flush here, in order to check if database won't
            # throw any exceptions, create filesystem dirs at the very end
            self.sa.flush()
            events.trigger(events.RepoCreateEvent(new_repo))
            return new_repo

        except Exception:
            log.error(traceback.format_exc())
            raise
514
514
515 def create(self, form_data, cur_user):
515 def create(self, form_data, cur_user):
516 """
516 """
517 Create repository using celery tasks
517 Create repository using celery tasks
518
518
519 :param form_data:
519 :param form_data:
520 :param cur_user:
520 :param cur_user:
521 """
521 """
522 from rhodecode.lib.celerylib import tasks, run_task
522 from rhodecode.lib.celerylib import tasks, run_task
523 return run_task(tasks.create_repo, form_data, cur_user)
523 return run_task(tasks.create_repo, form_data, cur_user)
524
524
    def update_permissions(self, repo, perm_additions=None, perm_updates=None,
                           perm_deletions=None, check_perms=True,
                           cur_user=None):
        """
        Apply permission additions, updates and deletions on ``repo``.

        Each of perm_additions/perm_updates/perm_deletions is an iterable
        of ``(member_id, perm, member_type)`` triples where member_type is
        'user' for users and anything else for user groups.

        :param check_perms: when True, user-group changes require
            ``cur_user`` to hold one of the usergroup read/write/admin perms
        :return: dict with 'added'/'updated'/'deleted' change descriptions
        """
        if not perm_additions:
            perm_additions = []
        if not perm_updates:
            perm_updates = []
        if not perm_deletions:
            perm_deletions = []

        # permissions required on a user group before we may alter it
        req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

        changes = {
            'added': [],
            'updated': [],
            'deleted': []
        }
        # update permissions
        for member_id, perm, member_type in perm_updates:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                # this updates also current one if found
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            else:  # set for user group
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)

            changes['updated'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})

        # set new permissions
        for member_id, perm, member_type in perm_additions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            else:  # set for user group
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            changes['added'].append({'type': member_type, 'id': member_id,
                                     'name': member_name, 'new_perm': perm})
        # delete permissions
        for member_id, perm, member_type in perm_deletions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.revoke_user_permission(repo=repo, user=member_id)
            else:  # set for user group
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.revoke_user_group_permission(
                        repo=repo, group_name=member_id)

            changes['deleted'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})
        return changes
594
594
595 def create_fork(self, form_data, cur_user):
595 def create_fork(self, form_data, cur_user):
596 """
596 """
597 Simple wrapper into executing celery task for fork creation
597 Simple wrapper into executing celery task for fork creation
598
598
599 :param form_data:
599 :param form_data:
600 :param cur_user:
600 :param cur_user:
601 """
601 """
602 from rhodecode.lib.celerylib import tasks, run_task
602 from rhodecode.lib.celerylib import tasks, run_task
603 return run_task(tasks.create_repo_fork, form_data, cur_user)
603 return run_task(tasks.create_repo_fork, form_data, cur_user)
604
604
    def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
        """
        Delete given repository, forks parameter defines what do do with
        attached forks. Throws AttachedForksError if deleted repo has attached
        forks

        :param repo: Repository instance, id or name
        :param forks: str 'delete' or 'detach'; any other value raises
            AttachedForksError when forks exist
        :param fs_remove: remove(archive) repo from filesystem
        :param cur_user: username recorded in the delete journal entry;
            defaults to the current rhodecode user
        """
        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', None)
        repo = self._get_repo(repo)
        if repo:
            if forks == 'detach':
                # keep forks alive, just sever the parent link
                for r in repo.forks:
                    r.fork = None
                    self.sa.add(r)
            elif forks == 'delete':
                # recursively delete the whole fork tree
                for r in repo.forks:
                    self.delete(r, forks='delete')
            elif [f for f in repo.forks]:
                raise AttachedForksError()

            # snapshot data before deletion so it can be journaled afterwards
            old_repo_dict = repo.get_dict()
            events.trigger(events.RepoPreDeleteEvent(repo))
            try:
                self.sa.delete(repo)
                if fs_remove:
                    self._delete_filesystem_repo(repo)
                else:
                    log.debug('skipping removal from filesystem')
                old_repo_dict.update({
                    'deleted_by': cur_user,
                    'deleted_on': time.time(),
                })
                log_delete_repository(**old_repo_dict)
                events.trigger(events.RepoDeleteEvent(repo))
            except Exception:
                log.error(traceback.format_exc())
                raise
646
646
647 def grant_user_permission(self, repo, user, perm):
647 def grant_user_permission(self, repo, user, perm):
648 """
648 """
649 Grant permission for user on given repository, or update existing one
649 Grant permission for user on given repository, or update existing one
650 if found
650 if found
651
651
652 :param repo: Instance of Repository, repository_id, or repository name
652 :param repo: Instance of Repository, repository_id, or repository name
653 :param user: Instance of User, user_id or username
653 :param user: Instance of User, user_id or username
654 :param perm: Instance of Permission, or permission_name
654 :param perm: Instance of Permission, or permission_name
655 """
655 """
656 user = self._get_user(user)
656 user = self._get_user(user)
657 repo = self._get_repo(repo)
657 repo = self._get_repo(repo)
658 permission = self._get_perm(perm)
658 permission = self._get_perm(perm)
659
659
660 # check if we have that permission already
660 # check if we have that permission already
661 obj = self.sa.query(UserRepoToPerm) \
661 obj = self.sa.query(UserRepoToPerm) \
662 .filter(UserRepoToPerm.user == user) \
662 .filter(UserRepoToPerm.user == user) \
663 .filter(UserRepoToPerm.repository == repo) \
663 .filter(UserRepoToPerm.repository == repo) \
664 .scalar()
664 .scalar()
665 if obj is None:
665 if obj is None:
666 # create new !
666 # create new !
667 obj = UserRepoToPerm()
667 obj = UserRepoToPerm()
668 obj.repository = repo
668 obj.repository = repo
669 obj.user = user
669 obj.user = user
670 obj.permission = permission
670 obj.permission = permission
671 self.sa.add(obj)
671 self.sa.add(obj)
672 log.debug('Granted perm %s to %s on %s', perm, user, repo)
672 log.debug('Granted perm %s to %s on %s', perm, user, repo)
673 action_logger_generic(
673 action_logger_generic(
674 'granted permission: {} to user: {} on repo: {}'.format(
674 'granted permission: {} to user: {} on repo: {}'.format(
675 perm, user, repo), namespace='security.repo')
675 perm, user, repo), namespace='security.repo')
676 return obj
676 return obj
677
677
678 def revoke_user_permission(self, repo, user):
678 def revoke_user_permission(self, repo, user):
679 """
679 """
680 Revoke permission for user on given repository
680 Revoke permission for user on given repository
681
681
682 :param repo: Instance of Repository, repository_id, or repository name
682 :param repo: Instance of Repository, repository_id, or repository name
683 :param user: Instance of User, user_id or username
683 :param user: Instance of User, user_id or username
684 """
684 """
685
685
686 user = self._get_user(user)
686 user = self._get_user(user)
687 repo = self._get_repo(repo)
687 repo = self._get_repo(repo)
688
688
689 obj = self.sa.query(UserRepoToPerm) \
689 obj = self.sa.query(UserRepoToPerm) \
690 .filter(UserRepoToPerm.repository == repo) \
690 .filter(UserRepoToPerm.repository == repo) \
691 .filter(UserRepoToPerm.user == user) \
691 .filter(UserRepoToPerm.user == user) \
692 .scalar()
692 .scalar()
693 if obj:
693 if obj:
694 self.sa.delete(obj)
694 self.sa.delete(obj)
695 log.debug('Revoked perm on %s on %s', repo, user)
695 log.debug('Revoked perm on %s on %s', repo, user)
696 action_logger_generic(
696 action_logger_generic(
697 'revoked permission from user: {} on repo: {}'.format(
697 'revoked permission from user: {} on repo: {}'.format(
698 user, repo), namespace='security.repo')
698 user, repo), namespace='security.repo')
699
699
700 def grant_user_group_permission(self, repo, group_name, perm):
700 def grant_user_group_permission(self, repo, group_name, perm):
701 """
701 """
702 Grant permission for user group on given repository, or update
702 Grant permission for user group on given repository, or update
703 existing one if found
703 existing one if found
704
704
705 :param repo: Instance of Repository, repository_id, or repository name
705 :param repo: Instance of Repository, repository_id, or repository name
706 :param group_name: Instance of UserGroup, users_group_id,
706 :param group_name: Instance of UserGroup, users_group_id,
707 or user group name
707 or user group name
708 :param perm: Instance of Permission, or permission_name
708 :param perm: Instance of Permission, or permission_name
709 """
709 """
710 repo = self._get_repo(repo)
710 repo = self._get_repo(repo)
711 group_name = self._get_user_group(group_name)
711 group_name = self._get_user_group(group_name)
712 permission = self._get_perm(perm)
712 permission = self._get_perm(perm)
713
713
714 # check if we have that permission already
714 # check if we have that permission already
715 obj = self.sa.query(UserGroupRepoToPerm) \
715 obj = self.sa.query(UserGroupRepoToPerm) \
716 .filter(UserGroupRepoToPerm.users_group == group_name) \
716 .filter(UserGroupRepoToPerm.users_group == group_name) \
717 .filter(UserGroupRepoToPerm.repository == repo) \
717 .filter(UserGroupRepoToPerm.repository == repo) \
718 .scalar()
718 .scalar()
719
719
720 if obj is None:
720 if obj is None:
721 # create new
721 # create new
722 obj = UserGroupRepoToPerm()
722 obj = UserGroupRepoToPerm()
723
723
724 obj.repository = repo
724 obj.repository = repo
725 obj.users_group = group_name
725 obj.users_group = group_name
726 obj.permission = permission
726 obj.permission = permission
727 self.sa.add(obj)
727 self.sa.add(obj)
728 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
728 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
729 action_logger_generic(
729 action_logger_generic(
730 'granted permission: {} to usergroup: {} on repo: {}'.format(
730 'granted permission: {} to usergroup: {} on repo: {}'.format(
731 perm, group_name, repo), namespace='security.repo')
731 perm, group_name, repo), namespace='security.repo')
732
732
733 return obj
733 return obj
734
734
735 def revoke_user_group_permission(self, repo, group_name):
735 def revoke_user_group_permission(self, repo, group_name):
736 """
736 """
737 Revoke permission for user group on given repository
737 Revoke permission for user group on given repository
738
738
739 :param repo: Instance of Repository, repository_id, or repository name
739 :param repo: Instance of Repository, repository_id, or repository name
740 :param group_name: Instance of UserGroup, users_group_id,
740 :param group_name: Instance of UserGroup, users_group_id,
741 or user group name
741 or user group name
742 """
742 """
743 repo = self._get_repo(repo)
743 repo = self._get_repo(repo)
744 group_name = self._get_user_group(group_name)
744 group_name = self._get_user_group(group_name)
745
745
746 obj = self.sa.query(UserGroupRepoToPerm) \
746 obj = self.sa.query(UserGroupRepoToPerm) \
747 .filter(UserGroupRepoToPerm.repository == repo) \
747 .filter(UserGroupRepoToPerm.repository == repo) \
748 .filter(UserGroupRepoToPerm.users_group == group_name) \
748 .filter(UserGroupRepoToPerm.users_group == group_name) \
749 .scalar()
749 .scalar()
750 if obj:
750 if obj:
751 self.sa.delete(obj)
751 self.sa.delete(obj)
752 log.debug('Revoked perm to %s on %s', repo, group_name)
752 log.debug('Revoked perm to %s on %s', repo, group_name)
753 action_logger_generic(
753 action_logger_generic(
754 'revoked permission from usergroup: {} on repo: {}'.format(
754 'revoked permission from usergroup: {} on repo: {}'.format(
755 group_name, repo), namespace='security.repo')
755 group_name, repo), namespace='security.repo')
756
756
757 def delete_stats(self, repo_name):
757 def delete_stats(self, repo_name):
758 """
758 """
759 removes stats for given repo
759 removes stats for given repo
760
760
761 :param repo_name:
761 :param repo_name:
762 """
762 """
763 repo = self._get_repo(repo_name)
763 repo = self._get_repo(repo_name)
764 try:
764 try:
765 obj = self.sa.query(Statistics) \
765 obj = self.sa.query(Statistics) \
766 .filter(Statistics.repository == repo).scalar()
766 .filter(Statistics.repository == repo).scalar()
767 if obj:
767 if obj:
768 self.sa.delete(obj)
768 self.sa.delete(obj)
769 except Exception:
769 except Exception:
770 log.error(traceback.format_exc())
770 log.error(traceback.format_exc())
771 raise
771 raise
772
772
773 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
773 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
774 field_type='str', field_desc=''):
774 field_type='str', field_desc=''):
775
775
776 repo = self._get_repo(repo_name)
776 repo = self._get_repo(repo_name)
777
777
778 new_field = RepositoryField()
778 new_field = RepositoryField()
779 new_field.repository = repo
779 new_field.repository = repo
780 new_field.field_key = field_key
780 new_field.field_key = field_key
781 new_field.field_type = field_type # python type
781 new_field.field_type = field_type # python type
782 new_field.field_value = field_value
782 new_field.field_value = field_value
783 new_field.field_desc = field_desc
783 new_field.field_desc = field_desc
784 new_field.field_label = field_label
784 new_field.field_label = field_label
785 self.sa.add(new_field)
785 self.sa.add(new_field)
786 return new_field
786 return new_field
787
787
788 def delete_repo_field(self, repo_name, field_key):
788 def delete_repo_field(self, repo_name, field_key):
789 repo = self._get_repo(repo_name)
789 repo = self._get_repo(repo_name)
790 field = RepositoryField.get_by_key_name(field_key, repo)
790 field = RepositoryField.get_by_key_name(field_key, repo)
791 if field:
791 if field:
792 self.sa.delete(field)
792 self.sa.delete(field)
793
793
794 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
794 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
795 clone_uri=None, repo_store_location=None,
795 clone_uri=None, repo_store_location=None,
796 use_global_config=False):
796 use_global_config=False):
797 """
797 """
798 makes repository on filesystem. It's group aware means it'll create
798 makes repository on filesystem. It's group aware means it'll create
799 a repository within a group, and alter the paths accordingly of
799 a repository within a group, and alter the paths accordingly of
800 group location
800 group location
801
801
802 :param repo_name:
802 :param repo_name:
803 :param alias:
803 :param alias:
804 :param parent:
804 :param parent:
805 :param clone_uri:
805 :param clone_uri:
806 :param repo_store_location:
806 :param repo_store_location:
807 """
807 """
808 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
808 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
809 from rhodecode.model.scm import ScmModel
809 from rhodecode.model.scm import ScmModel
810
810
811 if Repository.NAME_SEP in repo_name:
811 if Repository.NAME_SEP in repo_name:
812 raise ValueError(
812 raise ValueError(
813 'repo_name must not contain groups got `%s`' % repo_name)
813 'repo_name must not contain groups got `%s`' % repo_name)
814
814
815 if isinstance(repo_group, RepoGroup):
815 if isinstance(repo_group, RepoGroup):
816 new_parent_path = os.sep.join(repo_group.full_path_splitted)
816 new_parent_path = os.sep.join(repo_group.full_path_splitted)
817 else:
817 else:
818 new_parent_path = repo_group or ''
818 new_parent_path = repo_group or ''
819
819
820 if repo_store_location:
820 if repo_store_location:
821 _paths = [repo_store_location]
821 _paths = [repo_store_location]
822 else:
822 else:
823 _paths = [self.repos_path, new_parent_path, repo_name]
823 _paths = [self.repos_path, new_parent_path, repo_name]
824 # we need to make it str for mercurial
824 # we need to make it str for mercurial
825 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
825 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
826
826
827 # check if this path is not a repository
827 # check if this path is not a repository
828 if is_valid_repo(repo_path, self.repos_path):
828 if is_valid_repo(repo_path, self.repos_path):
829 raise Exception('This path %s is a valid repository' % repo_path)
829 raise Exception('This path %s is a valid repository' % repo_path)
830
830
831 # check if this path is a group
831 # check if this path is a group
832 if is_valid_repo_group(repo_path, self.repos_path):
832 if is_valid_repo_group(repo_path, self.repos_path):
833 raise Exception('This path %s is a valid group' % repo_path)
833 raise Exception('This path %s is a valid group' % repo_path)
834
834
835 log.info('creating repo %s in %s from url: `%s`',
835 log.info('creating repo %s in %s from url: `%s`',
836 repo_name, safe_unicode(repo_path),
836 repo_name, safe_unicode(repo_path),
837 obfuscate_url_pw(clone_uri))
837 obfuscate_url_pw(clone_uri))
838
838
839 backend = get_backend(repo_type)
839 backend = get_backend(repo_type)
840
840
841 config_repo = None if use_global_config else repo_name
841 config_repo = None if use_global_config else repo_name
842 if config_repo and new_parent_path:
842 if config_repo and new_parent_path:
843 config_repo = Repository.NAME_SEP.join(
843 config_repo = Repository.NAME_SEP.join(
844 (new_parent_path, config_repo))
844 (new_parent_path, config_repo))
845 config = make_db_config(clear_session=False, repo=config_repo)
845 config = make_db_config(clear_session=False, repo=config_repo)
846 config.set('extensions', 'largefiles', '')
846 config.set('extensions', 'largefiles', '')
847
847
848 # patch and reset hooks section of UI config to not run any
848 # patch and reset hooks section of UI config to not run any
849 # hooks on creating remote repo
849 # hooks on creating remote repo
850 config.clear_section('hooks')
850 config.clear_section('hooks')
851
851
852 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
852 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
853 if repo_type == 'git':
853 if repo_type == 'git':
854 repo = backend(
854 repo = backend(
855 repo_path, config=config, create=True, src_url=clone_uri,
855 repo_path, config=config, create=True, src_url=clone_uri,
856 bare=True)
856 bare=True)
857 else:
857 else:
858 repo = backend(
858 repo = backend(
859 repo_path, config=config, create=True, src_url=clone_uri)
859 repo_path, config=config, create=True, src_url=clone_uri)
860
860
861 ScmModel().install_hooks(repo, repo_type=repo_type)
861 repo.install_hooks()
862
862
863 log.debug('Created repo %s with %s backend',
863 log.debug('Created repo %s with %s backend',
864 safe_unicode(repo_name), safe_unicode(repo_type))
864 safe_unicode(repo_name), safe_unicode(repo_type))
865 return repo
865 return repo
866
866
867 def _rename_filesystem_repo(self, old, new):
867 def _rename_filesystem_repo(self, old, new):
868 """
868 """
869 renames repository on filesystem
869 renames repository on filesystem
870
870
871 :param old: old name
871 :param old: old name
872 :param new: new name
872 :param new: new name
873 """
873 """
874 log.info('renaming repo from %s to %s', old, new)
874 log.info('renaming repo from %s to %s', old, new)
875
875
876 old_path = os.path.join(self.repos_path, old)
876 old_path = os.path.join(self.repos_path, old)
877 new_path = os.path.join(self.repos_path, new)
877 new_path = os.path.join(self.repos_path, new)
878 if os.path.isdir(new_path):
878 if os.path.isdir(new_path):
879 raise Exception(
879 raise Exception(
880 'Was trying to rename to already existing dir %s' % new_path
880 'Was trying to rename to already existing dir %s' % new_path
881 )
881 )
882 shutil.move(old_path, new_path)
882 shutil.move(old_path, new_path)
883
883
884 def _delete_filesystem_repo(self, repo):
884 def _delete_filesystem_repo(self, repo):
885 """
885 """
886 removes repo from filesystem, the removal is acctually made by
886 removes repo from filesystem, the removal is acctually made by
887 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
887 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
888 repository is no longer valid for rhodecode, can be undeleted later on
888 repository is no longer valid for rhodecode, can be undeleted later on
889 by reverting the renames on this repository
889 by reverting the renames on this repository
890
890
891 :param repo: repo object
891 :param repo: repo object
892 """
892 """
893 rm_path = os.path.join(self.repos_path, repo.repo_name)
893 rm_path = os.path.join(self.repos_path, repo.repo_name)
894 repo_group = repo.group
894 repo_group = repo.group
895 log.info("Removing repository %s", rm_path)
895 log.info("Removing repository %s", rm_path)
896 # disable hg/git internal that it doesn't get detected as repo
896 # disable hg/git internal that it doesn't get detected as repo
897 alias = repo.repo_type
897 alias = repo.repo_type
898
898
899 config = make_db_config(clear_session=False)
899 config = make_db_config(clear_session=False)
900 config.set('extensions', 'largefiles', '')
900 config.set('extensions', 'largefiles', '')
901 bare = getattr(repo.scm_instance(config=config), 'bare', False)
901 bare = getattr(repo.scm_instance(config=config), 'bare', False)
902
902
903 # skip this for bare git repos
903 # skip this for bare git repos
904 if not bare:
904 if not bare:
905 # disable VCS repo
905 # disable VCS repo
906 vcs_path = os.path.join(rm_path, '.%s' % alias)
906 vcs_path = os.path.join(rm_path, '.%s' % alias)
907 if os.path.exists(vcs_path):
907 if os.path.exists(vcs_path):
908 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
908 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
909
909
910 _now = datetime.datetime.now()
910 _now = datetime.datetime.now()
911 _ms = str(_now.microsecond).rjust(6, '0')
911 _ms = str(_now.microsecond).rjust(6, '0')
912 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
912 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
913 repo.just_name)
913 repo.just_name)
914 if repo_group:
914 if repo_group:
915 # if repository is in group, prefix the removal path with the group
915 # if repository is in group, prefix the removal path with the group
916 args = repo_group.full_path_splitted + [_d]
916 args = repo_group.full_path_splitted + [_d]
917 _d = os.path.join(*args)
917 _d = os.path.join(*args)
918
918
919 if os.path.isdir(rm_path):
919 if os.path.isdir(rm_path):
920 shutil.move(rm_path, os.path.join(self.repos_path, _d))
920 shutil.move(rm_path, os.path.join(self.repos_path, _d))
921
921
922
922
923 class ReadmeFinder:
923 class ReadmeFinder:
924 """
924 """
925 Utility which knows how to find a readme for a specific commit.
925 Utility which knows how to find a readme for a specific commit.
926
926
927 The main idea is that this is a configurable algorithm. When creating an
927 The main idea is that this is a configurable algorithm. When creating an
928 instance you can define parameters, currently only the `default_renderer`.
928 instance you can define parameters, currently only the `default_renderer`.
929 Based on this configuration the method :meth:`search` behaves slightly
929 Based on this configuration the method :meth:`search` behaves slightly
930 different.
930 different.
931 """
931 """
932
932
933 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
933 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
934 path_re = re.compile(r'^docs?', re.IGNORECASE)
934 path_re = re.compile(r'^docs?', re.IGNORECASE)
935
935
936 default_priorities = {
936 default_priorities = {
937 None: 0,
937 None: 0,
938 '.text': 2,
938 '.text': 2,
939 '.txt': 3,
939 '.txt': 3,
940 '.rst': 1,
940 '.rst': 1,
941 '.rest': 2,
941 '.rest': 2,
942 '.md': 1,
942 '.md': 1,
943 '.mkdn': 2,
943 '.mkdn': 2,
944 '.mdown': 3,
944 '.mdown': 3,
945 '.markdown': 4,
945 '.markdown': 4,
946 }
946 }
947
947
948 path_priority = {
948 path_priority = {
949 'doc': 0,
949 'doc': 0,
950 'docs': 1,
950 'docs': 1,
951 }
951 }
952
952
953 FALLBACK_PRIORITY = 99
953 FALLBACK_PRIORITY = 99
954
954
955 RENDERER_TO_EXTENSION = {
955 RENDERER_TO_EXTENSION = {
956 'rst': ['.rst', '.rest'],
956 'rst': ['.rst', '.rest'],
957 'markdown': ['.md', 'mkdn', '.mdown', '.markdown'],
957 'markdown': ['.md', 'mkdn', '.mdown', '.markdown'],
958 }
958 }
959
959
960 def __init__(self, default_renderer=None):
960 def __init__(self, default_renderer=None):
961 self._default_renderer = default_renderer
961 self._default_renderer = default_renderer
962 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
962 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
963 default_renderer, [])
963 default_renderer, [])
964
964
965 def search(self, commit, path='/'):
965 def search(self, commit, path='/'):
966 """
966 """
967 Find a readme in the given `commit`.
967 Find a readme in the given `commit`.
968 """
968 """
969 nodes = commit.get_nodes(path)
969 nodes = commit.get_nodes(path)
970 matches = self._match_readmes(nodes)
970 matches = self._match_readmes(nodes)
971 matches = self._sort_according_to_priority(matches)
971 matches = self._sort_according_to_priority(matches)
972 if matches:
972 if matches:
973 return matches[0].node
973 return matches[0].node
974
974
975 paths = self._match_paths(nodes)
975 paths = self._match_paths(nodes)
976 paths = self._sort_paths_according_to_priority(paths)
976 paths = self._sort_paths_according_to_priority(paths)
977 for path in paths:
977 for path in paths:
978 match = self.search(commit, path=path)
978 match = self.search(commit, path=path)
979 if match:
979 if match:
980 return match
980 return match
981
981
982 return None
982 return None
983
983
984 def _match_readmes(self, nodes):
984 def _match_readmes(self, nodes):
985 for node in nodes:
985 for node in nodes:
986 if not node.is_file():
986 if not node.is_file():
987 continue
987 continue
988 path = node.path.rsplit('/', 1)[-1]
988 path = node.path.rsplit('/', 1)[-1]
989 match = self.readme_re.match(path)
989 match = self.readme_re.match(path)
990 if match:
990 if match:
991 extension = match.group(1)
991 extension = match.group(1)
992 yield ReadmeMatch(node, match, self._priority(extension))
992 yield ReadmeMatch(node, match, self._priority(extension))
993
993
994 def _match_paths(self, nodes):
994 def _match_paths(self, nodes):
995 for node in nodes:
995 for node in nodes:
996 if not node.is_dir():
996 if not node.is_dir():
997 continue
997 continue
998 match = self.path_re.match(node.path)
998 match = self.path_re.match(node.path)
999 if match:
999 if match:
1000 yield node.path
1000 yield node.path
1001
1001
1002 def _priority(self, extension):
1002 def _priority(self, extension):
1003 renderer_priority = (
1003 renderer_priority = (
1004 0 if extension in self._renderer_extensions else 1)
1004 0 if extension in self._renderer_extensions else 1)
1005 extension_priority = self.default_priorities.get(
1005 extension_priority = self.default_priorities.get(
1006 extension, self.FALLBACK_PRIORITY)
1006 extension, self.FALLBACK_PRIORITY)
1007 return (renderer_priority, extension_priority)
1007 return (renderer_priority, extension_priority)
1008
1008
1009 def _sort_according_to_priority(self, matches):
1009 def _sort_according_to_priority(self, matches):
1010
1010
1011 def priority_and_path(match):
1011 def priority_and_path(match):
1012 return (match.priority, match.path)
1012 return (match.priority, match.path)
1013
1013
1014 return sorted(matches, key=priority_and_path)
1014 return sorted(matches, key=priority_and_path)
1015
1015
1016 def _sort_paths_according_to_priority(self, paths):
1016 def _sort_paths_according_to_priority(self, paths):
1017
1017
1018 def priority_and_path(path):
1018 def priority_and_path(path):
1019 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1019 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1020
1020
1021 return sorted(paths, key=priority_and_path)
1021 return sorted(paths, key=priority_and_path)
1022
1022
1023
1023
1024 class ReadmeMatch:
1024 class ReadmeMatch:
1025
1025
1026 def __init__(self, node, match, priority):
1026 def __init__(self, node, match, priority):
1027 self.node = node
1027 self.node = node
1028 self._match = match
1028 self._match = match
1029 self.priority = priority
1029 self.priority = priority
1030
1030
1031 @property
1031 @property
1032 def path(self):
1032 def path(self):
1033 return self.node.path
1033 return self.node.path
1034
1034
1035 def __repr__(self):
1035 def __repr__(self):
1036 return '<ReadmeMatch {} priority={}'.format(self.path, self.priority)
1036 return '<ReadmeMatch {} priority={}'.format(self.path, self.priority)
@@ -1,922 +1,812 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import re
26 import re
27 import sys
27 import sys
28 import traceback
28 import traceback
29 import logging
29 import logging
30 import cStringIO
30 import cStringIO
31 import pkg_resources
31 import pkg_resources
32
32
33 from sqlalchemy import func
33 from sqlalchemy import func
34 from zope.cachedescriptors.property import Lazy as LazyProperty
34 from zope.cachedescriptors.property import Lazy as LazyProperty
35
35
36 import rhodecode
36 import rhodecode
37 from rhodecode.lib.vcs import get_backend
37 from rhodecode.lib.vcs import get_backend
38 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
38 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
39 from rhodecode.lib.vcs.nodes import FileNode
39 from rhodecode.lib.vcs.nodes import FileNode
40 from rhodecode.lib.vcs.backends.base import EmptyCommit
40 from rhodecode.lib.vcs.backends.base import EmptyCommit
41 from rhodecode.lib import helpers as h
41 from rhodecode.lib import helpers as h
42 from rhodecode.lib.auth import (
42 from rhodecode.lib.auth import (
43 HasRepoPermissionAny, HasRepoGroupPermissionAny,
43 HasRepoPermissionAny, HasRepoGroupPermissionAny,
44 HasUserGroupPermissionAny)
44 HasUserGroupPermissionAny)
45 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
45 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
46 from rhodecode.lib import hooks_utils, caches
46 from rhodecode.lib import hooks_utils, caches
47 from rhodecode.lib.utils import (
47 from rhodecode.lib.utils import (
48 get_filesystem_repos, make_db_config)
48 get_filesystem_repos, make_db_config)
49 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
49 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
50 from rhodecode.lib.system_info import get_system_info
50 from rhodecode.lib.system_info import get_system_info
51 from rhodecode.model import BaseModel
51 from rhodecode.model import BaseModel
52 from rhodecode.model.db import (
52 from rhodecode.model.db import (
53 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
53 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
54 PullRequest)
54 PullRequest)
55 from rhodecode.model.settings import VcsSettingsModel
55 from rhodecode.model.settings import VcsSettingsModel
56
56
57 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
58
58
59
59
class UserTemp(object):
    """Minimal user stand-in that carries only a ``user_id``.

    Used where a full ``User`` object is not needed (e.g. action logging).
    """

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{}('id:{}')>".format(type(self).__name__, self.user_id)
66
66
67
67
class RepoTemp(object):
    """Minimal repository stand-in that carries only a ``repo_id``.

    Used where a full ``Repository`` object is not needed.
    """

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{}('id:{}')>".format(type(self).__name__, self.repo_id)
74
74
75
75
class SimpleCachedRepoList(object):
    """
    Lighter iteration over repositories: no scm initialisation, cache-backed,
    and permission-filtered. Yields one plain dict per allowed repository.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' on the sort key means descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, len(self))

    def __iter__(self):
        for dbr in self.db_repo_list:
            # permission check happens per repository, at iteration time
            checker = HasRepoPermissionAny(*self.perm_set)
            if not checker(dbr.repo_name, 'SimpleCachedRepoList check'):
                continue

            yield {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {},
            }
111
111
112
112
113 class _PermCheckIterator(object):
113 class _PermCheckIterator(object):
114
114
115 def __init__(
115 def __init__(
116 self, obj_list, obj_attr, perm_set, perm_checker,
116 self, obj_list, obj_attr, perm_set, perm_checker,
117 extra_kwargs=None):
117 extra_kwargs=None):
118 """
118 """
119 Creates iterator from given list of objects, additionally
119 Creates iterator from given list of objects, additionally
120 checking permission for them from perm_set var
120 checking permission for them from perm_set var
121
121
122 :param obj_list: list of db objects
122 :param obj_list: list of db objects
123 :param obj_attr: attribute of object to pass into perm_checker
123 :param obj_attr: attribute of object to pass into perm_checker
124 :param perm_set: list of permissions to check
124 :param perm_set: list of permissions to check
125 :param perm_checker: callable to check permissions against
125 :param perm_checker: callable to check permissions against
126 """
126 """
127 self.obj_list = obj_list
127 self.obj_list = obj_list
128 self.obj_attr = obj_attr
128 self.obj_attr = obj_attr
129 self.perm_set = perm_set
129 self.perm_set = perm_set
130 self.perm_checker = perm_checker
130 self.perm_checker = perm_checker
131 self.extra_kwargs = extra_kwargs or {}
131 self.extra_kwargs = extra_kwargs or {}
132
132
133 def __len__(self):
133 def __len__(self):
134 return len(self.obj_list)
134 return len(self.obj_list)
135
135
136 def __repr__(self):
136 def __repr__(self):
137 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
137 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
138
138
139 def __iter__(self):
139 def __iter__(self):
140 checker = self.perm_checker(*self.perm_set)
140 checker = self.perm_checker(*self.perm_set)
141 for db_obj in self.obj_list:
141 for db_obj in self.obj_list:
142 # check permission at this level
142 # check permission at this level
143 name = getattr(db_obj, self.obj_attr, None)
143 name = getattr(db_obj, self.obj_attr, None)
144 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
144 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
145 continue
145 continue
146
146
147 yield db_obj
147 yield db_obj
148
148
149
149
class RepoList(_PermCheckIterator):
    """Permission-filtered iterator over ``Repository`` objects."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']
        super(RepoList, self).__init__(
            obj_list=db_repo_list, obj_attr='repo_name',
            perm_set=perm_set, perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
162
162
163
163
class RepoGroupList(_PermCheckIterator):
    """Permission-filtered iterator over ``RepoGroup`` objects."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or ['group.read', 'group.write', 'group.admin']
        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list, obj_attr='group_name',
            perm_set=perm_set, perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
175
175
176
176
class UserGroupList(_PermCheckIterator):
    """Permission-filtered iterator over ``UserGroup`` objects."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        perm_set = perm_set or [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']
        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list, obj_attr='users_group_name',
            perm_set=perm_set, perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
188
188
189
189
190 class ScmModel(BaseModel):
190 class ScmModel(BaseModel):
191 """
191 """
192 Generic Scm Model
192 Generic Scm Model
193 """
193 """
194
194
195 @LazyProperty
195 @LazyProperty
196 def repos_path(self):
196 def repos_path(self):
197 """
197 """
198 Gets the repositories root path from database
198 Gets the repositories root path from database
199 """
199 """
200
200
201 settings_model = VcsSettingsModel(sa=self.sa)
201 settings_model = VcsSettingsModel(sa=self.sa)
202 return settings_model.get_repos_location()
202 return settings_model.get_repos_location()
203
203
204 def repo_scan(self, repos_path=None):
204 def repo_scan(self, repos_path=None):
205 """
205 """
206 Listing of repositories in given path. This path should not be a
206 Listing of repositories in given path. This path should not be a
207 repository itself. Return a dictionary of repository objects
207 repository itself. Return a dictionary of repository objects
208
208
209 :param repos_path: path to directory containing repositories
209 :param repos_path: path to directory containing repositories
210 """
210 """
211
211
212 if repos_path is None:
212 if repos_path is None:
213 repos_path = self.repos_path
213 repos_path = self.repos_path
214
214
215 log.info('scanning for repositories in %s', repos_path)
215 log.info('scanning for repositories in %s', repos_path)
216
216
217 config = make_db_config()
217 config = make_db_config()
218 config.set('extensions', 'largefiles', '')
218 config.set('extensions', 'largefiles', '')
219 repos = {}
219 repos = {}
220
220
221 for name, path in get_filesystem_repos(repos_path, recursive=True):
221 for name, path in get_filesystem_repos(repos_path, recursive=True):
222 # name need to be decomposed and put back together using the /
222 # name need to be decomposed and put back together using the /
223 # since this is internal storage separator for rhodecode
223 # since this is internal storage separator for rhodecode
224 name = Repository.normalize_repo_name(name)
224 name = Repository.normalize_repo_name(name)
225
225
226 try:
226 try:
227 if name in repos:
227 if name in repos:
228 raise RepositoryError('Duplicate repository name %s '
228 raise RepositoryError('Duplicate repository name %s '
229 'found in %s' % (name, path))
229 'found in %s' % (name, path))
230 elif path[0] in rhodecode.BACKENDS:
230 elif path[0] in rhodecode.BACKENDS:
231 klass = get_backend(path[0])
231 klass = get_backend(path[0])
232 repos[name] = klass(path[1], config=config)
232 repos[name] = klass(path[1], config=config)
233 except OSError:
233 except OSError:
234 continue
234 continue
235 log.debug('found %s paths with repositories', len(repos))
235 log.debug('found %s paths with repositories', len(repos))
236 return repos
236 return repos
237
237
238 def get_repos(self, all_repos=None, sort_key=None):
238 def get_repos(self, all_repos=None, sort_key=None):
239 """
239 """
240 Get all repositories from db and for each repo create it's
240 Get all repositories from db and for each repo create it's
241 backend instance and fill that backed with information from database
241 backend instance and fill that backed with information from database
242
242
243 :param all_repos: list of repository names as strings
243 :param all_repos: list of repository names as strings
244 give specific repositories list, good for filtering
244 give specific repositories list, good for filtering
245
245
246 :param sort_key: initial sorting of repositories
246 :param sort_key: initial sorting of repositories
247 """
247 """
248 if all_repos is None:
248 if all_repos is None:
249 all_repos = self.sa.query(Repository)\
249 all_repos = self.sa.query(Repository)\
250 .filter(Repository.group_id == None)\
250 .filter(Repository.group_id == None)\
251 .order_by(func.lower(Repository.repo_name)).all()
251 .order_by(func.lower(Repository.repo_name)).all()
252 repo_iter = SimpleCachedRepoList(
252 repo_iter = SimpleCachedRepoList(
253 all_repos, repos_path=self.repos_path, order_by=sort_key)
253 all_repos, repos_path=self.repos_path, order_by=sort_key)
254 return repo_iter
254 return repo_iter
255
255
256 def get_repo_groups(self, all_groups=None):
256 def get_repo_groups(self, all_groups=None):
257 if all_groups is None:
257 if all_groups is None:
258 all_groups = RepoGroup.query()\
258 all_groups = RepoGroup.query()\
259 .filter(RepoGroup.group_parent_id == None).all()
259 .filter(RepoGroup.group_parent_id == None).all()
260 return [x for x in RepoGroupList(all_groups)]
260 return [x for x in RepoGroupList(all_groups)]
261
261
    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them
        """
        CacheKey.set_invalidate(repo_name, delete=delete)
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            config = repo._config
            config.set('extensions', 'largefiles', '')
            # refresh the stored commit cache so UIs show fresh data
            repo.update_commit_cache(config=config, cs_cache=None)
            caches.clear_repo_caches(repo_name)
280
280
281 def toggle_following_repo(self, follow_repo_id, user_id):
281 def toggle_following_repo(self, follow_repo_id, user_id):
282
282
283 f = self.sa.query(UserFollowing)\
283 f = self.sa.query(UserFollowing)\
284 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
284 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
285 .filter(UserFollowing.user_id == user_id).scalar()
285 .filter(UserFollowing.user_id == user_id).scalar()
286
286
287 if f is not None:
287 if f is not None:
288 try:
288 try:
289 self.sa.delete(f)
289 self.sa.delete(f)
290 return
290 return
291 except Exception:
291 except Exception:
292 log.error(traceback.format_exc())
292 log.error(traceback.format_exc())
293 raise
293 raise
294
294
295 try:
295 try:
296 f = UserFollowing()
296 f = UserFollowing()
297 f.user_id = user_id
297 f.user_id = user_id
298 f.follows_repo_id = follow_repo_id
298 f.follows_repo_id = follow_repo_id
299 self.sa.add(f)
299 self.sa.add(f)
300 except Exception:
300 except Exception:
301 log.error(traceback.format_exc())
301 log.error(traceback.format_exc())
302 raise
302 raise
303
303
304 def toggle_following_user(self, follow_user_id, user_id):
304 def toggle_following_user(self, follow_user_id, user_id):
305 f = self.sa.query(UserFollowing)\
305 f = self.sa.query(UserFollowing)\
306 .filter(UserFollowing.follows_user_id == follow_user_id)\
306 .filter(UserFollowing.follows_user_id == follow_user_id)\
307 .filter(UserFollowing.user_id == user_id).scalar()
307 .filter(UserFollowing.user_id == user_id).scalar()
308
308
309 if f is not None:
309 if f is not None:
310 try:
310 try:
311 self.sa.delete(f)
311 self.sa.delete(f)
312 return
312 return
313 except Exception:
313 except Exception:
314 log.error(traceback.format_exc())
314 log.error(traceback.format_exc())
315 raise
315 raise
316
316
317 try:
317 try:
318 f = UserFollowing()
318 f = UserFollowing()
319 f.user_id = user_id
319 f.user_id = user_id
320 f.follows_user_id = follow_user_id
320 f.follows_user_id = follow_user_id
321 self.sa.add(f)
321 self.sa.add(f)
322 except Exception:
322 except Exception:
323 log.error(traceback.format_exc())
323 log.error(traceback.format_exc())
324 raise
324 raise
325
325
326 def is_following_repo(self, repo_name, user_id, cache=False):
326 def is_following_repo(self, repo_name, user_id, cache=False):
327 r = self.sa.query(Repository)\
327 r = self.sa.query(Repository)\
328 .filter(Repository.repo_name == repo_name).scalar()
328 .filter(Repository.repo_name == repo_name).scalar()
329
329
330 f = self.sa.query(UserFollowing)\
330 f = self.sa.query(UserFollowing)\
331 .filter(UserFollowing.follows_repository == r)\
331 .filter(UserFollowing.follows_repository == r)\
332 .filter(UserFollowing.user_id == user_id).scalar()
332 .filter(UserFollowing.user_id == user_id).scalar()
333
333
334 return f is not None
334 return f is not None
335
335
336 def is_following_user(self, username, user_id, cache=False):
336 def is_following_user(self, username, user_id, cache=False):
337 u = User.get_by_username(username)
337 u = User.get_by_username(username)
338
338
339 f = self.sa.query(UserFollowing)\
339 f = self.sa.query(UserFollowing)\
340 .filter(UserFollowing.follows_user == u)\
340 .filter(UserFollowing.follows_user == u)\
341 .filter(UserFollowing.user_id == user_id).scalar()
341 .filter(UserFollowing.user_id == user_id).scalar()
342
342
343 return f is not None
343 return f is not None
344
344
345 def get_followers(self, repo):
345 def get_followers(self, repo):
346 repo = self._get_repo(repo)
346 repo = self._get_repo(repo)
347
347
348 return self.sa.query(UserFollowing)\
348 return self.sa.query(UserFollowing)\
349 .filter(UserFollowing.follows_repository == repo).count()
349 .filter(UserFollowing.follows_repository == repo).count()
350
350
351 def get_forks(self, repo):
351 def get_forks(self, repo):
352 repo = self._get_repo(repo)
352 repo = self._get_repo(repo)
353 return self.sa.query(Repository)\
353 return self.sa.query(Repository)\
354 .filter(Repository.fork == repo).count()
354 .filter(Repository.fork == repo).count()
355
355
356 def get_pull_requests(self, repo):
356 def get_pull_requests(self, repo):
357 repo = self._get_repo(repo)
357 repo = self._get_repo(repo)
358 return self.sa.query(PullRequest)\
358 return self.sa.query(PullRequest)\
359 .filter(PullRequest.target_repo == repo)\
359 .filter(PullRequest.target_repo == repo)\
360 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
360 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
361
361
362 def mark_as_fork(self, repo, fork, user):
362 def mark_as_fork(self, repo, fork, user):
363 repo = self._get_repo(repo)
363 repo = self._get_repo(repo)
364 fork = self._get_repo(fork)
364 fork = self._get_repo(fork)
365 if fork and repo.repo_id == fork.repo_id:
365 if fork and repo.repo_id == fork.repo_id:
366 raise Exception("Cannot set repository as fork of itself")
366 raise Exception("Cannot set repository as fork of itself")
367
367
368 if fork and repo.repo_type != fork.repo_type:
368 if fork and repo.repo_type != fork.repo_type:
369 raise RepositoryError(
369 raise RepositoryError(
370 "Cannot set repository as fork of repository with other type")
370 "Cannot set repository as fork of repository with other type")
371
371
372 repo.fork = fork
372 repo.fork = fork
373 self.sa.add(repo)
373 self.sa.add(repo)
374 return repo
374 return repo
375
375
376 def pull_changes(self, repo, username, remote_uri=None):
376 def pull_changes(self, repo, username, remote_uri=None):
377 dbrepo = self._get_repo(repo)
377 dbrepo = self._get_repo(repo)
378 remote_uri = remote_uri or dbrepo.clone_uri
378 remote_uri = remote_uri or dbrepo.clone_uri
379 if not remote_uri:
379 if not remote_uri:
380 raise Exception("This repository doesn't have a clone uri")
380 raise Exception("This repository doesn't have a clone uri")
381
381
382 repo = dbrepo.scm_instance(cache=False)
382 repo = dbrepo.scm_instance(cache=False)
383 # TODO: marcink fix this an re-enable since we need common logic
383 # TODO: marcink fix this an re-enable since we need common logic
384 # for hg/git remove hooks so we don't trigger them on fetching
384 # for hg/git remove hooks so we don't trigger them on fetching
385 # commits from remote
385 # commits from remote
386 repo.config.clear_section('hooks')
386 repo.config.clear_section('hooks')
387
387
388 repo_name = dbrepo.repo_name
388 repo_name = dbrepo.repo_name
389 try:
389 try:
390 # TODO: we need to make sure those operations call proper hooks !
390 # TODO: we need to make sure those operations call proper hooks !
391 repo.pull(remote_uri)
391 repo.pull(remote_uri)
392
392
393 self.mark_for_invalidation(repo_name)
393 self.mark_for_invalidation(repo_name)
394 except Exception:
394 except Exception:
395 log.error(traceback.format_exc())
395 log.error(traceback.format_exc())
396 raise
396 raise
397
397
398 def push_changes(self, repo, username, remote_uri=None):
398 def push_changes(self, repo, username, remote_uri=None):
399 dbrepo = self._get_repo(repo)
399 dbrepo = self._get_repo(repo)
400 remote_uri = remote_uri or dbrepo.push_uri
400 remote_uri = remote_uri or dbrepo.push_uri
401 if not remote_uri:
401 if not remote_uri:
402 raise Exception("This repository doesn't have a clone uri")
402 raise Exception("This repository doesn't have a clone uri")
403
403
404 repo = dbrepo.scm_instance(cache=False)
404 repo = dbrepo.scm_instance(cache=False)
405 repo.config.clear_section('hooks')
405 repo.config.clear_section('hooks')
406
406
407 try:
407 try:
408 repo.push(remote_uri)
408 repo.push(remote_uri)
409 except Exception:
409 except Exception:
410 log.error(traceback.format_exc())
410 log.error(traceback.format_exc())
411 raise
411 raise
412
412
    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commits a single in-memory file change on top of *commit*.

        :param repo: SCM instance (vcs backend repository)
        :param repo_name: name used for cache invalidation and hook firing
        :param commit: parent commit the change is applied on
        :param user: db user (or id/username) performing the commit
        :param author: author string of the new commit
        :param message: commit message
        :param content: new file content
        :param f_path: path of the changed file
        :raises IMCCommitError: when the backend commit fails
        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', repo_name=repo_name,
            repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip
450
450
451 def _sanitize_path(self, f_path):
451 def _sanitize_path(self, f_path):
452 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
452 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
453 raise NonRelativePathError('%s is not an relative path' % f_path)
453 raise NonRelativePathError('%s is not an relative path' % f_path)
454 if f_path:
454 if f_path:
455 f_path = os.path.normpath(f_path)
455 f_path = os.path.normpath(f_path)
456 return f_path
456 return f_path
457
457
458 def get_dirnode_metadata(self, request, commit, dir_node):
458 def get_dirnode_metadata(self, request, commit, dir_node):
459 if not dir_node.is_dir():
459 if not dir_node.is_dir():
460 return []
460 return []
461
461
462 data = []
462 data = []
463 for node in dir_node:
463 for node in dir_node:
464 if not node.is_file():
464 if not node.is_file():
465 # we skip file-nodes
465 # we skip file-nodes
466 continue
466 continue
467
467
468 last_commit = node.last_commit
468 last_commit = node.last_commit
469 last_commit_date = last_commit.date
469 last_commit_date = last_commit.date
470 data.append({
470 data.append({
471 'name': node.name,
471 'name': node.name,
472 'size': h.format_byte_size_binary(node.size),
472 'size': h.format_byte_size_binary(node.size),
473 'modified_at': h.format_date(last_commit_date),
473 'modified_at': h.format_date(last_commit_date),
474 'modified_ts': last_commit_date.isoformat(),
474 'modified_ts': last_commit_date.isoformat(),
475 'revision': last_commit.revision,
475 'revision': last_commit.revision,
476 'short_id': last_commit.short_id,
476 'short_id': last_commit.short_id,
477 'message': h.escape(last_commit.message),
477 'message': h.escape(last_commit.message),
478 'author': h.escape(last_commit.author),
478 'author': h.escape(last_commit.author),
479 'user_profile': h.gravatar_with_user(
479 'user_profile': h.gravatar_with_user(
480 request, last_commit.author),
480 request, last_commit.author),
481 })
481 })
482
482
483 return data
483 return data
484
484
485 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
485 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
486 extended_info=False, content=False, max_file_bytes=None):
486 extended_info=False, content=False, max_file_bytes=None):
487 """
487 """
488 recursive walk in root dir and return a set of all path in that dir
488 recursive walk in root dir and return a set of all path in that dir
489 based on repository walk function
489 based on repository walk function
490
490
491 :param repo_name: name of repository
491 :param repo_name: name of repository
492 :param commit_id: commit id for which to list nodes
492 :param commit_id: commit id for which to list nodes
493 :param root_path: root path to list
493 :param root_path: root path to list
494 :param flat: return as a list, if False returns a dict with description
494 :param flat: return as a list, if False returns a dict with description
495 :param max_file_bytes: will not return file contents over this limit
495 :param max_file_bytes: will not return file contents over this limit
496
496
497 """
497 """
498 _files = list()
498 _files = list()
499 _dirs = list()
499 _dirs = list()
500 try:
500 try:
501 _repo = self._get_repo(repo_name)
501 _repo = self._get_repo(repo_name)
502 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
502 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
503 root_path = root_path.lstrip('/')
503 root_path = root_path.lstrip('/')
504 for __, dirs, files in commit.walk(root_path):
504 for __, dirs, files in commit.walk(root_path):
505 for f in files:
505 for f in files:
506 _content = None
506 _content = None
507 _data = f.unicode_path
507 _data = f.unicode_path
508 over_size_limit = (max_file_bytes is not None
508 over_size_limit = (max_file_bytes is not None
509 and f.size > max_file_bytes)
509 and f.size > max_file_bytes)
510
510
511 if not flat:
511 if not flat:
512 _data = {
512 _data = {
513 "name": h.escape(f.unicode_path),
513 "name": h.escape(f.unicode_path),
514 "type": "file",
514 "type": "file",
515 }
515 }
516 if extended_info:
516 if extended_info:
517 _data.update({
517 _data.update({
518 "md5": f.md5,
518 "md5": f.md5,
519 "binary": f.is_binary,
519 "binary": f.is_binary,
520 "size": f.size,
520 "size": f.size,
521 "extension": f.extension,
521 "extension": f.extension,
522 "mimetype": f.mimetype,
522 "mimetype": f.mimetype,
523 "lines": f.lines()[0]
523 "lines": f.lines()[0]
524 })
524 })
525
525
526 if content:
526 if content:
527 full_content = None
527 full_content = None
528 if not f.is_binary and not over_size_limit:
528 if not f.is_binary and not over_size_limit:
529 full_content = safe_str(f.content)
529 full_content = safe_str(f.content)
530
530
531 _data.update({
531 _data.update({
532 "content": full_content,
532 "content": full_content,
533 })
533 })
534 _files.append(_data)
534 _files.append(_data)
535 for d in dirs:
535 for d in dirs:
536 _data = d.unicode_path
536 _data = d.unicode_path
537 if not flat:
537 if not flat:
538 _data = {
538 _data = {
539 "name": h.escape(d.unicode_path),
539 "name": h.escape(d.unicode_path),
540 "type": "dir",
540 "type": "dir",
541 }
541 }
542 if extended_info:
542 if extended_info:
543 _data.update({
543 _data.update({
544 "md5": None,
544 "md5": None,
545 "binary": None,
545 "binary": None,
546 "size": None,
546 "size": None,
547 "extension": None,
547 "extension": None,
548 })
548 })
549 if content:
549 if content:
550 _data.update({
550 _data.update({
551 "content": None
551 "content": None
552 })
552 })
553 _dirs.append(_data)
553 _dirs.append(_data)
554 except RepositoryError:
554 except RepositoryError:
555 log.debug("Exception in get_nodes", exc_info=True)
555 log.debug("Exception in get_nodes", exc_info=True)
556 raise
556 raise
557
557
558 return _dirs, _files
558 return _dirs, _files
559
559
560 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
560 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
561 author=None, trigger_push_hook=True):
561 author=None, trigger_push_hook=True):
562 """
562 """
563 Commits given multiple nodes into repo
563 Commits given multiple nodes into repo
564
564
565 :param user: RhodeCode User object or user_id, the commiter
565 :param user: RhodeCode User object or user_id, the commiter
566 :param repo: RhodeCode Repository object
566 :param repo: RhodeCode Repository object
567 :param message: commit message
567 :param message: commit message
568 :param nodes: mapping {filename:{'content':content},...}
568 :param nodes: mapping {filename:{'content':content},...}
569 :param parent_commit: parent commit, can be empty than it's
569 :param parent_commit: parent commit, can be empty than it's
570 initial commit
570 initial commit
571 :param author: author of commit, cna be different that commiter
571 :param author: author of commit, cna be different that commiter
572 only for git
572 only for git
573 :param trigger_push_hook: trigger push hooks
573 :param trigger_push_hook: trigger push hooks
574
574
575 :returns: new commited commit
575 :returns: new commited commit
576 """
576 """
577
577
578 user = self._get_user(user)
578 user = self._get_user(user)
579 scm_instance = repo.scm_instance(cache=False)
579 scm_instance = repo.scm_instance(cache=False)
580
580
581 processed_nodes = []
581 processed_nodes = []
582 for f_path in nodes:
582 for f_path in nodes:
583 f_path = self._sanitize_path(f_path)
583 f_path = self._sanitize_path(f_path)
584 content = nodes[f_path]['content']
584 content = nodes[f_path]['content']
585 f_path = safe_str(f_path)
585 f_path = safe_str(f_path)
586 # decoding here will force that we have proper encoded values
586 # decoding here will force that we have proper encoded values
587 # in any other case this will throw exceptions and deny commit
587 # in any other case this will throw exceptions and deny commit
588 if isinstance(content, (basestring,)):
588 if isinstance(content, (basestring,)):
589 content = safe_str(content)
589 content = safe_str(content)
590 elif isinstance(content, (file, cStringIO.OutputType,)):
590 elif isinstance(content, (file, cStringIO.OutputType,)):
591 content = content.read()
591 content = content.read()
592 else:
592 else:
593 raise Exception('Content is of unrecognized type %s' % (
593 raise Exception('Content is of unrecognized type %s' % (
594 type(content)
594 type(content)
595 ))
595 ))
596 processed_nodes.append((f_path, content))
596 processed_nodes.append((f_path, content))
597
597
598 message = safe_unicode(message)
598 message = safe_unicode(message)
599 commiter = user.full_contact
599 commiter = user.full_contact
600 author = safe_unicode(author) if author else commiter
600 author = safe_unicode(author) if author else commiter
601
601
602 imc = scm_instance.in_memory_commit
602 imc = scm_instance.in_memory_commit
603
603
604 if not parent_commit:
604 if not parent_commit:
605 parent_commit = EmptyCommit(alias=scm_instance.alias)
605 parent_commit = EmptyCommit(alias=scm_instance.alias)
606
606
607 if isinstance(parent_commit, EmptyCommit):
607 if isinstance(parent_commit, EmptyCommit):
608 # EmptyCommit means we we're editing empty repository
608 # EmptyCommit means we we're editing empty repository
609 parents = None
609 parents = None
610 else:
610 else:
611 parents = [parent_commit]
611 parents = [parent_commit]
612 # add multiple nodes
612 # add multiple nodes
613 for path, content in processed_nodes:
613 for path, content in processed_nodes:
614 imc.add(FileNode(path, content=content))
614 imc.add(FileNode(path, content=content))
615 # TODO: handle pre push scenario
615 # TODO: handle pre push scenario
616 tip = imc.commit(message=message,
616 tip = imc.commit(message=message,
617 author=author,
617 author=author,
618 parents=parents,
618 parents=parents,
619 branch=parent_commit.branch)
619 branch=parent_commit.branch)
620
620
621 self.mark_for_invalidation(repo.repo_name)
621 self.mark_for_invalidation(repo.repo_name)
622 if trigger_push_hook:
622 if trigger_push_hook:
623 hooks_utils.trigger_post_push_hook(
623 hooks_utils.trigger_post_push_hook(
624 username=user.username, action='push_local',
624 username=user.username, action='push_local',
625 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
625 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
626 commit_ids=[tip.raw_id])
626 commit_ids=[tip.raw_id])
627 return tip
627 return tip
628
628
629 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
629 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
630 author=None, trigger_push_hook=True):
630 author=None, trigger_push_hook=True):
631 user = self._get_user(user)
631 user = self._get_user(user)
632 scm_instance = repo.scm_instance(cache=False)
632 scm_instance = repo.scm_instance(cache=False)
633
633
634 message = safe_unicode(message)
634 message = safe_unicode(message)
635 commiter = user.full_contact
635 commiter = user.full_contact
636 author = safe_unicode(author) if author else commiter
636 author = safe_unicode(author) if author else commiter
637
637
638 imc = scm_instance.in_memory_commit
638 imc = scm_instance.in_memory_commit
639
639
640 if not parent_commit:
640 if not parent_commit:
641 parent_commit = EmptyCommit(alias=scm_instance.alias)
641 parent_commit = EmptyCommit(alias=scm_instance.alias)
642
642
643 if isinstance(parent_commit, EmptyCommit):
643 if isinstance(parent_commit, EmptyCommit):
644 # EmptyCommit means we we're editing empty repository
644 # EmptyCommit means we we're editing empty repository
645 parents = None
645 parents = None
646 else:
646 else:
647 parents = [parent_commit]
647 parents = [parent_commit]
648
648
649 # add multiple nodes
649 # add multiple nodes
650 for _filename, data in nodes.items():
650 for _filename, data in nodes.items():
651 # new filename, can be renamed from the old one, also sanitaze
651 # new filename, can be renamed from the old one, also sanitaze
652 # the path for any hack around relative paths like ../../ etc.
652 # the path for any hack around relative paths like ../../ etc.
653 filename = self._sanitize_path(data['filename'])
653 filename = self._sanitize_path(data['filename'])
654 old_filename = self._sanitize_path(_filename)
654 old_filename = self._sanitize_path(_filename)
655 content = data['content']
655 content = data['content']
656
656
657 filenode = FileNode(old_filename, content=content)
657 filenode = FileNode(old_filename, content=content)
658 op = data['op']
658 op = data['op']
659 if op == 'add':
659 if op == 'add':
660 imc.add(filenode)
660 imc.add(filenode)
661 elif op == 'del':
661 elif op == 'del':
662 imc.remove(filenode)
662 imc.remove(filenode)
663 elif op == 'mod':
663 elif op == 'mod':
664 if filename != old_filename:
664 if filename != old_filename:
665 # TODO: handle renames more efficient, needs vcs lib
665 # TODO: handle renames more efficient, needs vcs lib
666 # changes
666 # changes
667 imc.remove(filenode)
667 imc.remove(filenode)
668 imc.add(FileNode(filename, content=content))
668 imc.add(FileNode(filename, content=content))
669 else:
669 else:
670 imc.change(filenode)
670 imc.change(filenode)
671
671
672 try:
672 try:
673 # TODO: handle pre push scenario
673 # TODO: handle pre push scenario
674 # commit changes
674 # commit changes
675 tip = imc.commit(message=message,
675 tip = imc.commit(message=message,
676 author=author,
676 author=author,
677 parents=parents,
677 parents=parents,
678 branch=parent_commit.branch)
678 branch=parent_commit.branch)
679 except NodeNotChangedError:
679 except NodeNotChangedError:
680 raise
680 raise
681 except Exception as e:
681 except Exception as e:
682 log.exception("Unexpected exception during call to imc.commit")
682 log.exception("Unexpected exception during call to imc.commit")
683 raise IMCCommitError(str(e))
683 raise IMCCommitError(str(e))
684 finally:
684 finally:
685 # always clear caches, if commit fails we want fresh object also
685 # always clear caches, if commit fails we want fresh object also
686 self.mark_for_invalidation(repo.repo_name)
686 self.mark_for_invalidation(repo.repo_name)
687
687
688 if trigger_push_hook:
688 if trigger_push_hook:
689 hooks_utils.trigger_post_push_hook(
689 hooks_utils.trigger_post_push_hook(
690 username=user.username, action='push_local',
690 username=user.username, action='push_local',
691 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
691 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
692 commit_ids=[tip.raw_id])
692 commit_ids=[tip.raw_id])
693
693
694 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
694 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
695 author=None, trigger_push_hook=True):
695 author=None, trigger_push_hook=True):
696 """
696 """
697 Deletes given multiple nodes into `repo`
697 Deletes given multiple nodes into `repo`
698
698
699 :param user: RhodeCode User object or user_id, the committer
699 :param user: RhodeCode User object or user_id, the committer
700 :param repo: RhodeCode Repository object
700 :param repo: RhodeCode Repository object
701 :param message: commit message
701 :param message: commit message
702 :param nodes: mapping {filename:{'content':content},...}
702 :param nodes: mapping {filename:{'content':content},...}
703 :param parent_commit: parent commit, can be empty than it's initial
703 :param parent_commit: parent commit, can be empty than it's initial
704 commit
704 commit
705 :param author: author of commit, cna be different that commiter only
705 :param author: author of commit, cna be different that commiter only
706 for git
706 for git
707 :param trigger_push_hook: trigger push hooks
707 :param trigger_push_hook: trigger push hooks
708
708
709 :returns: new commit after deletion
709 :returns: new commit after deletion
710 """
710 """
711
711
712 user = self._get_user(user)
712 user = self._get_user(user)
713 scm_instance = repo.scm_instance(cache=False)
713 scm_instance = repo.scm_instance(cache=False)
714
714
715 processed_nodes = []
715 processed_nodes = []
716 for f_path in nodes:
716 for f_path in nodes:
717 f_path = self._sanitize_path(f_path)
717 f_path = self._sanitize_path(f_path)
718 # content can be empty but for compatabilty it allows same dicts
718 # content can be empty but for compatabilty it allows same dicts
719 # structure as add_nodes
719 # structure as add_nodes
720 content = nodes[f_path].get('content')
720 content = nodes[f_path].get('content')
721 processed_nodes.append((f_path, content))
721 processed_nodes.append((f_path, content))
722
722
723 message = safe_unicode(message)
723 message = safe_unicode(message)
724 commiter = user.full_contact
724 commiter = user.full_contact
725 author = safe_unicode(author) if author else commiter
725 author = safe_unicode(author) if author else commiter
726
726
727 imc = scm_instance.in_memory_commit
727 imc = scm_instance.in_memory_commit
728
728
729 if not parent_commit:
729 if not parent_commit:
730 parent_commit = EmptyCommit(alias=scm_instance.alias)
730 parent_commit = EmptyCommit(alias=scm_instance.alias)
731
731
732 if isinstance(parent_commit, EmptyCommit):
732 if isinstance(parent_commit, EmptyCommit):
733 # EmptyCommit means we we're editing empty repository
733 # EmptyCommit means we we're editing empty repository
734 parents = None
734 parents = None
735 else:
735 else:
736 parents = [parent_commit]
736 parents = [parent_commit]
737 # add multiple nodes
737 # add multiple nodes
738 for path, content in processed_nodes:
738 for path, content in processed_nodes:
739 imc.remove(FileNode(path, content=content))
739 imc.remove(FileNode(path, content=content))
740
740
741 # TODO: handle pre push scenario
741 # TODO: handle pre push scenario
742 tip = imc.commit(message=message,
742 tip = imc.commit(message=message,
743 author=author,
743 author=author,
744 parents=parents,
744 parents=parents,
745 branch=parent_commit.branch)
745 branch=parent_commit.branch)
746
746
747 self.mark_for_invalidation(repo.repo_name)
747 self.mark_for_invalidation(repo.repo_name)
748 if trigger_push_hook:
748 if trigger_push_hook:
749 hooks_utils.trigger_post_push_hook(
749 hooks_utils.trigger_post_push_hook(
750 username=user.username, action='push_local',
750 username=user.username, action='push_local',
751 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
751 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
752 commit_ids=[tip.raw_id])
752 commit_ids=[tip.raw_id])
753 return tip
753 return tip
754
754
755 def strip(self, repo, commit_id, branch):
755 def strip(self, repo, commit_id, branch):
756 scm_instance = repo.scm_instance(cache=False)
756 scm_instance = repo.scm_instance(cache=False)
757 scm_instance.config.clear_section('hooks')
757 scm_instance.config.clear_section('hooks')
758 scm_instance.strip(commit_id, branch)
758 scm_instance.strip(commit_id, branch)
759 self.mark_for_invalidation(repo.repo_name)
759 self.mark_for_invalidation(repo.repo_name)
760
760
761 def get_unread_journal(self):
761 def get_unread_journal(self):
762 return self.sa.query(UserLog).count()
762 return self.sa.query(UserLog).count()
763
763
764 def get_repo_landing_revs(self, translator, repo=None):
764 def get_repo_landing_revs(self, translator, repo=None):
765 """
765 """
766 Generates select option with tags branches and bookmarks (for hg only)
766 Generates select option with tags branches and bookmarks (for hg only)
767 grouped by type
767 grouped by type
768
768
769 :param repo:
769 :param repo:
770 """
770 """
771 _ = translator
771 _ = translator
772 repo = self._get_repo(repo)
772 repo = self._get_repo(repo)
773
773
774 hist_l = [
774 hist_l = [
775 ['rev:tip', _('latest tip')]
775 ['rev:tip', _('latest tip')]
776 ]
776 ]
777 choices = [
777 choices = [
778 'rev:tip'
778 'rev:tip'
779 ]
779 ]
780
780
781 if not repo:
781 if not repo:
782 return choices, hist_l
782 return choices, hist_l
783
783
784 repo = repo.scm_instance()
784 repo = repo.scm_instance()
785
785
786 branches_group = (
786 branches_group = (
787 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
787 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
788 for b in repo.branches],
788 for b in repo.branches],
789 _("Branches"))
789 _("Branches"))
790 hist_l.append(branches_group)
790 hist_l.append(branches_group)
791 choices.extend([x[0] for x in branches_group[0]])
791 choices.extend([x[0] for x in branches_group[0]])
792
792
793 if repo.alias == 'hg':
793 if repo.alias == 'hg':
794 bookmarks_group = (
794 bookmarks_group = (
795 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
795 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
796 for b in repo.bookmarks],
796 for b in repo.bookmarks],
797 _("Bookmarks"))
797 _("Bookmarks"))
798 hist_l.append(bookmarks_group)
798 hist_l.append(bookmarks_group)
799 choices.extend([x[0] for x in bookmarks_group[0]])
799 choices.extend([x[0] for x in bookmarks_group[0]])
800
800
801 tags_group = (
801 tags_group = (
802 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
802 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
803 for t in repo.tags],
803 for t in repo.tags],
804 _("Tags"))
804 _("Tags"))
805 hist_l.append(tags_group)
805 hist_l.append(tags_group)
806 choices.extend([x[0] for x in tags_group[0]])
806 choices.extend([x[0] for x in tags_group[0]])
807
807
808 return choices, hist_l
808 return choices, hist_l
809
809
810 def install_git_hook(self, repo, force_create=False):
811 """
812 Creates a rhodecode hook inside a git repository
813
814 :param repo: Instance of VCS repo
815 :param force_create: Create even if same name hook exists
816 """
817
818 loc = os.path.join(repo.path, 'hooks')
819 if not repo.bare:
820 loc = os.path.join(repo.path, '.git', 'hooks')
821 if not os.path.isdir(loc):
822 os.makedirs(loc, mode=0777)
823
824 tmpl_post = pkg_resources.resource_string(
825 'rhodecode', '/'.join(
826 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
827 tmpl_pre = pkg_resources.resource_string(
828 'rhodecode', '/'.join(
829 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
830
831 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
832 _hook_file = os.path.join(loc, '%s-receive' % h_type)
833 log.debug('Installing git hook in repo %s', repo)
834 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
835
836 if _rhodecode_hook or force_create:
837 log.debug('writing %s hook file !', h_type)
838 try:
839 with open(_hook_file, 'wb') as f:
840 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
841 tmpl = tmpl.replace('_ENV_', sys.executable)
842 f.write(tmpl)
843 os.chmod(_hook_file, 0755)
844 except IOError:
845 log.exception('error writing hook file %s', _hook_file)
846 else:
847 log.debug('skipping writing hook file')
848
849 def install_svn_hooks(self, repo, force_create=False):
850 """
851 Creates rhodecode hooks inside a svn repository
852
853 :param repo: Instance of VCS repo
854 :param force_create: Create even if same name hook exists
855 """
856 hooks_path = os.path.join(repo.path, 'hooks')
857 if not os.path.isdir(hooks_path):
858 os.makedirs(hooks_path)
859 post_commit_tmpl = pkg_resources.resource_string(
860 'rhodecode', '/'.join(
861 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
862 pre_commit_template = pkg_resources.resource_string(
863 'rhodecode', '/'.join(
864 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
865 templates = {
866 'post-commit': post_commit_tmpl,
867 'pre-commit': pre_commit_template
868 }
869 for filename in templates:
870 _hook_file = os.path.join(hooks_path, filename)
871 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
872 if _rhodecode_hook or force_create:
873 log.debug('writing %s hook file !', filename)
874 template = templates[filename]
875 try:
876 with open(_hook_file, 'wb') as f:
877 template = template.replace(
878 '_TMPL_', rhodecode.__version__)
879 template = template.replace('_ENV_', sys.executable)
880 f.write(template)
881 os.chmod(_hook_file, 0755)
882 except IOError:
883 log.exception('error writing hook file %s', filename)
884 else:
885 log.debug('skipping writing hook file')
886
887 def install_hooks(self, repo, repo_type):
888 if repo_type == 'git':
889 self.install_git_hook(repo)
890 elif repo_type == 'svn':
891 self.install_svn_hooks(repo)
892
893 def get_server_info(self, environ=None):
810 def get_server_info(self, environ=None):
894 server_info = get_system_info(environ)
811 server_info = get_system_info(environ)
895 return server_info
812 return server_info
896
897
898 def _check_rhodecode_hook(hook_path):
899 """
900 Check if the hook was created by RhodeCode
901 """
902 if not os.path.exists(hook_path):
903 return True
904
905 log.debug('hook exists, checking if it is from rhodecode')
906 hook_content = _read_hook(hook_path)
907 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
908 if matches:
909 try:
910 version = matches.groups()[0]
911 log.debug('got %s, it is rhodecode', version)
912 return True
913 except Exception:
914 log.exception("Exception while reading the hook version.")
915
916 return False
917
918
919 def _read_hook(hook_path):
920 with open(hook_path, 'rb') as f:
921 content = f.read()
922 return content
@@ -1,499 +1,472 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import base64
21 import base64
22
22
23 import mock
23 import mock
24 import pytest
24 import pytest
25
25
26 from rhodecode.lib.utils2 import AttributeDict
26 from rhodecode.lib.utils2 import AttributeDict
27 from rhodecode.tests.utils import CustomTestApp
27 from rhodecode.tests.utils import CustomTestApp
28
28
29 from rhodecode.lib.caching_query import FromCache
29 from rhodecode.lib.caching_query import FromCache
30 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
30 from rhodecode.lib.hooks_daemon import DummyHooksCallbackDaemon
31 from rhodecode.lib.middleware import simplevcs
31 from rhodecode.lib.middleware import simplevcs
32 from rhodecode.lib.middleware.https_fixup import HttpsFixup
32 from rhodecode.lib.middleware.https_fixup import HttpsFixup
33 from rhodecode.lib.middleware.utils import scm_app_http
33 from rhodecode.lib.middleware.utils import scm_app_http
34 from rhodecode.model.db import User, _hash_key
34 from rhodecode.model.db import User, _hash_key
35 from rhodecode.model.meta import Session
35 from rhodecode.model.meta import Session
36 from rhodecode.tests import (
36 from rhodecode.tests import (
37 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
37 HG_REPO, TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
38 from rhodecode.tests.lib.middleware import mock_scm_app
38 from rhodecode.tests.lib.middleware import mock_scm_app
39
39
40
40
41 class StubVCSController(simplevcs.SimpleVCS):
41 class StubVCSController(simplevcs.SimpleVCS):
42
42
43 SCM = 'hg'
43 SCM = 'hg'
44 stub_response_body = tuple()
44 stub_response_body = tuple()
45
45
46 def __init__(self, *args, **kwargs):
46 def __init__(self, *args, **kwargs):
47 super(StubVCSController, self).__init__(*args, **kwargs)
47 super(StubVCSController, self).__init__(*args, **kwargs)
48 self._action = 'pull'
48 self._action = 'pull'
49 self._is_shadow_repo_dir = True
49 self._is_shadow_repo_dir = True
50 self._name = HG_REPO
50 self._name = HG_REPO
51 self.set_repo_names(None)
51 self.set_repo_names(None)
52
52
53 @property
53 @property
54 def is_shadow_repo_dir(self):
54 def is_shadow_repo_dir(self):
55 return self._is_shadow_repo_dir
55 return self._is_shadow_repo_dir
56
56
57 def _get_repository_name(self, environ):
57 def _get_repository_name(self, environ):
58 return self._name
58 return self._name
59
59
60 def _get_action(self, environ):
60 def _get_action(self, environ):
61 return self._action
61 return self._action
62
62
63 def _create_wsgi_app(self, repo_path, repo_name, config):
63 def _create_wsgi_app(self, repo_path, repo_name, config):
64 def fake_app(environ, start_response):
64 def fake_app(environ, start_response):
65 headers = [
65 headers = [
66 ('Http-Accept', 'application/mercurial')
66 ('Http-Accept', 'application/mercurial')
67 ]
67 ]
68 start_response('200 OK', headers)
68 start_response('200 OK', headers)
69 return self.stub_response_body
69 return self.stub_response_body
70 return fake_app
70 return fake_app
71
71
72 def _create_config(self, extras, repo_name):
72 def _create_config(self, extras, repo_name):
73 return None
73 return None
74
74
75
75
76 @pytest.fixture
76 @pytest.fixture
77 def vcscontroller(baseapp, config_stub, request_stub):
77 def vcscontroller(baseapp, config_stub, request_stub):
78 config_stub.testing_securitypolicy()
78 config_stub.testing_securitypolicy()
79 config_stub.include('rhodecode.authentication')
79 config_stub.include('rhodecode.authentication')
80
80
81 controller = StubVCSController(
81 controller = StubVCSController(
82 baseapp.config.get_settings(), request_stub.registry)
82 baseapp.config.get_settings(), request_stub.registry)
83 app = HttpsFixup(controller, baseapp.config.get_settings())
83 app = HttpsFixup(controller, baseapp.config.get_settings())
84 app = CustomTestApp(app)
84 app = CustomTestApp(app)
85
85
86 _remove_default_user_from_query_cache()
86 _remove_default_user_from_query_cache()
87
87
88 # Sanity checks that things are set up correctly
88 # Sanity checks that things are set up correctly
89 app.get('/' + HG_REPO, status=200)
89 app.get('/' + HG_REPO, status=200)
90
90
91 app.controller = controller
91 app.controller = controller
92 return app
92 return app
93
93
94
94
95 def _remove_default_user_from_query_cache():
95 def _remove_default_user_from_query_cache():
96 user = User.get_default_user(cache=True)
96 user = User.get_default_user(cache=True)
97 query = Session().query(User).filter(User.username == user.username)
97 query = Session().query(User).filter(User.username == user.username)
98 query = query.options(
98 query = query.options(
99 FromCache("sql_cache_short", "get_user_%s" % _hash_key(user.username)))
99 FromCache("sql_cache_short", "get_user_%s" % _hash_key(user.username)))
100 query.invalidate()
100 query.invalidate()
101 Session().expire(user)
101 Session().expire(user)
102
102
103
103
104 def test_handles_exceptions_during_permissions_checks(
104 def test_handles_exceptions_during_permissions_checks(
105 vcscontroller, disable_anonymous_user):
105 vcscontroller, disable_anonymous_user):
106 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
106 user_and_pass = '%s:%s' % (TEST_USER_ADMIN_LOGIN, TEST_USER_ADMIN_PASS)
107 auth_password = base64.encodestring(user_and_pass).strip()
107 auth_password = base64.encodestring(user_and_pass).strip()
108 extra_environ = {
108 extra_environ = {
109 'AUTH_TYPE': 'Basic',
109 'AUTH_TYPE': 'Basic',
110 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
110 'HTTP_AUTHORIZATION': 'Basic %s' % auth_password,
111 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
111 'REMOTE_USER': TEST_USER_ADMIN_LOGIN,
112 }
112 }
113
113
114 # Verify that things are hooked up correctly
114 # Verify that things are hooked up correctly
115 vcscontroller.get('/', status=200, extra_environ=extra_environ)
115 vcscontroller.get('/', status=200, extra_environ=extra_environ)
116
116
117 # Simulate trouble during permission checks
117 # Simulate trouble during permission checks
118 with mock.patch('rhodecode.model.db.User.get_by_username',
118 with mock.patch('rhodecode.model.db.User.get_by_username',
119 side_effect=Exception) as get_user:
119 side_effect=Exception) as get_user:
120 # Verify that a correct 500 is returned and check that the expected
120 # Verify that a correct 500 is returned and check that the expected
121 # code path was hit.
121 # code path was hit.
122 vcscontroller.get('/', status=500, extra_environ=extra_environ)
122 vcscontroller.get('/', status=500, extra_environ=extra_environ)
123 assert get_user.called
123 assert get_user.called
124
124
125
125
126 def test_returns_forbidden_if_no_anonymous_access(
126 def test_returns_forbidden_if_no_anonymous_access(
127 vcscontroller, disable_anonymous_user):
127 vcscontroller, disable_anonymous_user):
128 vcscontroller.get('/', status=401)
128 vcscontroller.get('/', status=401)
129
129
130
130
131 class StubFailVCSController(simplevcs.SimpleVCS):
131 class StubFailVCSController(simplevcs.SimpleVCS):
132 def _handle_request(self, environ, start_response):
132 def _handle_request(self, environ, start_response):
133 raise Exception("BOOM")
133 raise Exception("BOOM")
134
134
135
135
136 @pytest.fixture(scope='module')
136 @pytest.fixture(scope='module')
137 def fail_controller(baseapp):
137 def fail_controller(baseapp):
138 controller = StubFailVCSController(
138 controller = StubFailVCSController(
139 baseapp.config.get_settings(), baseapp.config)
139 baseapp.config.get_settings(), baseapp.config)
140 controller = HttpsFixup(controller, baseapp.config.get_settings())
140 controller = HttpsFixup(controller, baseapp.config.get_settings())
141 controller = CustomTestApp(controller)
141 controller = CustomTestApp(controller)
142 return controller
142 return controller
143
143
144
144
145 def test_handles_exceptions_as_internal_server_error(fail_controller):
145 def test_handles_exceptions_as_internal_server_error(fail_controller):
146 fail_controller.get('/', status=500)
146 fail_controller.get('/', status=500)
147
147
148
148
149 def test_provides_traceback_for_appenlight(fail_controller):
149 def test_provides_traceback_for_appenlight(fail_controller):
150 response = fail_controller.get(
150 response = fail_controller.get(
151 '/', status=500, extra_environ={'appenlight.client': 'fake'})
151 '/', status=500, extra_environ={'appenlight.client': 'fake'})
152 assert 'appenlight.__traceback' in response.request.environ
152 assert 'appenlight.__traceback' in response.request.environ
153
153
154
154
155 def test_provides_utils_scm_app_as_scm_app_by_default(baseapp, request_stub):
155 def test_provides_utils_scm_app_as_scm_app_by_default(baseapp, request_stub):
156 controller = StubVCSController(baseapp.config.get_settings(), request_stub.registry)
156 controller = StubVCSController(baseapp.config.get_settings(), request_stub.registry)
157 assert controller.scm_app is scm_app_http
157 assert controller.scm_app is scm_app_http
158
158
159
159
160 def test_allows_to_override_scm_app_via_config(baseapp, request_stub):
160 def test_allows_to_override_scm_app_via_config(baseapp, request_stub):
161 config = baseapp.config.get_settings().copy()
161 config = baseapp.config.get_settings().copy()
162 config['vcs.scm_app_implementation'] = (
162 config['vcs.scm_app_implementation'] = (
163 'rhodecode.tests.lib.middleware.mock_scm_app')
163 'rhodecode.tests.lib.middleware.mock_scm_app')
164 controller = StubVCSController(config, request_stub.registry)
164 controller = StubVCSController(config, request_stub.registry)
165 assert controller.scm_app is mock_scm_app
165 assert controller.scm_app is mock_scm_app
166
166
167
167
168 @pytest.mark.parametrize('query_string, expected', [
168 @pytest.mark.parametrize('query_string, expected', [
169 ('cmd=stub_command', True),
169 ('cmd=stub_command', True),
170 ('cmd=listkeys', False),
170 ('cmd=listkeys', False),
171 ])
171 ])
172 def test_should_check_locking(query_string, expected):
172 def test_should_check_locking(query_string, expected):
173 result = simplevcs._should_check_locking(query_string)
173 result = simplevcs._should_check_locking(query_string)
174 assert result == expected
174 assert result == expected
175
175
176
176
177 class TestShadowRepoRegularExpression(object):
177 class TestShadowRepoRegularExpression(object):
178 pr_segment = 'pull-request'
178 pr_segment = 'pull-request'
179 shadow_segment = 'repository'
179 shadow_segment = 'repository'
180
180
181 @pytest.mark.parametrize('url, expected', [
181 @pytest.mark.parametrize('url, expected', [
182 # repo with/without groups
182 # repo with/without groups
183 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
183 ('My-Repo/{pr_segment}/1/{shadow_segment}', True),
184 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
184 ('Group/My-Repo/{pr_segment}/2/{shadow_segment}', True),
185 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
185 ('Group/Sub-Group/My-Repo/{pr_segment}/3/{shadow_segment}', True),
186 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
186 ('Group/Sub-Group1/Sub-Group2/My-Repo/{pr_segment}/3/{shadow_segment}', True),
187
187
188 # pull request ID
188 # pull request ID
189 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
189 ('MyRepo/{pr_segment}/1/{shadow_segment}', True),
190 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
190 ('MyRepo/{pr_segment}/1234567890/{shadow_segment}', True),
191 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
191 ('MyRepo/{pr_segment}/-1/{shadow_segment}', False),
192 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
192 ('MyRepo/{pr_segment}/invalid/{shadow_segment}', False),
193
193
194 # unicode
194 # unicode
195 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
195 (u'Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
196 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
196 (u'Sp€çîál-Gröüp/Sp€çîál-Repö/{pr_segment}/1/{shadow_segment}', True),
197
197
198 # trailing/leading slash
198 # trailing/leading slash
199 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
199 ('/My-Repo/{pr_segment}/1/{shadow_segment}', False),
200 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
200 ('My-Repo/{pr_segment}/1/{shadow_segment}/', False),
201 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
201 ('/My-Repo/{pr_segment}/1/{shadow_segment}/', False),
202
202
203 # misc
203 # misc
204 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
204 ('My-Repo/{pr_segment}/1/{shadow_segment}/extra', False),
205 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
205 ('My-Repo/{pr_segment}/1/{shadow_segment}extra', False),
206 ])
206 ])
207 def test_shadow_repo_regular_expression(self, url, expected):
207 def test_shadow_repo_regular_expression(self, url, expected):
208 from rhodecode.lib.middleware.simplevcs import SimpleVCS
208 from rhodecode.lib.middleware.simplevcs import SimpleVCS
209 url = url.format(
209 url = url.format(
210 pr_segment=self.pr_segment,
210 pr_segment=self.pr_segment,
211 shadow_segment=self.shadow_segment)
211 shadow_segment=self.shadow_segment)
212 match_obj = SimpleVCS.shadow_repo_re.match(url)
212 match_obj = SimpleVCS.shadow_repo_re.match(url)
213 assert (match_obj is not None) == expected
213 assert (match_obj is not None) == expected
214
214
215
215
216 @pytest.mark.backends('git', 'hg')
216 @pytest.mark.backends('git', 'hg')
217 class TestShadowRepoExposure(object):
217 class TestShadowRepoExposure(object):
218
218
219 def test_pull_on_shadow_repo_propagates_to_wsgi_app(
219 def test_pull_on_shadow_repo_propagates_to_wsgi_app(
220 self, baseapp, request_stub):
220 self, baseapp, request_stub):
221 """
221 """
222 Check that a pull action to a shadow repo is propagated to the
222 Check that a pull action to a shadow repo is propagated to the
223 underlying wsgi app.
223 underlying wsgi app.
224 """
224 """
225 controller = StubVCSController(
225 controller = StubVCSController(
226 baseapp.config.get_settings(), request_stub.registry)
226 baseapp.config.get_settings(), request_stub.registry)
227 controller._check_ssl = mock.Mock()
227 controller._check_ssl = mock.Mock()
228 controller.is_shadow_repo = True
228 controller.is_shadow_repo = True
229 controller._action = 'pull'
229 controller._action = 'pull'
230 controller._is_shadow_repo_dir = True
230 controller._is_shadow_repo_dir = True
231 controller.stub_response_body = 'dummy body value'
231 controller.stub_response_body = 'dummy body value'
232 controller._get_default_cache_ttl = mock.Mock(
232 controller._get_default_cache_ttl = mock.Mock(
233 return_value=(False, 0))
233 return_value=(False, 0))
234
234
235 environ_stub = {
235 environ_stub = {
236 'HTTP_HOST': 'test.example.com',
236 'HTTP_HOST': 'test.example.com',
237 'HTTP_ACCEPT': 'application/mercurial',
237 'HTTP_ACCEPT': 'application/mercurial',
238 'REQUEST_METHOD': 'GET',
238 'REQUEST_METHOD': 'GET',
239 'wsgi.url_scheme': 'http',
239 'wsgi.url_scheme': 'http',
240 }
240 }
241
241
242 response = controller(environ_stub, mock.Mock())
242 response = controller(environ_stub, mock.Mock())
243 response_body = ''.join(response)
243 response_body = ''.join(response)
244
244
245 # Assert that we got the response from the wsgi app.
245 # Assert that we got the response from the wsgi app.
246 assert response_body == controller.stub_response_body
246 assert response_body == controller.stub_response_body
247
247
248 def test_pull_on_shadow_repo_that_is_missing(self, baseapp, request_stub):
248 def test_pull_on_shadow_repo_that_is_missing(self, baseapp, request_stub):
249 """
249 """
250 Check that a pull action to a shadow repo is propagated to the
250 Check that a pull action to a shadow repo is propagated to the
251 underlying wsgi app.
251 underlying wsgi app.
252 """
252 """
253 controller = StubVCSController(
253 controller = StubVCSController(
254 baseapp.config.get_settings(), request_stub.registry)
254 baseapp.config.get_settings(), request_stub.registry)
255 controller._check_ssl = mock.Mock()
255 controller._check_ssl = mock.Mock()
256 controller.is_shadow_repo = True
256 controller.is_shadow_repo = True
257 controller._action = 'pull'
257 controller._action = 'pull'
258 controller._is_shadow_repo_dir = False
258 controller._is_shadow_repo_dir = False
259 controller.stub_response_body = 'dummy body value'
259 controller.stub_response_body = 'dummy body value'
260 environ_stub = {
260 environ_stub = {
261 'HTTP_HOST': 'test.example.com',
261 'HTTP_HOST': 'test.example.com',
262 'HTTP_ACCEPT': 'application/mercurial',
262 'HTTP_ACCEPT': 'application/mercurial',
263 'REQUEST_METHOD': 'GET',
263 'REQUEST_METHOD': 'GET',
264 'wsgi.url_scheme': 'http',
264 'wsgi.url_scheme': 'http',
265 }
265 }
266
266
267 response = controller(environ_stub, mock.Mock())
267 response = controller(environ_stub, mock.Mock())
268 response_body = ''.join(response)
268 response_body = ''.join(response)
269
269
270 # Assert that we got the response from the wsgi app.
270 # Assert that we got the response from the wsgi app.
271 assert '404 Not Found' in response_body
271 assert '404 Not Found' in response_body
272
272
273 def test_push_on_shadow_repo_raises(self, baseapp, request_stub):
273 def test_push_on_shadow_repo_raises(self, baseapp, request_stub):
274 """
274 """
275 Check that a push action to a shadow repo is aborted.
275 Check that a push action to a shadow repo is aborted.
276 """
276 """
277 controller = StubVCSController(
277 controller = StubVCSController(
278 baseapp.config.get_settings(), request_stub.registry)
278 baseapp.config.get_settings(), request_stub.registry)
279 controller._check_ssl = mock.Mock()
279 controller._check_ssl = mock.Mock()
280 controller.is_shadow_repo = True
280 controller.is_shadow_repo = True
281 controller._action = 'push'
281 controller._action = 'push'
282 controller.stub_response_body = 'dummy body value'
282 controller.stub_response_body = 'dummy body value'
283 environ_stub = {
283 environ_stub = {
284 'HTTP_HOST': 'test.example.com',
284 'HTTP_HOST': 'test.example.com',
285 'HTTP_ACCEPT': 'application/mercurial',
285 'HTTP_ACCEPT': 'application/mercurial',
286 'REQUEST_METHOD': 'GET',
286 'REQUEST_METHOD': 'GET',
287 'wsgi.url_scheme': 'http',
287 'wsgi.url_scheme': 'http',
288 }
288 }
289
289
290 response = controller(environ_stub, mock.Mock())
290 response = controller(environ_stub, mock.Mock())
291 response_body = ''.join(response)
291 response_body = ''.join(response)
292
292
293 assert response_body != controller.stub_response_body
293 assert response_body != controller.stub_response_body
294 # Assert that a 406 error is returned.
294 # Assert that a 406 error is returned.
295 assert '406 Not Acceptable' in response_body
295 assert '406 Not Acceptable' in response_body
296
296
297 def test_set_repo_names_no_shadow(self, baseapp, request_stub):
297 def test_set_repo_names_no_shadow(self, baseapp, request_stub):
298 """
298 """
299 Check that the set_repo_names method sets all names to the one returned
299 Check that the set_repo_names method sets all names to the one returned
300 by the _get_repository_name method on a request to a non shadow repo.
300 by the _get_repository_name method on a request to a non shadow repo.
301 """
301 """
302 environ_stub = {}
302 environ_stub = {}
303 controller = StubVCSController(
303 controller = StubVCSController(
304 baseapp.config.get_settings(), request_stub.registry)
304 baseapp.config.get_settings(), request_stub.registry)
305 controller._name = 'RepoGroup/MyRepo'
305 controller._name = 'RepoGroup/MyRepo'
306 controller.set_repo_names(environ_stub)
306 controller.set_repo_names(environ_stub)
307 assert not controller.is_shadow_repo
307 assert not controller.is_shadow_repo
308 assert (controller.url_repo_name ==
308 assert (controller.url_repo_name ==
309 controller.acl_repo_name ==
309 controller.acl_repo_name ==
310 controller.vcs_repo_name ==
310 controller.vcs_repo_name ==
311 controller._get_repository_name(environ_stub))
311 controller._get_repository_name(environ_stub))
312
312
313 def test_set_repo_names_with_shadow(
313 def test_set_repo_names_with_shadow(
314 self, baseapp, pr_util, config_stub, request_stub):
314 self, baseapp, pr_util, config_stub, request_stub):
315 """
315 """
316 Check that the set_repo_names method sets correct names on a request
316 Check that the set_repo_names method sets correct names on a request
317 to a shadow repo.
317 to a shadow repo.
318 """
318 """
319 from rhodecode.model.pull_request import PullRequestModel
319 from rhodecode.model.pull_request import PullRequestModel
320
320
321 pull_request = pr_util.create_pull_request()
321 pull_request = pr_util.create_pull_request()
322 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
322 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
323 target=pull_request.target_repo.repo_name,
323 target=pull_request.target_repo.repo_name,
324 pr_id=pull_request.pull_request_id,
324 pr_id=pull_request.pull_request_id,
325 pr_segment=TestShadowRepoRegularExpression.pr_segment,
325 pr_segment=TestShadowRepoRegularExpression.pr_segment,
326 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
326 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
327 controller = StubVCSController(
327 controller = StubVCSController(
328 baseapp.config.get_settings(), request_stub.registry)
328 baseapp.config.get_settings(), request_stub.registry)
329 controller._name = shadow_url
329 controller._name = shadow_url
330 controller.set_repo_names({})
330 controller.set_repo_names({})
331
331
332 # Get file system path to shadow repo for assertions.
332 # Get file system path to shadow repo for assertions.
333 workspace_id = PullRequestModel()._workspace_id(pull_request)
333 workspace_id = PullRequestModel()._workspace_id(pull_request)
334 target_vcs = pull_request.target_repo.scm_instance()
334 target_vcs = pull_request.target_repo.scm_instance()
335 vcs_repo_name = target_vcs._get_shadow_repository_path(
335 vcs_repo_name = target_vcs._get_shadow_repository_path(
336 workspace_id)
336 workspace_id)
337
337
338 assert controller.vcs_repo_name == vcs_repo_name
338 assert controller.vcs_repo_name == vcs_repo_name
339 assert controller.url_repo_name == shadow_url
339 assert controller.url_repo_name == shadow_url
340 assert controller.acl_repo_name == pull_request.target_repo.repo_name
340 assert controller.acl_repo_name == pull_request.target_repo.repo_name
341 assert controller.is_shadow_repo
341 assert controller.is_shadow_repo
342
342
343 def test_set_repo_names_with_shadow_but_missing_pr(
343 def test_set_repo_names_with_shadow_but_missing_pr(
344 self, baseapp, pr_util, config_stub, request_stub):
344 self, baseapp, pr_util, config_stub, request_stub):
345 """
345 """
346 Checks that the set_repo_names method enforces matching target repos
346 Checks that the set_repo_names method enforces matching target repos
347 and pull request IDs.
347 and pull request IDs.
348 """
348 """
349 pull_request = pr_util.create_pull_request()
349 pull_request = pr_util.create_pull_request()
350 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
350 shadow_url = '{target}/{pr_segment}/{pr_id}/{shadow_segment}'.format(
351 target=pull_request.target_repo.repo_name,
351 target=pull_request.target_repo.repo_name,
352 pr_id=999999999,
352 pr_id=999999999,
353 pr_segment=TestShadowRepoRegularExpression.pr_segment,
353 pr_segment=TestShadowRepoRegularExpression.pr_segment,
354 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
354 shadow_segment=TestShadowRepoRegularExpression.shadow_segment)
355 controller = StubVCSController(
355 controller = StubVCSController(
356 baseapp.config.get_settings(), request_stub.registry)
356 baseapp.config.get_settings(), request_stub.registry)
357 controller._name = shadow_url
357 controller._name = shadow_url
358 controller.set_repo_names({})
358 controller.set_repo_names({})
359
359
360 assert not controller.is_shadow_repo
360 assert not controller.is_shadow_repo
361 assert (controller.url_repo_name ==
361 assert (controller.url_repo_name ==
362 controller.acl_repo_name ==
362 controller.acl_repo_name ==
363 controller.vcs_repo_name)
363 controller.vcs_repo_name)
364
364
365
365
366 @pytest.mark.usefixtures('baseapp')
366 @pytest.mark.usefixtures('baseapp')
367 class TestGenerateVcsResponse(object):
367 class TestGenerateVcsResponse(object):
368
368
369 def test_ensures_that_start_response_is_called_early_enough(self):
369 def test_ensures_that_start_response_is_called_early_enough(self):
370 self.call_controller_with_response_body(iter(['a', 'b']))
370 self.call_controller_with_response_body(iter(['a', 'b']))
371 assert self.start_response.called
371 assert self.start_response.called
372
372
373 def test_invalidates_cache_after_body_is_consumed(self):
373 def test_invalidates_cache_after_body_is_consumed(self):
374 result = self.call_controller_with_response_body(iter(['a', 'b']))
374 result = self.call_controller_with_response_body(iter(['a', 'b']))
375 assert not self.was_cache_invalidated()
375 assert not self.was_cache_invalidated()
376 # Consume the result
376 # Consume the result
377 list(result)
377 list(result)
378 assert self.was_cache_invalidated()
378 assert self.was_cache_invalidated()
379
379
380 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
381 def test_handles_locking_exception(self, http_locked_rc):
382 result = self.call_controller_with_response_body(
383 self.raise_result_iter(vcs_kind='repo_locked'))
384 assert not http_locked_rc.called
385 # Consume the result
386 list(result)
387 assert http_locked_rc.called
388
389 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPRequirementError')
390 def test_handles_requirement_exception(self, http_requirement):
391 result = self.call_controller_with_response_body(
392 self.raise_result_iter(vcs_kind='requirement'))
393 assert not http_requirement.called
394 # Consume the result
395 list(result)
396 assert http_requirement.called
397
398 @mock.patch('rhodecode.lib.middleware.simplevcs.HTTPLockedRC')
399 def test_handles_locking_exception_in_app_call(self, http_locked_rc):
400 app_factory_patcher = mock.patch.object(
401 StubVCSController, '_create_wsgi_app')
402 with app_factory_patcher as app_factory:
403 app_factory().side_effect = self.vcs_exception()
404 result = self.call_controller_with_response_body(['a'])
405 list(result)
406 assert http_locked_rc.called
407
408 def test_raises_unknown_exceptions(self):
380 def test_raises_unknown_exceptions(self):
409 result = self.call_controller_with_response_body(
381 result = self.call_controller_with_response_body(
410 self.raise_result_iter(vcs_kind='unknown'))
382 self.raise_result_iter(vcs_kind='unknown'))
411 with pytest.raises(Exception):
383 with pytest.raises(Exception):
412 list(result)
384 list(result)
413
385
414 def test_prepare_callback_daemon_is_called(self):
386 def test_prepare_callback_daemon_is_called(self):
415 def side_effect(extras):
387 def side_effect(extras, environ, action, txn_id=None):
416 return DummyHooksCallbackDaemon(), extras
388 return DummyHooksCallbackDaemon(), extras
417
389
418 prepare_patcher = mock.patch.object(
390 prepare_patcher = mock.patch.object(
419 StubVCSController, '_prepare_callback_daemon')
391 StubVCSController, '_prepare_callback_daemon')
420 with prepare_patcher as prepare_mock:
392 with prepare_patcher as prepare_mock:
421 prepare_mock.side_effect = side_effect
393 prepare_mock.side_effect = side_effect
422 self.call_controller_with_response_body(iter(['a', 'b']))
394 self.call_controller_with_response_body(iter(['a', 'b']))
423 assert prepare_mock.called
395 assert prepare_mock.called
424 assert prepare_mock.call_count == 1
396 assert prepare_mock.call_count == 1
425
397
426 def call_controller_with_response_body(self, response_body):
398 def call_controller_with_response_body(self, response_body):
427 settings = {
399 settings = {
428 'base_path': 'fake_base_path',
400 'base_path': 'fake_base_path',
429 'vcs.hooks.protocol': 'http',
401 'vcs.hooks.protocol': 'http',
430 'vcs.hooks.direct_calls': False,
402 'vcs.hooks.direct_calls': False,
431 }
403 }
432 registry = AttributeDict()
404 registry = AttributeDict()
433 controller = StubVCSController(settings, registry)
405 controller = StubVCSController(settings, registry)
434 controller._invalidate_cache = mock.Mock()
406 controller._invalidate_cache = mock.Mock()
435 controller.stub_response_body = response_body
407 controller.stub_response_body = response_body
436 self.start_response = mock.Mock()
408 self.start_response = mock.Mock()
437 result = controller._generate_vcs_response(
409 result = controller._generate_vcs_response(
438 environ={}, start_response=self.start_response,
410 environ={}, start_response=self.start_response,
439 repo_path='fake_repo_path',
411 repo_path='fake_repo_path',
440 extras={}, action='push')
412 extras={}, action='push')
441 self.controller = controller
413 self.controller = controller
442 return result
414 return result
443
415
444 def raise_result_iter(self, vcs_kind='repo_locked'):
416 def raise_result_iter(self, vcs_kind='repo_locked'):
445 """
417 """
446 Simulates an exception due to a vcs raised exception if kind vcs_kind
418 Simulates an exception due to a vcs raised exception if kind vcs_kind
447 """
419 """
448 raise self.vcs_exception(vcs_kind=vcs_kind)
420 raise self.vcs_exception(vcs_kind=vcs_kind)
449 yield "never_reached"
421 yield "never_reached"
450
422
451 def vcs_exception(self, vcs_kind='repo_locked'):
423 def vcs_exception(self, vcs_kind='repo_locked'):
452 locked_exception = Exception('TEST_MESSAGE')
424 locked_exception = Exception('TEST_MESSAGE')
453 locked_exception._vcs_kind = vcs_kind
425 locked_exception._vcs_kind = vcs_kind
454 return locked_exception
426 return locked_exception
455
427
456 def was_cache_invalidated(self):
428 def was_cache_invalidated(self):
457 return self.controller._invalidate_cache.called
429 return self.controller._invalidate_cache.called
458
430
459
431
460 class TestInitializeGenerator(object):
432 class TestInitializeGenerator(object):
461
433
462 def test_drains_first_element(self):
434 def test_drains_first_element(self):
463 gen = self.factory(['__init__', 1, 2])
435 gen = self.factory(['__init__', 1, 2])
464 result = list(gen)
436 result = list(gen)
465 assert result == [1, 2]
437 assert result == [1, 2]
466
438
467 @pytest.mark.parametrize('values', [
439 @pytest.mark.parametrize('values', [
468 [],
440 [],
469 [1, 2],
441 [1, 2],
470 ])
442 ])
471 def test_raises_value_error(self, values):
443 def test_raises_value_error(self, values):
472 with pytest.raises(ValueError):
444 with pytest.raises(ValueError):
473 self.factory(values)
445 self.factory(values)
474
446
475 @simplevcs.initialize_generator
447 @simplevcs.initialize_generator
476 def factory(self, iterable):
448 def factory(self, iterable):
477 for elem in iterable:
449 for elem in iterable:
478 yield elem
450 yield elem
479
451
480
452
481 class TestPrepareHooksDaemon(object):
453 class TestPrepareHooksDaemon(object):
482 def test_calls_imported_prepare_callback_daemon(self, app_settings, request_stub):
454 def test_calls_imported_prepare_callback_daemon(self, app_settings, request_stub):
483 expected_extras = {'extra1': 'value1'}
455 expected_extras = {'extra1': 'value1'}
484 daemon = DummyHooksCallbackDaemon()
456 daemon = DummyHooksCallbackDaemon()
485
457
486 controller = StubVCSController(app_settings, request_stub.registry)
458 controller = StubVCSController(app_settings, request_stub.registry)
487 prepare_patcher = mock.patch.object(
459 prepare_patcher = mock.patch.object(
488 simplevcs, 'prepare_callback_daemon',
460 simplevcs, 'prepare_callback_daemon',
489 return_value=(daemon, expected_extras))
461 return_value=(daemon, expected_extras))
490 with prepare_patcher as prepare_mock:
462 with prepare_patcher as prepare_mock:
491 callback_daemon, extras = controller._prepare_callback_daemon(
463 callback_daemon, extras = controller._prepare_callback_daemon(
492 expected_extras.copy())
464 expected_extras.copy(), {}, 'push')
493 prepare_mock.assert_called_once_with(
465 prepare_mock.assert_called_once_with(
494 expected_extras,
466 expected_extras,
495 protocol=app_settings['vcs.hooks.protocol'],
467 protocol=app_settings['vcs.hooks.protocol'],
468 txn_id=None,
496 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
469 use_direct_calls=app_settings['vcs.hooks.direct_calls'])
497
470
498 assert callback_daemon == daemon
471 assert callback_daemon == daemon
499 assert extras == extras
472 assert extras == extras
@@ -1,321 +1,329 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import json
22 import logging
22 import logging
23 from StringIO import StringIO
23 from StringIO import StringIO
24
24
25 import mock
25 import mock
26 import pytest
26 import pytest
27
27
28 from rhodecode.lib import hooks_daemon
28 from rhodecode.lib import hooks_daemon
29 from rhodecode.tests.utils import assert_message_in_log
29 from rhodecode.tests.utils import assert_message_in_log
30
30
31
31
32 class TestDummyHooksCallbackDaemon(object):
32 class TestDummyHooksCallbackDaemon(object):
33 def test_hooks_module_path_set_properly(self):
33 def test_hooks_module_path_set_properly(self):
34 daemon = hooks_daemon.DummyHooksCallbackDaemon()
34 daemon = hooks_daemon.DummyHooksCallbackDaemon()
35 assert daemon.hooks_module == 'rhodecode.lib.hooks_daemon'
35 assert daemon.hooks_module == 'rhodecode.lib.hooks_daemon'
36
36
37 def test_logs_entering_the_hook(self):
37 def test_logs_entering_the_hook(self):
38 daemon = hooks_daemon.DummyHooksCallbackDaemon()
38 daemon = hooks_daemon.DummyHooksCallbackDaemon()
39 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
39 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
40 with daemon as return_value:
40 with daemon as return_value:
41 log_mock.assert_called_once_with(
41 log_mock.assert_called_once_with(
42 'Running dummy hooks callback daemon')
42 'Running dummy hooks callback daemon')
43 assert return_value == daemon
43 assert return_value == daemon
44
44
45 def test_logs_exiting_the_hook(self):
45 def test_logs_exiting_the_hook(self):
46 daemon = hooks_daemon.DummyHooksCallbackDaemon()
46 daemon = hooks_daemon.DummyHooksCallbackDaemon()
47 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
47 with mock.patch.object(hooks_daemon.log, 'debug') as log_mock:
48 with daemon:
48 with daemon:
49 pass
49 pass
50 log_mock.assert_called_with('Exiting dummy hooks callback daemon')
50 log_mock.assert_called_with('Exiting dummy hooks callback daemon')
51
51
52
52
53 class TestHooks(object):
53 class TestHooks(object):
54 def test_hooks_can_be_used_as_a_context_processor(self):
54 def test_hooks_can_be_used_as_a_context_processor(self):
55 hooks = hooks_daemon.Hooks()
55 hooks = hooks_daemon.Hooks()
56 with hooks as return_value:
56 with hooks as return_value:
57 pass
57 pass
58 assert hooks == return_value
58 assert hooks == return_value
59
59
60
60
61 class TestHooksHttpHandler(object):
61 class TestHooksHttpHandler(object):
62 def test_read_request_parses_method_name_and_arguments(self):
62 def test_read_request_parses_method_name_and_arguments(self):
63 data = {
63 data = {
64 'method': 'test',
64 'method': 'test',
65 'extras': {
65 'extras': {
66 'param1': 1,
66 'param1': 1,
67 'param2': 'a'
67 'param2': 'a'
68 }
68 }
69 }
69 }
70 request = self._generate_post_request(data)
70 request = self._generate_post_request(data)
71 hooks_patcher = mock.patch.object(
71 hooks_patcher = mock.patch.object(
72 hooks_daemon.Hooks, data['method'], create=True, return_value=1)
72 hooks_daemon.Hooks, data['method'], create=True, return_value=1)
73
73
74 with hooks_patcher as hooks_mock:
74 with hooks_patcher as hooks_mock:
75 MockServer(hooks_daemon.HooksHttpHandler, request)
75 MockServer(hooks_daemon.HooksHttpHandler, request)
76
76
77 hooks_mock.assert_called_once_with(data['extras'])
77 hooks_mock.assert_called_once_with(data['extras'])
78
78
79 def test_hooks_serialized_result_is_returned(self):
79 def test_hooks_serialized_result_is_returned(self):
80 request = self._generate_post_request({})
80 request = self._generate_post_request({})
81 rpc_method = 'test'
81 rpc_method = 'test'
82 hook_result = {
82 hook_result = {
83 'first': 'one',
83 'first': 'one',
84 'second': 2
84 'second': 2
85 }
85 }
86 read_patcher = mock.patch.object(
86 read_patcher = mock.patch.object(
87 hooks_daemon.HooksHttpHandler, '_read_request',
87 hooks_daemon.HooksHttpHandler, '_read_request',
88 return_value=(rpc_method, {}))
88 return_value=(rpc_method, {}))
89 hooks_patcher = mock.patch.object(
89 hooks_patcher = mock.patch.object(
90 hooks_daemon.Hooks, rpc_method, create=True,
90 hooks_daemon.Hooks, rpc_method, create=True,
91 return_value=hook_result)
91 return_value=hook_result)
92
92
93 with read_patcher, hooks_patcher:
93 with read_patcher, hooks_patcher:
94 server = MockServer(hooks_daemon.HooksHttpHandler, request)
94 server = MockServer(hooks_daemon.HooksHttpHandler, request)
95
95
96 expected_result = json.dumps(hook_result)
96 expected_result = json.dumps(hook_result)
97 assert server.request.output_stream.buflist[-1] == expected_result
97 assert server.request.output_stream.buflist[-1] == expected_result
98
98
99 def test_exception_is_returned_in_response(self):
99 def test_exception_is_returned_in_response(self):
100 request = self._generate_post_request({})
100 request = self._generate_post_request({})
101 rpc_method = 'test'
101 rpc_method = 'test'
102 read_patcher = mock.patch.object(
102 read_patcher = mock.patch.object(
103 hooks_daemon.HooksHttpHandler, '_read_request',
103 hooks_daemon.HooksHttpHandler, '_read_request',
104 return_value=(rpc_method, {}))
104 return_value=(rpc_method, {}))
105 hooks_patcher = mock.patch.object(
105 hooks_patcher = mock.patch.object(
106 hooks_daemon.Hooks, rpc_method, create=True,
106 hooks_daemon.Hooks, rpc_method, create=True,
107 side_effect=Exception('Test exception'))
107 side_effect=Exception('Test exception'))
108
108
109 with read_patcher, hooks_patcher:
109 with read_patcher, hooks_patcher:
110 server = MockServer(hooks_daemon.HooksHttpHandler, request)
110 server = MockServer(hooks_daemon.HooksHttpHandler, request)
111
111
112 org_exc = json.loads(server.request.output_stream.buflist[-1])
112 org_exc = json.loads(server.request.output_stream.buflist[-1])
113 expected_result = {
113 expected_result = {
114 'exception': 'Exception',
114 'exception': 'Exception',
115 'exception_traceback': org_exc['exception_traceback'],
115 'exception_traceback': org_exc['exception_traceback'],
116 'exception_args': ['Test exception']
116 'exception_args': ['Test exception']
117 }
117 }
118 assert org_exc == expected_result
118 assert org_exc == expected_result
119
119
120 def test_log_message_writes_to_debug_log(self, caplog):
120 def test_log_message_writes_to_debug_log(self, caplog):
121 ip_port = ('0.0.0.0', 8888)
121 ip_port = ('0.0.0.0', 8888)
122 handler = hooks_daemon.HooksHttpHandler(
122 handler = hooks_daemon.HooksHttpHandler(
123 MockRequest('POST /'), ip_port, mock.Mock())
123 MockRequest('POST /'), ip_port, mock.Mock())
124 fake_date = '1/Nov/2015 00:00:00'
124 fake_date = '1/Nov/2015 00:00:00'
125 date_patcher = mock.patch.object(
125 date_patcher = mock.patch.object(
126 handler, 'log_date_time_string', return_value=fake_date)
126 handler, 'log_date_time_string', return_value=fake_date)
127 with date_patcher, caplog.at_level(logging.DEBUG):
127 with date_patcher, caplog.at_level(logging.DEBUG):
128 handler.log_message('Some message %d, %s', 123, 'string')
128 handler.log_message('Some message %d, %s', 123, 'string')
129
129
130 expected_message = '{} - - [{}] Some message 123, string'.format(
130 expected_message = '{} - - [{}] Some message 123, string'.format(
131 ip_port[0], fake_date)
131 ip_port[0], fake_date)
132 assert_message_in_log(
132 assert_message_in_log(
133 caplog.records, expected_message,
133 caplog.records, expected_message,
134 levelno=logging.DEBUG, module='hooks_daemon')
134 levelno=logging.DEBUG, module='hooks_daemon')
135
135
136 def _generate_post_request(self, data):
136 def _generate_post_request(self, data):
137 payload = json.dumps(data)
137 payload = json.dumps(data)
138 return 'POST / HTTP/1.0\nContent-Length: {}\n\n{}'.format(
138 return 'POST / HTTP/1.0\nContent-Length: {}\n\n{}'.format(
139 len(payload), payload)
139 len(payload), payload)
140
140
141
141
142 class ThreadedHookCallbackDaemon(object):
142 class ThreadedHookCallbackDaemon(object):
143 def test_constructor_calls_prepare(self):
143 def test_constructor_calls_prepare(self):
144 prepare_daemon_patcher = mock.patch.object(
144 prepare_daemon_patcher = mock.patch.object(
145 hooks_daemon.ThreadedHookCallbackDaemon, '_prepare')
145 hooks_daemon.ThreadedHookCallbackDaemon, '_prepare')
146 with prepare_daemon_patcher as prepare_daemon_mock:
146 with prepare_daemon_patcher as prepare_daemon_mock:
147 hooks_daemon.ThreadedHookCallbackDaemon()
147 hooks_daemon.ThreadedHookCallbackDaemon()
148 prepare_daemon_mock.assert_called_once_with()
148 prepare_daemon_mock.assert_called_once_with()
149
149
150 def test_run_is_called_on_context_start(self):
150 def test_run_is_called_on_context_start(self):
151 patchers = mock.patch.multiple(
151 patchers = mock.patch.multiple(
152 hooks_daemon.ThreadedHookCallbackDaemon,
152 hooks_daemon.ThreadedHookCallbackDaemon,
153 _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)
153 _run=mock.DEFAULT, _prepare=mock.DEFAULT, __exit__=mock.DEFAULT)
154
154
155 with patchers as mocks:
155 with patchers as mocks:
156 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
156 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
157 with daemon as daemon_context:
157 with daemon as daemon_context:
158 pass
158 pass
159 mocks['_run'].assert_called_once_with()
159 mocks['_run'].assert_called_once_with()
160 assert daemon_context == daemon
160 assert daemon_context == daemon
161
161
162 def test_stop_is_called_on_context_exit(self):
162 def test_stop_is_called_on_context_exit(self):
163 patchers = mock.patch.multiple(
163 patchers = mock.patch.multiple(
164 hooks_daemon.ThreadedHookCallbackDaemon,
164 hooks_daemon.ThreadedHookCallbackDaemon,
165 _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)
165 _run=mock.DEFAULT, _prepare=mock.DEFAULT, _stop=mock.DEFAULT)
166
166
167 with patchers as mocks:
167 with patchers as mocks:
168 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
168 daemon = hooks_daemon.ThreadedHookCallbackDaemon()
169 with daemon as daemon_context:
169 with daemon as daemon_context:
170 assert mocks['_stop'].call_count == 0
170 assert mocks['_stop'].call_count == 0
171
171
172 mocks['_stop'].assert_called_once_with()
172 mocks['_stop'].assert_called_once_with()
173 assert daemon_context == daemon
173 assert daemon_context == daemon
174
174
175
175
176 class TestHttpHooksCallbackDaemon(object):
176 class TestHttpHooksCallbackDaemon(object):
177 def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
177 def test_prepare_inits_daemon_variable(self, tcp_server, caplog):
178 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
178 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
179 daemon = hooks_daemon.HttpHooksCallbackDaemon()
179 daemon = hooks_daemon.HttpHooksCallbackDaemon()
180 assert daemon._daemon == tcp_server
180 assert daemon._daemon == tcp_server
181
181
182 _, port = tcp_server.server_address
183 expected_uri = '{}:{}'.format(daemon.IP_ADDRESS, port)
184 msg = 'Preparing HTTP callback daemon at `{}` and ' \
185 'registering hook object'.format(expected_uri)
182 assert_message_in_log(
186 assert_message_in_log(
183 caplog.records,
187 caplog.records, msg, levelno=logging.DEBUG, module='hooks_daemon')
184 'Preparing HTTP callback daemon and registering hook object',
185 levelno=logging.DEBUG, module='hooks_daemon')
186
188
187 def test_prepare_inits_hooks_uri_and_logs_it(
189 def test_prepare_inits_hooks_uri_and_logs_it(
188 self, tcp_server, caplog):
190 self, tcp_server, caplog):
189 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
191 with self._tcp_patcher(tcp_server), caplog.at_level(logging.DEBUG):
190 daemon = hooks_daemon.HttpHooksCallbackDaemon()
192 daemon = hooks_daemon.HttpHooksCallbackDaemon()
191
193
192 _, port = tcp_server.server_address
194 _, port = tcp_server.server_address
193 expected_uri = '{}:{}'.format(daemon.IP_ADDRESS, port)
195 expected_uri = '{}:{}'.format(daemon.IP_ADDRESS, port)
194 assert daemon.hooks_uri == expected_uri
196 assert daemon.hooks_uri == expected_uri
195
197
198 msg = 'Preparing HTTP callback daemon at `{}` and ' \
199 'registering hook object'.format(expected_uri)
196 assert_message_in_log(
200 assert_message_in_log(
197 caplog.records, 'Hooks uri is: {}'.format(expected_uri),
201 caplog.records, msg,
198 levelno=logging.DEBUG, module='hooks_daemon')
202 levelno=logging.DEBUG, module='hooks_daemon')
199
203
200 def test_run_creates_a_thread(self, tcp_server):
204 def test_run_creates_a_thread(self, tcp_server):
201 thread = mock.Mock()
205 thread = mock.Mock()
202
206
203 with self._tcp_patcher(tcp_server):
207 with self._tcp_patcher(tcp_server):
204 daemon = hooks_daemon.HttpHooksCallbackDaemon()
208 daemon = hooks_daemon.HttpHooksCallbackDaemon()
205
209
206 with self._thread_patcher(thread) as thread_mock:
210 with self._thread_patcher(thread) as thread_mock:
207 daemon._run()
211 daemon._run()
208
212
209 thread_mock.assert_called_once_with(
213 thread_mock.assert_called_once_with(
210 target=tcp_server.serve_forever,
214 target=tcp_server.serve_forever,
211 kwargs={'poll_interval': daemon.POLL_INTERVAL})
215 kwargs={'poll_interval': daemon.POLL_INTERVAL})
212 assert thread.daemon is True
216 assert thread.daemon is True
213 thread.start.assert_called_once_with()
217 thread.start.assert_called_once_with()
214
218
215 def test_run_logs(self, tcp_server, caplog):
219 def test_run_logs(self, tcp_server, caplog):
216
220
217 with self._tcp_patcher(tcp_server):
221 with self._tcp_patcher(tcp_server):
218 daemon = hooks_daemon.HttpHooksCallbackDaemon()
222 daemon = hooks_daemon.HttpHooksCallbackDaemon()
219
223
220 with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
224 with self._thread_patcher(mock.Mock()), caplog.at_level(logging.DEBUG):
221 daemon._run()
225 daemon._run()
222
226
223 assert_message_in_log(
227 assert_message_in_log(
224 caplog.records,
228 caplog.records,
225 'Running event loop of callback daemon in background thread',
229 'Running event loop of callback daemon in background thread',
226 levelno=logging.DEBUG, module='hooks_daemon')
230 levelno=logging.DEBUG, module='hooks_daemon')
227
231
228 def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
232 def test_stop_cleans_up_the_connection(self, tcp_server, caplog):
229 thread = mock.Mock()
233 thread = mock.Mock()
230
234
231 with self._tcp_patcher(tcp_server):
235 with self._tcp_patcher(tcp_server):
232 daemon = hooks_daemon.HttpHooksCallbackDaemon()
236 daemon = hooks_daemon.HttpHooksCallbackDaemon()
233
237
234 with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
238 with self._thread_patcher(thread), caplog.at_level(logging.DEBUG):
235 with daemon:
239 with daemon:
236 assert daemon._daemon == tcp_server
240 assert daemon._daemon == tcp_server
237 assert daemon._callback_thread == thread
241 assert daemon._callback_thread == thread
238
242
239 assert daemon._daemon is None
243 assert daemon._daemon is None
240 assert daemon._callback_thread is None
244 assert daemon._callback_thread is None
241 tcp_server.shutdown.assert_called_with()
245 tcp_server.shutdown.assert_called_with()
242 thread.join.assert_called_once_with()
246 thread.join.assert_called_once_with()
243
247
244 assert_message_in_log(
248 assert_message_in_log(
245 caplog.records, 'Waiting for background thread to finish.',
249 caplog.records, 'Waiting for background thread to finish.',
246 levelno=logging.DEBUG, module='hooks_daemon')
250 levelno=logging.DEBUG, module='hooks_daemon')
247
251
248 def _tcp_patcher(self, tcp_server):
252 def _tcp_patcher(self, tcp_server):
249 return mock.patch.object(
253 return mock.patch.object(
250 hooks_daemon, 'TCPServer', return_value=tcp_server)
254 hooks_daemon, 'TCPServer', return_value=tcp_server)
251
255
252 def _thread_patcher(self, thread):
256 def _thread_patcher(self, thread):
253 return mock.patch.object(
257 return mock.patch.object(
254 hooks_daemon.threading, 'Thread', return_value=thread)
258 hooks_daemon.threading, 'Thread', return_value=thread)
255
259
256
260
257 class TestPrepareHooksDaemon(object):
261 class TestPrepareHooksDaemon(object):
258 @pytest.mark.parametrize('protocol', ('http',))
262 @pytest.mark.parametrize('protocol', ('http',))
259 def test_returns_dummy_hooks_callback_daemon_when_using_direct_calls(
263 def test_returns_dummy_hooks_callback_daemon_when_using_direct_calls(
260 self, protocol):
264 self, protocol):
261 expected_extras = {'extra1': 'value1'}
265 expected_extras = {'extra1': 'value1'}
262 callback, extras = hooks_daemon.prepare_callback_daemon(
266 callback, extras = hooks_daemon.prepare_callback_daemon(
263 expected_extras.copy(), protocol=protocol, use_direct_calls=True)
267 expected_extras.copy(), protocol=protocol, use_direct_calls=True)
264 assert isinstance(callback, hooks_daemon.DummyHooksCallbackDaemon)
268 assert isinstance(callback, hooks_daemon.DummyHooksCallbackDaemon)
265 expected_extras['hooks_module'] = 'rhodecode.lib.hooks_daemon'
269 expected_extras['hooks_module'] = 'rhodecode.lib.hooks_daemon'
266 assert extras == expected_extras
270 expected_extras['time'] = extras['time']
271 assert 'extra1' in extras
267
272
268 @pytest.mark.parametrize('protocol, expected_class', (
273 @pytest.mark.parametrize('protocol, expected_class', (
269 ('http', hooks_daemon.HttpHooksCallbackDaemon),
274 ('http', hooks_daemon.HttpHooksCallbackDaemon),
270 ))
275 ))
271 def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
276 def test_returns_real_hooks_callback_daemon_when_protocol_is_specified(
272 self, protocol, expected_class):
277 self, protocol, expected_class):
273 expected_extras = {
278 expected_extras = {
274 'extra1': 'value1',
279 'extra1': 'value1',
280 'txn_id': 'txnid2',
275 'hooks_protocol': protocol.lower()
281 'hooks_protocol': protocol.lower()
276 }
282 }
277 callback, extras = hooks_daemon.prepare_callback_daemon(
283 callback, extras = hooks_daemon.prepare_callback_daemon(
278 expected_extras.copy(), protocol=protocol, use_direct_calls=False)
284 expected_extras.copy(), protocol=protocol, use_direct_calls=False,
285 txn_id='txnid2')
279 assert isinstance(callback, expected_class)
286 assert isinstance(callback, expected_class)
280 hooks_uri = extras.pop('hooks_uri')
287 extras.pop('hooks_uri')
288 expected_extras['time'] = extras['time']
281 assert extras == expected_extras
289 assert extras == expected_extras
282
290
283 @pytest.mark.parametrize('protocol', (
291 @pytest.mark.parametrize('protocol', (
284 'invalid',
292 'invalid',
285 'Http',
293 'Http',
286 'HTTP',
294 'HTTP',
287 ))
295 ))
288 def test_raises_on_invalid_protocol(self, protocol):
296 def test_raises_on_invalid_protocol(self, protocol):
289 expected_extras = {
297 expected_extras = {
290 'extra1': 'value1',
298 'extra1': 'value1',
291 'hooks_protocol': protocol.lower()
299 'hooks_protocol': protocol.lower()
292 }
300 }
293 with pytest.raises(Exception):
301 with pytest.raises(Exception):
294 callback, extras = hooks_daemon.prepare_callback_daemon(
302 callback, extras = hooks_daemon.prepare_callback_daemon(
295 expected_extras.copy(),
303 expected_extras.copy(),
296 protocol=protocol,
304 protocol=protocol,
297 use_direct_calls=False)
305 use_direct_calls=False)
298
306
299
307
300 class MockRequest(object):
308 class MockRequest(object):
301 def __init__(self, request):
309 def __init__(self, request):
302 self.request = request
310 self.request = request
303 self.input_stream = StringIO(b'{}'.format(self.request))
311 self.input_stream = StringIO(b'{}'.format(self.request))
304 self.output_stream = StringIO()
312 self.output_stream = StringIO()
305
313
306 def makefile(self, mode, *args, **kwargs):
314 def makefile(self, mode, *args, **kwargs):
307 return self.output_stream if mode == 'wb' else self.input_stream
315 return self.output_stream if mode == 'wb' else self.input_stream
308
316
309
317
310 class MockServer(object):
318 class MockServer(object):
311 def __init__(self, Handler, request):
319 def __init__(self, Handler, request):
312 ip_port = ('0.0.0.0', 8888)
320 ip_port = ('0.0.0.0', 8888)
313 self.request = MockRequest(request)
321 self.request = MockRequest(request)
314 self.handler = Handler(self.request, ip_port, self)
322 self.handler = Handler(self.request, ip_port, self)
315
323
316
324
317 @pytest.fixture
325 @pytest.fixture
318 def tcp_server():
326 def tcp_server():
319 server = mock.Mock()
327 server = mock.Mock()
320 server.server_address = ('127.0.0.1', 8881)
328 server.server_address = ('127.0.0.1', 8881)
321 return server
329 return server
@@ -1,456 +1,446 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import json
21 import json
22 import multiprocessing
22 import multiprocessing
23 import os
23 import os
24
24
25 import mock
25 import mock
26 import py
26 import py
27 import pytest
27 import pytest
28
28
29 from rhodecode.lib import caching_query
29 from rhodecode.lib import caching_query
30 from rhodecode.lib import utils
30 from rhodecode.lib import utils
31 from rhodecode.lib.utils2 import md5
31 from rhodecode.lib.utils2 import md5
32 from rhodecode.model import settings
32 from rhodecode.model import settings
33 from rhodecode.model import db
33 from rhodecode.model import db
34 from rhodecode.model import meta
34 from rhodecode.model import meta
35 from rhodecode.model.repo import RepoModel
35 from rhodecode.model.repo import RepoModel
36 from rhodecode.model.repo_group import RepoGroupModel
36 from rhodecode.model.repo_group import RepoGroupModel
37 from rhodecode.model.scm import ScmModel
37 from rhodecode.model.scm import ScmModel
38 from rhodecode.model.settings import UiSetting, SettingsModel
38 from rhodecode.model.settings import UiSetting, SettingsModel
39 from rhodecode.tests.fixture import Fixture
39 from rhodecode.tests.fixture import Fixture
40
40
41
41
42 fixture = Fixture()
42 fixture = Fixture()
43
43
44
44
45 def extract_hooks(config):
45 def extract_hooks(config):
46 """Return a dictionary with the hook entries of the given config."""
46 """Return a dictionary with the hook entries of the given config."""
47 hooks = {}
47 hooks = {}
48 config_items = config.serialize()
48 config_items = config.serialize()
49 for section, name, value in config_items:
49 for section, name, value in config_items:
50 if section != 'hooks':
50 if section != 'hooks':
51 continue
51 continue
52 hooks[name] = value
52 hooks[name] = value
53
53
54 return hooks
54 return hooks
55
55
56
56
57 def disable_hooks(request, hooks):
57 def disable_hooks(request, hooks):
58 """Disables the given hooks from the UI settings."""
58 """Disables the given hooks from the UI settings."""
59 session = meta.Session()
59 session = meta.Session()
60
60
61 model = SettingsModel()
61 model = SettingsModel()
62 for hook_key in hooks:
62 for hook_key in hooks:
63 sett = model.get_ui_by_key(hook_key)
63 sett = model.get_ui_by_key(hook_key)
64 sett.ui_active = False
64 sett.ui_active = False
65 session.add(sett)
65 session.add(sett)
66
66
67 # Invalidate cache
67 # Invalidate cache
68 ui_settings = session.query(db.RhodeCodeUi).options(
68 ui_settings = session.query(db.RhodeCodeUi).options(
69 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
69 caching_query.FromCache('sql_cache_short', 'get_hg_ui_settings'))
70 ui_settings.invalidate()
70 ui_settings.invalidate()
71
71
72 ui_settings = session.query(db.RhodeCodeUi).options(
72 ui_settings = session.query(db.RhodeCodeUi).options(
73 caching_query.FromCache(
73 caching_query.FromCache(
74 'sql_cache_short', 'get_hook_settings', 'get_hook_settings'))
74 'sql_cache_short', 'get_hook_settings', 'get_hook_settings'))
75 ui_settings.invalidate()
75 ui_settings.invalidate()
76
76
77 @request.addfinalizer
77 @request.addfinalizer
78 def rollback():
78 def rollback():
79 session.rollback()
79 session.rollback()
80
80
81
81
82 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
82 HOOK_PRE_PUSH = db.RhodeCodeUi.HOOK_PRE_PUSH
83 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
83 HOOK_PRETX_PUSH = db.RhodeCodeUi.HOOK_PRETX_PUSH
84 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
84 HOOK_PUSH = db.RhodeCodeUi.HOOK_PUSH
85 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
85 HOOK_PRE_PULL = db.RhodeCodeUi.HOOK_PRE_PULL
86 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
86 HOOK_PULL = db.RhodeCodeUi.HOOK_PULL
87 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
87 HOOK_REPO_SIZE = db.RhodeCodeUi.HOOK_REPO_SIZE
88 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
88 HOOK_PUSH_KEY = db.RhodeCodeUi.HOOK_PUSH_KEY
89
89
90 HG_HOOKS = frozenset(
90 HG_HOOKS = frozenset(
91 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
91 (HOOK_PRE_PULL, HOOK_PULL, HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH,
92 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
92 HOOK_REPO_SIZE, HOOK_PUSH_KEY))
93
93
94
94
95 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
95 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
96 ([], HG_HOOKS),
96 ([], HG_HOOKS),
97 (HG_HOOKS, []),
97 (HG_HOOKS, []),
98
98
99 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
99 ([HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_REPO_SIZE, HOOK_PUSH_KEY], [HOOK_PRE_PULL, HOOK_PULL, HOOK_PUSH]),
100
100
101 # When a pull/push hook is disabled, its pre-pull/push counterpart should
101 # When a pull/push hook is disabled, its pre-pull/push counterpart should
102 # be disabled too.
102 # be disabled too.
103 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
103 ([HOOK_PUSH], [HOOK_PRE_PULL, HOOK_PULL, HOOK_REPO_SIZE]),
104 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
104 ([HOOK_PULL], [HOOK_PRE_PUSH, HOOK_PRETX_PUSH, HOOK_PUSH, HOOK_REPO_SIZE,
105 HOOK_PUSH_KEY]),
105 HOOK_PUSH_KEY]),
106 ])
106 ])
107 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
107 def test_make_db_config_hg_hooks(baseapp, request, disabled_hooks,
108 expected_hooks):
108 expected_hooks):
109 disable_hooks(request, disabled_hooks)
109 disable_hooks(request, disabled_hooks)
110
110
111 config = utils.make_db_config()
111 config = utils.make_db_config()
112 hooks = extract_hooks(config)
112 hooks = extract_hooks(config)
113
113
114 assert set(hooks.iterkeys()).intersection(HG_HOOKS) == set(expected_hooks)
114 assert set(hooks.iterkeys()).intersection(HG_HOOKS) == set(expected_hooks)
115
115
116
116
117 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
117 @pytest.mark.parametrize('disabled_hooks,expected_hooks', [
118 ([], ['pull', 'push']),
118 ([], ['pull', 'push']),
119 ([HOOK_PUSH], ['pull']),
119 ([HOOK_PUSH], ['pull']),
120 ([HOOK_PULL], ['push']),
120 ([HOOK_PULL], ['push']),
121 ([HOOK_PULL, HOOK_PUSH], []),
121 ([HOOK_PULL, HOOK_PUSH], []),
122 ])
122 ])
123 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
123 def test_get_enabled_hook_classes(disabled_hooks, expected_hooks):
124 hook_keys = (HOOK_PUSH, HOOK_PULL)
124 hook_keys = (HOOK_PUSH, HOOK_PULL)
125 ui_settings = [
125 ui_settings = [
126 ('hooks', key, 'some value', key not in disabled_hooks)
126 ('hooks', key, 'some value', key not in disabled_hooks)
127 for key in hook_keys]
127 for key in hook_keys]
128
128
129 result = utils.get_enabled_hook_classes(ui_settings)
129 result = utils.get_enabled_hook_classes(ui_settings)
130 assert sorted(result) == expected_hooks
130 assert sorted(result) == expected_hooks
131
131
132
132
133 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
133 def test_get_filesystem_repos_finds_repos(tmpdir, baseapp):
134 _stub_git_repo(tmpdir.ensure('repo', dir=True))
134 _stub_git_repo(tmpdir.ensure('repo', dir=True))
135 repos = list(utils.get_filesystem_repos(str(tmpdir)))
135 repos = list(utils.get_filesystem_repos(str(tmpdir)))
136 assert repos == [('repo', ('git', tmpdir.join('repo')))]
136 assert repos == [('repo', ('git', tmpdir.join('repo')))]
137
137
138
138
139 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
139 def test_get_filesystem_repos_skips_directories(tmpdir, baseapp):
140 tmpdir.ensure('not-a-repo', dir=True)
140 tmpdir.ensure('not-a-repo', dir=True)
141 repos = list(utils.get_filesystem_repos(str(tmpdir)))
141 repos = list(utils.get_filesystem_repos(str(tmpdir)))
142 assert repos == []
142 assert repos == []
143
143
144
144
145 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
145 def test_get_filesystem_repos_skips_directories_with_repos(tmpdir, baseapp):
146 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
146 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
147 repos = list(utils.get_filesystem_repos(str(tmpdir)))
147 repos = list(utils.get_filesystem_repos(str(tmpdir)))
148 assert repos == []
148 assert repos == []
149
149
150
150
151 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
151 def test_get_filesystem_repos_finds_repos_in_subdirectories(tmpdir, baseapp):
152 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
152 _stub_git_repo(tmpdir.ensure('subdir/repo', dir=True))
153 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
153 repos = list(utils.get_filesystem_repos(str(tmpdir), recursive=True))
154 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
154 assert repos == [('subdir/repo', ('git', tmpdir.join('subdir', 'repo')))]
155
155
156
156
157 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
157 def test_get_filesystem_repos_skips_names_starting_with_dot(tmpdir):
158 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
158 _stub_git_repo(tmpdir.ensure('.repo', dir=True))
159 repos = list(utils.get_filesystem_repos(str(tmpdir)))
159 repos = list(utils.get_filesystem_repos(str(tmpdir)))
160 assert repos == []
160 assert repos == []
161
161
162
162
163 def test_get_filesystem_repos_skips_files(tmpdir):
163 def test_get_filesystem_repos_skips_files(tmpdir):
164 tmpdir.ensure('test-file')
164 tmpdir.ensure('test-file')
165 repos = list(utils.get_filesystem_repos(str(tmpdir)))
165 repos = list(utils.get_filesystem_repos(str(tmpdir)))
166 assert repos == []
166 assert repos == []
167
167
168
168
169 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
169 def test_get_filesystem_repos_skips_removed_repositories(tmpdir):
170 removed_repo_name = 'rm__00000000_000000_000000__.stub'
170 removed_repo_name = 'rm__00000000_000000_000000__.stub'
171 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
171 assert utils.REMOVED_REPO_PAT.match(removed_repo_name)
172 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
172 _stub_git_repo(tmpdir.ensure(removed_repo_name, dir=True))
173 repos = list(utils.get_filesystem_repos(str(tmpdir)))
173 repos = list(utils.get_filesystem_repos(str(tmpdir)))
174 assert repos == []
174 assert repos == []
175
175
176
176
177 def _stub_git_repo(repo_path):
177 def _stub_git_repo(repo_path):
178 """
178 """
179 Make `repo_path` look like a Git repository.
179 Make `repo_path` look like a Git repository.
180 """
180 """
181 repo_path.ensure('.git', dir=True)
181 repo_path.ensure('.git', dir=True)
182
182
183
183
184 @pytest.mark.parametrize('str_class', [str, unicode], ids=['str', 'unicode'])
184 @pytest.mark.parametrize('str_class', [str, unicode], ids=['str', 'unicode'])
185 def test_get_dirpaths_returns_all_paths(tmpdir, str_class):
185 def test_get_dirpaths_returns_all_paths(tmpdir, str_class):
186 tmpdir.ensure('test-file')
186 tmpdir.ensure('test-file')
187 dirpaths = utils._get_dirpaths(str_class(tmpdir))
187 dirpaths = utils._get_dirpaths(str_class(tmpdir))
188 assert dirpaths == ['test-file']
188 assert dirpaths == ['test-file']
189
189
190
190
191 def test_get_dirpaths_returns_all_paths_bytes(
191 def test_get_dirpaths_returns_all_paths_bytes(
192 tmpdir, platform_encodes_filenames):
192 tmpdir, platform_encodes_filenames):
193 if platform_encodes_filenames:
193 if platform_encodes_filenames:
194 pytest.skip("This platform seems to encode filenames.")
194 pytest.skip("This platform seems to encode filenames.")
195 tmpdir.ensure('repo-a-umlaut-\xe4')
195 tmpdir.ensure('repo-a-umlaut-\xe4')
196 dirpaths = utils._get_dirpaths(str(tmpdir))
196 dirpaths = utils._get_dirpaths(str(tmpdir))
197 assert dirpaths == ['repo-a-umlaut-\xe4']
197 assert dirpaths == ['repo-a-umlaut-\xe4']
198
198
199
199
200 def test_get_dirpaths_skips_paths_it_cannot_decode(
200 def test_get_dirpaths_skips_paths_it_cannot_decode(
201 tmpdir, platform_encodes_filenames):
201 tmpdir, platform_encodes_filenames):
202 if platform_encodes_filenames:
202 if platform_encodes_filenames:
203 pytest.skip("This platform seems to encode filenames.")
203 pytest.skip("This platform seems to encode filenames.")
204 path_with_latin1 = 'repo-a-umlaut-\xe4'
204 path_with_latin1 = 'repo-a-umlaut-\xe4'
205 tmpdir.ensure(path_with_latin1)
205 tmpdir.ensure(path_with_latin1)
206 dirpaths = utils._get_dirpaths(unicode(tmpdir))
206 dirpaths = utils._get_dirpaths(unicode(tmpdir))
207 assert dirpaths == []
207 assert dirpaths == []
208
208
209
209
210 @pytest.fixture(scope='session')
210 @pytest.fixture(scope='session')
211 def platform_encodes_filenames():
211 def platform_encodes_filenames():
212 """
212 """
213 Boolean indicator if the current platform changes filename encodings.
213 Boolean indicator if the current platform changes filename encodings.
214 """
214 """
215 path_with_latin1 = 'repo-a-umlaut-\xe4'
215 path_with_latin1 = 'repo-a-umlaut-\xe4'
216 tmpdir = py.path.local.mkdtemp()
216 tmpdir = py.path.local.mkdtemp()
217 tmpdir.ensure(path_with_latin1)
217 tmpdir.ensure(path_with_latin1)
218 read_path = tmpdir.listdir()[0].basename
218 read_path = tmpdir.listdir()[0].basename
219 tmpdir.remove()
219 tmpdir.remove()
220 return path_with_latin1 != read_path
220 return path_with_latin1 != read_path
221
221
222
222
223
223
224
224
225 def test_repo2db_mapper_groups(repo_groups):
225 def test_repo2db_mapper_groups(repo_groups):
226 session = meta.Session()
226 session = meta.Session()
227 zombie_group, parent_group, child_group = repo_groups
227 zombie_group, parent_group, child_group = repo_groups
228 zombie_path = os.path.join(
228 zombie_path = os.path.join(
229 RepoGroupModel().repos_path, zombie_group.full_path)
229 RepoGroupModel().repos_path, zombie_group.full_path)
230 os.rmdir(zombie_path)
230 os.rmdir(zombie_path)
231
231
232 # Avoid removing test repos when calling repo2db_mapper
232 # Avoid removing test repos when calling repo2db_mapper
233 repo_list = {
233 repo_list = {
234 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
234 repo.repo_name: 'test' for repo in session.query(db.Repository).all()
235 }
235 }
236 utils.repo2db_mapper(repo_list, remove_obsolete=True)
236 utils.repo2db_mapper(repo_list, remove_obsolete=True)
237
237
238 groups_in_db = session.query(db.RepoGroup).all()
238 groups_in_db = session.query(db.RepoGroup).all()
239 assert child_group in groups_in_db
239 assert child_group in groups_in_db
240 assert parent_group in groups_in_db
240 assert parent_group in groups_in_db
241 assert zombie_path not in groups_in_db
241 assert zombie_path not in groups_in_db
242
242
243
243
244 def test_repo2db_mapper_enables_largefiles(backend):
244 def test_repo2db_mapper_enables_largefiles(backend):
245 repo = backend.create_repo()
245 repo = backend.create_repo()
246 repo_list = {repo.repo_name: 'test'}
246 repo_list = {repo.repo_name: 'test'}
247 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
247 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm_mock:
248 with mock.patch.multiple('rhodecode.model.scm.ScmModel',
248 utils.repo2db_mapper(repo_list, remove_obsolete=False)
249 install_git_hook=mock.DEFAULT,
249 _, kwargs = scm_mock.call_args
250 install_svn_hooks=mock.DEFAULT):
250 assert kwargs['config'].get('extensions', 'largefiles') == ''
251 utils.repo2db_mapper(repo_list, remove_obsolete=False)
252 _, kwargs = scm_mock.call_args
253 assert kwargs['config'].get('extensions', 'largefiles') == ''
254
251
255
252
256 @pytest.mark.backends("git", "svn")
253 @pytest.mark.backends("git", "svn")
257 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
254 def test_repo2db_mapper_installs_hooks_for_repos_in_db(backend):
258 repo = backend.create_repo()
255 repo = backend.create_repo()
259 repo_list = {repo.repo_name: 'test'}
256 repo_list = {repo.repo_name: 'test'}
260 with mock.patch.object(ScmModel, 'install_hooks') as install_hooks_mock:
257 utils.repo2db_mapper(repo_list, remove_obsolete=False)
261 utils.repo2db_mapper(repo_list, remove_obsolete=False)
262 install_hooks_mock.assert_called_once_with(
263 repo.scm_instance(), repo_type=backend.alias)
264
258
265
259
266 @pytest.mark.backends("git", "svn")
260 @pytest.mark.backends("git", "svn")
267 def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
261 def test_repo2db_mapper_installs_hooks_for_newly_added_repos(backend):
268 repo = backend.create_repo()
262 repo = backend.create_repo()
269 RepoModel().delete(repo, fs_remove=False)
263 RepoModel().delete(repo, fs_remove=False)
270 meta.Session().commit()
264 meta.Session().commit()
271 repo_list = {repo.repo_name: repo.scm_instance()}
265 repo_list = {repo.repo_name: repo.scm_instance()}
272 with mock.patch.object(ScmModel, 'install_hooks') as install_hooks_mock:
266 utils.repo2db_mapper(repo_list, remove_obsolete=False)
273 utils.repo2db_mapper(repo_list, remove_obsolete=False)
274 assert install_hooks_mock.call_count == 1
275 install_hooks_args, _ = install_hooks_mock.call_args
276 assert install_hooks_args[0].name == repo.repo_name
277
267
278
268
279 class TestPasswordChanged(object):
269 class TestPasswordChanged(object):
280 def setup(self):
270 def setup(self):
281 self.session = {
271 self.session = {
282 'rhodecode_user': {
272 'rhodecode_user': {
283 'password': '0cc175b9c0f1b6a831c399e269772661'
273 'password': '0cc175b9c0f1b6a831c399e269772661'
284 }
274 }
285 }
275 }
286 self.auth_user = mock.Mock()
276 self.auth_user = mock.Mock()
287 self.auth_user.userame = 'test'
277 self.auth_user.userame = 'test'
288 self.auth_user.password = 'abc123'
278 self.auth_user.password = 'abc123'
289
279
290 def test_returns_false_for_default_user(self):
280 def test_returns_false_for_default_user(self):
291 self.auth_user.username = db.User.DEFAULT_USER
281 self.auth_user.username = db.User.DEFAULT_USER
292 result = utils.password_changed(self.auth_user, self.session)
282 result = utils.password_changed(self.auth_user, self.session)
293 assert result is False
283 assert result is False
294
284
295 def test_returns_false_if_password_was_not_changed(self):
285 def test_returns_false_if_password_was_not_changed(self):
296 self.session['rhodecode_user']['password'] = md5(
286 self.session['rhodecode_user']['password'] = md5(
297 self.auth_user.password)
287 self.auth_user.password)
298 result = utils.password_changed(self.auth_user, self.session)
288 result = utils.password_changed(self.auth_user, self.session)
299 assert result is False
289 assert result is False
300
290
301 def test_returns_true_if_password_was_changed(self):
291 def test_returns_true_if_password_was_changed(self):
302 result = utils.password_changed(self.auth_user, self.session)
292 result = utils.password_changed(self.auth_user, self.session)
303 assert result is True
293 assert result is True
304
294
305 def test_returns_true_if_auth_user_password_is_empty(self):
295 def test_returns_true_if_auth_user_password_is_empty(self):
306 self.auth_user.password = None
296 self.auth_user.password = None
307 result = utils.password_changed(self.auth_user, self.session)
297 result = utils.password_changed(self.auth_user, self.session)
308 assert result is True
298 assert result is True
309
299
310 def test_returns_true_if_session_password_is_empty(self):
300 def test_returns_true_if_session_password_is_empty(self):
311 self.session['rhodecode_user'].pop('password')
301 self.session['rhodecode_user'].pop('password')
312 result = utils.password_changed(self.auth_user, self.session)
302 result = utils.password_changed(self.auth_user, self.session)
313 assert result is True
303 assert result is True
314
304
315
305
316 class TestReadOpensourceLicenses(object):
306 class TestReadOpensourceLicenses(object):
317 def test_success(self):
307 def test_success(self):
318 utils._license_cache = None
308 utils._license_cache = None
319 json_data = '''
309 json_data = '''
320 {
310 {
321 "python2.7-pytest-2.7.1": {"UNKNOWN": null},
311 "python2.7-pytest-2.7.1": {"UNKNOWN": null},
322 "python2.7-Markdown-2.6.2": {
312 "python2.7-Markdown-2.6.2": {
323 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
313 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
324 }
314 }
325 }
315 }
326 '''
316 '''
327 resource_string_patch = mock.patch.object(
317 resource_string_patch = mock.patch.object(
328 utils.pkg_resources, 'resource_string', return_value=json_data)
318 utils.pkg_resources, 'resource_string', return_value=json_data)
329 with resource_string_patch:
319 with resource_string_patch:
330 result = utils.read_opensource_licenses()
320 result = utils.read_opensource_licenses()
331 assert result == json.loads(json_data)
321 assert result == json.loads(json_data)
332
322
333 def test_caching(self):
323 def test_caching(self):
334 utils._license_cache = {
324 utils._license_cache = {
335 "python2.7-pytest-2.7.1": {
325 "python2.7-pytest-2.7.1": {
336 "UNKNOWN": None
326 "UNKNOWN": None
337 },
327 },
338 "python2.7-Markdown-2.6.2": {
328 "python2.7-Markdown-2.6.2": {
339 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
329 "BSD-3-Clause": "http://spdx.org/licenses/BSD-3-Clause"
340 }
330 }
341 }
331 }
342 resource_patch = mock.patch.object(
332 resource_patch = mock.patch.object(
343 utils.pkg_resources, 'resource_string', side_effect=Exception)
333 utils.pkg_resources, 'resource_string', side_effect=Exception)
344 json_patch = mock.patch.object(
334 json_patch = mock.patch.object(
345 utils.json, 'loads', side_effect=Exception)
335 utils.json, 'loads', side_effect=Exception)
346
336
347 with resource_patch as resource_mock, json_patch as json_mock:
337 with resource_patch as resource_mock, json_patch as json_mock:
348 result = utils.read_opensource_licenses()
338 result = utils.read_opensource_licenses()
349
339
350 assert resource_mock.call_count == 0
340 assert resource_mock.call_count == 0
351 assert json_mock.call_count == 0
341 assert json_mock.call_count == 0
352 assert result == utils._license_cache
342 assert result == utils._license_cache
353
343
354 def test_licenses_file_contains_no_unknown_licenses(self):
344 def test_licenses_file_contains_no_unknown_licenses(self):
355 utils._license_cache = None
345 utils._license_cache = None
356 result = utils.read_opensource_licenses()
346 result = utils.read_opensource_licenses()
357 license_names = []
347 license_names = []
358 for licenses in result.values():
348 for licenses in result.values():
359 license_names.extend(licenses.keys())
349 license_names.extend(licenses.keys())
360 assert 'UNKNOWN' not in license_names
350 assert 'UNKNOWN' not in license_names
361
351
362
352
363 class TestMakeDbConfig(object):
353 class TestMakeDbConfig(object):
364 def test_data_from_config_data_from_db_returned(self):
354 def test_data_from_config_data_from_db_returned(self):
365 test_data = [
355 test_data = [
366 ('section1', 'option1', 'value1'),
356 ('section1', 'option1', 'value1'),
367 ('section2', 'option2', 'value2'),
357 ('section2', 'option2', 'value2'),
368 ('section3', 'option3', 'value3'),
358 ('section3', 'option3', 'value3'),
369 ]
359 ]
370 with mock.patch.object(utils, 'config_data_from_db') as config_mock:
360 with mock.patch.object(utils, 'config_data_from_db') as config_mock:
371 config_mock.return_value = test_data
361 config_mock.return_value = test_data
372 kwargs = {'clear_session': False, 'repo': 'test_repo'}
362 kwargs = {'clear_session': False, 'repo': 'test_repo'}
373 result = utils.make_db_config(**kwargs)
363 result = utils.make_db_config(**kwargs)
374 config_mock.assert_called_once_with(**kwargs)
364 config_mock.assert_called_once_with(**kwargs)
375 for section, option, expected_value in test_data:
365 for section, option, expected_value in test_data:
376 value = result.get(section, option)
366 value = result.get(section, option)
377 assert value == expected_value
367 assert value == expected_value
378
368
379
369
380 class TestConfigDataFromDb(object):
370 class TestConfigDataFromDb(object):
381 def test_config_data_from_db_returns_active_settings(self):
371 def test_config_data_from_db_returns_active_settings(self):
382 test_data = [
372 test_data = [
383 UiSetting('section1', 'option1', 'value1', True),
373 UiSetting('section1', 'option1', 'value1', True),
384 UiSetting('section2', 'option2', 'value2', True),
374 UiSetting('section2', 'option2', 'value2', True),
385 UiSetting('section3', 'option3', 'value3', False),
375 UiSetting('section3', 'option3', 'value3', False),
386 ]
376 ]
387 repo_name = 'test_repo'
377 repo_name = 'test_repo'
388
378
389 model_patch = mock.patch.object(settings, 'VcsSettingsModel')
379 model_patch = mock.patch.object(settings, 'VcsSettingsModel')
390 hooks_patch = mock.patch.object(
380 hooks_patch = mock.patch.object(
391 utils, 'get_enabled_hook_classes',
381 utils, 'get_enabled_hook_classes',
392 return_value=['pull', 'push', 'repo_size'])
382 return_value=['pull', 'push', 'repo_size'])
393 with model_patch as model_mock, hooks_patch:
383 with model_patch as model_mock, hooks_patch:
394 instance_mock = mock.Mock()
384 instance_mock = mock.Mock()
395 model_mock.return_value = instance_mock
385 model_mock.return_value = instance_mock
396 instance_mock.get_ui_settings.return_value = test_data
386 instance_mock.get_ui_settings.return_value = test_data
397 result = utils.config_data_from_db(
387 result = utils.config_data_from_db(
398 clear_session=False, repo=repo_name)
388 clear_session=False, repo=repo_name)
399
389
400 self._assert_repo_name_passed(model_mock, repo_name)
390 self._assert_repo_name_passed(model_mock, repo_name)
401
391
402 expected_result = [
392 expected_result = [
403 ('section1', 'option1', 'value1'),
393 ('section1', 'option1', 'value1'),
404 ('section2', 'option2', 'value2'),
394 ('section2', 'option2', 'value2'),
405 ]
395 ]
406 assert result == expected_result
396 assert result == expected_result
407
397
408 def _assert_repo_name_passed(self, model_mock, repo_name):
398 def _assert_repo_name_passed(self, model_mock, repo_name):
409 assert model_mock.call_count == 1
399 assert model_mock.call_count == 1
410 call_args, call_kwargs = model_mock.call_args
400 call_args, call_kwargs = model_mock.call_args
411 assert call_kwargs['repo'] == repo_name
401 assert call_kwargs['repo'] == repo_name
412
402
413
403
414 class TestIsDirWritable(object):
404 class TestIsDirWritable(object):
415 def test_returns_false_when_not_writable(self):
405 def test_returns_false_when_not_writable(self):
416 with mock.patch('__builtin__.open', side_effect=OSError):
406 with mock.patch('__builtin__.open', side_effect=OSError):
417 assert not utils._is_dir_writable('/stub-path')
407 assert not utils._is_dir_writable('/stub-path')
418
408
419 def test_returns_true_when_writable(self, tmpdir):
409 def test_returns_true_when_writable(self, tmpdir):
420 assert utils._is_dir_writable(str(tmpdir))
410 assert utils._is_dir_writable(str(tmpdir))
421
411
422 def test_is_safe_against_race_conditions(self, tmpdir):
412 def test_is_safe_against_race_conditions(self, tmpdir):
423 workers = multiprocessing.Pool()
413 workers = multiprocessing.Pool()
424 directories = [str(tmpdir)] * 10
414 directories = [str(tmpdir)] * 10
425 workers.map(utils._is_dir_writable, directories)
415 workers.map(utils._is_dir_writable, directories)
426
416
427
417
428 class TestGetEnabledHooks(object):
418 class TestGetEnabledHooks(object):
429 def test_only_active_hooks_are_enabled(self):
419 def test_only_active_hooks_are_enabled(self):
430 ui_settings = [
420 ui_settings = [
431 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
421 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
432 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
422 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
433 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False)
423 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', False)
434 ]
424 ]
435 result = utils.get_enabled_hook_classes(ui_settings)
425 result = utils.get_enabled_hook_classes(ui_settings)
436 assert result == ['push', 'repo_size']
426 assert result == ['push', 'repo_size']
437
427
438 def test_all_hooks_are_enabled(self):
428 def test_all_hooks_are_enabled(self):
439 ui_settings = [
429 ui_settings = [
440 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
430 UiSetting('hooks', db.RhodeCodeUi.HOOK_PUSH, 'value', True),
441 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
431 UiSetting('hooks', db.RhodeCodeUi.HOOK_REPO_SIZE, 'value', True),
442 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True)
432 UiSetting('hooks', db.RhodeCodeUi.HOOK_PULL, 'value', True)
443 ]
433 ]
444 result = utils.get_enabled_hook_classes(ui_settings)
434 result = utils.get_enabled_hook_classes(ui_settings)
445 assert result == ['push', 'repo_size', 'pull']
435 assert result == ['push', 'repo_size', 'pull']
446
436
447 def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
437 def test_no_enabled_hooks_when_no_hook_settings_are_found(self):
448 ui_settings = []
438 ui_settings = []
449 result = utils.get_enabled_hook_classes(ui_settings)
439 result = utils.get_enabled_hook_classes(ui_settings)
450 assert result == []
440 assert result == []
451
441
452
442
453 def test_obfuscate_url_pw():
443 def test_obfuscate_url_pw():
454 from rhodecode.lib.utils2 import obfuscate_url_pw
444 from rhodecode.lib.utils2 import obfuscate_url_pw
455 engine = u'/home/repos/malmö'
445 engine = u'/home/repos/malmö'
456 assert obfuscate_url_pw(engine) No newline at end of file
446 assert obfuscate_url_pw(engine)
@@ -1,171 +1,172 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
22 import mock
23 import pytest
21 import tempfile
24 import tempfile
22
25
23 import mock
24 import pytest
25
26
26 from rhodecode.lib.exceptions import AttachedForksError
27 from rhodecode.lib.exceptions import AttachedForksError
27 from rhodecode.lib.utils import make_db_config
28 from rhodecode.lib.utils import make_db_config
28 from rhodecode.model.db import Repository
29 from rhodecode.model.db import Repository
29 from rhodecode.model.meta import Session
30 from rhodecode.model.meta import Session
30 from rhodecode.model.repo import RepoModel
31 from rhodecode.model.repo import RepoModel
31 from rhodecode.model.scm import ScmModel
32 from rhodecode.model.scm import ScmModel
32
33
33
34
34 class TestRepoModel(object):
35 class TestRepoModel(object):
35
36
36 def test_remove_repo(self, backend):
37 def test_remove_repo(self, backend):
37 repo = backend.create_repo()
38 repo = backend.create_repo()
38 Session().commit()
39 Session().commit()
39 RepoModel().delete(repo=repo)
40 RepoModel().delete(repo=repo)
40 Session().commit()
41 Session().commit()
41
42
42 repos = ScmModel().repo_scan()
43 repos = ScmModel().repo_scan()
43
44
44 assert Repository.get_by_repo_name(repo_name=backend.repo_name) is None
45 assert Repository.get_by_repo_name(repo_name=backend.repo_name) is None
45 assert repo.repo_name not in repos
46 assert repo.repo_name not in repos
46
47
47 def test_remove_repo_raises_exc_when_attached_forks(self, backend):
48 def test_remove_repo_raises_exc_when_attached_forks(self, backend):
48 repo = backend.create_repo()
49 repo = backend.create_repo()
49 Session().commit()
50 Session().commit()
50 backend.create_fork()
51 backend.create_fork()
51 Session().commit()
52 Session().commit()
52
53
53 with pytest.raises(AttachedForksError):
54 with pytest.raises(AttachedForksError):
54 RepoModel().delete(repo=repo)
55 RepoModel().delete(repo=repo)
55
56
56 def test_remove_repo_delete_forks(self, backend):
57 def test_remove_repo_delete_forks(self, backend):
57 repo = backend.create_repo()
58 repo = backend.create_repo()
58 Session().commit()
59 Session().commit()
59
60
60 fork = backend.create_fork()
61 fork = backend.create_fork()
61 Session().commit()
62 Session().commit()
62
63
63 fork_of_fork = backend.create_fork()
64 fork_of_fork = backend.create_fork()
64 Session().commit()
65 Session().commit()
65
66
66 RepoModel().delete(repo=repo, forks='delete')
67 RepoModel().delete(repo=repo, forks='delete')
67 Session().commit()
68 Session().commit()
68
69
69 assert Repository.get_by_repo_name(repo_name=repo.repo_name) is None
70 assert Repository.get_by_repo_name(repo_name=repo.repo_name) is None
70 assert Repository.get_by_repo_name(repo_name=fork.repo_name) is None
71 assert Repository.get_by_repo_name(repo_name=fork.repo_name) is None
71 assert (
72 assert (
72 Repository.get_by_repo_name(repo_name=fork_of_fork.repo_name)
73 Repository.get_by_repo_name(repo_name=fork_of_fork.repo_name)
73 is None)
74 is None)
74
75
75 def test_remove_repo_detach_forks(self, backend):
76 def test_remove_repo_detach_forks(self, backend):
76 repo = backend.create_repo()
77 repo = backend.create_repo()
77 Session().commit()
78 Session().commit()
78
79
79 fork = backend.create_fork()
80 fork = backend.create_fork()
80 Session().commit()
81 Session().commit()
81
82
82 fork_of_fork = backend.create_fork()
83 fork_of_fork = backend.create_fork()
83 Session().commit()
84 Session().commit()
84
85
85 RepoModel().delete(repo=repo, forks='detach')
86 RepoModel().delete(repo=repo, forks='detach')
86 Session().commit()
87 Session().commit()
87
88
88 assert Repository.get_by_repo_name(repo_name=repo.repo_name) is None
89 assert Repository.get_by_repo_name(repo_name=repo.repo_name) is None
89 assert (
90 assert (
90 Repository.get_by_repo_name(repo_name=fork.repo_name) is not None)
91 Repository.get_by_repo_name(repo_name=fork.repo_name) is not None)
91 assert (
92 assert (
92 Repository.get_by_repo_name(repo_name=fork_of_fork.repo_name)
93 Repository.get_by_repo_name(repo_name=fork_of_fork.repo_name)
93 is not None)
94 is not None)
94
95
95 @pytest.mark.parametrize("filename, expected", [
96 @pytest.mark.parametrize("filename, expected", [
96 ("README", True),
97 ("README", True),
97 ("README.rst", False),
98 ("README.rst", False),
98 ])
99 ])
99 def test_filenode_is_link(self, vcsbackend, filename, expected):
100 def test_filenode_is_link(self, vcsbackend, filename, expected):
100 repo = vcsbackend.repo
101 repo = vcsbackend.repo
101 assert repo.get_commit().is_link(filename) is expected
102 assert repo.get_commit().is_link(filename) is expected
102
103
103 def test_get_commit(self, backend):
104 def test_get_commit(self, backend):
104 backend.repo.get_commit()
105 backend.repo.get_commit()
105
106
106 def test_get_changeset_is_deprecated(self, backend):
107 def test_get_changeset_is_deprecated(self, backend):
107 repo = backend.repo
108 repo = backend.repo
108 pytest.deprecated_call(repo.get_changeset)
109 pytest.deprecated_call(repo.get_changeset)
109
110
110 def test_clone_url_encrypted_value(self, backend):
111 def test_clone_url_encrypted_value(self, backend):
111 repo = backend.create_repo()
112 repo = backend.create_repo()
112 Session().commit()
113 Session().commit()
113
114
114 repo.clone_url = 'https://marcink:qweqwe@code.rhodecode.com'
115 repo.clone_url = 'https://marcink:qweqwe@code.rhodecode.com'
115 Session().add(repo)
116 Session().add(repo)
116 Session().commit()
117 Session().commit()
117
118
118 assert repo.clone_url == 'https://marcink:qweqwe@code.rhodecode.com'
119 assert repo.clone_url == 'https://marcink:qweqwe@code.rhodecode.com'
119
120
120 @pytest.mark.backends("git", "svn")
121 @pytest.mark.backends("git", "svn")
121 def test_create_filesystem_repo_installs_hooks(self, tmpdir, backend):
122 def test_create_filesystem_repo_installs_hooks(self, tmpdir, backend):
122 hook_methods = {
123 'git': 'install_git_hook',
124 'svn': 'install_svn_hooks'
125 }
126 repo = backend.create_repo()
123 repo = backend.create_repo()
127 repo_name = repo.repo_name
124 repo_name = repo.repo_name
128 model = RepoModel()
125 model = RepoModel()
129 repo_location = tempfile.mkdtemp()
126 repo_location = tempfile.mkdtemp()
130 model.repos_path = repo_location
127 model.repos_path = repo_location
131 method = hook_methods[backend.alias]
128 repo = model._create_filesystem_repo(
132 with mock.patch.object(ScmModel, method) as hooks_mock:
129 repo_name, backend.alias, repo_group='', clone_uri=None)
133 model._create_filesystem_repo(
130
134 repo_name, backend.alias, repo_group='', clone_uri=None)
131 hooks = {
135 assert hooks_mock.call_count == 1
132 'svn': ('pre-commit', 'post-commit'),
136 hook_args, hook_kwargs = hooks_mock.call_args
133 'git': ('pre-receive', 'post-receive'),
137 assert hook_args[0].name == repo_name
134 }
135 for hook in hooks[backend.alias]:
136 with open(os.path.join(repo.path, 'hooks', hook)) as f:
137 data = f.read()
138 assert 'RC_HOOK_VER' in data
138
139
139 @pytest.mark.parametrize("use_global_config, repo_name_passed", [
140 @pytest.mark.parametrize("use_global_config, repo_name_passed", [
140 (True, False),
141 (True, False),
141 (False, True)
142 (False, True)
142 ])
143 ])
143 def test_per_repo_config_is_generated_during_filesystem_repo_creation(
144 def test_per_repo_config_is_generated_during_filesystem_repo_creation(
144 self, tmpdir, backend, use_global_config, repo_name_passed):
145 self, tmpdir, backend, use_global_config, repo_name_passed):
145 repo_name = 'test-{}-repo-{}'.format(backend.alias, use_global_config)
146 repo_name = 'test-{}-repo-{}'.format(backend.alias, use_global_config)
146 config = make_db_config()
147 config = make_db_config()
147 model = RepoModel()
148 model = RepoModel()
148 with mock.patch('rhodecode.model.repo.make_db_config') as config_mock:
149 with mock.patch('rhodecode.model.repo.make_db_config') as config_mock:
149 config_mock.return_value = config
150 config_mock.return_value = config
150 model._create_filesystem_repo(
151 model._create_filesystem_repo(
151 repo_name, backend.alias, repo_group='', clone_uri=None,
152 repo_name, backend.alias, repo_group='', clone_uri=None,
152 use_global_config=use_global_config)
153 use_global_config=use_global_config)
153 expected_repo_name = repo_name if repo_name_passed else None
154 expected_repo_name = repo_name if repo_name_passed else None
154 expected_call = mock.call(clear_session=False, repo=expected_repo_name)
155 expected_call = mock.call(clear_session=False, repo=expected_repo_name)
155 assert expected_call in config_mock.call_args_list
156 assert expected_call in config_mock.call_args_list
156
157
157 def test_update_commit_cache_with_config(serf, backend):
158 def test_update_commit_cache_with_config(serf, backend):
158 repo = backend.create_repo()
159 repo = backend.create_repo()
159 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm:
160 with mock.patch('rhodecode.model.db.Repository.scm_instance') as scm:
160 scm_instance = mock.Mock()
161 scm_instance = mock.Mock()
161 scm_instance.get_commit.return_value = {
162 scm_instance.get_commit.return_value = {
162 'raw_id': 40*'0',
163 'raw_id': 40*'0',
163 'revision': 1
164 'revision': 1
164 }
165 }
165 scm.return_value = scm_instance
166 scm.return_value = scm_instance
166 repo.update_commit_cache()
167 repo.update_commit_cache()
167 scm.assert_called_with(cache=False, config=None)
168 scm.assert_called_with(cache=False, config=None)
168 config = {'test': 'config'}
169 config = {'test': 'config'}
169 repo.update_commit_cache(config=config)
170 repo.update_commit_cache(config=config)
170 scm.assert_called_with(
171 scm.assert_called_with(
171 cache=False, config=config)
172 cache=False, config=config)
@@ -1,336 +1,196 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2018 RhodeCode GmbH
3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import stat
22 import stat
23 import sys
23 import sys
24
24
25 import pytest
25 import pytest
26 from mock import Mock, patch, DEFAULT
26 from mock import Mock, patch, DEFAULT
27
27
28 import rhodecode
28 import rhodecode
29 from rhodecode.model import db, scm
29 from rhodecode.model import db, scm
30 from rhodecode.tests import no_newline_id_generator
30 from rhodecode.tests import no_newline_id_generator
31
31
32
32
33 def test_scm_instance_config(backend):
33 def test_scm_instance_config(backend):
34 repo = backend.create_repo()
34 repo = backend.create_repo()
35 with patch.multiple('rhodecode.model.db.Repository',
35 with patch.multiple('rhodecode.model.db.Repository',
36 _get_instance=DEFAULT,
36 _get_instance=DEFAULT,
37 _get_instance_cached=DEFAULT) as mocks:
37 _get_instance_cached=DEFAULT) as mocks:
38 repo.scm_instance()
38 repo.scm_instance()
39 mocks['_get_instance'].assert_called_with(
39 mocks['_get_instance'].assert_called_with(
40 config=None, cache=False)
40 config=None, cache=False)
41
41
42 config = {'some': 'value'}
42 config = {'some': 'value'}
43 repo.scm_instance(config=config)
43 repo.scm_instance(config=config)
44 mocks['_get_instance'].assert_called_with(
44 mocks['_get_instance'].assert_called_with(
45 config=config, cache=False)
45 config=config, cache=False)
46
46
47 with patch.dict(rhodecode.CONFIG, {'vcs_full_cache': 'true'}):
47 with patch.dict(rhodecode.CONFIG, {'vcs_full_cache': 'true'}):
48 repo.scm_instance(config=config)
48 repo.scm_instance(config=config)
49 mocks['_get_instance_cached'].assert_called()
49 mocks['_get_instance_cached'].assert_called()
50
50
51
51
52 def test__get_instance_config(backend):
52 def test__get_instance_config(backend):
53 repo = backend.create_repo()
53 repo = backend.create_repo()
54 vcs_class = Mock()
54 vcs_class = Mock()
55 with patch.multiple('rhodecode.lib.vcs.backends',
55 with patch.multiple('rhodecode.lib.vcs.backends',
56 get_scm=DEFAULT,
56 get_scm=DEFAULT,
57 get_backend=DEFAULT) as mocks:
57 get_backend=DEFAULT) as mocks:
58 mocks['get_scm'].return_value = backend.alias
58 mocks['get_scm'].return_value = backend.alias
59 mocks['get_backend'].return_value = vcs_class
59 mocks['get_backend'].return_value = vcs_class
60 with patch('rhodecode.model.db.Repository._config') as config_mock:
60 with patch('rhodecode.model.db.Repository._config') as config_mock:
61 repo._get_instance()
61 repo._get_instance()
62 vcs_class.assert_called_with(
62 vcs_class.assert_called_with(
63 repo_path=repo.repo_full_path, config=config_mock,
63 repo_path=repo.repo_full_path, config=config_mock,
64 create=False, with_wire={'cache': True})
64 create=False, with_wire={'cache': True})
65
65
66 new_config = {'override': 'old_config'}
66 new_config = {'override': 'old_config'}
67 repo._get_instance(config=new_config)
67 repo._get_instance(config=new_config)
68 vcs_class.assert_called_with(
68 vcs_class.assert_called_with(
69 repo_path=repo.repo_full_path, config=new_config, create=False,
69 repo_path=repo.repo_full_path, config=new_config, create=False,
70 with_wire={'cache': True})
70 with_wire={'cache': True})
71
71
72
72
73 def test_mark_for_invalidation_config(backend):
73 def test_mark_for_invalidation_config(backend):
74 repo = backend.create_repo()
74 repo = backend.create_repo()
75 with patch('rhodecode.model.db.Repository.update_commit_cache') as _mock:
75 with patch('rhodecode.model.db.Repository.update_commit_cache') as _mock:
76 scm.ScmModel().mark_for_invalidation(repo.repo_name)
76 scm.ScmModel().mark_for_invalidation(repo.repo_name)
77 _, kwargs = _mock.call_args
77 _, kwargs = _mock.call_args
78 assert kwargs['config'].__dict__ == repo._config.__dict__
78 assert kwargs['config'].__dict__ == repo._config.__dict__
79
79
80
80
81 def test_mark_for_invalidation_with_delete_updates_last_commit(backend):
81 def test_mark_for_invalidation_with_delete_updates_last_commit(backend):
82 commits = [{'message': 'A'}, {'message': 'B'}]
82 commits = [{'message': 'A'}, {'message': 'B'}]
83 repo = backend.create_repo(commits=commits)
83 repo = backend.create_repo(commits=commits)
84 scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
84 scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
85 assert repo.changeset_cache['revision'] == 1
85 assert repo.changeset_cache['revision'] == 1
86
86
87
87
88 def test_mark_for_invalidation_with_delete_updates_last_commit_empty(backend):
88 def test_mark_for_invalidation_with_delete_updates_last_commit_empty(backend):
89 repo = backend.create_repo()
89 repo = backend.create_repo()
90 scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
90 scm.ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
91 assert repo.changeset_cache['revision'] == -1
91 assert repo.changeset_cache['revision'] == -1
92
92
93
93
94 def test_strip_with_multiple_heads(backend_hg):
94 def test_strip_with_multiple_heads(backend_hg):
95 commits = [
95 commits = [
96 {'message': 'A'},
96 {'message': 'A'},
97 {'message': 'a'},
97 {'message': 'a'},
98 {'message': 'b'},
98 {'message': 'b'},
99 {'message': 'B', 'parents': ['A']},
99 {'message': 'B', 'parents': ['A']},
100 {'message': 'a1'},
100 {'message': 'a1'},
101 ]
101 ]
102 repo = backend_hg.create_repo(commits=commits)
102 repo = backend_hg.create_repo(commits=commits)
103 commit_ids = backend_hg.commit_ids
103 commit_ids = backend_hg.commit_ids
104
104
105 model = scm.ScmModel()
105 model = scm.ScmModel()
106 model.strip(repo, commit_ids['b'], branch=None)
106 model.strip(repo, commit_ids['b'], branch=None)
107
107
108 vcs_repo = repo.scm_instance()
108 vcs_repo = repo.scm_instance()
109 rest_commit_ids = [c.raw_id for c in vcs_repo.get_changesets()]
109 rest_commit_ids = [c.raw_id for c in vcs_repo.get_changesets()]
110 assert len(rest_commit_ids) == 4
110 assert len(rest_commit_ids) == 4
111 assert commit_ids['b'] not in rest_commit_ids
111 assert commit_ids['b'] not in rest_commit_ids
112
112
113
113
114 def test_strip_with_single_heads(backend_hg):
114 def test_strip_with_single_heads(backend_hg):
115 commits = [
115 commits = [
116 {'message': 'A'},
116 {'message': 'A'},
117 {'message': 'a'},
117 {'message': 'a'},
118 {'message': 'b'},
118 {'message': 'b'},
119 ]
119 ]
120 repo = backend_hg.create_repo(commits=commits)
120 repo = backend_hg.create_repo(commits=commits)
121 commit_ids = backend_hg.commit_ids
121 commit_ids = backend_hg.commit_ids
122
122
123 model = scm.ScmModel()
123 model = scm.ScmModel()
124 model.strip(repo, commit_ids['b'], branch=None)
124 model.strip(repo, commit_ids['b'], branch=None)
125
125
126 vcs_repo = repo.scm_instance()
126 vcs_repo = repo.scm_instance()
127 rest_commit_ids = [c.raw_id for c in vcs_repo.get_changesets()]
127 rest_commit_ids = [c.raw_id for c in vcs_repo.get_changesets()]
128 assert len(rest_commit_ids) == 2
128 assert len(rest_commit_ids) == 2
129 assert commit_ids['b'] not in rest_commit_ids
129 assert commit_ids['b'] not in rest_commit_ids
130
130
131
131
132 def test_get_nodes_returns_unicode_flat(backend_random):
132 def test_get_nodes_returns_unicode_flat(backend_random):
133 repo = backend_random.repo
133 repo = backend_random.repo
134 directories, files = scm.ScmModel().get_nodes(
134 directories, files = scm.ScmModel().get_nodes(
135 repo.repo_name, repo.get_commit(commit_idx=0).raw_id,
135 repo.repo_name, repo.get_commit(commit_idx=0).raw_id,
136 flat=True)
136 flat=True)
137 assert_contains_only_unicode(directories)
137 assert_contains_only_unicode(directories)
138 assert_contains_only_unicode(files)
138 assert_contains_only_unicode(files)
139
139
140
140
141 def test_get_nodes_returns_unicode_non_flat(backend_random):
141 def test_get_nodes_returns_unicode_non_flat(backend_random):
142 repo = backend_random.repo
142 repo = backend_random.repo
143 directories, files = scm.ScmModel().get_nodes(
143 directories, files = scm.ScmModel().get_nodes(
144 repo.repo_name, repo.get_commit(commit_idx=0).raw_id,
144 repo.repo_name, repo.get_commit(commit_idx=0).raw_id,
145 flat=False)
145 flat=False)
146 # johbo: Checking only the names for now, since that is the critical
146 # johbo: Checking only the names for now, since that is the critical
147 # part.
147 # part.
148 assert_contains_only_unicode([d['name'] for d in directories])
148 assert_contains_only_unicode([d['name'] for d in directories])
149 assert_contains_only_unicode([f['name'] for f in files])
149 assert_contains_only_unicode([f['name'] for f in files])
150
150
151
151
152 def test_get_nodes_max_file_bytes(backend_random):
152 def test_get_nodes_max_file_bytes(backend_random):
153 repo = backend_random.repo
153 repo = backend_random.repo
154 max_file_bytes = 10
154 max_file_bytes = 10
155 directories, files = scm.ScmModel().get_nodes(
155 directories, files = scm.ScmModel().get_nodes(
156 repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
156 repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
157 extended_info=True, flat=False)
157 extended_info=True, flat=False)
158 assert any(file['content'] and len(file['content']) > max_file_bytes
158 assert any(file['content'] and len(file['content']) > max_file_bytes
159 for file in files)
159 for file in files)
160
160
161 directories, files = scm.ScmModel().get_nodes(
161 directories, files = scm.ScmModel().get_nodes(
162 repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
162 repo.repo_name, repo.get_commit(commit_idx=0).raw_id, content=True,
163 extended_info=True, flat=False, max_file_bytes=max_file_bytes)
163 extended_info=True, flat=False, max_file_bytes=max_file_bytes)
164 assert all(
164 assert all(
165 file['content'] is None if file['size'] > max_file_bytes else True
165 file['content'] is None if file['size'] > max_file_bytes else True
166 for file in files)
166 for file in files)
167
167
168
168
169 def assert_contains_only_unicode(structure):
169 def assert_contains_only_unicode(structure):
170 assert structure
170 assert structure
171 for value in structure:
171 for value in structure:
172 assert isinstance(value, unicode)
172 assert isinstance(value, unicode)
173
173
174
174
175 @pytest.mark.backends("hg", "git")
175 @pytest.mark.backends("hg", "git")
176 def test_get_non_unicode_reference(backend):
176 def test_get_non_unicode_reference(backend):
177 model = scm.ScmModel()
177 model = scm.ScmModel()
178 non_unicode_list = ["Adını".decode("cp1254")]
178 non_unicode_list = ["Adını".decode("cp1254")]
179
179
180 def scm_instance():
180 def scm_instance():
181 return Mock(
181 return Mock(
182 branches=non_unicode_list, bookmarks=non_unicode_list,
182 branches=non_unicode_list, bookmarks=non_unicode_list,
183 tags=non_unicode_list, alias=backend.alias)
183 tags=non_unicode_list, alias=backend.alias)
184
184
185 repo = Mock(__class__=db.Repository, scm_instance=scm_instance)
185 repo = Mock(__class__=db.Repository, scm_instance=scm_instance)
186 choices, __ = model.get_repo_landing_revs(translator=lambda s: s, repo=repo)
186 choices, __ = model.get_repo_landing_revs(translator=lambda s: s, repo=repo)
187 if backend.alias == 'hg':
187 if backend.alias == 'hg':
188 valid_choices = [
188 valid_choices = [
189 'rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1',
189 'rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1',
190 u'book:Ad\xc4\xb1n\xc4\xb1', u'tag:Ad\xc4\xb1n\xc4\xb1']
190 u'book:Ad\xc4\xb1n\xc4\xb1', u'tag:Ad\xc4\xb1n\xc4\xb1']
191 else:
191 else:
192 valid_choices = [
192 valid_choices = [
193 'rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1',
193 'rev:tip', u'branch:Ad\xc4\xb1n\xc4\xb1',
194 u'tag:Ad\xc4\xb1n\xc4\xb1']
194 u'tag:Ad\xc4\xb1n\xc4\xb1']
195
195
196 assert choices == valid_choices
196 assert choices == valid_choices
197
198
199 class TestInstallSvnHooks(object):
200 HOOK_FILES = ('pre-commit', 'post-commit')
201
202 def test_new_hooks_are_created(self, backend_svn):
203 model = scm.ScmModel()
204 repo = backend_svn.create_repo()
205 vcs_repo = repo.scm_instance()
206 model.install_svn_hooks(vcs_repo)
207
208 hooks_path = os.path.join(vcs_repo.path, 'hooks')
209 assert os.path.isdir(hooks_path)
210 for file_name in self.HOOK_FILES:
211 file_path = os.path.join(hooks_path, file_name)
212 self._check_hook_file_mode(file_path)
213 self._check_hook_file_content(file_path)
214
215 def test_rc_hooks_are_replaced(self, backend_svn):
216 model = scm.ScmModel()
217 repo = backend_svn.create_repo()
218 vcs_repo = repo.scm_instance()
219 hooks_path = os.path.join(vcs_repo.path, 'hooks')
220 file_paths = [os.path.join(hooks_path, f) for f in self.HOOK_FILES]
221
222 for file_path in file_paths:
223 self._create_fake_hook(
224 file_path, content="RC_HOOK_VER = 'abcde'\n")
225
226 model.install_svn_hooks(vcs_repo)
227
228 for file_path in file_paths:
229 self._check_hook_file_content(file_path)
230
231 def test_non_rc_hooks_are_not_replaced_without_force_create(
232 self, backend_svn):
233 model = scm.ScmModel()
234 repo = backend_svn.create_repo()
235 vcs_repo = repo.scm_instance()
236 hooks_path = os.path.join(vcs_repo.path, 'hooks')
237 file_paths = [os.path.join(hooks_path, f) for f in self.HOOK_FILES]
238 non_rc_content = "exit 0\n"
239
240 for file_path in file_paths:
241 self._create_fake_hook(file_path, content=non_rc_content)
242
243 model.install_svn_hooks(vcs_repo)
244
245 for file_path in file_paths:
246 with open(file_path, 'rt') as hook_file:
247 content = hook_file.read()
248 assert content == non_rc_content
249
250 def test_non_rc_hooks_are_replaced_with_force_create(self, backend_svn):
251 model = scm.ScmModel()
252 repo = backend_svn.create_repo()
253 vcs_repo = repo.scm_instance()
254 hooks_path = os.path.join(vcs_repo.path, 'hooks')
255 file_paths = [os.path.join(hooks_path, f) for f in self.HOOK_FILES]
256 non_rc_content = "exit 0\n"
257
258 for file_path in file_paths:
259 self._create_fake_hook(file_path, content=non_rc_content)
260
261 model.install_svn_hooks(vcs_repo, force_create=True)
262
263 for file_path in file_paths:
264 self._check_hook_file_content(file_path)
265
266 def _check_hook_file_mode(self, file_path):
267 assert os.path.exists(file_path)
268 stat_info = os.stat(file_path)
269
270 file_mode = stat.S_IMODE(stat_info.st_mode)
271 expected_mode = int('755', 8)
272 assert expected_mode == file_mode
273
274 def _check_hook_file_content(self, file_path):
275 with open(file_path, 'rt') as hook_file:
276 content = hook_file.read()
277
278 expected_env = '#!{}'.format(sys.executable)
279 expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(
280 rhodecode.__version__)
281 assert content.strip().startswith(expected_env)
282 assert expected_rc_version in content
283
284 def _create_fake_hook(self, file_path, content):
285 with open(file_path, 'w') as hook_file:
286 hook_file.write(content)
287
288
289 class TestCheckRhodecodeHook(object):
290
291 @patch('os.path.exists', Mock(return_value=False))
292 def test_returns_true_when_no_hook_found(self):
293 result = scm._check_rhodecode_hook('/tmp/fake_hook_file.py')
294 assert result
295
296 @pytest.mark.parametrize("file_content, expected_result", [
297 ("RC_HOOK_VER = '3.3.3'\n", True),
298 ("RC_HOOK = '3.3.3'\n", False),
299 ], ids=no_newline_id_generator)
300 @patch('os.path.exists', Mock(return_value=True))
301 def test_signatures(self, file_content, expected_result):
302 hook_content_patcher = patch.object(
303 scm, '_read_hook', return_value=file_content)
304 with hook_content_patcher:
305 result = scm._check_rhodecode_hook('/tmp/fake_hook_file.py')
306
307 assert result is expected_result
308
309
310 class TestInstallHooks(object):
311 def test_hooks_are_installed_for_git_repo(self, backend_git):
312 repo = backend_git.create_repo()
313 model = scm.ScmModel()
314 scm_repo = repo.scm_instance()
315 with patch.object(model, 'install_git_hook') as hooks_mock:
316 model.install_hooks(scm_repo, repo_type='git')
317 hooks_mock.assert_called_once_with(scm_repo)
318
319 def test_hooks_are_installed_for_svn_repo(self, backend_svn):
320 repo = backend_svn.create_repo()
321 scm_repo = repo.scm_instance()
322 model = scm.ScmModel()
323 with patch.object(scm.ScmModel, 'install_svn_hooks') as hooks_mock:
324 model.install_hooks(scm_repo, repo_type='svn')
325 hooks_mock.assert_called_once_with(scm_repo)
326
327 @pytest.mark.parametrize('hook_method', [
328 'install_svn_hooks',
329 'install_git_hook'])
330 def test_mercurial_doesnt_trigger_hooks(self, backend_hg, hook_method):
331 repo = backend_hg.create_repo()
332 scm_repo = repo.scm_instance()
333 model = scm.ScmModel()
334 with patch.object(scm.ScmModel, hook_method) as hooks_mock:
335 model.install_hooks(scm_repo, repo_type='hg')
336 assert hooks_mock.call_count == 0
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
1 NO CONTENT: file was removed
NO CONTENT: file was removed
General Comments 0
You need to be logged in to leave comments. Login now