##// END OF EJS Templates
hooks: pass in store_path into env for hooks.
marcink -
r3094:3f2abfbb default
parent child Browse files
Show More
@@ -1,162 +1,163 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import sys
23 23 import json
24 24 import logging
25 25
26 26 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
27 27 from rhodecode.lib.vcs.conf import settings as vcs_settings
28 28 from rhodecode.model.scm import ScmModel
29 29
30 30 log = logging.getLogger(__name__)
31 31
32 32
class VcsServer(object):
    """
    Base class for SSH-served VCS backends (hg/git/svn). Child classes
    provide the concrete ``backend`` name and ``tunnel`` implementation.
    """
    _path = None  # set executable path for hg/git/svn binary
    backend = None  # set in child classes
    tunnel = None  # subprocess handling tunnel
    write_perms = ['repository.admin', 'repository.write']
    read_perms = ['repository.read', 'repository.admin', 'repository.write']

    def __init__(self, user, user_permissions, config, env):
        self.user = user
        self.user_permissions = user_permissions
        self.config = config
        self.env = env
        self.stdin = sys.stdin
        # filled in by subclasses / callers before run() is invoked
        self.repo_name = None
        self.repo_mode = None
        self.store = ''
        self.ini_path = ''

    def _invalidate_cache(self, repo_name):
        """
        Set's cache for this repository for invalidation on next access

        :param repo_name: full repo name, also a cache key
        """
        ScmModel().mark_for_invalidation(repo_name)

    def has_write_perm(self):
        # True when the user holds write or admin on the current repo
        return self.user_permissions.get(self.repo_name) in (
            'repository.write', 'repository.admin')

    def _check_permissions(self, action):
        """Return 0 when `action` is allowed for the current user, -2 otherwise."""
        permission = self.user_permissions.get(self.repo_name)
        log.debug(
            'permission for %s on %s are: %s',
            self.user, self.repo_name, permission)

        if not permission:
            log.error('user `%s` permissions to repo:%s are empty. Forbidding access.',
                      self.user, self.repo_name)
            return -2

        if action == 'pull' and permission in self.read_perms:
            log.info(
                'READ Permissions for User "%s" detected to repo "%s"!',
                self.user, self.repo_name)
            return 0

        if action != 'pull' and permission in self.write_perms:
            log.info(
                'WRITE+ Permissions for User "%s" detected to repo "%s"!',
                self.user, self.repo_name)
            return 0

        log.error('Cannot properly fetch or verify user `%s` permissions. '
                  'Permissions: %s, vcs action: %s',
                  self.user, permission, action)
        return -2

    def update_environment(self, action, extras=None):
        """Export the RC_SCM_DATA payload (consumed by hooks) into the env."""
        scm_data = {
            'ip': os.environ['SSH_CLIENT'].split()[0],
            'username': self.user.username,
            'user_id': self.user.user_id,
            'action': action,
            'repository': self.repo_name,
            'scm': self.backend,
            'config': self.ini_path,
            'repo_store': self.store,
            'make_lock': None,
            'locked_by': [None, None],
            'server_url': None,
            'user_agent': 'ssh-user-agent',
            'hooks': ['push', 'pull'],
            'hooks_module': 'rhodecode.lib.hooks_daemon',
            'is_shadow_repo': False,
            'detect_force_push': False,
            'check_branch_perms': False,

            'SSH': True,
            'SSH_PERMISSIONS': self.user_permissions.get(self.repo_name),
        }
        scm_data.update(extras or {})
        os.putenv("RC_SCM_DATA", json.dumps(scm_data))

    def get_root_store(self):
        # the root store path, always with a trailing slash
        root_store = self.store
        return root_store if root_store.endswith('/') else root_store + '/'

    def _handle_tunnel(self, extras):
        # pre-auth
        action = 'pull'
        exit_code = self._check_permissions(action)
        if exit_code:
            return exit_code, False

        req = self.env['request']
        extras['server_url'] = req.host_url + req.script_name

        log.debug('Using %s binaries from path %s', self.backend, self._path)
        return self.tunnel.run(extras), action == "push"

    def run(self, tunnel_extras=None):
        """Run the tunnel inside a hooks callback daemon; invalidate caches on exit."""
        extras = {}
        extras.update(tunnel_extras or {})

        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=False)

        with callback_daemon:
            try:
                return self._handle_tunnel(extras)
            finally:
                log.debug('Running cleanup with cache invalidation')
                if self.repo_name:
                    self._invalidate_cache(self.repo_name)
@@ -1,149 +1,152 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import json
22 22 import mock
23 23 import pytest
24 24
25 25 from rhodecode.apps.ssh_support.lib.backends.git import GitServer
26 26 from rhodecode.apps.ssh_support.tests.conftest import dummy_env, dummy_user
27 27
28 28
class GitServerCreator(object):
    # Test factory producing GitServer instances pre-wired with dummy
    # user/config/env data; individual tests override fields via create(**kwargs).
    root = '/tmp/repo/path/'
    git_path = '/usr/local/bin/git'
    config_data = {
        'app:main': {
            'ssh.executable.git': git_path,
            'vcs.hooks.protocol': 'http',
        }
    }
    repo_name = 'test_git'
    repo_mode = 'receive-pack'
    user = dummy_user()

    def __init__(self):
        # config.get(part, key) lookups on the mock are served from config_data
        def config_get(part, key):
            return self.config_data.get(part, {}).get(key)
        self.config_mock = mock.Mock()
        self.config_mock.get = mock.Mock(side_effect=config_get)

    def create(self, **kwargs):
        """Build a GitServer; any default parameter can be overridden via kwargs."""
        parameters = {
            'store': self.root,
            'ini_path': '',
            'user': self.user,
            'repo_name': self.repo_name,
            'repo_mode': self.repo_mode,
            'user_permissions': {
                self.repo_name: 'repository.admin'
            },
            'config': self.config_mock,
            'env': dummy_env()
        }
        parameters.update(kwargs)
        server = GitServer(**parameters)
        return server
64 64
65 65
@pytest.fixture
def git_server(app):
    # fresh factory per test; `app` fixture bootstraps the application context
    return GitServerCreator()
69 69
70 70
class TestGitServer(object):
    # Exercises GitServer: shell command building, permission checks,
    # and the RC_SCM_DATA environment payload passed to hooks.

    def test_command(self, git_server):
        server = git_server.create()
        expected_command = (
            'cd {root}; {git_path} {repo_mode} \'{root}{repo_name}\''.format(
                root=git_server.root, git_path=git_server.git_path,
                repo_mode=git_server.repo_mode, repo_name=git_server.repo_name)
        )
        assert expected_command == server.tunnel.command()

    @pytest.mark.parametrize('permissions, action, code', [
        ({}, 'pull', -2),
        ({'test_git': 'repository.read'}, 'pull', 0),
        ({'test_git': 'repository.read'}, 'push', -2),
        ({'test_git': 'repository.write'}, 'push', 0),
        ({'test_git': 'repository.admin'}, 'push', 0),

    ])
    def test_permission_checks(self, git_server, permissions, action, code):
        # 0 == allowed, -2 == forbidden (see VcsServer._check_permissions)
        server = git_server.create(user_permissions=permissions)
        result = server._check_permissions(action)
        assert result is code

    @pytest.mark.parametrize('permissions, value', [
        ({}, False),
        ({'test_git': 'repository.read'}, False),
        ({'test_git': 'repository.write'}, True),
        ({'test_git': 'repository.admin'}, True),

    ])
    def test_has_write_permissions(self, git_server, permissions, value):
        server = git_server.create(user_permissions=permissions)
        result = server.has_write_perm()
        assert result is value

    def test_run_returns_executes_command(self, git_server):
        server = git_server.create()
        from rhodecode.apps.ssh_support.lib.backends.git import GitTunnelWrapper
        # patch out hook-env creation and the real git command so run()
        # executes a harmless `date` instead
        with mock.patch.object(GitTunnelWrapper, 'create_hooks_env') as _patch:
            _patch.return_value = 0
            with mock.patch.object(GitTunnelWrapper, 'command', return_value='date'):
                exit_code = server.run()

        assert exit_code == (0, False)

    @pytest.mark.parametrize(
        'repo_mode, action', [
            ['receive-pack', 'push'],
            ['upload-pack', 'pull']
        ])
    def test_update_environment(self, git_server, repo_mode, action):
        server = git_server.create(repo_mode=repo_mode)
        store = server.store

        with mock.patch('os.environ', {'SSH_CLIENT': '10.10.10.10 b'}):
            with mock.patch('os.putenv') as putenv_mock:
                server.update_environment(action)

        # full RC_SCM_DATA payload, including the repo_store entry
        expected_data = {
            'username': git_server.user.username,
            'user_id': git_server.user.user_id,
            'scm': 'git',
            'repository': git_server.repo_name,
            'make_lock': None,
            'action': action,
            'ip': '10.10.10.10',
            'locked_by': [None, None],
            'config': '',
            'repo_store': store,
            'server_url': None,
            'hooks': ['push', 'pull'],
            'is_shadow_repo': False,
            'hooks_module': 'rhodecode.lib.hooks_daemon',
            'check_branch_perms': False,
            'detect_force_push': False,
            'user_agent': u'ssh-user-agent',
            'SSH': True,
            'SSH_PERMISSIONS': 'repository.admin',
        }
        args, kwargs = putenv_mock.call_args
        assert json.loads(args[1]) == expected_data
@@ -1,561 +1,567 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 The base Controller API
23 23 Provides the BaseController class for subclassing. And usage in different
24 24 controllers
25 25 """
26 26
27 27 import logging
28 28 import socket
29 29
30 30 import markupsafe
31 31 import ipaddress
32 32
33 33 from paste.auth.basic import AuthBasicAuthenticator
34 34 from paste.httpexceptions import HTTPUnauthorized, HTTPForbidden, get_exception
35 35 from paste.httpheaders import WWW_AUTHENTICATE, AUTHORIZATION
36 36
37 37 import rhodecode
38 38 from rhodecode.authentication.base import VCS_TYPE
39 39 from rhodecode.lib import auth, utils2
40 40 from rhodecode.lib import helpers as h
41 41 from rhodecode.lib.auth import AuthUser, CookieStoreWrapper
42 42 from rhodecode.lib.exceptions import UserCreationError
43 43 from rhodecode.lib.utils import (password_changed, get_enabled_hook_classes)
44 44 from rhodecode.lib.utils2 import (
45 45 str2bool, safe_unicode, AttributeDict, safe_int, sha1, aslist, safe_str)
46 46 from rhodecode.model.db import Repository, User, ChangesetComment
47 47 from rhodecode.model.notification import NotificationModel
48 48 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 def _filter_proxy(ip):
54 54 """
55 55 Passed in IP addresses in HEADERS can be in a special format of multiple
56 56 ips. Those comma separated IPs are passed from various proxies in the
57 57 chain of request processing. The left-most being the original client.
58 58 We only care about the first IP which came from the org. client.
59 59
60 60 :param ip: ip string from headers
61 61 """
62 62 if ',' in ip:
63 63 _ips = ip.split(',')
64 64 _first_ip = _ips[0].strip()
65 65 log.debug('Got multiple IPs %s, using %s', ','.join(_ips), _first_ip)
66 66 return _first_ip
67 67 return ip
68 68
69 69
70 70 def _filter_port(ip):
71 71 """
72 72 Removes a port from ip, there are 4 main cases to handle here.
73 73 - ipv4 eg. 127.0.0.1
74 74 - ipv6 eg. ::1
75 75 - ipv4+port eg. 127.0.0.1:8080
76 76 - ipv6+port eg. [::1]:8080
77 77
78 78 :param ip:
79 79 """
80 80 def is_ipv6(ip_addr):
81 81 if hasattr(socket, 'inet_pton'):
82 82 try:
83 83 socket.inet_pton(socket.AF_INET6, ip_addr)
84 84 except socket.error:
85 85 return False
86 86 else:
87 87 # fallback to ipaddress
88 88 try:
89 89 ipaddress.IPv6Address(safe_unicode(ip_addr))
90 90 except Exception:
91 91 return False
92 92 return True
93 93
94 94 if ':' not in ip: # must be ipv4 pure ip
95 95 return ip
96 96
97 97 if '[' in ip and ']' in ip: # ipv6 with port
98 98 return ip.split(']')[0][1:].lower()
99 99
100 100 # must be ipv6 or ipv4 with port
101 101 if is_ipv6(ip):
102 102 return ip
103 103 else:
104 104 ip, _port = ip.split(':')[:2] # means ipv4+port
105 105 return ip
106 106
107 107
def get_ip_addr(environ):
    """
    Return the client IP for this WSGI environ, preferring proxy headers
    (X-Real-IP, then X-Forwarded-For) over REMOTE_ADDR; strips any port
    and collapses proxy chains to the original client address.
    """
    def _clean(raw):
        return _filter_port(_filter_proxy(raw))

    for header in ('HTTP_X_REAL_IP', 'HTTP_X_FORWARDED_FOR'):
        ip = environ.get(header)
        if ip:
            return _clean(ip)

    return _clean(environ.get('REMOTE_ADDR', '0.0.0.0'))
124 124
125 125
def get_server_ip_addr(environ, log_errors=True):
    """
    Resolve SERVER_NAME to an IP; on lookup failure return the hostname
    unchanged (optionally logging the error).
    """
    hostname = environ.get('SERVER_NAME')
    try:
        return socket.gethostbyname(hostname)
    except Exception as e:
        if log_errors:
            # in some cases this lookup is not possible, and we don't want to
            # make it an exception in logs
            log.exception('Could not retrieve server ip address: %s', e)
        return hostname
136 136
137 137
def get_server_port(environ):
    """Return the ``SERVER_PORT`` value from the WSGI environ (or None)."""
    return environ.get('SERVER_PORT')
140 140
141 141
def get_access_path(environ):
    """
    Return PATH_INFO for the request, preferring the original
    (pre-dispatch) pylons request when present in the environ.
    """
    org_req = environ.get('pylons.original_request')
    if org_req:
        return org_req.environ.get('PATH_INFO')
    return environ.get('PATH_INFO')
148 148
149 149
def get_user_agent(environ):
    """Return the raw ``User-Agent`` header from the WSGI environ (or None)."""
    return environ.get('HTTP_USER_AGENT')
152 152
153 153
def vcs_operation_context(
        environ, repo_name, username, action, scm, check_locking=True,
        is_shadow_repo=False, check_branch_perms=False, detect_force_push=False):
    """
    Generate the context for a vcs operation, e.g. push or pull.

    This context is passed over the layers so that hooks triggered by the
    vcs operation know details like the user, the user's IP address etc.

    :param check_locking: Allows to switch of the computation of the locking
        data. This serves mainly the need of the simplevcs middleware to be
        able to disable this for certain operations.

    """
    # Tri-state value: False: unlock, None: nothing, True: lock
    make_lock = None
    locked_by = [None, None, None]
    is_anonymous = username == User.DEFAULT_USER
    user = User.get_by_username(username)
    if not is_anonymous and check_locking:
        # locking state is only computed for named users
        log.debug('Checking locking on repository "%s"', repo_name)
        repo = Repository.get_by_repo_name(repo_name)
        make_lock, __, locked_by = repo.get_locking_state(
            action, user.user_id)

    user_id = user.user_id
    settings_model = VcsSettingsModel(repo=repo_name)
    ui_settings = settings_model.get_ui_settings()

    # NOTE(marcink): This should be also in sync with
    # rhodecode/apps/ssh_support/lib/backends/base.py:update_environment scm_data
    # the ui setting keyed '/' holds the repository store root path
    store = [x for x in ui_settings if x.key == '/']
    repo_store = ''
    if store:
        repo_store = store[0].value

    scm_data = {
        'ip': get_ip_addr(environ),
        'username': username,
        'user_id': user_id,
        'action': action,
        'repository': repo_name,
        'scm': scm,
        'config': rhodecode.CONFIG['__file__'],
        'repo_store': repo_store,
        'make_lock': make_lock,
        'locked_by': locked_by,
        'server_url': utils2.get_server_url(environ),
        'user_agent': get_user_agent(environ),
        'hooks': get_enabled_hook_classes(ui_settings),
        'is_shadow_repo': is_shadow_repo,
        'detect_force_push': detect_force_push,
        'check_branch_perms': check_branch_perms,
    }
    return scm_data
202 208
203 209
class BasicAuth(AuthBasicAuthenticator):
    """
    HTTP Basic authenticator used for VCS operations.

    Extends paste's AuthBasicAuthenticator with the ability to answer with
    an alternative HTTP status code (``auth_http_code``) after the first
    real authentication attempt has been made.
    """

    def __init__(self, realm, authfunc, registry, auth_http_code=None,
                 initial_call_detection=False, acl_repo_name=None):
        self.realm = realm
        self.initial_call = initial_call_detection
        self.authfunc = authfunc
        self.registry = registry
        self.acl_repo_name = acl_repo_name
        self._rc_auth_http_code = auth_http_code

    def _get_response_from_code(self, http_code):
        # map a configured numeric code to a paste HTTP exception class;
        # fall back to 403 when the code is unknown/invalid
        try:
            return get_exception(safe_int(http_code))
        except Exception:
            log.exception('Failed to fetch response for code %s', http_code)
            return HTTPForbidden

    def get_rc_realm(self):
        # realm string as stored in rhodecode settings
        return safe_str(self.registry.rhodecode_settings.get('rhodecode_realm'))

    def build_authentication(self):
        """Build the 401 (or configured alternative) challenge response."""
        head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
        if self._rc_auth_http_code and not self.initial_call:
            # return alternative HTTP code if alternative http return code
            # is specified in RhodeCode config, but ONLY if it's not the
            # FIRST call
            custom_response_klass = self._get_response_from_code(
                self._rc_auth_http_code)
            return custom_response_klass(headers=head)
        return HTTPUnauthorized(headers=head)

    def authenticate(self, environ):
        """
        Authenticate from the Authorization header; returns a dict with
        username/auth_data on success, otherwise a challenge response.
        """
        authorization = AUTHORIZATION(environ)
        if not authorization:
            return self.build_authentication()
        (authmeth, auth) = authorization.split(' ', 1)
        if 'basic' != authmeth.lower():
            return self.build_authentication()
        auth = auth.strip().decode('base64')
        _parts = auth.split(':', 1)
        if len(_parts) == 2:
            username, password = _parts
            auth_data = self.authfunc(
                username, password, environ, VCS_TYPE,
                registry=self.registry, acl_repo_name=self.acl_repo_name)
            if auth_data:
                return {'username': username, 'auth_data': auth_data}
            if username and password:
                # we mark that we actually executed authentication once, at
                # that point we can use the alternative auth code
                self.initial_call = False

        return self.build_authentication()

    __call__ = authenticate
260 266
261 267
def calculate_version_hash(config):
    # 8-char hash of session secret + version: unique per install/version
    # without leaking the actual version string
    return sha1(
        config.get('beaker.session.secret', '') +
        rhodecode.__version__)[:8]
266 272
267 273
def get_current_lang(request):
    """
    Return the language for the current request.

    Tries the legacy pylons ``translation`` API first; falls back to the
    request's ``_LOCALE_`` attribute, then its ``locale_name``.
    """
    # NOTE(marcink): remove after pyramid move
    try:
        # NOTE(review): `translation` is not imported in this module, so this
        # call raises NameError and always falls through to the pyramid path;
        # kept for the legacy code path -- confirm and drop after pyramid move.
        return translation.get_lang()[0]
    except Exception:
        # was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception (behavior otherwise same).
        pass

    return getattr(request, '_LOCALE_', request.locale_name)
276 282
277 283
def attach_context_attributes(context, request, user_id):
    """
    Attach variables into template context called `c`.

    Populates version info, visual/DB-stored settings, INI-stored settings,
    per-user session attributes and the JS template context, then attaches
    the whole context onto ``request.call_context``.
    """
    config = request.registry.settings


    rc_config = SettingsModel().get_all_settings(cache=True)

    context.rhodecode_version = rhodecode.__version__
    context.rhodecode_edition = config.get('rhodecode.edition')
    # unique secret + version does not leak the version but keep consistency
    context.rhodecode_version_hash = calculate_version_hash(config)

    # Default language set for the incoming request
    context.language = get_current_lang(request)

    # Visual options
    context.visual = AttributeDict({})

    # DB stored Visual Items
    context.visual.show_public_icon = str2bool(
        rc_config.get('rhodecode_show_public_icon'))
    context.visual.show_private_icon = str2bool(
        rc_config.get('rhodecode_show_private_icon'))
    context.visual.stylify_metatags = str2bool(
        rc_config.get('rhodecode_stylify_metatags'))
    context.visual.dashboard_items = safe_int(
        rc_config.get('rhodecode_dashboard_items', 100))
    context.visual.admin_grid_items = safe_int(
        rc_config.get('rhodecode_admin_grid_items', 100))
    context.visual.repository_fields = str2bool(
        rc_config.get('rhodecode_repository_fields'))
    context.visual.show_version = str2bool(
        rc_config.get('rhodecode_show_version'))
    context.visual.use_gravatar = str2bool(
        rc_config.get('rhodecode_use_gravatar'))
    context.visual.gravatar_url = rc_config.get('rhodecode_gravatar_url')
    context.visual.default_renderer = rc_config.get(
        'rhodecode_markup_renderer', 'rst')
    context.visual.comment_types = ChangesetComment.COMMENT_TYPES
    context.visual.rhodecode_support_url = \
        rc_config.get('rhodecode_support_url') or h.route_url('rhodecode_support')

    context.visual.affected_files_cut_off = 60

    context.pre_code = rc_config.get('rhodecode_pre_code')
    context.post_code = rc_config.get('rhodecode_post_code')
    context.rhodecode_name = rc_config.get('rhodecode_title')
    context.default_encodings = aslist(config.get('default_encoding'), sep=',')
    # if we have specified default_encoding in the request, it has more
    # priority
    if request.GET.get('default_encoding'):
        context.default_encodings.insert(0, request.GET.get('default_encoding'))
    context.clone_uri_tmpl = rc_config.get('rhodecode_clone_uri_tmpl')
    context.clone_uri_ssh_tmpl = rc_config.get('rhodecode_clone_uri_ssh_tmpl')

    # INI stored
    context.labs_active = str2bool(
        config.get('labs_settings_active', 'false'))
    context.ssh_enabled = str2bool(
        config.get('ssh.generate_authorized_keyfile', 'false'))

    context.visual.allow_repo_location_change = str2bool(
        config.get('allow_repo_location_change', True))
    context.visual.allow_custom_hooks_settings = str2bool(
        config.get('allow_custom_hooks_settings', True))
    context.debug_style = str2bool(config.get('debug_style', False))

    context.rhodecode_instanceid = config.get('instance_id')

    context.visual.cut_off_limit_diff = safe_int(
        config.get('cut_off_limit_diff'))
    context.visual.cut_off_limit_file = safe_int(
        config.get('cut_off_limit_file'))

    # AppEnlight
    context.appenlight_enabled = str2bool(config.get('appenlight', 'false'))
    context.appenlight_api_public_key = config.get(
        'appenlight.api_public_key', '')
    context.appenlight_server_url = config.get('appenlight.server_url', '')

    # only accept the two known diffmode values from the query string
    diffmode = {
        "unified": "unified",
        "sideside": "sideside"
    }.get(request.GET.get('diffmode'))

    if diffmode and diffmode != request.session.get('rc_user_session_attr.diffmode'):
        request.session['rc_user_session_attr.diffmode'] = diffmode

    # session settings per user
    session_attrs = {
        # defaults
        "clone_url_format": "http",
        "diffmode": "sideside"
    }
    # collect all 'rc_user_session_attr.*' keys, stripped of the prefix
    for k, v in request.session.items():
        pref = 'rc_user_session_attr.'
        if k and k.startswith(pref):
            k = k[len(pref):]
            session_attrs[k] = v

    context.user_session_attrs = session_attrs

    # JS template context
    context.template_context = {
        'repo_name': None,
        'repo_type': None,
        'repo_landing_commit': None,
        'rhodecode_user': {
            'username': None,
            'email': None,
            'notification_status': False
        },
        'session_attrs': session_attrs,
        'visual': {
            'default_renderer': None
        },
        'commit_data': {
            'commit_id': None
        },
        'pull_request_data': {'pull_request_id': None},
        'timeago': {
            'refresh_time': 120 * 1000,
            'cutoff_limit': 1000 * 60 * 60 * 24 * 7
        },
        'pyramid_dispatch': {

        },
        'extra': {'plugins': {}}
    }
    # END CONFIG VARS

    context.csrf_token = auth.get_csrf_token(session=request.session)
    context.backends = rhodecode.BACKENDS.keys()
    context.backends.sort()
    context.unread_notifications = NotificationModel().get_unread_cnt_for_user(user_id)

    # web case
    if hasattr(request, 'user'):
        context.auth_user = request.user
        context.rhodecode_user = request.user

    # api case
    if hasattr(request, 'rpc_user'):
        context.auth_user = request.rpc_user
        context.rhodecode_user = request.rpc_user

    # attach the whole call context to the request
    request.call_context = context
428 434
429 435
def get_auth_user(request):
    """
    Resolve the AuthUser for this request, either from an auth token
    (``auth_token``/``api_key`` GET params) or from the session cookie.
    """
    environ = request.environ
    session = request.session

    ip_addr = get_ip_addr(environ)
    # make sure that we update permissions each time we call controller
    _auth_token = (request.GET.get('auth_token', '') or
                   request.GET.get('api_key', ''))

    if _auth_token:
        # when using API_KEY we assume user exists, and
        # doesn't need auth based on cookies.
        auth_user = AuthUser(api_key=_auth_token, ip_addr=ip_addr)
        authenticated = False
    else:
        cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
        try:
            auth_user = AuthUser(user_id=cookie_store.get('user_id', None),
                                 ip_addr=ip_addr)
        except UserCreationError as e:
            h.flash(e, 'error')
            # container auth or other auth functions that create users
            # on the fly can throw this exception signaling that there's
            # issue with user creation, explanation should be provided
            # in Exception itself. We then create a simple blank
            # AuthUser
            auth_user = AuthUser(ip_addr=ip_addr)

        # in case someone changes a password for user it triggers session
        # flush and forces a re-login
        if password_changed(auth_user, session):
            session.invalidate()
            cookie_store = CookieStoreWrapper(session.get('rhodecode_user'))
            auth_user = AuthUser(ip_addr=ip_addr)

        authenticated = cookie_store.get('is_authenticated')

    if not auth_user.is_authenticated and auth_user.is_user_object:
        # user is not authenticated and not empty
        auth_user.set_authenticated(authenticated)

    return auth_user
472 478
473 479
def h_filter(s):
    """
    Custom filter for Mako templates. Mako by standard uses `markupsafe.escape`
    we wrap this with additional functionality that converts None to empty
    strings
    """
    return markupsafe.Markup() if s is None else markupsafe.escape(s)
483 489
484 490
def add_events_routes(config):
    """
    Adds routing that can be used in events. Because some events are triggered
    outside of pyramid context, we need to bootstrap request with some
    routing registered
    """

    from rhodecode.apps._base import ADMIN_PREFIX

    config.add_route(name='home', pattern='/')

    config.add_route(name='login', pattern=ADMIN_PREFIX + '/login')
    config.add_route(name='logout', pattern=ADMIN_PREFIX + '/logout')
    config.add_route(name='repo_summary', pattern='/{repo_name}')
    config.add_route(name='repo_summary_explicit', pattern='/{repo_name}/summary')
    config.add_route(name='repo_group_home', pattern='/{repo_group_name}')

    # pull request / commit routes used by event payload URL generation
    config.add_route(name='pullrequest_show',
                     pattern='/{repo_name}/pull-request/{pull_request_id}')
    config.add_route(name='pull_requests_global',
                     pattern='/pull-request/{pull_request_id}')
    config.add_route(name='repo_commit',
                     pattern='/{repo_name}/changeset/{commit_id}')

    config.add_route(name='repo_files',
                     pattern='/{repo_name}/files/{commit_id}/{f_path}')
511 517
512 518
def bootstrap_config(request):
    """Create a pyramid testing config with mako/beaker/cache and event routes."""
    import pyramid.testing
    registry = pyramid.testing.Registry('RcTestRegistry')

    config = pyramid.testing.setUp(registry=registry, request=request)

    # allow pyramid lookup in testing
    config.include('pyramid_mako')
    config.include('pyramid_beaker')
    config.include('rhodecode.lib.rc_cache')

    add_events_routes(config)

    return config
527 533
528 534
def bootstrap_request(**kwargs):
    """
    Build a pyramid DummyRequest subclass instance for testing; kwargs
    may override application_url/host/domain, remaining kwargs are passed
    to the request constructor.
    """
    import pyramid.testing

    class TestRequest(pyramid.testing.DummyRequest):
        application_url = kwargs.pop('application_url', 'http://example.com')
        host = kwargs.pop('host', 'example.com:80')
        domain = kwargs.pop('domain', 'example.com')

        def translate(self, msg):
            # no-op translation for tests
            return msg

        # NOTE(review): name is misspelled ('plularize' vs 'pluralize') --
        # confirm no caller relies on it before renaming
        def plularize(self, singular, plural, n):
            return singular

        def get_partial_renderer(self, tmpl_name):

            from rhodecode.lib.partial_renderer import get_partial_renderer
            return get_partial_renderer(request=self, tmpl_name=tmpl_name)

        _call_context = {}
        @property
        def call_context(self):
            return self._call_context

    class TestDummySession(pyramid.testing.DummySession):
        def save(*arg, **kw):
            # sessions are never persisted in tests
            pass

    request = TestRequest(**kwargs)
    request.session = TestDummySession()

    return request
561 567
@@ -1,121 +1,122 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import pytest
22 22
23 23 from rhodecode.tests.events.conftest import EventCatcher
24 24
25 25 from rhodecode.lib import hooks_base, utils2
26 26 from rhodecode.model.repo import RepoModel
27 27 from rhodecode.events.repo import (
28 28 RepoPrePullEvent, RepoPullEvent,
29 29 RepoPrePushEvent, RepoPushEvent,
30 30 RepoPreCreateEvent, RepoCreateEvent,
31 31 RepoPreDeleteEvent, RepoDeleteEvent,
32 32 )
33 33
34 34
35 35 @pytest.fixture
36 36 def scm_extras(user_regular, repo_stub):
37 37 extras = utils2.AttributeDict({
38 38 'ip': '127.0.0.1',
39 39 'username': user_regular.username,
40 40 'user_id': user_regular.user_id,
41 41 'action': '',
42 42 'repository': repo_stub.repo_name,
43 43 'scm': repo_stub.scm_instance().alias,
44 44 'config': '',
45 'repo_store': '',
45 46 'server_url': 'http://example.com',
46 47 'make_lock': None,
47 48 'user-agent': 'some-client',
48 49 'locked_by': [None],
49 50 'commit_ids': ['a' * 40] * 3,
50 51 'is_shadow_repo': False,
51 52 })
52 53 return extras
53 54
54 55
55 56 # TODO: dan: make the serialization tests complete json comparisons
56 57 @pytest.mark.parametrize('EventClass', [
57 58 RepoPreCreateEvent, RepoCreateEvent,
58 59 RepoPreDeleteEvent, RepoDeleteEvent,
59 60 ])
60 61 def test_repo_events_serialized(config_stub, repo_stub, EventClass):
61 62 event = EventClass(repo_stub)
62 63 data = event.as_dict()
63 64 assert data['name'] == EventClass.name
64 65 assert data['repo']['repo_name'] == repo_stub.repo_name
65 66 assert data['repo']['url']
66 67 assert data['repo']['permalink_url']
67 68
68 69
69 70 @pytest.mark.parametrize('EventClass', [
70 71 RepoPrePullEvent, RepoPullEvent, RepoPrePushEvent
71 72 ])
72 73 def test_vcs_repo_events_serialize(config_stub, repo_stub, scm_extras, EventClass):
73 74 event = EventClass(repo_name=repo_stub.repo_name, extras=scm_extras)
74 75 data = event.as_dict()
75 76 assert data['name'] == EventClass.name
76 77 assert data['repo']['repo_name'] == repo_stub.repo_name
77 78 assert data['repo']['url']
78 79 assert data['repo']['permalink_url']
79 80
80 81
81 82 @pytest.mark.parametrize('EventClass', [RepoPushEvent])
82 83 def test_vcs_repo_push_event_serialize(config_stub, repo_stub, scm_extras, EventClass):
83 84 event = EventClass(repo_name=repo_stub.repo_name,
84 85 pushed_commit_ids=scm_extras['commit_ids'],
85 86 extras=scm_extras)
86 87 data = event.as_dict()
87 88 assert data['name'] == EventClass.name
88 89 assert data['repo']['repo_name'] == repo_stub.repo_name
89 90 assert data['repo']['url']
90 91 assert data['repo']['permalink_url']
91 92
92 93
93 94 def test_create_delete_repo_fires_events(backend):
94 95 with EventCatcher() as event_catcher:
95 96 repo = backend.create_repo()
96 97 assert event_catcher.events_types == [RepoPreCreateEvent, RepoCreateEvent]
97 98
98 99 with EventCatcher() as event_catcher:
99 100 RepoModel().delete(repo)
100 101 assert event_catcher.events_types == [RepoPreDeleteEvent, RepoDeleteEvent]
101 102
102 103
103 104 def test_pull_fires_events(scm_extras):
104 105 with EventCatcher() as event_catcher:
105 106 hooks_base.pre_push(scm_extras)
106 107 assert event_catcher.events_types == [RepoPrePushEvent]
107 108
108 109 with EventCatcher() as event_catcher:
109 110 hooks_base.post_push(scm_extras)
110 111 assert event_catcher.events_types == [RepoPushEvent]
111 112
112 113
113 114 def test_push_fires_events(scm_extras):
114 115 with EventCatcher() as event_catcher:
115 116 hooks_base.pre_pull(scm_extras)
116 117 assert event_catcher.events_types == [RepoPrePullEvent]
117 118
118 119 with EventCatcher() as event_catcher:
119 120 hooks_base.post_pull(scm_extras)
120 121 assert event_catcher.events_types == [RepoPullEvent]
121 122
@@ -1,54 +1,55 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 import pytest
23 23 from rhodecode import events
24 24 from rhodecode.lib.utils2 import AttributeDict
25 25
26 26
27 27 @pytest.fixture
28 28 def repo_push_event(backend, user_regular):
29 29 commits = [
30 30 {'message': 'ancestor commit fixes #15'},
31 31 {'message': 'quick fixes'},
32 32 {'message': 'change that fixes #41, #2'},
33 33 {'message': 'this is because 5b23c3532 broke stuff'},
34 34 {'message': 'last commit'},
35 35 ]
36 36 commit_ids = backend.create_master_repo(commits).values()
37 37 repo = backend.create_repo()
38 38 scm_extras = AttributeDict({
39 39 'ip': '127.0.0.1',
40 40 'username': user_regular.username,
41 41 'user_id': user_regular.user_id,
42 42 'action': '',
43 43 'repository': repo.repo_name,
44 44 'scm': repo.scm_instance().alias,
45 45 'config': '',
46 'repo_store': '',
46 47 'server_url': 'http://example.com',
47 48 'make_lock': None,
48 49 'locked_by': [None],
49 50 'commit_ids': commit_ids,
50 51 })
51 52
52 53 return events.RepoPushEvent(repo_name=repo.repo_name,
53 54 pushed_commit_ids=commit_ids,
54 55 extras=scm_extras)
@@ -1,143 +1,144 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 from rhodecode.model.db import Session, UserLog
24 24 from rhodecode.lib import hooks_base, utils2
25 25
26 26
27 27 def test_post_push_truncates_commits(user_regular, repo_stub):
28 28 extras = {
29 29 'ip': '127.0.0.1',
30 30 'username': user_regular.username,
31 31 'user_id': user_regular.user_id,
32 32 'action': 'push_local',
33 33 'repository': repo_stub.repo_name,
34 34 'scm': 'git',
35 35 'config': '',
36 36 'server_url': 'http://example.com',
37 37 'make_lock': None,
38 38 'user_agent': 'some-client',
39 39 'locked_by': [None],
40 40 'commit_ids': ['abcde12345' * 4] * 30000,
41 41 'is_shadow_repo': False,
42 42 }
43 43 extras = utils2.AttributeDict(extras)
44 44
45 45 hooks_base.post_push(extras)
46 46
47 47 # Calculate appropriate action string here
48 48 commit_ids = extras.commit_ids[:400]
49 49
50 50 entry = UserLog.query().order_by('-user_log_id').first()
51 51 assert entry.action == 'user.push'
52 52 assert entry.action_data['commit_ids'] == commit_ids
53 53 Session().delete(entry)
54 54 Session().commit()
55 55
56 56
57 57 def assert_called_with_mock(callable_, expected_mock_name):
58 58 mock_obj = callable_.call_args[0][0]
59 59 mock_name = mock_obj._mock_new_parent._mock_new_name
60 60 assert mock_name == expected_mock_name
61 61
62 62
63 63 @pytest.fixture
64 64 def hook_extras(user_regular, repo_stub):
65 65 extras = utils2.AttributeDict({
66 66 'ip': '127.0.0.1',
67 67 'username': user_regular.username,
68 68 'user_id': user_regular.user_id,
69 69 'action': 'push',
70 70 'repository': repo_stub.repo_name,
71 71 'scm': '',
72 72 'config': '',
73 'repo_store': '',
73 74 'server_url': 'http://example.com',
74 75 'make_lock': None,
75 76 'user_agent': 'some-client',
76 77 'locked_by': [None],
77 78 'commit_ids': [],
78 79 'is_shadow_repo': False,
79 80 })
80 81 return extras
81 82
82 83
83 84 @pytest.mark.parametrize('func, extension, event', [
84 85 (hooks_base.pre_push, 'pre_push_extension', 'RepoPrePushEvent'),
85 86 (hooks_base.post_push, 'post_pull_extension', 'RepoPushEvent'),
86 87 (hooks_base.pre_pull, 'pre_pull_extension', 'RepoPrePullEvent'),
87 88 (hooks_base.post_pull, 'post_push_extension', 'RepoPullEvent'),
88 89 ])
89 90 def test_hooks_propagate(func, extension, event, hook_extras):
90 91 """
91 92 Tests that our hook code propagates to rhodecode extensions and triggers
92 93 the appropriate event.
93 94 """
94 95 extension_mock = mock.Mock()
95 96 events_mock = mock.Mock()
96 97 patches = {
97 98 'Repository': mock.Mock(),
98 99 'events': events_mock,
99 100 extension: extension_mock,
100 101 }
101 102
102 103 # Clear shadow repo flag.
103 104 hook_extras.is_shadow_repo = False
104 105
105 106 # Execute hook function.
106 107 with mock.patch.multiple(hooks_base, **patches):
107 108 func(hook_extras)
108 109
109 110 # Assert that extensions are called and event was fired.
110 111 extension_mock.called_once()
111 112 assert_called_with_mock(events_mock.trigger, event)
112 113
113 114
114 115 @pytest.mark.parametrize('func, extension, event', [
115 116 (hooks_base.pre_push, 'pre_push_extension', 'RepoPrePushEvent'),
116 117 (hooks_base.post_push, 'post_pull_extension', 'RepoPushEvent'),
117 118 (hooks_base.pre_pull, 'pre_pull_extension', 'RepoPrePullEvent'),
118 119 (hooks_base.post_pull, 'post_push_extension', 'RepoPullEvent'),
119 120 ])
120 121 def test_hooks_propagates_not_on_shadow(func, extension, event, hook_extras):
121 122 """
122 123 If hooks are called by a request to a shadow repo we only want to run our
123 124 internal hooks code but not external ones like rhodecode extensions or
124 125 trigger an event.
125 126 """
126 127 extension_mock = mock.Mock()
127 128 events_mock = mock.Mock()
128 129 patches = {
129 130 'Repository': mock.Mock(),
130 131 'events': events_mock,
131 132 extension: extension_mock,
132 133 }
133 134
134 135 # Set shadow repo flag.
135 136 hook_extras.is_shadow_repo = True
136 137
137 138 # Execute hook function.
138 139 with mock.patch.multiple(hooks_base, **patches):
139 140 func(hook_extras)
140 141
141 142 # Assert that extensions are *not* called and event was *not* fired.
142 143 assert not extension_mock.called
143 144 assert not events_mock.trigger.called
@@ -1,868 +1,869 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23 import textwrap
24 24
25 25 import rhodecode
26 26 from rhodecode.lib.utils2 import safe_unicode
27 27 from rhodecode.lib.vcs.backends import get_backend
28 28 from rhodecode.lib.vcs.backends.base import (
29 29 MergeResponse, MergeFailureReason, Reference)
30 30 from rhodecode.lib.vcs.exceptions import RepositoryError
31 31 from rhodecode.lib.vcs.nodes import FileNode
32 32 from rhodecode.model.comment import CommentsModel
33 33 from rhodecode.model.db import PullRequest, Session
34 34 from rhodecode.model.pull_request import PullRequestModel
35 35 from rhodecode.model.user import UserModel
36 36 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
37 37
38 38
39 39 pytestmark = [
40 40 pytest.mark.backends("git", "hg"),
41 41 ]
42 42
43 43
44 44 @pytest.mark.usefixtures('config_stub')
45 45 class TestPullRequestModel(object):
46 46
47 47 @pytest.fixture
48 48 def pull_request(self, request, backend, pr_util):
49 49 """
50 50 A pull request combined with multiples patches.
51 51 """
52 52 BackendClass = get_backend(backend.alias)
53 53 self.merge_patcher = mock.patch.object(
54 54 BackendClass, 'merge', return_value=MergeResponse(
55 55 False, False, None, MergeFailureReason.UNKNOWN))
56 56 self.workspace_remove_patcher = mock.patch.object(
57 57 BackendClass, 'cleanup_merge_workspace')
58 58
59 59 self.workspace_remove_mock = self.workspace_remove_patcher.start()
60 60 self.merge_mock = self.merge_patcher.start()
61 61 self.comment_patcher = mock.patch(
62 62 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
63 63 self.comment_patcher.start()
64 64 self.notification_patcher = mock.patch(
65 65 'rhodecode.model.notification.NotificationModel.create')
66 66 self.notification_patcher.start()
67 67 self.helper_patcher = mock.patch(
68 68 'rhodecode.lib.helpers.route_path')
69 69 self.helper_patcher.start()
70 70
71 71 self.hook_patcher = mock.patch.object(PullRequestModel,
72 72 '_trigger_pull_request_hook')
73 73 self.hook_mock = self.hook_patcher.start()
74 74
75 75 self.invalidation_patcher = mock.patch(
76 76 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
77 77 self.invalidation_mock = self.invalidation_patcher.start()
78 78
79 79 self.pull_request = pr_util.create_pull_request(
80 80 mergeable=True, name_suffix=u'Δ…Δ‡')
81 81 self.source_commit = self.pull_request.source_ref_parts.commit_id
82 82 self.target_commit = self.pull_request.target_ref_parts.commit_id
83 83 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
84 84 self.repo_id = self.pull_request.target_repo.repo_id
85 85
86 86 @request.addfinalizer
87 87 def cleanup_pull_request():
88 88 calls = [mock.call(
89 89 self.pull_request, self.pull_request.author, 'create')]
90 90 self.hook_mock.assert_has_calls(calls)
91 91
92 92 self.workspace_remove_patcher.stop()
93 93 self.merge_patcher.stop()
94 94 self.comment_patcher.stop()
95 95 self.notification_patcher.stop()
96 96 self.helper_patcher.stop()
97 97 self.hook_patcher.stop()
98 98 self.invalidation_patcher.stop()
99 99
100 100 return self.pull_request
101 101
102 102 def test_get_all(self, pull_request):
103 103 prs = PullRequestModel().get_all(pull_request.target_repo)
104 104 assert isinstance(prs, list)
105 105 assert len(prs) == 1
106 106
107 107 def test_count_all(self, pull_request):
108 108 pr_count = PullRequestModel().count_all(pull_request.target_repo)
109 109 assert pr_count == 1
110 110
111 111 def test_get_awaiting_review(self, pull_request):
112 112 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
113 113 assert isinstance(prs, list)
114 114 assert len(prs) == 1
115 115
116 116 def test_count_awaiting_review(self, pull_request):
117 117 pr_count = PullRequestModel().count_awaiting_review(
118 118 pull_request.target_repo)
119 119 assert pr_count == 1
120 120
121 121 def test_get_awaiting_my_review(self, pull_request):
122 122 PullRequestModel().update_reviewers(
123 123 pull_request, [(pull_request.author, ['author'], False, [])],
124 124 pull_request.author)
125 125 prs = PullRequestModel().get_awaiting_my_review(
126 126 pull_request.target_repo, user_id=pull_request.author.user_id)
127 127 assert isinstance(prs, list)
128 128 assert len(prs) == 1
129 129
130 130 def test_count_awaiting_my_review(self, pull_request):
131 131 PullRequestModel().update_reviewers(
132 132 pull_request, [(pull_request.author, ['author'], False, [])],
133 133 pull_request.author)
134 134 pr_count = PullRequestModel().count_awaiting_my_review(
135 135 pull_request.target_repo, user_id=pull_request.author.user_id)
136 136 assert pr_count == 1
137 137
138 138 def test_delete_calls_cleanup_merge(self, pull_request):
139 139 repo_id = pull_request.target_repo.repo_id
140 140 PullRequestModel().delete(pull_request, pull_request.author)
141 141
142 142 self.workspace_remove_mock.assert_called_once_with(
143 143 repo_id, self.workspace_id)
144 144
145 145 def test_close_calls_cleanup_and_hook(self, pull_request):
146 146 PullRequestModel().close_pull_request(
147 147 pull_request, pull_request.author)
148 148 repo_id = pull_request.target_repo.repo_id
149 149
150 150 self.workspace_remove_mock.assert_called_once_with(
151 151 repo_id, self.workspace_id)
152 152 self.hook_mock.assert_called_with(
153 153 self.pull_request, self.pull_request.author, 'close')
154 154
155 155 def test_merge_status(self, pull_request):
156 156 self.merge_mock.return_value = MergeResponse(
157 157 True, False, None, MergeFailureReason.NONE)
158 158
159 159 assert pull_request._last_merge_source_rev is None
160 160 assert pull_request._last_merge_target_rev is None
161 161 assert pull_request.last_merge_status is None
162 162
163 163 status, msg = PullRequestModel().merge_status(pull_request)
164 164 assert status is True
165 165 assert msg.eval() == 'This pull request can be automatically merged.'
166 166 self.merge_mock.assert_called_with(
167 167 self.repo_id, self.workspace_id,
168 168 pull_request.target_ref_parts,
169 169 pull_request.source_repo.scm_instance(),
170 170 pull_request.source_ref_parts, dry_run=True,
171 171 use_rebase=False, close_branch=False)
172 172
173 173 assert pull_request._last_merge_source_rev == self.source_commit
174 174 assert pull_request._last_merge_target_rev == self.target_commit
175 175 assert pull_request.last_merge_status is MergeFailureReason.NONE
176 176
177 177 self.merge_mock.reset_mock()
178 178 status, msg = PullRequestModel().merge_status(pull_request)
179 179 assert status is True
180 180 assert msg.eval() == 'This pull request can be automatically merged.'
181 181 assert self.merge_mock.called is False
182 182
183 183 def test_merge_status_known_failure(self, pull_request):
184 184 self.merge_mock.return_value = MergeResponse(
185 185 False, False, None, MergeFailureReason.MERGE_FAILED)
186 186
187 187 assert pull_request._last_merge_source_rev is None
188 188 assert pull_request._last_merge_target_rev is None
189 189 assert pull_request.last_merge_status is None
190 190
191 191 status, msg = PullRequestModel().merge_status(pull_request)
192 192 assert status is False
193 193 assert (
194 194 msg.eval() ==
195 195 'This pull request cannot be merged because of merge conflicts.')
196 196 self.merge_mock.assert_called_with(
197 197 self.repo_id, self.workspace_id,
198 198 pull_request.target_ref_parts,
199 199 pull_request.source_repo.scm_instance(),
200 200 pull_request.source_ref_parts, dry_run=True,
201 201 use_rebase=False, close_branch=False)
202 202
203 203 assert pull_request._last_merge_source_rev == self.source_commit
204 204 assert pull_request._last_merge_target_rev == self.target_commit
205 205 assert (
206 206 pull_request.last_merge_status is MergeFailureReason.MERGE_FAILED)
207 207
208 208 self.merge_mock.reset_mock()
209 209 status, msg = PullRequestModel().merge_status(pull_request)
210 210 assert status is False
211 211 assert (
212 212 msg.eval() ==
213 213 'This pull request cannot be merged because of merge conflicts.')
214 214 assert self.merge_mock.called is False
215 215
216 216 def test_merge_status_unknown_failure(self, pull_request):
217 217 self.merge_mock.return_value = MergeResponse(
218 218 False, False, None, MergeFailureReason.UNKNOWN)
219 219
220 220 assert pull_request._last_merge_source_rev is None
221 221 assert pull_request._last_merge_target_rev is None
222 222 assert pull_request.last_merge_status is None
223 223
224 224 status, msg = PullRequestModel().merge_status(pull_request)
225 225 assert status is False
226 226 assert msg.eval() == (
227 227 'This pull request cannot be merged because of an unhandled'
228 228 ' exception.')
229 229 self.merge_mock.assert_called_with(
230 230 self.repo_id, self.workspace_id,
231 231 pull_request.target_ref_parts,
232 232 pull_request.source_repo.scm_instance(),
233 233 pull_request.source_ref_parts, dry_run=True,
234 234 use_rebase=False, close_branch=False)
235 235
236 236 assert pull_request._last_merge_source_rev is None
237 237 assert pull_request._last_merge_target_rev is None
238 238 assert pull_request.last_merge_status is None
239 239
240 240 self.merge_mock.reset_mock()
241 241 status, msg = PullRequestModel().merge_status(pull_request)
242 242 assert status is False
243 243 assert msg.eval() == (
244 244 'This pull request cannot be merged because of an unhandled'
245 245 ' exception.')
246 246 assert self.merge_mock.called is True
247 247
248 248 def test_merge_status_when_target_is_locked(self, pull_request):
249 249 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
250 250 status, msg = PullRequestModel().merge_status(pull_request)
251 251 assert status is False
252 252 assert msg.eval() == (
253 253 'This pull request cannot be merged because the target repository'
254 254 ' is locked.')
255 255
256 256 def test_merge_status_requirements_check_target(self, pull_request):
257 257
258 258 def has_largefiles(self, repo):
259 259 return repo == pull_request.source_repo
260 260
261 261 patcher = mock.patch.object(
262 262 PullRequestModel, '_has_largefiles', has_largefiles)
263 263 with patcher:
264 264 status, msg = PullRequestModel().merge_status(pull_request)
265 265
266 266 assert status is False
267 267 assert msg == 'Target repository large files support is disabled.'
268 268
269 269 def test_merge_status_requirements_check_source(self, pull_request):
270 270
271 271 def has_largefiles(self, repo):
272 272 return repo == pull_request.target_repo
273 273
274 274 patcher = mock.patch.object(
275 275 PullRequestModel, '_has_largefiles', has_largefiles)
276 276 with patcher:
277 277 status, msg = PullRequestModel().merge_status(pull_request)
278 278
279 279 assert status is False
280 280 assert msg == 'Source repository large files support is disabled.'
281 281
282 282 def test_merge(self, pull_request, merge_extras):
283 283 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
284 284 merge_ref = Reference(
285 285 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
286 286 self.merge_mock.return_value = MergeResponse(
287 287 True, True, merge_ref, MergeFailureReason.NONE)
288 288
289 289 merge_extras['repository'] = pull_request.target_repo.repo_name
290 290 PullRequestModel().merge_repo(
291 291 pull_request, pull_request.author, extras=merge_extras)
292 292
293 293 message = (
294 294 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
295 295 u'\n\n {pr_title}'.format(
296 296 pr_id=pull_request.pull_request_id,
297 297 source_repo=safe_unicode(
298 298 pull_request.source_repo.scm_instance().name),
299 299 source_ref_name=pull_request.source_ref_parts.name,
300 300 pr_title=safe_unicode(pull_request.title)
301 301 )
302 302 )
303 303 self.merge_mock.assert_called_with(
304 304 self.repo_id, self.workspace_id,
305 305 pull_request.target_ref_parts,
306 306 pull_request.source_repo.scm_instance(),
307 307 pull_request.source_ref_parts,
308 308 user_name=user.username, user_email=user.email, message=message,
309 309 use_rebase=False, close_branch=False
310 310 )
311 311 self.invalidation_mock.assert_called_once_with(
312 312 pull_request.target_repo.repo_name)
313 313
314 314 self.hook_mock.assert_called_with(
315 315 self.pull_request, self.pull_request.author, 'merge')
316 316
317 317 pull_request = PullRequest.get(pull_request.pull_request_id)
318 318 assert (
319 319 pull_request.merge_rev ==
320 320 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
321 321
322 322 def test_merge_failed(self, pull_request, merge_extras):
323 323 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
324 324 merge_ref = Reference(
325 325 'type', 'name', '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
326 326 self.merge_mock.return_value = MergeResponse(
327 327 False, False, merge_ref, MergeFailureReason.MERGE_FAILED)
328 328
329 329 merge_extras['repository'] = pull_request.target_repo.repo_name
330 330 PullRequestModel().merge_repo(
331 331 pull_request, pull_request.author, extras=merge_extras)
332 332
333 333 message = (
334 334 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
335 335 u'\n\n {pr_title}'.format(
336 336 pr_id=pull_request.pull_request_id,
337 337 source_repo=safe_unicode(
338 338 pull_request.source_repo.scm_instance().name),
339 339 source_ref_name=pull_request.source_ref_parts.name,
340 340 pr_title=safe_unicode(pull_request.title)
341 341 )
342 342 )
343 343 self.merge_mock.assert_called_with(
344 344 self.repo_id, self.workspace_id,
345 345 pull_request.target_ref_parts,
346 346 pull_request.source_repo.scm_instance(),
347 347 pull_request.source_ref_parts,
348 348 user_name=user.username, user_email=user.email, message=message,
349 349 use_rebase=False, close_branch=False
350 350 )
351 351
352 352 pull_request = PullRequest.get(pull_request.pull_request_id)
353 353 assert self.invalidation_mock.called is False
354 354 assert pull_request.merge_rev is None
355 355
356 356 def test_get_commit_ids(self, pull_request):
357 357 # The PR has been not merget yet, so expect an exception
358 358 with pytest.raises(ValueError):
359 359 PullRequestModel()._get_commit_ids(pull_request)
360 360
361 361 # Merge revision is in the revisions list
362 362 pull_request.merge_rev = pull_request.revisions[0]
363 363 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
364 364 assert commit_ids == pull_request.revisions
365 365
366 366 # Merge revision is not in the revisions list
367 367 pull_request.merge_rev = 'f000' * 10
368 368 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
369 369 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
370 370
371 371 def test_get_diff_from_pr_version(self, pull_request):
372 372 source_repo = pull_request.source_repo
373 373 source_ref_id = pull_request.source_ref_parts.commit_id
374 374 target_ref_id = pull_request.target_ref_parts.commit_id
375 375 diff = PullRequestModel()._get_diff_from_pr_or_version(
376 376 source_repo, source_ref_id, target_ref_id, context=6)
377 377 assert 'file_1' in diff.raw
378 378
379 379 def test_generate_title_returns_unicode(self):
380 380 title = PullRequestModel().generate_pullrequest_title(
381 381 source='source-dummy',
382 382 source_ref='source-ref-dummy',
383 383 target='target-dummy',
384 384 )
385 385 assert type(title) == unicode
386 386
387 387
388 388 @pytest.mark.usefixtures('config_stub')
389 389 class TestIntegrationMerge(object):
390 390 @pytest.mark.parametrize('extra_config', (
391 391 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
392 392 ))
393 393 def test_merge_triggers_push_hooks(
394 394 self, pr_util, user_admin, capture_rcextensions, merge_extras,
395 395 extra_config):
396 396 pull_request = pr_util.create_pull_request(
397 397 approved=True, mergeable=True)
398 398 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
399 399 merge_extras['repository'] = pull_request.target_repo.repo_name
400 400 Session().commit()
401 401
402 402 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
403 403 merge_state = PullRequestModel().merge_repo(
404 404 pull_request, user_admin, extras=merge_extras)
405 405
406 406 assert merge_state.executed
407 407 assert 'pre_push' in capture_rcextensions
408 408 assert 'post_push' in capture_rcextensions
409 409
410 410 def test_merge_can_be_rejected_by_pre_push_hook(
411 411 self, pr_util, user_admin, capture_rcextensions, merge_extras):
412 412 pull_request = pr_util.create_pull_request(
413 413 approved=True, mergeable=True)
414 414 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
415 415 merge_extras['repository'] = pull_request.target_repo.repo_name
416 416 Session().commit()
417 417
418 418 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
419 419 pre_pull.side_effect = RepositoryError("Disallow push!")
420 420 merge_status = PullRequestModel().merge_repo(
421 421 pull_request, user_admin, extras=merge_extras)
422 422
423 423 assert not merge_status.executed
424 424 assert 'pre_push' not in capture_rcextensions
425 425 assert 'post_push' not in capture_rcextensions
426 426
427 427 def test_merge_fails_if_target_is_locked(
428 428 self, pr_util, user_regular, merge_extras):
429 429 pull_request = pr_util.create_pull_request(
430 430 approved=True, mergeable=True)
431 431 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
432 432 pull_request.target_repo.locked = locked_by
433 433 # TODO: johbo: Check if this can work based on the database, currently
434 434 # all data is pre-computed, that's why just updating the DB is not
435 435 # enough.
436 436 merge_extras['locked_by'] = locked_by
437 437 merge_extras['repository'] = pull_request.target_repo.repo_name
438 438 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
439 439 Session().commit()
440 440 merge_status = PullRequestModel().merge_repo(
441 441 pull_request, user_regular, extras=merge_extras)
442 442 assert not merge_status.executed
443 443
444 444
445 445 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
446 446 (False, 1, 0),
447 447 (True, 0, 1),
448 448 ])
449 449 def test_outdated_comments(
450 450 pr_util, use_outdated, inlines_count, outdated_count, config_stub):
451 451 pull_request = pr_util.create_pull_request()
452 452 pr_util.create_inline_comment(file_path='not_in_updated_diff')
453 453
454 454 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
455 455 pr_util.add_one_commit()
456 456 assert_inline_comments(
457 457 pull_request, visible=inlines_count, outdated=outdated_count)
458 458 outdated_comment_mock.assert_called_with(pull_request)
459 459
460 460
@pytest.fixture
def merge_extras(user_regular):
    """
    Fake vcs-operation ``extras`` dict, as passed along when running a merge.
    """
    return {
        'ip': '127.0.0.1',
        'username': user_regular.username,
        'user_id': user_regular.user_id,
        'action': 'push',
        'repository': 'fake_target_repo_name',
        'scm': 'git',
        'config': 'fake_config_ini_path',
        'repo_store': '',
        'make_lock': None,
        'locked_by': [None, None, None],
        'server_url': 'http://test.example.com:5000',
        'hooks': ['push', 'pull'],
        'is_shadow_repo': False,
    }
481 482
482 483
@pytest.mark.usefixtures('config_stub')
class TestUpdateCommentHandling(object):
    """Checks how inline comments are kept or flagged outdated on PR update."""

    @pytest.fixture(autouse=True, scope='class')
    def enable_outdated_comments(self, request, baseapp):
        # Force the outdated-comments feature on for the whole class.
        config_patch = mock.patch.dict(
            'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
        config_patch.start()
        request.addfinalizer(config_patch.stop)

    def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
        history = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
            {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
        ]
        pr = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        # Commit c does not touch file_b, so the comment stays visible.
        assert_inline_comments(pr, visible=1, outdated=0)

    def test_comment_stays_unflagged_on_change_above(self, pr_util):
        body = ''.join('line {}\n'.format(no) for no in range(1, 11))
        body_with_header = 'new_line_at_top\n' + body
        history = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', body)]},
            {'message': 'c', 'changed': [FileNode('file_b', body_with_header)]},
        ]
        pr = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b', revisions=['b'])

        with outdated_comments_patcher():
            comment = pr_util.create_inline_comment(
                line_no=u'n8', file_path='file_b')
            pr_util.add_one_commit(head='c')

        assert_inline_comments(pr, visible=1, outdated=0)
        # The insertion at the top shifts the comment one line down.
        assert comment.line_no == u'n9'

    def test_comment_stays_unflagged_on_change_below(self, pr_util):
        body = ''.join('line {}\n'.format(no) for no in range(10))
        body_with_footer = body + 'new_line_at_end\n'
        history = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', body)]},
            {'message': 'c', 'changed': [FileNode('file_b', body_with_footer)]},
        ]
        pr = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')
        pr_util.add_one_commit(head='c')

        # Appending below the commented region does not invalidate it.
        assert_inline_comments(pr, visible=1, outdated=0)

    @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
    def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
        def make_file(lines):
            return FileNode('file_b', ''.join(lines))

        base = ['line {}\n'.format(no) for no in range(1, 13)]

        modified = list(base)
        modified.insert(6, 'line 6a added\n')

        # Changes on the last line of sight
        final = list(modified)
        final[0] = 'line 1 changed\n'
        final[-1] = 'line 12 changed\n'

        history = [
            {'message': 'a', 'added': [make_file(base)]},
            {'message': 'b', 'changed': [make_file(modified)]},
            {'message': 'c', 'changed': [make_file(final)]},
        ]

        pr = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(line_no=line_no, file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
            assert_inline_comments(pr, visible=0, outdated=1)

    @pytest.mark.parametrize("change, content", [
        ('changed', 'changed\n'),
        ('removed', ''),
    ], ids=['changed', 'removed'])
    def test_comment_flagged_on_change(self, pr_util, change, content):
        history = [
            {'message': 'a'},
            {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
            {'message': 'c', change: [FileNode('file_b', content)]},
        ]
        pr = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b', revisions=['b'])
        pr_util.create_inline_comment(file_path='file_b')

        with outdated_comments_patcher():
            pr_util.add_one_commit(head='c')
            assert_inline_comments(pr, visible=0, outdated=1)
589 590
590 591
@pytest.mark.usefixtures('config_stub')
class TestUpdateChangedFiles(object):
    """Checks the per-file change summary computed when a PR is updated."""

    def test_no_changes_on_unchanged_diff(self, pr_util):
        history = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode('file_b', 'test_content b\n')]},
            {'message': 'c',
             'added': [FileNode('file_c', 'test_content c\n')]},
        ]
        # open a PR from a to b, adding file_b
        pr = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # update the PR, which adds the new file file_c
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pr, added=['file_c'], modified=[], removed=[])

    def test_modify_and_undo_modification_diff(self, pr_util):
        history = [
            {'message': 'a'},
            {'message': 'b',
             'added': [FileNode('file_b', 'test_content b\n')]},
            {'message': 'c',
             'changed': [FileNode('file_b', 'test_content b modified\n')]},
            {'message': 'd',
             'changed': [FileNode('file_b', 'test_content b\n')]},
        ]
        # open a PR from a to b, adding file_b
        pr = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        # first update modifies file_b
        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pr, added=[], modified=['file_b'], removed=[])

        # moving the head to d rolls the change back, so no changes
        # should be reported anymore
        pr_util.add_one_commit(head='d')

        assert_pr_file_changes(
            pr, added=[], modified=[], removed=[])

    def test_updated_all_files_in_pr(self, pr_util):
        history = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode('file_a', 'test_content a\n'),
                FileNode('file_b', 'test_content b\n'),
                FileNode('file_c', 'test_content c\n')]},
            {'message': 'c', 'changed': [
                FileNode('file_a', 'test_content a changed\n'),
                FileNode('file_b', 'test_content b changed\n'),
                FileNode('file_c', 'test_content c changed\n')]},
        ]
        # open a PR from a to b, touching three files
        pr = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pr, added=[], modified=['file_a', 'file_b', 'file_c'], removed=[])

    def test_updated_and_removed_all_files_in_pr(self, pr_util):
        history = [
            {'message': 'a'},
            {'message': 'b', 'added': [
                FileNode('file_a', 'test_content a\n'),
                FileNode('file_b', 'test_content b\n'),
                FileNode('file_c', 'test_content c\n')]},
            {'message': 'c', 'removed': [
                FileNode('file_a', 'test_content a changed\n'),
                FileNode('file_b', 'test_content b changed\n'),
                FileNode('file_c', 'test_content c changed\n')]},
        ]
        # open a PR from a to b, then remove all three files
        pr = pr_util.create_pull_request(
            commits=history, target_head='a', source_head='b', revisions=['b'],
            name_suffix='per-file-review')

        pr_util.add_one_commit(head='c')

        assert_pr_file_changes(
            pr, added=[], modified=[], removed=['file_a', 'file_b', 'file_c'])
699 700
700 701
def test_update_writes_snapshot_into_pull_request_version(pr_util, config_stub):
    """A PR update with real changes must record one version snapshot."""
    pr = pr_util.create_pull_request()
    pr_util.update_source_repository()

    model = PullRequestModel()
    model.update_commits(pr)

    # Exactly one version entry is expected after the update.
    assert len(model.get_versions(pr)) == 1
710 711
711 712
def test_update_skips_new_version_if_unchanged(pr_util, config_stub):
    """Updating without source changes must not create a version."""
    pr = pr_util.create_pull_request()
    model = PullRequestModel()
    model.update_commits(pr)

    # Nothing changed, so no version may have been written.
    assert len(model.get_versions(pr)) == 0
719 720
720 721
def test_update_assigns_comments_to_the_new_version(pr_util, config_stub):
    """Comments made before an update get linked to the created version."""
    pr = pr_util.create_pull_request()
    comment = pr_util.create_comment()
    pr_util.update_source_repository()

    model = PullRequestModel()
    model.update_commits(pr)

    # The pre-update comment must now hang off the new PR version.
    assert comment.pull_request_version == model.get_versions(pr)[0]
731 732
732 733
def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util, config_stub):
    """Updating a PR leaves a system comment describing the change."""
    pr = pr_util.create_pull_request()
    pr_util.update_source_repository()
    pr_util.update_source_repository()

    PullRequestModel().update_commits(pr)

    # The auto-generated comment body is rendered RST; it must match exactly.
    expected_message = textwrap.dedent(
        """\
        Pull request updated. Auto status change to |under_review|

        .. role:: added
        .. role:: removed
        .. parsed-literal::

          Changed commits:
            * :added:`1 added`
            * :removed:`0 removed`

          Changed files:
            * `A file_2 <#a_c--92ed3b5f07b4>`_

        .. |under_review| replace:: *"Under Review"*"""
    )
    by_age = sorted(pr.comments, key=lambda c: c.modified_at)
    latest_comment = by_age[-1]
    assert latest_comment.text == expected_message
763 764
764 765
def test_create_version_from_snapshot_updates_attributes(pr_util, config_stub):
    """A version snapshot copies PR attributes but gets its own created_on."""
    pr = pr_util.create_pull_request()

    # Move away from the defaults so the copy is actually observable.
    pr.status = PullRequest.STATUS_CLOSED
    pr._last_merge_source_rev = "0" * 40
    pr._last_merge_target_rev = "1" * 40
    pr.last_merge_status = 1
    pr.merge_rev = "2" * 40

    # Remember the automatically maintained timestamps.
    original_created_on = pr.created_on
    original_updated_on = pr.updated_on

    # Create a new version of the pull request
    version = PullRequestModel()._create_version_from_snapshot(pr)

    assert version.title == pr_util.create_parameters['title']
    assert version.description == pr_util.create_parameters['description']
    assert version.status == PullRequest.STATUS_CLOSED

    # A version gets a fresh created_on ...
    assert version.created_on != original_created_on

    # ... but carries everything else over verbatim.
    assert version.updated_on == original_updated_on
    assert version.user_id == pr.user_id
    assert version.revisions == pr_util.create_parameters['revisions']
    assert version.source_repo == pr_util.source_repository
    assert version.source_ref == pr_util.create_parameters['source_ref']
    assert version.target_repo == pr_util.target_repository
    assert version.target_ref == pr_util.create_parameters['target_ref']
    assert version._last_merge_source_rev == pr._last_merge_source_rev
    assert version._last_merge_target_rev == pr._last_merge_target_rev
    assert version.last_merge_status == pr.last_merge_status
    assert version.merge_rev == pr.merge_rev
    assert version.pull_request == pr
802 803
803 804
def test_link_comments_to_version_only_updates_unlinked_comments(pr_util, config_stub):
    """Linking comments to a version must leave already-linked ones alone."""
    first_version = pr_util.create_version_of_pull_request()
    linked = pr_util.create_comment(linked_to=first_version)
    unlinked = pr_util.create_comment()
    second_version = pr_util.create_version_of_pull_request()

    PullRequestModel()._link_comments_to_version(second_version)

    # Only the dangling comment gets attached to the new version.
    assert (
        unlinked.pull_request_version_id ==
        second_version.pull_request_version_id)
    assert (
        linked.pull_request_version_id ==
        first_version.pull_request_version_id)
    assert (
        unlinked.pull_request_version_id !=
        linked.pull_request_version_id)
822 823
823 824
def test_calculate_commits():
    """Commit-id diffing splits ids into added/common/removed/total."""
    previous = [1, 2, 3]
    current = [1, 3, 4, 5]
    diff = PullRequestModel()._calculate_commit_id_changes(previous, current)
    assert diff.added == [4, 5]
    assert diff.common == [1, 3]
    assert diff.removed == [2]
    assert diff.total == [1, 3, 4, 5]
832 833
833 834
def assert_inline_comments(pull_request, visible=None, outdated=None):
    """Assert visible/outdated inline-comment counts on *pull_request*.

    Either count may be ``None``, in which case it is not checked.
    """
    repo_id = pull_request.target_repo.repo_id
    if visible is not None:
        comments = CommentsModel().get_inline_comments(
            repo_id, pull_request=pull_request)
        visible_count = CommentsModel().get_inline_comments_count(comments)
        assert visible_count == visible
    if outdated is not None:
        stale = CommentsModel().get_outdated_comments(repo_id, pull_request)
        assert len(stale) == outdated
845 846
846 847
def assert_pr_file_changes(
        pull_request, added=None, modified=None, removed=None):
    """Assert the added/modified/removed file lists of a PR update."""
    versions = PullRequestModel().get_versions(pull_request)
    # always use first version, ie original PR, to calculate changes
    first_version = versions[0]
    old_diff, new_diff = PullRequestModel()._generate_update_diffs(
        pull_request, first_version)
    changes = PullRequestModel()._calculate_file_changes(old_diff, new_diff)

    assert added == changes.added, \
        'expected added:%s vs value:%s' % (added, changes.added)
    assert modified == changes.modified, \
        'expected modified:%s vs value:%s' % (modified, changes.modified)
    assert removed == changes.removed, \
        'expected removed:%s vs value:%s' % (removed, changes.removed)
863 864
864 865
def outdated_comments_patcher(use_outdated=True):
    """Return a patcher forcing ``CommentsModel.use_outdated_comments``
    to *use_outdated*."""
    return mock.patch.object(
        CommentsModel, 'use_outdated_comments', return_value=use_outdated)
General Comments 0
You need to be logged in to leave comments. Login now