core: removed last activity as it was creating lots of DB locks.
marcink
r2929:c363a5c7 default
@@ -1,1068 +1,1069 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22
23 23 import mock
24 24 import pytest
25 25
26 26 from rhodecode.apps.repository.views.repo_files import RepoFilesView
27 27 from rhodecode.lib import helpers as h
28 28 from rhodecode.lib.compat import OrderedDict
29 29 from rhodecode.lib.ext_json import json
30 30 from rhodecode.lib.vcs import nodes
31 31
32 32 from rhodecode.lib.vcs.conf import settings
33 33 from rhodecode.tests import assert_session_flash
34 34 from rhodecode.tests.fixture import Fixture
35 from rhodecode.model.db import Session
35 36
36 37 fixture = Fixture()
37 38
38 39
39 40 def get_node_history(backend_type):
40 41 return {
41 42 'hg': json.loads(fixture.load_resource('hg_node_history_response.json')),
42 43 'git': json.loads(fixture.load_resource('git_node_history_response.json')),
43 44 'svn': json.loads(fixture.load_resource('svn_node_history_response.json')),
44 45 }[backend_type]
45 46
46 47
47 48 def route_path(name, params=None, **kwargs):
48 49 import urllib
49 50
50 51 base_url = {
51 52 'repo_summary': '/{repo_name}',
52 53 'repo_archivefile': '/{repo_name}/archive/{fname}',
53 54 'repo_files_diff': '/{repo_name}/diff/{f_path}',
54 55 'repo_files_diff_2way_redirect': '/{repo_name}/diff-2way/{f_path}',
55 56 'repo_files': '/{repo_name}/files/{commit_id}/{f_path}',
56 57 'repo_files:default_path': '/{repo_name}/files/{commit_id}/',
57 58 'repo_files:default_commit': '/{repo_name}/files',
58 59 'repo_files:rendered': '/{repo_name}/render/{commit_id}/{f_path}',
59 60 'repo_files:annotated': '/{repo_name}/annotate/{commit_id}/{f_path}',
60 61 'repo_files:annotated_previous': '/{repo_name}/annotate-previous/{commit_id}/{f_path}',
61 62 'repo_files_nodelist': '/{repo_name}/nodelist/{commit_id}/{f_path}',
62 63 'repo_file_raw': '/{repo_name}/raw/{commit_id}/{f_path}',
63 64 'repo_file_download': '/{repo_name}/download/{commit_id}/{f_path}',
64 65 'repo_file_history': '/{repo_name}/history/{commit_id}/{f_path}',
65 66 'repo_file_authors': '/{repo_name}/authors/{commit_id}/{f_path}',
66 67 'repo_files_remove_file': '/{repo_name}/remove_file/{commit_id}/{f_path}',
67 68 'repo_files_delete_file': '/{repo_name}/delete_file/{commit_id}/{f_path}',
68 69 'repo_files_edit_file': '/{repo_name}/edit_file/{commit_id}/{f_path}',
69 70 'repo_files_update_file': '/{repo_name}/update_file/{commit_id}/{f_path}',
70 71 'repo_files_add_file': '/{repo_name}/add_file/{commit_id}/{f_path}',
71 72 'repo_files_create_file': '/{repo_name}/create_file/{commit_id}/{f_path}',
72 73 'repo_nodetree_full': '/{repo_name}/nodetree_full/{commit_id}/{f_path}',
73 74 'repo_nodetree_full:default_path': '/{repo_name}/nodetree_full/{commit_id}/',
74 75 }[name].format(**kwargs)
75 76
76 77 if params:
77 78 base_url = '{}?{}'.format(base_url, urllib.urlencode(params))
78 79 return base_url
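A minimal usage sketch of the route_path helper above; the repository name and commit id are hypothetical, and any params are simply urlencoded onto the formatted pattern:

    route_path('repo_files', repo_name='my-repo', commit_id='tip', f_path='README.rst')
    # => '/my-repo/files/tip/README.rst'
    route_path('repo_file_raw', params={'annotate': 1},
               repo_name='my-repo', commit_id='deadbeef', f_path='setup.py')
    # => '/my-repo/raw/deadbeef/setup.py?annotate=1'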
79 80
80 81
81 82 def assert_files_in_response(response, files, params):
82 83 template = (
83 84 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
84 85 _assert_items_in_response(response, files, template, params)
85 86
86 87
87 88 def assert_dirs_in_response(response, dirs, params):
88 89 template = (
89 90 'href="/%(repo_name)s/files/%(commit_id)s/%(name)s"')
90 91 _assert_items_in_response(response, dirs, template, params)
91 92
92 93
93 94 def _assert_items_in_response(response, items, template, params):
94 95 for item in items:
95 96 item_params = {'name': item}
96 97 item_params.update(params)
97 98 response.mustcontain(template % item_params)
98 99
99 100
100 101 def assert_timeago_in_response(response, items, params):
101 102 for item in items:
102 103 response.mustcontain(h.age_component(params['date']))
103 104
104 105
105 106 @pytest.mark.usefixtures("app")
106 107 class TestFilesViews(object):
107 108
108 109 def test_show_files(self, backend):
109 110 response = self.app.get(
110 111 route_path('repo_files',
111 112 repo_name=backend.repo_name,
112 113 commit_id='tip', f_path='/'))
113 114 commit = backend.repo.get_commit()
114 115
115 116 params = {
116 117 'repo_name': backend.repo_name,
117 118 'commit_id': commit.raw_id,
118 119 'date': commit.date
119 120 }
120 121 assert_dirs_in_response(response, ['docs', 'vcs'], params)
121 122 files = [
122 123 '.gitignore',
123 124 '.hgignore',
124 125 '.hgtags',
125 126 # TODO: missing in Git
126 127 # '.travis.yml',
127 128 'MANIFEST.in',
128 129 'README.rst',
129 130 # TODO: File is missing in svn repository
130 131 # 'run_test_and_report.sh',
131 132 'setup.cfg',
132 133 'setup.py',
133 134 'test_and_report.sh',
134 135 'tox.ini',
135 136 ]
136 137 assert_files_in_response(response, files, params)
137 138 assert_timeago_in_response(response, files, params)
138 139
139 140 def test_show_files_links_submodules_with_absolute_url(self, backend_hg):
140 141 repo = backend_hg['subrepos']
141 142 response = self.app.get(
142 143 route_path('repo_files',
143 144 repo_name=repo.repo_name,
144 145 commit_id='tip', f_path='/'))
145 146 assert_response = response.assert_response()
146 147 assert_response.contains_one_link(
147 148 'absolute-path @ 000000000000', 'http://example.com/absolute-path')
148 149
149 150 def test_show_files_links_submodules_with_absolute_url_subpaths(
150 151 self, backend_hg):
151 152 repo = backend_hg['subrepos']
152 153 response = self.app.get(
153 154 route_path('repo_files',
154 155 repo_name=repo.repo_name,
155 156 commit_id='tip', f_path='/'))
156 157 assert_response = response.assert_response()
157 158 assert_response.contains_one_link(
158 159 'subpaths-path @ 000000000000',
159 160 'http://sub-base.example.com/subpaths-path')
160 161
161 162 @pytest.mark.xfail_backends("svn", reason="Depends on branch support")
162 163 def test_files_menu(self, backend):
163 164 new_branch = "temp_branch_name"
164 165 commits = [
165 166 {'message': 'a'},
166 167 {'message': 'b', 'branch': new_branch}
167 168 ]
168 169 backend.create_repo(commits)
169
170 170 backend.repo.landing_rev = "branch:%s" % new_branch
171 Session().commit()
171 172
172 173 # get response based on tip and not new commit
173 174 response = self.app.get(
174 175 route_path('repo_files',
175 176 repo_name=backend.repo_name,
176 177 commit_id='tip', f_path='/'))
177 178
178 179 # make sure Files menu url is not tip but new commit
179 180 landing_rev = backend.repo.landing_rev[1]
180 181 files_url = route_path('repo_files:default_path',
181 182 repo_name=backend.repo_name,
182 183 commit_id=landing_rev)
183 184
184 185 assert landing_rev != 'tip'
185 186 response.mustcontain(
186 187 '<li class="active"><a class="menulink" href="%s">' % files_url)
187 188
188 189 def test_show_files_commit(self, backend):
189 190 commit = backend.repo.get_commit(commit_idx=32)
190 191
191 192 response = self.app.get(
192 193 route_path('repo_files',
193 194 repo_name=backend.repo_name,
194 195 commit_id=commit.raw_id, f_path='/'))
195 196
196 197 dirs = ['docs', 'tests']
197 198 files = ['README.rst']
198 199 params = {
199 200 'repo_name': backend.repo_name,
200 201 'commit_id': commit.raw_id,
201 202 }
202 203 assert_dirs_in_response(response, dirs, params)
203 204 assert_files_in_response(response, files, params)
204 205
205 206 def test_show_files_different_branch(self, backend):
206 207 branches = dict(
207 208 hg=(150, ['git']),
208 209 # TODO: Git test repository does not contain other branches
209 210 git=(633, ['master']),
210 211 # TODO: Branch support in Subversion
211 212 svn=(150, [])
212 213 )
213 214 idx, branches = branches[backend.alias]
214 215 commit = backend.repo.get_commit(commit_idx=idx)
215 216 response = self.app.get(
216 217 route_path('repo_files',
217 218 repo_name=backend.repo_name,
218 219 commit_id=commit.raw_id, f_path='/'))
219 220
220 221 assert_response = response.assert_response()
221 222 for branch in branches:
222 223 assert_response.element_contains('.tags .branchtag', branch)
223 224
224 225 def test_show_files_paging(self, backend):
225 226 repo = backend.repo
226 227 indexes = [73, 92, 109, 1, 0]
227 228 idx_map = [(rev, repo.get_commit(commit_idx=rev).raw_id)
228 229 for rev in indexes]
229 230
230 231 for idx in idx_map:
231 232 response = self.app.get(
232 233 route_path('repo_files',
233 234 repo_name=backend.repo_name,
234 235 commit_id=idx[1], f_path='/'))
235 236
236 237 response.mustcontain("""r%s:%s""" % (idx[0], idx[1][:8]))
237 238
238 239 def test_file_source(self, backend):
239 240 commit = backend.repo.get_commit(commit_idx=167)
240 241 response = self.app.get(
241 242 route_path('repo_files',
242 243 repo_name=backend.repo_name,
243 244 commit_id=commit.raw_id, f_path='vcs/nodes.py'))
244 245
245 246 msgbox = """<div class="commit right-content">%s</div>"""
246 247 response.mustcontain(msgbox % (commit.message, ))
247 248
248 249 assert_response = response.assert_response()
249 250 if commit.branch:
250 251 assert_response.element_contains(
251 252 '.tags.tags-main .branchtag', commit.branch)
252 253 if commit.tags:
253 254 for tag in commit.tags:
254 255 assert_response.element_contains('.tags.tags-main .tagtag', tag)
255 256
256 257 def test_file_source_annotated(self, backend):
257 258 response = self.app.get(
258 259 route_path('repo_files:annotated',
259 260 repo_name=backend.repo_name,
260 261 commit_id='tip', f_path='vcs/nodes.py'))
261 262 expected_commits = {
262 263 'hg': 'r356',
263 264 'git': 'r345',
264 265 'svn': 'r208',
265 266 }
266 267 response.mustcontain(expected_commits[backend.alias])
267 268
268 269 def test_file_source_authors(self, backend):
269 270 response = self.app.get(
270 271 route_path('repo_file_authors',
271 272 repo_name=backend.repo_name,
272 273 commit_id='tip', f_path='vcs/nodes.py'))
273 274 expected_authors = {
274 275 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
275 276 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
276 277 'svn': ('marcin', 'lukasz'),
277 278 }
278 279
279 280 for author in expected_authors[backend.alias]:
280 281 response.mustcontain(author)
281 282
282 283 def test_file_source_authors_with_annotation(self, backend):
283 284 response = self.app.get(
284 285 route_path('repo_file_authors',
285 286 repo_name=backend.repo_name,
286 287 commit_id='tip', f_path='vcs/nodes.py',
287 288 params=dict(annotate=1)))
288 289 expected_authors = {
289 290 'hg': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
290 291 'git': ('Marcin Kuzminski', 'Lukasz Balcerzak'),
291 292 'svn': ('marcin', 'lukasz'),
292 293 }
293 294
294 295 for author in expected_authors[backend.alias]:
295 296 response.mustcontain(author)
296 297
297 298 def test_file_source_history(self, backend, xhr_header):
298 299 response = self.app.get(
299 300 route_path('repo_file_history',
300 301 repo_name=backend.repo_name,
301 302 commit_id='tip', f_path='vcs/nodes.py'),
302 303 extra_environ=xhr_header)
303 304 assert get_node_history(backend.alias) == json.loads(response.body)
304 305
305 306 def test_file_source_history_svn(self, backend_svn, xhr_header):
306 307 simple_repo = backend_svn['svn-simple-layout']
307 308 response = self.app.get(
308 309 route_path('repo_file_history',
309 310 repo_name=simple_repo.repo_name,
310 311 commit_id='tip', f_path='trunk/example.py'),
311 312 extra_environ=xhr_header)
312 313
313 314 expected_data = json.loads(
314 315 fixture.load_resource('svn_node_history_branches.json'))
315 316 assert expected_data == response.json
316 317
317 318 def test_file_source_history_with_annotation(self, backend, xhr_header):
318 319 response = self.app.get(
319 320 route_path('repo_file_history',
320 321 repo_name=backend.repo_name,
321 322 commit_id='tip', f_path='vcs/nodes.py',
322 323 params=dict(annotate=1)),
323 324
324 325 extra_environ=xhr_header)
325 326 assert get_node_history(backend.alias) == json.loads(response.body)
326 327
327 328 def test_tree_search_top_level(self, backend, xhr_header):
328 329 commit = backend.repo.get_commit(commit_idx=173)
329 330 response = self.app.get(
330 331 route_path('repo_files_nodelist',
331 332 repo_name=backend.repo_name,
332 333 commit_id=commit.raw_id, f_path='/'),
333 334 extra_environ=xhr_header)
334 335 assert 'nodes' in response.json
335 336 assert {'name': 'docs', 'type': 'dir'} in response.json['nodes']
336 337
337 338 def test_tree_search_missing_xhr(self, backend):
338 339 self.app.get(
339 340 route_path('repo_files_nodelist',
340 341 repo_name=backend.repo_name,
341 342 commit_id='tip', f_path='/'),
342 343 status=404)
343 344
344 345 def test_tree_search_at_path(self, backend, xhr_header):
345 346 commit = backend.repo.get_commit(commit_idx=173)
346 347 response = self.app.get(
347 348 route_path('repo_files_nodelist',
348 349 repo_name=backend.repo_name,
349 350 commit_id=commit.raw_id, f_path='/docs'),
350 351 extra_environ=xhr_header)
351 352 assert 'nodes' in response.json
352 353 nodes = response.json['nodes']
353 354 assert {'name': 'docs/api', 'type': 'dir'} in nodes
354 355 assert {'name': 'docs/index.rst', 'type': 'file'} in nodes
355 356
356 357 def test_tree_search_at_path_2nd_level(self, backend, xhr_header):
357 358 commit = backend.repo.get_commit(commit_idx=173)
358 359 response = self.app.get(
359 360 route_path('repo_files_nodelist',
360 361 repo_name=backend.repo_name,
361 362 commit_id=commit.raw_id, f_path='/docs/api'),
362 363 extra_environ=xhr_header)
363 364 assert 'nodes' in response.json
364 365 nodes = response.json['nodes']
365 366 assert {'name': 'docs/api/index.rst', 'type': 'file'} in nodes
366 367
367 368 def test_tree_search_at_path_missing_xhr(self, backend):
368 369 self.app.get(
369 370 route_path('repo_files_nodelist',
370 371 repo_name=backend.repo_name,
371 372 commit_id='tip', f_path='/docs'),
372 373 status=404)
373 374
374 375 def test_nodetree(self, backend, xhr_header):
375 376 commit = backend.repo.get_commit(commit_idx=173)
376 377 response = self.app.get(
377 378 route_path('repo_nodetree_full',
378 379 repo_name=backend.repo_name,
379 380 commit_id=commit.raw_id, f_path='/'),
380 381 extra_environ=xhr_header)
381 382
382 383 assert_response = response.assert_response()
383 384
384 385 for attr in ['data-commit-id', 'data-date', 'data-author']:
385 386 elements = assert_response.get_elements('[{}]'.format(attr))
386 387 assert len(elements) > 1
387 388
388 389 for element in elements:
389 390 assert element.get(attr)
390 391
391 392 def test_nodetree_if_file(self, backend, xhr_header):
392 393 commit = backend.repo.get_commit(commit_idx=173)
393 394 response = self.app.get(
394 395 route_path('repo_nodetree_full',
395 396 repo_name=backend.repo_name,
396 397 commit_id=commit.raw_id, f_path='README.rst'),
397 398 extra_environ=xhr_header)
398 399 assert response.body == ''
399 400
400 401 def test_nodetree_wrong_path(self, backend, xhr_header):
401 402 commit = backend.repo.get_commit(commit_idx=173)
402 403 response = self.app.get(
403 404 route_path('repo_nodetree_full',
404 405 repo_name=backend.repo_name,
405 406 commit_id=commit.raw_id, f_path='/dont-exist'),
406 407 extra_environ=xhr_header)
407 408
408 409 err = 'error: There is no file nor ' \
409 410 'directory at the given path'
410 411 assert err in response.body
411 412
412 413 def test_nodetree_missing_xhr(self, backend):
413 414 self.app.get(
414 415 route_path('repo_nodetree_full',
415 416 repo_name=backend.repo_name,
416 417 commit_id='tip', f_path='/'),
417 418 status=404)
418 419
419 420
420 421 @pytest.mark.usefixtures("app", "autologin_user")
421 422 class TestRawFileHandling(object):
422 423
423 424 def test_download_file(self, backend):
424 425 commit = backend.repo.get_commit(commit_idx=173)
425 426 response = self.app.get(
426 427 route_path('repo_file_download',
427 428 repo_name=backend.repo_name,
428 429 commit_id=commit.raw_id, f_path='vcs/nodes.py'),)
429 430
430 431 assert response.content_disposition == "attachment; filename=nodes.py"
431 432 assert response.content_type == "text/x-python"
432 433
433 434 def test_download_file_wrong_cs(self, backend):
434 435 raw_id = u'ERRORce30c96924232dffcd24178a07ffeb5dfc'
435 436
436 437 response = self.app.get(
437 438 route_path('repo_file_download',
438 439 repo_name=backend.repo_name,
439 440 commit_id=raw_id, f_path='vcs/nodes.svg'),
440 441 status=404)
441 442
442 443 msg = """No such commit exists for this repository"""
443 444 response.mustcontain(msg)
444 445
445 446 def test_download_file_wrong_f_path(self, backend):
446 447 commit = backend.repo.get_commit(commit_idx=173)
447 448 f_path = 'vcs/ERRORnodes.py'
448 449
449 450 response = self.app.get(
450 451 route_path('repo_file_download',
451 452 repo_name=backend.repo_name,
452 453 commit_id=commit.raw_id, f_path=f_path),
453 454 status=404)
454 455
455 456 msg = (
456 457 "There is no file nor directory at the given path: "
457 458 "`%s` at commit %s" % (f_path, commit.short_id))
458 459 response.mustcontain(msg)
459 460
460 461 def test_file_raw(self, backend):
461 462 commit = backend.repo.get_commit(commit_idx=173)
462 463 response = self.app.get(
463 464 route_path('repo_file_raw',
464 465 repo_name=backend.repo_name,
465 466 commit_id=commit.raw_id, f_path='vcs/nodes.py'),)
466 467
467 468 assert response.content_type == "text/plain"
468 469
469 470 def test_file_raw_binary(self, backend):
470 471 commit = backend.repo.get_commit()
471 472 response = self.app.get(
472 473 route_path('repo_file_raw',
473 474 repo_name=backend.repo_name,
474 475 commit_id=commit.raw_id,
475 476 f_path='docs/theme/ADC/static/breadcrumb_background.png'),)
476 477
477 478 assert response.content_disposition == 'inline'
478 479
479 480 def test_raw_file_wrong_cs(self, backend):
480 481 raw_id = u'ERRORcce30c96924232dffcd24178a07ffeb5dfc'
481 482
482 483 response = self.app.get(
483 484 route_path('repo_file_raw',
484 485 repo_name=backend.repo_name,
485 486 commit_id=raw_id, f_path='vcs/nodes.svg'),
486 487 status=404)
487 488
488 489 msg = """No such commit exists for this repository"""
489 490 response.mustcontain(msg)
490 491
491 492 def test_raw_wrong_f_path(self, backend):
492 493 commit = backend.repo.get_commit(commit_idx=173)
493 494 f_path = 'vcs/ERRORnodes.py'
494 495 response = self.app.get(
495 496 route_path('repo_file_raw',
496 497 repo_name=backend.repo_name,
497 498 commit_id=commit.raw_id, f_path=f_path),
498 499 status=404)
499 500
500 501 msg = (
501 502 "There is no file nor directory at the given path: "
502 503 "`%s` at commit %s" % (f_path, commit.short_id))
503 504 response.mustcontain(msg)
504 505
505 506 def test_raw_svg_should_not_be_rendered(self, backend):
506 507 backend.create_repo()
507 508 backend.ensure_file("xss.svg")
508 509 response = self.app.get(
509 510 route_path('repo_file_raw',
510 511 repo_name=backend.repo_name,
511 512 commit_id='tip', f_path='xss.svg'),)
512 513 # If the content type is image/svg+xml then it allows rendering HTML
513 514 # and malicious SVG.
514 515 assert response.content_type == "text/plain"
515 516
516 517
517 518 @pytest.mark.usefixtures("app")
518 519 class TestRepositoryArchival(object):
519 520
520 521 def test_archival(self, backend):
521 522 backend.enable_downloads()
522 523 commit = backend.repo.get_commit(commit_idx=173)
523 524 for archive, info in settings.ARCHIVE_SPECS.items():
524 525 mime_type, arch_ext = info
525 526 short = commit.short_id + arch_ext
526 527 fname = commit.raw_id + arch_ext
527 528 filename = '%s-%s' % (backend.repo_name, short)
528 529 response = self.app.get(
529 530 route_path('repo_archivefile',
530 531 repo_name=backend.repo_name,
531 532 fname=fname))
532 533
533 534 assert response.status == '200 OK'
534 535 headers = [
535 536 ('Content-Disposition', 'attachment; filename=%s' % filename),
536 537 ('Content-Type', '%s' % mime_type),
537 538 ]
538 539
539 540 for header in headers:
540 541 assert header in response.headers.items()
541 542
542 543 @pytest.mark.parametrize('arch_ext',[
543 544 'tar', 'rar', 'x', '..ax', '.zipz', 'tar.gz.tar'])
544 545 def test_archival_wrong_ext(self, backend, arch_ext):
545 546 backend.enable_downloads()
546 547 commit = backend.repo.get_commit(commit_idx=173)
547 548
548 549 fname = commit.raw_id + '.' + arch_ext
549 550
550 551 response = self.app.get(
551 552 route_path('repo_archivefile',
552 553 repo_name=backend.repo_name,
553 554 fname=fname))
554 555 response.mustcontain(
555 556 'Unknown archive type for: `{}`'.format(fname))
556 557
557 558 @pytest.mark.parametrize('commit_id', [
558 559 '00x000000', 'tar', 'wrong', '@$@$42413232', '232dffcd'])
559 560 def test_archival_wrong_commit_id(self, backend, commit_id):
560 561 backend.enable_downloads()
561 562 fname = '%s.zip' % commit_id
562 563
563 564 response = self.app.get(
564 565 route_path('repo_archivefile',
565 566 repo_name=backend.repo_name,
566 567 fname=fname))
567 568 response.mustcontain('Unknown commit_id')
568 569
569 570
570 571 @pytest.mark.usefixtures("app")
571 572 class TestFilesDiff(object):
572 573
573 574 @pytest.mark.parametrize("diff", ['diff', 'download', 'raw'])
574 575 def test_file_full_diff(self, backend, diff):
575 576 commit1 = backend.repo.get_commit(commit_idx=-1)
576 577 commit2 = backend.repo.get_commit(commit_idx=-2)
577 578
578 579 response = self.app.get(
579 580 route_path('repo_files_diff',
580 581 repo_name=backend.repo_name,
581 582 f_path='README'),
582 583 params={
583 584 'diff1': commit2.raw_id,
584 585 'diff2': commit1.raw_id,
585 586 'fulldiff': '1',
586 587 'diff': diff,
587 588 })
588 589
589 590 if diff == 'diff':
590 591 # follow the redirect since this is the OLD view redirecting to the compare page
591 592 response = response.follow()
592 593
593 594 # It's a symlink to README.rst
594 595 response.mustcontain('README.rst')
595 596 response.mustcontain('No newline at end of file')
596 597
597 598 def test_file_binary_diff(self, backend):
598 599 commits = [
599 600 {'message': 'First commit'},
600 601 {'message': 'Commit with binary',
601 602 'added': [nodes.FileNode('file.bin', content='\0BINARY\0')]},
602 603 ]
603 604 repo = backend.create_repo(commits=commits)
604 605
605 606 response = self.app.get(
606 607 route_path('repo_files_diff',
607 608 repo_name=backend.repo_name,
608 609 f_path='file.bin'),
609 610 params={
610 611 'diff1': repo.get_commit(commit_idx=0).raw_id,
611 612 'diff2': repo.get_commit(commit_idx=1).raw_id,
612 613 'fulldiff': '1',
613 614 'diff': 'diff',
614 615 })
615 616 # follow the redirect since this is the OLD view redirecting to the compare page
616 617 response = response.follow()
617 618 response.mustcontain('Expand 1 commit')
618 619 response.mustcontain('1 file changed: 0 inserted, 0 deleted')
619 620
620 621 if backend.alias == 'svn':
621 622 response.mustcontain('new file 10644')
622 623 # TODO(marcink): SVN doesn't yet detect binary changes
623 624 else:
624 625 response.mustcontain('new file 100644')
625 626 response.mustcontain('binary diff hidden')
626 627
627 628 def test_diff_2way(self, backend):
628 629 commit1 = backend.repo.get_commit(commit_idx=-1)
629 630 commit2 = backend.repo.get_commit(commit_idx=-2)
630 631 response = self.app.get(
631 632 route_path('repo_files_diff_2way_redirect',
632 633 repo_name=backend.repo_name,
633 634 f_path='README'),
634 635 params={
635 636 'diff1': commit2.raw_id,
636 637 'diff2': commit1.raw_id,
637 638 })
638 639 # follow the redirect since this is the OLD view redirecting to the compare page
639 640 response = response.follow()
640 641
641 642 # It's a symlink to README.rst
642 643 response.mustcontain('README.rst')
643 644 response.mustcontain('No newline at end of file')
644 645
645 646 def test_requires_one_commit_id(self, backend, autologin_user):
646 647 response = self.app.get(
647 648 route_path('repo_files_diff',
648 649 repo_name=backend.repo_name,
649 650 f_path='README.rst'),
650 651 status=400)
651 652 response.mustcontain(
652 653 'Need query parameter', 'diff1', 'diff2', 'to generate a diff.')
653 654
654 655 def test_returns_no_files_if_file_does_not_exist(self, vcsbackend):
655 656 repo = vcsbackend.repo
656 657 response = self.app.get(
657 658 route_path('repo_files_diff',
658 659 repo_name=repo.name,
659 660 f_path='does-not-exist-in-any-commit'),
660 661 params={
661 662 'diff1': repo[0].raw_id,
662 663 'diff2': repo[1].raw_id
663 664 })
664 665
665 666 response = response.follow()
666 667 response.mustcontain('No files')
667 668
668 669 def test_returns_redirect_if_file_not_changed(self, backend):
669 670 commit = backend.repo.get_commit(commit_idx=-1)
670 671 response = self.app.get(
671 672 route_path('repo_files_diff_2way_redirect',
672 673 repo_name=backend.repo_name,
673 674 f_path='README'),
674 675 params={
675 676 'diff1': commit.raw_id,
676 677 'diff2': commit.raw_id,
677 678 })
678 679
679 680 response = response.follow()
680 681 response.mustcontain('No files')
681 682 response.mustcontain('No commits in this compare')
682 683
683 684 def test_supports_diff_to_different_path_svn(self, backend_svn):
684 685 #TODO: check this case
685 686 return
686 687
687 688 repo = backend_svn['svn-simple-layout'].scm_instance()
688 689 commit_id_1 = '24'
689 690 commit_id_2 = '26'
690 691
691 692 response = self.app.get(
692 693 route_path('repo_files_diff',
693 694 repo_name=backend_svn.repo_name,
694 695 f_path='trunk/example.py'),
695 696 params={
696 697 'diff1': 'tags/v0.2/example.py@' + commit_id_1,
697 698 'diff2': commit_id_2,
698 699 })
699 700
700 701 response = response.follow()
701 702 response.mustcontain(
702 703 # diff contains this
703 704 "Will print out a useful message on invocation.")
704 705
705 706 # Note: Expecting that we indicate to the user what's being compared
706 707 response.mustcontain("trunk/example.py")
707 708 response.mustcontain("tags/v0.2/example.py")
708 709
709 710 def test_show_rev_redirects_to_svn_path(self, backend_svn):
710 711 #TODO: check this case
711 712 return
712 713
713 714 repo = backend_svn['svn-simple-layout'].scm_instance()
714 715 commit_id = repo[-1].raw_id
715 716
716 717 response = self.app.get(
717 718 route_path('repo_files_diff',
718 719 repo_name=backend_svn.repo_name,
719 720 f_path='trunk/example.py'),
720 721 params={
721 722 'diff1': 'branches/argparse/example.py@' + commit_id,
722 723 'diff2': commit_id,
723 724 },
724 725 status=302)
725 726 response = response.follow()
726 727 assert response.headers['Location'].endswith(
727 728 'svn-svn-simple-layout/files/26/branches/argparse/example.py')
728 729
729 730 def test_show_rev_and_annotate_redirects_to_svn_path(self, backend_svn):
730 731 #TODO: check this case
731 732 return
732 733
733 734 repo = backend_svn['svn-simple-layout'].scm_instance()
734 735 commit_id = repo[-1].raw_id
735 736 response = self.app.get(
736 737 route_path('repo_files_diff',
737 738 repo_name=backend_svn.repo_name,
738 739 f_path='trunk/example.py'),
739 740 params={
740 741 'diff1': 'branches/argparse/example.py@' + commit_id,
741 742 'diff2': commit_id,
742 743 'show_rev': 'Show at Revision',
743 744 'annotate': 'true',
744 745 },
745 746 status=302)
746 747 response = response.follow()
747 748 assert response.headers['Location'].endswith(
748 749 'svn-svn-simple-layout/annotate/26/branches/argparse/example.py')
749 750
750 751
751 752 @pytest.mark.usefixtures("app", "autologin_user")
752 753 class TestModifyFilesWithWebInterface(object):
753 754
754 755 def test_add_file_view(self, backend):
755 756 self.app.get(
756 757 route_path('repo_files_add_file',
757 758 repo_name=backend.repo_name,
758 759 commit_id='tip', f_path='/')
759 760 )
760 761
761 762 @pytest.mark.xfail_backends("svn", reason="Depends on online editing")
762 763 def test_add_file_into_repo_missing_content(self, backend, csrf_token):
763 764 repo = backend.create_repo()
764 765 filename = 'init.py'
765 766 response = self.app.post(
766 767 route_path('repo_files_create_file',
767 768 repo_name=backend.repo_name,
768 769 commit_id='tip', f_path='/'),
769 770 params={
770 771 'content': "",
771 772 'filename': filename,
772 773 'location': "",
773 774 'csrf_token': csrf_token,
774 775 },
775 776 status=302)
776 777 assert_session_flash(response,
777 778 'Successfully committed new file `{}`'.format(
778 779 os.path.join(filename)))
779 780
780 781 def test_add_file_into_repo_missing_filename(self, backend, csrf_token):
781 782 response = self.app.post(
782 783 route_path('repo_files_create_file',
783 784 repo_name=backend.repo_name,
784 785 commit_id='tip', f_path='/'),
785 786 params={
786 787 'content': "foo",
787 788 'csrf_token': csrf_token,
788 789 },
789 790 status=302)
790 791
791 792 assert_session_flash(response, 'No filename')
792 793
793 794 def test_add_file_into_repo_errors_and_no_commits(
794 795 self, backend, csrf_token):
795 796 repo = backend.create_repo()
796 797 # Create a file with no filename; it will display an error, but
797 798 # the repo has no commits yet
798 799 response = self.app.post(
799 800 route_path('repo_files_create_file',
800 801 repo_name=repo.repo_name,
801 802 commit_id='tip', f_path='/'),
802 803 params={
803 804 'content': "foo",
804 805 'csrf_token': csrf_token,
805 806 },
806 807 status=302)
807 808
808 809 assert_session_flash(response, 'No filename')
809 810
810 811 # Not allowed, redirect to the summary
811 812 redirected = response.follow()
812 813 summary_url = h.route_path('repo_summary', repo_name=repo.repo_name)
813 814
814 815 # As there are no commits, it displays the summary page with the error of
815 816 # creating a file with no filename
816 817
817 818 assert redirected.request.path == summary_url
818 819
819 820 @pytest.mark.parametrize("location, filename", [
820 821 ('/abs', 'foo'),
821 822 ('../rel', 'foo'),
822 823 ('file/../foo', 'foo'),
823 824 ])
824 825 def test_add_file_into_repo_bad_filenames(
825 826 self, location, filename, backend, csrf_token):
826 827 response = self.app.post(
827 828 route_path('repo_files_create_file',
828 829 repo_name=backend.repo_name,
829 830 commit_id='tip', f_path='/'),
830 831 params={
831 832 'content': "foo",
832 833 'filename': filename,
833 834 'location': location,
834 835 'csrf_token': csrf_token,
835 836 },
836 837 status=302)
837 838
838 839 assert_session_flash(
839 840 response,
840 841 'The location specified must be a relative path and must not '
841 842 'contain .. in the path')
842 843
843 844 @pytest.mark.parametrize("cnt, location, filename", [
844 845 (1, '', 'foo.txt'),
845 846 (2, 'dir', 'foo.rst'),
846 847 (3, 'rel/dir', 'foo.bar'),
847 848 ])
848 849 def test_add_file_into_repo(self, cnt, location, filename, backend,
849 850 csrf_token):
850 851 repo = backend.create_repo()
851 852 response = self.app.post(
852 853 route_path('repo_files_create_file',
853 854 repo_name=repo.repo_name,
854 855 commit_id='tip', f_path='/'),
855 856 params={
856 857 'content': "foo",
857 858 'filename': filename,
858 859 'location': location,
859 860 'csrf_token': csrf_token,
860 861 },
861 862 status=302)
862 863 assert_session_flash(response,
863 864 'Successfully committed new file `{}`'.format(
864 865 os.path.join(location, filename)))
865 866
866 867 def test_edit_file_view(self, backend):
867 868 response = self.app.get(
868 869 route_path('repo_files_edit_file',
869 870 repo_name=backend.repo_name,
870 871 commit_id=backend.default_head_id,
871 872 f_path='vcs/nodes.py'),
872 873 status=200)
873 874 response.mustcontain("Module holding everything related to vcs nodes.")
874 875
875 876 def test_edit_file_view_not_on_branch(self, backend):
876 877 repo = backend.create_repo()
877 878 backend.ensure_file("vcs/nodes.py")
878 879
879 880 response = self.app.get(
880 881 route_path('repo_files_edit_file',
881 882 repo_name=repo.repo_name,
882 883 commit_id='tip',
883 884 f_path='vcs/nodes.py'),
884 885 status=302)
885 886 assert_session_flash(
886 887 response,
887 888 'You can only edit files with commit being a valid branch')
888 889
889 890 def test_edit_file_view_commit_changes(self, backend, csrf_token):
890 891 repo = backend.create_repo()
891 892 backend.ensure_file("vcs/nodes.py", content="print 'hello'")
892 893
893 894 response = self.app.post(
894 895 route_path('repo_files_update_file',
895 896 repo_name=repo.repo_name,
896 897 commit_id=backend.default_head_id,
897 898 f_path='vcs/nodes.py'),
898 899 params={
899 900 'content': "print 'hello world'",
900 901 'message': 'I committed',
901 902 'filename': "vcs/nodes.py",
902 903 'csrf_token': csrf_token,
903 904 },
904 905 status=302)
905 906 assert_session_flash(
906 907 response, 'Successfully committed changes to file `vcs/nodes.py`')
907 908 tip = repo.get_commit(commit_idx=-1)
908 909 assert tip.message == 'I committed'
909 910
910 911 def test_edit_file_view_commit_changes_default_message(self, backend,
911 912 csrf_token):
912 913 repo = backend.create_repo()
913 914 backend.ensure_file("vcs/nodes.py", content="print 'hello'")
914 915
915 916 commit_id = (
916 917 backend.default_branch_name or
917 918 backend.repo.scm_instance().commit_ids[-1])
918 919
919 920 response = self.app.post(
920 921 route_path('repo_files_update_file',
921 922 repo_name=repo.repo_name,
922 923 commit_id=commit_id,
923 924 f_path='vcs/nodes.py'),
924 925 params={
925 926 'content': "print 'hello world'",
926 927 'message': '',
927 928 'filename': "vcs/nodes.py",
928 929 'csrf_token': csrf_token,
929 930 },
930 931 status=302)
931 932 assert_session_flash(
932 933 response, 'Successfully committed changes to file `vcs/nodes.py`')
933 934 tip = repo.get_commit(commit_idx=-1)
934 935 assert tip.message == 'Edited file vcs/nodes.py via RhodeCode Enterprise'
935 936
936 937 def test_delete_file_view(self, backend):
937 938 self.app.get(
938 939 route_path('repo_files_remove_file',
939 940 repo_name=backend.repo_name,
940 941 commit_id=backend.default_head_id,
941 942 f_path='vcs/nodes.py'),
942 943 status=200)
943 944
944 945 def test_delete_file_view_not_on_branch(self, backend):
945 946 repo = backend.create_repo()
946 947 backend.ensure_file('vcs/nodes.py')
947 948
948 949 response = self.app.get(
949 950 route_path('repo_files_remove_file',
950 951 repo_name=repo.repo_name,
951 952 commit_id='tip',
952 953 f_path='vcs/nodes.py'),
953 954 status=302)
954 955 assert_session_flash(
955 956 response,
956 957 'You can only delete files with commit being a valid branch')
957 958
958 959 def test_delete_file_view_commit_changes(self, backend, csrf_token):
959 960 repo = backend.create_repo()
960 961 backend.ensure_file("vcs/nodes.py")
961 962
962 963 response = self.app.post(
963 964 route_path('repo_files_delete_file',
964 965 repo_name=repo.repo_name,
965 966 commit_id=backend.default_head_id,
966 967 f_path='vcs/nodes.py'),
967 968 params={
968 969 'message': 'i commited',
969 970 'csrf_token': csrf_token,
970 971 },
971 972 status=302)
972 973 assert_session_flash(
973 974 response, 'Successfully deleted file `vcs/nodes.py`')
974 975
975 976
976 977 @pytest.mark.usefixtures("app")
977 978 class TestFilesViewOtherCases(object):
978 979
979 980 def test_access_empty_repo_redirect_to_summary_with_alert_write_perms(
980 981 self, backend_stub, autologin_regular_user, user_regular,
981 982 user_util):
982 983
983 984 repo = backend_stub.create_repo()
984 985 user_util.grant_user_permission_to_repo(
985 986 repo, user_regular, 'repository.write')
986 987 response = self.app.get(
987 988 route_path('repo_files',
988 989 repo_name=repo.repo_name,
989 990 commit_id='tip', f_path='/'))
990 991
991 992 repo_file_add_url = route_path(
992 993 'repo_files_add_file',
993 994 repo_name=repo.repo_name,
994 995 commit_id=0, f_path='') + '#edit'
995 996
996 997 assert_session_flash(
997 998 response,
998 999 'There are no files yet. <a class="alert-link" '
999 1000 'href="{}">Click here to add a new file.</a>'
1000 1001 .format(repo_file_add_url))
1001 1002
1002 1003 def test_access_empty_repo_redirect_to_summary_with_alert_no_write_perms(
1003 1004 self, backend_stub, autologin_regular_user):
1004 1005 repo = backend_stub.create_repo()
1005 1006 # init session for anon user
1006 1007 route_path('repo_summary', repo_name=repo.repo_name)
1007 1008
1008 1009 repo_file_add_url = route_path(
1009 1010 'repo_files_add_file',
1010 1011 repo_name=repo.repo_name,
1011 1012 commit_id=0, f_path='') + '#edit'
1012 1013
1013 1014 response = self.app.get(
1014 1015 route_path('repo_files',
1015 1016 repo_name=repo.repo_name,
1016 1017 commit_id='tip', f_path='/'))
1017 1018
1018 1019 assert_session_flash(response, no_=repo_file_add_url)
1019 1020
1020 1021 @pytest.mark.parametrize('file_node', [
1021 1022 'archive/file.zip',
1022 1023 'diff/my-file.txt',
1023 1024 'render.py',
1024 1025 'render',
1025 1026 'remove_file',
1026 1027 'remove_file/to-delete.txt',
1027 1028 ])
1028 1029 def test_file_names_equal_to_routes_parts(self, backend, file_node):
1029 1030 backend.create_repo()
1030 1031 backend.ensure_file(file_node)
1031 1032
1032 1033 self.app.get(
1033 1034 route_path('repo_files',
1034 1035 repo_name=backend.repo_name,
1035 1036 commit_id='tip', f_path=file_node),
1036 1037 status=200)
1037 1038
1038 1039
1039 1040 class TestAdjustFilePathForSvn(object):
1040 1041 """
1041 1042 SVN-specific adjustments of node history in RepoFilesView.
1042 1043 """
1043 1044
1044 1045 def test_returns_path_relative_to_matched_reference(self):
1045 1046 repo = self._repo(branches=['trunk'])
1046 1047 self.assert_file_adjustment('trunk/file', 'file', repo)
1047 1048
1048 1049 def test_does_not_modify_file_if_no_reference_matches(self):
1049 1050 repo = self._repo(branches=['trunk'])
1050 1051 self.assert_file_adjustment('notes/file', 'notes/file', repo)
1051 1052
1052 1053 def test_does_not_adjust_partial_directory_names(self):
1053 1054 repo = self._repo(branches=['trun'])
1054 1055 self.assert_file_adjustment('trunk/file', 'trunk/file', repo)
1055 1056
1056 1057 def test_is_robust_to_patterns_which_prefix_other_patterns(self):
1057 1058 repo = self._repo(branches=['trunk', 'trunk/new', 'trunk/old'])
1058 1059 self.assert_file_adjustment('trunk/new/file', 'file', repo)
1059 1060
1060 1061 def assert_file_adjustment(self, f_path, expected, repo):
1061 1062 result = RepoFilesView.adjust_file_path_for_svn(f_path, repo)
1062 1063 assert result == expected
1063 1064
1064 1065 def _repo(self, branches=None):
1065 1066 repo = mock.Mock()
1066 1067 repo.branches = OrderedDict((name, '0') for name in branches or [])
1067 1068 repo.tags = {}
1068 1069 return repo
@@ -1,2202 +1,2195 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 authentication and permission libraries
23 23 """
24 24
25 25 import os
26 26 import time
27 27 import inspect
28 28 import collections
29 29 import fnmatch
30 30 import hashlib
31 31 import itertools
32 32 import logging
33 33 import random
34 34 import traceback
35 35 from functools import wraps
36 36
37 37 import ipaddress
38 38
39 39 from pyramid.httpexceptions import HTTPForbidden, HTTPFound, HTTPNotFound
40 40 from sqlalchemy.orm.exc import ObjectDeletedError
41 41 from sqlalchemy.orm import joinedload
42 42 from zope.cachedescriptors.property import Lazy as LazyProperty
43 43
44 44 import rhodecode
45 45 from rhodecode.model import meta
46 46 from rhodecode.model.meta import Session
47 47 from rhodecode.model.user import UserModel
48 48 from rhodecode.model.db import (
49 49 User, Repository, Permission, UserToPerm, UserGroupToPerm, UserGroupMember,
50 50 UserIpMap, UserApiKeys, RepoGroup, UserGroup)
51 51 from rhodecode.lib import rc_cache
52 52 from rhodecode.lib.utils2 import safe_unicode, aslist, safe_str, md5, safe_int, sha1
53 53 from rhodecode.lib.utils import (
54 54 get_repo_slug, get_repo_group_slug, get_user_group_slug)
55 55 from rhodecode.lib.caching_query import FromCache
56 56
57 57
58 58 if rhodecode.is_unix:
59 59 import bcrypt
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63 csrf_token_key = "csrf_token"
64 64
65 65
66 66 class PasswordGenerator(object):
67 67 """
68 68 This is a simple class for generating passwords from different sets of
69 69 characters.
70 70 usage::
71 71
72 72 passwd_gen = PasswordGenerator()
73 73 # print an 8-letter password containing only big and small
74 74 # letters of the alphabet
75 75 passwd_gen.gen_password(8, passwd_gen.ALPHABETS_BIG_SMALL)
76 76 """
77 77 ALPHABETS_NUM = r'''1234567890'''
78 78 ALPHABETS_SMALL = r'''qwertyuiopasdfghjklzxcvbnm'''
79 79 ALPHABETS_BIG = r'''QWERTYUIOPASDFGHJKLZXCVBNM'''
80 80 ALPHABETS_SPECIAL = r'''`-=[]\;',./~!@#$%^&*()_+{}|:"<>?'''
81 81 ALPHABETS_FULL = ALPHABETS_BIG + ALPHABETS_SMALL \
82 82 + ALPHABETS_NUM + ALPHABETS_SPECIAL
83 83 ALPHABETS_ALPHANUM = ALPHABETS_BIG + ALPHABETS_SMALL + ALPHABETS_NUM
84 84 ALPHABETS_BIG_SMALL = ALPHABETS_BIG + ALPHABETS_SMALL
85 85 ALPHABETS_ALPHANUM_BIG = ALPHABETS_BIG + ALPHABETS_NUM
86 86 ALPHABETS_ALPHANUM_SMALL = ALPHABETS_SMALL + ALPHABETS_NUM
87 87
88 88 def __init__(self, passwd=''):
89 89 self.passwd = passwd
90 90
91 91 def gen_password(self, length, type_=None):
92 92 if type_ is None:
93 93 type_ = self.ALPHABETS_FULL
94 94 self.passwd = ''.join([random.choice(type_) for _ in range(length)])
95 95 return self.passwd
96 96
97 97
98 98 class _RhodeCodeCryptoBase(object):
99 99 ENC_PREF = None
100 100
101 101 def hash_create(self, str_):
102 102 """
103 103 Hash the given string using this backend's algorithm.
104 104
105 105 :param str_: password to hash
106 106 """
107 107 raise NotImplementedError
108 108
109 109 def hash_check_with_upgrade(self, password, hashed):
110 110 """
111 111 Returns a tuple in which the first element is a boolean stating whether
112 112 the given password matches its hashed version, and the second is a new
113 113 hash of the password, in case this password should be migrated to a new
114 114 cipher.
115 115 """
116 116 checked_hash = self.hash_check(password, hashed)
117 117 return checked_hash, None
118 118
119 119 def hash_check(self, password, hashed):
120 120 """
121 121 Checks whether the given password matches its hashed value.
122 122
123 123 :param password: password
124 124 :param hashed: password in hashed form
125 125 """
126 126 raise NotImplementedError
127 127
128 128 def _assert_bytes(self, value):
129 129 """
130 130 Passing in a `unicode` object can lead to hard-to-detect issues
131 131 if passwords contain non-ASCII characters. Do a type check
132 132 at runtime so that such mistakes are detected early on.
133 133 """
134 134 if not isinstance(value, str):
135 135 raise TypeError(
136 136 "Bytestring required as input, got %r." % (value, ))
137 137
138 138
139 139 class _RhodeCodeCryptoBCrypt(_RhodeCodeCryptoBase):
140 140 ENC_PREF = ('$2a$10', '$2b$10')
141 141
142 142 def hash_create(self, str_):
143 143 self._assert_bytes(str_)
144 144 return bcrypt.hashpw(str_, bcrypt.gensalt(10))
145 145
146 146 def hash_check_with_upgrade(self, password, hashed):
147 147 """
148 148 Returns a tuple in which the first element is a boolean stating whether
149 149 the given password matches its hashed version, and the second is a new
150 150 hash of the password, in case this password should be migrated to a new
151 151 cipher.
152 152
153 153 This implements special upgrade logic which works like this:
154 154 - check if the given password matches the bcrypt hash; if yes, the
155 155 password is correct and was already stored as bcrypt. Proceed
156 156 without any changes
157 157 - if the bcrypt check does not match, try sha256. If that comparison
158 158 is ok, we are using a correct but old hashed password. Indicate a
159 159 hash change and proceed
160 160 """
161 161
162 162 new_hash = None
163 163
164 164 # regular pw check
165 165 password_match_bcrypt = self.hash_check(password, hashed)
166 166
167 167 # now we want to know if the password was maybe from sha256
168 168 # basically calling _RhodeCodeCryptoSha256().hash_check()
169 169 if not password_match_bcrypt:
170 170 if _RhodeCodeCryptoSha256().hash_check(password, hashed):
171 171 new_hash = self.hash_create(password) # make new bcrypt hash
172 172 password_match_bcrypt = True
173 173
174 174 return password_match_bcrypt, new_hash
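A minimal sketch of how a caller could consume the upgrade result described above; the plain_password, stored_hash and user names, and persisting the fresh hash on a user object, are assumptions for illustration, not the actual call site:

    valid, new_hash = _RhodeCodeCryptoBCrypt().hash_check_with_upgrade(
        plain_password, stored_hash)
    if valid and new_hash:
        # the password matched the legacy sha256 hash; store the fresh
        # bcrypt hash so future checks take the fast path
        user.password = new_hash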
175 175
176 176 def hash_check(self, password, hashed):
177 177 """
178 178 Checks whether the given password matches its hashed value.
179 179
180 180 :param password: password
181 181 :param hashed: password in hashed form
182 182 """
183 183 self._assert_bytes(password)
184 184 try:
185 185 return bcrypt.hashpw(password, hashed) == hashed
186 186 except ValueError as e:
187 187 # we probably have an invalid salt here; we should not crash,
188 188 # just return False as if it were a wrong password.
189 189 log.debug('Failed to check password hash using bcrypt %s',
190 190 safe_str(e))
191 191
192 192 return False
193 193
194 194
195 195 class _RhodeCodeCryptoSha256(_RhodeCodeCryptoBase):
196 196 ENC_PREF = '_'
197 197
198 198 def hash_create(self, str_):
199 199 self._assert_bytes(str_)
200 200 return hashlib.sha256(str_).hexdigest()
201 201
202 202 def hash_check(self, password, hashed):
203 203 """
204 204 Checks whether the given password matches its hashed value.
205 205
206 206 :param password: password
207 207 :param hashed: password in hashed form
208 208 """
209 209 self._assert_bytes(password)
210 210 return hashlib.sha256(password).hexdigest() == hashed
211 211
212 212
213 213 class _RhodeCodeCryptoTest(_RhodeCodeCryptoBase):
214 214 ENC_PREF = '_'
215 215
216 216 def hash_create(self, str_):
217 217 self._assert_bytes(str_)
218 218 return sha1(str_)
219 219
220 220 def hash_check(self, password, hashed):
221 221 """
222 222 Checks whether the given password matches its hashed value.
223 223
224 224 :param password: password
225 225 :param hashed: password in hashed form
226 226 """
227 227 self._assert_bytes(password)
228 228 return sha1(password) == hashed
229 229
230 230
231 231 def crypto_backend():
232 232 """
233 233 Return the matching crypto backend.
234 234
235 235 Selection is based on whether we are running tests: we pick the sha1-test
236 236 backend to make tests run faster, since bcrypt is expensive to calculate.
237 237 """
238 238 if rhodecode.is_test:
239 239 RhodeCodeCrypto = _RhodeCodeCryptoTest()
240 240 else:
241 241 RhodeCodeCrypto = _RhodeCodeCryptoBCrypt()
242 242
243 243 return RhodeCodeCrypto
244 244
245 245
246 246 def get_crypt_password(password):
247 247 """
248 248 Create the hash of `password` with the active crypto backend.
249 249
250 250 :param password: The cleartext password.
251 251 :type password: unicode
252 252 """
253 253 password = safe_str(password)
254 254 return crypto_backend().hash_create(password)
255 255
256 256
257 257 def check_password(password, hashed):
258 258 """
259 259 Check if the value in `password` matches the hash in `hashed`.
260 260
261 261 :param password: The cleartext password.
262 262 :type password: unicode
263 263
264 264 :param hashed: The expected hashed version of the password.
265 265 :type hashed: The hash has to be passed in text representation.
266 266 """
267 267 password = safe_str(password)
268 268 return crypto_backend().hash_check(password, hashed)
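A short round-trip sketch of the two helpers above; the cleartext values are illustrative only:

    hashed = get_crypt_password(u's3cr3t')
    assert check_password(u's3cr3t', hashed)
    assert not check_password(u'wrong-guess', hashed)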
269 269
270 270
271 271 def generate_auth_token(data, salt=None):
272 272 """
273 273 Generates an API key from the given string.
274 274 """
275 275
276 276 if salt is None:
277 277 salt = os.urandom(16)
278 278 return hashlib.sha1(safe_str(data) + salt).hexdigest()
279 279
280 280
281 281 def get_came_from(request):
282 282 """
283 283 Get the request path plus query string, sanitized by removing auth_token.
284 284 """
285 285 _req = request
286 286
287 287 path = _req.path
288 288 if 'auth_token' in _req.GET:
289 289 # sanitize the request and remove auth_token for redirection
290 290 _req.GET.pop('auth_token')
291 291 qs = _req.query_string
292 292 if qs:
293 293 path += '?' + qs
294 294
295 295 return path
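A tiny sketch of the sanitization above, assuming a hypothetical request object with the shown path and GET parameters:

    # request.path == '/my-repo/files'
    # request.GET  == {'auth_token': 'secret', 'page': '2'}
    get_came_from(request)
    # => '/my-repo/files?page=2'  (auth_token stripped, other params kept)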
296 296
297 297
298 298 class CookieStoreWrapper(object):
299 299
300 300 def __init__(self, cookie_store):
301 301 self.cookie_store = cookie_store
302 302
303 303 def __repr__(self):
304 304 return 'CookieStore<%s>' % (self.cookie_store)
305 305
306 306 def get(self, key, other=None):
307 307 if isinstance(self.cookie_store, dict):
308 308 return self.cookie_store.get(key, other)
309 309 elif isinstance(self.cookie_store, AuthUser):
310 310 return self.cookie_store.__dict__.get(key, other)
311 311
312 312
313 313 def _cached_perms_data(user_id, scope, user_is_admin,
314 314 user_inherit_default_permissions, explicit, algo,
315 315 calculate_super_admin):
316 316
317 317 permissions = PermissionCalculator(
318 318 user_id, scope, user_is_admin, user_inherit_default_permissions,
319 319 explicit, algo, calculate_super_admin)
320 320 return permissions.calculate()
321 321
322 322
323 323 class PermOrigin(object):
324 324 SUPER_ADMIN = 'superadmin'
325 325
326 326 REPO_USER = 'user:%s'
327 327 REPO_USERGROUP = 'usergroup:%s'
328 328 REPO_OWNER = 'repo.owner'
329 329 REPO_DEFAULT = 'repo.default'
330 330 REPO_DEFAULT_NO_INHERIT = 'repo.default.no.inherit'
331 331 REPO_PRIVATE = 'repo.private'
332 332
333 333 REPOGROUP_USER = 'user:%s'
334 334 REPOGROUP_USERGROUP = 'usergroup:%s'
335 335 REPOGROUP_OWNER = 'group.owner'
336 336 REPOGROUP_DEFAULT = 'group.default'
337 337 REPOGROUP_DEFAULT_NO_INHERIT = 'group.default.no.inherit'
338 338
339 339 USERGROUP_USER = 'user:%s'
340 340 USERGROUP_USERGROUP = 'usergroup:%s'
341 341 USERGROUP_OWNER = 'usergroup.owner'
342 342 USERGROUP_DEFAULT = 'usergroup.default'
343 343 USERGROUP_DEFAULT_NO_INHERIT = 'usergroup.default.no.inherit'
344 344
345 345
346 346 class PermOriginDict(dict):
347 347 """
348 348 A special dict used for tracking permissions along with their origins.
349 349
350 350 `__setitem__` has been overridden to expect a tuple(perm, origin)
351 351 `__getitem__` will return only the perm
352 352 `.perm_origin_stack` will return the stack of (perm, origin) set per key
353 353
354 354 >>> perms = PermOriginDict()
355 355 >>> perms['resource'] = 'read', 'default'
356 356 >>> perms['resource']
357 357 'read'
358 358 >>> perms['resource'] = 'write', 'admin'
359 359 >>> perms['resource']
360 360 'write'
361 361 >>> perms.perm_origin_stack
362 362 {'resource': [('read', 'default'), ('write', 'admin')]}
363 363 """
364 364
365 365 def __init__(self, *args, **kw):
366 366 dict.__init__(self, *args, **kw)
367 367 self.perm_origin_stack = collections.OrderedDict()
368 368
369 369 def __setitem__(self, key, (perm, origin)):
370 370 self.perm_origin_stack.setdefault(key, []).append((perm, origin))
371 371 dict.__setitem__(self, key, perm)
372 372
373 373
374 374 class PermissionCalculator(object):
375 375
376 376 def __init__(
377 377 self, user_id, scope, user_is_admin,
378 378 user_inherit_default_permissions, explicit, algo,
379 379 calculate_super_admin=False):
380 380
381 381 self.user_id = user_id
382 382 self.user_is_admin = user_is_admin
383 383 self.inherit_default_permissions = user_inherit_default_permissions
384 384 self.explicit = explicit
385 385 self.algo = algo
386 386 self.calculate_super_admin = calculate_super_admin
387 387
388 388 scope = scope or {}
389 389 self.scope_repo_id = scope.get('repo_id')
390 390 self.scope_repo_group_id = scope.get('repo_group_id')
391 391 self.scope_user_group_id = scope.get('user_group_id')
392 392
393 393 self.default_user_id = User.get_default_user(cache=True).user_id
394 394
395 395 self.permissions_repositories = PermOriginDict()
396 396 self.permissions_repository_groups = PermOriginDict()
397 397 self.permissions_user_groups = PermOriginDict()
398 398 self.permissions_global = set()
399 399
400 400 self.default_repo_perms = Permission.get_default_repo_perms(
401 401 self.default_user_id, self.scope_repo_id)
402 402 self.default_repo_groups_perms = Permission.get_default_group_perms(
403 403 self.default_user_id, self.scope_repo_group_id)
404 404 self.default_user_group_perms = \
405 405 Permission.get_default_user_group_perms(
406 406 self.default_user_id, self.scope_user_group_id)
407 407
408 408 def calculate(self):
409 409 if self.user_is_admin and not self.calculate_super_admin:
410 410 return self._admin_permissions()
411 411
412 412 self._calculate_global_default_permissions()
413 413 self._calculate_global_permissions()
414 414 self._calculate_default_permissions()
415 415 self._calculate_repository_permissions()
416 416 self._calculate_repository_group_permissions()
417 417 self._calculate_user_group_permissions()
418 418 return self._permission_structure()
419 419
420 420 def _admin_permissions(self):
421 421 """
422 422 An admin user has all default rights for repositories
423 423 and groups set to admin.
424 424 """
425 425 self.permissions_global.add('hg.admin')
426 426 self.permissions_global.add('hg.create.write_on_repogroup.true')
427 427
428 428 # repositories
429 429 for perm in self.default_repo_perms:
430 430 r_k = perm.UserRepoToPerm.repository.repo_name
431 431 p = 'repository.admin'
432 432 self.permissions_repositories[r_k] = p, PermOrigin.SUPER_ADMIN
433 433
434 434 # repository groups
435 435 for perm in self.default_repo_groups_perms:
436 436 rg_k = perm.UserRepoGroupToPerm.group.group_name
437 437 p = 'group.admin'
438 438 self.permissions_repository_groups[rg_k] = p, PermOrigin.SUPER_ADMIN
439 439
440 440 # user groups
441 441 for perm in self.default_user_group_perms:
442 442 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
443 443 p = 'usergroup.admin'
444 444 self.permissions_user_groups[u_k] = p, PermOrigin.SUPER_ADMIN
445 445
446 446 return self._permission_structure()
447 447
448 448 def _calculate_global_default_permissions(self):
449 449 """
450 450 global permissions taken from the default user
451 451 """
452 452 default_global_perms = UserToPerm.query()\
453 453 .filter(UserToPerm.user_id == self.default_user_id)\
454 454 .options(joinedload(UserToPerm.permission))
455 455
456 456 for perm in default_global_perms:
457 457 self.permissions_global.add(perm.permission.permission_name)
458 458
459 459 if self.user_is_admin:
460 460 self.permissions_global.add('hg.admin')
461 461 self.permissions_global.add('hg.create.write_on_repogroup.true')
462 462
463 463 def _calculate_global_permissions(self):
464 464 """
465 465 Set global system permissions with user permissions or permissions
466 466 taken from the user groups of the current user.
467 467
468 468 The permissions include repo creation, repo group creation, forking,
469 469 etc.
470 470 """
471 471
472 472 # now we read the defined permissions and overwrite what we have set
473 473 # before; those can be configured from groups or users explicitly.
474 474
475 475 # TODO: johbo: This seems to be out of sync, find out the reason
476 476 # for the comment below and update it.
477 477
478 478 # In case we want to extend this list we should always be in sync with
479 479 # User.DEFAULT_USER_PERMISSIONS definitions
480 480 _configurable = frozenset([
481 481 'hg.fork.none', 'hg.fork.repository',
482 482 'hg.create.none', 'hg.create.repository',
483 483 'hg.usergroup.create.false', 'hg.usergroup.create.true',
484 484 'hg.repogroup.create.false', 'hg.repogroup.create.true',
485 485 'hg.create.write_on_repogroup.false',
486 486 'hg.create.write_on_repogroup.true',
487 487 'hg.inherit_default_perms.false', 'hg.inherit_default_perms.true'
488 488 ])
489 489
490 490         # USER GROUPS come first: user group global permissions
491 491 user_perms_from_users_groups = Session().query(UserGroupToPerm)\
492 492 .options(joinedload(UserGroupToPerm.permission))\
493 493 .join((UserGroupMember, UserGroupToPerm.users_group_id ==
494 494 UserGroupMember.users_group_id))\
495 495 .filter(UserGroupMember.user_id == self.user_id)\
496 496 .order_by(UserGroupToPerm.users_group_id)\
497 497 .all()
498 498
499 499         # need to group here by user groups since a user can be in more
500 500         # than one group, so we get all groups
501 501 _explicit_grouped_perms = [
502 502 [x, list(y)] for x, y in
503 503 itertools.groupby(user_perms_from_users_groups,
504 504 lambda _x: _x.users_group)]
505 505
506 506 for gr, perms in _explicit_grouped_perms:
507 507 # since user can be in multiple groups iterate over them and
508 508 # select the lowest permissions first (more explicit)
509 509 # TODO: marcink: do this^^
510 510
511 511 # group doesn't inherit default permissions so we actually set them
512 512 if not gr.inherit_default_permissions:
513 513 # NEED TO IGNORE all previously set configurable permissions
514 514 # and replace them with explicitly set from this user
515 515 # group permissions
516 516 self.permissions_global = self.permissions_global.difference(
517 517 _configurable)
518 518 for perm in perms:
519 519 self.permissions_global.add(perm.permission.permission_name)
520 520
521 521 # user explicit global permissions
522 522 user_perms = Session().query(UserToPerm)\
523 523 .options(joinedload(UserToPerm.permission))\
524 524 .filter(UserToPerm.user_id == self.user_id).all()
525 525
526 526 if not self.inherit_default_permissions:
527 527 # NEED TO IGNORE all configurable permissions and
528 528 # replace them with explicitly set from this user permissions
529 529 self.permissions_global = self.permissions_global.difference(
530 530 _configurable)
531 531 for perm in user_perms:
532 532 self.permissions_global.add(perm.permission.permission_name)
533 533
534 534 def _calculate_default_permissions(self):
535 535 """
536 536         Set default user permissions for repositories and repository groups,
537 537         taken from the default user.
538 538 
539 539         Calculate inheritance of object permissions based on what we have now
540 540         in GLOBAL permissions. We check if .false is in GLOBAL since this is
541 541         explicitly set. Inherit is the opposite of .false being there.
542 542 
543 543         .. note::
544 544 
545 545             the syntax is a little bit odd, but what we need to check here is
546 546             the opposite of the .false permission being in the list, so even
547 547             for the inconsistent state when both .true and .false are present,
548 548             .false is more important
549 549
550 550 """
551 551 user_inherit_object_permissions = not ('hg.inherit_default_perms.false'
552 552 in self.permissions_global)
553 553
554 554 # defaults for repositories, taken from `default` user permissions
555 555 # on given repo
556 556 for perm in self.default_repo_perms:
557 557 r_k = perm.UserRepoToPerm.repository.repo_name
558 558 p = perm.Permission.permission_name
559 559 o = PermOrigin.REPO_DEFAULT
560 560 self.permissions_repositories[r_k] = p, o
561 561
562 562 # if we decide this user isn't inheriting permissions from
563 563 # default user we set him to .none so only explicit
564 564 # permissions work
565 565 if not user_inherit_object_permissions:
566 566 p = 'repository.none'
567 567 o = PermOrigin.REPO_DEFAULT_NO_INHERIT
568 568 self.permissions_repositories[r_k] = p, o
569 569
570 570 if perm.Repository.private and not (
571 571 perm.Repository.user_id == self.user_id):
572 572 # disable defaults for private repos,
573 573 p = 'repository.none'
574 574 o = PermOrigin.REPO_PRIVATE
575 575 self.permissions_repositories[r_k] = p, o
576 576
577 577 elif perm.Repository.user_id == self.user_id:
578 578 # set admin if owner
579 579 p = 'repository.admin'
580 580 o = PermOrigin.REPO_OWNER
581 581 self.permissions_repositories[r_k] = p, o
582 582
583 583 if self.user_is_admin:
584 584 p = 'repository.admin'
585 585 o = PermOrigin.SUPER_ADMIN
586 586 self.permissions_repositories[r_k] = p, o
587 587
588 588 # defaults for repository groups taken from `default` user permission
589 589 # on given group
590 590 for perm in self.default_repo_groups_perms:
591 591 rg_k = perm.UserRepoGroupToPerm.group.group_name
592 592 p = perm.Permission.permission_name
593 593 o = PermOrigin.REPOGROUP_DEFAULT
594 594 self.permissions_repository_groups[rg_k] = p, o
595 595
596 596 # if we decide this user isn't inheriting permissions from default
597 597 # user we set him to .none so only explicit permissions work
598 598 if not user_inherit_object_permissions:
599 599 p = 'group.none'
600 600 o = PermOrigin.REPOGROUP_DEFAULT_NO_INHERIT
601 601 self.permissions_repository_groups[rg_k] = p, o
602 602
603 603 if perm.RepoGroup.user_id == self.user_id:
604 604 # set admin if owner
605 605 p = 'group.admin'
606 606 o = PermOrigin.REPOGROUP_OWNER
607 607 self.permissions_repository_groups[rg_k] = p, o
608 608
609 609 if self.user_is_admin:
610 610 p = 'group.admin'
611 611 o = PermOrigin.SUPER_ADMIN
612 612 self.permissions_repository_groups[rg_k] = p, o
613 613
614 614 # defaults for user groups taken from `default` user permission
615 615 # on given user group
616 616 for perm in self.default_user_group_perms:
617 617 u_k = perm.UserUserGroupToPerm.user_group.users_group_name
618 618 p = perm.Permission.permission_name
619 619 o = PermOrigin.USERGROUP_DEFAULT
620 620 self.permissions_user_groups[u_k] = p, o
621 621
622 622 # if we decide this user isn't inheriting permissions from default
623 623 # user we set him to .none so only explicit permissions work
624 624 if not user_inherit_object_permissions:
625 625 p = 'usergroup.none'
626 626 o = PermOrigin.USERGROUP_DEFAULT_NO_INHERIT
627 627 self.permissions_user_groups[u_k] = p, o
628 628
629 629 if perm.UserGroup.user_id == self.user_id:
630 630 # set admin if owner
631 631 p = 'usergroup.admin'
632 632 o = PermOrigin.USERGROUP_OWNER
633 633 self.permissions_user_groups[u_k] = p, o
634 634
635 635 if self.user_is_admin:
636 636 p = 'usergroup.admin'
637 637 o = PermOrigin.SUPER_ADMIN
638 638 self.permissions_user_groups[u_k] = p, o
639 639
640 640 def _calculate_repository_permissions(self):
641 641 """
642 642 Repository permissions for the current user.
643 643
644 644         Check if the user is part of user groups for this repository and
645 645         fill in the permissions from them. `_choose_permission` decides which
646 646         permission should be selected based on the selected method.
647 647 """
648 648
649 649 # user group for repositories permissions
650 650 user_repo_perms_from_user_group = Permission\
651 651 .get_default_repo_perms_from_user_group(
652 652 self.user_id, self.scope_repo_id)
653 653
654 654 multiple_counter = collections.defaultdict(int)
655 655 for perm in user_repo_perms_from_user_group:
656 656 r_k = perm.UserGroupRepoToPerm.repository.repo_name
657 657 multiple_counter[r_k] += 1
658 658 p = perm.Permission.permission_name
659 659 o = PermOrigin.REPO_USERGROUP % perm.UserGroupRepoToPerm\
660 660 .users_group.users_group_name
661 661
662 662 if multiple_counter[r_k] > 1:
663 663 cur_perm = self.permissions_repositories[r_k]
664 664 p = self._choose_permission(p, cur_perm)
665 665
666 666 self.permissions_repositories[r_k] = p, o
667 667
668 668 if perm.Repository.user_id == self.user_id:
669 669 # set admin if owner
670 670 p = 'repository.admin'
671 671 o = PermOrigin.REPO_OWNER
672 672 self.permissions_repositories[r_k] = p, o
673 673
674 674 if self.user_is_admin:
675 675 p = 'repository.admin'
676 676 o = PermOrigin.SUPER_ADMIN
677 677 self.permissions_repositories[r_k] = p, o
678 678
679 679 # user explicit permissions for repositories, overrides any specified
680 680 # by the group permission
681 681 user_repo_perms = Permission.get_default_repo_perms(
682 682 self.user_id, self.scope_repo_id)
683 683 for perm in user_repo_perms:
684 684 r_k = perm.UserRepoToPerm.repository.repo_name
685 685 p = perm.Permission.permission_name
686 686 o = PermOrigin.REPO_USER % perm.UserRepoToPerm.user.username
687 687
688 688 if not self.explicit:
689 689 cur_perm = self.permissions_repositories.get(
690 690 r_k, 'repository.none')
691 691 p = self._choose_permission(p, cur_perm)
692 692
693 693 self.permissions_repositories[r_k] = p, o
694 694
695 695 if perm.Repository.user_id == self.user_id:
696 696 # set admin if owner
697 697 p = 'repository.admin'
698 698 o = PermOrigin.REPO_OWNER
699 699 self.permissions_repositories[r_k] = p, o
700 700
701 701 if self.user_is_admin:
702 702 p = 'repository.admin'
703 703 o = PermOrigin.SUPER_ADMIN
704 704 self.permissions_repositories[r_k] = p, o
705 705
706 706 def _calculate_repository_group_permissions(self):
707 707 """
708 708 Repository group permissions for the current user.
709 709
710 710         Check if the user is part of user groups for repository groups and
711 711         fill in the permissions from them. `_choose_permission` decides which
712 712         permission should be selected based on the selected method.
713 713 """
714 714 # user group for repo groups permissions
715 715 user_repo_group_perms_from_user_group = Permission\
716 716 .get_default_group_perms_from_user_group(
717 717 self.user_id, self.scope_repo_group_id)
718 718
719 719 multiple_counter = collections.defaultdict(int)
720 720 for perm in user_repo_group_perms_from_user_group:
721 721 rg_k = perm.UserGroupRepoGroupToPerm.group.group_name
722 722 multiple_counter[rg_k] += 1
723 723 o = PermOrigin.REPOGROUP_USERGROUP % perm.UserGroupRepoGroupToPerm\
724 724 .users_group.users_group_name
725 725 p = perm.Permission.permission_name
726 726
727 727 if multiple_counter[rg_k] > 1:
728 728 cur_perm = self.permissions_repository_groups[rg_k]
729 729 p = self._choose_permission(p, cur_perm)
730 730 self.permissions_repository_groups[rg_k] = p, o
731 731
732 732 if perm.RepoGroup.user_id == self.user_id:
733 733 # set admin if owner, even for member of other user group
734 734 p = 'group.admin'
735 735 o = PermOrigin.REPOGROUP_OWNER
736 736 self.permissions_repository_groups[rg_k] = p, o
737 737
738 738 if self.user_is_admin:
739 739 p = 'group.admin'
740 740 o = PermOrigin.SUPER_ADMIN
741 741 self.permissions_repository_groups[rg_k] = p, o
742 742
743 743 # user explicit permissions for repository groups
744 744 user_repo_groups_perms = Permission.get_default_group_perms(
745 745 self.user_id, self.scope_repo_group_id)
746 746 for perm in user_repo_groups_perms:
747 747 rg_k = perm.UserRepoGroupToPerm.group.group_name
748 748 o = PermOrigin.REPOGROUP_USER % perm.UserRepoGroupToPerm\
749 749 .user.username
750 750 p = perm.Permission.permission_name
751 751
752 752 if not self.explicit:
753 753 cur_perm = self.permissions_repository_groups.get(
754 754 rg_k, 'group.none')
755 755 p = self._choose_permission(p, cur_perm)
756 756
757 757 self.permissions_repository_groups[rg_k] = p, o
758 758
759 759 if perm.RepoGroup.user_id == self.user_id:
760 760 # set admin if owner
761 761 p = 'group.admin'
762 762 o = PermOrigin.REPOGROUP_OWNER
763 763 self.permissions_repository_groups[rg_k] = p, o
764 764
765 765 if self.user_is_admin:
766 766 p = 'group.admin'
767 767 o = PermOrigin.SUPER_ADMIN
768 768 self.permissions_repository_groups[rg_k] = p, o
769 769
770 770 def _calculate_user_group_permissions(self):
771 771 """
772 772 User group permissions for the current user.
773 773 """
774 774 # user group for user group permissions
775 775 user_group_from_user_group = Permission\
776 776 .get_default_user_group_perms_from_user_group(
777 777 self.user_id, self.scope_user_group_id)
778 778
779 779 multiple_counter = collections.defaultdict(int)
780 780 for perm in user_group_from_user_group:
781 781 ug_k = perm.UserGroupUserGroupToPerm\
782 782 .target_user_group.users_group_name
783 783 multiple_counter[ug_k] += 1
784 784 o = PermOrigin.USERGROUP_USERGROUP % perm.UserGroupUserGroupToPerm\
785 785 .user_group.users_group_name
786 786 p = perm.Permission.permission_name
787 787
788 788 if multiple_counter[ug_k] > 1:
789 789 cur_perm = self.permissions_user_groups[ug_k]
790 790 p = self._choose_permission(p, cur_perm)
791 791
792 792 self.permissions_user_groups[ug_k] = p, o
793 793
794 794 if perm.UserGroup.user_id == self.user_id:
795 795 # set admin if owner, even for member of other user group
796 796 p = 'usergroup.admin'
797 797 o = PermOrigin.USERGROUP_OWNER
798 798 self.permissions_user_groups[ug_k] = p, o
799 799
800 800 if self.user_is_admin:
801 801 p = 'usergroup.admin'
802 802 o = PermOrigin.SUPER_ADMIN
803 803 self.permissions_user_groups[ug_k] = p, o
804 804
805 805 # user explicit permission for user groups
806 806 user_user_groups_perms = Permission.get_default_user_group_perms(
807 807 self.user_id, self.scope_user_group_id)
808 808 for perm in user_user_groups_perms:
809 809 ug_k = perm.UserUserGroupToPerm.user_group.users_group_name
810 810 o = PermOrigin.USERGROUP_USER % perm.UserUserGroupToPerm\
811 811 .user.username
812 812 p = perm.Permission.permission_name
813 813
814 814 if not self.explicit:
815 815 cur_perm = self.permissions_user_groups.get(
816 816 ug_k, 'usergroup.none')
817 817 p = self._choose_permission(p, cur_perm)
818 818
819 819 self.permissions_user_groups[ug_k] = p, o
820 820
821 821 if perm.UserGroup.user_id == self.user_id:
822 822 # set admin if owner
823 823 p = 'usergroup.admin'
824 824 o = PermOrigin.USERGROUP_OWNER
825 825 self.permissions_user_groups[ug_k] = p, o
826 826
827 827 if self.user_is_admin:
828 828 p = 'usergroup.admin'
829 829 o = PermOrigin.SUPER_ADMIN
830 830 self.permissions_user_groups[ug_k] = p, o
831 831
832 832 def _choose_permission(self, new_perm, cur_perm):
833 833 new_perm_val = Permission.PERM_WEIGHTS[new_perm]
834 834 cur_perm_val = Permission.PERM_WEIGHTS[cur_perm]
835 835 if self.algo == 'higherwin':
836 836 if new_perm_val > cur_perm_val:
837 837 return new_perm
838 838 return cur_perm
839 839 elif self.algo == 'lowerwin':
840 840 if new_perm_val < cur_perm_val:
841 841 return new_perm
842 842 return cur_perm
843 843
844 844 def _permission_structure(self):
845 845 return {
846 846 'global': self.permissions_global,
847 847 'repositories': self.permissions_repositories,
848 848 'repositories_groups': self.permissions_repository_groups,
849 849 'user_groups': self.permissions_user_groups,
850 850 }
851 851
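# --- Editor's illustrative sketch (not part of this changeset) ----------------
# How the `algo` option of the calculator above resolves conflicting grants
# when the same object is reachable through several user groups. The weight
# values below are hypothetical stand-ins for Permission.PERM_WEIGHTS; the
# real mapping is defined on the Permission model.
_EXAMPLE_WEIGHTS = {
    'repository.none': 0,
    'repository.read': 1,
    'repository.write': 3,
    'repository.admin': 4,
}

def _choose_permission_sketch(new_perm, cur_perm, algo='higherwin'):
    # mirrors _choose_permission: 'higherwin' keeps the stronger permission,
    # 'lowerwin' keeps the weaker one
    new_val, cur_val = _EXAMPLE_WEIGHTS[new_perm], _EXAMPLE_WEIGHTS[cur_perm]
    if algo == 'higherwin':
        return new_perm if new_val > cur_val else cur_perm
    return new_perm if new_val < cur_val else cur_perm

# a user in two groups granting 'repository.read' and 'repository.write':
assert _choose_permission_sketch('repository.write', 'repository.read',
                                 algo='higherwin') == 'repository.write'
assert _choose_permission_sketch('repository.write', 'repository.read',
                                 algo='lowerwin') == 'repository.read'
# ------------------------------------------------------------------------------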
852 852
853 853 def allowed_auth_token_access(view_name, auth_token, whitelist=None):
854 854 """
855 855     Check if the given view_name is in the whitelist of auth token access
856 856 """
857 857 if not whitelist:
858 858 from rhodecode import CONFIG
859 859 whitelist = aslist(
860 860 CONFIG.get('api_access_controllers_whitelist'), sep=',')
861 861 # backward compat translation
862 862 compat = {
863 863 # old controller, new VIEW
864 864 'ChangesetController:*': 'RepoCommitsView:*',
865 865 'ChangesetController:changeset_patch': 'RepoCommitsView:repo_commit_patch',
866 866 'ChangesetController:changeset_raw': 'RepoCommitsView:repo_commit_raw',
867 867 'FilesController:raw': 'RepoCommitsView:repo_commit_raw',
868 868 'FilesController:archivefile': 'RepoFilesView:repo_archivefile',
869 869 'GistsController:*': 'GistView:*',
870 870 }
871 871
872 872 log.debug(
873 873 'Allowed views for AUTH TOKEN access: %s' % (whitelist,))
874 874 auth_token_access_valid = False
875 875
876 876 for entry in whitelist:
877 877 token_match = True
878 878 if entry in compat:
879 879 # translate from old Controllers to Pyramid Views
880 880 entry = compat[entry]
881 881
882 882 if '@' in entry:
883 883 # specific AuthToken
884 884 entry, allowed_token = entry.split('@', 1)
885 885 token_match = auth_token == allowed_token
886 886
887 887 if fnmatch.fnmatch(view_name, entry) and token_match:
888 888 auth_token_access_valid = True
889 889 break
890 890
891 891 if auth_token_access_valid:
892 892 log.debug('view: `%s` matches entry in whitelist: %s'
893 893 % (view_name, whitelist))
894 894 else:
895 895 msg = ('view: `%s` does *NOT* match any entry in whitelist: %s'
896 896 % (view_name, whitelist))
897 897 if auth_token:
898 898 # if we use auth token key and don't have access it's a warning
899 899 log.warning(msg)
900 900 else:
901 901 log.debug(msg)
902 902
903 903 return auth_token_access_valid
904 904
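# --- Editor's illustrative sketch (not part of this changeset) ----------------
# Behaviour of the whitelist matching above; the entries are hypothetical
# values for the `api_access_controllers_whitelist` setting.
_example_whitelist = [
    'RepoCommitsView:repo_commit_raw',              # any auth token for this view
    'RepoFilesView:repo_archivefile@secret-token',  # only this specific token
]

# plain entries are fnmatch patterns on the view name, any token is accepted
assert allowed_auth_token_access(
    'RepoCommitsView:repo_commit_raw', auth_token='abc',
    whitelist=_example_whitelist)

# '@' entries additionally pin the allowed token value
assert not allowed_auth_token_access(
    'RepoFilesView:repo_archivefile', auth_token='wrong',
    whitelist=_example_whitelist)

# old controller names are translated through the compat map first
assert allowed_auth_token_access(
    'RepoCommitsView:repo_commit_patch', auth_token='abc',
    whitelist=['ChangesetController:*'])
# ------------------------------------------------------------------------------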
905 905
906 906 class AuthUser(object):
907 907 """
908 908 A simple object that handles all attributes of user in RhodeCode
909 909
910 910     It does a lookup based on API key, given user, or user present in the session.
911 911     Then it fills in all required information for such a user. It also checks if
912 912     anonymous access is enabled and if so, returns the default user as logged in
913 913 """
914 914 GLOBAL_PERMS = [x[0] for x in Permission.PERMS]
915 915
916 916 def __init__(self, user_id=None, api_key=None, username=None, ip_addr=None):
917 917
918 918 self.user_id = user_id
919 919 self._api_key = api_key
920 920
921 921 self.api_key = None
922 922 self.username = username
923 923 self.ip_addr = ip_addr
924 924 self.name = ''
925 925 self.lastname = ''
926 926 self.first_name = ''
927 927 self.last_name = ''
928 928 self.email = ''
929 929 self.is_authenticated = False
930 930 self.admin = False
931 931 self.inherit_default_permissions = False
932 932 self.password = ''
933 933
934 934 self.anonymous_user = None # propagated on propagate_data
935 935 self.propagate_data()
936 936 self._instance = None
937 937 self._permissions_scoped_cache = {} # used to bind scoped calculation
938 938
939 939 @LazyProperty
940 940 def permissions(self):
941 941 return self.get_perms(user=self, cache=False)
942 942
943 943 @LazyProperty
944 944 def permissions_safe(self):
945 945 """
946 946 Filtered permissions excluding not allowed repositories
947 947 """
948 948 perms = self.get_perms(user=self, cache=False)
949 949
950 950 perms['repositories'] = {
951 951 k: v for k, v in perms['repositories'].items()
952 952 if v != 'repository.none'}
953 953 perms['repositories_groups'] = {
954 954 k: v for k, v in perms['repositories_groups'].items()
955 955 if v != 'group.none'}
956 956 perms['user_groups'] = {
957 957 k: v for k, v in perms['user_groups'].items()
958 958 if v != 'usergroup.none'}
959 959 return perms
960 960
961 961 @LazyProperty
962 962 def permissions_full_details(self):
963 963 return self.get_perms(
964 964 user=self, cache=False, calculate_super_admin=True)
965 965
966 966 def permissions_with_scope(self, scope):
967 967 """
968 968         Call the get_perms function with scoped data. The scope in that function
969 969         narrows the SQL calls to the given IDs of objects, resulting in fetching
970 970         just the particular permissions we want to obtain. If scope is an empty
971 971         dict then it basically narrows the scope to GLOBAL permissions only.
972 972
973 973 :param scope: dict
974 974 """
975 975 if 'repo_name' in scope:
976 976 obj = Repository.get_by_repo_name(scope['repo_name'])
977 977 if obj:
978 978 scope['repo_id'] = obj.repo_id
979 979 _scope = collections.OrderedDict()
980 980 _scope['repo_id'] = -1
981 981 _scope['user_group_id'] = -1
982 982 _scope['repo_group_id'] = -1
983 983
984 984 for k in sorted(scope.keys()):
985 985 _scope[k] = scope[k]
986 986
987 987 # store in cache to mimic how the @LazyProperty works,
988 988 # the difference here is that we use the unique key calculated
989 989 # from params and values
990 990 return self.get_perms(user=self, cache=False, scope=_scope)
991 991
992 992 def get_instance(self):
993 993 return User.get(self.user_id)
994 994
995 def update_lastactivity(self):
996 if self.user_id:
997 User.get(self.user_id).update_lastactivity()
998
999 995 def propagate_data(self):
1000 996 """
1001 997 Fills in user data and propagates values to this instance. Maps fetched
1002 998 user attributes to this class instance attributes
1003 999 """
1004 1000 log.debug('AuthUser: starting data propagation for new potential user')
1005 1001 user_model = UserModel()
1006 1002 anon_user = self.anonymous_user = User.get_default_user(cache=True)
1007 1003 is_user_loaded = False
1008 1004
1009 1005 # lookup by userid
1010 1006 if self.user_id is not None and self.user_id != anon_user.user_id:
1011 1007 log.debug('Trying Auth User lookup by USER ID: `%s`', self.user_id)
1012 1008 is_user_loaded = user_model.fill_data(self, user_id=self.user_id)
1013 1009
1014 1010         # try to get user by api key
1015 1011 elif self._api_key and self._api_key != anon_user.api_key:
1016 1012 log.debug('Trying Auth User lookup by API KEY: `%s`', self._api_key)
1017 1013 is_user_loaded = user_model.fill_data(self, api_key=self._api_key)
1018 1014
1019 1015 # lookup by username
1020 1016 elif self.username:
1021 1017 log.debug('Trying Auth User lookup by USER NAME: `%s`', self.username)
1022 1018 is_user_loaded = user_model.fill_data(self, username=self.username)
1023 1019 else:
1024 1020             log.debug('No data in %s that could be used to log in', self)
1025 1021
1026 1022 if not is_user_loaded:
1027 1023 log.debug(
1028 1024 'Failed to load user. Fallback to default user %s', anon_user)
1029 1025 # if we cannot authenticate user try anonymous
1030 1026 if anon_user.active:
1031 1027 log.debug('default user is active, using it as a session user')
1032 1028 user_model.fill_data(self, user_id=anon_user.user_id)
1033 1029 # then we set this user is logged in
1034 1030 self.is_authenticated = True
1035 1031 else:
1036 1032 log.debug('default user is NOT active')
1037 1033 # in case of disabled anonymous user we reset some of the
1038 1034 # parameters so such user is "corrupted", skipping the fill_data
1039 1035 for attr in ['user_id', 'username', 'admin', 'active']:
1040 1036 setattr(self, attr, None)
1041 1037 self.is_authenticated = False
1042 1038
1043 1039 if not self.username:
1044 1040 self.username = 'None'
1045 1041
1046 1042 log.debug('AuthUser: propagated user is now %s', self)
1047 1043
1048 1044 def get_perms(self, user, scope=None, explicit=True, algo='higherwin',
1049 1045 calculate_super_admin=False, cache=False):
1050 1046 """
1051 1047         Fills the user permission attribute with permissions taken from the
1052 1048         database; works for permissions given for repositories and for
1053 1049         permissions that are granted to groups
1054 1050 
1055 1051         :param user: instance of User object from database
1056 1052         :param explicit: in case there are permissions both for a user and a
1057 1053             group that user is part of, the explicit flag defines whether the
1058 1054             user permission explicitly overrides the group permission; if it's
1059 1055             False the decision is made based on the algo
1060 1056         :param algo: algorithm to decide which permission should be chosen if
1061 1057             multiple are defined, e.g. the user is in two different groups. If
1062 1058             the explicit flag is turned off, it also decides how to pick the
1063 1059             permission when the user is in a group and has a separate permission defined
1064 1060 """
1065 1061 user_id = user.user_id
1066 1062 user_is_admin = user.is_admin
1067 1063
1068 1064 # inheritance of global permissions like create repo/fork repo etc
1069 1065 user_inherit_default_permissions = user.inherit_default_permissions
1070 1066
1071 1067 cache_seconds = safe_int(
1072 1068 rhodecode.CONFIG.get('rc_cache.cache_perms.expiration_time'))
1073 1069
1074 1070 cache_on = cache or cache_seconds > 0
1075 1071 log.debug(
1076 1072 'Computing PERMISSION tree for user %s scope `%s` '
1077 1073 'with caching: %s[TTL: %ss]' % (user, scope, cache_on, cache_seconds or 0))
1078 1074
1079 1075 cache_namespace_uid = 'cache_user_auth.{}'.format(user_id)
1080 1076 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
1081 1077
1082 1078 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
1083 1079 condition=cache_on)
1084 1080 def compute_perm_tree(cache_name,
1085 1081 user_id, scope, user_is_admin,user_inherit_default_permissions,
1086 1082 explicit, algo, calculate_super_admin):
1087 1083 return _cached_perms_data(
1088 1084 user_id, scope, user_is_admin, user_inherit_default_permissions,
1089 1085 explicit, algo, calculate_super_admin)
1090 1086
1091 1087 start = time.time()
1092 1088 result = compute_perm_tree('permissions', user_id, scope, user_is_admin,
1093 1089 user_inherit_default_permissions, explicit, algo,
1094 1090 calculate_super_admin)
1095 1091
1096 1092 result_repr = []
1097 1093 for k in result:
1098 1094 result_repr.append((k, len(result[k])))
1099 1095 total = time.time() - start
1100 1096 log.debug('PERMISSION tree for user %s computed in %.3fs: %s' % (
1101 1097 user, total, result_repr))
1102 1098
1103 1099 return result
1104 1100
1105 1101 @property
1106 1102 def is_default(self):
1107 1103 return self.username == User.DEFAULT_USER
1108 1104
1109 1105 @property
1110 1106 def is_admin(self):
1111 1107 return self.admin
1112 1108
1113 1109 @property
1114 1110 def is_user_object(self):
1115 1111 return self.user_id is not None
1116 1112
1117 1113 @property
1118 1114 def repositories_admin(self):
1119 1115 """
1120 1116 Returns list of repositories you're an admin of
1121 1117 """
1122 1118 return [
1123 1119 x[0] for x in self.permissions['repositories'].items()
1124 1120 if x[1] == 'repository.admin']
1125 1121
1126 1122 @property
1127 1123 def repository_groups_admin(self):
1128 1124 """
1129 1125 Returns list of repository groups you're an admin of
1130 1126 """
1131 1127 return [
1132 1128 x[0] for x in self.permissions['repositories_groups'].items()
1133 1129 if x[1] == 'group.admin']
1134 1130
1135 1131 @property
1136 1132 def user_groups_admin(self):
1137 1133 """
1138 1134 Returns list of user groups you're an admin of
1139 1135 """
1140 1136 return [
1141 1137 x[0] for x in self.permissions['user_groups'].items()
1142 1138 if x[1] == 'usergroup.admin']
1143 1139
1144 1140 def repo_acl_ids(self, perms=None, name_filter=None, cache=False):
1145 1141 """
1146 1142         Returns a list of repository ids that the user has access to based on
1147 1143         given perms. The cache flag should only be used in cases that serve
1148 1144         display purposes, NOT IN ANY CASE for permission checks.
1149 1145 """
1150 1146 from rhodecode.model.scm import RepoList
1151 1147 if not perms:
1152 1148 perms = [
1153 1149 'repository.read', 'repository.write', 'repository.admin']
1154 1150
1155 1151 def _cached_repo_acl(user_id, perm_def, _name_filter):
1156 1152 qry = Repository.query()
1157 1153 if _name_filter:
1158 1154 ilike_expression = u'%{}%'.format(safe_unicode(_name_filter))
1159 1155 qry = qry.filter(
1160 1156 Repository.repo_name.ilike(ilike_expression))
1161 1157
1162 1158 return [x.repo_id for x in
1163 1159 RepoList(qry, perm_set=perm_def)]
1164 1160
1165 1161 return _cached_repo_acl(self.user_id, perms, name_filter)
1166 1162
1167 1163 def repo_group_acl_ids(self, perms=None, name_filter=None, cache=False):
1168 1164 """
1169 1165         Returns a list of repository group ids that the user has access to based
1170 1166         on given perms. The cache flag should only be used in cases that serve
1171 1167         display purposes, NOT IN ANY CASE for permission checks.
1172 1168 """
1173 1169 from rhodecode.model.scm import RepoGroupList
1174 1170 if not perms:
1175 1171 perms = [
1176 1172 'group.read', 'group.write', 'group.admin']
1177 1173
1178 1174 def _cached_repo_group_acl(user_id, perm_def, _name_filter):
1179 1175 qry = RepoGroup.query()
1180 1176 if _name_filter:
1181 1177 ilike_expression = u'%{}%'.format(safe_unicode(_name_filter))
1182 1178 qry = qry.filter(
1183 1179 RepoGroup.group_name.ilike(ilike_expression))
1184 1180
1185 1181 return [x.group_id for x in
1186 1182 RepoGroupList(qry, perm_set=perm_def)]
1187 1183
1188 1184 return _cached_repo_group_acl(self.user_id, perms, name_filter)
1189 1185
1190 1186 def user_group_acl_ids(self, perms=None, name_filter=None, cache=False):
1191 1187 """
1192 1188         Returns a list of user group ids that the user has access to based on
1193 1189         given perms. The cache flag should only be used in cases that serve
1194 1190         display purposes, NOT IN ANY CASE for permission checks.
1195 1191 """
1196 1192 from rhodecode.model.scm import UserGroupList
1197 1193 if not perms:
1198 1194 perms = [
1199 1195 'usergroup.read', 'usergroup.write', 'usergroup.admin']
1200 1196
1201 1197 def _cached_user_group_acl(user_id, perm_def, name_filter):
1202 1198 qry = UserGroup.query()
1203 1199 if name_filter:
1204 1200 ilike_expression = u'%{}%'.format(safe_unicode(name_filter))
1205 1201 qry = qry.filter(
1206 1202 UserGroup.users_group_name.ilike(ilike_expression))
1207 1203
1208 1204 return [x.users_group_id for x in
1209 1205 UserGroupList(qry, perm_set=perm_def)]
1210 1206
1211 1207 return _cached_user_group_acl(self.user_id, perms, name_filter)
1212 1208
1213 1209 @property
1214 1210 def ip_allowed(self):
1215 1211 """
1216 1212         Checks if the ip_addr used in the constructor is within the defined list
1217 1213         of allowed IP addresses for the user
1218 1214
1219 1215 :returns: boolean, True if ip is in allowed ip range
1220 1216 """
1221 1217 # check IP
1222 1218 inherit = self.inherit_default_permissions
1223 1219 return AuthUser.check_ip_allowed(self.user_id, self.ip_addr,
1224 1220 inherit_from_default=inherit)
1225 1221 @property
1226 1222 def personal_repo_group(self):
1227 1223 return RepoGroup.get_user_personal_repo_group(self.user_id)
1228 1224
1229 1225 @LazyProperty
1230 1226 def feed_token(self):
1231 1227 return self.get_instance().feed_token
1232 1228
1233 1229 @classmethod
1234 1230 def check_ip_allowed(cls, user_id, ip_addr, inherit_from_default):
1235 1231 allowed_ips = AuthUser.get_allowed_ips(
1236 1232 user_id, cache=True, inherit_from_default=inherit_from_default)
1237 1233 if check_ip_access(source_ip=ip_addr, allowed_ips=allowed_ips):
1238 1234 log.debug('IP:%s for user %s is in range of %s' % (
1239 1235 ip_addr, user_id, allowed_ips))
1240 1236 return True
1241 1237 else:
1242 1238 log.info('Access for IP:%s forbidden for user %s, '
1243 1239 'not in %s' % (ip_addr, user_id, allowed_ips))
1244 1240 return False
1245 1241
1246 1242 def __repr__(self):
1247 1243 return "<AuthUser('id:%s[%s] ip:%s auth:%s')>"\
1248 1244 % (self.user_id, self.username, self.ip_addr, self.is_authenticated)
1249 1245
1250 1246 def set_authenticated(self, authenticated=True):
1251 1247 if self.user_id != self.anonymous_user.user_id:
1252 1248 self.is_authenticated = authenticated
1253 1249
1254 1250 def get_cookie_store(self):
1255 1251 return {
1256 1252 'username': self.username,
1257 1253 'password': md5(self.password or ''),
1258 1254 'user_id': self.user_id,
1259 1255 'is_authenticated': self.is_authenticated
1260 1256 }
1261 1257
1262 1258 @classmethod
1263 1259 def from_cookie_store(cls, cookie_store):
1264 1260 """
1265 1261 Creates AuthUser from a cookie store
1266 1262
1267 1263 :param cls:
1268 1264 :param cookie_store:
1269 1265 """
1270 1266 user_id = cookie_store.get('user_id')
1271 1267 username = cookie_store.get('username')
1272 1268 api_key = cookie_store.get('api_key')
1273 1269 return AuthUser(user_id, api_key, username)
1274 1270
1275 1271 @classmethod
1276 1272 def get_allowed_ips(cls, user_id, cache=False, inherit_from_default=False):
1277 1273 _set = set()
1278 1274
1279 1275 if inherit_from_default:
1280 1276 def_user_id = User.get_default_user(cache=True).user_id
1281 1277 default_ips = UserIpMap.query().filter(UserIpMap.user_id == def_user_id)
1282 1278 if cache:
1283 1279 default_ips = default_ips.options(
1284 1280 FromCache("sql_cache_short", "get_user_ips_default"))
1285 1281
1286 1282 # populate from default user
1287 1283 for ip in default_ips:
1288 1284 try:
1289 1285 _set.add(ip.ip_addr)
1290 1286 except ObjectDeletedError:
1291 1287 # since we use heavy caching sometimes it happens that
1292 1288 # we get deleted objects here, we just skip them
1293 1289 pass
1294 1290
1295 1291 # NOTE:(marcink) we don't want to load any rules for empty
1296 1292 # user_id which is the case of access of non logged users when anonymous
1297 1293 # access is disabled
1298 1294 user_ips = []
1299 1295 if user_id:
1300 1296 user_ips = UserIpMap.query().filter(UserIpMap.user_id == user_id)
1301 1297 if cache:
1302 1298 user_ips = user_ips.options(
1303 1299 FromCache("sql_cache_short", "get_user_ips_%s" % user_id))
1304 1300
1305 1301 for ip in user_ips:
1306 1302 try:
1307 1303 _set.add(ip.ip_addr)
1308 1304 except ObjectDeletedError:
1309 1305 # since we use heavy caching sometimes it happens that we get
1310 1306 # deleted objects here, we just skip them
1311 1307 pass
1312 1308 return _set or {ip for ip in ['0.0.0.0/0', '::/0']}
1313 1309
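# --- Editor's illustrative sketch (not part of this changeset) ----------------
# Typical read-only use of the AuthUser API above. This assumes a configured
# RhodeCode application (database, caches) and uses a hypothetical user id and
# repository name purely for illustration.
def _auth_user_usage_sketch():
    auth_user = AuthUser(user_id=2, ip_addr='10.0.0.5')

    # full permission tree, and the filtered tree without *.none entries
    all_perms = auth_user.permissions
    safe_perms = auth_user.permissions_safe

    # scoped lookup narrows the SQL queries to a single repository
    scoped = auth_user.permissions_with_scope({'repo_name': 'some-group/some-repo'})
    repo_perm = scoped['repositories'].get('some-group/some-repo')

    # ids of repositories the user can at least read (display purposes only)
    readable_repo_ids = auth_user.repo_acl_ids(perms=['repository.read'])

    # IP restriction check, as used by the LoginRequired decorator below
    ip_ok = auth_user.ip_allowed

    return all_perms, safe_perms, repo_perm, readable_repo_ids, ip_ok
# ------------------------------------------------------------------------------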
1314 1310
1315 1311 def set_available_permissions(settings):
1316 1312 """
1317 1313     This function will populate the pyramid settings with all available
1318 1314     permissions defined in the db. We don't want to check the db each time for
1319 1315     new permissions, since adding a new permission also requires an application
1320 1316     restart, i.e. to decorate new views with the newly created permission
1321 1317
1322 1318 :param settings: current pyramid registry.settings
1323 1319
1324 1320 """
1325 1321 log.debug('auth: getting information about all available permissions')
1326 1322 try:
1327 1323 sa = meta.Session
1328 1324 all_perms = sa.query(Permission).all()
1329 1325 settings.setdefault('available_permissions',
1330 1326 [x.permission_name for x in all_perms])
1331 1327 log.debug('auth: set available permissions')
1332 1328 except Exception:
1333 1329 log.exception('Failed to fetch permissions from the database.')
1334 1330 raise
1335 1331
1336 1332
1337 1333 def get_csrf_token(session, force_new=False, save_if_missing=True):
1338 1334 """
1339 1335     Return the current CSRF protection token, creating one if it doesn't
1340 1336     already exist and the save_if_missing flag is set.
1341 1337
1342 1338 :param session: pass in the pyramid session, else we use the global ones
1343 1339 :param force_new: force to re-generate the token and store it in session
1344 1340 :param save_if_missing: save the newly generated token if it's missing in
1345 1341 session
1346 1342 """
1347 1343 # NOTE(marcink): probably should be replaced with below one from pyramid 1.9
1348 1344 # from pyramid.csrf import get_csrf_token
1349 1345
1350 1346 if (csrf_token_key not in session and save_if_missing) or force_new:
1351 1347 token = hashlib.sha1(str(random.getrandbits(128))).hexdigest()
1352 1348 session[csrf_token_key] = token
1353 1349 if hasattr(session, 'save'):
1354 1350 session.save()
1355 1351 return session.get(csrf_token_key)
1356 1352
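# --- Editor's illustrative sketch (not part of this changeset) ----------------
# How a view would typically read the CSRF token above and hand it to a
# template; `request` is a hypothetical pyramid request carrying a session.
def _csrf_token_usage_sketch(request):
    token = get_csrf_token(request.session)            # create + store if missing
    peek = get_csrf_token(request.session, save_if_missing=False)  # read-only
    rotated = get_csrf_token(request.session, force_new=True)      # re-generate
    # the token is then rendered into forms as a hidden field (keyed by
    # csrf_token_key) and verified by the CSRFRequired decorator below
    return token, peek, rotated
# ------------------------------------------------------------------------------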
1357 1353
1358 1354 def get_request(perm_class_instance):
1359 1355 from pyramid.threadlocal import get_current_request
1360 1356 pyramid_request = get_current_request()
1361 1357 return pyramid_request
1362 1358
1363 1359
1364 1360 # CHECK DECORATORS
1365 1361 class CSRFRequired(object):
1366 1362 """
1367 1363 Decorator for authenticating a form
1368 1364
1369 1365 This decorator uses an authorization token stored in the client's
1370 1366 session for prevention of certain Cross-site request forgery (CSRF)
1371 1367 attacks (See
1372 1368 http://en.wikipedia.org/wiki/Cross-site_request_forgery for more
1373 1369 information).
1374 1370
1375 1371 For use with the ``webhelpers.secure_form`` helper functions.
1376 1372
1377 1373 """
1378 1374 def __init__(self, token=csrf_token_key, header='X-CSRF-Token',
1379 1375 except_methods=None):
1380 1376 self.token = token
1381 1377 self.header = header
1382 1378 self.except_methods = except_methods or []
1383 1379
1384 1380 def __call__(self, func):
1385 1381 return get_cython_compat_decorator(self.__wrapper, func)
1386 1382
1387 1383 def _get_csrf(self, _request):
1388 1384 return _request.POST.get(self.token, _request.headers.get(self.header))
1389 1385
1390 1386 def check_csrf(self, _request, cur_token):
1391 1387 supplied_token = self._get_csrf(_request)
1392 1388 return supplied_token and supplied_token == cur_token
1393 1389
1394 1390 def _get_request(self):
1395 1391 return get_request(self)
1396 1392
1397 1393 def __wrapper(self, func, *fargs, **fkwargs):
1398 1394 request = self._get_request()
1399 1395
1400 1396 if request.method in self.except_methods:
1401 1397 return func(*fargs, **fkwargs)
1402 1398
1403 1399 cur_token = get_csrf_token(request.session, save_if_missing=False)
1404 1400 if self.check_csrf(request, cur_token):
1405 1401 if request.POST.get(self.token):
1406 1402 del request.POST[self.token]
1407 1403 return func(*fargs, **fkwargs)
1408 1404 else:
1409 1405 reason = 'token-missing'
1410 1406 supplied_token = self._get_csrf(request)
1411 1407 if supplied_token and cur_token != supplied_token:
1412 1408 reason = 'token-mismatch [%s:%s]' % (
1413 1409                     (cur_token or '')[:6], (supplied_token or '')[:6])
1414 1410
1415 1411 csrf_message = \
1416 1412 ("Cross-site request forgery detected, request denied. See "
1417 1413 "http://en.wikipedia.org/wiki/Cross-site_request_forgery for "
1418 1414 "more information.")
1419 1415 log.warn('Cross-site request forgery detected, request %r DENIED: %s '
1420 1416 'REMOTE_ADDR:%s, HEADERS:%s' % (
1421 1417 request, reason, request.remote_addr, request.headers))
1422 1418
1423 1419 raise HTTPForbidden(explanation=csrf_message)
1424 1420
1425 1421
1426 1422 class LoginRequired(object):
1427 1423 """
1428 1424     Must be logged in to execute this function, else
1429 1425     redirect to the login page
1430 1426 
1431 1427     :param auth_token_access: if enabled, this checks only for a valid auth
1432 1428         token and grants access based on that token
1433 1429 """
1434 1430 def __init__(self, auth_token_access=None):
1435 1431 self.auth_token_access = auth_token_access
1436 1432
1437 1433 def __call__(self, func):
1438 1434 return get_cython_compat_decorator(self.__wrapper, func)
1439 1435
1440 1436 def _get_request(self):
1441 1437 return get_request(self)
1442 1438
1443 1439 def __wrapper(self, func, *fargs, **fkwargs):
1444 1440 from rhodecode.lib import helpers as h
1445 1441 cls = fargs[0]
1446 1442 user = cls._rhodecode_user
1447 1443 request = self._get_request()
1448 1444 _ = request.translate
1449 1445
1450 1446 loc = "%s:%s" % (cls.__class__.__name__, func.__name__)
1451 1447 log.debug('Starting login restriction checks for user: %s' % (user,))
1452 1448 # check if our IP is allowed
1453 1449 ip_access_valid = True
1454 1450 if not user.ip_allowed:
1455 1451 h.flash(h.literal(_('IP %s not allowed' % (user.ip_addr,))),
1456 1452 category='warning')
1457 1453 ip_access_valid = False
1458 1454
1459 1455 # check if we used an APIKEY and it's a valid one
1460 1456 # defined white-list of controllers which API access will be enabled
1461 1457 _auth_token = request.GET.get(
1462 1458 'auth_token', '') or request.GET.get('api_key', '')
1463 1459 auth_token_access_valid = allowed_auth_token_access(
1464 1460 loc, auth_token=_auth_token)
1465 1461
1466 1462 # explicit controller is enabled or API is in our whitelist
1467 1463 if self.auth_token_access or auth_token_access_valid:
1468 1464 log.debug('Checking AUTH TOKEN access for %s' % (cls,))
1469 1465 db_user = user.get_instance()
1470 1466
1471 1467 if db_user:
1472 1468 if self.auth_token_access:
1473 1469 roles = self.auth_token_access
1474 1470 else:
1475 1471 roles = [UserApiKeys.ROLE_HTTP]
1476 1472 token_match = db_user.authenticate_by_token(
1477 1473 _auth_token, roles=roles)
1478 1474 else:
1479 1475 log.debug('Unable to fetch db instance for auth user: %s', user)
1480 1476 token_match = False
1481 1477
1482 1478 if _auth_token and token_match:
1483 1479 auth_token_access_valid = True
1484 1480 log.debug('AUTH TOKEN ****%s is VALID' % (_auth_token[-4:],))
1485 1481 else:
1486 1482 auth_token_access_valid = False
1487 1483 if not _auth_token:
1488 1484 log.debug("AUTH TOKEN *NOT* present in request")
1489 1485 else:
1490 1486 log.warning(
1491 1487 "AUTH TOKEN ****%s *NOT* valid" % _auth_token[-4:])
1492 1488
1493 1489 log.debug('Checking if %s is authenticated @ %s' % (user.username, loc))
1494 1490 reason = 'RHODECODE_AUTH' if user.is_authenticated \
1495 1491 else 'AUTH_TOKEN_AUTH'
1496 1492
1497 1493 if ip_access_valid and (
1498 1494 user.is_authenticated or auth_token_access_valid):
1499 1495 log.info(
1500 1496 'user %s authenticating with:%s IS authenticated on func %s'
1501 1497 % (user, reason, loc))
1502 1498
1503 # update user data to check last activity
1504 user.update_lastactivity()
1505 Session().commit()
1506 1499 return func(*fargs, **fkwargs)
1507 1500 else:
1508 1501 log.warning(
1509 1502 'user %s authenticating with:%s NOT authenticated on '
1510 1503 'func: %s: IP_ACCESS:%s AUTH_TOKEN_ACCESS:%s'
1511 1504 % (user, reason, loc, ip_access_valid,
1512 1505 auth_token_access_valid))
1513 1506 # we preserve the get PARAM
1514 1507 came_from = get_came_from(request)
1515 1508
1516 1509 log.debug('redirecting to login page with %s' % (came_from,))
1517 1510 raise HTTPFound(
1518 1511 h.route_path('login', _query={'came_from': came_from}))
1519 1512
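# --- Editor's illustrative sketch (not part of this changeset) ----------------
# How the decorator above is typically stacked on a pyramid view method. The
# view class is hypothetical; only the decorator call pattern comes from this
# module. With `auth_token_access` set, a request carrying `?auth_token=...`
# (or the legacy `?api_key=...`) can pass the check without a logged-in session.
def _login_required_usage_sketch():
    class ExampleRepoView(object):  # real views derive from RhodeCode's base views
        _rhodecode_user = None      # normally provided by the base view

        @LoginRequired(auth_token_access=[UserApiKeys.ROLE_HTTP])
        @HasRepoPermissionAnyDecorator(
            'repository.read', 'repository.write', 'repository.admin')
        def repo_raw_file(self):
            return 'ok'

    return ExampleRepoView
# ------------------------------------------------------------------------------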
1520 1513
1521 1514 class NotAnonymous(object):
1522 1515 """
1523 1516     Must be logged in to execute this function, else
1524 1517     redirect to the login page
1525 1518 """
1526 1519
1527 1520 def __call__(self, func):
1528 1521 return get_cython_compat_decorator(self.__wrapper, func)
1529 1522
1530 1523 def _get_request(self):
1531 1524 return get_request(self)
1532 1525
1533 1526 def __wrapper(self, func, *fargs, **fkwargs):
1534 1527 import rhodecode.lib.helpers as h
1535 1528 cls = fargs[0]
1536 1529 self.user = cls._rhodecode_user
1537 1530 request = self._get_request()
1538 1531 _ = request.translate
1539 1532 log.debug('Checking if user is not anonymous @%s' % cls)
1540 1533
1541 1534 anonymous = self.user.username == User.DEFAULT_USER
1542 1535
1543 1536 if anonymous:
1544 1537 came_from = get_came_from(request)
1545 1538 h.flash(_('You need to be a registered user to '
1546 1539 'perform this action'),
1547 1540 category='warning')
1548 1541 raise HTTPFound(
1549 1542 h.route_path('login', _query={'came_from': came_from}))
1550 1543 else:
1551 1544 return func(*fargs, **fkwargs)
1552 1545
1553 1546
1554 1547 class PermsDecorator(object):
1555 1548 """
1556 1549 Base class for controller decorators, we extract the current user from
1557 1550 the class itself, which has it stored in base controllers
1558 1551 """
1559 1552
1560 1553 def __init__(self, *required_perms):
1561 1554 self.required_perms = set(required_perms)
1562 1555
1563 1556 def __call__(self, func):
1564 1557 return get_cython_compat_decorator(self.__wrapper, func)
1565 1558
1566 1559 def _get_request(self):
1567 1560 return get_request(self)
1568 1561
1569 1562 def __wrapper(self, func, *fargs, **fkwargs):
1570 1563 import rhodecode.lib.helpers as h
1571 1564 cls = fargs[0]
1572 1565 _user = cls._rhodecode_user
1573 1566 request = self._get_request()
1574 1567 _ = request.translate
1575 1568
1576 1569 log.debug('checking %s permissions %s for %s %s',
1577 1570 self.__class__.__name__, self.required_perms, cls, _user)
1578 1571
1579 1572 if self.check_permissions(_user):
1580 1573 log.debug('Permission granted for %s %s', cls, _user)
1581 1574 return func(*fargs, **fkwargs)
1582 1575
1583 1576 else:
1584 1577 log.debug('Permission denied for %s %s', cls, _user)
1585 1578 anonymous = _user.username == User.DEFAULT_USER
1586 1579
1587 1580 if anonymous:
1588 1581 came_from = get_came_from(self._get_request())
1589 1582 h.flash(_('You need to be signed in to view this page'),
1590 1583 category='warning')
1591 1584 raise HTTPFound(
1592 1585 h.route_path('login', _query={'came_from': came_from}))
1593 1586
1594 1587 else:
1595 1588 # redirect with 404 to prevent resource discovery
1596 1589 raise HTTPNotFound()
1597 1590
1598 1591 def check_permissions(self, user):
1599 1592 """Dummy function for overriding"""
1600 1593 raise NotImplementedError(
1601 1594 'You have to write this function in child class')
1602 1595
1603 1596
1604 1597 class HasPermissionAllDecorator(PermsDecorator):
1605 1598 """
1606 1599     Checks for access permission for all given predicates. All of them
1607 1600     have to be met in order to fulfill the request
1608 1601 """
1609 1602
1610 1603 def check_permissions(self, user):
1611 1604 perms = user.permissions_with_scope({})
1612 1605 if self.required_perms.issubset(perms['global']):
1613 1606 return True
1614 1607 return False
1615 1608
1616 1609
1617 1610 class HasPermissionAnyDecorator(PermsDecorator):
1618 1611 """
1619 1612     Checks for access permission for any of the given predicates. In order to
1620 1613     fulfill the request any of the predicates must be met
1621 1614 """
1622 1615
1623 1616 def check_permissions(self, user):
1624 1617 perms = user.permissions_with_scope({})
1625 1618 if self.required_perms.intersection(perms['global']):
1626 1619 return True
1627 1620 return False
1628 1621
1629 1622
1630 1623 class HasRepoPermissionAllDecorator(PermsDecorator):
1631 1624 """
1632 1625     Checks for access permission for all given predicates for a specific
1633 1626     repository. All of them have to be met in order to fulfill the request
1634 1627 """
1635 1628 def _get_repo_name(self):
1636 1629 _request = self._get_request()
1637 1630 return get_repo_slug(_request)
1638 1631
1639 1632 def check_permissions(self, user):
1640 1633 perms = user.permissions
1641 1634 repo_name = self._get_repo_name()
1642 1635
1643 1636 try:
1644 1637 user_perms = {perms['repositories'][repo_name]}
1645 1638 except KeyError:
1646 1639 log.debug('cannot locate repo with name: `%s` in permissions defs',
1647 1640 repo_name)
1648 1641 return False
1649 1642
1650 1643 log.debug('checking `%s` permissions for repo `%s`',
1651 1644 user_perms, repo_name)
1652 1645 if self.required_perms.issubset(user_perms):
1653 1646 return True
1654 1647 return False
1655 1648
1656 1649
1657 1650 class HasRepoPermissionAnyDecorator(PermsDecorator):
1658 1651 """
1659 1652     Checks for access permission for any of the given predicates for a specific
1660 1653     repository. In order to fulfill the request any of the predicates must be met
1661 1654 """
1662 1655 def _get_repo_name(self):
1663 1656 _request = self._get_request()
1664 1657 return get_repo_slug(_request)
1665 1658
1666 1659 def check_permissions(self, user):
1667 1660 perms = user.permissions
1668 1661 repo_name = self._get_repo_name()
1669 1662
1670 1663 try:
1671 1664 user_perms = {perms['repositories'][repo_name]}
1672 1665 except KeyError:
1673 1666 log.debug(
1674 1667 'cannot locate repo with name: `%s` in permissions defs',
1675 1668 repo_name)
1676 1669 return False
1677 1670
1678 1671 log.debug('checking `%s` permissions for repo `%s`',
1679 1672 user_perms, repo_name)
1680 1673 if self.required_perms.intersection(user_perms):
1681 1674 return True
1682 1675 return False
1683 1676
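# --- Editor's illustrative sketch (not part of this changeset) ----------------
# Difference between the *All and *Any decorator flavours above, using
# hypothetical permission sets; both ultimately compare against the single
# permission recorded for the object in the user's permission tree.
_required_any = {'repository.read', 'repository.write', 'repository.admin'}
_required_all = {'repository.admin'}
_user_repo_perm = {'repository.read'}   # what the permission tree holds

assert _required_any.intersection(_user_repo_perm)   # Any: one match suffices
assert not _required_all.issubset(_user_repo_perm)   # All: every perm required
# ------------------------------------------------------------------------------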
1684 1677
1685 1678 class HasRepoGroupPermissionAllDecorator(PermsDecorator):
1686 1679 """
1687 1680     Checks for access permission for all given predicates for a specific
1688 1681     repository group. All of them have to be met in order to
1689 1682 fulfill the request
1690 1683 """
1691 1684 def _get_repo_group_name(self):
1692 1685 _request = self._get_request()
1693 1686 return get_repo_group_slug(_request)
1694 1687
1695 1688 def check_permissions(self, user):
1696 1689 perms = user.permissions
1697 1690 group_name = self._get_repo_group_name()
1698 1691 try:
1699 1692 user_perms = {perms['repositories_groups'][group_name]}
1700 1693 except KeyError:
1701 1694 log.debug(
1702 1695 'cannot locate repo group with name: `%s` in permissions defs',
1703 1696 group_name)
1704 1697 return False
1705 1698
1706 1699 log.debug('checking `%s` permissions for repo group `%s`',
1707 1700 user_perms, group_name)
1708 1701 if self.required_perms.issubset(user_perms):
1709 1702 return True
1710 1703 return False
1711 1704
1712 1705
1713 1706 class HasRepoGroupPermissionAnyDecorator(PermsDecorator):
1714 1707 """
1715 1708     Checks for access permission for any of the given predicates for a specific
1716 1709     repository group. In order to fulfill the request any
1717 1710     of the predicates must be met
1718 1711 """
1719 1712 def _get_repo_group_name(self):
1720 1713 _request = self._get_request()
1721 1714 return get_repo_group_slug(_request)
1722 1715
1723 1716 def check_permissions(self, user):
1724 1717 perms = user.permissions
1725 1718 group_name = self._get_repo_group_name()
1726 1719
1727 1720 try:
1728 1721 user_perms = {perms['repositories_groups'][group_name]}
1729 1722 except KeyError:
1730 1723 log.debug(
1731 1724 'cannot locate repo group with name: `%s` in permissions defs',
1732 1725 group_name)
1733 1726 return False
1734 1727
1735 1728 log.debug('checking `%s` permissions for repo group `%s`',
1736 1729 user_perms, group_name)
1737 1730 if self.required_perms.intersection(user_perms):
1738 1731 return True
1739 1732 return False
1740 1733
1741 1734
1742 1735 class HasUserGroupPermissionAllDecorator(PermsDecorator):
1743 1736 """
1744 1737     Checks for access permission for all given predicates for a specific
1745 1738     user group. All of them have to be met in order to fulfill the request
1746 1739 """
1747 1740 def _get_user_group_name(self):
1748 1741 _request = self._get_request()
1749 1742 return get_user_group_slug(_request)
1750 1743
1751 1744 def check_permissions(self, user):
1752 1745 perms = user.permissions
1753 1746 group_name = self._get_user_group_name()
1754 1747 try:
1755 1748 user_perms = {perms['user_groups'][group_name]}
1756 1749 except KeyError:
1757 1750 return False
1758 1751
1759 1752 if self.required_perms.issubset(user_perms):
1760 1753 return True
1761 1754 return False
1762 1755
1763 1756
1764 1757 class HasUserGroupPermissionAnyDecorator(PermsDecorator):
1765 1758 """
1766 1759     Checks for access permission for any of the given predicates for a specific
1767 1760     user group. In order to fulfill the request any of the predicates must be met
1768 1761 """
1769 1762 def _get_user_group_name(self):
1770 1763 _request = self._get_request()
1771 1764 return get_user_group_slug(_request)
1772 1765
1773 1766 def check_permissions(self, user):
1774 1767 perms = user.permissions
1775 1768 group_name = self._get_user_group_name()
1776 1769 try:
1777 1770 user_perms = {perms['user_groups'][group_name]}
1778 1771 except KeyError:
1779 1772 return False
1780 1773
1781 1774 if self.required_perms.intersection(user_perms):
1782 1775 return True
1783 1776 return False
1784 1777
1785 1778
1786 1779 # CHECK FUNCTIONS
1787 1780 class PermsFunction(object):
1788 1781 """Base function for other check functions"""
1789 1782
1790 1783 def __init__(self, *perms):
1791 1784 self.required_perms = set(perms)
1792 1785 self.repo_name = None
1793 1786 self.repo_group_name = None
1794 1787 self.user_group_name = None
1795 1788
1796 1789 def __bool__(self):
1797 1790 frame = inspect.currentframe()
1798 1791 stack_trace = traceback.format_stack(frame)
1799 1792 log.error('Checking bool value on a class instance of perm '
1800 1793 'function is not allowed: %s' % ''.join(stack_trace))
1801 1794         # rather than throwing errors, we always return False here, so if by
1802 1795         # accident someone checks the truth value of just an instance it will
1803 1796         # always end up returning False
1804 1797 return False
1805 1798 __nonzero__ = __bool__
1806 1799
1807 1800 def __call__(self, check_location='', user=None):
1808 1801 if not user:
1809 1802 log.debug('Using user attribute from global request')
1810 1803             # TODO: remove this someday, put user as an attribute here
1811 1804 request = self._get_request()
1812 1805 user = request.user
1813 1806
1814 1807 # init auth user if not already given
1815 1808 if not isinstance(user, AuthUser):
1816 1809 log.debug('Wrapping user %s into AuthUser', user)
1817 1810 user = AuthUser(user.user_id)
1818 1811
1819 1812 cls_name = self.__class__.__name__
1820 1813 check_scope = self._get_check_scope(cls_name)
1821 1814 check_location = check_location or 'unspecified location'
1822 1815
1823 1816 log.debug('checking cls:%s %s usr:%s %s @ %s', cls_name,
1824 1817 self.required_perms, user, check_scope, check_location)
1825 1818 if not user:
1826 1819 log.warning('Empty user given for permission check')
1827 1820 return False
1828 1821
1829 1822 if self.check_permissions(user):
1830 1823 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
1831 1824 check_scope, user, check_location)
1832 1825 return True
1833 1826
1834 1827 else:
1835 1828 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
1836 1829 check_scope, user, check_location)
1837 1830 return False
1838 1831
1839 1832 def _get_request(self):
1840 1833 return get_request(self)
1841 1834
1842 1835 def _get_check_scope(self, cls_name):
1843 1836 return {
1844 1837 'HasPermissionAll': 'GLOBAL',
1845 1838 'HasPermissionAny': 'GLOBAL',
1846 1839 'HasRepoPermissionAll': 'repo:%s' % self.repo_name,
1847 1840 'HasRepoPermissionAny': 'repo:%s' % self.repo_name,
1848 1841 'HasRepoGroupPermissionAll': 'repo_group:%s' % self.repo_group_name,
1849 1842 'HasRepoGroupPermissionAny': 'repo_group:%s' % self.repo_group_name,
1850 1843 'HasUserGroupPermissionAll': 'user_group:%s' % self.user_group_name,
1851 1844 'HasUserGroupPermissionAny': 'user_group:%s' % self.user_group_name,
1852 1845 }.get(cls_name, '?:%s' % cls_name)
1853 1846
1854 1847 def check_permissions(self, user):
1855 1848 """Dummy function for overriding"""
1856 1849 raise Exception('You have to write this function in child class')
1857 1850
1858 1851
1859 1852 class HasPermissionAll(PermsFunction):
1860 1853 def check_permissions(self, user):
1861 1854 perms = user.permissions_with_scope({})
1862 1855 if self.required_perms.issubset(perms.get('global')):
1863 1856 return True
1864 1857 return False
1865 1858
1866 1859
1867 1860 class HasPermissionAny(PermsFunction):
1868 1861 def check_permissions(self, user):
1869 1862 perms = user.permissions_with_scope({})
1870 1863 if self.required_perms.intersection(perms.get('global')):
1871 1864 return True
1872 1865 return False
1873 1866
1874 1867
1875 1868 class HasRepoPermissionAll(PermsFunction):
1876 1869 def __call__(self, repo_name=None, check_location='', user=None):
1877 1870 self.repo_name = repo_name
1878 1871 return super(HasRepoPermissionAll, self).__call__(check_location, user)
1879 1872
1880 1873 def _get_repo_name(self):
1881 1874 if not self.repo_name:
1882 1875 _request = self._get_request()
1883 1876 self.repo_name = get_repo_slug(_request)
1884 1877 return self.repo_name
1885 1878
1886 1879 def check_permissions(self, user):
1887 1880 self.repo_name = self._get_repo_name()
1888 1881 perms = user.permissions
1889 1882 try:
1890 1883 user_perms = {perms['repositories'][self.repo_name]}
1891 1884 except KeyError:
1892 1885 return False
1893 1886 if self.required_perms.issubset(user_perms):
1894 1887 return True
1895 1888 return False
1896 1889
1897 1890
1898 1891 class HasRepoPermissionAny(PermsFunction):
1899 1892 def __call__(self, repo_name=None, check_location='', user=None):
1900 1893 self.repo_name = repo_name
1901 1894 return super(HasRepoPermissionAny, self).__call__(check_location, user)
1902 1895
1903 1896 def _get_repo_name(self):
1904 1897 if not self.repo_name:
1905 1898 _request = self._get_request()
1906 1899 self.repo_name = get_repo_slug(_request)
1907 1900 return self.repo_name
1908 1901
1909 1902 def check_permissions(self, user):
1910 1903 self.repo_name = self._get_repo_name()
1911 1904 perms = user.permissions
1912 1905 try:
1913 1906 user_perms = {perms['repositories'][self.repo_name]}
1914 1907 except KeyError:
1915 1908 return False
1916 1909 if self.required_perms.intersection(user_perms):
1917 1910 return True
1918 1911 return False
1919 1912
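# --- Editor's illustrative sketch (not part of this changeset) ----------------
# The check functions are instantiated and then *called*, never evaluated for
# truthiness directly (see __bool__ above). The repo name and check_location
# are hypothetical; a configured RhodeCode app and request context is assumed.
def _perms_function_usage_sketch(auth_user):
    can_read = HasRepoPermissionAny(
        'repository.read', 'repository.write', 'repository.admin')(
            repo_name='some-group/some-repo',
            check_location='example check', user=auth_user)
    is_super_admin = HasPermissionAny('hg.admin')(
        check_location='example check', user=auth_user)
    return can_read, is_super_admin
# ------------------------------------------------------------------------------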
1920 1913
1921 1914 class HasRepoGroupPermissionAny(PermsFunction):
1922 1915 def __call__(self, group_name=None, check_location='', user=None):
1923 1916 self.repo_group_name = group_name
1924 1917 return super(HasRepoGroupPermissionAny, self).__call__(
1925 1918 check_location, user)
1926 1919
1927 1920 def check_permissions(self, user):
1928 1921 perms = user.permissions
1929 1922 try:
1930 1923 user_perms = {perms['repositories_groups'][self.repo_group_name]}
1931 1924 except KeyError:
1932 1925 return False
1933 1926 if self.required_perms.intersection(user_perms):
1934 1927 return True
1935 1928 return False
1936 1929
1937 1930
1938 1931 class HasRepoGroupPermissionAll(PermsFunction):
1939 1932 def __call__(self, group_name=None, check_location='', user=None):
1940 1933 self.repo_group_name = group_name
1941 1934 return super(HasRepoGroupPermissionAll, self).__call__(
1942 1935 check_location, user)
1943 1936
1944 1937 def check_permissions(self, user):
1945 1938 perms = user.permissions
1946 1939 try:
1947 1940 user_perms = {perms['repositories_groups'][self.repo_group_name]}
1948 1941 except KeyError:
1949 1942 return False
1950 1943 if self.required_perms.issubset(user_perms):
1951 1944 return True
1952 1945 return False
1953 1946
1954 1947
1955 1948 class HasUserGroupPermissionAny(PermsFunction):
1956 1949 def __call__(self, user_group_name=None, check_location='', user=None):
1957 1950 self.user_group_name = user_group_name
1958 1951 return super(HasUserGroupPermissionAny, self).__call__(
1959 1952 check_location, user)
1960 1953
1961 1954 def check_permissions(self, user):
1962 1955 perms = user.permissions
1963 1956 try:
1964 1957 user_perms = {perms['user_groups'][self.user_group_name]}
1965 1958 except KeyError:
1966 1959 return False
1967 1960 if self.required_perms.intersection(user_perms):
1968 1961 return True
1969 1962 return False
1970 1963
1971 1964
1972 1965 class HasUserGroupPermissionAll(PermsFunction):
1973 1966 def __call__(self, user_group_name=None, check_location='', user=None):
1974 1967 self.user_group_name = user_group_name
1975 1968 return super(HasUserGroupPermissionAll, self).__call__(
1976 1969 check_location, user)
1977 1970
1978 1971 def check_permissions(self, user):
1979 1972 perms = user.permissions
1980 1973 try:
1981 1974 user_perms = {perms['user_groups'][self.user_group_name]}
1982 1975 except KeyError:
1983 1976 return False
1984 1977 if self.required_perms.issubset(user_perms):
1985 1978 return True
1986 1979 return False
1987 1980
1988 1981
1989 1982 # SPECIAL VERSION TO HANDLE MIDDLEWARE AUTH
1990 1983 class HasPermissionAnyMiddleware(object):
1991 1984 def __init__(self, *perms):
1992 1985 self.required_perms = set(perms)
1993 1986
1994 1987 def __call__(self, user, repo_name):
1995 1988 # repo_name MUST be unicode, since we handle keys in the permission
1996 1989 # dict as unicode
1997 1990 repo_name = safe_unicode(repo_name)
1998 1991 user = AuthUser(user.user_id)
1999 1992 log.debug(
2000 1993 'Checking VCS protocol permissions %s for user:%s repo:`%s`',
2001 1994 self.required_perms, user, repo_name)
2002 1995
2003 1996 if self.check_permissions(user, repo_name):
2004 1997 log.debug('Permission to repo:`%s` GRANTED for user:%s @ %s',
2005 1998 repo_name, user, 'PermissionMiddleware')
2006 1999 return True
2007 2000
2008 2001 else:
2009 2002 log.debug('Permission to repo:`%s` DENIED for user:%s @ %s',
2010 2003 repo_name, user, 'PermissionMiddleware')
2011 2004 return False
2012 2005
2013 2006 def check_permissions(self, user, repo_name):
2014 2007 perms = user.permissions_with_scope({'repo_name': repo_name})
2015 2008
2016 2009 try:
2017 2010 user_perms = {perms['repositories'][repo_name]}
2018 2011 except Exception:
2019 2012 log.exception('Error while accessing user permissions')
2020 2013 return False
2021 2014
2022 2015 if self.required_perms.intersection(user_perms):
2023 2016 return True
2024 2017 return False
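# Editor's illustrative sketch (not part of this changeset): this middleware
# variant is the one the VCS protocol handlers use for push/pull checks.
# Assuming `db_user` is a User row, a pull check could look roughly like:
#
#     allowed = HasPermissionAnyMiddleware(
#         'repository.read', 'repository.write', 'repository.admin')(
#             db_user, u'group/repo')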
2025 2018
2026 2019
2027 2020 # SPECIAL VERSION TO HANDLE API AUTH
2028 2021 class _BaseApiPerm(object):
2029 2022 def __init__(self, *perms):
2030 2023 self.required_perms = set(perms)
2031 2024
2032 2025 def __call__(self, check_location=None, user=None, repo_name=None,
2033 2026 group_name=None, user_group_name=None):
2034 2027 cls_name = self.__class__.__name__
2035 2028 check_scope = 'global:%s' % (self.required_perms,)
2036 2029 if repo_name:
2037 2030 check_scope += ', repo_name:%s' % (repo_name,)
2038 2031
2039 2032 if group_name:
2040 2033 check_scope += ', repo_group_name:%s' % (group_name,)
2041 2034
2042 2035 if user_group_name:
2043 2036 check_scope += ', user_group_name:%s' % (user_group_name,)
2044 2037
2045 2038 log.debug(
2046 2039 'checking cls:%s %s %s @ %s'
2047 2040 % (cls_name, self.required_perms, check_scope, check_location))
2048 2041 if not user:
2049 2042 log.debug('Empty User passed into arguments')
2050 2043 return False
2051 2044
2052 2045 # process user
2053 2046 if not isinstance(user, AuthUser):
2054 2047 user = AuthUser(user.user_id)
2055 2048 if not check_location:
2056 2049 check_location = 'unspecified'
2057 2050 if self.check_permissions(user.permissions, repo_name, group_name,
2058 2051 user_group_name):
2059 2052 log.debug('Permission to repo:`%s` GRANTED for user:`%s` @ %s',
2060 2053 check_scope, user, check_location)
2061 2054 return True
2062 2055
2063 2056 else:
2064 2057 log.debug('Permission to repo:`%s` DENIED for user:`%s` @ %s',
2065 2058 check_scope, user, check_location)
2066 2059 return False
2067 2060
2068 2061 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2069 2062 user_group_name=None):
2070 2063 """
2071 2064 Implement in child class; should return True if permissions are ok,
2072 2065 False otherwise.
2073 2066
2074 2067 :param perm_defs: dict with permission definitions
2075 2068 :param repo_name: repo name
2076 2069 """
2077 2070 raise NotImplementedError()
2078 2071
2079 2072
2080 2073 class HasPermissionAllApi(_BaseApiPerm):
2081 2074 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2082 2075 user_group_name=None):
2083 2076 if self.required_perms.issubset(perm_defs.get('global')):
2084 2077 return True
2085 2078 return False
2086 2079
2087 2080
2088 2081 class HasPermissionAnyApi(_BaseApiPerm):
2089 2082 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2090 2083 user_group_name=None):
2091 2084 if self.required_perms.intersection(perm_defs.get('global')):
2092 2085 return True
2093 2086 return False
2094 2087
2095 2088
2096 2089 class HasRepoPermissionAllApi(_BaseApiPerm):
2097 2090 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2098 2091 user_group_name=None):
2099 2092 try:
2100 2093 _user_perms = {perm_defs['repositories'][repo_name]}
2101 2094 except KeyError:
2102 2095 log.warning(traceback.format_exc())
2103 2096 return False
2104 2097 if self.required_perms.issubset(_user_perms):
2105 2098 return True
2106 2099 return False
2107 2100
2108 2101
2109 2102 class HasRepoPermissionAnyApi(_BaseApiPerm):
2110 2103 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2111 2104 user_group_name=None):
2112 2105 try:
2113 2106 _user_perms = {perm_defs['repositories'][repo_name]}
2114 2107 except KeyError:
2115 2108 log.warning(traceback.format_exc())
2116 2109 return False
2117 2110 if self.required_perms.intersection(_user_perms):
2118 2111 return True
2119 2112 return False
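# Editor's illustrative sketch (not part of this changeset): the *Api classes
# are called with an already-resolved user and a scope keyword. Assuming
# `api_user` is an AuthUser, a check could look roughly like:
#
#     has_read = HasRepoPermissionAnyApi(
#         'repository.read', 'repository.admin')(
#             check_location='api_call', user=api_user, repo_name='group/repo')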
2120 2113
2121 2114
2122 2115 class HasRepoGroupPermissionAnyApi(_BaseApiPerm):
2123 2116 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2124 2117 user_group_name=None):
2125 2118 try:
2126 2119 _user_perms = {perm_defs['repositories_groups'][group_name]}
2127 2120 except KeyError:
2128 2121 log.warning(traceback.format_exc())
2129 2122 return False
2130 2123 if self.required_perms.intersection(_user_perms):
2131 2124 return True
2132 2125 return False
2133 2126
2134 2127
2135 2128 class HasRepoGroupPermissionAllApi(_BaseApiPerm):
2136 2129 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2137 2130 user_group_name=None):
2138 2131 try:
2139 2132 _user_perms = {perm_defs['repositories_groups'][group_name]}
2140 2133 except KeyError:
2141 2134 log.warning(traceback.format_exc())
2142 2135 return False
2143 2136 if self.required_perms.issubset(_user_perms):
2144 2137 return True
2145 2138 return False
2146 2139
2147 2140
2148 2141 class HasUserGroupPermissionAnyApi(_BaseApiPerm):
2149 2142 def check_permissions(self, perm_defs, repo_name=None, group_name=None,
2150 2143 user_group_name=None):
2151 2144 try:
2152 2145 _user_perms = {perm_defs['user_groups'][user_group_name]}
2153 2146 except KeyError:
2154 2147 log.warning(traceback.format_exc())
2155 2148 return False
2156 2149 if self.required_perms.intersection(_user_perms):
2157 2150 return True
2158 2151 return False
2159 2152
2160 2153
2161 2154 def check_ip_access(source_ip, allowed_ips=None):
2162 2155 """
2163 2156 Checks if source_ip is a subnet of any of allowed_ips.
2164 2157
2165 2158 :param source_ip:
2166 2159 :param allowed_ips: list of allowed ips together with mask
2167 2160 """
2168 2161 log.debug('checking if ip:%s is subnet of %s' % (source_ip, allowed_ips))
2169 2162 source_ip_address = ipaddress.ip_address(safe_unicode(source_ip))
2170 2163 if isinstance(allowed_ips, (tuple, list, set)):
2171 2164 for ip in allowed_ips:
2172 2165 ip = safe_unicode(ip)
2173 2166 try:
2174 2167 network_address = ipaddress.ip_network(ip, strict=False)
2175 2168 if source_ip_address in network_address:
2176 2169 log.debug('IP %s is in network %s' %
2177 2170 (source_ip_address, network_address))
2178 2171 return True
2179 2172 # in case we cannot determine the IP, don't crash, just
2180 2173 # skip it and log it as an error; we still want to return forbidden when
2181 2174 # a bad IP is sent
2182 2175 except Exception:
2183 2176 log.error(traceback.format_exc())
2184 2177 continue
2185 2178 return False
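# Editor's illustrative sketch (not part of this changeset): each allowed entry
# is parsed as a network, so plain addresses and CIDR ranges both work:
#
#     check_ip_access('192.168.1.7', ['192.168.1.0/24'])  # True
#     check_ip_access('10.0.0.5', ['192.168.1.0/24'])     # False
#     check_ip_access('10.0.0.5', None)                    # False, nothing allowed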
2186 2179
2187 2180
2188 2181 def get_cython_compat_decorator(wrapper, func):
2189 2182 """
2190 2183 Creates a cython compatible decorator. The previously used
2191 2184 decorator.decorator() function seems to be incompatible with cython.
2192 2185
2193 2186 :param wrapper: __wrapper method of the decorator class
2194 2187 :param func: decorated function
2195 2188 """
2196 2189 @wraps(func)
2197 2190 def local_wrapper(*args, **kwds):
2198 2191 return wrapper(func, *args, **kwds)
2199 2192 local_wrapper.__wrapped__ = func
2200 2193 return local_wrapper
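# Editor's illustrative sketch (not part of this changeset): a decorator class
# can hand its wrapper method and the decorated function to the helper above,
# roughly like:
#
#     class ExampleDecorator(object):  # hypothetical
#         def __call__(self, func):
#             return get_cython_compat_decorator(self.__wrapper, func)
#
#         def __wrapper(self, func, *fargs, **fkwargs):
#             # permission checks would go here
#             return func(*fargs, **fkwargs)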
2201 2194
2202 2195
@@ -1,662 +1,659 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SimpleVCS middleware for handling protocol request (push/clone etc.)
23 23 It's implemented with basic auth function
24 24 """
25 25
26 26 import os
27 27 import re
28 28 import logging
29 29 import importlib
30 30 from functools import wraps
31 31 from StringIO import StringIO
32 32 from lxml import etree
33 33
34 34 import time
35 35 from paste.httpheaders import REMOTE_USER, AUTH_TYPE
36 36
37 37 from pyramid.httpexceptions import (
38 38 HTTPNotFound, HTTPForbidden, HTTPNotAcceptable, HTTPInternalServerError)
39 39 from zope.cachedescriptors.property import Lazy as LazyProperty
40 40
41 41 import rhodecode
42 42 from rhodecode.authentication.base import authenticate, VCS_TYPE, loadplugin
43 43 from rhodecode.lib import caches, rc_cache
44 44 from rhodecode.lib.auth import AuthUser, HasPermissionAnyMiddleware
45 45 from rhodecode.lib.base import (
46 46 BasicAuth, get_ip_addr, get_user_agent, vcs_operation_context)
47 47 from rhodecode.lib.exceptions import (UserCreationError, NotAllowedToCreateUserError)
48 48 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
49 49 from rhodecode.lib.middleware import appenlight
50 50 from rhodecode.lib.middleware.utils import scm_app_http
51 51 from rhodecode.lib.utils import is_valid_repo, SLUG_RE
52 52 from rhodecode.lib.utils2 import safe_str, fix_PATH, str2bool, safe_unicode
53 53 from rhodecode.lib.vcs.conf import settings as vcs_settings
54 54 from rhodecode.lib.vcs.backends import base
55 55
56 56 from rhodecode.model import meta
57 57 from rhodecode.model.db import User, Repository, PullRequest
58 58 from rhodecode.model.scm import ScmModel
59 59 from rhodecode.model.pull_request import PullRequestModel
60 60 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
61 61
62 62 log = logging.getLogger(__name__)
63 63
64 64
65 65 def extract_svn_txn_id(acl_repo_name, data):
66 66 """
67 67 Helper method for extraction of svn txn_id from submitted XML data during
68 68 POST operations
69 69 """
70 70 try:
71 71 root = etree.fromstring(data)
72 72 pat = re.compile(r'/txn/(?P<txn_id>.*)')
73 73 for el in root:
74 74 if el.tag == '{DAV:}source':
75 75 for sub_el in el:
76 76 if sub_el.tag == '{DAV:}href':
77 77 match = pat.search(sub_el.text)
78 78 if match:
79 79 svn_tx_id = match.groupdict()['txn_id']
80 80 txn_id = caches.compute_key_from_params(
81 81 acl_repo_name, svn_tx_id)
82 82 return txn_id
83 83 except Exception:
84 84 log.exception('Failed to extract txn_id')
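# Editor's illustrative sketch (not part of this changeset): the XML body of an
# SVN MERGE request roughly looks like the snippet below; the txn_id is the part
# of the {DAV:}href element after /txn/ (repository path and id are made up):
#
#     <?xml version="1.0" encoding="utf-8"?>
#     <D:merge xmlns:D="DAV:">
#       <D:source><D:href>/svn-repo/!svn/txn/151-4o</D:href></D:source>
#     </D:merge>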
85 85
86 86
87 87 def initialize_generator(factory):
88 88 """
89 89 Initializes the returned generator by draining its first element.
90 90
91 91 This can be used to give a generator an initializer, which is the code
92 92 up to the first yield statement. This decorator enforces that the first
93 93 produced element has the value ``"__init__"`` to make its special
94 94 purpose very explicit in the using code.
95 95 """
96 96
97 97 @wraps(factory)
98 98 def wrapper(*args, **kwargs):
99 99 gen = factory(*args, **kwargs)
100 100 try:
101 101 init = gen.next()
102 102 except StopIteration:
103 103 raise ValueError('Generator must yield at least one element.')
104 104 if init != "__init__":
105 105 raise ValueError('First yielded element must be "__init__".')
106 106 return gen
107 107 return wrapper
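# Editor's illustrative sketch (not part of this changeset): a generator factory
# decorated with initialize_generator must yield "__init__" first; everything
# before that yield runs eagerly when the factory is called:
#
#     @initialize_generator
#     def responder():
#         resource = open_resource()  # hypothetical setup, runs immediately
#         yield "__init__"
#         for chunk in resource:
#             yield chunk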
108 108
109 109
110 110 class SimpleVCS(object):
111 111 """Common functionality for SCM HTTP handlers."""
112 112
113 113 SCM = 'unknown'
114 114
115 115 acl_repo_name = None
116 116 url_repo_name = None
117 117 vcs_repo_name = None
118 118 rc_extras = {}
119 119
120 120 # We have to handle requests to shadow repositories different than requests
121 121 # to normal repositories. Therefore we have to distinguish them. To do this
122 122 # we use this regex which will match only on URLs pointing to shadow
123 123 # repositories.
124 124 shadow_repo_re = re.compile(
125 125 '(?P<groups>(?:{slug_pat}/)*)' # repo groups
126 126 '(?P<target>{slug_pat})/' # target repo
127 127 'pull-request/(?P<pr_id>\d+)/' # pull request
128 128 'repository$' # shadow repo
129 129 .format(slug_pat=SLUG_RE.pattern))
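# Editor's illustrative sketch (not part of this changeset): URLs the regex
# above is meant to accept or reject, assuming default slug rules:
#
#     SimpleVCS.shadow_repo_re.match('RepoGroup/MyRepo/pull-request/3/repository')  # match
#     SimpleVCS.shadow_repo_re.match('RepoGroup/MyRepo')                            # None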
130 130
131 131 def __init__(self, config, registry):
132 132 self.registry = registry
133 133 self.config = config
134 134 # re-populated by specialized middleware
135 135 self.repo_vcs_config = base.Config()
136 136 self.rhodecode_settings = SettingsModel().get_all_settings(cache=True)
137 137
138 138 registry.rhodecode_settings = self.rhodecode_settings
139 139 # authenticate this VCS request using authfunc
140 140 auth_ret_code_detection = \
141 141 str2bool(self.config.get('auth_ret_code_detection', False))
142 142 self.authenticate = BasicAuth(
143 143 '', authenticate, registry, config.get('auth_ret_code'),
144 144 auth_ret_code_detection)
145 145 self.ip_addr = '0.0.0.0'
146 146
147 147 @LazyProperty
148 148 def global_vcs_config(self):
149 149 try:
150 150 return VcsSettingsModel().get_ui_settings_as_config_obj()
151 151 except Exception:
152 152 return base.Config()
153 153
154 154 @property
155 155 def base_path(self):
156 156 settings_path = self.repo_vcs_config.get(
157 157 *VcsSettingsModel.PATH_SETTING)
158 158
159 159 if not settings_path:
160 160 settings_path = self.global_vcs_config.get(
161 161 *VcsSettingsModel.PATH_SETTING)
162 162
163 163 if not settings_path:
164 164 # try, maybe we passed in explicitly as config option
165 165 settings_path = self.config.get('base_path')
166 166
167 167 if not settings_path:
168 168 raise ValueError('FATAL: base_path is empty')
169 169 return settings_path
170 170
171 171 def set_repo_names(self, environ):
172 172 """
173 173 This will populate the attributes acl_repo_name, url_repo_name,
174 174 vcs_repo_name and is_shadow_repo. In case of requests to normal (non
175 175 shadow) repositories all names are equal. In case of requests to a
176 176 shadow repository the acl-name points to the target repo of the pull
177 177 request and the vcs-name points to the shadow repo file system path.
178 178 The url-name is always the URL used by the vcs client program.
179 179
180 180 Example in case of a shadow repo:
181 181 acl_repo_name = RepoGroup/MyRepo
182 182 url_repo_name = RepoGroup/MyRepo/pull-request/3/repository
183 183 vcs_repo_name = /repo/base/path/RepoGroup/.__shadow_MyRepo_pr-3'
184 184 """
185 185 # First we set the repo name from URL for all attributes. This is the
186 186 # default if handling normal (non shadow) repo requests.
187 187 self.url_repo_name = self._get_repository_name(environ)
188 188 self.acl_repo_name = self.vcs_repo_name = self.url_repo_name
189 189 self.is_shadow_repo = False
190 190
191 191 # Check if this is a request to a shadow repository.
192 192 match = self.shadow_repo_re.match(self.url_repo_name)
193 193 if match:
194 194 match_dict = match.groupdict()
195 195
196 196 # Build acl repo name from regex match.
197 197 acl_repo_name = safe_unicode('{groups}{target}'.format(
198 198 groups=match_dict['groups'] or '',
199 199 target=match_dict['target']))
200 200
201 201 # Retrieve pull request instance by ID from regex match.
202 202 pull_request = PullRequest.get(match_dict['pr_id'])
203 203
204 204 # Only proceed if we got a pull request and if acl repo name from
205 205 # URL equals the target repo name of the pull request.
206 206 if pull_request and \
207 207 (acl_repo_name == pull_request.target_repo.repo_name):
208 208 repo_id = pull_request.target_repo.repo_id
209 209 # Get file system path to shadow repository.
210 210 workspace_id = PullRequestModel()._workspace_id(pull_request)
211 211 target_vcs = pull_request.target_repo.scm_instance()
212 212 vcs_repo_name = target_vcs._get_shadow_repository_path(
213 213 repo_id, workspace_id)
214 214
215 215 # Store names for later usage.
216 216 self.vcs_repo_name = vcs_repo_name
217 217 self.acl_repo_name = acl_repo_name
218 218 self.is_shadow_repo = True
219 219
220 220 log.debug('Setting all VCS repository names: %s', {
221 221 'acl_repo_name': self.acl_repo_name,
222 222 'url_repo_name': self.url_repo_name,
223 223 'vcs_repo_name': self.vcs_repo_name,
224 224 })
225 225
226 226 @property
227 227 def scm_app(self):
228 228 custom_implementation = self.config['vcs.scm_app_implementation']
229 229 if custom_implementation == 'http':
230 230 log.info('Using HTTP implementation of scm app.')
231 231 scm_app_impl = scm_app_http
232 232 else:
233 233 log.info('Using custom implementation of scm_app: "{}"'.format(
234 234 custom_implementation))
235 235 scm_app_impl = importlib.import_module(custom_implementation)
236 236 return scm_app_impl
237 237
238 238 def _get_by_id(self, repo_name):
239 239 """
240 240 Gets a special pattern _<ID> from the clone url and tries to replace it
241 241 with a repository_name, to support permanent _<ID> urls
242 242 """
243 243
244 244 data = repo_name.split('/')
245 245 if len(data) >= 2:
246 246 from rhodecode.model.repo import RepoModel
247 247 by_id_match = RepoModel().get_repo_by_id(repo_name)
248 248 if by_id_match:
249 249 data[1] = by_id_match.repo_name
250 250
251 251 return safe_str('/'.join(data))
252 252
253 253 def _invalidate_cache(self, repo_name):
254 254 """
255 255 Sets cache for this repository for invalidation on next access
256 256
257 257 :param repo_name: full repo name, also a cache key
258 258 """
259 259 ScmModel().mark_for_invalidation(repo_name)
260 260
261 261 def is_valid_and_existing_repo(self, repo_name, base_path, scm_type):
262 262 db_repo = Repository.get_by_repo_name(repo_name)
263 263 if not db_repo:
264 264 log.debug('Repository `%s` not found inside the database.',
265 265 repo_name)
266 266 return False
267 267
268 268 if db_repo.repo_type != scm_type:
269 269 log.warning(
270 270 'Repository `%s` has incorrect scm_type, expected %s got %s',
271 271 repo_name, db_repo.repo_type, scm_type)
272 272 return False
273 273
274 274 config = db_repo._config
275 275 config.set('extensions', 'largefiles', '')
276 276 return is_valid_repo(
277 277 repo_name, base_path,
278 278 explicit_scm=scm_type, expect_scm=scm_type, config=config)
279 279
280 280 def valid_and_active_user(self, user):
281 281 """
282 282 Checks that the user is not empty and, if it is an actual object,
283 283 checks whether it is active.
284 284
285 285 :param user: user object or None
286 286 :return: boolean
287 287 """
288 288 if user is None:
289 289 return False
290 290
291 291 elif user.active:
292 292 return True
293 293
294 294 return False
295 295
296 296 @property
297 297 def is_shadow_repo_dir(self):
298 298 return os.path.isdir(self.vcs_repo_name)
299 299
300 300 def _check_permission(self, action, user, repo_name, ip_addr=None,
301 301 plugin_id='', plugin_cache_active=False, cache_ttl=0):
302 302 """
303 303 Checks permissions using action (push/pull) user and repository
304 304 name. If plugin_cache and ttl is set it will use the plugin which
305 305 authenticated the user to store the cached permissions result for N
306 306 amount of seconds as in cache_ttl
307 307
308 308 :param action: push or pull action
309 309 :param user: user instance
310 310 :param repo_name: repository name
311 311 """
312 312
313 313 log.debug('AUTH_CACHE_TTL for permissions `%s` active: %s (TTL: %s)',
314 314 plugin_id, plugin_cache_active, cache_ttl)
315 315
316 316 user_id = user.user_id
317 317 cache_namespace_uid = 'cache_user_auth.{}'.format(user_id)
318 318 region = rc_cache.get_or_create_region('cache_perms', cache_namespace_uid)
319 319
320 320 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
321 321 expiration_time=cache_ttl,
322 322 condition=plugin_cache_active)
323 323 def compute_perm_vcs(
324 324 cache_name, plugin_id, action, user_id, repo_name, ip_addr):
325 325
326 326 log.debug('auth: calculating permission access now...')
327 327 # check IP
328 328 inherit = user.inherit_default_permissions
329 329 ip_allowed = AuthUser.check_ip_allowed(
330 330 user_id, ip_addr, inherit_from_default=inherit)
331 331 if ip_allowed:
332 332 log.info('Access for IP:%s allowed', ip_addr)
333 333 else:
334 334 return False
335 335
336 336 if action == 'push':
337 337 perms = ('repository.write', 'repository.admin')
338 338 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
339 339 return False
340 340
341 341 else:
342 342 # any other action need at least read permission
343 343 perms = (
344 344 'repository.read', 'repository.write', 'repository.admin')
345 345 if not HasPermissionAnyMiddleware(*perms)(user, repo_name):
346 346 return False
347 347
348 348 return True
349 349
350 350 start = time.time()
351 351 log.debug('Running plugin `%s` permissions check', plugin_id)
352 352
353 353 # for environ based auth, password can be empty, but then the validation is
354 354 # on the server that fills in the env data needed for authentication
355 355 perm_result = compute_perm_vcs(
356 356 'vcs_permissions', plugin_id, action, user.user_id, repo_name, ip_addr)
357 357
358 358 auth_time = time.time() - start
359 359 log.debug('Permissions for plugin `%s` completed in %.3fs, '
360 360 'expiration time of fetched cache %.1fs.',
361 361 plugin_id, auth_time, cache_ttl)
362 362
363 363 return perm_result
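# Editor's illustrative sketch (not part of this changeset): a push check with
# plugin-level caching enabled could look roughly like (values are made up):
#
#     allowed = self._check_permission(
#         'push', auth_user, 'group/repo', ip_addr='10.0.0.5',
#         plugin_id='egg:rhodecode-enterprise-ce#rhodecode',
#         plugin_cache_active=True, cache_ttl=300)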
364 364
365 365 def _check_ssl(self, environ, start_response):
366 366 """
367 367 Checks the SSL check flag and returns False if SSL is required but
368 368 not present, True otherwise
369 369 """
370 370 org_proto = environ['wsgi._org_proto']
371 371 # check if we have SSL required ! if not it's a bad request !
372 372 require_ssl = str2bool(self.repo_vcs_config.get('web', 'push_ssl'))
373 373 if require_ssl and org_proto == 'http':
374 374 log.debug(
375 375 'Bad request: detected protocol is `%s` and '
376 376 'SSL/HTTPS is required.', org_proto)
377 377 return False
378 378 return True
379 379
380 380 def _get_default_cache_ttl(self):
381 381 # take AUTH_CACHE_TTL from the `rhodecode` auth plugin
382 382 plugin = loadplugin('egg:rhodecode-enterprise-ce#rhodecode')
383 383 plugin_settings = plugin.get_settings()
384 384 plugin_cache_active, cache_ttl = plugin.get_ttl_cache(
385 385 plugin_settings) or (False, 0)
386 386 return plugin_cache_active, cache_ttl
387 387
388 388 def __call__(self, environ, start_response):
389 389 try:
390 390 return self._handle_request(environ, start_response)
391 391 except Exception:
392 392 log.exception("Exception while handling request")
393 393 appenlight.track_exception(environ)
394 394 return HTTPInternalServerError()(environ, start_response)
395 395 finally:
396 396 meta.Session.remove()
397 397
398 398 def _handle_request(self, environ, start_response):
399 399
400 400 if not self._check_ssl(environ, start_response):
401 401 reason = ('SSL is required, but RhodeCode was unable '
402 402 'to detect this as an SSL request')
403 403 log.debug('User not allowed to proceed, %s', reason)
404 404 return HTTPNotAcceptable(reason)(environ, start_response)
405 405
406 406 if not self.url_repo_name:
407 407 log.warning('Repository name is empty: %s', self.url_repo_name)
408 408 # failed to get repo name, we fail now
409 409 return HTTPNotFound()(environ, start_response)
410 410 log.debug('Extracted repo name is %s', self.url_repo_name)
411 411
412 412 ip_addr = get_ip_addr(environ)
413 413 user_agent = get_user_agent(environ)
414 414 username = None
415 415
416 416 # skip passing error to error controller
417 417 environ['pylons.status_code_redirect'] = True
418 418
419 419 # ======================================================================
420 420 # GET ACTION PULL or PUSH
421 421 # ======================================================================
422 422 action = self._get_action(environ)
423 423
424 424 # ======================================================================
425 425 # Check if this is a request to a shadow repository of a pull request.
426 426 # In this case only pull action is allowed.
427 427 # ======================================================================
428 428 if self.is_shadow_repo and action != 'pull':
429 429 reason = 'Only pull action is allowed for shadow repositories.'
430 430 log.debug('User not allowed to proceed, %s', reason)
431 431 return HTTPNotAcceptable(reason)(environ, start_response)
432 432
433 433 # Check if the shadow repo actually exists, in case someone refers
434 434 # to it, and it has been deleted because of successful merge.
435 435 if self.is_shadow_repo and not self.is_shadow_repo_dir:
436 436 log.debug(
437 437 'Shadow repo detected, and shadow repo dir `%s` is missing',
438 438 self.vcs_repo_name)
439 439 return HTTPNotFound()(environ, start_response)
440 440
441 441 # ======================================================================
442 442 # CHECK ANONYMOUS PERMISSION
443 443 # ======================================================================
444 444 if action in ['pull', 'push']:
445 445 anonymous_user = User.get_default_user()
446 446 username = anonymous_user.username
447 447 if anonymous_user.active:
448 448 plugin_cache_active, cache_ttl = self._get_default_cache_ttl()
449 449 # ONLY check permissions if the user is activated
450 450 anonymous_perm = self._check_permission(
451 451 action, anonymous_user, self.acl_repo_name, ip_addr,
452 452 plugin_id='anonymous_access',
453 453 plugin_cache_active=plugin_cache_active,
454 454 cache_ttl=cache_ttl,
455 455 )
456 456 else:
457 457 anonymous_perm = False
458 458
459 459 if not anonymous_user.active or not anonymous_perm:
460 460 if not anonymous_user.active:
461 461 log.debug('Anonymous access is disabled, running '
462 462 'authentication')
463 463
464 464 if not anonymous_perm:
465 465 log.debug('Not enough credentials to access this '
466 466 'repository as anonymous user')
467 467
468 468 username = None
469 469 # ==============================================================
470 470 # DEFAULT PERM FAILED OR ANONYMOUS ACCESS IS DISABLED SO WE
471 471 # NEED TO AUTHENTICATE AND ASK FOR AUTH USER PERMISSIONS
472 472 # ==============================================================
473 473
474 474 # try to auth based on environ, container auth methods
475 475 log.debug('Running PRE-AUTH for container based authentication')
476 476 pre_auth = authenticate(
477 477 '', '', environ, VCS_TYPE, registry=self.registry,
478 478 acl_repo_name=self.acl_repo_name)
479 479 if pre_auth and pre_auth.get('username'):
480 480 username = pre_auth['username']
481 481 log.debug('PRE-AUTH got %s as username', username)
482 482 if pre_auth:
483 483 log.debug('PRE-AUTH successful from %s',
484 484 pre_auth.get('auth_data', {}).get('_plugin'))
485 485
486 486 # If not authenticated by the container, we will run basic auth;
487 487 # before that, inject the calling repo_name for special scope checks
488 488 self.authenticate.acl_repo_name = self.acl_repo_name
489 489
490 490 plugin_cache_active, cache_ttl = False, 0
491 491 plugin = None
492 492 if not username:
493 493 self.authenticate.realm = self.authenticate.get_rc_realm()
494 494
495 495 try:
496 496 auth_result = self.authenticate(environ)
497 497 except (UserCreationError, NotAllowedToCreateUserError) as e:
498 498 log.error(e)
499 499 reason = safe_str(e)
500 500 return HTTPNotAcceptable(reason)(environ, start_response)
501 501
502 502 if isinstance(auth_result, dict):
503 503 AUTH_TYPE.update(environ, 'basic')
504 504 REMOTE_USER.update(environ, auth_result['username'])
505 505 username = auth_result['username']
506 506 plugin = auth_result.get('auth_data', {}).get('_plugin')
507 507 log.info(
508 508 'MAIN-AUTH successful for user `%s` from %s plugin',
509 509 username, plugin)
510 510
511 511 plugin_cache_active, cache_ttl = auth_result.get(
512 512 'auth_data', {}).get('_ttl_cache') or (False, 0)
513 513 else:
514 514 return auth_result.wsgi_application(
515 515 environ, start_response)
516 516
517
518 517 # ==============================================================
519 518 # CHECK PERMISSIONS FOR THIS REQUEST USING GIVEN USERNAME
520 519 # ==============================================================
521 520 user = User.get_by_username(username)
522 521 if not self.valid_and_active_user(user):
523 522 return HTTPForbidden()(environ, start_response)
524 523 username = user.username
525 user.update_lastactivity()
526 meta.Session().commit()
527 524
528 525 # check user attributes for password change flag
529 526 user_obj = user
530 527 if user_obj and user_obj.username != User.DEFAULT_USER and \
531 528 user_obj.user_data.get('force_password_change'):
532 529 reason = 'password change required'
533 530 log.debug('User not allowed to authenticate, %s', reason)
534 531 return HTTPNotAcceptable(reason)(environ, start_response)
535 532
536 533 # check permissions for this repository
537 534 perm = self._check_permission(
538 535 action, user, self.acl_repo_name, ip_addr,
539 536 plugin, plugin_cache_active, cache_ttl)
540 537 if not perm:
541 538 return HTTPForbidden()(environ, start_response)
542 539
543 540 # extras are injected into UI object and later available
544 541 # in hooks executed by RhodeCode
545 542 check_locking = _should_check_locking(environ.get('QUERY_STRING'))
546 543 extras = vcs_operation_context(
547 544 environ, repo_name=self.acl_repo_name, username=username,
548 545 action=action, scm=self.SCM, check_locking=check_locking,
549 546 is_shadow_repo=self.is_shadow_repo
550 547 )
551 548
552 549 # ======================================================================
553 550 # REQUEST HANDLING
554 551 # ======================================================================
555 552 repo_path = os.path.join(
556 553 safe_str(self.base_path), safe_str(self.vcs_repo_name))
557 554 log.debug('Repository path is %s', repo_path)
558 555
559 556 fix_PATH()
560 557
561 558 log.info(
562 559 '%s action on %s repo "%s" by "%s" from %s %s',
563 560 action, self.SCM, safe_str(self.url_repo_name),
564 561 safe_str(username), ip_addr, user_agent)
565 562
566 563 return self._generate_vcs_response(
567 564 environ, start_response, repo_path, extras, action)
568 565
569 566 @initialize_generator
570 567 def _generate_vcs_response(
571 568 self, environ, start_response, repo_path, extras, action):
572 569 """
573 570 Returns a generator for the response content.
574 571
575 572 This method is implemented as a generator, so that it can trigger
576 573 the cache validation after all content sent back to the client. It
577 574 also handles the locking exceptions which will be triggered when
578 575 the first chunk is produced by the underlying WSGI application.
579 576 """
580 577 txn_id = ''
581 578 if 'CONTENT_LENGTH' in environ and environ['REQUEST_METHOD'] == 'MERGE':
582 579 # case for SVN, we want to re-use the callback daemon port
583 580 # so we use the txn_id, for this we peek the body, and still save
584 581 # it as wsgi.input
585 582 data = environ['wsgi.input'].read()
586 583 environ['wsgi.input'] = StringIO(data)
587 584 txn_id = extract_svn_txn_id(self.acl_repo_name, data)
588 585
589 586 callback_daemon, extras = self._prepare_callback_daemon(
590 587 extras, environ, action, txn_id=txn_id)
591 588 log.debug('HOOKS extras is %s', extras)
592 589
593 590 config = self._create_config(extras, self.acl_repo_name)
594 591 app = self._create_wsgi_app(repo_path, self.url_repo_name, config)
595 592 with callback_daemon:
596 593 app.rc_extras = extras
597 594
598 595 try:
599 596 response = app(environ, start_response)
600 597 finally:
601 598 # This statement works together with the decorator
602 599 # "initialize_generator" above. The decorator ensures that
603 600 # we hit the first yield statement before the generator is
604 601 # returned back to the WSGI server. This is needed to
605 602 # ensure that the call to "app" above triggers the
606 603 # needed callback to "start_response" before the
607 604 # generator is actually used.
608 605 yield "__init__"
609 606
610 607 # iter content
611 608 for chunk in response:
612 609 yield chunk
613 610
614 611 try:
615 612 # invalidate cache on push
616 613 if action == 'push':
617 614 self._invalidate_cache(self.url_repo_name)
618 615 finally:
619 616 meta.Session.remove()
620 617
621 618 def _get_repository_name(self, environ):
622 619 """Get repository name out of the environmnent
623 620
624 621 :param environ: WSGI environment
625 622 """
626 623 raise NotImplementedError()
627 624
628 625 def _get_action(self, environ):
629 626 """Map request commands into a pull or push command.
630 627
631 628 :param environ: WSGI environment
632 629 """
633 630 raise NotImplementedError()
634 631
635 632 def _create_wsgi_app(self, repo_path, repo_name, config):
636 633 """Return the WSGI app that will finally handle the request."""
637 634 raise NotImplementedError()
638 635
639 636 def _create_config(self, extras, repo_name):
640 637 """Create a safe config representation."""
641 638 raise NotImplementedError()
642 639
643 640 def _should_use_callback_daemon(self, extras, environ, action):
644 641 return True
645 642
646 643 def _prepare_callback_daemon(self, extras, environ, action, txn_id=None):
647 644 direct_calls = vcs_settings.HOOKS_DIRECT_CALLS
648 645 if not self._should_use_callback_daemon(extras, environ, action):
649 646 # disable callback daemon for actions that don't require it
650 647 direct_calls = True
651 648
652 649 return prepare_callback_daemon(
653 650 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
654 651 host=vcs_settings.HOOKS_HOST, use_direct_calls=direct_calls, txn_id=txn_id)
655 652
656 653
657 654 def _should_check_locking(query_string):
658 655 # this is kind of hacky, but due to how mercurial handles client-server
659 656 # communication, the server sees operations on commits (bookmarks, phases and
660 657 # obsolescence markers) in separate transactions, so we don't want to check
661 658 # locking on those
662 659 return query_string not in ['cmd=listkeys']
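# Editor's illustrative sketch (not part of this changeset):
#
#     _should_check_locking('cmd=listkeys')  # False, skip locking for listkeys
#     _should_check_locking('cmd=unbundle')  # True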