events: add event system for RepoEvents
dan - r375:41f1288c default
@@ -0,0 +1,49 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 from pyramid.threadlocal import get_current_registry
20
21
22 class RhodecodeEvent(object):
23 """
24 Base event class for all Rhodecode events
25 """
26
27
28 def trigger(event):
29 """
30 Helper method to send an event. This wraps the pyramid logic to send an
31 event.
32 """
33 # For the first step we are using pyramids thread locals here. If the
34 # event mechanism works out as a good solution we should think about
35 # passing the registry as an argument to get rid of it.
36 registry = get_current_registry()
37 registry.notify(event)
38
39
40 from rhodecode.events.user import (
41 UserPreCreate, UserPreUpdate, UserRegistered
42 )
43
44 from rhodecode.events.repo import (
45 RepoPreCreateEvent, RepoCreatedEvent,
46 RepoPreDeleteEvent, RepoDeletedEvent,
47 RepoPrePushEvent, RepoPushEvent,
48 RepoPrePullEvent, RepoPullEvent,
49 ) No newline at end of file
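A minimal sketch of how a consumer could subscribe to these events through the Pyramid registry used by the trigger() helper above; the subscriber and includeme() wiring below are illustrative assumptions, not code from this commit:

    from rhodecode import events

    def log_repo_created(event):
        # called with the event instance that events.trigger() passed to
        # registry.notify(); RepoCreatedEvent carries the Repository object
        print('repository created: %s' % event.repo.repo_name)

    def includeme(config):
        # Pyramid dispatches subscribers by the event's class
        config.add_subscriber(log_repo_created, events.RepoCreatedEvent)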
@@ -0,0 +1,115 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 from rhodecode.model.db import Repository, Session
20 from rhodecode.events import RhodecodeEvent
21
22
23 class RepoEvent(RhodecodeEvent):
24 """
25 Base class for events acting on a repository.
26
27 :param repo: a :class:`Repository` instance
28 """
29 def __init__(self, repo):
30 self.repo = repo
31
32
33 class RepoPreCreateEvent(RepoEvent):
34 """
35 An instance of this class is emitted as an :term:`event` before a repo is
36 created.
37
38 :param repo: the :class:`Repository` instance that is about to be created
39 """
40 name = 'repo-pre-create'
41
42
43 class RepoCreatedEvent(RepoEvent):
44 """
45 An instance of this class is emitted as an :term:`event` whenever a repo is
46 created.
47 """
48 name = 'repo-created'
49
50
51 class RepoPreDeleteEvent(RepoEvent):
52 """
53 An instance of this class is emitted as an :term:`event` before a repo is
54 deleted.
55 """
56 name = 'repo-pre-delete'
57
58
59 class RepoDeletedEvent(RepoEvent):
60 """
61 An instance of this class is emitted as an :term:`event` whenever a repo is
62 deleted.
63 """
64 name = 'repo-deleted'
65
66
67 class RepoVCSEvent(RepoEvent):
68 """
69 Base class for events triggered by the VCS
70 """
71 def __init__(self, repo_name, extras):
72 self.repo = Repository.get_by_repo_name(repo_name)
73 if not self.repo:
74 raise Exception('repository named %s does not exist' % repo_name)
75 self.extras = extras
76 super(RepoVCSEvent, self).__init__(self.repo)
77
78
79 class RepoPrePullEvent(RepoVCSEvent):
80 """
81 An instance of this class is emitted as an :term:`event` before commits
82 are pulled from a repo.
83 """
84 name = 'repo-pre-pull'
85
86
87 class RepoPullEvent(RepoVCSEvent):
88 """
89 An instance of this class is emitted as an :term:`event` after commits
90 are pulled from a repo.
91 """
92 name = 'repo-pull'
93
94
95 class RepoPrePushEvent(RepoVCSEvent):
96 """
97 An instance of this class is emitted as an :term:`event` before commits
98 are pushed to a repo.
99 """
100 name = 'repo-pre-push'
101
102
103 class RepoPushEvent(RepoVCSEvent):
104 """
105 An instance of this class is emitted as an :term:`event` after commits
106 are pushed to a repo.
107
108 :param extras: dict of data from proxied VCS actions
109 """
110 name = 'repo-push'
111
112 def __init__(self, repo_name, pushed_commit_ids, extras):
113 super(RepoPushEvent, self).__init__(repo_name, extras)
114 self.pushed_commit_ids = pushed_commit_ids
115
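As a usage sketch (assuming a Repository instance is at hand), emitting one of these events goes through the trigger() helper from rhodecode.events; the helper function name below is hypothetical:

    from rhodecode import events

    def notify_repo_created(repo):
        # repo is a rhodecode.model.db.Repository instance; the event simply
        # wraps it and is dispatched via the Pyramid registry
        events.trigger(events.RepoCreatedEvent(repo))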
@@ -0,0 +1,19 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -0,0 +1,38 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
6 #
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
11 #
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
19 import mock
20 import decorator
21
22
23 def assert_fires_events(*expected_events):
24 """ Testing decorator to check if the function fires events in order """
25 def deco(func):
26 def wrapper(func, *args, **kwargs):
27 with mock.patch('rhodecode.events.trigger') as mock_trigger:
28 result = func(*args, **kwargs)
29
30 captured_events = []
31 for call in mock_trigger.call_args_list:
32 event = call[0][0]
33 captured_events.append(type(event))
34
35 assert set(captured_events) == set(expected_events)
36 return result
37 return decorator.decorator(wrapper, func)
38 return deco No newline at end of file
@@ -0,0 +1,68 b''
1 # -*- coding: utf-8 -*-
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
21 import pytest
22
23 from rhodecode.tests.events.conftest import assert_fires_events
24
25 from rhodecode.lib import hooks_base, utils2
26 from rhodecode.model.repo import RepoModel
27 from rhodecode.events.repo import (
28 RepoPrePullEvent, RepoPullEvent,
29 RepoPrePushEvent, RepoPushEvent,
30 RepoPreCreateEvent, RepoCreatedEvent,
31 RepoPreDeleteEvent, RepoDeletedEvent,
32 )
33
34
35 @pytest.fixture
36 def scm_extras(user_regular, repo_stub):
37 extras = utils2.AttributeDict({
38 'ip': '127.0.0.1',
39 'username': user_regular.username,
40 'action': '',
41 'repository': repo_stub.repo_name,
42 'scm': repo_stub.scm_instance().alias,
43 'config': '',
44 'server_url': 'http://example.com',
45 'make_lock': None,
46 'locked_by': [None],
47 'commit_ids': ['a' * 40] * 3,
48 })
49 return extras
50
51
52 @assert_fires_events(
53 RepoPreCreateEvent, RepoCreatedEvent, RepoPreDeleteEvent, RepoDeletedEvent)
54 def test_create_delete_repo_fires_events(backend):
55 repo = backend.create_repo()
56 RepoModel().delete(repo)
57
58
59 @assert_fires_events(RepoPrePushEvent, RepoPushEvent)
60 def test_push_fires_events(scm_extras):
61 hooks_base.pre_push(scm_extras)
62 hooks_base.post_push(scm_extras)
63
64
65 @assert_fires_events(RepoPrePullEvent, RepoPullEvent)
66 def test_pull_fires_events(scm_extras):
67 hooks_base.pre_pull(scm_extras)
68 hooks_base.post_pull(scm_extras)
NO CONTENT: file renamed from rhodecode/interfaces.py to rhodecode/events/interfaces.py
@@ -1,53 +1,54 b''
1 # Copyright (C) 2016-2016 RhodeCode GmbH
1 # Copyright (C) 2016-2016 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 from zope.interface import implementer
19 from zope.interface import implementer
20 from rhodecode.interfaces import (
20 from rhodecode.events import RhodecodeEvent
21 from rhodecode.events.interfaces import (
21 IUserRegistered, IUserPreCreate, IUserPreUpdate)
22 IUserRegistered, IUserPreCreate, IUserPreUpdate)
22
23
23
24
24 @implementer(IUserRegistered)
25 @implementer(IUserRegistered)
25 class UserRegistered(object):
26 class UserRegistered(RhodecodeEvent):
26 """
27 """
27 An instance of this class is emitted as an :term:`event` whenever a user
28 An instance of this class is emitted as an :term:`event` whenever a user
28 account is registered.
29 account is registered.
29 """
30 """
30 def __init__(self, user, session):
31 def __init__(self, user, session):
31 self.user = user
32 self.user = user
32 self.session = session
33 self.session = session
33
34
34
35
35 @implementer(IUserPreCreate)
36 @implementer(IUserPreCreate)
36 class UserPreCreate(object):
37 class UserPreCreate(RhodecodeEvent):
37 """
38 """
38 An instance of this class is emitted as an :term:`event` before a new user
39 An instance of this class is emitted as an :term:`event` before a new user
39 object is created.
40 object is created.
40 """
41 """
41 def __init__(self, user_data):
42 def __init__(self, user_data):
42 self.user_data = user_data
43 self.user_data = user_data
43
44
44
45
45 @implementer(IUserPreUpdate)
46 @implementer(IUserPreUpdate)
46 class UserPreUpdate(object):
47 class UserPreUpdate(RhodecodeEvent):
47 """
48 """
48 An instance of this class is emitted as an :term:`event` before a user
49 An instance of this class is emitted as an :term:`event` before a user
49 object is updated.
50 object is updated.
50 """
51 """
51 def __init__(self, user, user_data):
52 def __init__(self, user, user_data):
52 self.user = user
53 self.user = user
53 self.user_data = user_data
54 self.user_data = user_data
@@ -1,366 +1,378 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2013-2016 RhodeCode GmbH
3 # Copyright (C) 2013-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Set of hooks run by RhodeCode Enterprise
23 Set of hooks run by RhodeCode Enterprise
24 """
24 """
25
25
26 import os
26 import os
27 import collections
27 import collections
28
28
29 import rhodecode
29 import rhodecode
30 from rhodecode import events
30 from rhodecode.lib import helpers as h
31 from rhodecode.lib import helpers as h
31 from rhodecode.lib.utils import action_logger
32 from rhodecode.lib.utils import action_logger
32 from rhodecode.lib.utils2 import safe_str
33 from rhodecode.lib.utils2 import safe_str
33 from rhodecode.lib.exceptions import HTTPLockedRC, UserCreationError
34 from rhodecode.lib.exceptions import HTTPLockedRC, UserCreationError
34 from rhodecode.model.db import Repository, User
35 from rhodecode.model.db import Repository, User
35
36
36
37
37 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
38 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
38
39
39
40
40 def _get_scm_size(alias, root_path):
41 def _get_scm_size(alias, root_path):
41
42
42 if not alias.startswith('.'):
43 if not alias.startswith('.'):
43 alias += '.'
44 alias += '.'
44
45
45 size_scm, size_root = 0, 0
46 size_scm, size_root = 0, 0
46 for path, unused_dirs, files in os.walk(safe_str(root_path)):
47 for path, unused_dirs, files in os.walk(safe_str(root_path)):
47 if path.find(alias) != -1:
48 if path.find(alias) != -1:
48 for f in files:
49 for f in files:
49 try:
50 try:
50 size_scm += os.path.getsize(os.path.join(path, f))
51 size_scm += os.path.getsize(os.path.join(path, f))
51 except OSError:
52 except OSError:
52 pass
53 pass
53 else:
54 else:
54 for f in files:
55 for f in files:
55 try:
56 try:
56 size_root += os.path.getsize(os.path.join(path, f))
57 size_root += os.path.getsize(os.path.join(path, f))
57 except OSError:
58 except OSError:
58 pass
59 pass
59
60
60 size_scm_f = h.format_byte_size_binary(size_scm)
61 size_scm_f = h.format_byte_size_binary(size_scm)
61 size_root_f = h.format_byte_size_binary(size_root)
62 size_root_f = h.format_byte_size_binary(size_root)
62 size_total_f = h.format_byte_size_binary(size_root + size_scm)
63 size_total_f = h.format_byte_size_binary(size_root + size_scm)
63
64
64 return size_scm_f, size_root_f, size_total_f
65 return size_scm_f, size_root_f, size_total_f
65
66
66
67
67 # actual hooks called by Mercurial internally, and GIT by our Python Hooks
68 # actual hooks called by Mercurial internally, and GIT by our Python Hooks
68 def repo_size(extras):
69 def repo_size(extras):
69 """Present size of repository after push."""
70 """Present size of repository after push."""
70 repo = Repository.get_by_repo_name(extras.repository)
71 repo = Repository.get_by_repo_name(extras.repository)
71 vcs_part = safe_str(u'.%s' % repo.repo_type)
72 vcs_part = safe_str(u'.%s' % repo.repo_type)
72 size_vcs, size_root, size_total = _get_scm_size(vcs_part,
73 size_vcs, size_root, size_total = _get_scm_size(vcs_part,
73 repo.repo_full_path)
74 repo.repo_full_path)
74 msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n'
75 msg = ('Repository `%s` size summary %s:%s repo:%s total:%s\n'
75 % (repo.repo_name, vcs_part, size_vcs, size_root, size_total))
76 % (repo.repo_name, vcs_part, size_vcs, size_root, size_total))
76 return HookResponse(0, msg)
77 return HookResponse(0, msg)
77
78
78
79
79 def pre_push(extras):
80 def pre_push(extras):
80 """
81 """
81 Hook executed before pushing code.
82 Hook executed before pushing code.
82
83
83 It bans pushing when the repository is locked.
84 It bans pushing when the repository is locked.
84 """
85 """
85 usr = User.get_by_username(extras.username)
86 usr = User.get_by_username(extras.username)
86
87
87
88
88 output = ''
89 output = ''
89 if extras.locked_by[0] and usr.user_id != int(extras.locked_by[0]):
90 if extras.locked_by[0] and usr.user_id != int(extras.locked_by[0]):
90 locked_by = User.get(extras.locked_by[0]).username
91 locked_by = User.get(extras.locked_by[0]).username
91 reason = extras.locked_by[2]
92 reason = extras.locked_by[2]
92 # this exception is interpreted in git/hg middlewares and based
93 # this exception is interpreted in git/hg middlewares and based
93 # on that proper return code is server to client
94 # on that proper return code is server to client
94 _http_ret = HTTPLockedRC(
95 _http_ret = HTTPLockedRC(
95 _locked_by_explanation(extras.repository, locked_by, reason))
96 _locked_by_explanation(extras.repository, locked_by, reason))
96 if str(_http_ret.code).startswith('2'):
97 if str(_http_ret.code).startswith('2'):
97 # 2xx Codes don't raise exceptions
98 # 2xx Codes don't raise exceptions
98 output = _http_ret.title
99 output = _http_ret.title
99 else:
100 else:
100 raise _http_ret
101 raise _http_ret
101
102
102 # Calling hooks after checking the lock, for consistent behavior
103 # Calling hooks after checking the lock, for consistent behavior
103 pre_push_extension(repo_store_path=Repository.base_path(), **extras)
104 pre_push_extension(repo_store_path=Repository.base_path(), **extras)
104
105
106 events.trigger(events.RepoPrePushEvent(repo_name=extras.repository,
107 extras=extras))
108
105 return HookResponse(0, output)
109 return HookResponse(0, output)
106
110
107
111
108 def pre_pull(extras):
112 def pre_pull(extras):
109 """
113 """
110 Hook executed before pulling the code.
114 Hook executed before pulling the code.
111
115
112 It bans pulling when the repository is locked.
116 It bans pulling when the repository is locked.
113 """
117 """
114
118
115 output = ''
119 output = ''
116 if extras.locked_by[0]:
120 if extras.locked_by[0]:
117 locked_by = User.get(extras.locked_by[0]).username
121 locked_by = User.get(extras.locked_by[0]).username
118 reason = extras.locked_by[2]
122 reason = extras.locked_by[2]
119 # this exception is interpreted in git/hg middlewares and based
123 # this exception is interpreted in git/hg middlewares and based
120 # on that proper return code is server to client
124 # on that proper return code is server to client
121 _http_ret = HTTPLockedRC(
125 _http_ret = HTTPLockedRC(
122 _locked_by_explanation(extras.repository, locked_by, reason))
126 _locked_by_explanation(extras.repository, locked_by, reason))
123 if str(_http_ret.code).startswith('2'):
127 if str(_http_ret.code).startswith('2'):
124 # 2xx Codes don't raise exceptions
128 # 2xx Codes don't raise exceptions
125 output = _http_ret.title
129 output = _http_ret.title
126 else:
130 else:
127 raise _http_ret
131 raise _http_ret
128
132
129 # Calling hooks after checking the lock, for consistent behavior
133 # Calling hooks after checking the lock, for consistent behavior
130 pre_pull_extension(**extras)
134 pre_pull_extension(**extras)
135 events.trigger(events.RepoPrePullEvent(repo_name=extras.repository,
136 extras=extras))
131
137
132 return HookResponse(0, output)
138 return HookResponse(0, output)
133
139
134
140
135 def post_pull(extras):
141 def post_pull(extras):
136 """Hook executed after client pulls the code."""
142 """Hook executed after client pulls the code."""
137 user = User.get_by_username(extras.username)
143 user = User.get_by_username(extras.username)
138 action = 'pull'
144 action = 'pull'
139 action_logger(user, action, extras.repository, extras.ip, commit=True)
145 action_logger(user, action, extras.repository, extras.ip, commit=True)
140
146
147 events.trigger(events.RepoPullEvent(repo_name=extras.repository,
148 extras=extras))
141 # extension hook call
149 # extension hook call
142 post_pull_extension(**extras)
150 post_pull_extension(**extras)
143
151
144 output = ''
152 output = ''
145 # make lock is a tri state False, True, None. We only make lock on True
153 # make lock is a tri state False, True, None. We only make lock on True
146 if extras.make_lock is True:
154 if extras.make_lock is True:
147 Repository.lock(Repository.get_by_repo_name(extras.repository),
155 Repository.lock(Repository.get_by_repo_name(extras.repository),
148 user.user_id,
156 user.user_id,
149 lock_reason=Repository.LOCK_PULL)
157 lock_reason=Repository.LOCK_PULL)
150 msg = 'Made lock on repo `%s`' % (extras.repository,)
158 msg = 'Made lock on repo `%s`' % (extras.repository,)
151 output += msg
159 output += msg
152
160
153 if extras.locked_by[0]:
161 if extras.locked_by[0]:
154 locked_by = User.get(extras.locked_by[0]).username
162 locked_by = User.get(extras.locked_by[0]).username
155 reason = extras.locked_by[2]
163 reason = extras.locked_by[2]
156 _http_ret = HTTPLockedRC(
164 _http_ret = HTTPLockedRC(
157 _locked_by_explanation(extras.repository, locked_by, reason))
165 _locked_by_explanation(extras.repository, locked_by, reason))
158 if str(_http_ret.code).startswith('2'):
166 if str(_http_ret.code).startswith('2'):
159 # 2xx Codes don't raise exceptions
167 # 2xx Codes don't raise exceptions
160 output += _http_ret.title
168 output += _http_ret.title
161
169
162 return HookResponse(0, output)
170 return HookResponse(0, output)
163
171
164
172
165 def post_push(extras):
173 def post_push(extras):
166 """Hook executed after user pushes to the repository."""
174 """Hook executed after user pushes to the repository."""
167 action_tmpl = extras.action + ':%s'
175 action_tmpl = extras.action + ':%s'
168 commit_ids = extras.commit_ids[:29000]
176 commit_ids = extras.commit_ids[:29000]
169
177
170 action = action_tmpl % ','.join(commit_ids)
178 action = action_tmpl % ','.join(commit_ids)
171 action_logger(
179 action_logger(
172 extras.username, action, extras.repository, extras.ip, commit=True)
180 extras.username, action, extras.repository, extras.ip, commit=True)
173
181
182 events.trigger(events.RepoPushEvent(repo_name=extras.repository,
183 pushed_commit_ids=commit_ids,
184 extras=extras))
185
174 # extension hook call
186 # extension hook call
175 post_push_extension(
187 post_push_extension(
176 repo_store_path=Repository.base_path(),
188 repo_store_path=Repository.base_path(),
177 pushed_revs=commit_ids,
189 pushed_revs=commit_ids,
178 **extras)
190 **extras)
179
191
180 output = ''
192 output = ''
181 # make lock is a tri state False, True, None. We only release lock on False
193 # make lock is a tri state False, True, None. We only release lock on False
182 if extras.make_lock is False:
194 if extras.make_lock is False:
183 Repository.unlock(Repository.get_by_repo_name(extras.repository))
195 Repository.unlock(Repository.get_by_repo_name(extras.repository))
184 msg = 'Released lock on repo `%s`\n' % extras.repository
196 msg = 'Released lock on repo `%s`\n' % extras.repository
185 output += msg
197 output += msg
186
198
187 if extras.locked_by[0]:
199 if extras.locked_by[0]:
188 locked_by = User.get(extras.locked_by[0]).username
200 locked_by = User.get(extras.locked_by[0]).username
189 reason = extras.locked_by[2]
201 reason = extras.locked_by[2]
190 _http_ret = HTTPLockedRC(
202 _http_ret = HTTPLockedRC(
191 _locked_by_explanation(extras.repository, locked_by, reason))
203 _locked_by_explanation(extras.repository, locked_by, reason))
192 # TODO: johbo: if not?
204 # TODO: johbo: if not?
193 if str(_http_ret.code).startswith('2'):
205 if str(_http_ret.code).startswith('2'):
194 # 2xx Codes don't raise exceptions
206 # 2xx Codes don't raise exceptions
195 output += _http_ret.title
207 output += _http_ret.title
196
208
197 output += 'RhodeCode: push completed\n'
209 output += 'RhodeCode: push completed\n'
198
210
199 return HookResponse(0, output)
211 return HookResponse(0, output)
200
212
201
213
202 def _locked_by_explanation(repo_name, user_name, reason):
214 def _locked_by_explanation(repo_name, user_name, reason):
203 message = (
215 message = (
204 'Repository `%s` locked by user `%s`. Reason:`%s`'
216 'Repository `%s` locked by user `%s`. Reason:`%s`'
205 % (repo_name, user_name, reason))
217 % (repo_name, user_name, reason))
206 return message
218 return message
207
219
208
220
209 def check_allowed_create_user(user_dict, created_by, **kwargs):
221 def check_allowed_create_user(user_dict, created_by, **kwargs):
210 # pre create hooks
222 # pre create hooks
211 if pre_create_user.is_active():
223 if pre_create_user.is_active():
212 allowed, reason = pre_create_user(created_by=created_by, **user_dict)
224 allowed, reason = pre_create_user(created_by=created_by, **user_dict)
213 if not allowed:
225 if not allowed:
214 raise UserCreationError(reason)
226 raise UserCreationError(reason)
215
227
216
228
217 class ExtensionCallback(object):
229 class ExtensionCallback(object):
218 """
230 """
219 Forwards a given call to rcextensions, sanitizes keyword arguments.
231 Forwards a given call to rcextensions, sanitizes keyword arguments.
220
232
221 Does check if there is an extension active for that hook. If it is
233 Does check if there is an extension active for that hook. If it is
222 there, it will forward all `kwargs_keys` keyword arguments to the
234 there, it will forward all `kwargs_keys` keyword arguments to the
223 extension callback.
235 extension callback.
224 """
236 """
225
237
226 def __init__(self, hook_name, kwargs_keys):
238 def __init__(self, hook_name, kwargs_keys):
227 self._hook_name = hook_name
239 self._hook_name = hook_name
228 self._kwargs_keys = set(kwargs_keys)
240 self._kwargs_keys = set(kwargs_keys)
229
241
230 def __call__(self, *args, **kwargs):
242 def __call__(self, *args, **kwargs):
231 kwargs_to_pass = dict((key, kwargs[key]) for key in self._kwargs_keys)
243 kwargs_to_pass = dict((key, kwargs[key]) for key in self._kwargs_keys)
232 callback = self._get_callback()
244 callback = self._get_callback()
233 if callback:
245 if callback:
234 return callback(**kwargs_to_pass)
246 return callback(**kwargs_to_pass)
235
247
236 def is_active(self):
248 def is_active(self):
237 return hasattr(rhodecode.EXTENSIONS, self._hook_name)
249 return hasattr(rhodecode.EXTENSIONS, self._hook_name)
238
250
239 def _get_callback(self):
251 def _get_callback(self):
240 return getattr(rhodecode.EXTENSIONS, self._hook_name, None)
252 return getattr(rhodecode.EXTENSIONS, self._hook_name, None)
241
253
242
254
243 pre_pull_extension = ExtensionCallback(
255 pre_pull_extension = ExtensionCallback(
244 hook_name='PRE_PULL_HOOK',
256 hook_name='PRE_PULL_HOOK',
245 kwargs_keys=(
257 kwargs_keys=(
246 'server_url', 'config', 'scm', 'username', 'ip', 'action',
258 'server_url', 'config', 'scm', 'username', 'ip', 'action',
247 'repository'))
259 'repository'))
248
260
249
261
250 post_pull_extension = ExtensionCallback(
262 post_pull_extension = ExtensionCallback(
251 hook_name='PULL_HOOK',
263 hook_name='PULL_HOOK',
252 kwargs_keys=(
264 kwargs_keys=(
253 'server_url', 'config', 'scm', 'username', 'ip', 'action',
265 'server_url', 'config', 'scm', 'username', 'ip', 'action',
254 'repository'))
266 'repository'))
255
267
256
268
257 pre_push_extension = ExtensionCallback(
269 pre_push_extension = ExtensionCallback(
258 hook_name='PRE_PUSH_HOOK',
270 hook_name='PRE_PUSH_HOOK',
259 kwargs_keys=(
271 kwargs_keys=(
260 'server_url', 'config', 'scm', 'username', 'ip', 'action',
272 'server_url', 'config', 'scm', 'username', 'ip', 'action',
261 'repository', 'repo_store_path'))
273 'repository', 'repo_store_path'))
262
274
263
275
264 post_push_extension = ExtensionCallback(
276 post_push_extension = ExtensionCallback(
265 hook_name='PUSH_HOOK',
277 hook_name='PUSH_HOOK',
266 kwargs_keys=(
278 kwargs_keys=(
267 'server_url', 'config', 'scm', 'username', 'ip', 'action',
279 'server_url', 'config', 'scm', 'username', 'ip', 'action',
268 'repository', 'repo_store_path', 'pushed_revs'))
280 'repository', 'repo_store_path', 'pushed_revs'))
269
281
270
282
271 pre_create_user = ExtensionCallback(
283 pre_create_user = ExtensionCallback(
272 hook_name='PRE_CREATE_USER_HOOK',
284 hook_name='PRE_CREATE_USER_HOOK',
273 kwargs_keys=(
285 kwargs_keys=(
274 'username', 'password', 'email', 'firstname', 'lastname', 'active',
286 'username', 'password', 'email', 'firstname', 'lastname', 'active',
275 'admin', 'created_by'))
287 'admin', 'created_by'))
276
288
277
289
278 log_create_pull_request = ExtensionCallback(
290 log_create_pull_request = ExtensionCallback(
279 hook_name='CREATE_PULL_REQUEST',
291 hook_name='CREATE_PULL_REQUEST',
280 kwargs_keys=(
292 kwargs_keys=(
281 'server_url', 'config', 'scm', 'username', 'ip', 'action',
293 'server_url', 'config', 'scm', 'username', 'ip', 'action',
282 'repository', 'pull_request_id', 'url', 'title', 'description',
294 'repository', 'pull_request_id', 'url', 'title', 'description',
283 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
295 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
284 'mergeable', 'source', 'target', 'author', 'reviewers'))
296 'mergeable', 'source', 'target', 'author', 'reviewers'))
285
297
286
298
287 log_merge_pull_request = ExtensionCallback(
299 log_merge_pull_request = ExtensionCallback(
288 hook_name='MERGE_PULL_REQUEST',
300 hook_name='MERGE_PULL_REQUEST',
289 kwargs_keys=(
301 kwargs_keys=(
290 'server_url', 'config', 'scm', 'username', 'ip', 'action',
302 'server_url', 'config', 'scm', 'username', 'ip', 'action',
291 'repository', 'pull_request_id', 'url', 'title', 'description',
303 'repository', 'pull_request_id', 'url', 'title', 'description',
292 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
304 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
293 'mergeable', 'source', 'target', 'author', 'reviewers'))
305 'mergeable', 'source', 'target', 'author', 'reviewers'))
294
306
295
307
296 log_close_pull_request = ExtensionCallback(
308 log_close_pull_request = ExtensionCallback(
297 hook_name='CLOSE_PULL_REQUEST',
309 hook_name='CLOSE_PULL_REQUEST',
298 kwargs_keys=(
310 kwargs_keys=(
299 'server_url', 'config', 'scm', 'username', 'ip', 'action',
311 'server_url', 'config', 'scm', 'username', 'ip', 'action',
300 'repository', 'pull_request_id', 'url', 'title', 'description',
312 'repository', 'pull_request_id', 'url', 'title', 'description',
301 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
313 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
302 'mergeable', 'source', 'target', 'author', 'reviewers'))
314 'mergeable', 'source', 'target', 'author', 'reviewers'))
303
315
304
316
305 log_review_pull_request = ExtensionCallback(
317 log_review_pull_request = ExtensionCallback(
306 hook_name='REVIEW_PULL_REQUEST',
318 hook_name='REVIEW_PULL_REQUEST',
307 kwargs_keys=(
319 kwargs_keys=(
308 'server_url', 'config', 'scm', 'username', 'ip', 'action',
320 'server_url', 'config', 'scm', 'username', 'ip', 'action',
309 'repository', 'pull_request_id', 'url', 'title', 'description',
321 'repository', 'pull_request_id', 'url', 'title', 'description',
310 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
322 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
311 'mergeable', 'source', 'target', 'author', 'reviewers'))
323 'mergeable', 'source', 'target', 'author', 'reviewers'))
312
324
313
325
314 log_update_pull_request = ExtensionCallback(
326 log_update_pull_request = ExtensionCallback(
315 hook_name='UPDATE_PULL_REQUEST',
327 hook_name='UPDATE_PULL_REQUEST',
316 kwargs_keys=(
328 kwargs_keys=(
317 'server_url', 'config', 'scm', 'username', 'ip', 'action',
329 'server_url', 'config', 'scm', 'username', 'ip', 'action',
318 'repository', 'pull_request_id', 'url', 'title', 'description',
330 'repository', 'pull_request_id', 'url', 'title', 'description',
319 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
331 'status', 'created_on', 'updated_on', 'commit_ids', 'review_status',
320 'mergeable', 'source', 'target', 'author', 'reviewers'))
332 'mergeable', 'source', 'target', 'author', 'reviewers'))
321
333
322
334
323 log_create_user = ExtensionCallback(
335 log_create_user = ExtensionCallback(
324 hook_name='CREATE_USER_HOOK',
336 hook_name='CREATE_USER_HOOK',
325 kwargs_keys=(
337 kwargs_keys=(
326 'username', 'full_name_or_username', 'full_contact', 'user_id',
338 'username', 'full_name_or_username', 'full_contact', 'user_id',
327 'name', 'firstname', 'short_contact', 'admin', 'lastname',
339 'name', 'firstname', 'short_contact', 'admin', 'lastname',
328 'ip_addresses', 'extern_type', 'extern_name',
340 'ip_addresses', 'extern_type', 'extern_name',
329 'email', 'api_key', 'api_keys', 'last_login',
341 'email', 'api_key', 'api_keys', 'last_login',
330 'full_name', 'active', 'password', 'emails',
342 'full_name', 'active', 'password', 'emails',
331 'inherit_default_permissions', 'created_by', 'created_on'))
343 'inherit_default_permissions', 'created_by', 'created_on'))
332
344
333
345
334 log_delete_user = ExtensionCallback(
346 log_delete_user = ExtensionCallback(
335 hook_name='DELETE_USER_HOOK',
347 hook_name='DELETE_USER_HOOK',
336 kwargs_keys=(
348 kwargs_keys=(
337 'username', 'full_name_or_username', 'full_contact', 'user_id',
349 'username', 'full_name_or_username', 'full_contact', 'user_id',
338 'name', 'firstname', 'short_contact', 'admin', 'lastname',
350 'name', 'firstname', 'short_contact', 'admin', 'lastname',
339 'ip_addresses',
351 'ip_addresses',
340 'email', 'api_key', 'last_login',
352 'email', 'api_key', 'last_login',
341 'full_name', 'active', 'password', 'emails',
353 'full_name', 'active', 'password', 'emails',
342 'inherit_default_permissions', 'deleted_by'))
354 'inherit_default_permissions', 'deleted_by'))
343
355
344
356
345 log_create_repository = ExtensionCallback(
357 log_create_repository = ExtensionCallback(
346 hook_name='CREATE_REPO_HOOK',
358 hook_name='CREATE_REPO_HOOK',
347 kwargs_keys=(
359 kwargs_keys=(
348 'repo_name', 'repo_type', 'description', 'private', 'created_on',
360 'repo_name', 'repo_type', 'description', 'private', 'created_on',
349 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
361 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
350 'clone_uri', 'fork_id', 'group_id', 'created_by'))
362 'clone_uri', 'fork_id', 'group_id', 'created_by'))
351
363
352
364
353 log_delete_repository = ExtensionCallback(
365 log_delete_repository = ExtensionCallback(
354 hook_name='DELETE_REPO_HOOK',
366 hook_name='DELETE_REPO_HOOK',
355 kwargs_keys=(
367 kwargs_keys=(
356 'repo_name', 'repo_type', 'description', 'private', 'created_on',
368 'repo_name', 'repo_type', 'description', 'private', 'created_on',
357 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
369 'enable_downloads', 'repo_id', 'user_id', 'enable_statistics',
358 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on'))
370 'clone_uri', 'fork_id', 'group_id', 'deleted_by', 'deleted_on'))
359
371
360
372
361 log_create_repository_group = ExtensionCallback(
373 log_create_repository_group = ExtensionCallback(
362 hook_name='CREATE_REPO_GROUP_HOOK',
374 hook_name='CREATE_REPO_GROUP_HOOK',
363 kwargs_keys=(
375 kwargs_keys=(
364 'group_name', 'group_parent_id', 'group_description',
376 'group_name', 'group_parent_id', 'group_description',
365 'group_id', 'user_id', 'created_by', 'created_on',
377 'group_id', 'user_id', 'created_by', 'created_on',
366 'enable_locking'))
378 'enable_locking'))
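With the events.trigger() calls added to pre_push/pre_pull/post_push/post_pull above, an integration can observe VCS activity without touching the hooks themselves. A sketch of a hypothetical push auditor (the subscriber below is an assumption, not shipped code):

    from rhodecode import events

    def audit_push(event):
        # RepoPushEvent exposes the Repository, the pushed commit ids and the
        # raw hook 'extras' dict assembled by the middleware
        commits = ', '.join(event.pushed_commit_ids)
        print('push to %s by %s: %s' % (
            event.repo.repo_name, event.extras['username'], commits))

    # registered e.g. inside an includeme():
    # config.add_subscriber(audit_push, events.RepoPushEvent)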
@@ -1,164 +1,153 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 The application's model objects
22 The application's model objects
23
23
24 :example:
24 :example:
25
25
26 .. code-block:: python
26 .. code-block:: python
27
27
28 from paste.deploy import appconfig
28 from paste.deploy import appconfig
29 from pylons import config
29 from pylons import config
30 from sqlalchemy import engine_from_config
30 from sqlalchemy import engine_from_config
31 from rhodecode.config.environment import load_environment
31 from rhodecode.config.environment import load_environment
32
32
33 conf = appconfig('config:development.ini', relative_to = './../../')
33 conf = appconfig('config:development.ini', relative_to = './../../')
34 load_environment(conf.global_conf, conf.local_conf)
34 load_environment(conf.global_conf, conf.local_conf)
35
35
36 engine = engine_from_config(config, 'sqlalchemy.')
36 engine = engine_from_config(config, 'sqlalchemy.')
37 init_model(engine)
37 init_model(engine)
38 # RUN YOUR CODE HERE
38 # RUN YOUR CODE HERE
39
39
40 """
40 """
41
41
42
42
43 import logging
43 import logging
44
44
45 from pylons import config
45 from pylons import config
46 from pyramid.threadlocal import get_current_registry
46 from pyramid.threadlocal import get_current_registry
47
47
48 from rhodecode.model import meta, db
48 from rhodecode.model import meta, db
49 from rhodecode.lib.utils2 import obfuscate_url_pw, get_encryption_key
49 from rhodecode.lib.utils2 import obfuscate_url_pw, get_encryption_key
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 def init_model(engine, encryption_key=None):
54 def init_model(engine, encryption_key=None):
55 """
55 """
56 Initializes db session, bind the engine with the metadata,
56 Initializes db session, bind the engine with the metadata,
57 Call this before using any of the tables or classes in the model,
57 Call this before using any of the tables or classes in the model,
58 preferably once in application start
58 preferably once in application start
59
59
60 :param engine: engine to bind to
60 :param engine: engine to bind to
61 """
61 """
62 engine_str = obfuscate_url_pw(str(engine.url))
62 engine_str = obfuscate_url_pw(str(engine.url))
63 log.info("initializing db for %s", engine_str)
63 log.info("initializing db for %s", engine_str)
64 meta.Base.metadata.bind = engine
64 meta.Base.metadata.bind = engine
65 db.ENCRYPTION_KEY = encryption_key
65 db.ENCRYPTION_KEY = encryption_key
66
66
67
67
68 def init_model_encryption(migration_models):
68 def init_model_encryption(migration_models):
69 migration_models.ENCRYPTION_KEY = get_encryption_key(config)
69 migration_models.ENCRYPTION_KEY = get_encryption_key(config)
70 db.ENCRYPTION_KEY = get_encryption_key(config)
70 db.ENCRYPTION_KEY = get_encryption_key(config)
71
71
72
72
73 class BaseModel(object):
73 class BaseModel(object):
74 """
74 """
75 Base Model for all RhodeCode models, it adds sql alchemy session
75 Base Model for all RhodeCode models, it adds sql alchemy session
76 into instance of model
76 into instance of model
77
77
78 :param sa: If passed it reuses this session instead of creating a new one
78 :param sa: If passed it reuses this session instead of creating a new one
79 """
79 """
80
80
81 cls = None # override in child class
81 cls = None # override in child class
82
82
83 def __init__(self, sa=None):
83 def __init__(self, sa=None):
84 if sa is not None:
84 if sa is not None:
85 self.sa = sa
85 self.sa = sa
86 else:
86 else:
87 self.sa = meta.Session()
87 self.sa = meta.Session()
88
88
89 def _get_instance(self, cls, instance, callback=None):
89 def _get_instance(self, cls, instance, callback=None):
90 """
90 """
91 Gets instance of given cls using some simple lookup mechanism.
91 Gets instance of given cls using some simple lookup mechanism.
92
92
93 :param cls: class to fetch
93 :param cls: class to fetch
94 :param instance: int or Instance
94 :param instance: int or Instance
95 :param callback: callback to call if all lookups failed
95 :param callback: callback to call if all lookups failed
96 """
96 """
97
97
98 if isinstance(instance, cls):
98 if isinstance(instance, cls):
99 return instance
99 return instance
100 elif isinstance(instance, (int, long)):
100 elif isinstance(instance, (int, long)):
101 return cls.get(instance)
101 return cls.get(instance)
102 else:
102 else:
103 if instance:
103 if instance:
104 if callback is None:
104 if callback is None:
105 raise Exception(
105 raise Exception(
106 'given object must be int, long or Instance of %s '
106 'given object must be int, long or Instance of %s '
107 'got %s, no callback provided' % (cls, type(instance))
107 'got %s, no callback provided' % (cls, type(instance))
108 )
108 )
109 else:
109 else:
110 return callback(instance)
110 return callback(instance)
111
111
112 def _get_user(self, user):
112 def _get_user(self, user):
113 """
113 """
114 Helper method to get user by ID, or username fallback
114 Helper method to get user by ID, or username fallback
115
115
116 :param user: UserID, username, or User instance
116 :param user: UserID, username, or User instance
117 """
117 """
118 return self._get_instance(
118 return self._get_instance(
119 db.User, user, callback=db.User.get_by_username)
119 db.User, user, callback=db.User.get_by_username)
120
120
121 def _get_user_group(self, user_group):
121 def _get_user_group(self, user_group):
122 """
122 """
123 Helper method to get user by ID, or username fallback
123 Helper method to get user by ID, or username fallback
124
124
125 :param user_group: UserGroupID, user_group_name, or UserGroup instance
125 :param user_group: UserGroupID, user_group_name, or UserGroup instance
126 """
126 """
127 return self._get_instance(
127 return self._get_instance(
128 db.UserGroup, user_group, callback=db.UserGroup.get_by_group_name)
128 db.UserGroup, user_group, callback=db.UserGroup.get_by_group_name)
129
129
130 def _get_repo(self, repository):
130 def _get_repo(self, repository):
131 """
131 """
132 Helper method to get repository by ID, or repository name
132 Helper method to get repository by ID, or repository name
133
133
134 :param repository: RepoID, repository name or Repository Instance
134 :param repository: RepoID, repository name or Repository Instance
135 """
135 """
136 return self._get_instance(
136 return self._get_instance(
137 db.Repository, repository, callback=db.Repository.get_by_repo_name)
137 db.Repository, repository, callback=db.Repository.get_by_repo_name)
138
138
139 def _get_perm(self, permission):
139 def _get_perm(self, permission):
140 """
140 """
141 Helper method to get permission by ID, or permission name
141 Helper method to get permission by ID, or permission name
142
142
143 :param permission: PermissionID, permission_name or Permission instance
143 :param permission: PermissionID, permission_name or Permission instance
144 """
144 """
145 return self._get_instance(
145 return self._get_instance(
146 db.Permission, permission, callback=db.Permission.get_by_key)
146 db.Permission, permission, callback=db.Permission.get_by_key)
147
147
148 def send_event(self, event):
149 """
150 Helper method to send an event. This wraps the pyramid logic to send an
151 event.
152 """
153 # For the first step we are using pyramids thread locals here. If the
154 # event mechanism works out as a good solution we should think about
155 # passing the registry into the constructor to get rid of it.
156 registry = get_current_registry()
157 registry.notify(event)
158
159 @classmethod
148 @classmethod
160 def get_all(cls):
149 def get_all(cls):
161 """
150 """
162 Returns all instances of what is defined in `cls` class variable
151 Returns all instances of what is defined in `cls` class variable
163 """
152 """
164 return cls.cls.getAll()
153 return cls.cls.getAll()
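Since send_event() is removed from BaseModel here, model code that previously called self.send_event(event) now uses the module-level helper instead; a minimal sketch of the replacement call site (the surrounding model and method are hypothetical):

    from rhodecode import events
    from rhodecode.model import BaseModel

    class ExampleModel(BaseModel):
        def create(self, repo):
            # was: self.send_event(events.RepoCreatedEvent(repo))
            events.trigger(events.RepoCreatedEvent(repo))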
@@ -1,924 +1,931 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Repository model for rhodecode
22 Repository model for rhodecode
23 """
23 """
24
24
25 import logging
25 import logging
26 import os
26 import os
27 import re
27 import re
28 import shutil
28 import shutil
29 import time
29 import time
30 import traceback
30 import traceback
31 from datetime import datetime
31 from datetime import datetime
32
32
33 from sqlalchemy.sql import func
33 from sqlalchemy.sql import func
34 from sqlalchemy.sql.expression import true, or_
34 from sqlalchemy.sql.expression import true, or_
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 from rhodecode import events
37 from rhodecode.lib import helpers as h
38 from rhodecode.lib import helpers as h
38 from rhodecode.lib.auth import HasUserGroupPermissionAny
39 from rhodecode.lib.auth import HasUserGroupPermissionAny
39 from rhodecode.lib.caching_query import FromCache
40 from rhodecode.lib.caching_query import FromCache
40 from rhodecode.lib.exceptions import AttachedForksError
41 from rhodecode.lib.exceptions import AttachedForksError
41 from rhodecode.lib.hooks_base import log_delete_repository
42 from rhodecode.lib.hooks_base import log_delete_repository
42 from rhodecode.lib.utils import make_db_config
43 from rhodecode.lib.utils import make_db_config
43 from rhodecode.lib.utils2 import (
44 from rhodecode.lib.utils2 import (
44 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
45 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
45 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
46 get_current_rhodecode_user, safe_int, datetime_to_time, action_logger_generic)
46 from rhodecode.lib.vcs.backends import get_backend
47 from rhodecode.lib.vcs.backends import get_backend
47 from rhodecode.model import BaseModel
48 from rhodecode.model import BaseModel
48 from rhodecode.model.db import (
49 from rhodecode.model.db import (
49 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
50 Repository, UserRepoToPerm, UserGroupRepoToPerm, UserRepoGroupToPerm,
50 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
51 UserGroupRepoGroupToPerm, User, Permission, Statistics, UserGroup,
51 RepoGroup, RepositoryField)
52 RepoGroup, RepositoryField)
52 from rhodecode.model.scm import UserGroupList
53 from rhodecode.model.scm import UserGroupList
53 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
54
55
55
56
56 log = logging.getLogger(__name__)
57 log = logging.getLogger(__name__)
57
58
58
59
59 class RepoModel(BaseModel):
60 class RepoModel(BaseModel):
60
61
61 cls = Repository
62 cls = Repository
62
63
63 def _get_user_group(self, users_group):
64 def _get_user_group(self, users_group):
64 return self._get_instance(UserGroup, users_group,
65 return self._get_instance(UserGroup, users_group,
65 callback=UserGroup.get_by_group_name)
66 callback=UserGroup.get_by_group_name)
66
67
67 def _get_repo_group(self, repo_group):
68 def _get_repo_group(self, repo_group):
68 return self._get_instance(RepoGroup, repo_group,
69 return self._get_instance(RepoGroup, repo_group,
69 callback=RepoGroup.get_by_group_name)
70 callback=RepoGroup.get_by_group_name)
70
71
71 def _create_default_perms(self, repository, private):
72 def _create_default_perms(self, repository, private):
72 # create default permission
73 # create default permission
73 default = 'repository.read'
74 default = 'repository.read'
74 def_user = User.get_default_user()
75 def_user = User.get_default_user()
75 for p in def_user.user_perms:
76 for p in def_user.user_perms:
76 if p.permission.permission_name.startswith('repository.'):
77 if p.permission.permission_name.startswith('repository.'):
77 default = p.permission.permission_name
78 default = p.permission.permission_name
78 break
79 break
79
80
80 default_perm = 'repository.none' if private else default
81 default_perm = 'repository.none' if private else default
81
82
82 repo_to_perm = UserRepoToPerm()
83 repo_to_perm = UserRepoToPerm()
83 repo_to_perm.permission = Permission.get_by_key(default_perm)
84 repo_to_perm.permission = Permission.get_by_key(default_perm)
84
85
85 repo_to_perm.repository = repository
86 repo_to_perm.repository = repository
86 repo_to_perm.user_id = def_user.user_id
87 repo_to_perm.user_id = def_user.user_id
87
88
88 return repo_to_perm
89 return repo_to_perm
89
90
90 @LazyProperty
91 @LazyProperty
91 def repos_path(self):
92 def repos_path(self):
92 """
93 """
93 Gets the repositories root path from database
94 Gets the repositories root path from database
94 """
95 """
95 settings_model = VcsSettingsModel(sa=self.sa)
96 settings_model = VcsSettingsModel(sa=self.sa)
96 return settings_model.get_repos_location()
97 return settings_model.get_repos_location()
97
98
98 def get(self, repo_id, cache=False):
99 def get(self, repo_id, cache=False):
99 repo = self.sa.query(Repository) \
100 repo = self.sa.query(Repository) \
100 .filter(Repository.repo_id == repo_id)
101 .filter(Repository.repo_id == repo_id)
101
102
102 if cache:
103 if cache:
103 repo = repo.options(FromCache("sql_cache_short",
104 repo = repo.options(FromCache("sql_cache_short",
104 "get_repo_%s" % repo_id))
105 "get_repo_%s" % repo_id))
105 return repo.scalar()
106 return repo.scalar()
106
107
107 def get_repo(self, repository):
108 def get_repo(self, repository):
108 return self._get_repo(repository)
109 return self._get_repo(repository)
109
110
110 def get_by_repo_name(self, repo_name, cache=False):
111 def get_by_repo_name(self, repo_name, cache=False):
111 repo = self.sa.query(Repository) \
112 repo = self.sa.query(Repository) \
112 .filter(Repository.repo_name == repo_name)
113 .filter(Repository.repo_name == repo_name)
113
114
114 if cache:
115 if cache:
115 repo = repo.options(FromCache("sql_cache_short",
116 repo = repo.options(FromCache("sql_cache_short",
116 "get_repo_%s" % repo_name))
117 "get_repo_%s" % repo_name))
117 return repo.scalar()
118 return repo.scalar()
118
119
119 def _extract_id_from_repo_name(self, repo_name):
120 def _extract_id_from_repo_name(self, repo_name):
120 if repo_name.startswith('/'):
121 if repo_name.startswith('/'):
121 repo_name = repo_name.lstrip('/')
122 repo_name = repo_name.lstrip('/')
122 by_id_match = re.match(r'^_(\d{1,})', repo_name)
123 by_id_match = re.match(r'^_(\d{1,})', repo_name)
123 if by_id_match:
124 if by_id_match:
124 return by_id_match.groups()[0]
125 return by_id_match.groups()[0]
125
126
126 def get_repo_by_id(self, repo_name):
127 def get_repo_by_id(self, repo_name):
127 """
128 """
128 Extracts the repo id from special urls and returns the matching repo.
129 Extracts the repo id from special urls and returns the matching repo.
129 Example url is _11/repo_name
130 Example url is _11/repo_name
130
131
131 :param repo_name:
132 :param repo_name:
132 :return: repo object if matched else None
133 :return: repo object if matched else None
133 """
134 """
134 try:
135 try:
135 _repo_id = self._extract_id_from_repo_name(repo_name)
136 _repo_id = self._extract_id_from_repo_name(repo_name)
136 if _repo_id:
137 if _repo_id:
137 return self.get(_repo_id)
138 return self.get(_repo_id)
138 except Exception:
139 except Exception:
139 log.exception('Failed to extract repo_name from URL')
140 log.exception('Failed to extract repo_name from URL')
140
141
141 return None
142 return None
142
143
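A quick illustration of the `_11/repo_name` URL convention handled by the two methods above (a hypothetical usage sketch, assuming the model is instantiated as usual):

    model = RepoModel()
    model._extract_id_from_repo_name('/_11/some/repo')  # -> '11' (leading slash stripped)
    model._extract_id_from_repo_name('nested/repo')      # -> None (no _<id> prefix)
    model.get_repo_by_id('_11/some/repo')                # -> Repository with repo_id 11, or None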
143 def get_users(self, name_contains=None, limit=20, only_active=True):
144 def get_users(self, name_contains=None, limit=20, only_active=True):
144 # TODO: mikhail: move this method to the UserModel.
145 # TODO: mikhail: move this method to the UserModel.
145 query = self.sa.query(User)
146 query = self.sa.query(User)
146 if only_active:
147 if only_active:
147 query = query.filter(User.active == true())
148 query = query.filter(User.active == true())
148
149
149 if name_contains:
150 if name_contains:
150 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
151 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
151 query = query.filter(
152 query = query.filter(
152 or_(
153 or_(
153 User.name.ilike(ilike_expression),
154 User.name.ilike(ilike_expression),
154 User.lastname.ilike(ilike_expression),
155 User.lastname.ilike(ilike_expression),
155 User.username.ilike(ilike_expression)
156 User.username.ilike(ilike_expression)
156 )
157 )
157 )
158 )
158 query = query.limit(limit)
159 query = query.limit(limit)
159 users = query.all()
160 users = query.all()
160
161
161 _users = [
162 _users = [
162 {
163 {
163 'id': user.user_id,
164 'id': user.user_id,
164 'first_name': user.name,
165 'first_name': user.name,
165 'last_name': user.lastname,
166 'last_name': user.lastname,
166 'username': user.username,
167 'username': user.username,
167 'icon_link': h.gravatar_url(user.email, 14),
168 'icon_link': h.gravatar_url(user.email, 14),
168 'value_display': h.person(user.email),
169 'value_display': h.person(user.email),
169 'value': user.username,
170 'value': user.username,
170 'value_type': 'user',
171 'value_type': 'user',
171 'active': user.active,
172 'active': user.active,
172 }
173 }
173 for user in users
174 for user in users
174 ]
175 ]
175 return _users
176 return _users
176
177
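A sketch of how the autocomplete-style payload returned by get_users might be consumed; the dict keys match the ones built above, the search term is illustrative:

    # Hypothetical call; returns at most `limit` active users matching the term.
    users = RepoModel().get_users(name_contains='ann', limit=5)
    for u in users:
        print(u['username'], u['first_name'], u['last_name'], u['active'])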
177 def get_user_groups(self, name_contains=None, limit=20, only_active=True):
178 def get_user_groups(self, name_contains=None, limit=20, only_active=True):
178 # TODO: mikhail: move this method to the UserGroupModel.
179 # TODO: mikhail: move this method to the UserGroupModel.
179 query = self.sa.query(UserGroup)
180 query = self.sa.query(UserGroup)
180 if only_active:
181 if only_active:
181 query = query.filter(UserGroup.users_group_active == true())
182 query = query.filter(UserGroup.users_group_active == true())
182
183
183 if name_contains:
184 if name_contains:
184 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
185 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
185 query = query.filter(
186 query = query.filter(
186 UserGroup.users_group_name.ilike(ilike_expression))\
187 UserGroup.users_group_name.ilike(ilike_expression))\
187 .order_by(func.length(UserGroup.users_group_name))\
188 .order_by(func.length(UserGroup.users_group_name))\
188 .order_by(UserGroup.users_group_name)
189 .order_by(UserGroup.users_group_name)
189
190
190 query = query.limit(limit)
191 query = query.limit(limit)
191 user_groups = query.all()
192 user_groups = query.all()
192 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
193 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
193 user_groups = UserGroupList(user_groups, perm_set=perm_set)
194 user_groups = UserGroupList(user_groups, perm_set=perm_set)
194
195
195 _groups = [
196 _groups = [
196 {
197 {
197 'id': group.users_group_id,
198 'id': group.users_group_id,
198 # TODO: marcink figure out a way to generate the url for the
199 # TODO: marcink figure out a way to generate the url for the
199 # icon
200 # icon
200 'icon_link': '',
201 'icon_link': '',
201 'value_display': 'Group: %s (%d members)' % (
202 'value_display': 'Group: %s (%d members)' % (
202 group.users_group_name, len(group.members),),
203 group.users_group_name, len(group.members),),
203 'value': group.users_group_name,
204 'value': group.users_group_name,
204 'value_type': 'user_group',
205 'value_type': 'user_group',
205 'active': group.users_group_active,
206 'active': group.users_group_active,
206 }
207 }
207 for group in user_groups
208 for group in user_groups
208 ]
209 ]
209 return _groups
210 return _groups
210
211
211 @classmethod
212 @classmethod
212 def update_repoinfo(cls, repositories=None):
213 def update_repoinfo(cls, repositories=None):
213 if not repositories:
214 if not repositories:
214 repositories = Repository.getAll()
215 repositories = Repository.getAll()
215 for repo in repositories:
216 for repo in repositories:
216 repo.update_commit_cache()
217 repo.update_commit_cache()
217
218
218 def get_repos_as_dict(self, repo_list=None, admin=False,
219 def get_repos_as_dict(self, repo_list=None, admin=False,
219 super_user_actions=False):
220 super_user_actions=False):
220
221
221 from rhodecode.lib.utils import PartialRenderer
222 from rhodecode.lib.utils import PartialRenderer
222 _render = PartialRenderer('data_table/_dt_elements.html')
223 _render = PartialRenderer('data_table/_dt_elements.html')
223 c = _render.c
224 c = _render.c
224
225
225 def quick_menu(repo_name):
226 def quick_menu(repo_name):
226 return _render('quick_menu', repo_name)
227 return _render('quick_menu', repo_name)
227
228
228 def repo_lnk(name, rtype, rstate, private, fork_of):
229 def repo_lnk(name, rtype, rstate, private, fork_of):
229 return _render('repo_name', name, rtype, rstate, private, fork_of,
230 return _render('repo_name', name, rtype, rstate, private, fork_of,
230 short_name=not admin, admin=False)
231 short_name=not admin, admin=False)
231
232
232 def last_change(last_change):
233 def last_change(last_change):
233 return _render("last_change", last_change)
234 return _render("last_change", last_change)
234
235
235 def rss_lnk(repo_name):
236 def rss_lnk(repo_name):
236 return _render("rss", repo_name)
237 return _render("rss", repo_name)
237
238
238 def atom_lnk(repo_name):
239 def atom_lnk(repo_name):
239 return _render("atom", repo_name)
240 return _render("atom", repo_name)
240
241
241 def last_rev(repo_name, cs_cache):
242 def last_rev(repo_name, cs_cache):
242 return _render('revision', repo_name, cs_cache.get('revision'),
243 return _render('revision', repo_name, cs_cache.get('revision'),
243 cs_cache.get('raw_id'), cs_cache.get('author'),
244 cs_cache.get('raw_id'), cs_cache.get('author'),
244 cs_cache.get('message'))
245 cs_cache.get('message'))
245
246
246 def desc(desc):
247 def desc(desc):
247 if c.visual.stylify_metatags:
248 if c.visual.stylify_metatags:
248 return h.urlify_text(h.escaped_stylize(h.truncate(desc, 60)))
249 return h.urlify_text(h.escaped_stylize(h.truncate(desc, 60)))
249 else:
250 else:
250 return h.urlify_text(h.html_escape(h.truncate(desc, 60)))
251 return h.urlify_text(h.html_escape(h.truncate(desc, 60)))
251
252
252 def state(repo_state):
253 def state(repo_state):
253 return _render("repo_state", repo_state)
254 return _render("repo_state", repo_state)
254
255
255 def repo_actions(repo_name):
256 def repo_actions(repo_name):
256 return _render('repo_actions', repo_name, super_user_actions)
257 return _render('repo_actions', repo_name, super_user_actions)
257
258
258 def user_profile(username):
259 def user_profile(username):
259 return _render('user_profile', username)
260 return _render('user_profile', username)
260
261
261 repos_data = []
262 repos_data = []
262 for repo in repo_list:
263 for repo in repo_list:
263 cs_cache = repo.changeset_cache
264 cs_cache = repo.changeset_cache
264 row = {
265 row = {
265 "menu": quick_menu(repo.repo_name),
266 "menu": quick_menu(repo.repo_name),
266
267
267 "name": repo_lnk(repo.repo_name, repo.repo_type,
268 "name": repo_lnk(repo.repo_name, repo.repo_type,
268 repo.repo_state, repo.private, repo.fork),
269 repo.repo_state, repo.private, repo.fork),
269 "name_raw": repo.repo_name.lower(),
270 "name_raw": repo.repo_name.lower(),
270
271
271 "last_change": last_change(repo.last_db_change),
272 "last_change": last_change(repo.last_db_change),
272 "last_change_raw": datetime_to_time(repo.last_db_change),
273 "last_change_raw": datetime_to_time(repo.last_db_change),
273
274
274 "last_changeset": last_rev(repo.repo_name, cs_cache),
275 "last_changeset": last_rev(repo.repo_name, cs_cache),
275 "last_changeset_raw": cs_cache.get('revision'),
276 "last_changeset_raw": cs_cache.get('revision'),
276
277
277 "desc": desc(repo.description),
278 "desc": desc(repo.description),
278 "owner": user_profile(repo.user.username),
279 "owner": user_profile(repo.user.username),
279
280
280 "state": state(repo.repo_state),
281 "state": state(repo.repo_state),
281 "rss": rss_lnk(repo.repo_name),
282 "rss": rss_lnk(repo.repo_name),
282
283
283 "atom": atom_lnk(repo.repo_name),
284 "atom": atom_lnk(repo.repo_name),
284 }
285 }
285 if admin:
286 if admin:
286 row.update({
287 row.update({
287 "action": repo_actions(repo.repo_name),
288 "action": repo_actions(repo.repo_name),
288 })
289 })
289 repos_data.append(row)
290 repos_data.append(row)
290
291
291 return repos_data
292 return repos_data
292
293
293 def _get_defaults(self, repo_name):
294 def _get_defaults(self, repo_name):
294 """
295 """
295 Gets information about repository, and returns a dict for
296 Gets information about repository, and returns a dict for
296 usage in forms
297 usage in forms
297
298
298 :param repo_name:
299 :param repo_name:
299 """
300 """
300
301
301 repo_info = Repository.get_by_repo_name(repo_name)
302 repo_info = Repository.get_by_repo_name(repo_name)
302
303
303 if repo_info is None:
304 if repo_info is None:
304 return None
305 return None
305
306
306 defaults = repo_info.get_dict()
307 defaults = repo_info.get_dict()
307 defaults['repo_name'] = repo_info.just_name
308 defaults['repo_name'] = repo_info.just_name
308
309
309 groups = repo_info.groups_with_parents
310 groups = repo_info.groups_with_parents
310 parent_group = groups[-1] if groups else None
311 parent_group = groups[-1] if groups else None
311
312
312 # we use -1 as this is how we mark an empty group in HTML
313 # we use -1 as this is how we mark an empty group in HTML
313 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
314 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
314
315
315 keys_to_process = (
316 keys_to_process = (
316 {'k': 'repo_type', 'strip': False},
317 {'k': 'repo_type', 'strip': False},
317 {'k': 'repo_enable_downloads', 'strip': True},
318 {'k': 'repo_enable_downloads', 'strip': True},
318 {'k': 'repo_description', 'strip': True},
319 {'k': 'repo_description', 'strip': True},
319 {'k': 'repo_enable_locking', 'strip': True},
320 {'k': 'repo_enable_locking', 'strip': True},
320 {'k': 'repo_landing_rev', 'strip': True},
321 {'k': 'repo_landing_rev', 'strip': True},
321 {'k': 'clone_uri', 'strip': False},
322 {'k': 'clone_uri', 'strip': False},
322 {'k': 'repo_private', 'strip': True},
323 {'k': 'repo_private', 'strip': True},
323 {'k': 'repo_enable_statistics', 'strip': True}
324 {'k': 'repo_enable_statistics', 'strip': True}
324 )
325 )
325
326
326 for item in keys_to_process:
327 for item in keys_to_process:
327 attr = item['k']
328 attr = item['k']
328 if item['strip']:
329 if item['strip']:
329 attr = remove_prefix(item['k'], 'repo_')
330 attr = remove_prefix(item['k'], 'repo_')
330
331
331 val = defaults[attr]
332 val = defaults[attr]
332 if item['k'] == 'repo_landing_rev':
333 if item['k'] == 'repo_landing_rev':
333 val = ':'.join(defaults[attr])
334 val = ':'.join(defaults[attr])
334 defaults[item['k']] = val
335 defaults[item['k']] = val
335 if item['k'] == 'clone_uri':
336 if item['k'] == 'clone_uri':
336 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
337 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
337
338
338 # fill owner
339 # fill owner
339 if repo_info.user:
340 if repo_info.user:
340 defaults.update({'user': repo_info.user.username})
341 defaults.update({'user': repo_info.user.username})
341 else:
342 else:
342 replacement_user = User.get_first_super_admin().username
343 replacement_user = User.get_first_super_admin().username
343 defaults.update({'user': replacement_user})
344 defaults.update({'user': replacement_user})
344
345
345 # fill repository users
346 # fill repository users
346 for p in repo_info.repo_to_perm:
347 for p in repo_info.repo_to_perm:
347 defaults.update({'u_perm_%s' % p.user.user_id:
348 defaults.update({'u_perm_%s' % p.user.user_id:
348 p.permission.permission_name})
349 p.permission.permission_name})
349
350
350 # fill repository groups
351 # fill repository groups
351 for p in repo_info.users_group_to_perm:
352 for p in repo_info.users_group_to_perm:
352 defaults.update({'g_perm_%s' % p.users_group.users_group_id:
353 defaults.update({'g_perm_%s' % p.users_group.users_group_id:
353 p.permission.permission_name})
354 p.permission.permission_name})
354
355
355 return defaults
356 return defaults
356
357
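The keys_to_process loop above relies on remove_prefix (imported elsewhere in this module) to map form keys such as repo_description onto the plain model attribute names. A sketch of the assumed behaviour, shown only to make the mapping explicit:

    # Assumed behaviour of remove_prefix; the real helper lives in rhodecode's utils.
    def remove_prefix(key, prefix):
        return key[len(prefix):] if key.startswith(prefix) else key

    # 'repo_description' (strip=True)  -> reads defaults['description'], writes defaults['repo_description']
    # 'clone_uri'        (strip=False) -> reads and writes defaults['clone_uri'] unchanged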
357 def update(self, repo, **kwargs):
358 def update(self, repo, **kwargs):
358 try:
359 try:
359 cur_repo = self._get_repo(repo)
360 cur_repo = self._get_repo(repo)
360 source_repo_name = cur_repo.repo_name
361 source_repo_name = cur_repo.repo_name
361 if 'user' in kwargs:
362 if 'user' in kwargs:
362 cur_repo.user = User.get_by_username(kwargs['user'])
363 cur_repo.user = User.get_by_username(kwargs['user'])
363
364
364 if 'repo_group' in kwargs:
365 if 'repo_group' in kwargs:
365 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
366 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
366 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
367 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
367
368
368 update_keys = [
369 update_keys = [
369 (1, 'repo_enable_downloads'),
370 (1, 'repo_enable_downloads'),
370 (1, 'repo_description'),
371 (1, 'repo_description'),
371 (1, 'repo_enable_locking'),
372 (1, 'repo_enable_locking'),
372 (1, 'repo_landing_rev'),
373 (1, 'repo_landing_rev'),
373 (1, 'repo_private'),
374 (1, 'repo_private'),
374 (1, 'repo_enable_statistics'),
375 (1, 'repo_enable_statistics'),
375 (0, 'clone_uri'),
376 (0, 'clone_uri'),
376 (0, 'fork_id')
377 (0, 'fork_id')
377 ]
378 ]
378 for strip, k in update_keys:
379 for strip, k in update_keys:
379 if k in kwargs:
380 if k in kwargs:
380 val = kwargs[k]
381 val = kwargs[k]
381 if strip:
382 if strip:
382 k = remove_prefix(k, 'repo_')
383 k = remove_prefix(k, 'repo_')
383 if k == 'clone_uri':
384 if k == 'clone_uri':
384 from rhodecode.model.validators import Missing
385 from rhodecode.model.validators import Missing
385 _change = kwargs.get('clone_uri_change')
386 _change = kwargs.get('clone_uri_change')
386 if _change in [Missing, 'OLD']:
387 if _change in [Missing, 'OLD']:
387 # we don't change the value, so use original one
388 # we don't change the value, so use original one
388 val = cur_repo.clone_uri
389 val = cur_repo.clone_uri
389
390
390 setattr(cur_repo, k, val)
391 setattr(cur_repo, k, val)
391
392
392 new_name = cur_repo.get_new_name(kwargs['repo_name'])
393 new_name = cur_repo.get_new_name(kwargs['repo_name'])
393 cur_repo.repo_name = new_name
394 cur_repo.repo_name = new_name
394
395
395 # if private flag is set, reset default permission to NONE
396 # if private flag is set, reset default permission to NONE
396 if kwargs.get('repo_private'):
397 if kwargs.get('repo_private'):
397 EMPTY_PERM = 'repository.none'
398 EMPTY_PERM = 'repository.none'
398 RepoModel().grant_user_permission(
399 RepoModel().grant_user_permission(
399 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
400 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
400 )
401 )
401
402
402 # handle extra fields
403 # handle extra fields
403 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
404 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX),
404 kwargs):
405 kwargs):
405 k = RepositoryField.un_prefix_key(field)
406 k = RepositoryField.un_prefix_key(field)
406 ex_field = RepositoryField.get_by_key_name(
407 ex_field = RepositoryField.get_by_key_name(
407 key=k, repo=cur_repo)
408 key=k, repo=cur_repo)
408 if ex_field:
409 if ex_field:
409 ex_field.field_value = kwargs[field]
410 ex_field.field_value = kwargs[field]
410 self.sa.add(ex_field)
411 self.sa.add(ex_field)
411 self.sa.add(cur_repo)
412 self.sa.add(cur_repo)
412
413
413 if source_repo_name != new_name:
414 if source_repo_name != new_name:
414 # rename repository
415 # rename repository
415 self._rename_filesystem_repo(
416 self._rename_filesystem_repo(
416 old=source_repo_name, new=new_name)
417 old=source_repo_name, new=new_name)
417
418
418 return cur_repo
419 return cur_repo
419 except Exception:
420 except Exception:
420 log.error(traceback.format_exc())
421 log.error(traceback.format_exc())
421 raise
422 raise
422
423
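A hedged usage sketch of update(); note that repo_name is always consulted for the rename check, so it must be present in kwargs even when the name does not change (repo identifier and values below are hypothetical):

    RepoModel().update(
        'some/repo',
        repo_name='renamed-repo',            # a filesystem rename is triggered when it differs
        repo_description='New description',  # stripped to `description` before being set
        repo_private=True,                   # also resets the default user perm to repository.none
    )
    # the caller is expected to commit the session afterwards, as elsewhere in the model layer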
423 def _create_repo(self, repo_name, repo_type, description, owner,
424 def _create_repo(self, repo_name, repo_type, description, owner,
424 private=False, clone_uri=None, repo_group=None,
425 private=False, clone_uri=None, repo_group=None,
425 landing_rev='rev:tip', fork_of=None,
426 landing_rev='rev:tip', fork_of=None,
426 copy_fork_permissions=False, enable_statistics=False,
427 copy_fork_permissions=False, enable_statistics=False,
427 enable_locking=False, enable_downloads=False,
428 enable_locking=False, enable_downloads=False,
428 copy_group_permissions=False,
429 copy_group_permissions=False,
429 state=Repository.STATE_PENDING):
430 state=Repository.STATE_PENDING):
430 """
431 """
431 Create repository inside database with PENDING state; this should only
432 Create repository inside database with PENDING state; this should only
432 be executed by the create() method, with the exception of importing
433 be executed by the create() method, with the exception of importing
433 existing repos
434 existing repos
434 """
435 """
435 from rhodecode.model.scm import ScmModel
436 from rhodecode.model.scm import ScmModel
436
437
437 owner = self._get_user(owner)
438 owner = self._get_user(owner)
438 fork_of = self._get_repo(fork_of)
439 fork_of = self._get_repo(fork_of)
439 repo_group = self._get_repo_group(safe_int(repo_group))
440 repo_group = self._get_repo_group(safe_int(repo_group))
440
441
441 try:
442 try:
442 repo_name = safe_unicode(repo_name)
443 repo_name = safe_unicode(repo_name)
443 description = safe_unicode(description)
444 description = safe_unicode(description)
444 # repo_name is just the name of the repository,
445 # repo_name is just the name of the repository,
445 # while repo_name_full is a fully qualified name combining
446 # while repo_name_full is a fully qualified name combining
446 # the name with the path of its group
447 # the name with the path of its group
447 repo_name_full = repo_name
448 repo_name_full = repo_name
448 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
449 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
449
450
450 new_repo = Repository()
451 new_repo = Repository()
451 new_repo.repo_state = state
452 new_repo.repo_state = state
452 new_repo.enable_statistics = False
453 new_repo.enable_statistics = False
453 new_repo.repo_name = repo_name_full
454 new_repo.repo_name = repo_name_full
454 new_repo.repo_type = repo_type
455 new_repo.repo_type = repo_type
455 new_repo.user = owner
456 new_repo.user = owner
456 new_repo.group = repo_group
457 new_repo.group = repo_group
457 new_repo.description = description or repo_name
458 new_repo.description = description or repo_name
458 new_repo.private = private
459 new_repo.private = private
459 new_repo.clone_uri = clone_uri
460 new_repo.clone_uri = clone_uri
460 new_repo.landing_rev = landing_rev
461 new_repo.landing_rev = landing_rev
461
462
462 new_repo.enable_statistics = enable_statistics
463 new_repo.enable_statistics = enable_statistics
463 new_repo.enable_locking = enable_locking
464 new_repo.enable_locking = enable_locking
464 new_repo.enable_downloads = enable_downloads
465 new_repo.enable_downloads = enable_downloads
465
466
466 if repo_group:
467 if repo_group:
467 new_repo.enable_locking = repo_group.enable_locking
468 new_repo.enable_locking = repo_group.enable_locking
468
469
469 if fork_of:
470 if fork_of:
470 parent_repo = fork_of
471 parent_repo = fork_of
471 new_repo.fork = parent_repo
472 new_repo.fork = parent_repo
472
473
474 events.trigger(events.RepoPreCreateEvent(new_repo))
475
473 self.sa.add(new_repo)
476 self.sa.add(new_repo)
474
477
475 EMPTY_PERM = 'repository.none'
478 EMPTY_PERM = 'repository.none'
476 if fork_of and copy_fork_permissions:
479 if fork_of and copy_fork_permissions:
477 repo = fork_of
480 repo = fork_of
478 user_perms = UserRepoToPerm.query() \
481 user_perms = UserRepoToPerm.query() \
479 .filter(UserRepoToPerm.repository == repo).all()
482 .filter(UserRepoToPerm.repository == repo).all()
480 group_perms = UserGroupRepoToPerm.query() \
483 group_perms = UserGroupRepoToPerm.query() \
481 .filter(UserGroupRepoToPerm.repository == repo).all()
484 .filter(UserGroupRepoToPerm.repository == repo).all()
482
485
483 for perm in user_perms:
486 for perm in user_perms:
484 UserRepoToPerm.create(
487 UserRepoToPerm.create(
485 perm.user, new_repo, perm.permission)
488 perm.user, new_repo, perm.permission)
486
489
487 for perm in group_perms:
490 for perm in group_perms:
488 UserGroupRepoToPerm.create(
491 UserGroupRepoToPerm.create(
489 perm.users_group, new_repo, perm.permission)
492 perm.users_group, new_repo, perm.permission)
490 # in case we copy permissions and also set this repo to private
493 # in case we copy permissions and also set this repo to private
491 # override the default user permission to make it a private
494 # override the default user permission to make it a private
492 # repo
495 # repo
493 if private:
496 if private:
494 RepoModel(self.sa).grant_user_permission(
497 RepoModel(self.sa).grant_user_permission(
495 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
498 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
496
499
497 elif repo_group and copy_group_permissions:
500 elif repo_group and copy_group_permissions:
498 user_perms = UserRepoGroupToPerm.query() \
501 user_perms = UserRepoGroupToPerm.query() \
499 .filter(UserRepoGroupToPerm.group == repo_group).all()
502 .filter(UserRepoGroupToPerm.group == repo_group).all()
500
503
501 group_perms = UserGroupRepoGroupToPerm.query() \
504 group_perms = UserGroupRepoGroupToPerm.query() \
502 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
505 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
503
506
504 for perm in user_perms:
507 for perm in user_perms:
505 perm_name = perm.permission.permission_name.replace(
508 perm_name = perm.permission.permission_name.replace(
506 'group.', 'repository.')
509 'group.', 'repository.')
507 perm_obj = Permission.get_by_key(perm_name)
510 perm_obj = Permission.get_by_key(perm_name)
508 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
511 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
509
512
510 for perm in group_perms:
513 for perm in group_perms:
511 perm_name = perm.permission.permission_name.replace(
514 perm_name = perm.permission.permission_name.replace(
512 'group.', 'repository.')
515 'group.', 'repository.')
513 perm_obj = Permission.get_by_key(perm_name)
516 perm_obj = Permission.get_by_key(perm_name)
514 UserGroupRepoToPerm.create(
517 UserGroupRepoToPerm.create(
515 perm.users_group, new_repo, perm_obj)
518 perm.users_group, new_repo, perm_obj)
516
519
517 if private:
520 if private:
518 RepoModel(self.sa).grant_user_permission(
521 RepoModel(self.sa).grant_user_permission(
519 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
522 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
520
523
521 else:
524 else:
522 perm_obj = self._create_default_perms(new_repo, private)
525 perm_obj = self._create_default_perms(new_repo, private)
523 self.sa.add(perm_obj)
526 self.sa.add(perm_obj)
524
527
525 # now automatically start following this repository as owner
528 # now automatically start following this repository as owner
526 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
529 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id,
527 owner.user_id)
530 owner.user_id)
531
528 # we need to flush here in order to check that the database won't
532 # we need to flush here in order to check that the database won't
529 # throw any exceptions; filesystem dirs are created at the very end
533 # throw any exceptions; filesystem dirs are created at the very end
530 self.sa.flush()
534 self.sa.flush()
535 events.trigger(events.RepoCreatedEvent(new_repo))
536 return new_repo
531
537
532 return new_repo
533 except Exception:
538 except Exception:
534 log.error(traceback.format_exc())
539 log.error(traceback.format_exc())
535 raise
540 raise
536
541
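The events.trigger() calls added above (RepoPreCreateEvent before the DB add, RepoCreatedEvent after the flush) are dispatched through the Pyramid registry, so other code can observe repo creation with a plain Pyramid subscriber. A minimal sketch, assuming the event exposes the Repository it was constructed with as event.repo and that a Pyramid Configurator is available at startup:

    from rhodecode import events

    def log_new_repo(event):
        # assumption: the Repository passed to RepoCreatedEvent is available as event.repo
        print('repository created:', event.repo.repo_name)

    # hypothetical wiring during application configuration:
    # config.add_subscriber(log_new_repo, events.RepoCreatedEvent)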
537 def create(self, form_data, cur_user):
542 def create(self, form_data, cur_user):
538 """
543 """
539 Create repository using celery tasks
544 Create repository using celery tasks
540
545
541 :param form_data:
546 :param form_data:
542 :param cur_user:
547 :param cur_user:
543 """
548 """
544 from rhodecode.lib.celerylib import tasks, run_task
549 from rhodecode.lib.celerylib import tasks, run_task
545 return run_task(tasks.create_repo, form_data, cur_user)
550 return run_task(tasks.create_repo, form_data, cur_user)
546
551
547 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
552 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
548 perm_deletions=None, check_perms=True,
553 perm_deletions=None, check_perms=True,
549 cur_user=None):
554 cur_user=None):
550 if not perm_additions:
555 if not perm_additions:
551 perm_additions = []
556 perm_additions = []
552 if not perm_updates:
557 if not perm_updates:
553 perm_updates = []
558 perm_updates = []
554 if not perm_deletions:
559 if not perm_deletions:
555 perm_deletions = []
560 perm_deletions = []
556
561
557 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
562 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
558
563
559 # update permissions
564 # update permissions
560 for member_id, perm, member_type in perm_updates:
565 for member_id, perm, member_type in perm_updates:
561 member_id = int(member_id)
566 member_id = int(member_id)
562 if member_type == 'user':
567 if member_type == 'user':
563 # this updates also current one if found
568 # this updates also current one if found
564 self.grant_user_permission(
569 self.grant_user_permission(
565 repo=repo, user=member_id, perm=perm)
570 repo=repo, user=member_id, perm=perm)
566 else: # set for user group
571 else: # set for user group
567 # check if we have permissions to alter this usergroup
572 # check if we have permissions to alter this usergroup
568 member_name = UserGroup.get(member_id).users_group_name
573 member_name = UserGroup.get(member_id).users_group_name
569 if not check_perms or HasUserGroupPermissionAny(
574 if not check_perms or HasUserGroupPermissionAny(
570 *req_perms)(member_name, user=cur_user):
575 *req_perms)(member_name, user=cur_user):
571 self.grant_user_group_permission(
576 self.grant_user_group_permission(
572 repo=repo, group_name=member_id, perm=perm)
577 repo=repo, group_name=member_id, perm=perm)
573
578
574 # set new permissions
579 # set new permissions
575 for member_id, perm, member_type in perm_additions:
580 for member_id, perm, member_type in perm_additions:
576 member_id = int(member_id)
581 member_id = int(member_id)
577 if member_type == 'user':
582 if member_type == 'user':
578 self.grant_user_permission(
583 self.grant_user_permission(
579 repo=repo, user=member_id, perm=perm)
584 repo=repo, user=member_id, perm=perm)
580 else: # set for user group
585 else: # set for user group
581 # check if we have permissions to alter this usergroup
586 # check if we have permissions to alter this usergroup
582 member_name = UserGroup.get(member_id).users_group_name
587 member_name = UserGroup.get(member_id).users_group_name
583 if not check_perms or HasUserGroupPermissionAny(
588 if not check_perms or HasUserGroupPermissionAny(
584 *req_perms)(member_name, user=cur_user):
589 *req_perms)(member_name, user=cur_user):
585 self.grant_user_group_permission(
590 self.grant_user_group_permission(
586 repo=repo, group_name=member_id, perm=perm)
591 repo=repo, group_name=member_id, perm=perm)
587
592
588 # delete permissions
593 # delete permissions
589 for member_id, perm, member_type in perm_deletions:
594 for member_id, perm, member_type in perm_deletions:
590 member_id = int(member_id)
595 member_id = int(member_id)
591 if member_type == 'user':
596 if member_type == 'user':
592 self.revoke_user_permission(repo=repo, user=member_id)
597 self.revoke_user_permission(repo=repo, user=member_id)
593 else: # set for user group
598 else: # set for user group
594 # check if we have permissions to alter this usergroup
599 # check if we have permissions to alter this usergroup
595 member_name = UserGroup.get(member_id).users_group_name
600 member_name = UserGroup.get(member_id).users_group_name
596 if not check_perms or HasUserGroupPermissionAny(
601 if not check_perms or HasUserGroupPermissionAny(
597 *req_perms)(member_name, user=cur_user):
602 *req_perms)(member_name, user=cur_user):
598 self.revoke_user_group_permission(
603 self.revoke_user_group_permission(
599 repo=repo, group_name=member_id)
604 repo=repo, group_name=member_id)
600
605
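update_permissions() expects each of perm_additions, perm_updates and perm_deletions as (member_id, permission_name, member_type) triples, where member_type is 'user' for users and anything else for a user group. A hedged example payload with made-up ids:

    perm_additions = [(42, 'repository.write', 'user')]       # grant write to user id 42
    perm_updates   = [(7, 'repository.read', 'user_group')]   # adjust an existing user-group entry
    perm_deletions = [(13, 'repository.read', 'user')]        # revoke user id 13 entirely

    RepoModel().update_permissions(
        'some/repo',
        perm_additions=perm_additions,
        perm_updates=perm_updates,
        perm_deletions=perm_deletions,
        cur_user='admin',
    )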
601 def create_fork(self, form_data, cur_user):
606 def create_fork(self, form_data, cur_user):
602 """
607 """
603 Simple wrapper into executing celery task for fork creation
608 Simple wrapper into executing celery task for fork creation
604
609
605 :param form_data:
610 :param form_data:
606 :param cur_user:
611 :param cur_user:
607 """
612 """
608 from rhodecode.lib.celerylib import tasks, run_task
613 from rhodecode.lib.celerylib import tasks, run_task
609 return run_task(tasks.create_repo_fork, form_data, cur_user)
614 return run_task(tasks.create_repo_fork, form_data, cur_user)
610
615
611 def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
616 def delete(self, repo, forks=None, fs_remove=True, cur_user=None):
612 """
617 """
613 Delete given repository; the forks parameter defines what to do with
618 Delete given repository; the forks parameter defines what to do with
614 attached forks. Throws AttachedForksError if the deleted repo has
619 attached forks. Throws AttachedForksError if the deleted repo has
615 attached forks
620 attached forks
616
621
617 :param repo:
622 :param repo:
618 :param forks: str 'delete' or 'detach'
623 :param forks: str 'delete' or 'detach'
619 :param fs_remove: remove(archive) repo from filesystem
624 :param fs_remove: remove(archive) repo from filesystem
620 """
625 """
621 if not cur_user:
626 if not cur_user:
622 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
627 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
623 repo = self._get_repo(repo)
628 repo = self._get_repo(repo)
624 if repo:
629 if repo:
625 if forks == 'detach':
630 if forks == 'detach':
626 for r in repo.forks:
631 for r in repo.forks:
627 r.fork = None
632 r.fork = None
628 self.sa.add(r)
633 self.sa.add(r)
629 elif forks == 'delete':
634 elif forks == 'delete':
630 for r in repo.forks:
635 for r in repo.forks:
631 self.delete(r, forks='delete')
636 self.delete(r, forks='delete')
632 elif [f for f in repo.forks]:
637 elif [f for f in repo.forks]:
633 raise AttachedForksError()
638 raise AttachedForksError()
634
639
635 old_repo_dict = repo.get_dict()
640 old_repo_dict = repo.get_dict()
641 events.trigger(events.RepoPreDeleteEvent(repo))
636 try:
642 try:
637 self.sa.delete(repo)
643 self.sa.delete(repo)
638 if fs_remove:
644 if fs_remove:
639 self._delete_filesystem_repo(repo)
645 self._delete_filesystem_repo(repo)
640 else:
646 else:
641 log.debug('skipping removal from filesystem')
647 log.debug('skipping removal from filesystem')
642 old_repo_dict.update({
648 old_repo_dict.update({
643 'deleted_by': cur_user,
649 'deleted_by': cur_user,
644 'deleted_on': time.time(),
650 'deleted_on': time.time(),
645 })
651 })
646 log_delete_repository(**old_repo_dict)
652 log_delete_repository(**old_repo_dict)
653 events.trigger(events.RepoDeletedEvent(repo))
647 except Exception:
654 except Exception:
648 log.error(traceback.format_exc())
655 log.error(traceback.format_exc())
649 raise
656 raise
650
657
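A short sketch of the fork handling in delete(): 'detach' keeps forks but clears their fork pointer, 'delete' removes them recursively, and leaving forks unset raises AttachedForksError when any exist; the pre- and post-delete events are triggered around the database delete as shown above. Repo names below are hypothetical, and AttachedForksError is assumed to live with the other model exceptions in rhodecode.lib.exceptions:

    from rhodecode.lib.exceptions import AttachedForksError  # assumed import path

    model = RepoModel()
    model.delete('some/repo', forks='detach')   # forks survive as standalone repos
    model.delete('other/repo', forks='delete')  # forks are deleted recursively
    try:
        model.delete('forked/repo')             # no forks argument given
    except AttachedForksError:
        pass                                    # raised because attached forks still exist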
651 def grant_user_permission(self, repo, user, perm):
658 def grant_user_permission(self, repo, user, perm):
652 """
659 """
653 Grant permission for user on given repository, or update existing one
660 Grant permission for user on given repository, or update existing one
654 if found
661 if found
655
662
656 :param repo: Instance of Repository, repository_id, or repository name
663 :param repo: Instance of Repository, repository_id, or repository name
657 :param user: Instance of User, user_id or username
664 :param user: Instance of User, user_id or username
658 :param perm: Instance of Permission, or permission_name
665 :param perm: Instance of Permission, or permission_name
659 """
666 """
660 user = self._get_user(user)
667 user = self._get_user(user)
661 repo = self._get_repo(repo)
668 repo = self._get_repo(repo)
662 permission = self._get_perm(perm)
669 permission = self._get_perm(perm)
663
670
664 # check if we have that permission already
671 # check if we have that permission already
665 obj = self.sa.query(UserRepoToPerm) \
672 obj = self.sa.query(UserRepoToPerm) \
666 .filter(UserRepoToPerm.user == user) \
673 .filter(UserRepoToPerm.user == user) \
667 .filter(UserRepoToPerm.repository == repo) \
674 .filter(UserRepoToPerm.repository == repo) \
668 .scalar()
675 .scalar()
669 if obj is None:
676 if obj is None:
670 # create new !
677 # create new !
671 obj = UserRepoToPerm()
678 obj = UserRepoToPerm()
672 obj.repository = repo
679 obj.repository = repo
673 obj.user = user
680 obj.user = user
674 obj.permission = permission
681 obj.permission = permission
675 self.sa.add(obj)
682 self.sa.add(obj)
676 log.debug('Granted perm %s to %s on %s', perm, user, repo)
683 log.debug('Granted perm %s to %s on %s', perm, user, repo)
677 action_logger_generic(
684 action_logger_generic(
678 'granted permission: {} to user: {} on repo: {}'.format(
685 'granted permission: {} to user: {} on repo: {}'.format(
679 perm, user, repo), namespace='security.repo')
686 perm, user, repo), namespace='security.repo')
680 return obj
687 return obj
681
688
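grant_user_permission() accepts the repo, user and permission either as model instances or as plain identifiers, as the docstring above notes. A hedged usage sketch with illustrative names:

    model = RepoModel()
    model.grant_user_permission(repo='some/repo', user='jane', perm='repository.write')
    model.revoke_user_permission(repo='some/repo', user='jane')
    # the caller commits the session, as elsewhere in the model layer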
682 def revoke_user_permission(self, repo, user):
689 def revoke_user_permission(self, repo, user):
683 """
690 """
684 Revoke permission for user on given repository
691 Revoke permission for user on given repository
685
692
686 :param repo: Instance of Repository, repository_id, or repository name
693 :param repo: Instance of Repository, repository_id, or repository name
687 :param user: Instance of User, user_id or username
694 :param user: Instance of User, user_id or username
688 """
695 """
689
696
690 user = self._get_user(user)
697 user = self._get_user(user)
691 repo = self._get_repo(repo)
698 repo = self._get_repo(repo)
692
699
693 obj = self.sa.query(UserRepoToPerm) \
700 obj = self.sa.query(UserRepoToPerm) \
694 .filter(UserRepoToPerm.repository == repo) \
701 .filter(UserRepoToPerm.repository == repo) \
695 .filter(UserRepoToPerm.user == user) \
702 .filter(UserRepoToPerm.user == user) \
696 .scalar()
703 .scalar()
697 if obj:
704 if obj:
698 self.sa.delete(obj)
705 self.sa.delete(obj)
699 log.debug('Revoked perm on %s on %s', repo, user)
706 log.debug('Revoked perm on %s on %s', repo, user)
700 action_logger_generic(
707 action_logger_generic(
701 'revoked permission from user: {} on repo: {}'.format(
708 'revoked permission from user: {} on repo: {}'.format(
702 user, repo), namespace='security.repo')
709 user, repo), namespace='security.repo')
703
710
704 def grant_user_group_permission(self, repo, group_name, perm):
711 def grant_user_group_permission(self, repo, group_name, perm):
705 """
712 """
706 Grant permission for user group on given repository, or update
713 Grant permission for user group on given repository, or update
707 existing one if found
714 existing one if found
708
715
709 :param repo: Instance of Repository, repository_id, or repository name
716 :param repo: Instance of Repository, repository_id, or repository name
710 :param group_name: Instance of UserGroup, users_group_id,
717 :param group_name: Instance of UserGroup, users_group_id,
711 or user group name
718 or user group name
712 :param perm: Instance of Permission, or permission_name
719 :param perm: Instance of Permission, or permission_name
713 """
720 """
714 repo = self._get_repo(repo)
721 repo = self._get_repo(repo)
715 group_name = self._get_user_group(group_name)
722 group_name = self._get_user_group(group_name)
716 permission = self._get_perm(perm)
723 permission = self._get_perm(perm)
717
724
718 # check if we have that permission already
725 # check if we have that permission already
719 obj = self.sa.query(UserGroupRepoToPerm) \
726 obj = self.sa.query(UserGroupRepoToPerm) \
720 .filter(UserGroupRepoToPerm.users_group == group_name) \
727 .filter(UserGroupRepoToPerm.users_group == group_name) \
721 .filter(UserGroupRepoToPerm.repository == repo) \
728 .filter(UserGroupRepoToPerm.repository == repo) \
722 .scalar()
729 .scalar()
723
730
724 if obj is None:
731 if obj is None:
725 # create new
732 # create new
726 obj = UserGroupRepoToPerm()
733 obj = UserGroupRepoToPerm()
727
734
728 obj.repository = repo
735 obj.repository = repo
729 obj.users_group = group_name
736 obj.users_group = group_name
730 obj.permission = permission
737 obj.permission = permission
731 self.sa.add(obj)
738 self.sa.add(obj)
732 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
739 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
733 action_logger_generic(
740 action_logger_generic(
734 'granted permission: {} to usergroup: {} on repo: {}'.format(
741 'granted permission: {} to usergroup: {} on repo: {}'.format(
735 perm, group_name, repo), namespace='security.repo')
742 perm, group_name, repo), namespace='security.repo')
736
743
737 return obj
744 return obj
738
745
739 def revoke_user_group_permission(self, repo, group_name):
746 def revoke_user_group_permission(self, repo, group_name):
740 """
747 """
741 Revoke permission for user group on given repository
748 Revoke permission for user group on given repository
742
749
743 :param repo: Instance of Repository, repository_id, or repository name
750 :param repo: Instance of Repository, repository_id, or repository name
744 :param group_name: Instance of UserGroup, users_group_id,
751 :param group_name: Instance of UserGroup, users_group_id,
745 or user group name
752 or user group name
746 """
753 """
747 repo = self._get_repo(repo)
754 repo = self._get_repo(repo)
748 group_name = self._get_user_group(group_name)
755 group_name = self._get_user_group(group_name)
749
756
750 obj = self.sa.query(UserGroupRepoToPerm) \
757 obj = self.sa.query(UserGroupRepoToPerm) \
751 .filter(UserGroupRepoToPerm.repository == repo) \
758 .filter(UserGroupRepoToPerm.repository == repo) \
752 .filter(UserGroupRepoToPerm.users_group == group_name) \
759 .filter(UserGroupRepoToPerm.users_group == group_name) \
753 .scalar()
760 .scalar()
754 if obj:
761 if obj:
755 self.sa.delete(obj)
762 self.sa.delete(obj)
756 log.debug('Revoked perm to %s on %s', repo, group_name)
763 log.debug('Revoked perm to %s on %s', repo, group_name)
757 action_logger_generic(
764 action_logger_generic(
758 'revoked permission from usergroup: {} on repo: {}'.format(
765 'revoked permission from usergroup: {} on repo: {}'.format(
759 group_name, repo), namespace='security.repo')
766 group_name, repo), namespace='security.repo')
760
767
761 def delete_stats(self, repo_name):
768 def delete_stats(self, repo_name):
762 """
769 """
763 removes stats for given repo
770 removes stats for given repo
764
771
765 :param repo_name:
772 :param repo_name:
766 """
773 """
767 repo = self._get_repo(repo_name)
774 repo = self._get_repo(repo_name)
768 try:
775 try:
769 obj = self.sa.query(Statistics) \
776 obj = self.sa.query(Statistics) \
770 .filter(Statistics.repository == repo).scalar()
777 .filter(Statistics.repository == repo).scalar()
771 if obj:
778 if obj:
772 self.sa.delete(obj)
779 self.sa.delete(obj)
773 except Exception:
780 except Exception:
774 log.error(traceback.format_exc())
781 log.error(traceback.format_exc())
775 raise
782 raise
776
783
777 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
784 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
778 field_type='str', field_desc=''):
785 field_type='str', field_desc=''):
779
786
780 repo = self._get_repo(repo_name)
787 repo = self._get_repo(repo_name)
781
788
782 new_field = RepositoryField()
789 new_field = RepositoryField()
783 new_field.repository = repo
790 new_field.repository = repo
784 new_field.field_key = field_key
791 new_field.field_key = field_key
785 new_field.field_type = field_type # python type
792 new_field.field_type = field_type # python type
786 new_field.field_value = field_value
793 new_field.field_value = field_value
787 new_field.field_desc = field_desc
794 new_field.field_desc = field_desc
788 new_field.field_label = field_label
795 new_field.field_label = field_label
789 self.sa.add(new_field)
796 self.sa.add(new_field)
790 return new_field
797 return new_field
791
798
792 def delete_repo_field(self, repo_name, field_key):
799 def delete_repo_field(self, repo_name, field_key):
793 repo = self._get_repo(repo_name)
800 repo = self._get_repo(repo_name)
794 field = RepositoryField.get_by_key_name(field_key, repo)
801 field = RepositoryField.get_by_key_name(field_key, repo)
795 if field:
802 if field:
796 self.sa.delete(field)
803 self.sa.delete(field)
797
804
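A sketch of attaching and removing an extra repository field with the two helpers above; the field names and values are illustrative:

    model = RepoModel()
    model.add_repo_field(
        'some/repo',
        field_key='ticket_system',
        field_label='Ticket system',
        field_value='https://tickets.example.com',
        field_desc='Where issues for this repo are tracked',
    )
    model.delete_repo_field('some/repo', field_key='ticket_system')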
798 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
805 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
799 clone_uri=None, repo_store_location=None,
806 clone_uri=None, repo_store_location=None,
800 use_global_config=False):
807 use_global_config=False):
801 """
808 """
802 makes a repository on the filesystem. It is group aware, meaning it will
809 makes a repository on the filesystem. It is group aware, meaning it will
803 create a repository within a group and alter the paths according to the
810 create a repository within a group and alter the paths according to the
804 group location
811 group location
805
812
806 :param repo_name:
813 :param repo_name:
807 :param alias:
814 :param alias:
808 :param parent:
815 :param parent:
809 :param clone_uri:
816 :param clone_uri:
810 :param repo_store_location:
817 :param repo_store_location:
811 """
818 """
812 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
819 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
813 from rhodecode.model.scm import ScmModel
820 from rhodecode.model.scm import ScmModel
814
821
815 if Repository.NAME_SEP in repo_name:
822 if Repository.NAME_SEP in repo_name:
816 raise ValueError(
823 raise ValueError(
817 'repo_name must not contain groups, got `%s`' % repo_name)
824 'repo_name must not contain groups, got `%s`' % repo_name)
818
825
819 if isinstance(repo_group, RepoGroup):
826 if isinstance(repo_group, RepoGroup):
820 new_parent_path = os.sep.join(repo_group.full_path_splitted)
827 new_parent_path = os.sep.join(repo_group.full_path_splitted)
821 else:
828 else:
822 new_parent_path = repo_group or ''
829 new_parent_path = repo_group or ''
823
830
824 if repo_store_location:
831 if repo_store_location:
825 _paths = [repo_store_location]
832 _paths = [repo_store_location]
826 else:
833 else:
827 _paths = [self.repos_path, new_parent_path, repo_name]
834 _paths = [self.repos_path, new_parent_path, repo_name]
828 # we need to make it str for mercurial
835 # we need to make it str for mercurial
829 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
836 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
830
837
831 # check if this path is not a repository
838 # check if this path is not a repository
832 if is_valid_repo(repo_path, self.repos_path):
839 if is_valid_repo(repo_path, self.repos_path):
833 raise Exception('This path %s is a valid repository' % repo_path)
840 raise Exception('This path %s is a valid repository' % repo_path)
834
841
835 # check if this path is a group
842 # check if this path is a group
836 if is_valid_repo_group(repo_path, self.repos_path):
843 if is_valid_repo_group(repo_path, self.repos_path):
837 raise Exception('This path %s is a valid group' % repo_path)
844 raise Exception('This path %s is a valid group' % repo_path)
838
845
839 log.info('creating repo %s in %s from url: `%s`',
846 log.info('creating repo %s in %s from url: `%s`',
840 repo_name, safe_unicode(repo_path),
847 repo_name, safe_unicode(repo_path),
841 obfuscate_url_pw(clone_uri))
848 obfuscate_url_pw(clone_uri))
842
849
843 backend = get_backend(repo_type)
850 backend = get_backend(repo_type)
844
851
845 config_repo = None if use_global_config else repo_name
852 config_repo = None if use_global_config else repo_name
846 if config_repo and new_parent_path:
853 if config_repo and new_parent_path:
847 config_repo = Repository.NAME_SEP.join(
854 config_repo = Repository.NAME_SEP.join(
848 (new_parent_path, config_repo))
855 (new_parent_path, config_repo))
849 config = make_db_config(clear_session=False, repo=config_repo)
856 config = make_db_config(clear_session=False, repo=config_repo)
850 config.set('extensions', 'largefiles', '')
857 config.set('extensions', 'largefiles', '')
851
858
852 # patch and reset hooks section of UI config to not run any
859 # patch and reset hooks section of UI config to not run any
853 # hooks on creating remote repo
860 # hooks on creating remote repo
854 config.clear_section('hooks')
861 config.clear_section('hooks')
855
862
856 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
863 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
857 if repo_type == 'git':
864 if repo_type == 'git':
858 repo = backend(
865 repo = backend(
859 repo_path, config=config, create=True, src_url=clone_uri,
866 repo_path, config=config, create=True, src_url=clone_uri,
860 bare=True)
867 bare=True)
861 else:
868 else:
862 repo = backend(
869 repo = backend(
863 repo_path, config=config, create=True, src_url=clone_uri)
870 repo_path, config=config, create=True, src_url=clone_uri)
864
871
865 ScmModel().install_hooks(repo, repo_type=repo_type)
872 ScmModel().install_hooks(repo, repo_type=repo_type)
866
873
867 log.debug('Created repo %s with %s backend',
874 log.debug('Created repo %s with %s backend',
868 safe_unicode(repo_name), safe_unicode(repo_type))
875 safe_unicode(repo_name), safe_unicode(repo_type))
869 return repo
876 return repo
870
877
871 def _rename_filesystem_repo(self, old, new):
878 def _rename_filesystem_repo(self, old, new):
872 """
879 """
873 renames repository on filesystem
880 renames repository on filesystem
874
881
875 :param old: old name
882 :param old: old name
876 :param new: new name
883 :param new: new name
877 """
884 """
878 log.info('renaming repo from %s to %s', old, new)
885 log.info('renaming repo from %s to %s', old, new)
879
886
880 old_path = os.path.join(self.repos_path, old)
887 old_path = os.path.join(self.repos_path, old)
881 new_path = os.path.join(self.repos_path, new)
888 new_path = os.path.join(self.repos_path, new)
882 if os.path.isdir(new_path):
889 if os.path.isdir(new_path):
883 raise Exception(
890 raise Exception(
884 'Was trying to rename to already existing dir %s' % new_path
891 'Was trying to rename to already existing dir %s' % new_path
885 )
892 )
886 shutil.move(old_path, new_path)
893 shutil.move(old_path, new_path)
887
894
888 def _delete_filesystem_repo(self, repo):
895 def _delete_filesystem_repo(self, repo):
889 """
896 """
890 removes repo from filesystem; the removal is actually made by adding
897 removes repo from filesystem; the removal is actually made by adding
891 an rm__ prefix to the dir and renaming the internal .hg/.git dirs so this
898 an rm__ prefix to the dir and renaming the internal .hg/.git dirs so this
892 repository is no longer valid for rhodecode; it can be undeleted later on
899 repository is no longer valid for rhodecode; it can be undeleted later on
893 by reverting the renames on this repository
900 by reverting the renames on this repository
894
901
895 :param repo: repo object
902 :param repo: repo object
896 """
903 """
897 rm_path = os.path.join(self.repos_path, repo.repo_name)
904 rm_path = os.path.join(self.repos_path, repo.repo_name)
898 repo_group = repo.group
905 repo_group = repo.group
899 log.info("Removing repository %s", rm_path)
906 log.info("Removing repository %s", rm_path)
900 # disable hg/git internals so that it doesn't get detected as a repo
907 # disable hg/git internals so that it doesn't get detected as a repo
901 alias = repo.repo_type
908 alias = repo.repo_type
902
909
903 config = make_db_config(clear_session=False)
910 config = make_db_config(clear_session=False)
904 config.set('extensions', 'largefiles', '')
911 config.set('extensions', 'largefiles', '')
905 bare = getattr(repo.scm_instance(config=config), 'bare', False)
912 bare = getattr(repo.scm_instance(config=config), 'bare', False)
906
913
907 # skip this for bare git repos
914 # skip this for bare git repos
908 if not bare:
915 if not bare:
909 # disable VCS repo
916 # disable VCS repo
910 vcs_path = os.path.join(rm_path, '.%s' % alias)
917 vcs_path = os.path.join(rm_path, '.%s' % alias)
911 if os.path.exists(vcs_path):
918 if os.path.exists(vcs_path):
912 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
919 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
913
920
914 _now = datetime.now()
921 _now = datetime.now()
915 _ms = str(_now.microsecond).rjust(6, '0')
922 _ms = str(_now.microsecond).rjust(6, '0')
916 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
923 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
917 repo.just_name)
924 repo.just_name)
918 if repo_group:
925 if repo_group:
919 # if repository is in group, prefix the removal path with the group
926 # if repository is in group, prefix the removal path with the group
920 args = repo_group.full_path_splitted + [_d]
927 args = repo_group.full_path_splitted + [_d]
921 _d = os.path.join(*args)
928 _d = os.path.join(*args)
922
929
923 if os.path.isdir(rm_path):
930 if os.path.isdir(rm_path):
924 shutil.move(rm_path, os.path.join(self.repos_path, _d))
931 shutil.move(rm_path, os.path.join(self.repos_path, _d))
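For reference, the rename scheme above produces directory names like the following; a standalone sketch of the same strftime-based format with a hypothetical repo name:

    from datetime import datetime

    # Mirrors the naming used in _delete_filesystem_repo above.
    now = datetime.now()
    ms = str(now.microsecond).rjust(6, '0')
    removed_name = 'rm__%s__%s' % (now.strftime('%Y%m%d_%H%M%S_' + ms), 'myrepo')
    # e.g. 'rm__20160314_101530_123456__myrepo'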
@@ -1,838 +1,838 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 users model for RhodeCode
22 users model for RhodeCode
23 """
23 """
24
24
25 import logging
25 import logging
26 import traceback
26 import traceback
27
27
28 import datetime
28 import datetime
29 from pylons.i18n.translation import _
29 from pylons.i18n.translation import _
30
30
31 import ipaddress
31 import ipaddress
32 from sqlalchemy.exc import DatabaseError
32 from sqlalchemy.exc import DatabaseError
33 from sqlalchemy.sql.expression import true, false
33 from sqlalchemy.sql.expression import true, false
34
34
35 from rhodecode.events import UserPreCreate, UserPreUpdate
35 from rhodecode import events
36 from rhodecode.lib.utils2 import (
36 from rhodecode.lib.utils2 import (
37 safe_unicode, get_current_rhodecode_user, action_logger_generic,
37 safe_unicode, get_current_rhodecode_user, action_logger_generic,
38 AttributeDict)
38 AttributeDict)
39 from rhodecode.lib.caching_query import FromCache
39 from rhodecode.lib.caching_query import FromCache
40 from rhodecode.model import BaseModel
40 from rhodecode.model import BaseModel
41 from rhodecode.model.auth_token import AuthTokenModel
41 from rhodecode.model.auth_token import AuthTokenModel
42 from rhodecode.model.db import (
42 from rhodecode.model.db import (
43 User, UserToPerm, UserEmailMap, UserIpMap)
43 User, UserToPerm, UserEmailMap, UserIpMap)
44 from rhodecode.lib.exceptions import (
44 from rhodecode.lib.exceptions import (
45 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
45 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
46 UserOwnsUserGroupsException, NotAllowedToCreateUserError)
46 UserOwnsUserGroupsException, NotAllowedToCreateUserError)
47 from rhodecode.model.meta import Session
47 from rhodecode.model.meta import Session
48 from rhodecode.model.repo_group import RepoGroupModel
48 from rhodecode.model.repo_group import RepoGroupModel
49
49
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class UserModel(BaseModel):
54 class UserModel(BaseModel):
55 cls = User
55 cls = User
56
56
57 def get(self, user_id, cache=False):
57 def get(self, user_id, cache=False):
58 user = self.sa.query(User)
58 user = self.sa.query(User)
59 if cache:
59 if cache:
60 user = user.options(FromCache("sql_cache_short",
60 user = user.options(FromCache("sql_cache_short",
61 "get_user_%s" % user_id))
61 "get_user_%s" % user_id))
62 return user.get(user_id)
62 return user.get(user_id)
63
63
64 def get_user(self, user):
64 def get_user(self, user):
65 return self._get_user(user)
65 return self._get_user(user)
66
66
67 def get_by_username(self, username, cache=False, case_insensitive=False):
67 def get_by_username(self, username, cache=False, case_insensitive=False):
68
68
69 if case_insensitive:
69 if case_insensitive:
70 user = self.sa.query(User).filter(User.username.ilike(username))
70 user = self.sa.query(User).filter(User.username.ilike(username))
71 else:
71 else:
72 user = self.sa.query(User)\
72 user = self.sa.query(User)\
73 .filter(User.username == username)
73 .filter(User.username == username)
74 if cache:
74 if cache:
75 user = user.options(FromCache("sql_cache_short",
75 user = user.options(FromCache("sql_cache_short",
76 "get_user_%s" % username))
76 "get_user_%s" % username))
77 return user.scalar()
77 return user.scalar()
78
78
79 def get_by_email(self, email, cache=False, case_insensitive=False):
79 def get_by_email(self, email, cache=False, case_insensitive=False):
80 return User.get_by_email(email, case_insensitive, cache)
80 return User.get_by_email(email, case_insensitive, cache)
81
81
82 def get_by_auth_token(self, auth_token, cache=False):
82 def get_by_auth_token(self, auth_token, cache=False):
83 return User.get_by_auth_token(auth_token, cache)
83 return User.get_by_auth_token(auth_token, cache)
84
84
85 def get_active_user_count(self, cache=False):
85 def get_active_user_count(self, cache=False):
86 return User.query().filter(
86 return User.query().filter(
87 User.active == True).filter(
87 User.active == True).filter(
88 User.username != User.DEFAULT_USER).count()
88 User.username != User.DEFAULT_USER).count()
89
89
90 def create(self, form_data, cur_user=None):
90 def create(self, form_data, cur_user=None):
91 if not cur_user:
91 if not cur_user:
92 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
92 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
93
93
94 user_data = {
94 user_data = {
95 'username': form_data['username'],
95 'username': form_data['username'],
96 'password': form_data['password'],
96 'password': form_data['password'],
97 'email': form_data['email'],
97 'email': form_data['email'],
98 'firstname': form_data['firstname'],
98 'firstname': form_data['firstname'],
99 'lastname': form_data['lastname'],
99 'lastname': form_data['lastname'],
100 'active': form_data['active'],
100 'active': form_data['active'],
101 'extern_type': form_data['extern_type'],
101 'extern_type': form_data['extern_type'],
102 'extern_name': form_data['extern_name'],
102 'extern_name': form_data['extern_name'],
103 'admin': False,
103 'admin': False,
104 'cur_user': cur_user
104 'cur_user': cur_user
105 }
105 }
106
106
107 try:
107 try:
108 if form_data.get('create_repo_group'):
108 if form_data.get('create_repo_group'):
109 user_data['create_repo_group'] = True
109 user_data['create_repo_group'] = True
110 if form_data.get('password_change'):
110 if form_data.get('password_change'):
111 user_data['force_password_change'] = True
111 user_data['force_password_change'] = True
112
112
113 return UserModel().create_or_update(**user_data)
113 return UserModel().create_or_update(**user_data)
114 except Exception:
114 except Exception:
115 log.error(traceback.format_exc())
115 log.error(traceback.format_exc())
116 raise
116 raise
117
117
118 def update_user(self, user, skip_attrs=None, **kwargs):
118 def update_user(self, user, skip_attrs=None, **kwargs):
119 from rhodecode.lib.auth import get_crypt_password
119 from rhodecode.lib.auth import get_crypt_password
120
120
121 user = self._get_user(user)
121 user = self._get_user(user)
122 if user.username == User.DEFAULT_USER:
122 if user.username == User.DEFAULT_USER:
123 raise DefaultUserException(
123 raise DefaultUserException(
124 _("You can't Edit this user since it's"
124 _("You can't Edit this user since it's"
125 " crucial for entire application"))
125 " crucial for entire application"))
126
126
127 # first store only defaults
127 # first store only defaults
128 user_attrs = {
128 user_attrs = {
129 'updating_user_id': user.user_id,
129 'updating_user_id': user.user_id,
130 'username': user.username,
130 'username': user.username,
131 'password': user.password,
131 'password': user.password,
132 'email': user.email,
132 'email': user.email,
133 'firstname': user.name,
133 'firstname': user.name,
134 'lastname': user.lastname,
134 'lastname': user.lastname,
135 'active': user.active,
135 'active': user.active,
136 'admin': user.admin,
136 'admin': user.admin,
137 'extern_name': user.extern_name,
137 'extern_name': user.extern_name,
138 'extern_type': user.extern_type,
138 'extern_type': user.extern_type,
139 'language': user.user_data.get('language')
139 'language': user.user_data.get('language')
140 }
140 }
141
141
142 # in case there's new_password, that comes from form, use it to
142 # in case there's new_password, that comes from form, use it to
143 # store password
143 # store password
144 if kwargs.get('new_password'):
144 if kwargs.get('new_password'):
145 kwargs['password'] = kwargs['new_password']
145 kwargs['password'] = kwargs['new_password']
146
146
147 # cleanups, my_account password change form
147 # cleanups, my_account password change form
148 kwargs.pop('current_password', None)
148 kwargs.pop('current_password', None)
149 kwargs.pop('new_password', None)
149 kwargs.pop('new_password', None)
150 kwargs.pop('new_password_confirmation', None)
150 kwargs.pop('new_password_confirmation', None)
151
151
152 # cleanups, user edit password change form
152 # cleanups, user edit password change form
153 kwargs.pop('password_confirmation', None)
153 kwargs.pop('password_confirmation', None)
154 kwargs.pop('password_change', None)
154 kwargs.pop('password_change', None)
155
155
156 # create repo group on user creation
156 # create repo group on user creation
157 kwargs.pop('create_repo_group', None)
157 kwargs.pop('create_repo_group', None)
158
158
159 # legacy forms send name, which is the firstname
159 # legacy forms send name, which is the firstname
160 firstname = kwargs.pop('name', None)
160 firstname = kwargs.pop('name', None)
161 if firstname:
161 if firstname:
162 kwargs['firstname'] = firstname
162 kwargs['firstname'] = firstname
163
163
164 for k, v in kwargs.items():
164 for k, v in kwargs.items():
165 # skip if we don't want to update this
165 # skip if we don't want to update this
166 if skip_attrs and k in skip_attrs:
166 if skip_attrs and k in skip_attrs:
167 continue
167 continue
168
168
169 user_attrs[k] = v
169 user_attrs[k] = v
170
170
171 try:
171 try:
172 return self.create_or_update(**user_attrs)
172 return self.create_or_update(**user_attrs)
173 except Exception:
173 except Exception:
174 log.error(traceback.format_exc())
174 log.error(traceback.format_exc())
175 raise
175 raise
176
176
177 def create_or_update(
177 def create_or_update(
178 self, username, password, email, firstname='', lastname='',
178 self, username, password, email, firstname='', lastname='',
179 active=True, admin=False, extern_type=None, extern_name=None,
179 active=True, admin=False, extern_type=None, extern_name=None,
180 cur_user=None, plugin=None, force_password_change=False,
180 cur_user=None, plugin=None, force_password_change=False,
181 allow_to_create_user=True, create_repo_group=False,
181 allow_to_create_user=True, create_repo_group=False,
182 updating_user_id=None, language=None, strict_creation_check=True):
182 updating_user_id=None, language=None, strict_creation_check=True):
183 """
183 """
184 Creates a new instance if not found, or updates current one
184 Creates a new instance if not found, or updates current one
185
185
186 :param username:
186 :param username:
187 :param password:
187 :param password:
188 :param email:
188 :param email:
189 :param firstname:
189 :param firstname:
190 :param lastname:
190 :param lastname:
191 :param active:
191 :param active:
192 :param admin:
192 :param admin:
193 :param extern_type:
193 :param extern_type:
194 :param extern_name:
194 :param extern_name:
195 :param cur_user:
195 :param cur_user:
196 :param plugin: optional plugin this method was called from
196 :param plugin: optional plugin this method was called from
197 :param force_password_change: toggles new or existing user flag
197 :param force_password_change: toggles new or existing user flag
198 for password change
198 for password change
199 :param allow_to_create_user: Defines if the method can actually create
199 :param allow_to_create_user: Defines if the method can actually create
200 new users
200 new users
201 :param create_repo_group: Defines if the method should also
201 :param create_repo_group: Defines if the method should also
202 create a repo group with the user's name, owned by that user
202 create a repo group with the user's name, owned by that user
203 :param updating_user_id: if set, this is the user we want to
203 :param updating_user_id: if set, this is the user we want to
204 update; this allows editing the username.
204 update; this allows editing the username.
205 :param language: language of user from interface.
205 :param language: language of user from interface.
206
206
207 :returns: new User object with injected `is_new_user` attribute.
207 :returns: new User object with injected `is_new_user` attribute.
208 """
208 """
209 if not cur_user:
209 if not cur_user:
210 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
210 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
211
211
212 from rhodecode.lib.auth import (
212 from rhodecode.lib.auth import (
213 get_crypt_password, check_password, generate_auth_token)
213 get_crypt_password, check_password, generate_auth_token)
214 from rhodecode.lib.hooks_base import (
214 from rhodecode.lib.hooks_base import (
215 log_create_user, check_allowed_create_user)
215 log_create_user, check_allowed_create_user)
216
216
217 def _password_change(new_user, password):
217 def _password_change(new_user, password):
218 # empty password
218 # empty password
219 if not new_user.password:
219 if not new_user.password:
220 return False
220 return False
221
221
222 # password check is only needed for RhodeCode internal auth calls
222 # password check is only needed for RhodeCode internal auth calls
223 # if the call comes from a plugin we don't care
223 # if the call comes from a plugin we don't care
224 if not plugin:
224 if not plugin:
225
225
226 # first check if we were given the crypted password back; if it matches,
226 # first check if we were given the crypted password back; if it matches,
227 # it's not a password change
227 # it's not a password change
228 if new_user.password == password:
228 if new_user.password == password:
229 return False
229 return False
230
230
231 password_match = check_password(password, new_user.password)
231 password_match = check_password(password, new_user.password)
232 if not password_match:
232 if not password_match:
233 return True
233 return True
234
234
235 return False
235 return False
236
236
237 user_data = {
237 user_data = {
238 'username': username,
238 'username': username,
239 'password': password,
239 'password': password,
240 'email': email,
240 'email': email,
241 'firstname': firstname,
241 'firstname': firstname,
242 'lastname': lastname,
242 'lastname': lastname,
243 'active': active,
243 'active': active,
244 'admin': admin
244 'admin': admin
245 }
245 }
246
246
247 if updating_user_id:
247 if updating_user_id:
248 log.debug('Checking for existing account in RhodeCode '
248 log.debug('Checking for existing account in RhodeCode '
249 'database with user_id `%s` ' % (updating_user_id,))
249 'database with user_id `%s` ' % (updating_user_id,))
250 user = User.get(updating_user_id)
250 user = User.get(updating_user_id)
251 else:
251 else:
252 log.debug('Checking for existing account in RhodeCode '
252 log.debug('Checking for existing account in RhodeCode '
253 'database with username `%s` ' % (username,))
253 'database with username `%s` ' % (username,))
254 user = User.get_by_username(username, case_insensitive=True)
254 user = User.get_by_username(username, case_insensitive=True)
255
255
256 if user is None:
256 if user is None:
257 # we check the internal flag to see if this method is actually allowed
257 # we check the internal flag to see if this method is actually allowed
258 # to create a new user
258 # to create a new user
259 if not allow_to_create_user:
259 if not allow_to_create_user:
260 msg = ('Method wants to create new user, but it is not '
260 msg = ('Method wants to create new user, but it is not '
261 'allowed to do so')
261 'allowed to do so')
262 log.warning(msg)
262 log.warning(msg)
263 raise NotAllowedToCreateUserError(msg)
263 raise NotAllowedToCreateUserError(msg)
264
264
265 log.debug('Creating new user %s', username)
265 log.debug('Creating new user %s', username)
266
266
267 # only if we create a user that is active
267 # only if we create a user that is active
268 new_active_user = active
268 new_active_user = active
269 if new_active_user and strict_creation_check:
269 if new_active_user and strict_creation_check:
270 # raises UserCreationError if it's not allowed for any reason to
270 # raises UserCreationError if it's not allowed for any reason to
271 # create a new active user; this also executes pre-create hooks
271 # create a new active user; this also executes pre-create hooks
272 check_allowed_create_user(user_data, cur_user, strict_check=True)
272 check_allowed_create_user(user_data, cur_user, strict_check=True)
273 self.send_event(UserPreCreate(user_data))
273 events.trigger(events.UserPreCreate(user_data))
274 new_user = User()
274 new_user = User()
275 edit = False
275 edit = False
276 else:
276 else:
277 log.debug('updating user %s', username)
277 log.debug('updating user %s', username)
278 self.send_event(UserPreUpdate(user, user_data))
278 events.trigger(events.UserPreUpdate(user, user_data))
279 new_user = user
279 new_user = user
280 edit = True
280 edit = True
281
281
282 # we're not allowed to edit default user
282 # we're not allowed to edit default user
283 if user.username == User.DEFAULT_USER:
283 if user.username == User.DEFAULT_USER:
284 raise DefaultUserException(
284 raise DefaultUserException(
285 _("You can't edit this user (`%(username)s`) since it's "
285 _("You can't edit this user (`%(username)s`) since it's "
286 "crucial for entire application") % {'username': user.username})
286 "crucial for entire application") % {'username': user.username})
287
287
288 # inject special attribute that will tell us if User is new or old
288 # inject special attribute that will tell us if User is new or old
289 new_user.is_new_user = not edit
289 new_user.is_new_user = not edit
290 # for users that didn't specify an auth type, we use the RhodeCode built-in
290 # for users that didn't specify an auth type, we use the RhodeCode built-in
291 from rhodecode.authentication.plugins import auth_rhodecode
291 from rhodecode.authentication.plugins import auth_rhodecode
292 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.name
292 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.name
293 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.name
293 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.name
294
294
295 try:
295 try:
296 new_user.username = username
296 new_user.username = username
297 new_user.admin = admin
297 new_user.admin = admin
298 new_user.email = email
298 new_user.email = email
299 new_user.active = active
299 new_user.active = active
300 new_user.extern_name = safe_unicode(extern_name)
300 new_user.extern_name = safe_unicode(extern_name)
301 new_user.extern_type = safe_unicode(extern_type)
301 new_user.extern_type = safe_unicode(extern_type)
302 new_user.name = firstname
302 new_user.name = firstname
303 new_user.lastname = lastname
303 new_user.lastname = lastname
304
304
305 if not edit:
305 if not edit:
306 new_user.api_key = generate_auth_token(username)
306 new_user.api_key = generate_auth_token(username)
307
307
308 # set the password only if creating a user or the password has changed
308 # set the password only if creating a user or the password has changed
309 if not edit or _password_change(new_user, password):
309 if not edit or _password_change(new_user, password):
310 reason = 'new password' if edit else 'new user'
310 reason = 'new password' if edit else 'new user'
311 log.debug('Updating password reason=>%s', reason)
311 log.debug('Updating password reason=>%s', reason)
312 new_user.password = get_crypt_password(password) if password else None
312 new_user.password = get_crypt_password(password) if password else None
313
313
314 if force_password_change:
314 if force_password_change:
315 new_user.update_userdata(force_password_change=True)
315 new_user.update_userdata(force_password_change=True)
316 if language:
316 if language:
317 new_user.update_userdata(language=language)
317 new_user.update_userdata(language=language)
318
318
319 self.sa.add(new_user)
319 self.sa.add(new_user)
320
320
321 if not edit and create_repo_group:
321 if not edit and create_repo_group:
322 # create a new group named after the username, and make this user its owner
322 # create a new group named after the username, and make this user its owner
323 desc = RepoGroupModel.PERSONAL_GROUP_DESC % {'username': username}
323 desc = RepoGroupModel.PERSONAL_GROUP_DESC % {'username': username}
324 RepoGroupModel().create(group_name=username,
324 RepoGroupModel().create(group_name=username,
325 group_description=desc,
325 group_description=desc,
326 owner=username, commit_early=False)
326 owner=username, commit_early=False)
327 if not edit:
327 if not edit:
328 # add the RSS token
328 # add the RSS token
329 AuthTokenModel().create(username,
329 AuthTokenModel().create(username,
330 description='Generated feed token',
330 description='Generated feed token',
331 role=AuthTokenModel.cls.ROLE_FEED)
331 role=AuthTokenModel.cls.ROLE_FEED)
332 log_create_user(created_by=cur_user, **new_user.get_dict())
332 log_create_user(created_by=cur_user, **new_user.get_dict())
333 return new_user
333 return new_user
334 except (DatabaseError,):
334 except (DatabaseError,):
335 log.error(traceback.format_exc())
335 log.error(traceback.format_exc())
336 raise
336 raise
337
337
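For orientation, here is a minimal sketch of a typical call to the method above; the account values are invented, and whether the caller commits the session afterwards depends on the surrounding transaction handling.

# Illustrative only: create (or update) an account through create_or_update().
new_user = UserModel().create_or_update(
    username='jdoe',                # invented example values
    password='secret',
    email='jdoe@example.com',
    firstname='John',
    lastname='Doe',
    create_repo_group=True)         # also creates a personal repo group
Session().commit()                  # committing is left to the caller here
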
338 def create_registration(self, form_data):
338 def create_registration(self, form_data):
339 from rhodecode.model.notification import NotificationModel
339 from rhodecode.model.notification import NotificationModel
340 from rhodecode.model.notification import EmailNotificationModel
340 from rhodecode.model.notification import EmailNotificationModel
341
341
342 try:
342 try:
343 form_data['admin'] = False
343 form_data['admin'] = False
344 form_data['extern_name'] = 'rhodecode'
344 form_data['extern_name'] = 'rhodecode'
345 form_data['extern_type'] = 'rhodecode'
345 form_data['extern_type'] = 'rhodecode'
346 new_user = self.create(form_data)
346 new_user = self.create(form_data)
347
347
348 self.sa.add(new_user)
348 self.sa.add(new_user)
349 self.sa.flush()
349 self.sa.flush()
350
350
351 user_data = new_user.get_dict()
351 user_data = new_user.get_dict()
352 kwargs = {
352 kwargs = {
353 # use a SQLAlchemy-safe dump of the user data
353 # use a SQLAlchemy-safe dump of the user data
354 'user': AttributeDict(user_data),
354 'user': AttributeDict(user_data),
355 'date': datetime.datetime.now()
355 'date': datetime.datetime.now()
356 }
356 }
357 notification_type = EmailNotificationModel.TYPE_REGISTRATION
357 notification_type = EmailNotificationModel.TYPE_REGISTRATION
358 # pre-generate the subject for notification itself
358 # pre-generate the subject for notification itself
359 (subject,
359 (subject,
360 _h, _e, # we don't care about those
360 _h, _e, # we don't care about those
361 body_plaintext) = EmailNotificationModel().render_email(
361 body_plaintext) = EmailNotificationModel().render_email(
362 notification_type, **kwargs)
362 notification_type, **kwargs)
363
363
364 # create notification objects, and emails
364 # create notification objects, and emails
365 NotificationModel().create(
365 NotificationModel().create(
366 created_by=new_user,
366 created_by=new_user,
367 notification_subject=subject,
367 notification_subject=subject,
368 notification_body=body_plaintext,
368 notification_body=body_plaintext,
369 notification_type=notification_type,
369 notification_type=notification_type,
370 recipients=None, # all admins
370 recipients=None, # all admins
371 email_kwargs=kwargs,
371 email_kwargs=kwargs,
372 )
372 )
373
373
374 return new_user
374 return new_user
375 except Exception:
375 except Exception:
376 log.error(traceback.format_exc())
376 log.error(traceback.format_exc())
377 raise
377 raise
378
378
379 def _handle_user_repos(self, username, repositories, handle_mode=None):
379 def _handle_user_repos(self, username, repositories, handle_mode=None):
380 _superadmin = self.cls.get_first_super_admin()
380 _superadmin = self.cls.get_first_super_admin()
381 left_overs = True
381 left_overs = True
382
382
383 from rhodecode.model.repo import RepoModel
383 from rhodecode.model.repo import RepoModel
384
384
385 if handle_mode == 'detach':
385 if handle_mode == 'detach':
386 for obj in repositories:
386 for obj in repositories:
387 obj.user = _superadmin
387 obj.user = _superadmin
388 # set a description so we know why the super admin now owns
388 # set a description so we know why the super admin now owns
389 # additional repositories that were orphaned
389 # additional repositories that were orphaned
390 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
390 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
391 self.sa.add(obj)
391 self.sa.add(obj)
392 left_overs = False
392 left_overs = False
393 elif handle_mode == 'delete':
393 elif handle_mode == 'delete':
394 for obj in repositories:
394 for obj in repositories:
395 RepoModel().delete(obj, forks='detach')
395 RepoModel().delete(obj, forks='detach')
396 left_overs = False
396 left_overs = False
397
397
398 # if nothing was done we still have leftovers
398 # if nothing was done we still have leftovers
399 return left_overs
399 return left_overs
400
400
401 def _handle_user_repo_groups(self, username, repository_groups,
401 def _handle_user_repo_groups(self, username, repository_groups,
402 handle_mode=None):
402 handle_mode=None):
403 _superadmin = self.cls.get_first_super_admin()
403 _superadmin = self.cls.get_first_super_admin()
404 left_overs = True
404 left_overs = True
405
405
406 from rhodecode.model.repo_group import RepoGroupModel
406 from rhodecode.model.repo_group import RepoGroupModel
407
407
408 if handle_mode == 'detach':
408 if handle_mode == 'detach':
409 for r in repository_groups:
409 for r in repository_groups:
410 r.user = _superadmin
410 r.user = _superadmin
411 # set a description so we know why the super admin now owns
411 # set a description so we know why the super admin now owns
412 # additional repository groups that were orphaned
412 # additional repository groups that were orphaned
413 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
413 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
414 self.sa.add(r)
414 self.sa.add(r)
415 left_overs = False
415 left_overs = False
416 elif handle_mode == 'delete':
416 elif handle_mode == 'delete':
417 for r in repository_groups:
417 for r in repository_groups:
418 RepoGroupModel().delete(r)
418 RepoGroupModel().delete(r)
419 left_overs = False
419 left_overs = False
420
420
421 # if nothing was done we still have leftovers
421 # if nothing was done we still have leftovers
422 return left_overs
422 return left_overs
423
423
424 def _handle_user_user_groups(self, username, user_groups, handle_mode=None):
424 def _handle_user_user_groups(self, username, user_groups, handle_mode=None):
425 _superadmin = self.cls.get_first_super_admin()
425 _superadmin = self.cls.get_first_super_admin()
426 left_overs = True
426 left_overs = True
427
427
428 from rhodecode.model.user_group import UserGroupModel
428 from rhodecode.model.user_group import UserGroupModel
429
429
430 if handle_mode == 'detach':
430 if handle_mode == 'detach':
431 for r in user_groups:
431 for r in user_groups:
432 for user_user_group_to_perm in r.user_user_group_to_perm:
432 for user_user_group_to_perm in r.user_user_group_to_perm:
433 if user_user_group_to_perm.user.username == username:
433 if user_user_group_to_perm.user.username == username:
434 user_user_group_to_perm.user = _superadmin
434 user_user_group_to_perm.user = _superadmin
435 r.user = _superadmin
435 r.user = _superadmin
436 # set a description so we know why the super admin now owns
436 # set a description so we know why the super admin now owns
437 # additional user groups that were orphaned
437 # additional user groups that were orphaned
438 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
438 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
439 self.sa.add(r)
439 self.sa.add(r)
440 left_overs = False
440 left_overs = False
441 elif handle_mode == 'delete':
441 elif handle_mode == 'delete':
442 for r in user_groups:
442 for r in user_groups:
443 UserGroupModel().delete(r)
443 UserGroupModel().delete(r)
444 left_overs = False
444 left_overs = False
445
445
446 # if nothing was done we still have leftovers
446 # if nothing was done we still have leftovers
447 return left_overs
447 return left_overs
448
448
449 def delete(self, user, cur_user=None, handle_repos=None,
449 def delete(self, user, cur_user=None, handle_repos=None,
450 handle_repo_groups=None, handle_user_groups=None):
450 handle_repo_groups=None, handle_user_groups=None):
451 if not cur_user:
451 if not cur_user:
452 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
452 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
453 user = self._get_user(user)
453 user = self._get_user(user)
454
454
455 try:
455 try:
456 if user.username == User.DEFAULT_USER:
456 if user.username == User.DEFAULT_USER:
457 raise DefaultUserException(
457 raise DefaultUserException(
458 _(u"You can't remove this user since it's"
458 _(u"You can't remove this user since it's"
459 u" crucial for entire application"))
459 u" crucial for entire application"))
460
460
461 left_overs = self._handle_user_repos(
461 left_overs = self._handle_user_repos(
462 user.username, user.repositories, handle_repos)
462 user.username, user.repositories, handle_repos)
463 if left_overs and user.repositories:
463 if left_overs and user.repositories:
464 repos = [x.repo_name for x in user.repositories]
464 repos = [x.repo_name for x in user.repositories]
465 raise UserOwnsReposException(
465 raise UserOwnsReposException(
466 _(u'user "%s" still owns %s repositories and cannot be '
466 _(u'user "%s" still owns %s repositories and cannot be '
467 u'removed. Switch owners or remove those repositories:%s')
467 u'removed. Switch owners or remove those repositories:%s')
468 % (user.username, len(repos), ', '.join(repos)))
468 % (user.username, len(repos), ', '.join(repos)))
469
469
470 left_overs = self._handle_user_repo_groups(
470 left_overs = self._handle_user_repo_groups(
471 user.username, user.repository_groups, handle_repo_groups)
471 user.username, user.repository_groups, handle_repo_groups)
472 if left_overs and user.repository_groups:
472 if left_overs and user.repository_groups:
473 repo_groups = [x.group_name for x in user.repository_groups]
473 repo_groups = [x.group_name for x in user.repository_groups]
474 raise UserOwnsRepoGroupsException(
474 raise UserOwnsRepoGroupsException(
475 _(u'user "%s" still owns %s repository groups and cannot be '
475 _(u'user "%s" still owns %s repository groups and cannot be '
476 u'removed. Switch owners or remove those repository groups:%s')
476 u'removed. Switch owners or remove those repository groups:%s')
477 % (user.username, len(repo_groups), ', '.join(repo_groups)))
477 % (user.username, len(repo_groups), ', '.join(repo_groups)))
478
478
479 left_overs = self._handle_user_user_groups(
479 left_overs = self._handle_user_user_groups(
480 user.username, user.user_groups, handle_user_groups)
480 user.username, user.user_groups, handle_user_groups)
481 if left_overs and user.user_groups:
481 if left_overs and user.user_groups:
482 user_groups = [x.users_group_name for x in user.user_groups]
482 user_groups = [x.users_group_name for x in user.user_groups]
483 raise UserOwnsUserGroupsException(
483 raise UserOwnsUserGroupsException(
484 _(u'user "%s" still owns %s user groups and cannot be '
484 _(u'user "%s" still owns %s user groups and cannot be '
485 u'removed. Switch owners or remove those user groups:%s')
485 u'removed. Switch owners or remove those user groups:%s')
486 % (user.username, len(user_groups), ', '.join(user_groups)))
486 % (user.username, len(user_groups), ', '.join(user_groups)))
487
487
488 # we might change the user data with detach/delete, make sure
488 # we might change the user data with detach/delete, make sure
489 # the object is marked as expired before actually deleting it
489 # the object is marked as expired before actually deleting it
490 self.sa.expire(user)
490 self.sa.expire(user)
491 self.sa.delete(user)
491 self.sa.delete(user)
492 from rhodecode.lib.hooks_base import log_delete_user
492 from rhodecode.lib.hooks_base import log_delete_user
493 log_delete_user(deleted_by=cur_user, **user.get_dict())
493 log_delete_user(deleted_by=cur_user, **user.get_dict())
494 except Exception:
494 except Exception:
495 log.error(traceback.format_exc())
495 log.error(traceback.format_exc())
496 raise
496 raise
497
497
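The handle_* arguments above accept 'detach' or 'delete'; anything left unhandled raises one of the UserOwns* exceptions. A brief, illustrative sketch of a detaching delete follows (the username is made up, and committing is assumed to happen in the caller):

# Illustrative only: remove an account and hand everything it owns over to
# the first super-admin, via the 'detach' branches implemented above.
UserModel().delete(
    'jdoe',                        # user instance, id or username
    handle_repos='detach',
    handle_repo_groups='detach',
    handle_user_groups='detach')
Session().commit()
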
498 def reset_password_link(self, data, pwd_reset_url):
498 def reset_password_link(self, data, pwd_reset_url):
499 from rhodecode.lib.celerylib import tasks, run_task
499 from rhodecode.lib.celerylib import tasks, run_task
500 from rhodecode.model.notification import EmailNotificationModel
500 from rhodecode.model.notification import EmailNotificationModel
501 user_email = data['email']
501 user_email = data['email']
502 try:
502 try:
503 user = User.get_by_email(user_email)
503 user = User.get_by_email(user_email)
504 if user:
504 if user:
505 log.debug('password reset user found %s', user)
505 log.debug('password reset user found %s', user)
506
506
507 email_kwargs = {
507 email_kwargs = {
508 'password_reset_url': pwd_reset_url,
508 'password_reset_url': pwd_reset_url,
509 'user': user,
509 'user': user,
510 'email': user_email,
510 'email': user_email,
511 'date': datetime.datetime.now()
511 'date': datetime.datetime.now()
512 }
512 }
513
513
514 (subject, headers, email_body,
514 (subject, headers, email_body,
515 email_body_plaintext) = EmailNotificationModel().render_email(
515 email_body_plaintext) = EmailNotificationModel().render_email(
516 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
516 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
517
517
518 recipients = [user_email]
518 recipients = [user_email]
519
519
520 action_logger_generic(
520 action_logger_generic(
521 'sending password reset email to user: {}'.format(
521 'sending password reset email to user: {}'.format(
522 user), namespace='security.password_reset')
522 user), namespace='security.password_reset')
523
523
524 run_task(tasks.send_email, recipients, subject,
524 run_task(tasks.send_email, recipients, subject,
525 email_body_plaintext, email_body)
525 email_body_plaintext, email_body)
526
526
527 else:
527 else:
528 log.debug("password reset email %s not found", user_email)
528 log.debug("password reset email %s not found", user_email)
529 except Exception:
529 except Exception:
530 log.error(traceback.format_exc())
530 log.error(traceback.format_exc())
531 return False
531 return False
532
532
533 return True
533 return True
534
534
535 def reset_password(self, data):
535 def reset_password(self, data):
536 from rhodecode.lib.celerylib import tasks, run_task
536 from rhodecode.lib.celerylib import tasks, run_task
537 from rhodecode.model.notification import EmailNotificationModel
537 from rhodecode.model.notification import EmailNotificationModel
538 from rhodecode.lib import auth
538 from rhodecode.lib import auth
539 user_email = data['email']
539 user_email = data['email']
540 pre_db = True
540 pre_db = True
541 try:
541 try:
542 user = User.get_by_email(user_email)
542 user = User.get_by_email(user_email)
543 new_passwd = auth.PasswordGenerator().gen_password(
543 new_passwd = auth.PasswordGenerator().gen_password(
544 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
544 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
545 if user:
545 if user:
546 user.password = auth.get_crypt_password(new_passwd)
546 user.password = auth.get_crypt_password(new_passwd)
547 # also force this user to reset their password
547 # also force this user to reset their password
548 user.update_userdata(force_password_change=True)
548 user.update_userdata(force_password_change=True)
549
549
550 Session().add(user)
550 Session().add(user)
551 Session().commit()
551 Session().commit()
552 log.info('change password for %s', user_email)
552 log.info('change password for %s', user_email)
553 if new_passwd is None:
553 if new_passwd is None:
554 raise Exception('unable to generate new password')
554 raise Exception('unable to generate new password')
555
555
556 pre_db = False
556 pre_db = False
557
557
558 email_kwargs = {
558 email_kwargs = {
559 'new_password': new_passwd,
559 'new_password': new_passwd,
560 'user': user,
560 'user': user,
561 'email': user_email,
561 'email': user_email,
562 'date': datetime.datetime.now()
562 'date': datetime.datetime.now()
563 }
563 }
564
564
565 (subject, headers, email_body,
565 (subject, headers, email_body,
566 email_body_plaintext) = EmailNotificationModel().render_email(
566 email_body_plaintext) = EmailNotificationModel().render_email(
567 EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION, **email_kwargs)
567 EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION, **email_kwargs)
568
568
569 recipients = [user_email]
569 recipients = [user_email]
570
570
571 action_logger_generic(
571 action_logger_generic(
572 'sent new password to user: {} with email: {}'.format(
572 'sent new password to user: {} with email: {}'.format(
573 user, user_email), namespace='security.password_reset')
573 user, user_email), namespace='security.password_reset')
574
574
575 run_task(tasks.send_email, recipients, subject,
575 run_task(tasks.send_email, recipients, subject,
576 email_body_plaintext, email_body)
576 email_body_plaintext, email_body)
577
577
578 except Exception:
578 except Exception:
579 log.error('Failed to update user password')
579 log.error('Failed to update user password')
580 log.error(traceback.format_exc())
580 log.error(traceback.format_exc())
581 if pre_db:
581 if pre_db:
582 # we roll back only if the local db operations fail. Once it goes into
582 # we roll back only if the local db operations fail. Once it goes into
583 # run_task we are past the rollback state, so rolling back would not help
583 # run_task we are past the rollback state, so rolling back would not help
584 Session().rollback()
584 Session().rollback()
585
585
586 return True
586 return True
587
587
588 def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
588 def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
589 """
589 """
590 Fetches auth_user by user_id, or api_key if present.
590 Fetches auth_user by user_id, or api_key if present.
591 Fills auth_user attributes with those taken from the database.
591 Fills auth_user attributes with those taken from the database.
592 Additionally sets is_authenticated to False if the lookup fails,
592 Additionally sets is_authenticated to False if the lookup fails,
593 i.e. the user is not present in the database.
593 i.e. the user is not present in the database.
594
594
595 :param auth_user: instance of user to set attributes
595 :param auth_user: instance of user to set attributes
596 :param user_id: user id to fetch by
596 :param user_id: user id to fetch by
597 :param api_key: api key to fetch by
597 :param api_key: api key to fetch by
598 :param username: username to fetch by
598 :param username: username to fetch by
599 """
599 """
600 if user_id is None and api_key is None and username is None:
600 if user_id is None and api_key is None and username is None:
601 raise Exception('You need to pass user_id, api_key or username')
601 raise Exception('You need to pass user_id, api_key or username')
602
602
603 log.debug(
603 log.debug(
604 'doing fill data based on: user_id:%s api_key:%s username:%s',
604 'doing fill data based on: user_id:%s api_key:%s username:%s',
605 user_id, api_key, username)
605 user_id, api_key, username)
606 try:
606 try:
607 dbuser = None
607 dbuser = None
608 if user_id:
608 if user_id:
609 dbuser = self.get(user_id)
609 dbuser = self.get(user_id)
610 elif api_key:
610 elif api_key:
611 dbuser = self.get_by_auth_token(api_key)
611 dbuser = self.get_by_auth_token(api_key)
612 elif username:
612 elif username:
613 dbuser = self.get_by_username(username)
613 dbuser = self.get_by_username(username)
614
614
615 if not dbuser:
615 if not dbuser:
616 log.warning(
616 log.warning(
617 'Unable to lookup user by id:%s api_key:%s username:%s',
617 'Unable to lookup user by id:%s api_key:%s username:%s',
618 user_id, api_key, username)
618 user_id, api_key, username)
619 return False
619 return False
620 if not dbuser.active:
620 if not dbuser.active:
621 log.debug('User `%s` is inactive, skipping fill data', username)
621 log.debug('User `%s` is inactive, skipping fill data', username)
622 return False
622 return False
623
623
624 log.debug('filling user:%s data', dbuser)
624 log.debug('filling user:%s data', dbuser)
625
625
626 # TODO: johbo: Think about this and find a clean solution
626 # TODO: johbo: Think about this and find a clean solution
627 user_data = dbuser.get_dict()
627 user_data = dbuser.get_dict()
628 user_data.update(dbuser.get_api_data(include_secrets=True))
628 user_data.update(dbuser.get_api_data(include_secrets=True))
629
629
630 for k, v in user_data.iteritems():
630 for k, v in user_data.iteritems():
631 # properties of auth user we don't update
631 # properties of auth user we don't update
632 if k not in ['auth_tokens', 'permissions']:
632 if k not in ['auth_tokens', 'permissions']:
633 setattr(auth_user, k, v)
633 setattr(auth_user, k, v)
634
634
635 # few extras
635 # few extras
636 setattr(auth_user, 'feed_token', dbuser.feed_token)
636 setattr(auth_user, 'feed_token', dbuser.feed_token)
637 except Exception:
637 except Exception:
638 log.error(traceback.format_exc())
638 log.error(traceback.format_exc())
639 auth_user.is_authenticated = False
639 auth_user.is_authenticated = False
640 return False
640 return False
641
641
642 return True
642 return True
643
643
644 def has_perm(self, user, perm):
644 def has_perm(self, user, perm):
645 perm = self._get_perm(perm)
645 perm = self._get_perm(perm)
646 user = self._get_user(user)
646 user = self._get_user(user)
647
647
648 return UserToPerm.query().filter(UserToPerm.user == user)\
648 return UserToPerm.query().filter(UserToPerm.user == user)\
649 .filter(UserToPerm.permission == perm).scalar() is not None
649 .filter(UserToPerm.permission == perm).scalar() is not None
650
650
651 def grant_perm(self, user, perm):
651 def grant_perm(self, user, perm):
652 """
652 """
653 Grant user global permissions
653 Grant user global permissions
654
654
655 :param user:
655 :param user:
656 :param perm:
656 :param perm:
657 """
657 """
658 user = self._get_user(user)
658 user = self._get_user(user)
659 perm = self._get_perm(perm)
659 perm = self._get_perm(perm)
660 # if this permission is already granted skip it
660 # if this permission is already granted skip it
661 _perm = UserToPerm.query()\
661 _perm = UserToPerm.query()\
662 .filter(UserToPerm.user == user)\
662 .filter(UserToPerm.user == user)\
663 .filter(UserToPerm.permission == perm)\
663 .filter(UserToPerm.permission == perm)\
664 .scalar()
664 .scalar()
665 if _perm:
665 if _perm:
666 return
666 return
667 new = UserToPerm()
667 new = UserToPerm()
668 new.user = user
668 new.user = user
669 new.permission = perm
669 new.permission = perm
670 self.sa.add(new)
670 self.sa.add(new)
671 return new
671 return new
672
672
673 def revoke_perm(self, user, perm):
673 def revoke_perm(self, user, perm):
674 """
674 """
675 Revoke users global permissions
675 Revoke users global permissions
676
676
677 :param user:
677 :param user:
678 :param perm:
678 :param perm:
679 """
679 """
680 user = self._get_user(user)
680 user = self._get_user(user)
681 perm = self._get_perm(perm)
681 perm = self._get_perm(perm)
682
682
683 obj = UserToPerm.query()\
683 obj = UserToPerm.query()\
684 .filter(UserToPerm.user == user)\
684 .filter(UserToPerm.user == user)\
685 .filter(UserToPerm.permission == perm)\
685 .filter(UserToPerm.permission == perm)\
686 .scalar()
686 .scalar()
687 if obj:
687 if obj:
688 self.sa.delete(obj)
688 self.sa.delete(obj)
689
689
690 def add_extra_email(self, user, email):
690 def add_extra_email(self, user, email):
691 """
691 """
692 Adds email address to UserEmailMap
692 Adds email address to UserEmailMap
693
693
694 :param user:
694 :param user:
695 :param email:
695 :param email:
696 """
696 """
697 from rhodecode.model import forms
697 from rhodecode.model import forms
698 form = forms.UserExtraEmailForm()()
698 form = forms.UserExtraEmailForm()()
699 data = form.to_python({'email': email})
699 data = form.to_python({'email': email})
700 user = self._get_user(user)
700 user = self._get_user(user)
701
701
702 obj = UserEmailMap()
702 obj = UserEmailMap()
703 obj.user = user
703 obj.user = user
704 obj.email = data['email']
704 obj.email = data['email']
705 self.sa.add(obj)
705 self.sa.add(obj)
706 return obj
706 return obj
707
707
708 def delete_extra_email(self, user, email_id):
708 def delete_extra_email(self, user, email_id):
709 """
709 """
710 Removes email address from UserEmailMap
710 Removes email address from UserEmailMap
711
711
712 :param user:
712 :param user:
713 :param email_id:
713 :param email_id:
714 """
714 """
715 user = self._get_user(user)
715 user = self._get_user(user)
716 obj = UserEmailMap.query().get(email_id)
716 obj = UserEmailMap.query().get(email_id)
717 if obj:
717 if obj:
718 self.sa.delete(obj)
718 self.sa.delete(obj)
719
719
720 def parse_ip_range(self, ip_range):
720 def parse_ip_range(self, ip_range):
721 ip_list = []
721 ip_list = []
722 def make_unique(value):
722 def make_unique(value):
723 seen = []
723 seen = []
724 return [c for c in value if not (c in seen or seen.append(c))]
724 return [c for c in value if not (c in seen or seen.append(c))]
725
725
726 # first split by commas
726 # first split by commas
727 for ip_range in ip_range.split(','):
727 for ip_range in ip_range.split(','):
728 if not ip_range:
728 if not ip_range:
729 continue
729 continue
730 ip_range = ip_range.strip()
730 ip_range = ip_range.strip()
731 if '-' in ip_range:
731 if '-' in ip_range:
732 start_ip, end_ip = ip_range.split('-', 1)
732 start_ip, end_ip = ip_range.split('-', 1)
733 start_ip = ipaddress.ip_address(start_ip.strip())
733 start_ip = ipaddress.ip_address(start_ip.strip())
734 end_ip = ipaddress.ip_address(end_ip.strip())
734 end_ip = ipaddress.ip_address(end_ip.strip())
735 parsed_ip_range = []
735 parsed_ip_range = []
736
736
737 for index in xrange(int(start_ip), int(end_ip) + 1):
737 for index in xrange(int(start_ip), int(end_ip) + 1):
738 new_ip = ipaddress.ip_address(index)
738 new_ip = ipaddress.ip_address(index)
739 parsed_ip_range.append(str(new_ip))
739 parsed_ip_range.append(str(new_ip))
740 ip_list.extend(parsed_ip_range)
740 ip_list.extend(parsed_ip_range)
741 else:
741 else:
742 ip_list.append(ip_range)
742 ip_list.append(ip_range)
743
743
744 return make_unique(ip_list)
744 return make_unique(ip_list)
745
745
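As a quick illustration of the parsing above: entries are split on commas, dash ranges are expanded into individual addresses, and duplicates are dropped while the order is preserved. A sketch only; under Python 2 the ipaddress backport expects unicode input, hence the u'' literal:

# Illustrative only: expand a mixed list of single IPs and a dash range.
ips = UserModel().parse_ip_range(
    u'192.168.1.10-192.168.1.12, 10.0.0.1, 10.0.0.1')
# roughly: ['192.168.1.10', '192.168.1.11', '192.168.1.12', '10.0.0.1']
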
746 def add_extra_ip(self, user, ip, description=None):
746 def add_extra_ip(self, user, ip, description=None):
747 """
747 """
748 Adds ip address to UserIpMap
748 Adds ip address to UserIpMap
749
749
750 :param user:
750 :param user:
751 :param ip:
751 :param ip:
752 """
752 """
753 from rhodecode.model import forms
753 from rhodecode.model import forms
754 form = forms.UserExtraIpForm()()
754 form = forms.UserExtraIpForm()()
755 data = form.to_python({'ip': ip})
755 data = form.to_python({'ip': ip})
756 user = self._get_user(user)
756 user = self._get_user(user)
757
757
758 obj = UserIpMap()
758 obj = UserIpMap()
759 obj.user = user
759 obj.user = user
760 obj.ip_addr = data['ip']
760 obj.ip_addr = data['ip']
761 obj.description = description
761 obj.description = description
762 self.sa.add(obj)
762 self.sa.add(obj)
763 return obj
763 return obj
764
764
765 def delete_extra_ip(self, user, ip_id):
765 def delete_extra_ip(self, user, ip_id):
766 """
766 """
767 Removes ip address from UserIpMap
767 Removes ip address from UserIpMap
768
768
769 :param user:
769 :param user:
770 :param ip_id:
770 :param ip_id:
771 """
771 """
772 user = self._get_user(user)
772 user = self._get_user(user)
773 obj = UserIpMap.query().get(ip_id)
773 obj = UserIpMap.query().get(ip_id)
774 if obj:
774 if obj:
775 self.sa.delete(obj)
775 self.sa.delete(obj)
776
776
777 def get_accounts_in_creation_order(self, current_user=None):
777 def get_accounts_in_creation_order(self, current_user=None):
778 """
778 """
779 Get accounts in order of creation, for deactivation under license limits
779 Get accounts in order of creation, for deactivation under license limits
780
780
781 pick the currently logged in user and append it to the list at position 0
781 pick the currently logged in user and append it to the list at position 0
782 pick all super-admins in order of creation date and add them to the list
782 pick all super-admins in order of creation date and add them to the list
783 pick all other accounts in order of creation and add them to the list.
783 pick all other accounts in order of creation and add them to the list.
784
784
785 Based on that list, the accounts at the end can be disabled: they were
785 Based on that list, the accounts at the end can be disabled: they were
786 created most recently and never include any of the super-admins or
786 created most recently and never include any of the super-admins or
787 the current user.
787 the current user.
788
788
789 :param current_user: optionally current user running this operation
789 :param current_user: optionally current user running this operation
790 """
790 """
791
791
792 if not current_user:
792 if not current_user:
793 current_user = get_current_rhodecode_user()
793 current_user = get_current_rhodecode_user()
794 active_super_admins = [
794 active_super_admins = [
795 x.user_id for x in User.query()
795 x.user_id for x in User.query()
796 .filter(User.user_id != current_user.user_id)
796 .filter(User.user_id != current_user.user_id)
797 .filter(User.active == true())
797 .filter(User.active == true())
798 .filter(User.admin == true())
798 .filter(User.admin == true())
799 .order_by(User.created_on.asc())]
799 .order_by(User.created_on.asc())]
800
800
801 active_regular_users = [
801 active_regular_users = [
802 x.user_id for x in User.query()
802 x.user_id for x in User.query()
803 .filter(User.user_id != current_user.user_id)
803 .filter(User.user_id != current_user.user_id)
804 .filter(User.active == true())
804 .filter(User.active == true())
805 .filter(User.admin == false())
805 .filter(User.admin == false())
806 .order_by(User.created_on.asc())]
806 .order_by(User.created_on.asc())]
807
807
808 list_of_accounts = [current_user.user_id]
808 list_of_accounts = [current_user.user_id]
809 list_of_accounts += active_super_admins
809 list_of_accounts += active_super_admins
810 list_of_accounts += active_regular_users
810 list_of_accounts += active_regular_users
811
811
812 return list_of_accounts
812 return list_of_accounts
813
813
814 def deactivate_last_users(self, expected_users):
814 def deactivate_last_users(self, expected_users):
815 """
815 """
816 Deactivate accounts that are over the license limits.
816 Deactivate accounts that are over the license limits.
817 The algorithm for deciding which accounts to disable is as follows:
817 The algorithm for deciding which accounts to disable is as follows:
818
818
819 Get current user, then super admins in creation order, then regular
819 Get current user, then super admins in creation order, then regular
820 active users in creation order.
820 active users in creation order.
821
821
822 Using that list we mark all accounts from the end of it as inactive.
822 Using that list we mark all accounts from the end of it as inactive.
823 This way we block only the most recently created accounts.
823 This way we block only the most recently created accounts.
824
824
825 :param expected_users: the number of users allowed; we deactivate
825 :param expected_users: the number of users allowed; we deactivate
826 the accounts beyond that count from the end of the ordered list
826 the accounts beyond that count from the end of the ordered list
827 """
827 """
828
828
829 list_of_accounts = self.get_accounts_in_creation_order()
829 list_of_accounts = self.get_accounts_in_creation_order()
830
830
831 for acc_id in list_of_accounts[expected_users + 1:]:
831 for acc_id in list_of_accounts[expected_users + 1:]:
832 user = User.get(acc_id)
832 user = User.get(acc_id)
833 log.info('Deactivating account %s for license unlock', user)
833 log.info('Deactivating account %s for license unlock', user)
834 user.active = False
834 user.active = False
835 Session().add(user)
835 Session().add(user)
836 Session().commit()
836 Session().commit()
837
837
838 return
838 return
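The events.trigger(events.UserPreCreate(...)) and events.UserPreUpdate(...) calls introduced above publish through Pyramid's registry, so other code can react with an ordinary Pyramid subscriber. A minimal sketch, assuming a Configurator is available during application setup; the handler and includeme names are illustrative and not part of this changeset:

# Hypothetical subscriber wiring for the new user events.
from rhodecode import events


def on_user_pre_create(event):
    # ``event`` is the UserPreCreate instance passed to events.trigger()
    print('received event: %r' % event)


def includeme(config):
    # Pyramid dispatches notified events to subscribers by event type.
    config.add_subscriber(on_user_pre_create, events.UserPreCreate)
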
@@ -1,53 +1,53 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22
22
23 from rhodecode.lib import hooks_base, utils2
23 from rhodecode.lib import hooks_base, utils2
24
24
25
25
26 @mock.patch.multiple(
26 @mock.patch.multiple(
27 hooks_base,
27 hooks_base,
28 action_logger=mock.Mock(),
28 action_logger=mock.Mock(),
29 post_push_extension=mock.Mock(),
29 post_push_extension=mock.Mock(),
30 Repository=mock.Mock())
30 Repository=mock.Mock())
31 def test_post_push_truncates_commits():
31 def test_post_push_truncates_commits(user_regular, repo_stub):
32 extras = {
32 extras = {
33 'ip': '127.0.0.1',
33 'ip': '127.0.0.1',
34 'username': 'test',
34 'username': user_regular.username,
35 'action': 'push_local',
35 'action': 'push_local',
36 'repository': 'test',
36 'repository': repo_stub.repo_name,
37 'scm': 'git',
37 'scm': 'git',
38 'config': '',
38 'config': '',
39 'server_url': 'http://example.com',
39 'server_url': 'http://example.com',
40 'make_lock': None,
40 'make_lock': None,
41 'locked_by': [None],
41 'locked_by': [None],
42 'commit_ids': ['abcde12345' * 4] * 30000,
42 'commit_ids': ['abcde12345' * 4] * 30000,
43 }
43 }
44 extras = utils2.AttributeDict(extras)
44 extras = utils2.AttributeDict(extras)
45
45
46 hooks_base.post_push(extras)
46 hooks_base.post_push(extras)
47
47
48 # Calculate appropriate action string here
48 # Calculate appropriate action string here
49 expected_action = 'push_local:%s' % ','.join(extras.commit_ids[:29000])
49 expected_action = 'push_local:%s' % ','.join(extras.commit_ids[:29000])
50
50
51 hooks_base.action_logger.assert_called_with(
51 hooks_base.action_logger.assert_called_with(
52 extras.username, expected_action, extras.repository, extras.ip,
52 extras.username, expected_action, extras.repository, extras.ip,
53 commit=True)
53 commit=True)
@@ -1,821 +1,826 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2016 RhodeCode GmbH
3 # Copyright (C) 2010-2016 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import textwrap
23 import textwrap
24
24
25 import rhodecode
25 import rhodecode
26 from rhodecode.lib.utils2 import safe_unicode
26 from rhodecode.lib.utils2 import safe_unicode
27 from rhodecode.lib.vcs.backends import get_backend
27 from rhodecode.lib.vcs.backends import get_backend
28 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
28 from rhodecode.lib.vcs.backends.base import MergeResponse, MergeFailureReason
29 from rhodecode.lib.vcs.exceptions import RepositoryError
29 from rhodecode.lib.vcs.exceptions import RepositoryError
30 from rhodecode.lib.vcs.nodes import FileNode
30 from rhodecode.lib.vcs.nodes import FileNode
31 from rhodecode.model.comment import ChangesetCommentsModel
31 from rhodecode.model.comment import ChangesetCommentsModel
32 from rhodecode.model.db import PullRequest, Session
32 from rhodecode.model.db import PullRequest, Session
33 from rhodecode.model.pull_request import PullRequestModel
33 from rhodecode.model.pull_request import PullRequestModel
34 from rhodecode.model.user import UserModel
34 from rhodecode.model.user import UserModel
35 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
35 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
36
36
37
37
38 pytestmark = [
38 pytestmark = [
39 pytest.mark.backends("git", "hg"),
39 pytest.mark.backends("git", "hg"),
40 ]
40 ]
41
41
42
42
43 class TestPullRequestModel:
43 class TestPullRequestModel:
44
44
45 @pytest.fixture
45 @pytest.fixture
46 def pull_request(self, request, backend, pr_util):
46 def pull_request(self, request, backend, pr_util):
47 """
47 """
48 A pull request combined with multiple patches.
48 A pull request combined with multiple patches.
49 """
49 """
50 BackendClass = get_backend(backend.alias)
50 BackendClass = get_backend(backend.alias)
51 self.merge_patcher = mock.patch.object(BackendClass, 'merge')
51 self.merge_patcher = mock.patch.object(BackendClass, 'merge')
52 self.workspace_remove_patcher = mock.patch.object(
52 self.workspace_remove_patcher = mock.patch.object(
53 BackendClass, 'cleanup_merge_workspace')
53 BackendClass, 'cleanup_merge_workspace')
54
54
55 self.workspace_remove_mock = self.workspace_remove_patcher.start()
55 self.workspace_remove_mock = self.workspace_remove_patcher.start()
56 self.merge_mock = self.merge_patcher.start()
56 self.merge_mock = self.merge_patcher.start()
57 self.comment_patcher = mock.patch(
57 self.comment_patcher = mock.patch(
58 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
58 'rhodecode.model.changeset_status.ChangesetStatusModel.set_status')
59 self.comment_patcher.start()
59 self.comment_patcher.start()
60 self.notification_patcher = mock.patch(
60 self.notification_patcher = mock.patch(
61 'rhodecode.model.notification.NotificationModel.create')
61 'rhodecode.model.notification.NotificationModel.create')
62 self.notification_patcher.start()
62 self.notification_patcher.start()
63 self.helper_patcher = mock.patch(
63 self.helper_patcher = mock.patch(
64 'rhodecode.lib.helpers.url')
64 'rhodecode.lib.helpers.url')
65 self.helper_patcher.start()
65 self.helper_patcher.start()
66
66
67 self.hook_patcher = mock.patch.object(PullRequestModel,
67 self.hook_patcher = mock.patch.object(PullRequestModel,
68 '_trigger_pull_request_hook')
68 '_trigger_pull_request_hook')
69 self.hook_mock = self.hook_patcher.start()
69 self.hook_mock = self.hook_patcher.start()
70
70
71 self.invalidation_patcher = mock.patch(
71 self.invalidation_patcher = mock.patch(
72 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
72 'rhodecode.model.pull_request.ScmModel.mark_for_invalidation')
73 self.invalidation_mock = self.invalidation_patcher.start()
73 self.invalidation_mock = self.invalidation_patcher.start()
74
74
75 self.pull_request = pr_util.create_pull_request(
75 self.pull_request = pr_util.create_pull_request(
76 mergeable=True, name_suffix=u'Δ…Δ‡')
76 mergeable=True, name_suffix=u'Δ…Δ‡')
77 self.source_commit = self.pull_request.source_ref_parts.commit_id
77 self.source_commit = self.pull_request.source_ref_parts.commit_id
78 self.target_commit = self.pull_request.target_ref_parts.commit_id
78 self.target_commit = self.pull_request.target_ref_parts.commit_id
79 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
79 self.workspace_id = 'pr-%s' % self.pull_request.pull_request_id
80
80
81 @request.addfinalizer
81 @request.addfinalizer
82 def cleanup_pull_request():
82 def cleanup_pull_request():
83 calls = [mock.call(
83 calls = [mock.call(
84 self.pull_request, self.pull_request.author, 'create')]
84 self.pull_request, self.pull_request.author, 'create')]
85 self.hook_mock.assert_has_calls(calls)
85 self.hook_mock.assert_has_calls(calls)
86
86
87 self.workspace_remove_patcher.stop()
87 self.workspace_remove_patcher.stop()
88 self.merge_patcher.stop()
88 self.merge_patcher.stop()
89 self.comment_patcher.stop()
89 self.comment_patcher.stop()
90 self.notification_patcher.stop()
90 self.notification_patcher.stop()
91 self.helper_patcher.stop()
91 self.helper_patcher.stop()
92 self.hook_patcher.stop()
92 self.hook_patcher.stop()
93 self.invalidation_patcher.stop()
93 self.invalidation_patcher.stop()
94
94
95 return self.pull_request
95 return self.pull_request
96
96
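# The fixture above patches BackendClass.merge and cleanup_merge_workspace,
# plus the status, notification, url helper, pull request hook and cache
# invalidation calls, so the tests below run entirely against mocks instead
# of a real VCS backend.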
97 def test_get_all(self, pull_request):
97 def test_get_all(self, pull_request):
98 prs = PullRequestModel().get_all(pull_request.target_repo)
98 prs = PullRequestModel().get_all(pull_request.target_repo)
99 assert isinstance(prs, list)
99 assert isinstance(prs, list)
100 assert len(prs) == 1
100 assert len(prs) == 1
101
101
102 def test_count_all(self, pull_request):
102 def test_count_all(self, pull_request):
103 pr_count = PullRequestModel().count_all(pull_request.target_repo)
103 pr_count = PullRequestModel().count_all(pull_request.target_repo)
104 assert pr_count == 1
104 assert pr_count == 1
105
105
106 def test_get_awaiting_review(self, pull_request):
106 def test_get_awaiting_review(self, pull_request):
107 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
107 prs = PullRequestModel().get_awaiting_review(pull_request.target_repo)
108 assert isinstance(prs, list)
108 assert isinstance(prs, list)
109 assert len(prs) == 1
109 assert len(prs) == 1
110
110
111 def test_count_awaiting_review(self, pull_request):
111 def test_count_awaiting_review(self, pull_request):
112 pr_count = PullRequestModel().count_awaiting_review(
112 pr_count = PullRequestModel().count_awaiting_review(
113 pull_request.target_repo)
113 pull_request.target_repo)
114 assert pr_count == 1
114 assert pr_count == 1
115
115
116 def test_get_awaiting_my_review(self, pull_request):
116 def test_get_awaiting_my_review(self, pull_request):
117 PullRequestModel().update_reviewers(
117 PullRequestModel().update_reviewers(
118 pull_request, [pull_request.author])
118 pull_request, [pull_request.author])
119 prs = PullRequestModel().get_awaiting_my_review(
119 prs = PullRequestModel().get_awaiting_my_review(
120 pull_request.target_repo, user_id=pull_request.author.user_id)
120 pull_request.target_repo, user_id=pull_request.author.user_id)
121 assert isinstance(prs, list)
121 assert isinstance(prs, list)
122 assert len(prs) == 1
122 assert len(prs) == 1
123
123
124 def test_count_awaiting_my_review(self, pull_request):
124 def test_count_awaiting_my_review(self, pull_request):
125 PullRequestModel().update_reviewers(
125 PullRequestModel().update_reviewers(
126 pull_request, [pull_request.author])
126 pull_request, [pull_request.author])
127 pr_count = PullRequestModel().count_awaiting_my_review(
127 pr_count = PullRequestModel().count_awaiting_my_review(
128 pull_request.target_repo, user_id=pull_request.author.user_id)
128 pull_request.target_repo, user_id=pull_request.author.user_id)
129 assert pr_count == 1
129 assert pr_count == 1
130
130
131 def test_delete_calls_cleanup_merge(self, pull_request):
131 def test_delete_calls_cleanup_merge(self, pull_request):
132 PullRequestModel().delete(pull_request)
132 PullRequestModel().delete(pull_request)
133
133
134 self.workspace_remove_mock.assert_called_once_with(
134 self.workspace_remove_mock.assert_called_once_with(
135 self.workspace_id)
135 self.workspace_id)
136
136
137 def test_close_calls_cleanup_and_hook(self, pull_request):
137 def test_close_calls_cleanup_and_hook(self, pull_request):
138 PullRequestModel().close_pull_request(
138 PullRequestModel().close_pull_request(
139 pull_request, pull_request.author)
139 pull_request, pull_request.author)
140
140
141 self.workspace_remove_mock.assert_called_once_with(
141 self.workspace_remove_mock.assert_called_once_with(
142 self.workspace_id)
142 self.workspace_id)
143 self.hook_mock.assert_called_with(
143 self.hook_mock.assert_called_with(
144 self.pull_request, self.pull_request.author, 'close')
144 self.pull_request, self.pull_request.author, 'close')
145
145
146 def test_merge_status(self, pull_request):
146 def test_merge_status(self, pull_request):
147 self.merge_mock.return_value = MergeResponse(
147 self.merge_mock.return_value = MergeResponse(
148 True, False, None, MergeFailureReason.NONE)
148 True, False, None, MergeFailureReason.NONE)
149
149
150 assert pull_request._last_merge_source_rev is None
150 assert pull_request._last_merge_source_rev is None
151 assert pull_request._last_merge_target_rev is None
151 assert pull_request._last_merge_target_rev is None
152 assert pull_request._last_merge_status is None
152 assert pull_request._last_merge_status is None
153
153
154 status, msg = PullRequestModel().merge_status(pull_request)
154 status, msg = PullRequestModel().merge_status(pull_request)
155 assert status is True
155 assert status is True
156 assert msg.eval() == 'This pull request can be automatically merged.'
156 assert msg.eval() == 'This pull request can be automatically merged.'
157 self.merge_mock.assert_called_once_with(
157 self.merge_mock.assert_called_once_with(
158 pull_request.target_ref_parts,
158 pull_request.target_ref_parts,
159 pull_request.source_repo.scm_instance(),
159 pull_request.source_repo.scm_instance(),
160 pull_request.source_ref_parts, self.workspace_id, dry_run=True)
160 pull_request.source_ref_parts, self.workspace_id, dry_run=True)
161
161
162 assert pull_request._last_merge_source_rev == self.source_commit
162 assert pull_request._last_merge_source_rev == self.source_commit
163 assert pull_request._last_merge_target_rev == self.target_commit
163 assert pull_request._last_merge_target_rev == self.target_commit
164 assert pull_request._last_merge_status is MergeFailureReason.NONE
164 assert pull_request._last_merge_status is MergeFailureReason.NONE
165
165
166 self.merge_mock.reset_mock()
166 self.merge_mock.reset_mock()
167 status, msg = PullRequestModel().merge_status(pull_request)
167 status, msg = PullRequestModel().merge_status(pull_request)
168 assert status is True
168 assert status is True
169 assert msg.eval() == 'This pull request can be automatically merged.'
169 assert msg.eval() == 'This pull request can be automatically merged.'
170 assert self.merge_mock.called is False
170 assert self.merge_mock.called is False
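# The second merge_status() call above is presumably served from the stored
# _last_merge_* values checked earlier, so the backend merge is not invoked again.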
171
171
172 def test_merge_status_known_failure(self, pull_request):
172 def test_merge_status_known_failure(self, pull_request):
173 self.merge_mock.return_value = MergeResponse(
173 self.merge_mock.return_value = MergeResponse(
174 False, False, None, MergeFailureReason.MERGE_FAILED)
174 False, False, None, MergeFailureReason.MERGE_FAILED)
175
175
176 assert pull_request._last_merge_source_rev is None
176 assert pull_request._last_merge_source_rev is None
177 assert pull_request._last_merge_target_rev is None
177 assert pull_request._last_merge_target_rev is None
178 assert pull_request._last_merge_status is None
178 assert pull_request._last_merge_status is None
179
179
180 status, msg = PullRequestModel().merge_status(pull_request)
180 status, msg = PullRequestModel().merge_status(pull_request)
181 assert status is False
181 assert status is False
182 assert (
182 assert (
183 msg.eval() ==
183 msg.eval() ==
184 'This pull request cannot be merged because of conflicts.')
184 'This pull request cannot be merged because of conflicts.')
185 self.merge_mock.assert_called_once_with(
185 self.merge_mock.assert_called_once_with(
186 pull_request.target_ref_parts,
186 pull_request.target_ref_parts,
187 pull_request.source_repo.scm_instance(),
187 pull_request.source_repo.scm_instance(),
188 pull_request.source_ref_parts, self.workspace_id, dry_run=True)
188 pull_request.source_ref_parts, self.workspace_id, dry_run=True)
189
189
190 assert pull_request._last_merge_source_rev == self.source_commit
190 assert pull_request._last_merge_source_rev == self.source_commit
191 assert pull_request._last_merge_target_rev == self.target_commit
191 assert pull_request._last_merge_target_rev == self.target_commit
192 assert (
192 assert (
193 pull_request._last_merge_status is MergeFailureReason.MERGE_FAILED)
193 pull_request._last_merge_status is MergeFailureReason.MERGE_FAILED)
194
194
195 self.merge_mock.reset_mock()
195 self.merge_mock.reset_mock()
196 status, msg = PullRequestModel().merge_status(pull_request)
196 status, msg = PullRequestModel().merge_status(pull_request)
197 assert status is False
197 assert status is False
198 assert (
198 assert (
199 msg.eval() ==
199 msg.eval() ==
200 'This pull request cannot be merged because of conflicts.')
200 'This pull request cannot be merged because of conflicts.')
201 assert self.merge_mock.called is False
201 assert self.merge_mock.called is False
202
202
203 def test_merge_status_unknown_failure(self, pull_request):
203 def test_merge_status_unknown_failure(self, pull_request):
204 self.merge_mock.return_value = MergeResponse(
204 self.merge_mock.return_value = MergeResponse(
205 False, False, None, MergeFailureReason.UNKNOWN)
205 False, False, None, MergeFailureReason.UNKNOWN)
206
206
207 assert pull_request._last_merge_source_rev is None
207 assert pull_request._last_merge_source_rev is None
208 assert pull_request._last_merge_target_rev is None
208 assert pull_request._last_merge_target_rev is None
209 assert pull_request._last_merge_status is None
209 assert pull_request._last_merge_status is None
210
210
211 status, msg = PullRequestModel().merge_status(pull_request)
211 status, msg = PullRequestModel().merge_status(pull_request)
212 assert status is False
212 assert status is False
213 assert msg.eval() == (
213 assert msg.eval() == (
214 'This pull request cannot be merged because of an unhandled'
214 'This pull request cannot be merged because of an unhandled'
215 ' exception.')
215 ' exception.')
216 self.merge_mock.assert_called_once_with(
216 self.merge_mock.assert_called_once_with(
217 pull_request.target_ref_parts,
217 pull_request.target_ref_parts,
218 pull_request.source_repo.scm_instance(),
218 pull_request.source_repo.scm_instance(),
219 pull_request.source_ref_parts, self.workspace_id, dry_run=True)
219 pull_request.source_ref_parts, self.workspace_id, dry_run=True)
220
220
221 assert pull_request._last_merge_source_rev is None
221 assert pull_request._last_merge_source_rev is None
222 assert pull_request._last_merge_target_rev is None
222 assert pull_request._last_merge_target_rev is None
223 assert pull_request._last_merge_status is None
223 assert pull_request._last_merge_status is None
224
224
225 self.merge_mock.reset_mock()
225 self.merge_mock.reset_mock()
226 status, msg = PullRequestModel().merge_status(pull_request)
226 status, msg = PullRequestModel().merge_status(pull_request)
227 assert status is False
227 assert status is False
228 assert msg.eval() == (
228 assert msg.eval() == (
229 'This pull request cannot be merged because of an unhandled'
229 'This pull request cannot be merged because of an unhandled'
230 ' exception.')
230 ' exception.')
231 assert self.merge_mock.called is True
231 assert self.merge_mock.called is True
232
232
233 def test_merge_status_when_target_is_locked(self, pull_request):
233 def test_merge_status_when_target_is_locked(self, pull_request):
234 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
234 pull_request.target_repo.locked = [1, u'12345.50', 'lock_web']
235 status, msg = PullRequestModel().merge_status(pull_request)
235 status, msg = PullRequestModel().merge_status(pull_request)
236 assert status is False
236 assert status is False
237 assert msg.eval() == (
237 assert msg.eval() == (
238 'This pull request cannot be merged because the target repository'
238 'This pull request cannot be merged because the target repository'
239 ' is locked.')
239 ' is locked.')
240
240
241 def test_merge_status_requirements_check_target(self, pull_request):
241 def test_merge_status_requirements_check_target(self, pull_request):
242
242
243 def has_largefiles(self, repo):
243 def has_largefiles(self, repo):
244 return repo == pull_request.source_repo
244 return repo == pull_request.source_repo
245
245
246 patcher = mock.patch.object(
246 patcher = mock.patch.object(
247 PullRequestModel, '_has_largefiles', has_largefiles)
247 PullRequestModel, '_has_largefiles', has_largefiles)
248 with patcher:
248 with patcher:
249 status, msg = PullRequestModel().merge_status(pull_request)
249 status, msg = PullRequestModel().merge_status(pull_request)
250
250
251 assert status is False
251 assert status is False
252 assert msg == 'Target repository large files support is disabled.'
252 assert msg == 'Target repository large files support is disabled.'
253
253
254 def test_merge_status_requirements_check_source(self, pull_request):
254 def test_merge_status_requirements_check_source(self, pull_request):
255
255
256 def has_largefiles(self, repo):
256 def has_largefiles(self, repo):
257 return repo == pull_request.target_repo
257 return repo == pull_request.target_repo
258
258
259 patcher = mock.patch.object(
259 patcher = mock.patch.object(
260 PullRequestModel, '_has_largefiles', has_largefiles)
260 PullRequestModel, '_has_largefiles', has_largefiles)
261 with patcher:
261 with patcher:
262 status, msg = PullRequestModel().merge_status(pull_request)
262 status, msg = PullRequestModel().merge_status(pull_request)
263
263
264 assert status is False
264 assert status is False
265 assert msg == 'Source repository large files support is disabled.'
265 assert msg == 'Source repository large files support is disabled.'
266
266
267 def test_merge(self, pull_request, merge_extras):
267 def test_merge(self, pull_request, merge_extras):
268 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
268 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
269 self.merge_mock.return_value = MergeResponse(
269 self.merge_mock.return_value = MergeResponse(
270 True, True,
270 True, True,
271 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6',
271 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6',
272 MergeFailureReason.NONE)
272 MergeFailureReason.NONE)
273
273
274 merge_extras['repository'] = pull_request.target_repo.repo_name
274 PullRequestModel().merge(
275 PullRequestModel().merge(
275 pull_request, pull_request.author, extras=merge_extras)
276 pull_request, pull_request.author, extras=merge_extras)
276
277
277 message = (
278 message = (
278 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
279 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
279 u'\n\n {pr_title}'.format(
280 u'\n\n {pr_title}'.format(
280 pr_id=pull_request.pull_request_id,
281 pr_id=pull_request.pull_request_id,
281 source_repo=safe_unicode(
282 source_repo=safe_unicode(
282 pull_request.source_repo.scm_instance().name),
283 pull_request.source_repo.scm_instance().name),
283 source_ref_name=pull_request.source_ref_parts.name,
284 source_ref_name=pull_request.source_ref_parts.name,
284 pr_title=safe_unicode(pull_request.title)
285 pr_title=safe_unicode(pull_request.title)
285 )
286 )
286 )
287 )
287 self.merge_mock.assert_called_once_with(
288 self.merge_mock.assert_called_once_with(
288 pull_request.target_ref_parts,
289 pull_request.target_ref_parts,
289 pull_request.source_repo.scm_instance(),
290 pull_request.source_repo.scm_instance(),
290 pull_request.source_ref_parts, self.workspace_id,
291 pull_request.source_ref_parts, self.workspace_id,
291 user_name=user.username, user_email=user.email, message=message
292 user_name=user.username, user_email=user.email, message=message
292 )
293 )
293 self.invalidation_mock.assert_called_once_with(
294 self.invalidation_mock.assert_called_once_with(
294 pull_request.target_repo.repo_name)
295 pull_request.target_repo.repo_name)
295
296
296 self.hook_mock.assert_called_with(
297 self.hook_mock.assert_called_with(
297 self.pull_request, self.pull_request.author, 'merge')
298 self.pull_request, self.pull_request.author, 'merge')
298
299
299 pull_request = PullRequest.get(pull_request.pull_request_id)
300 pull_request = PullRequest.get(pull_request.pull_request_id)
300 assert (
301 assert (
301 pull_request.merge_rev ==
302 pull_request.merge_rev ==
302 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
303 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6')
303
304
304 def test_merge_failed(self, pull_request, merge_extras):
305 def test_merge_failed(self, pull_request, merge_extras):
305 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
306 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
306 self.merge_mock.return_value = MergeResponse(
307 self.merge_mock.return_value = MergeResponse(
307 False, False,
308 False, False,
308 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6',
309 '6126b7bfcc82ad2d3deaee22af926b082ce54cc6',
309 MergeFailureReason.MERGE_FAILED)
310 MergeFailureReason.MERGE_FAILED)
310
311
312 merge_extras['repository'] = pull_request.target_repo.repo_name
311 PullRequestModel().merge(
313 PullRequestModel().merge(
312 pull_request, pull_request.author, extras=merge_extras)
314 pull_request, pull_request.author, extras=merge_extras)
313
315
314 message = (
316 message = (
315 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
317 u'Merge pull request #{pr_id} from {source_repo} {source_ref_name}'
316 u'\n\n {pr_title}'.format(
318 u'\n\n {pr_title}'.format(
317 pr_id=pull_request.pull_request_id,
319 pr_id=pull_request.pull_request_id,
318 source_repo=safe_unicode(
320 source_repo=safe_unicode(
319 pull_request.source_repo.scm_instance().name),
321 pull_request.source_repo.scm_instance().name),
320 source_ref_name=pull_request.source_ref_parts.name,
322 source_ref_name=pull_request.source_ref_parts.name,
321 pr_title=safe_unicode(pull_request.title)
323 pr_title=safe_unicode(pull_request.title)
322 )
324 )
323 )
325 )
324 self.merge_mock.assert_called_once_with(
326 self.merge_mock.assert_called_once_with(
325 pull_request.target_ref_parts,
327 pull_request.target_ref_parts,
326 pull_request.source_repo.scm_instance(),
328 pull_request.source_repo.scm_instance(),
327 pull_request.source_ref_parts, self.workspace_id,
329 pull_request.source_ref_parts, self.workspace_id,
328 user_name=user.username, user_email=user.email, message=message
330 user_name=user.username, user_email=user.email, message=message
329 )
331 )
330
332
331 pull_request = PullRequest.get(pull_request.pull_request_id)
333 pull_request = PullRequest.get(pull_request.pull_request_id)
332 assert self.invalidation_mock.called is False
334 assert self.invalidation_mock.called is False
333 assert pull_request.merge_rev is None
335 assert pull_request.merge_rev is None
334
336
335 def test_get_commit_ids(self, pull_request):
337 def test_get_commit_ids(self, pull_request):
336 # The PR has not been merged yet, so expect an exception
338 # The PR has not been merged yet, so expect an exception
337 with pytest.raises(ValueError):
339 with pytest.raises(ValueError):
338 PullRequestModel()._get_commit_ids(pull_request)
340 PullRequestModel()._get_commit_ids(pull_request)
339
341
340 # Merge revision is in the revisions list
342 # Merge revision is in the revisions list
341 pull_request.merge_rev = pull_request.revisions[0]
343 pull_request.merge_rev = pull_request.revisions[0]
342 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
344 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
343 assert commit_ids == pull_request.revisions
345 assert commit_ids == pull_request.revisions
344
346
345 # Merge revision is not in the revisions list
347 # Merge revision is not in the revisions list
346 pull_request.merge_rev = 'f000' * 10
348 pull_request.merge_rev = 'f000' * 10
347 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
349 commit_ids = PullRequestModel()._get_commit_ids(pull_request)
348 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
350 assert commit_ids == pull_request.revisions + [pull_request.merge_rev]
349
351
350 def test_get_diff_from_pr_version(self, pull_request):
352 def test_get_diff_from_pr_version(self, pull_request):
351 diff = PullRequestModel()._get_diff_from_pr_or_version(
353 diff = PullRequestModel()._get_diff_from_pr_or_version(
352 pull_request, context=6)
354 pull_request, context=6)
353 assert 'file_1' in diff.raw
355 assert 'file_1' in diff.raw
354
356
355
357
356 class TestIntegrationMerge(object):
358 class TestIntegrationMerge(object):
357 @pytest.mark.parametrize('extra_config', (
359 @pytest.mark.parametrize('extra_config', (
358 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
360 {'vcs.hooks.protocol': 'http', 'vcs.hooks.direct_calls': False},
359 {'vcs.hooks.protocol': 'Pyro4', 'vcs.hooks.direct_calls': False},
361 {'vcs.hooks.protocol': 'Pyro4', 'vcs.hooks.direct_calls': False},
360 ))
362 ))
361 def test_merge_triggers_push_hooks(
363 def test_merge_triggers_push_hooks(
362 self, pr_util, user_admin, capture_rcextensions, merge_extras,
364 self, pr_util, user_admin, capture_rcextensions, merge_extras,
363 extra_config):
365 extra_config):
364 pull_request = pr_util.create_pull_request(
366 pull_request = pr_util.create_pull_request(
365 approved=True, mergeable=True)
367 approved=True, mergeable=True)
366 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
368 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
369 merge_extras['repository'] = pull_request.target_repo.repo_name
367 Session().commit()
370 Session().commit()
368
371
369 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
372 with mock.patch.dict(rhodecode.CONFIG, extra_config, clear=False):
370 merge_state = PullRequestModel().merge(
373 merge_state = PullRequestModel().merge(
371 pull_request, user_admin, extras=merge_extras)
374 pull_request, user_admin, extras=merge_extras)
372
375
373 assert merge_state.executed
376 assert merge_state.executed
374 assert 'pre_push' in capture_rcextensions
377 assert 'pre_push' in capture_rcextensions
375 assert 'post_push' in capture_rcextensions
378 assert 'post_push' in capture_rcextensions
376
379
377 def test_merge_can_be_rejected_by_pre_push_hook(
380 def test_merge_can_be_rejected_by_pre_push_hook(
378 self, pr_util, user_admin, capture_rcextensions, merge_extras):
381 self, pr_util, user_admin, capture_rcextensions, merge_extras):
379 pull_request = pr_util.create_pull_request(
382 pull_request = pr_util.create_pull_request(
380 approved=True, mergeable=True)
383 approved=True, mergeable=True)
381 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
384 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
385 merge_extras['repository'] = pull_request.target_repo.repo_name
382 Session().commit()
386 Session().commit()
383
387
384 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
388 with mock.patch('rhodecode.EXTENSIONS.PRE_PUSH_HOOK') as pre_pull:
385 pre_pull.side_effect = RepositoryError("Disallow push!")
389 pre_pull.side_effect = RepositoryError("Disallow push!")
386 merge_status = PullRequestModel().merge(
390 merge_status = PullRequestModel().merge(
387 pull_request, user_admin, extras=merge_extras)
391 pull_request, user_admin, extras=merge_extras)
388
392
389 assert not merge_status.executed
393 assert not merge_status.executed
390 assert 'pre_push' not in capture_rcextensions
394 assert 'pre_push' not in capture_rcextensions
391 assert 'post_push' not in capture_rcextensions
395 assert 'post_push' not in capture_rcextensions
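# A RepositoryError raised from the patched pre-push rcextension aborts the
# merge: it is not executed and neither push hook is captured.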
392
396
393 def test_merge_fails_if_target_is_locked(
397 def test_merge_fails_if_target_is_locked(
394 self, pr_util, user_regular, merge_extras):
398 self, pr_util, user_regular, merge_extras):
395 pull_request = pr_util.create_pull_request(
399 pull_request = pr_util.create_pull_request(
396 approved=True, mergeable=True)
400 approved=True, mergeable=True)
397 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
401 locked_by = [user_regular.user_id + 1, 12345.50, 'lock_web']
398 pull_request.target_repo.locked = locked_by
402 pull_request.target_repo.locked = locked_by
399 # TODO: johbo: Check if this can work based on the database, currently
403 # TODO: johbo: Check if this can work based on the database, currently
400 # all data is pre-computed, that's why just updating the DB is not
404 # all data is pre-computed, that's why just updating the DB is not
401 # enough.
405 # enough.
402 merge_extras['locked_by'] = locked_by
406 merge_extras['locked_by'] = locked_by
407 merge_extras['repository'] = pull_request.target_repo.repo_name
403 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
408 # TODO: johbo: Needed for sqlite, try to find an automatic way for it
404 Session().commit()
409 Session().commit()
405 merge_status = PullRequestModel().merge(
410 merge_status = PullRequestModel().merge(
406 pull_request, user_regular, extras=merge_extras)
411 pull_request, user_regular, extras=merge_extras)
407 assert not merge_status.executed
412 assert not merge_status.executed
408
413
409
414
410 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
415 @pytest.mark.parametrize('use_outdated, inlines_count, outdated_count', [
411 (False, 1, 0),
416 (False, 1, 0),
412 (True, 0, 1),
417 (True, 0, 1),
413 ])
418 ])
414 def test_outdated_comments(
419 def test_outdated_comments(
415 pr_util, use_outdated, inlines_count, outdated_count):
420 pr_util, use_outdated, inlines_count, outdated_count):
416 pull_request = pr_util.create_pull_request()
421 pull_request = pr_util.create_pull_request()
417 pr_util.create_inline_comment(file_path='not_in_updated_diff')
422 pr_util.create_inline_comment(file_path='not_in_updated_diff')
418
423
419 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
424 with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
420 pr_util.add_one_commit()
425 pr_util.add_one_commit()
421 assert_inline_comments(
426 assert_inline_comments(
422 pull_request, visible=inlines_count, outdated=outdated_count)
427 pull_request, visible=inlines_count, outdated=outdated_count)
423 outdated_comment_mock.assert_called_with(pull_request)
428 outdated_comment_mock.assert_called_with(pull_request)
424
429
425
430
426 @pytest.fixture
431 @pytest.fixture
427 def merge_extras(user_regular):
432 def merge_extras(user_regular):
428 """
433 """
429 Context for the vcs operation when running a merge.
434 Context for the vcs operation when running a merge.
430 """
435 """
431 extras = {
436 extras = {
432 'ip': '127.0.0.1',
437 'ip': '127.0.0.1',
433 'username': user_regular.username,
438 'username': user_regular.username,
434 'action': 'push',
439 'action': 'push',
435 'repository': 'fake_target_repo_name',
440 'repository': 'fake_target_repo_name',
436 'scm': 'git',
441 'scm': 'git',
437 'config': 'fake_config_ini_path',
442 'config': 'fake_config_ini_path',
438 'make_lock': None,
443 'make_lock': None,
439 'locked_by': [None, None, None],
444 'locked_by': [None, None, None],
440 'server_url': 'http://test.example.com:5000',
445 'server_url': 'http://test.example.com:5000',
441 'hooks': ['push', 'pull'],
446 'hooks': ['push', 'pull'],
442 }
447 }
443 return extras
448 return extras
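# Tests that actually call PullRequestModel().merge() override the placeholder
# 'repository' key with the real target repository name, e.g.:
#   merge_extras['repository'] = pull_request.target_repo.repo_name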
444
449
445
450
446 class TestUpdateCommentHandling(object):
451 class TestUpdateCommentHandling(object):
447
452
448 @pytest.fixture(autouse=True, scope='class')
453 @pytest.fixture(autouse=True, scope='class')
449 def enable_outdated_comments(self, request, pylonsapp):
454 def enable_outdated_comments(self, request, pylonsapp):
450 config_patch = mock.patch.dict(
455 config_patch = mock.patch.dict(
451 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
456 'rhodecode.CONFIG', {'rhodecode_use_outdated_comments': True})
452 config_patch.start()
457 config_patch.start()
453
458
454 @request.addfinalizer
459 @request.addfinalizer
455 def cleanup():
460 def cleanup():
456 config_patch.stop()
461 config_patch.stop()
457
462
458 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
463 def test_comment_stays_unflagged_on_unchanged_diff(self, pr_util):
459 commits = [
464 commits = [
460 {'message': 'a'},
465 {'message': 'a'},
461 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
466 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
462 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
467 {'message': 'c', 'added': [FileNode('file_c', 'test_content\n')]},
463 ]
468 ]
464 pull_request = pr_util.create_pull_request(
469 pull_request = pr_util.create_pull_request(
465 commits=commits, target_head='a', source_head='b', revisions=['b'])
470 commits=commits, target_head='a', source_head='b', revisions=['b'])
466 pr_util.create_inline_comment(file_path='file_b')
471 pr_util.create_inline_comment(file_path='file_b')
467 pr_util.add_one_commit(head='c')
472 pr_util.add_one_commit(head='c')
468
473
469 assert_inline_comments(pull_request, visible=1, outdated=0)
474 assert_inline_comments(pull_request, visible=1, outdated=0)
470
475
471 def test_comment_stays_unflagged_on_change_above(self, pr_util):
476 def test_comment_stays_unflagged_on_change_above(self, pr_util):
472 original_content = ''.join(
477 original_content = ''.join(
473 ['line {}\n'.format(x) for x in range(1, 11)])
478 ['line {}\n'.format(x) for x in range(1, 11)])
474 updated_content = 'new_line_at_top\n' + original_content
479 updated_content = 'new_line_at_top\n' + original_content
475 commits = [
480 commits = [
476 {'message': 'a'},
481 {'message': 'a'},
477 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
482 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
478 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
483 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
479 ]
484 ]
480 pull_request = pr_util.create_pull_request(
485 pull_request = pr_util.create_pull_request(
481 commits=commits, target_head='a', source_head='b', revisions=['b'])
486 commits=commits, target_head='a', source_head='b', revisions=['b'])
482
487
483 with outdated_comments_patcher():
488 with outdated_comments_patcher():
484 comment = pr_util.create_inline_comment(
489 comment = pr_util.create_inline_comment(
485 line_no=u'n8', file_path='file_b')
490 line_no=u'n8', file_path='file_b')
486 pr_util.add_one_commit(head='c')
491 pr_util.add_one_commit(head='c')
487
492
488 assert_inline_comments(pull_request, visible=1, outdated=0)
493 assert_inline_comments(pull_request, visible=1, outdated=0)
489 assert comment.line_no == u'n9'
494 assert comment.line_no == u'n9'
490
495
491 def test_comment_stays_unflagged_on_change_below(self, pr_util):
496 def test_comment_stays_unflagged_on_change_below(self, pr_util):
492 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
497 original_content = ''.join(['line {}\n'.format(x) for x in range(10)])
493 updated_content = original_content + 'new_line_at_end\n'
498 updated_content = original_content + 'new_line_at_end\n'
494 commits = [
499 commits = [
495 {'message': 'a'},
500 {'message': 'a'},
496 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
501 {'message': 'b', 'added': [FileNode('file_b', original_content)]},
497 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
502 {'message': 'c', 'changed': [FileNode('file_b', updated_content)]},
498 ]
503 ]
499 pull_request = pr_util.create_pull_request(
504 pull_request = pr_util.create_pull_request(
500 commits=commits, target_head='a', source_head='b', revisions=['b'])
505 commits=commits, target_head='a', source_head='b', revisions=['b'])
501 pr_util.create_inline_comment(file_path='file_b')
506 pr_util.create_inline_comment(file_path='file_b')
502 pr_util.add_one_commit(head='c')
507 pr_util.add_one_commit(head='c')
503
508
504 assert_inline_comments(pull_request, visible=1, outdated=0)
509 assert_inline_comments(pull_request, visible=1, outdated=0)
505
510
506 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
511 @pytest.mark.parametrize('line_no', ['n4', 'o4', 'n10', 'o9'])
507 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
512 def test_comment_flagged_on_change_around_context(self, pr_util, line_no):
508 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
513 base_lines = ['line {}\n'.format(x) for x in range(1, 13)]
509 change_lines = list(base_lines)
514 change_lines = list(base_lines)
510 change_lines.insert(6, 'line 6a added\n')
515 change_lines.insert(6, 'line 6a added\n')
511
516
512 # Changes at the first and last lines of the visible context
517 # Changes at the first and last lines of the visible context
513 update_lines = list(change_lines)
518 update_lines = list(change_lines)
514 update_lines[0] = 'line 1 changed\n'
519 update_lines[0] = 'line 1 changed\n'
515 update_lines[-1] = 'line 12 changed\n'
520 update_lines[-1] = 'line 12 changed\n'
516
521
517 def file_b(lines):
522 def file_b(lines):
518 return FileNode('file_b', ''.join(lines))
523 return FileNode('file_b', ''.join(lines))
519
524
520 commits = [
525 commits = [
521 {'message': 'a', 'added': [file_b(base_lines)]},
526 {'message': 'a', 'added': [file_b(base_lines)]},
522 {'message': 'b', 'changed': [file_b(change_lines)]},
527 {'message': 'b', 'changed': [file_b(change_lines)]},
523 {'message': 'c', 'changed': [file_b(update_lines)]},
528 {'message': 'c', 'changed': [file_b(update_lines)]},
524 ]
529 ]
525
530
526 pull_request = pr_util.create_pull_request(
531 pull_request = pr_util.create_pull_request(
527 commits=commits, target_head='a', source_head='b', revisions=['b'])
532 commits=commits, target_head='a', source_head='b', revisions=['b'])
528 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
533 pr_util.create_inline_comment(line_no=line_no, file_path='file_b')
529
534
530 with outdated_comments_patcher():
535 with outdated_comments_patcher():
531 pr_util.add_one_commit(head='c')
536 pr_util.add_one_commit(head='c')
532 assert_inline_comments(pull_request, visible=0, outdated=1)
537 assert_inline_comments(pull_request, visible=0, outdated=1)
533
538
534 @pytest.mark.parametrize("change, content", [
539 @pytest.mark.parametrize("change, content", [
535 ('changed', 'changed\n'),
540 ('changed', 'changed\n'),
536 ('removed', ''),
541 ('removed', ''),
537 ], ids=['changed', 'removed'])
542 ], ids=['changed', 'removed'])
538 def test_comment_flagged_on_change(self, pr_util, change, content):
543 def test_comment_flagged_on_change(self, pr_util, change, content):
539 commits = [
544 commits = [
540 {'message': 'a'},
545 {'message': 'a'},
541 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
546 {'message': 'b', 'added': [FileNode('file_b', 'test_content\n')]},
542 {'message': 'c', change: [FileNode('file_b', content)]},
547 {'message': 'c', change: [FileNode('file_b', content)]},
543 ]
548 ]
544 pull_request = pr_util.create_pull_request(
549 pull_request = pr_util.create_pull_request(
545 commits=commits, target_head='a', source_head='b', revisions=['b'])
550 commits=commits, target_head='a', source_head='b', revisions=['b'])
546 pr_util.create_inline_comment(file_path='file_b')
551 pr_util.create_inline_comment(file_path='file_b')
547
552
548 with outdated_comments_patcher():
553 with outdated_comments_patcher():
549 pr_util.add_one_commit(head='c')
554 pr_util.add_one_commit(head='c')
550 assert_inline_comments(pull_request, visible=0, outdated=1)
555 assert_inline_comments(pull_request, visible=0, outdated=1)
551
556
552
557
553 class TestUpdateChangedFiles(object):
558 class TestUpdateChangedFiles(object):
554
559
555 def test_no_changes_on_unchanged_diff(self, pr_util):
560 def test_no_changes_on_unchanged_diff(self, pr_util):
556 commits = [
561 commits = [
557 {'message': 'a'},
562 {'message': 'a'},
558 {'message': 'b',
563 {'message': 'b',
559 'added': [FileNode('file_b', 'test_content b\n')]},
564 'added': [FileNode('file_b', 'test_content b\n')]},
560 {'message': 'c',
565 {'message': 'c',
561 'added': [FileNode('file_c', 'test_content c\n')]},
566 'added': [FileNode('file_c', 'test_content c\n')]},
562 ]
567 ]
563 # open a PR from a to b, adding file_b
568 # open a PR from a to b, adding file_b
564 pull_request = pr_util.create_pull_request(
569 pull_request = pr_util.create_pull_request(
565 commits=commits, target_head='a', source_head='b', revisions=['b'],
570 commits=commits, target_head='a', source_head='b', revisions=['b'],
566 name_suffix='per-file-review')
571 name_suffix='per-file-review')
567
572
568 # modify PR adding new file file_c
573 # modify PR adding new file file_c
569 pr_util.add_one_commit(head='c')
574 pr_util.add_one_commit(head='c')
570
575
571 assert_pr_file_changes(
576 assert_pr_file_changes(
572 pull_request,
577 pull_request,
573 added=['file_c'],
578 added=['file_c'],
574 modified=[],
579 modified=[],
575 removed=[])
580 removed=[])
576
581
577 def test_modify_and_undo_modification_diff(self, pr_util):
582 def test_modify_and_undo_modification_diff(self, pr_util):
578 commits = [
583 commits = [
579 {'message': 'a'},
584 {'message': 'a'},
580 {'message': 'b',
585 {'message': 'b',
581 'added': [FileNode('file_b', 'test_content b\n')]},
586 'added': [FileNode('file_b', 'test_content b\n')]},
582 {'message': 'c',
587 {'message': 'c',
583 'changed': [FileNode('file_b', 'test_content b modified\n')]},
588 'changed': [FileNode('file_b', 'test_content b modified\n')]},
584 {'message': 'd',
589 {'message': 'd',
585 'changed': [FileNode('file_b', 'test_content b\n')]},
590 'changed': [FileNode('file_b', 'test_content b\n')]},
586 ]
591 ]
587 # open a PR from a to b, adding file_b
592 # open a PR from a to b, adding file_b
588 pull_request = pr_util.create_pull_request(
593 pull_request = pr_util.create_pull_request(
589 commits=commits, target_head='a', source_head='b', revisions=['b'],
594 commits=commits, target_head='a', source_head='b', revisions=['b'],
590 name_suffix='per-file-review')
595 name_suffix='per-file-review')
591
596
592 # modify PR modifying file file_b
597 # modify PR modifying file file_b
593 pr_util.add_one_commit(head='c')
598 pr_util.add_one_commit(head='c')
594
599
595 assert_pr_file_changes(
600 assert_pr_file_changes(
596 pull_request,
601 pull_request,
597 added=[],
602 added=[],
598 modified=['file_b'],
603 modified=['file_b'],
599 removed=[])
604 removed=[])
600
605
601 # move the head again to d, which rolls back the change,
606 # move the head again to d, which rolls back the change,
602 # meaning we should indicate no changes
607 # meaning we should indicate no changes
603 pr_util.add_one_commit(head='d')
608 pr_util.add_one_commit(head='d')
604
609
605 assert_pr_file_changes(
610 assert_pr_file_changes(
606 pull_request,
611 pull_request,
607 added=[],
612 added=[],
608 modified=[],
613 modified=[],
609 removed=[])
614 removed=[])
610
615
611 def test_updated_all_files_in_pr(self, pr_util):
616 def test_updated_all_files_in_pr(self, pr_util):
612 commits = [
617 commits = [
613 {'message': 'a'},
618 {'message': 'a'},
614 {'message': 'b', 'added': [
619 {'message': 'b', 'added': [
615 FileNode('file_a', 'test_content a\n'),
620 FileNode('file_a', 'test_content a\n'),
616 FileNode('file_b', 'test_content b\n'),
621 FileNode('file_b', 'test_content b\n'),
617 FileNode('file_c', 'test_content c\n')]},
622 FileNode('file_c', 'test_content c\n')]},
618 {'message': 'c', 'changed': [
623 {'message': 'c', 'changed': [
619 FileNode('file_a', 'test_content a changed\n'),
624 FileNode('file_a', 'test_content a changed\n'),
620 FileNode('file_b', 'test_content b changed\n'),
625 FileNode('file_b', 'test_content b changed\n'),
621 FileNode('file_c', 'test_content c changed\n')]},
626 FileNode('file_c', 'test_content c changed\n')]},
622 ]
627 ]
623 # open a PR from a to b, changing 3 files
628 # open a PR from a to b, changing 3 files
624 pull_request = pr_util.create_pull_request(
629 pull_request = pr_util.create_pull_request(
625 commits=commits, target_head='a', source_head='b', revisions=['b'],
630 commits=commits, target_head='a', source_head='b', revisions=['b'],
626 name_suffix='per-file-review')
631 name_suffix='per-file-review')
627
632
628 pr_util.add_one_commit(head='c')
633 pr_util.add_one_commit(head='c')
629
634
630 assert_pr_file_changes(
635 assert_pr_file_changes(
631 pull_request,
636 pull_request,
632 added=[],
637 added=[],
633 modified=['file_a', 'file_b', 'file_c'],
638 modified=['file_a', 'file_b', 'file_c'],
634 removed=[])
639 removed=[])
635
640
636 def test_updated_and_removed_all_files_in_pr(self, pr_util):
641 def test_updated_and_removed_all_files_in_pr(self, pr_util):
637 commits = [
642 commits = [
638 {'message': 'a'},
643 {'message': 'a'},
639 {'message': 'b', 'added': [
644 {'message': 'b', 'added': [
640 FileNode('file_a', 'test_content a\n'),
645 FileNode('file_a', 'test_content a\n'),
641 FileNode('file_b', 'test_content b\n'),
646 FileNode('file_b', 'test_content b\n'),
642 FileNode('file_c', 'test_content c\n')]},
647 FileNode('file_c', 'test_content c\n')]},
643 {'message': 'c', 'removed': [
648 {'message': 'c', 'removed': [
644 FileNode('file_a', 'test_content a changed\n'),
649 FileNode('file_a', 'test_content a changed\n'),
645 FileNode('file_b', 'test_content b changed\n'),
650 FileNode('file_b', 'test_content b changed\n'),
646 FileNode('file_c', 'test_content c changed\n')]},
651 FileNode('file_c', 'test_content c changed\n')]},
647 ]
652 ]
648 # open a PR from a to b, removing 3 files
653 # open a PR from a to b, removing 3 files
649 pull_request = pr_util.create_pull_request(
654 pull_request = pr_util.create_pull_request(
650 commits=commits, target_head='a', source_head='b', revisions=['b'],
655 commits=commits, target_head='a', source_head='b', revisions=['b'],
651 name_suffix='per-file-review')
656 name_suffix='per-file-review')
652
657
653 pr_util.add_one_commit(head='c')
658 pr_util.add_one_commit(head='c')
654
659
655 assert_pr_file_changes(
660 assert_pr_file_changes(
656 pull_request,
661 pull_request,
657 added=[],
662 added=[],
658 modified=[],
663 modified=[],
659 removed=['file_a', 'file_b', 'file_c'])
664 removed=['file_a', 'file_b', 'file_c'])
660
665
661
666
662 def test_update_writes_snapshot_into_pull_request_version(pr_util):
667 def test_update_writes_snapshot_into_pull_request_version(pr_util):
663 model = PullRequestModel()
668 model = PullRequestModel()
664 pull_request = pr_util.create_pull_request()
669 pull_request = pr_util.create_pull_request()
665 pr_util.update_source_repository()
670 pr_util.update_source_repository()
666
671
667 model.update_commits(pull_request)
672 model.update_commits(pull_request)
668
673
669 # Expect that it has a version entry now
674 # Expect that it has a version entry now
670 assert len(model.get_versions(pull_request)) == 1
675 assert len(model.get_versions(pull_request)) == 1
671
676
672
677
673 def test_update_skips_new_version_if_unchanged(pr_util):
678 def test_update_skips_new_version_if_unchanged(pr_util):
674 pull_request = pr_util.create_pull_request()
679 pull_request = pr_util.create_pull_request()
675 model = PullRequestModel()
680 model = PullRequestModel()
676 model.update_commits(pull_request)
681 model.update_commits(pull_request)
677
682
678 # Expect that it still has no versions
683 # Expect that it still has no versions
679 assert len(model.get_versions(pull_request)) == 0
684 assert len(model.get_versions(pull_request)) == 0
680
685
681
686
682 def test_update_assigns_comments_to_the_new_version(pr_util):
687 def test_update_assigns_comments_to_the_new_version(pr_util):
683 model = PullRequestModel()
688 model = PullRequestModel()
684 pull_request = pr_util.create_pull_request()
689 pull_request = pr_util.create_pull_request()
685 comment = pr_util.create_comment()
690 comment = pr_util.create_comment()
686 pr_util.update_source_repository()
691 pr_util.update_source_repository()
687
692
688 model.update_commits(pull_request)
693 model.update_commits(pull_request)
689
694
690 # Expect that the comment is linked to the pr version now
695 # Expect that the comment is linked to the pr version now
691 assert comment.pull_request_version == model.get_versions(pull_request)[0]
696 assert comment.pull_request_version == model.get_versions(pull_request)[0]
692
697
693
698
694 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util):
699 def test_update_adds_a_comment_to_the_pull_request_about_the_change(pr_util):
695 model = PullRequestModel()
700 model = PullRequestModel()
696 pull_request = pr_util.create_pull_request()
701 pull_request = pr_util.create_pull_request()
697 pr_util.update_source_repository()
702 pr_util.update_source_repository()
698 pr_util.update_source_repository()
703 pr_util.update_source_repository()
699
704
700 model.update_commits(pull_request)
705 model.update_commits(pull_request)
701
706
702 # Expect to find a new comment about the change
707 # Expect to find a new comment about the change
703 expected_message = textwrap.dedent(
708 expected_message = textwrap.dedent(
704 """\
709 """\
705 Auto status change to |under_review|
710 Auto status change to |under_review|
706
711
707 .. role:: added
712 .. role:: added
708 .. role:: removed
713 .. role:: removed
709 .. parsed-literal::
714 .. parsed-literal::
710
715
711 Changed commits:
716 Changed commits:
712 * :added:`1 added`
717 * :added:`1 added`
713 * :removed:`0 removed`
718 * :removed:`0 removed`
714
719
715 Changed files:
720 Changed files:
716 * `A file_2 <#a_c--92ed3b5f07b4>`_
721 * `A file_2 <#a_c--92ed3b5f07b4>`_
717
722
718 .. |under_review| replace:: *"Under Review"*"""
723 .. |under_review| replace:: *"Under Review"*"""
719 )
724 )
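# The auto-generated update comment is reStructuredText: it uses the
# added/removed roles and a parsed-literal block for the commit and file lists.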
720 pull_request_comments = sorted(
725 pull_request_comments = sorted(
721 pull_request.comments, key=lambda c: c.modified_at)
726 pull_request.comments, key=lambda c: c.modified_at)
722 update_comment = pull_request_comments[-1]
727 update_comment = pull_request_comments[-1]
723 assert update_comment.text == expected_message
728 assert update_comment.text == expected_message
724
729
725
730
726 def test_create_version_from_snapshot_updates_attributes(pr_util):
731 def test_create_version_from_snapshot_updates_attributes(pr_util):
727 pull_request = pr_util.create_pull_request()
732 pull_request = pr_util.create_pull_request()
728
733
729 # Avoiding default values
734 # Avoiding default values
730 pull_request.status = PullRequest.STATUS_CLOSED
735 pull_request.status = PullRequest.STATUS_CLOSED
731 pull_request._last_merge_source_rev = "0" * 40
736 pull_request._last_merge_source_rev = "0" * 40
732 pull_request._last_merge_target_rev = "1" * 40
737 pull_request._last_merge_target_rev = "1" * 40
733 pull_request._last_merge_status = 1
738 pull_request._last_merge_status = 1
734 pull_request.merge_rev = "2" * 40
739 pull_request.merge_rev = "2" * 40
735
740
736 # Remember automatic values
741 # Remember automatic values
737 created_on = pull_request.created_on
742 created_on = pull_request.created_on
738 updated_on = pull_request.updated_on
743 updated_on = pull_request.updated_on
739
744
740 # Create a new version of the pull request
745 # Create a new version of the pull request
741 version = PullRequestModel()._create_version_from_snapshot(pull_request)
746 version = PullRequestModel()._create_version_from_snapshot(pull_request)
742
747
743 # Check attributes
748 # Check attributes
744 assert version.title == pr_util.create_parameters['title']
749 assert version.title == pr_util.create_parameters['title']
745 assert version.description == pr_util.create_parameters['description']
750 assert version.description == pr_util.create_parameters['description']
746 assert version.status == PullRequest.STATUS_CLOSED
751 assert version.status == PullRequest.STATUS_CLOSED
747 assert version.created_on == created_on
752 assert version.created_on == created_on
748 assert version.updated_on == updated_on
753 assert version.updated_on == updated_on
749 assert version.user_id == pull_request.user_id
754 assert version.user_id == pull_request.user_id
750 assert version.revisions == pr_util.create_parameters['revisions']
755 assert version.revisions == pr_util.create_parameters['revisions']
751 assert version.source_repo == pr_util.source_repository
756 assert version.source_repo == pr_util.source_repository
752 assert version.source_ref == pr_util.create_parameters['source_ref']
757 assert version.source_ref == pr_util.create_parameters['source_ref']
753 assert version.target_repo == pr_util.target_repository
758 assert version.target_repo == pr_util.target_repository
754 assert version.target_ref == pr_util.create_parameters['target_ref']
759 assert version.target_ref == pr_util.create_parameters['target_ref']
755 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
760 assert version._last_merge_source_rev == pull_request._last_merge_source_rev
756 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
761 assert version._last_merge_target_rev == pull_request._last_merge_target_rev
757 assert version._last_merge_status == pull_request._last_merge_status
762 assert version._last_merge_status == pull_request._last_merge_status
758 assert version.merge_rev == pull_request.merge_rev
763 assert version.merge_rev == pull_request.merge_rev
759 assert version.pull_request == pull_request
764 assert version.pull_request == pull_request
760
765
761
766
762 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util):
767 def test_link_comments_to_version_only_updates_unlinked_comments(pr_util):
763 version1 = pr_util.create_version_of_pull_request()
768 version1 = pr_util.create_version_of_pull_request()
764 comment_linked = pr_util.create_comment(linked_to=version1)
769 comment_linked = pr_util.create_comment(linked_to=version1)
765 comment_unlinked = pr_util.create_comment()
770 comment_unlinked = pr_util.create_comment()
766 version2 = pr_util.create_version_of_pull_request()
771 version2 = pr_util.create_version_of_pull_request()
767
772
768 PullRequestModel()._link_comments_to_version(version2)
773 PullRequestModel()._link_comments_to_version(version2)
769
774
770 # Expect that only the new comment is linked to version2
775 # Expect that only the new comment is linked to version2
771 assert (
776 assert (
772 comment_unlinked.pull_request_version_id ==
777 comment_unlinked.pull_request_version_id ==
773 version2.pull_request_version_id)
778 version2.pull_request_version_id)
774 assert (
779 assert (
775 comment_linked.pull_request_version_id ==
780 comment_linked.pull_request_version_id ==
776 version1.pull_request_version_id)
781 version1.pull_request_version_id)
777 assert (
782 assert (
778 comment_unlinked.pull_request_version_id !=
783 comment_unlinked.pull_request_version_id !=
779 comment_linked.pull_request_version_id)
784 comment_linked.pull_request_version_id)
780
785
781
786
782 def test_calculate_commits():
787 def test_calculate_commits():
783 change = PullRequestModel()._calculate_commit_id_changes(
788 change = PullRequestModel()._calculate_commit_id_changes(
784 set([1, 2, 3]), set([1, 3, 4, 5]))
789 set([1, 2, 3]), set([1, 3, 4, 5]))
785 assert (set([4, 5]), set([1, 3]), set([2])) == (
790 assert (set([4, 5]), set([1, 3]), set([2])) == (
786 change.added, change.common, change.removed)
791 change.added, change.common, change.removed)
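# In other words, given old and new commit id sets, the helper is expected to
# report: added = new - old, common = old & new, removed = old - new.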
787
792
788
793
789 def assert_inline_comments(pull_request, visible=None, outdated=None):
794 def assert_inline_comments(pull_request, visible=None, outdated=None):
790 if visible is not None:
795 if visible is not None:
791 inline_comments = ChangesetCommentsModel().get_inline_comments(
796 inline_comments = ChangesetCommentsModel().get_inline_comments(
792 pull_request.target_repo.repo_id, pull_request=pull_request)
797 pull_request.target_repo.repo_id, pull_request=pull_request)
793 assert len(inline_comments) == visible
798 assert len(inline_comments) == visible
794 if outdated is not None:
799 if outdated is not None:
795 outdated_comments = ChangesetCommentsModel().get_outdated_comments(
800 outdated_comments = ChangesetCommentsModel().get_outdated_comments(
796 pull_request.target_repo.repo_id, pull_request)
801 pull_request.target_repo.repo_id, pull_request)
797 assert len(outdated_comments) == outdated
802 assert len(outdated_comments) == outdated
798
803
799
804
800 def assert_pr_file_changes(
805 def assert_pr_file_changes(
801 pull_request, added=None, modified=None, removed=None):
806 pull_request, added=None, modified=None, removed=None):
802 pr_versions = PullRequestModel().get_versions(pull_request)
807 pr_versions = PullRequestModel().get_versions(pull_request)
803 # always use the first version, i.e. the original PR, to calculate changes
808 # always use the first version, i.e. the original PR, to calculate changes
804 pull_request_version = pr_versions[0]
809 pull_request_version = pr_versions[0]
805 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
810 old_diff_data, new_diff_data = PullRequestModel()._generate_update_diffs(
806 pull_request, pull_request_version)
811 pull_request, pull_request_version)
807 file_changes = PullRequestModel()._calculate_file_changes(
812 file_changes = PullRequestModel()._calculate_file_changes(
808 old_diff_data, new_diff_data)
813 old_diff_data, new_diff_data)
809
814
810 assert added == file_changes.added, \
815 assert added == file_changes.added, \
811 'expected added:%s vs value:%s' % (added, file_changes.added)
816 'expected added:%s vs value:%s' % (added, file_changes.added)
812 assert modified == file_changes.modified, \
817 assert modified == file_changes.modified, \
813 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
818 'expected modified:%s vs value:%s' % (modified, file_changes.modified)
814 assert removed == file_changes.removed, \
819 assert removed == file_changes.removed, \
815 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
820 'expected removed:%s vs value:%s' % (removed, file_changes.removed)
816
821
817
822
818 def outdated_comments_patcher(use_outdated=True):
823 def outdated_comments_patcher(use_outdated=True):
819 return mock.patch.object(
824 return mock.patch.object(
820 ChangesetCommentsModel, 'use_outdated_comments',
825 ChangesetCommentsModel, 'use_outdated_comments',
821 return_value=use_outdated)
826 return_value=use_outdated)
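# Typical usage, as in the tests above:
#   with outdated_comments_patcher(use_outdated) as outdated_comment_mock:
#       pr_util.add_one_commit()
#   outdated_comment_mock.assert_called_with(pull_request)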