##// END OF EJS Templates
models: major update for python3,...
super-admin -
r5070:175fe6cb default
parent child Browse files
Show More
@@ -1,402 +1,403 b''
1
1
2 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20
20
21 import itertools
21 import itertools
22 import logging
22 import logging
23 import collections
23 import collections
24
24
25 from rhodecode.model import BaseModel
25 from rhodecode.model import BaseModel
26 from rhodecode.model.db import (
26 from rhodecode.model.db import (
27 ChangesetStatus, ChangesetComment, PullRequest, PullRequestReviewers, Session)
27 ChangesetStatus, ChangesetComment, PullRequest, PullRequestReviewers, Session)
28 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
28 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
29 from rhodecode.lib.markup_renderer import (
29 from rhodecode.lib.markup_renderer import (
30 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
30 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34
34
class ChangesetStatusModel(BaseModel):
    """
    Model layer for reading and writing per-commit review statuses
    (``ChangesetStatus`` rows) and for computing the aggregated review
    state of a pull request from its reviewers' (and reviewer groups')
    votes.
    """

    cls = ChangesetStatus

    def __get_changeset_status(self, changeset_status):
        # Resolve an id or instance reference into a ChangesetStatus object.
        return self._get_instance(ChangesetStatus, changeset_status)

    def __get_pull_request(self, pull_request):
        # Resolve an id or instance reference into a PullRequest object.
        return self._get_instance(PullRequest, pull_request)

    def _get_status_query(self, repo, revision, pull_request,
                          with_revisions=False):
        """
        Build the base query for statuses of `repo`, narrowed to either a
        single `revision` or a `pull_request` (exactly one must be given,
        otherwise an Exception is raised).

        :param repo: repository (id, name or instance)
        :param revision: commit hash to filter on, or None
        :param pull_request: pull request reference, or None
        :param with_revisions: when False, only current status rows
            (version == 0) are returned; when True, all historical
            versions are included as well
        :return: ordered SQLAlchemy query (ascending by version)
        """
        repo = self._get_repo(repo)

        q = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == repo)
        if not with_revisions:
            # version == 0 is always the current status row; higher
            # versions are the bumped-away history (see set_status)
            q = q.filter(ChangesetStatus.version == 0)

        if revision:
            q = q.filter(ChangesetStatus.revision == revision)
        elif pull_request:
            pull_request = self.__get_pull_request(pull_request)
            # TODO: johbo: Think about the impact of this join, there must
            # be a reason why ChangesetStatus and ChangesetComment is linked
            # to the pull request. Might be that we want to do the same for
            # the pull_request_version_id.
            # NOTE: `== None` is intentional SQLAlchemy syntax — it is
            # rendered as IS NULL; `is None` would not build the filter.
            q = q.join(ChangesetComment).filter(
                ChangesetStatus.pull_request == pull_request,
                ChangesetComment.pull_request_version_id == None)
        else:
            raise Exception('Please specify revision or pull_request')
        q = q.order_by(ChangesetStatus.version.asc())
        return q

    def calculate_group_vote(self, group_id, group_statuses_by_reviewers,
                             trim_votes=True):
        """
        Calculate status based on given group members, and voting rule


        group1 - 4 members, 3 required for approval
            user1 - approved
            user2 - reject
            user3 - approved
            user4 - rejected

        final_state: rejected, reason: did not get at least 3 approvals


        group1 - 4 members, 2 required for approval
            user1 - approved
            user2 - reject
            user3 - approved
            user4 - rejected

        final_state: approved, reason: got at least 2 approvals

        group1 - 4 members, ALL required for approval
            user1 - approved
            user2 - reject
            user3 - approved
            user4 - rejected

        final_state: rejected, reason: not all approvals


        group1 - 4 members, ALL required for approval
            user1 - approved
            user2 - approved
            user3 - approved
            user4 - approved

        final_state: approved, reason: all approvals received

        group1 - 4 members, 5 required for approval
            (approval should be shortened to number of actual members)

            user1 - approved
            user2 - approved
            user3 - approved
            user4 - approved

        final_state: approved, reason: all approvals received

        :param group_id: id of the reviewer group being voted on
        :param group_statuses_by_reviewers: iterable of
            (review_obj, user, reasons, mandatory, statuses) tuples, as
            produced by aggregate_votes_by_user
        :param trim_votes: when True, a required-votes count larger than
            the group size is trimmed down to the group size
        :return: list of one effective vote (status string) per member,
            or [] when no group voting rule is attached
        """
        group_vote_data = {}
        got_rule = False
        members = collections.OrderedDict()
        for review_obj, user, reasons, mandatory, statuses \
                in group_statuses_by_reviewers:

            if not got_rule:
                # the voting rule is the same for every member of the
                # group; read it from the first entry that carries it
                group_vote_data = review_obj.rule_user_group_data()
                got_rule = bool(group_vote_data)

            members[user.user_id] = statuses

        if not group_vote_data:
            # no voting rule attached to this group, nothing to calculate
            return []

        required_votes = group_vote_data['vote_rule']
        if required_votes == -1:
            # -1 means all required, so we replace it with how many people
            # are in the members
            required_votes = len(members)

        if trim_votes and required_votes > len(members):
            # we require more votes than we have members in the group
            # in this case we trim the required votes to the number of members
            required_votes = len(members)

        # count members whose latest status (first entry, lowest version)
        # is an approval
        approvals = sum([
            1 for statuses in members.values()
            if statuses and
            statuses[0][1].status == ChangesetStatus.STATUS_APPROVED])

        calculated_votes = []
        # we have all votes from users, now check if we have enough votes
        # to fill other
        fill_in = ChangesetStatus.STATUS_UNDER_REVIEW
        if approvals >= required_votes:
            fill_in = ChangesetStatus.STATUS_APPROVED

        for member, statuses in members.items():
            if statuses:
                ver, latest = statuses[0]
                if fill_in == ChangesetStatus.STATUS_APPROVED:
                    # quorum reached: the whole group counts as approved
                    calculated_votes.append(fill_in)
                else:
                    calculated_votes.append(latest.status)
            else:
                # member did not vote yet — takes the group's fill-in state
                calculated_votes.append(fill_in)

        return calculated_votes

    def calculate_status(self, statuses_by_reviewers):
        """
        Given the approval statuses from reviewers, calculates final approval
        status. There can only be 3 results: all approved, all rejected, or —
        if there is no consensus — the PR stays under review.

        :param statuses_by_reviewers: list of
            (review_obj, user, reasons, mandatory, statuses) tuples, as
            produced by reviewers_statuses/aggregate_votes_by_user
        :return: one of the ChangesetStatus.STATUS_* string constants
        """

        def group_rule(element):
            # key function: the id of the reviewer-group voting rule, or 0
            # for individual reviewers (no rule attached)
            _review_obj = element[0]
            rule_data = _review_obj.rule_user_group_data()
            if rule_data and rule_data['id']:
                return rule_data['id']
            # don't return None, as we can't compare/sort on it
            return 0

        # groupby requires its input pre-sorted by the same key
        voting_groups = itertools.groupby(sorted(statuses_by_reviewers, key=group_rule), group_rule)

        voting_by_groups = [(x, list(y)) for x, y in voting_groups]

        reviewers_number = len(statuses_by_reviewers)
        votes = collections.defaultdict(int)
        for group, group_statuses_by_reviewers in voting_by_groups:
            if group:
                # calculate how the "group" voted
                for vote_status in self.calculate_group_vote(
                        group, group_statuses_by_reviewers):
                    votes[vote_status] += 1
            else:

                for review_obj, user, reasons, mandatory, statuses \
                        in group_statuses_by_reviewers:
                    # individual vote
                    if statuses:
                        ver, latest = statuses[0]
                        votes[latest.status] += 1

        approved_votes_count = votes[ChangesetStatus.STATUS_APPROVED]
        rejected_votes_count = votes[ChangesetStatus.STATUS_REJECTED]

        # TODO(marcink): with group voting, how does rejected work,
        # do we ever get rejected state ?

        # unanimity is required for a final state; partial votes keep the
        # pull request under review
        if approved_votes_count and (approved_votes_count == reviewers_number):
            return ChangesetStatus.STATUS_APPROVED

        if rejected_votes_count and (rejected_votes_count == reviewers_number):
            return ChangesetStatus.STATUS_REJECTED

        return ChangesetStatus.STATUS_UNDER_REVIEW

    def get_statuses(self, repo, revision=None, pull_request=None,
                     with_revisions=False):
        """
        Return all status rows matching the given filters.

        See _get_status_query for parameter semantics.
        """
        q = self._get_status_query(repo, revision, pull_request,
                                   with_revisions)
        return q.all()

    def get_status(self, repo, revision=None, pull_request=None, as_str=True):
        """
        Returns latest status of changeset for given revision or for given
        pull request. Statuses are versioned inside a table itself and
        version == 0 is always the current one

        :param repo:
        :param revision: 40char hash or None
        :param pull_request: pull_request reference
        :param as_str: return status as string not object
        """
        q = self._get_status_query(repo, revision, pull_request)

        # need to use first here since there can be multiple statuses
        # returned from pull_request
        status = q.first()
        if as_str:
            status = status.status if status else status
            # fall back to the default status when none is recorded yet
            st = status or ChangesetStatus.DEFAULT
            return str(st)
        return status

    def _render_auto_status_message(
            self, status, commit_id=None, pull_request=None):
        """
        render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
        so it's always looking the same disregarding on which default
        renderer system is using.

        :param status: status text to change into
        :param commit_id: the commit_id we change the status for
        :param pull_request: the pull request we change the status for
        """

        new_status = ChangesetStatus.get_status_lbl(status)

        params = {
            'new_status_label': new_status,
            'pull_request': pull_request,
            'commit_id': commit_id,
        }
        renderer = RstTemplateRenderer()
        return renderer.render('auto_status_change.mako', **params)

    def set_status(self, repo, status, user, comment=None, revision=None,
                   pull_request=None, dont_allow_on_closed_pull_request=False):
        """
        Creates new status for changeset or updates the old ones bumping their
        version, leaving the current status at version 0.

        :param repo:
        :param revision:
        :param status:
        :param user:
        :param comment:
        :param dont_allow_on_closed_pull_request: don't allow a status change
            if last status was for pull request and it's closed. We shouldn't
            mess around this manually
        :raises StatusChangeOnClosedPullRequestError: when the guard above
            is enabled and the latest status belongs to a closed pull request
        :return: the new ChangesetStatus (revision case) or a list of them
            (pull request case, one per revision)
        """
        repo = self._get_repo(repo)

        q = ChangesetStatus.query()

        if revision:
            q = q.filter(ChangesetStatus.repo == repo)
            q = q.filter(ChangesetStatus.revision == revision)
        elif pull_request:
            pull_request = self.__get_pull_request(pull_request)
            # pull request statuses live on the *source* repo's revisions
            q = q.filter(ChangesetStatus.repo == pull_request.source_repo)
            q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
        cur_statuses = q.all()

        # if statuses exists and last is associated with a closed pull request
        # we need to check if we can allow this status change
        if (dont_allow_on_closed_pull_request and cur_statuses
                and getattr(cur_statuses[0].pull_request, 'status', '')
                == PullRequest.STATUS_CLOSED):
            raise StatusChangeOnClosedPullRequestError(
                'Changing status on closed pull request is not allowed'
            )

        # update all current statuses with older version
        if cur_statuses:
            for st in cur_statuses:
                st.version += 1
                Session().add(st)
            Session().flush()

        def _create_status(user, repo, status, comment, revision, pull_request):
            # build (but do not persist) a fresh status row at version 0
            new_status = ChangesetStatus()
            new_status.author = self._get_user(user)
            new_status.repo = self._get_repo(repo)
            new_status.status = status
            new_status.comment = comment
            new_status.revision = revision
            new_status.pull_request = pull_request
            return new_status

        if not comment:
            # auto-generate a status-change comment when none was supplied
            from rhodecode.model.comment import CommentsModel
            comment = CommentsModel().create(
                text=self._render_auto_status_message(
                    status, commit_id=revision, pull_request=pull_request),
                repo=repo,
                user=user,
                pull_request=pull_request,
                send_email=False, renderer=DEFAULT_COMMENTS_RENDERER
            )

        if revision:
            new_status = _create_status(
                user=user, repo=repo, status=status, comment=comment,
                revision=revision, pull_request=pull_request)
            Session().add(new_status)
            return new_status
        elif pull_request:
            # pull request can have more than one revision associated to it
            # we need to create new version for each one
            new_statuses = []
            repo = pull_request.source_repo
            for rev in pull_request.revisions:
                new_status = _create_status(
                    user=user, repo=repo, status=status, comment=comment,
                    revision=rev, pull_request=pull_request)
                new_statuses.append(new_status)
                Session().add(new_status)
            return new_statuses

    def aggregate_votes_by_user(self, commit_statuses, reviewers_data, user=None):
        """
        Pair each reviewer with their statuses, collapsed to the latest
        status per version.

        :param commit_statuses: iterable of ChangesetStatus rows
        :param reviewers_data: iterable of reviewer objects exposing
            .user, .reasons and .mandatory
        :param user: optional filter — only aggregate this user's votes
        :return: list of (review_obj, user, reasons, mandatory, statuses)
            tuples; when `user` is given, a single such tuple (or the
            empty list if that user is not a reviewer)
        """

        # group raw status rows by their author's username
        commit_statuses_map = collections.defaultdict(list)
        for st in commit_statuses:
            commit_statuses_map[st.author.username] += [st]

        reviewers = []

        def version(commit_status):
            # sort/group key: the status row's version number
            return commit_status.version

        for obj in reviewers_data:
            if not obj.user:
                continue
            if user and obj.user.username != user.username:
                # single user filter
                continue

            statuses = commit_statuses_map.get(obj.user.username, None)
            if statuses:
                # keep one (version, status) pair per version; list(y)[0]
                # takes the first row of each version group
                status_groups = itertools.groupby(
                    sorted(statuses, key=version), version)
                statuses = [(x, list(y)[0]) for x, y in status_groups]

            reviewers.append((obj, obj.user, obj.reasons, obj.mandatory, statuses))

        if user:
            return reviewers[0] if reviewers else reviewers
        else:
            return reviewers

    def reviewers_statuses(self, pull_request, user=None):
        """
        Return the aggregated per-reviewer vote data for `pull_request`,
        considering only members with the REVIEWER role.

        :param user: optional filter, see aggregate_votes_by_user
        """
        _commit_statuses = self.get_statuses(
            pull_request.source_repo,
            pull_request=pull_request,
            with_revisions=True)
        reviewers = pull_request.get_pull_request_reviewers(
            role=PullRequestReviewers.ROLE_REVIEWER)
        return self.aggregate_votes_by_user(_commit_statuses, reviewers, user=user)

    def calculated_review_status(self, pull_request):
        """
        calculate pull request status based on reviewers, it should be a list
        of two element lists.

        :return: one of the ChangesetStatus.STATUS_* string constants
        """
        reviewers = self.reviewers_statuses(pull_request)
        return self.calculate_status(reviewers)
@@ -1,857 +1,854 b''
1
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 comments model for RhodeCode
22 comments model for RhodeCode
23 """
23 """
24 import datetime
24 import datetime
25
25
26 import logging
26 import logging
27 import traceback
27 import traceback
28 import collections
28 import collections
29
29
30 from pyramid.threadlocal import get_current_registry, get_current_request
30 from pyramid.threadlocal import get_current_registry, get_current_request
31 from sqlalchemy.sql.expression import null
31 from sqlalchemy.sql.expression import null
32 from sqlalchemy.sql.functions import coalesce
32 from sqlalchemy.sql.functions import coalesce
33
33
34 from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils
34 from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils
35 from rhodecode.lib import audit_logger
35 from rhodecode.lib import audit_logger
36 from rhodecode.lib.exceptions import CommentVersionMismatch
36 from rhodecode.lib.exceptions import CommentVersionMismatch
37 from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int
37 from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int
38 from rhodecode.model import BaseModel
38 from rhodecode.model import BaseModel
39 from rhodecode.model.db import (
39 from rhodecode.model.db import (
40 false, true,
40 false, true,
41 ChangesetComment,
41 ChangesetComment,
42 User,
42 User,
43 Notification,
43 Notification,
44 PullRequest,
44 PullRequest,
45 AttributeDict,
45 AttributeDict,
46 ChangesetCommentHistory,
46 ChangesetCommentHistory,
47 )
47 )
48 from rhodecode.model.notification import NotificationModel
48 from rhodecode.model.notification import NotificationModel
49 from rhodecode.model.meta import Session
49 from rhodecode.model.meta import Session
50 from rhodecode.model.settings import VcsSettingsModel
50 from rhodecode.model.settings import VcsSettingsModel
51 from rhodecode.model.notification import EmailNotificationModel
51 from rhodecode.model.notification import EmailNotificationModel
52 from rhodecode.model.validation_schema.schemas import comment_schema
52 from rhodecode.model.validation_schema.schemas import comment_schema
53
53
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 class CommentsModel(BaseModel):
58 class CommentsModel(BaseModel):
59
59
60 cls = ChangesetComment
60 cls = ChangesetComment
61
61
62 DIFF_CONTEXT_BEFORE = 3
62 DIFF_CONTEXT_BEFORE = 3
63 DIFF_CONTEXT_AFTER = 3
63 DIFF_CONTEXT_AFTER = 3
64
64
65 def __get_commit_comment(self, changeset_comment):
65 def __get_commit_comment(self, changeset_comment):
66 return self._get_instance(ChangesetComment, changeset_comment)
66 return self._get_instance(ChangesetComment, changeset_comment)
67
67
68 def __get_pull_request(self, pull_request):
68 def __get_pull_request(self, pull_request):
69 return self._get_instance(PullRequest, pull_request)
69 return self._get_instance(PullRequest, pull_request)
70
70
71 def _extract_mentions(self, s):
71 def _extract_mentions(self, s):
72 user_objects = []
72 user_objects = []
73 for username in extract_mentioned_users(s):
73 for username in extract_mentioned_users(s):
74 user_obj = User.get_by_username(username, case_insensitive=True)
74 user_obj = User.get_by_username(username, case_insensitive=True)
75 if user_obj:
75 if user_obj:
76 user_objects.append(user_obj)
76 user_objects.append(user_obj)
77 return user_objects
77 return user_objects
78
78
79 def _get_renderer(self, global_renderer='rst', request=None):
79 def _get_renderer(self, global_renderer='rst', request=None):
80 request = request or get_current_request()
80 request = request or get_current_request()
81
81
82 try:
82 try:
83 global_renderer = request.call_context.visual.default_renderer
83 global_renderer = request.call_context.visual.default_renderer
84 except AttributeError:
84 except AttributeError:
85 log.debug("Renderer not set, falling back "
85 log.debug("Renderer not set, falling back "
86 "to default renderer '%s'", global_renderer)
86 "to default renderer '%s'", global_renderer)
87 except Exception:
87 except Exception:
88 log.error(traceback.format_exc())
88 log.error(traceback.format_exc())
89 return global_renderer
89 return global_renderer
90
90
91 def aggregate_comments(self, comments, versions, show_version, inline=False):
91 def aggregate_comments(self, comments, versions, show_version, inline=False):
92 # group by versions, and count until, and display objects
92 # group by versions, and count until, and display objects
93
93
94 comment_groups = collections.defaultdict(list)
94 comment_groups = collections.defaultdict(list)
95 [comment_groups[_co.pull_request_version_id].append(_co) for _co in comments]
95 [comment_groups[_co.pull_request_version_id].append(_co) for _co in comments]
96
96
97 def yield_comments(pos):
97 def yield_comments(pos):
98 for co in comment_groups[pos]:
98 for co in comment_groups[pos]:
99 yield co
99 yield co
100
100
101 comment_versions = collections.defaultdict(
101 comment_versions = collections.defaultdict(
102 lambda: collections.defaultdict(list))
102 lambda: collections.defaultdict(list))
103 prev_prvid = -1
103 prev_prvid = -1
104 # fake last entry with None, to aggregate on "latest" version which
104 # fake last entry with None, to aggregate on "latest" version which
105 # doesn't have an pull_request_version_id
105 # doesn't have an pull_request_version_id
106 for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
106 for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
107 prvid = ver.pull_request_version_id
107 prvid = ver.pull_request_version_id
108 if prev_prvid == -1:
108 if prev_prvid == -1:
109 prev_prvid = prvid
109 prev_prvid = prvid
110
110
111 for co in yield_comments(prvid):
111 for co in yield_comments(prvid):
112 comment_versions[prvid]['at'].append(co)
112 comment_versions[prvid]['at'].append(co)
113
113
114 # save until
114 # save until
115 current = comment_versions[prvid]['at']
115 current = comment_versions[prvid]['at']
116 prev_until = comment_versions[prev_prvid]['until']
116 prev_until = comment_versions[prev_prvid]['until']
117 cur_until = prev_until + current
117 cur_until = prev_until + current
118 comment_versions[prvid]['until'].extend(cur_until)
118 comment_versions[prvid]['until'].extend(cur_until)
119
119
120 # save outdated
120 # save outdated
121 if inline:
121 if inline:
122 outdated = [x for x in cur_until
122 outdated = [x for x in cur_until
123 if x.outdated_at_version(show_version)]
123 if x.outdated_at_version(show_version)]
124 else:
124 else:
125 outdated = [x for x in cur_until
125 outdated = [x for x in cur_until
126 if x.older_than_version(show_version)]
126 if x.older_than_version(show_version)]
127 display = [x for x in cur_until if x not in outdated]
127 display = [x for x in cur_until if x not in outdated]
128
128
129 comment_versions[prvid]['outdated'] = outdated
129 comment_versions[prvid]['outdated'] = outdated
130 comment_versions[prvid]['display'] = display
130 comment_versions[prvid]['display'] = display
131
131
132 prev_prvid = prvid
132 prev_prvid = prvid
133
133
134 return comment_versions
134 return comment_versions
135
135
136 def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None):
136 def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None):
137 qry = Session().query(ChangesetComment) \
137 qry = Session().query(ChangesetComment) \
138 .filter(ChangesetComment.repo == repo)
138 .filter(ChangesetComment.repo == repo)
139
139
140 if comment_type and comment_type in ChangesetComment.COMMENT_TYPES:
140 if comment_type and comment_type in ChangesetComment.COMMENT_TYPES:
141 qry = qry.filter(ChangesetComment.comment_type == comment_type)
141 qry = qry.filter(ChangesetComment.comment_type == comment_type)
142
142
143 if user:
143 if user:
144 user = self._get_user(user)
144 user = self._get_user(user)
145 if user:
145 if user:
146 qry = qry.filter(ChangesetComment.user_id == user.user_id)
146 qry = qry.filter(ChangesetComment.user_id == user.user_id)
147
147
148 if commit_id:
148 if commit_id:
149 qry = qry.filter(ChangesetComment.revision == commit_id)
149 qry = qry.filter(ChangesetComment.revision == commit_id)
150
150
151 qry = qry.order_by(ChangesetComment.created_on)
151 qry = qry.order_by(ChangesetComment.created_on)
152 return qry.all()
152 return qry.all()
153
153
154 def get_repository_unresolved_todos(self, repo):
154 def get_repository_unresolved_todos(self, repo):
155 todos = Session().query(ChangesetComment) \
155 todos = Session().query(ChangesetComment) \
156 .filter(ChangesetComment.repo == repo) \
156 .filter(ChangesetComment.repo == repo) \
157 .filter(ChangesetComment.resolved_by == None) \
157 .filter(ChangesetComment.resolved_by == None) \
158 .filter(ChangesetComment.comment_type
158 .filter(ChangesetComment.comment_type
159 == ChangesetComment.COMMENT_TYPE_TODO)
159 == ChangesetComment.COMMENT_TYPE_TODO)
160 todos = todos.all()
160 todos = todos.all()
161
161
162 return todos
162 return todos
163
163
164 def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
164 def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
165
165
166 todos = Session().query(ChangesetComment) \
166 todos = Session().query(ChangesetComment) \
167 .filter(ChangesetComment.pull_request == pull_request) \
167 .filter(ChangesetComment.pull_request == pull_request) \
168 .filter(ChangesetComment.resolved_by == None) \
168 .filter(ChangesetComment.resolved_by == None) \
169 .filter(ChangesetComment.comment_type
169 .filter(ChangesetComment.comment_type
170 == ChangesetComment.COMMENT_TYPE_TODO)
170 == ChangesetComment.COMMENT_TYPE_TODO)
171
171
172 if not include_drafts:
172 if not include_drafts:
173 todos = todos.filter(ChangesetComment.draft == false())
173 todos = todos.filter(ChangesetComment.draft == false())
174
174
175 if not show_outdated:
175 if not show_outdated:
176 todos = todos.filter(
176 todos = todos.filter(
177 coalesce(ChangesetComment.display_state, '') !=
177 coalesce(ChangesetComment.display_state, '') !=
178 ChangesetComment.COMMENT_OUTDATED)
178 ChangesetComment.COMMENT_OUTDATED)
179
179
180 todos = todos.all()
180 todos = todos.all()
181
181
182 return todos
182 return todos
183
183
184 def get_pull_request_resolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
184 def get_pull_request_resolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
185
185
186 todos = Session().query(ChangesetComment) \
186 todos = Session().query(ChangesetComment) \
187 .filter(ChangesetComment.pull_request == pull_request) \
187 .filter(ChangesetComment.pull_request == pull_request) \
188 .filter(ChangesetComment.resolved_by != None) \
188 .filter(ChangesetComment.resolved_by != None) \
189 .filter(ChangesetComment.comment_type
189 .filter(ChangesetComment.comment_type
190 == ChangesetComment.COMMENT_TYPE_TODO)
190 == ChangesetComment.COMMENT_TYPE_TODO)
191
191
192 if not include_drafts:
192 if not include_drafts:
193 todos = todos.filter(ChangesetComment.draft == false())
193 todos = todos.filter(ChangesetComment.draft == false())
194
194
195 if not show_outdated:
195 if not show_outdated:
196 todos = todos.filter(
196 todos = todos.filter(
197 coalesce(ChangesetComment.display_state, '') !=
197 coalesce(ChangesetComment.display_state, '') !=
198 ChangesetComment.COMMENT_OUTDATED)
198 ChangesetComment.COMMENT_OUTDATED)
199
199
200 todos = todos.all()
200 todos = todos.all()
201
201
202 return todos
202 return todos
203
203
204 def get_pull_request_drafts(self, user_id, pull_request):
204 def get_pull_request_drafts(self, user_id, pull_request):
205 drafts = Session().query(ChangesetComment) \
205 drafts = Session().query(ChangesetComment) \
206 .filter(ChangesetComment.pull_request == pull_request) \
206 .filter(ChangesetComment.pull_request == pull_request) \
207 .filter(ChangesetComment.user_id == user_id) \
207 .filter(ChangesetComment.user_id == user_id) \
208 .filter(ChangesetComment.draft == true())
208 .filter(ChangesetComment.draft == true())
209 return drafts.all()
209 return drafts.all()
210
210
211 def get_commit_unresolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
211 def get_commit_unresolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
212
212
213 todos = Session().query(ChangesetComment) \
213 todos = Session().query(ChangesetComment) \
214 .filter(ChangesetComment.revision == commit_id) \
214 .filter(ChangesetComment.revision == commit_id) \
215 .filter(ChangesetComment.resolved_by == None) \
215 .filter(ChangesetComment.resolved_by == None) \
216 .filter(ChangesetComment.comment_type
216 .filter(ChangesetComment.comment_type
217 == ChangesetComment.COMMENT_TYPE_TODO)
217 == ChangesetComment.COMMENT_TYPE_TODO)
218
218
219 if not include_drafts:
219 if not include_drafts:
220 todos = todos.filter(ChangesetComment.draft == false())
220 todos = todos.filter(ChangesetComment.draft == false())
221
221
222 if not show_outdated:
222 if not show_outdated:
223 todos = todos.filter(
223 todos = todos.filter(
224 coalesce(ChangesetComment.display_state, '') !=
224 coalesce(ChangesetComment.display_state, '') !=
225 ChangesetComment.COMMENT_OUTDATED)
225 ChangesetComment.COMMENT_OUTDATED)
226
226
227 todos = todos.all()
227 todos = todos.all()
228
228
229 return todos
229 return todos
230
230
231 def get_commit_resolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
231 def get_commit_resolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
232
232
233 todos = Session().query(ChangesetComment) \
233 todos = Session().query(ChangesetComment) \
234 .filter(ChangesetComment.revision == commit_id) \
234 .filter(ChangesetComment.revision == commit_id) \
235 .filter(ChangesetComment.resolved_by != None) \
235 .filter(ChangesetComment.resolved_by != None) \
236 .filter(ChangesetComment.comment_type
236 .filter(ChangesetComment.comment_type
237 == ChangesetComment.COMMENT_TYPE_TODO)
237 == ChangesetComment.COMMENT_TYPE_TODO)
238
238
239 if not include_drafts:
239 if not include_drafts:
240 todos = todos.filter(ChangesetComment.draft == false())
240 todos = todos.filter(ChangesetComment.draft == false())
241
241
242 if not show_outdated:
242 if not show_outdated:
243 todos = todos.filter(
243 todos = todos.filter(
244 coalesce(ChangesetComment.display_state, '') !=
244 coalesce(ChangesetComment.display_state, '') !=
245 ChangesetComment.COMMENT_OUTDATED)
245 ChangesetComment.COMMENT_OUTDATED)
246
246
247 todos = todos.all()
247 todos = todos.all()
248
248
249 return todos
249 return todos
250
250
251 def get_commit_inline_comments(self, commit_id, include_drafts=True):
251 def get_commit_inline_comments(self, commit_id, include_drafts=True):
252 inline_comments = Session().query(ChangesetComment) \
252 inline_comments = Session().query(ChangesetComment) \
253 .filter(ChangesetComment.line_no != None) \
253 .filter(ChangesetComment.line_no != None) \
254 .filter(ChangesetComment.f_path != None) \
254 .filter(ChangesetComment.f_path != None) \
255 .filter(ChangesetComment.revision == commit_id)
255 .filter(ChangesetComment.revision == commit_id)
256
256
257 if not include_drafts:
257 if not include_drafts:
258 inline_comments = inline_comments.filter(ChangesetComment.draft == false())
258 inline_comments = inline_comments.filter(ChangesetComment.draft == false())
259
259
260 inline_comments = inline_comments.all()
260 inline_comments = inline_comments.all()
261 return inline_comments
261 return inline_comments
262
262
263 def _log_audit_action(self, action, action_data, auth_user, comment):
263 def _log_audit_action(self, action, action_data, auth_user, comment):
264 audit_logger.store(
264 audit_logger.store(
265 action=action,
265 action=action,
266 action_data=action_data,
266 action_data=action_data,
267 user=auth_user,
267 user=auth_user,
268 repo=comment.repo)
268 repo=comment.repo)
269
269
270 def create(self, text, repo, user, commit_id=None, pull_request=None,
270 def create(self, text, repo, user, commit_id=None, pull_request=None,
271 f_path=None, line_no=None, status_change=None,
271 f_path=None, line_no=None, status_change=None,
272 status_change_type=None, comment_type=None, is_draft=False,
272 status_change_type=None, comment_type=None, is_draft=False,
273 resolves_comment_id=None, closing_pr=False, send_email=True,
273 resolves_comment_id=None, closing_pr=False, send_email=True,
274 renderer=None, auth_user=None, extra_recipients=None):
274 renderer=None, auth_user=None, extra_recipients=None):
275 """
275 """
276 Creates new comment for commit or pull request.
276 Creates new comment for commit or pull request.
277 IF status_change is not none this comment is associated with a
277 IF status_change is not none this comment is associated with a
278 status change of commit or commit associated with pull request
278 status change of commit or commit associated with pull request
279
279
280 :param text:
280 :param text:
281 :param repo:
281 :param repo:
282 :param user:
282 :param user:
283 :param commit_id:
283 :param commit_id:
284 :param pull_request:
284 :param pull_request:
285 :param f_path:
285 :param f_path:
286 :param line_no:
286 :param line_no:
287 :param status_change: Label for status change
287 :param status_change: Label for status change
288 :param comment_type: Type of comment
288 :param comment_type: Type of comment
289 :param is_draft: is comment a draft only
289 :param is_draft: is comment a draft only
290 :param resolves_comment_id: id of comment which this one will resolve
290 :param resolves_comment_id: id of comment which this one will resolve
291 :param status_change_type: type of status change
291 :param status_change_type: type of status change
292 :param closing_pr:
292 :param closing_pr:
293 :param send_email:
293 :param send_email:
294 :param renderer: pick renderer for this comment
294 :param renderer: pick renderer for this comment
295 :param auth_user: current authenticated user calling this method
295 :param auth_user: current authenticated user calling this method
296 :param extra_recipients: list of extra users to be added to recipients
296 :param extra_recipients: list of extra users to be added to recipients
297 """
297 """
298
298
299 if not text:
300 log.warning('Missing text for comment, skipping...')
301 return
302 request = get_current_request()
299 request = get_current_request()
303 _ = request.translate
300 _ = request.translate
304
301
305 if not renderer:
302 if not renderer:
306 renderer = self._get_renderer(request=request)
303 renderer = self._get_renderer(request=request)
307
304
308 repo = self._get_repo(repo)
305 repo = self._get_repo(repo)
309 user = self._get_user(user)
306 user = self._get_user(user)
310 auth_user = auth_user or user
307 auth_user = auth_user or user
311
308
312 schema = comment_schema.CommentSchema()
309 schema = comment_schema.CommentSchema()
313 validated_kwargs = schema.deserialize(dict(
310 validated_kwargs = schema.deserialize(dict(
314 comment_body=text,
311 comment_body=text,
315 comment_type=comment_type,
312 comment_type=comment_type,
316 is_draft=is_draft,
313 is_draft=is_draft,
317 comment_file=f_path,
314 comment_file=f_path,
318 comment_line=line_no,
315 comment_line=line_no,
319 renderer_type=renderer,
316 renderer_type=renderer,
320 status_change=status_change_type,
317 status_change=status_change_type,
321 resolves_comment_id=resolves_comment_id,
318 resolves_comment_id=resolves_comment_id,
322 repo=repo.repo_id,
319 repo=repo.repo_id,
323 user=user.user_id,
320 user=user.user_id,
324 ))
321 ))
325 is_draft = validated_kwargs['is_draft']
322 is_draft = validated_kwargs['is_draft']
326
323
327 comment = ChangesetComment()
324 comment = ChangesetComment()
328 comment.renderer = validated_kwargs['renderer_type']
325 comment.renderer = validated_kwargs['renderer_type']
329 comment.text = validated_kwargs['comment_body']
326 comment.text = validated_kwargs['comment_body']
330 comment.f_path = validated_kwargs['comment_file']
327 comment.f_path = validated_kwargs['comment_file']
331 comment.line_no = validated_kwargs['comment_line']
328 comment.line_no = validated_kwargs['comment_line']
332 comment.comment_type = validated_kwargs['comment_type']
329 comment.comment_type = validated_kwargs['comment_type']
333 comment.draft = is_draft
330 comment.draft = is_draft
334
331
335 comment.repo = repo
332 comment.repo = repo
336 comment.author = user
333 comment.author = user
337 resolved_comment = self.__get_commit_comment(
334 resolved_comment = self.__get_commit_comment(
338 validated_kwargs['resolves_comment_id'])
335 validated_kwargs['resolves_comment_id'])
339
336
340 # check if the comment actually belongs to this PR
337 # check if the comment actually belongs to this PR
341 if resolved_comment and resolved_comment.pull_request and \
338 if resolved_comment and resolved_comment.pull_request and \
342 resolved_comment.pull_request != pull_request:
339 resolved_comment.pull_request != pull_request:
343 log.warning('Comment tried to resolved unrelated todo comment: %s',
340 log.warning('Comment tried to resolved unrelated todo comment: %s',
344 resolved_comment)
341 resolved_comment)
345 # comment not bound to this pull request, forbid
342 # comment not bound to this pull request, forbid
346 resolved_comment = None
343 resolved_comment = None
347
344
348 elif resolved_comment and resolved_comment.repo and \
345 elif resolved_comment and resolved_comment.repo and \
349 resolved_comment.repo != repo:
346 resolved_comment.repo != repo:
350 log.warning('Comment tried to resolved unrelated todo comment: %s',
347 log.warning('Comment tried to resolved unrelated todo comment: %s',
351 resolved_comment)
348 resolved_comment)
352 # comment not bound to this repo, forbid
349 # comment not bound to this repo, forbid
353 resolved_comment = None
350 resolved_comment = None
354
351
355 if resolved_comment and resolved_comment.resolved_by:
352 if resolved_comment and resolved_comment.resolved_by:
356 # if this comment is already resolved, don't mark it again!
353 # if this comment is already resolved, don't mark it again!
357 resolved_comment = None
354 resolved_comment = None
358
355
359 comment.resolved_comment = resolved_comment
356 comment.resolved_comment = resolved_comment
360
357
361 pull_request_id = pull_request
358 pull_request_id = pull_request
362
359
363 commit_obj = None
360 commit_obj = None
364 pull_request_obj = None
361 pull_request_obj = None
365
362
366 if commit_id:
363 if commit_id:
367 notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT
364 notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT
368 # do a lookup, so we don't pass something bad here
365 # do a lookup, so we don't pass something bad here
369 commit_obj = repo.scm_instance().get_commit(commit_id=commit_id)
366 commit_obj = repo.scm_instance().get_commit(commit_id=commit_id)
370 comment.revision = commit_obj.raw_id
367 comment.revision = commit_obj.raw_id
371
368
372 elif pull_request_id:
369 elif pull_request_id:
373 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT
370 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT
374 pull_request_obj = self.__get_pull_request(pull_request_id)
371 pull_request_obj = self.__get_pull_request(pull_request_id)
375 comment.pull_request = pull_request_obj
372 comment.pull_request = pull_request_obj
376 else:
373 else:
377 raise Exception('Please specify commit or pull_request_id')
374 raise Exception('Please specify commit or pull_request_id')
378
375
379 Session().add(comment)
376 Session().add(comment)
380 Session().flush()
377 Session().flush()
381 kwargs = {
378 kwargs = {
382 'user': user,
379 'user': user,
383 'renderer_type': renderer,
380 'renderer_type': renderer,
384 'repo_name': repo.repo_name,
381 'repo_name': repo.repo_name,
385 'status_change': status_change,
382 'status_change': status_change,
386 'status_change_type': status_change_type,
383 'status_change_type': status_change_type,
387 'comment_body': text,
384 'comment_body': text,
388 'comment_file': f_path,
385 'comment_file': f_path,
389 'comment_line': line_no,
386 'comment_line': line_no,
390 'comment_type': comment_type or 'note',
387 'comment_type': comment_type or 'note',
391 'comment_id': comment.comment_id
388 'comment_id': comment.comment_id
392 }
389 }
393
390
394 if commit_obj:
391 if commit_obj:
395 recipients = ChangesetComment.get_users(
392 recipients = ChangesetComment.get_users(
396 revision=commit_obj.raw_id)
393 revision=commit_obj.raw_id)
397 # add commit author if it's in RhodeCode system
394 # add commit author if it's in RhodeCode system
398 cs_author = User.get_from_cs_author(commit_obj.author)
395 cs_author = User.get_from_cs_author(commit_obj.author)
399 if not cs_author:
396 if not cs_author:
400 # use repo owner if we cannot extract the author correctly
397 # use repo owner if we cannot extract the author correctly
401 cs_author = repo.user
398 cs_author = repo.user
402 recipients += [cs_author]
399 recipients += [cs_author]
403
400
404 commit_comment_url = self.get_url(comment, request=request)
401 commit_comment_url = self.get_url(comment, request=request)
405 commit_comment_reply_url = self.get_url(
402 commit_comment_reply_url = self.get_url(
406 comment, request=request,
403 comment, request=request,
407 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
404 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
408
405
409 target_repo_url = h.link_to(
406 target_repo_url = h.link_to(
410 repo.repo_name,
407 repo.repo_name,
411 h.route_url('repo_summary', repo_name=repo.repo_name))
408 h.route_url('repo_summary', repo_name=repo.repo_name))
412
409
413 commit_url = h.route_url('repo_commit', repo_name=repo.repo_name,
410 commit_url = h.route_url('repo_commit', repo_name=repo.repo_name,
414 commit_id=commit_id)
411 commit_id=commit_id)
415
412
416 # commit specifics
413 # commit specifics
417 kwargs.update({
414 kwargs.update({
418 'commit': commit_obj,
415 'commit': commit_obj,
419 'commit_message': commit_obj.message,
416 'commit_message': commit_obj.message,
420 'commit_target_repo_url': target_repo_url,
417 'commit_target_repo_url': target_repo_url,
421 'commit_comment_url': commit_comment_url,
418 'commit_comment_url': commit_comment_url,
422 'commit_comment_reply_url': commit_comment_reply_url,
419 'commit_comment_reply_url': commit_comment_reply_url,
423 'commit_url': commit_url,
420 'commit_url': commit_url,
424 'thread_ids': [commit_url, commit_comment_url],
421 'thread_ids': [commit_url, commit_comment_url],
425 })
422 })
426
423
427 elif pull_request_obj:
424 elif pull_request_obj:
428 # get the current participants of this pull request
425 # get the current participants of this pull request
429 recipients = ChangesetComment.get_users(
426 recipients = ChangesetComment.get_users(
430 pull_request_id=pull_request_obj.pull_request_id)
427 pull_request_id=pull_request_obj.pull_request_id)
431 # add pull request author
428 # add pull request author
432 recipients += [pull_request_obj.author]
429 recipients += [pull_request_obj.author]
433
430
434 # add the reviewers to notification
431 # add the reviewers to notification
435 recipients += [x.user for x in pull_request_obj.get_pull_request_reviewers()]
432 recipients += [x.user for x in pull_request_obj.get_pull_request_reviewers()]
436
433
437 pr_target_repo = pull_request_obj.target_repo
434 pr_target_repo = pull_request_obj.target_repo
438 pr_source_repo = pull_request_obj.source_repo
435 pr_source_repo = pull_request_obj.source_repo
439
436
440 pr_comment_url = self.get_url(comment, request=request)
437 pr_comment_url = self.get_url(comment, request=request)
441 pr_comment_reply_url = self.get_url(
438 pr_comment_reply_url = self.get_url(
442 comment, request=request,
439 comment, request=request,
443 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
440 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
444
441
445 pr_url = h.route_url(
442 pr_url = h.route_url(
446 'pullrequest_show',
443 'pullrequest_show',
447 repo_name=pr_target_repo.repo_name,
444 repo_name=pr_target_repo.repo_name,
448 pull_request_id=pull_request_obj.pull_request_id, )
445 pull_request_id=pull_request_obj.pull_request_id, )
449
446
450 # set some variables for email notification
447 # set some variables for email notification
451 pr_target_repo_url = h.route_url(
448 pr_target_repo_url = h.route_url(
452 'repo_summary', repo_name=pr_target_repo.repo_name)
449 'repo_summary', repo_name=pr_target_repo.repo_name)
453
450
454 pr_source_repo_url = h.route_url(
451 pr_source_repo_url = h.route_url(
455 'repo_summary', repo_name=pr_source_repo.repo_name)
452 'repo_summary', repo_name=pr_source_repo.repo_name)
456
453
457 # pull request specifics
454 # pull request specifics
458 kwargs.update({
455 kwargs.update({
459 'pull_request': pull_request_obj,
456 'pull_request': pull_request_obj,
460 'pr_id': pull_request_obj.pull_request_id,
457 'pr_id': pull_request_obj.pull_request_id,
461 'pull_request_url': pr_url,
458 'pull_request_url': pr_url,
462 'pull_request_target_repo': pr_target_repo,
459 'pull_request_target_repo': pr_target_repo,
463 'pull_request_target_repo_url': pr_target_repo_url,
460 'pull_request_target_repo_url': pr_target_repo_url,
464 'pull_request_source_repo': pr_source_repo,
461 'pull_request_source_repo': pr_source_repo,
465 'pull_request_source_repo_url': pr_source_repo_url,
462 'pull_request_source_repo_url': pr_source_repo_url,
466 'pr_comment_url': pr_comment_url,
463 'pr_comment_url': pr_comment_url,
467 'pr_comment_reply_url': pr_comment_reply_url,
464 'pr_comment_reply_url': pr_comment_reply_url,
468 'pr_closing': closing_pr,
465 'pr_closing': closing_pr,
469 'thread_ids': [pr_url, pr_comment_url],
466 'thread_ids': [pr_url, pr_comment_url],
470 })
467 })
471
468
472 if send_email:
469 if send_email:
473 recipients += [self._get_user(u) for u in (extra_recipients or [])]
470 recipients += [self._get_user(u) for u in (extra_recipients or [])]
474
471
475 mention_recipients = set(
472 mention_recipients = set(
476 self._extract_mentions(text)).difference(recipients)
473 self._extract_mentions(text)).difference(recipients)
477
474
478 # create notification objects, and emails
475 # create notification objects, and emails
479 NotificationModel().create(
476 NotificationModel().create(
480 created_by=user,
477 created_by=user,
481 notification_subject='', # Filled in based on the notification_type
478 notification_subject='', # Filled in based on the notification_type
482 notification_body='', # Filled in based on the notification_type
479 notification_body='', # Filled in based on the notification_type
483 notification_type=notification_type,
480 notification_type=notification_type,
484 recipients=recipients,
481 recipients=recipients,
485 mention_recipients=mention_recipients,
482 mention_recipients=mention_recipients,
486 email_kwargs=kwargs,
483 email_kwargs=kwargs,
487 )
484 )
488
485
489 Session().flush()
486 Session().flush()
490 if comment.pull_request:
487 if comment.pull_request:
491 action = 'repo.pull_request.comment.create'
488 action = 'repo.pull_request.comment.create'
492 else:
489 else:
493 action = 'repo.commit.comment.create'
490 action = 'repo.commit.comment.create'
494
491
495 if not is_draft:
492 if not is_draft:
496 comment_data = comment.get_api_data()
493 comment_data = comment.get_api_data()
497
494
498 self._log_audit_action(
495 self._log_audit_action(
499 action, {'data': comment_data}, auth_user, comment)
496 action, {'data': comment_data}, auth_user, comment)
500
497
501 return comment
498 return comment
502
499
def edit(self, comment_id, text, auth_user, version):
    """
    Change an existing comment on a commit or pull request.

    The previous text is preserved as a new ChangesetCommentHistory row
    and the comment itself is updated in place.

    :param comment_id: database id of the comment to edit
    :param text: new comment text; empty text skips the edit entirely
    :param auth_user: current authenticated user calling this method
    :param version: last comment version known by the caller, used for
        optimistic concurrency control
    :raises CommentVersionMismatch: when the submitted version does not
        match the latest stored version
    :return: the created ChangesetCommentHistory entry, or None when the
        text was empty
    """
    if not text:
        log.warning('Missing text for comment, skipping...')
        return

    comment = ChangesetComment.get(comment_id)
    old_comment_text = comment.text
    comment.text = text
    comment.modified_at = datetime.datetime.now()
    version = safe_int(version)

    # NOTE(marcink): this returns initial comment + edits, so v2 from ui
    # would return 3 here
    comment_version = ChangesetCommentHistory.get_version(comment_id)

    if isinstance(version, int) and (comment_version - version) != 1:
        # lazy %-style args: formatting only happens if the record is emitted
        log.warning(
            'Version mismatch comment_version %s submitted %s, skipping',
            comment_version - 1,  # -1 since note above
            version)
        raise CommentVersionMismatch()

    comment_history = ChangesetCommentHistory()
    comment_history.comment_id = comment_id
    comment_history.version = comment_version
    comment_history.created_by_user_id = auth_user.user_id
    comment_history.text = old_comment_text
    # TODO add email notification
    Session().add(comment_history)
    Session().add(comment)
    Session().flush()

    if comment.pull_request:
        action = 'repo.pull_request.comment.edit'
    else:
        action = 'repo.commit.comment.edit'

    comment_data = comment.get_api_data()
    comment_data['old_comment_text'] = old_comment_text
    self._log_audit_action(
        action, {'data': comment_data}, auth_user, comment)

    return comment_history
556
553
def delete(self, comment, auth_user):
    """
    Delete the given comment and write an audit-log entry for it.

    :param comment: comment id or ChangesetComment instance
    :param auth_user: user performing the deletion (recorded in the audit log)
    :return: the deleted comment object
    """
    comment = self.__get_commit_comment(comment)
    old_data = comment.get_api_data()
    Session().delete(comment)

    # the audit action differs for PR comments vs plain commit comments
    action = ('repo.pull_request.comment.delete' if comment.pull_request
              else 'repo.commit.comment.delete')

    self._log_audit_action(
        action, {'old_data': old_data}, auth_user, comment)

    return comment
574
571
def get_all_comments(self, repo_id, revision=None, pull_request=None,
                     include_drafts=True, count_only=False):
    """
    Fetch (or count) every comment attached to a commit or pull request.

    :param repo_id: repository id the comments belong to
    :param revision: commit id; takes precedence over pull_request
    :param pull_request: pull request id/instance, used when no revision
    :param include_drafts: when False, draft comments are excluded
    :param count_only: when True, return only the number of matches
    :raises Exception: when neither revision nor pull_request is given
    """
    query = ChangesetComment.query().filter(
        ChangesetComment.repo_id == repo_id)

    if revision:
        query = query.filter(ChangesetComment.revision == revision)
    elif pull_request:
        pull_request = self.__get_pull_request(pull_request)
        query = query.filter(
            ChangesetComment.pull_request_id == pull_request.pull_request_id)
    else:
        raise Exception('Please specify commit or pull_request')

    if not include_drafts:
        query = query.filter(ChangesetComment.draft == false())

    query = query.order_by(ChangesetComment.created_on)
    return query.count() if count_only else query.all()
593
590
def get_url(self, comment, request=None, permalink=False, anchor=None):
    """
    Build a URL pointing at the given comment.

    :param comment: comment id or ChangesetComment instance
    :param request: pyramid request; defaults to the current thread-local one
    :param permalink: when True, build a stable id-based URL
    :param anchor: URL fragment to append; defaults to 'comment-<id>'
    """
    request = request or get_current_request()

    comment = self.__get_commit_comment(comment)
    if anchor is None:
        anchor = 'comment-{}'.format(comment.comment_id)

    pull_request = comment.pull_request
    if pull_request:
        if permalink:
            return request.route_url(
                'pull_requests_global',
                pull_request_id=pull_request.pull_request_id,
                _anchor=anchor)
        return request.route_url(
            'pullrequest_show',
            repo_name=safe_str(pull_request.target_repo.repo_name),
            pull_request_id=pull_request.pull_request_id,
            _anchor=anchor)

    repo = comment.repo
    commit_id = comment.revision
    # NOTE(review): the permalink variant passes repo_id where repo_name is
    # expected — presumably an id-based redirect route; confirm with routing.
    route_repo_name = (safe_str(repo.repo_id) if permalink
                       else safe_str(repo.repo_name))
    return request.route_url(
        'repo_commit', repo_name=route_repo_name,
        commit_id=commit_id,
        _anchor=anchor)
631
628
def get_comments(self, repo_id, revision=None, pull_request=None):
    """
    Gets main (non-inline) comments based on revision or pull request.

    Main comments carry no line number and no file path.

    :param repo_id: repository id the comments belong to
    :param revision: commit id; takes precedence over pull_request
    :param pull_request: pull request id/instance, used when no revision
    :raises Exception: when neither revision nor pull_request is given
    """
    # `== None` is intentional: SQLAlchemy translates it to IS NULL
    query = ChangesetComment.query()\
        .filter(ChangesetComment.repo_id == repo_id)\
        .filter(ChangesetComment.line_no == None)\
        .filter(ChangesetComment.f_path == None)

    if revision:
        query = query.filter(ChangesetComment.revision == revision)
    elif pull_request:
        pull_request = self.__get_pull_request(pull_request)
        query = query.filter(ChangesetComment.pull_request == pull_request)
    else:
        raise Exception('Please specify commit or pull_request')

    return query.order_by(ChangesetComment.created_on).all()
654
651
def get_inline_comments(self, repo_id, revision=None, pull_request=None):
    """
    Return inline comments grouped as ``{f_path: {line_no: [comments]}}``.
    """
    inline_q = self._get_inline_comments_query(repo_id, revision, pull_request)
    return self._group_comments_by_path_and_line_number(inline_q)
658
655
def get_inline_comments_as_list(self, inline_comments, skip_outdated=True,
                                version=None):
    """
    Flatten the nested ``{file: {line: [comments]}}`` structure into a
    plain list of comments.

    :param inline_comments: mapping as returned by get_inline_comments()
    :param skip_outdated: when True (default), comments outdated at the
        given version are filtered out
    :param version: pull request version used to evaluate outdated-ness
    """
    inline_comms = []
    for fname, per_line_comments in inline_comments.items():
        for lno, comments in per_line_comments.items():
            for comm in comments:
                # BUG FIX: the old condition
                # `not comm.outdated_at_version(version) and skip_outdated`
                # returned an empty list whenever skip_outdated=False.
                # A comment is excluded only when it is outdated AND we
                # were asked to skip outdated ones.
                if not (skip_outdated and comm.outdated_at_version(version)):
                    inline_comms.append(comm)

    return inline_comms
669
666
def get_outdated_comments(self, repo_id, pull_request):
    """
    Return outdated inline comments of the pull request, grouped by file
    path and line number.
    """
    # TODO: johbo: Remove `repo_id`, it is not needed to find the comments
    # of a pull request.
    outdated_q = self._all_inline_comments_of_pull_request(pull_request)
    outdated_q = outdated_q.filter(
        ChangesetComment.display_state ==
        ChangesetComment.COMMENT_OUTDATED
    ).order_by(ChangesetComment.comment_id.asc())

    return self._group_comments_by_path_and_line_number(outdated_q)
680
677
def _get_inline_comments_query(self, repo_id, revision, pull_request):
    """
    Build the inline-comments query for either a commit or a pull request.

    :raises Exception: when neither revision nor pull_request is given
    """
    # TODO: johbo: Split this into two methods: One for PR and one for
    # commit.
    if revision:
        query = Session().query(ChangesetComment).filter(
            ChangesetComment.repo_id == repo_id,
            ChangesetComment.line_no != null(),
            ChangesetComment.f_path != null(),
            ChangesetComment.revision == revision)
    elif pull_request:
        pull_request = self.__get_pull_request(pull_request)
        if CommentsModel.use_outdated_comments(pull_request):
            query = self._all_inline_comments_of_pull_request(pull_request)
        else:
            query = self._visible_inline_comments_of_pull_request(pull_request)
    else:
        raise Exception('Please specify commit or pull_request_id')

    return query.order_by(ChangesetComment.comment_id.asc())
702
699
703 def _group_comments_by_path_and_line_number(self, q):
700 def _group_comments_by_path_and_line_number(self, q):
704 comments = q.all()
701 comments = q.all()
705 paths = collections.defaultdict(lambda: collections.defaultdict(list))
702 paths = collections.defaultdict(lambda: collections.defaultdict(list))
706 for co in comments:
703 for co in comments:
707 paths[co.f_path][co.line_no].append(co)
704 paths[co.f_path][co.line_no].append(co)
708 return paths
705 return paths
709
706
@classmethod
def needed_extra_diff_context(cls):
    """Amount of extra diff context required for comment relocation."""
    return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER)
713
710
def outdate_comments(self, pull_request, old_diff_data, new_diff_data):
    """
    Mark or relocate all visible inline comments of a pull request after
    its diff changed. A no-op unless the outdated-comments feature is
    enabled for the target repository.
    """
    if not CommentsModel.use_outdated_comments(pull_request):
        return

    visible = self._visible_inline_comments_of_pull_request(pull_request)
    for comment in visible.all():
        self._outdate_one_comment(comment, old_diff_data, new_diff_data)
723
720
def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc):
    """
    Re-anchor a single inline comment after a pull request update.

    If the commented line cannot be located in the new diff, the comment
    is marked outdated (draft comments are never flagged); otherwise it
    is moved to the closest matching line.

    :param comment: ChangesetComment with ``line_no``/``f_path`` set
    :param old_diff_proc: diff processor for the previous PR version
    :param new_diff_proc: diff processor for the current PR version
    """
    diff_line = _parse_comment_line_number(comment.line_no)

    try:
        old_context = old_diff_proc.get_context_of_line(
            path=comment.f_path, diff_line=diff_line)
        new_context = new_diff_proc.get_context_of_line(
            path=comment.f_path, diff_line=diff_line)
    except (diffs.LineNotInDiffException,
            diffs.FileNotInDiffException):
        # the line or whole file disappeared from the diff
        if not comment.draft:
            comment.display_state = ChangesetComment.COMMENT_OUTDATED
        return

    if old_context == new_context:
        # surrounding lines are unchanged; the comment stays put
        return

    if self._should_relocate_diff_line(diff_line):
        new_diff_lines = new_diff_proc.find_context(
            path=comment.f_path, context=old_context,
            offset=self.DIFF_CONTEXT_BEFORE)
        if not new_diff_lines:
            # BUG FIX: previously `not new_diff_lines and not comment.draft`
            # sent an empty result for a *draft* comment into the else
            # branch, crashing with IndexError in _choose_closest_diff_line.
            # With no candidate lines there is nothing to relocate; only
            # flag non-draft comments as outdated.
            if not comment.draft:
                comment.display_state = ChangesetComment.COMMENT_OUTDATED
        else:
            new_diff_line = self._choose_closest_diff_line(
                diff_line, new_diff_lines)
            comment.line_no = _diff_to_comment_line_number(new_diff_line)
    else:
        # comments near the top of the file are not relocated
        # (see _should_relocate_diff_line); mark outdated instead
        if not comment.draft:
            comment.display_state = ChangesetComment.COMMENT_OUTDATED
754
751
755 def _should_relocate_diff_line(self, diff_line):
752 def _should_relocate_diff_line(self, diff_line):
756 """
753 """
757 Checks if relocation shall be tried for the given `diff_line`.
754 Checks if relocation shall be tried for the given `diff_line`.
758
755
759 If a comment points into the first lines, then we can have a situation
756 If a comment points into the first lines, then we can have a situation
760 that after an update another line has been added on top. In this case
757 that after an update another line has been added on top. In this case
761 we would find the context still and move the comment around. This
758 we would find the context still and move the comment around. This
762 would be wrong.
759 would be wrong.
763 """
760 """
764 should_relocate = (
761 should_relocate = (
765 (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or
762 (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or
766 (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE))
763 (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE))
767 return should_relocate
764 return should_relocate
768
765
def _choose_closest_diff_line(self, diff_line, new_diff_lines):
    """
    Out of the candidate lines, pick the one whose delta to `diff_line`
    is smallest; ties keep the earliest candidate.
    """
    closest = new_diff_lines[0]
    closest_delta = _diff_line_delta(diff_line, closest)
    for other in new_diff_lines[1:]:
        other_delta = _diff_line_delta(diff_line, other)
        if other_delta < closest_delta:
            closest, closest_delta = other, other_delta
    return closest
778
775
def _visible_inline_comments_of_pull_request(self, pull_request):
    """
    Inline comments of the pull request, excluding ones already marked
    as outdated.
    """
    visible = self._all_inline_comments_of_pull_request(pull_request)
    return visible.filter(
        coalesce(ChangesetComment.display_state, '') !=
        ChangesetComment.COMMENT_OUTDATED)
785
782
def _all_inline_comments_of_pull_request(self, pull_request):
    """
    Query for every inline comment (carries a line number and file path)
    of the given pull request, including outdated ones.
    """
    # `!= None` is intentional: SQLAlchemy renders it as IS NOT NULL
    return Session().query(ChangesetComment)\
        .filter(ChangesetComment.line_no != None)\
        .filter(ChangesetComment.f_path != None)\
        .filter(ChangesetComment.pull_request == pull_request)
792
789
def _all_general_comments_of_pull_request(self, pull_request):
    """
    Query for every general (non-inline) comment of the pull request.
    """
    # `== None` is intentional: SQLAlchemy renders it as IS NULL
    return Session().query(ChangesetComment)\
        .filter(ChangesetComment.line_no == None)\
        .filter(ChangesetComment.f_path == None)\
        .filter(ChangesetComment.pull_request == pull_request)
800
797
@staticmethod
def use_outdated_comments(pull_request):
    """
    Whether the pull request's target repository has the
    outdated-comments feature enabled.
    """
    settings_model = VcsSettingsModel(repo=pull_request.target_repo)
    general_settings = settings_model.get_general_settings()
    return general_settings.get('rhodecode_use_outdated_comments', False)
806
803
def trigger_commit_comment_hook(self, repo, user, action, data=None):
    """
    Fire the integration hooks for a commit-comment create/edit event.

    Unknown actions are silently ignored.

    :param repo: repository id/name/instance the comment belongs to
    :param user: user performing the action
    :param action: 'create' or 'edit'
    :param data: extra payload forwarded to the hook
    """
    repo = self._get_repo(repo)
    target_scm = repo.scm_instance()

    hooks_by_action = {
        'create': hooks_utils.trigger_comment_commit_hooks,
        'edit': hooks_utils.trigger_comment_commit_edit_hooks,
    }
    trigger_hook = hooks_by_action.get(action)
    if trigger_hook is None:
        return

    log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s',
              repo, action, trigger_hook)
    trigger_hook(
        username=user.username,
        repo_name=repo.repo_name,
        repo_type=target_scm.alias,
        repo=repo,
        data=data)
825
822
826
823
827 def _parse_comment_line_number(line_no):
824 def _parse_comment_line_number(line_no):
828 """
825 """
829 Parses line numbers of the form "(o|n)\d+" and returns them in a tuple.
826 Parses line numbers of the form "(o|n)\d+" and returns them in a tuple.
830 """
827 """
831 old_line = None
828 old_line = None
832 new_line = None
829 new_line = None
833 if line_no.startswith('o'):
830 if line_no.startswith('o'):
834 old_line = int(line_no[1:])
831 old_line = int(line_no[1:])
835 elif line_no.startswith('n'):
832 elif line_no.startswith('n'):
836 new_line = int(line_no[1:])
833 new_line = int(line_no[1:])
837 else:
834 else:
838 raise ValueError("Comment lines have to start with either 'o' or 'n'.")
835 raise ValueError("Comment lines have to start with either 'o' or 'n'.")
839 return diffs.DiffLineNumber(old_line, new_line)
836 return diffs.DiffLineNumber(old_line, new_line)
840
837
841
838
842 def _diff_to_comment_line_number(diff_line):
839 def _diff_to_comment_line_number(diff_line):
843 if diff_line.new is not None:
840 if diff_line.new is not None:
844 return u'n{}'.format(diff_line.new)
841 return u'n{}'.format(diff_line.new)
845 elif diff_line.old is not None:
842 elif diff_line.old is not None:
846 return u'o{}'.format(diff_line.old)
843 return u'o{}'.format(diff_line.old)
847 return u''
844 return u''
848
845
849
846
850 def _diff_line_delta(a, b):
847 def _diff_line_delta(a, b):
851 if None not in (a.new, b.new):
848 if None not in (a.new, b.new):
852 return abs(a.new - b.new)
849 return abs(a.new - b.new)
853 elif None not in (a.old, b.old):
850 elif None not in (a.old, b.old):
854 return abs(a.old - b.old)
851 return abs(a.old - b.old)
855 else:
852 else:
856 raise ValueError(
853 raise ValueError(
857 "Cannot compute delta between {} and {}".format(a, b))
854 "Cannot compute delta between {} and {}".format(a, b))
@@ -1,256 +1,256 b''
1
1
2
2
3 # Copyright (C) 2013-2020 RhodeCode GmbH
3 # Copyright (C) 2013-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 gist model for RhodeCode
22 gist model for RhodeCode
23 """
23 """
24
24
25 import os
25 import os
26 import time
26 import time
27 import logging
27 import logging
28 import traceback
28 import traceback
29 import shutil
29 import shutil
30
30
31 from pyramid.threadlocal import get_current_request
31 from pyramid.threadlocal import get_current_request
32
32
33 from rhodecode.lib.utils2 import (
33 from rhodecode.lib.utils2 import (
34 safe_unicode, unique_id, safe_int, time_to_datetime, AttributeDict)
34 unique_id, safe_int, safe_str, time_to_datetime, AttributeDict)
35 from rhodecode.lib.ext_json import json
35 from rhodecode.lib.ext_json import json
36 from rhodecode.lib.vcs import VCSError
36 from rhodecode.lib.vcs import VCSError
37 from rhodecode.model import BaseModel
37 from rhodecode.model import BaseModel
38 from rhodecode.model.db import Gist
38 from rhodecode.model.db import Gist
39 from rhodecode.model.repo import RepoModel
39 from rhodecode.model.repo import RepoModel
40 from rhodecode.model.scm import ScmModel
40 from rhodecode.model.scm import ScmModel
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44 GIST_STORE_LOC = '.rc_gist_store'
44 GIST_STORE_LOC = '.rc_gist_store'
45 GIST_METADATA_FILE = '.rc_gist_metadata'
45 GIST_METADATA_FILE = '.rc_gist_metadata'
46
46
47
47
48 class GistModel(BaseModel):
48 class GistModel(BaseModel):
49 cls = Gist
49 cls = Gist
50 vcs_backend = 'hg'
50 vcs_backend = 'hg'
51
51
def _get_gist(self, gist):
    """
    Helper method to get gist by ID, or gist_access_id as a fallback

    :param gist: GistID, gist_access_id, or Gist instance
    """
    instance = self._get_instance(Gist, gist, callback=Gist.get_by_access_id)
    return instance
59
59
def __delete_gist(self, gist):
    """
    Remove the gist's backing repository from the filesystem.

    :param gist: gist object
    """
    gist_store_path = os.path.join(
        RepoModel().repos_path, GIST_STORE_LOC, gist.gist_access_id)
    log.info("Removing %s", gist_store_path)
    shutil.rmtree(gist_store_path)
70
70
def _store_metadata(self, repo, gist_id, gist_access_id, user_id, username,
                    gist_type, gist_expires, gist_acl_level):
    """
    store metadata inside the gist repo, this can be later used for imports
    or gist identification. Currently we use this inside RhodeCode tools
    to do cleanup of gists that are in storage but not in database.

    :param repo: vcs repository object whose ``.path`` hosts the metadata
    :param gist_id: database id of the gist
    :param gist_access_id: public access id of the gist
    :param user_id: owner's user id
    :param username: owner's username
    :param gist_type: private/public gist type
    :param gist_expires: expiry timestamp, -1 for never
    :param gist_acl_level: acl level for this gist
    """
    metadata = {
        'metadata_version': '2',
        'gist_db_id': gist_id,
        'gist_access_id': gist_access_id,
        'gist_owner_id': user_id,
        'gist_owner_username': username,
        'gist_type': gist_type,
        'gist_expires': gist_expires,
        'gist_updated': time.time(),
        'gist_acl_level': gist_acl_level,
    }
    metadata_file = os.path.join(repo.path, '.hg', GIST_METADATA_FILE)
    # BUG FIX (python3): json.dumps() returns str, so writing it to a file
    # opened in binary ('wb') mode raises TypeError; open in text mode.
    with open(metadata_file, 'w') as f:
        f.write(json.dumps(metadata))
92
92
def get_gist(self, gist):
    """Public accessor: resolve id/access-id/instance to a Gist."""
    return self._get_gist(gist)
95
95
def get_gist_files(self, gist_access_id, revision=None):
    """
    Get files for given gist

    :param gist_access_id: public access id of the gist
    :param revision: optional commit id; defaults to the repo tip
    :raises VCSError: when the backing gist repository cannot be loaded
    :return: tuple of (commit, list of root nodes)
    """
    gist = Gist.get_by_access_id(gist_access_id)
    vcs_repo = gist.scm_instance()
    if not vcs_repo:
        raise VCSError('Failed to load gist repository for {}'.format(gist))

    commit = vcs_repo.get_commit(commit_id=revision)
    return commit, list(commit.get_node('/'))
109
109
110 def create(self, description, owner, gist_mapping,
110 def create(self, description, owner, gist_mapping,
111 gist_type=Gist.GIST_PUBLIC, lifetime=-1, gist_id=None,
111 gist_type=Gist.GIST_PUBLIC, lifetime=-1, gist_id=None,
112 gist_acl_level=Gist.ACL_LEVEL_PRIVATE):
112 gist_acl_level=Gist.ACL_LEVEL_PRIVATE):
113 """
113 """
114 Create a gist
114 Create a gist
115
115
116 :param description: description of the gist
116 :param description: description of the gist
117 :param owner: user who created this gist
117 :param owner: user who created this gist
118 :param gist_mapping: mapping [{'filename': 'file1.txt', 'content': content}, ...}]
118 :param gist_mapping: mapping [{'filename': 'file1.txt', 'content': content}, ...}]
119 :param gist_type: type of gist private/public
119 :param gist_type: type of gist private/public
120 :param lifetime: in minutes, -1 == forever
120 :param lifetime: in minutes, -1 == forever
121 :param gist_acl_level: acl level for this gist
121 :param gist_acl_level: acl level for this gist
122 """
122 """
123 owner = self._get_user(owner)
123 owner = self._get_user(owner)
124 gist_id = safe_unicode(gist_id or unique_id(20))
124 gist_id = safe_str(gist_id or unique_id(20))
125 lifetime = safe_int(lifetime, -1)
125 lifetime = safe_int(lifetime, -1)
126 gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
126 gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
127 expiration = (time_to_datetime(gist_expires)
127 expiration = (time_to_datetime(gist_expires)
128 if gist_expires != -1 else 'forever')
128 if gist_expires != -1 else 'forever')
129 log.debug('set GIST expiration date to: %s', expiration)
129 log.debug('set GIST expiration date to: %s', expiration)
130 # create the Database version
130 # create the Database version
131 gist = Gist()
131 gist = Gist()
132 gist.gist_description = description
132 gist.gist_description = description
133 gist.gist_access_id = gist_id
133 gist.gist_access_id = gist_id
134 gist.gist_owner = owner.user_id
134 gist.gist_owner = owner.user_id
135 gist.gist_expires = gist_expires
135 gist.gist_expires = gist_expires
136 gist.gist_type = safe_unicode(gist_type)
136 gist.gist_type = safe_str(gist_type)
137 gist.acl_level = gist_acl_level
137 gist.acl_level = gist_acl_level
138 self.sa.add(gist)
138 self.sa.add(gist)
139 self.sa.flush()
139 self.sa.flush()
140 if gist_type == Gist.GIST_PUBLIC:
140 if gist_type == Gist.GIST_PUBLIC:
141 # use DB ID for easy to use GIST ID
141 # use DB ID for easy to use GIST ID
142 gist_id = safe_unicode(gist.gist_id)
142 gist_id = safe_str(gist.gist_id)
143 gist.gist_access_id = gist_id
143 gist.gist_access_id = gist_id
144 self.sa.add(gist)
144 self.sa.add(gist)
145
145
146 gist_repo_path = os.path.join(GIST_STORE_LOC, gist_id)
146 gist_repo_path = os.path.join(GIST_STORE_LOC, gist_id)
147 log.debug('Creating new %s GIST repo in %s', gist_type, gist_repo_path)
147 log.debug('Creating new %s GIST repo in %s', gist_type, gist_repo_path)
148 repo = RepoModel()._create_filesystem_repo(
148 repo = RepoModel()._create_filesystem_repo(
149 repo_name=gist_id, repo_type=self.vcs_backend, repo_group=GIST_STORE_LOC,
149 repo_name=gist_id, repo_type=self.vcs_backend, repo_group=GIST_STORE_LOC,
150 use_global_config=True)
150 use_global_config=True)
151
151
152 # now create single multifile commit
152 # now create single multifile commit
153 message = 'added file'
153 message = 'added file'
154 message += 's: ' if len(gist_mapping) > 1 else ': '
154 message += 's: ' if len(gist_mapping) > 1 else ': '
155 message += ', '.join([x for x in gist_mapping])
155 message += ', '.join([safe_str(x) for x in gist_mapping])
156
156
157 # fake RhodeCode Repository object
157 # fake RhodeCode Repository object
158 fake_repo = AttributeDict({
158 fake_repo = AttributeDict({
159 'repo_name': gist_repo_path,
159 'repo_name': gist_repo_path,
160 'scm_instance': lambda *args, **kwargs: repo,
160 'scm_instance': lambda *args, **kwargs: repo,
161 })
161 })
162
162
163 ScmModel().create_nodes(
163 ScmModel().create_nodes(
164 user=owner.user_id, repo=fake_repo,
164 user=owner.user_id, repo=fake_repo,
165 message=message,
165 message=message,
166 nodes=gist_mapping,
166 nodes=gist_mapping,
167 trigger_push_hook=False
167 trigger_push_hook=False
168 )
168 )
169
169
170 self._store_metadata(repo, gist.gist_id, gist.gist_access_id,
170 self._store_metadata(repo, gist.gist_id, gist.gist_access_id,
171 owner.user_id, owner.username, gist.gist_type,
171 owner.user_id, owner.username, gist.gist_type,
172 gist.gist_expires, gist_acl_level)
172 gist.gist_expires, gist_acl_level)
173 return gist
173 return gist
174
174
175 def delete(self, gist, fs_remove=True):
175 def delete(self, gist, fs_remove=True):
176 gist = self._get_gist(gist)
176 gist = self._get_gist(gist)
177 try:
177 try:
178 self.sa.delete(gist)
178 self.sa.delete(gist)
179 if fs_remove:
179 if fs_remove:
180 self.__delete_gist(gist)
180 self.__delete_gist(gist)
181 else:
181 else:
182 log.debug('skipping removal from filesystem')
182 log.debug('skipping removal from filesystem')
183 except Exception:
183 except Exception:
184 log.error(traceback.format_exc())
184 log.error(traceback.format_exc())
185 raise
185 raise
186
186
187 def update(self, gist, description, owner, gist_mapping, lifetime,
187 def update(self, gist, description, owner, gist_mapping, lifetime,
188 gist_acl_level):
188 gist_acl_level):
189 gist = self._get_gist(gist)
189 gist = self._get_gist(gist)
190 gist_repo = gist.scm_instance()
190 gist_repo = gist.scm_instance()
191
191
192 if lifetime == 0: # preserve old value
192 if lifetime == 0: # preserve old value
193 gist_expires = gist.gist_expires
193 gist_expires = gist.gist_expires
194 else:
194 else:
195 gist_expires = (
195 gist_expires = (
196 time.time() + (lifetime * 60) if lifetime != -1 else -1)
196 time.time() + (lifetime * 60) if lifetime != -1 else -1)
197
197
198 # calculate operation type based on given data
198 # calculate operation type based on given data
199 gist_mapping_op = {}
199 gist_mapping_op = {}
200 for k, v in gist_mapping.items():
200 for k, v in gist_mapping.items():
201 # add, mod, del
201 # add, mod, del
202 if not v['filename_org'] and v['filename']:
202 if not v['filename_org'] and v['filename']:
203 op = 'add'
203 op = 'add'
204 elif v['filename_org'] and not v['filename']:
204 elif v['filename_org'] and not v['filename']:
205 op = 'del'
205 op = 'del'
206 else:
206 else:
207 op = 'mod'
207 op = 'mod'
208
208
209 v['op'] = op
209 v['op'] = op
210 gist_mapping_op[k] = v
210 gist_mapping_op[k] = v
211
211
212 gist.gist_description = description
212 gist.gist_description = description
213 gist.gist_expires = gist_expires
213 gist.gist_expires = gist_expires
214 gist.owner = owner
214 gist.owner = owner
215 gist.acl_level = gist_acl_level
215 gist.acl_level = gist_acl_level
216 self.sa.add(gist)
216 self.sa.add(gist)
217 self.sa.flush()
217 self.sa.flush()
218
218
219 message = 'updated file'
219 message = 'updated file'
220 message += 's: ' if len(gist_mapping) > 1 else ': '
220 message += 's: ' if len(gist_mapping) > 1 else ': '
221 message += ', '.join([x for x in gist_mapping])
221 message += ', '.join([safe_str(x) for x in gist_mapping])
222
222
223 # fake RhodeCode Repository object
223 # fake RhodeCode Repository object
224 fake_repo = AttributeDict({
224 fake_repo = AttributeDict({
225 'repo_name': gist_repo.path,
225 'repo_name': gist_repo.path,
226 'scm_instance': lambda *args, **kwargs: gist_repo,
226 'scm_instance': lambda *args, **kwargs: gist_repo,
227 })
227 })
228
228
229 self._store_metadata(gist_repo, gist.gist_id, gist.gist_access_id,
229 self._store_metadata(gist_repo, gist.gist_id, gist.gist_access_id,
230 owner.user_id, owner.username, gist.gist_type,
230 owner.user_id, owner.username, gist.gist_type,
231 gist.gist_expires, gist_acl_level)
231 gist.gist_expires, gist_acl_level)
232
232
233 # this can throw NodeNotChangedError, if changes we're trying to commit
233 # this can throw NodeNotChangedError, if changes we're trying to commit
234 # are not actually changes...
234 # are not actually changes...
235 ScmModel().update_nodes(
235 ScmModel().update_nodes(
236 user=owner.user_id,
236 user=owner.user_id,
237 repo=fake_repo,
237 repo=fake_repo,
238 message=message,
238 message=message,
239 nodes=gist_mapping_op,
239 nodes=gist_mapping_op,
240 trigger_push_hook=False
240 trigger_push_hook=False
241 )
241 )
242
242
243 return gist
243 return gist
244
244
245 def get_url(self, gist, request=None):
245 def get_url(self, gist, request=None):
246 import rhodecode
246 import rhodecode
247
247
248 if not request:
248 if not request:
249 request = get_current_request()
249 request = get_current_request()
250
250
251 alias_url = rhodecode.CONFIG.get('gist_alias_url')
251 alias_url = rhodecode.CONFIG.get('gist_alias_url')
252 if alias_url:
252 if alias_url:
253 return alias_url.replace('{gistid}', gist.gist_access_id)
253 return alias_url.replace('{gistid}', gist.gist_access_id)
254
254
255 return request.route_url('gist_show', gist_id=gist.gist_access_id)
255 return request.route_url('gist_show', gist_id=gist.gist_access_id)
256
256
@@ -1,241 +1,239 b''
1
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Model for integrations
23 Model for integrations
24 """
24 """
25
25
26
26
27 import logging
27 import logging
28
28
29 from sqlalchemy import or_, and_
29 from sqlalchemy import or_, and_
30
30
31 import rhodecode
32 from rhodecode import events
31 from rhodecode import events
33 from rhodecode.integrations.types.base import EEIntegration
32 from rhodecode.integrations.types.base import EEIntegration
34 from rhodecode.lib.caching_query import FromCache
33 from rhodecode.lib.caching_query import FromCache
35 from rhodecode.model import BaseModel
34 from rhodecode.model import BaseModel
36 from rhodecode.model.db import Integration, Repository, RepoGroup, true, false, case
35 from rhodecode.model.db import Integration, Repository, RepoGroup, true, false, case, null
37 from rhodecode.integrations import integration_type_registry
36 from rhodecode.integrations import integration_type_registry
38
37
39 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
40
39
41
40
42 class IntegrationModel(BaseModel):
41 class IntegrationModel(BaseModel):
43
42
44 cls = Integration
43 cls = Integration
45
44
46 def __get_integration(self, integration):
45 def __get_integration(self, integration):
47 if isinstance(integration, Integration):
46 if isinstance(integration, Integration):
48 return integration
47 return integration
49 elif isinstance(integration, int):
48 elif isinstance(integration, int):
50 return self.sa.query(Integration).get(integration)
49 return self.sa.query(Integration).get(integration)
51 else:
50 else:
52 if integration:
51 if integration:
53 raise Exception('integration must be int or Instance'
52 raise Exception('integration must be int or Instance'
54 ' of Integration got %s' % type(integration))
53 ' of Integration got %s' % type(integration))
55
54
56 def create(self, IntegrationType, name, enabled, repo, repo_group,
55 def create(self, IntegrationType, name, enabled, repo, repo_group, child_repos_only, settings):
57 child_repos_only, settings):
58 """ Create an IntegrationType integration """
56 """ Create an IntegrationType integration """
59 integration = Integration()
57 integration = Integration()
60 integration.integration_type = IntegrationType.key
58 integration.integration_type = IntegrationType.key
61 self.sa.add(integration)
59 self.sa.add(integration)
62 self.update_integration(integration, name, enabled, repo, repo_group,
60 self.update_integration(integration, name, enabled, repo, repo_group,
63 child_repos_only, settings)
61 child_repos_only, settings)
64 self.sa.commit()
62 self.sa.commit()
65 return integration
63 return integration
66
64
67 def update_integration(self, integration, name, enabled, repo, repo_group,
65 def update_integration(self, integration, name, enabled, repo, repo_group,
68 child_repos_only, settings):
66 child_repos_only, settings):
69 integration = self.__get_integration(integration)
67 integration = self.__get_integration(integration)
70
68
71 integration.repo = repo
69 integration.repo = repo
72 integration.repo_group = repo_group
70 integration.repo_group = repo_group
73 integration.child_repos_only = child_repos_only
71 integration.child_repos_only = child_repos_only
74 integration.name = name
72 integration.name = name
75 integration.enabled = enabled
73 integration.enabled = enabled
76 integration.settings = settings
74 integration.settings = settings
77
75
78 return integration
76 return integration
79
77
80 def delete(self, integration):
78 def delete(self, integration):
81 integration = self.__get_integration(integration)
79 integration = self.__get_integration(integration)
82 if integration:
80 if integration:
83 self.sa.delete(integration)
81 self.sa.delete(integration)
84 return True
82 return True
85 return False
83 return False
86
84
87 def get_integration_handler(self, integration):
85 def get_integration_handler(self, integration):
88 TypeClass = integration_type_registry.get(integration.integration_type)
86 TypeClass = integration_type_registry.get(integration.integration_type)
89 if not TypeClass:
87 if not TypeClass:
90 log.error('No class could be found for integration type: {}'.format(
88 log.error('No class could be found for integration type: {}'.format(
91 integration.integration_type))
89 integration.integration_type))
92 return None
90 return None
93 elif isinstance(TypeClass, EEIntegration) or issubclass(TypeClass, EEIntegration):
91 elif isinstance(TypeClass, EEIntegration) or issubclass(TypeClass, EEIntegration):
94 log.error('EE integration cannot be '
92 log.error('EE integration cannot be '
95 'executed for integration type: {}'.format(
93 'executed for integration type: {}'.format(
96 integration.integration_type))
94 integration.integration_type))
97 return None
95 return None
98
96
99 return TypeClass(integration.settings)
97 return TypeClass(integration.settings)
100
98
101 def send_event(self, integration, event):
99 def send_event(self, integration, event):
102 """ Send an event to an integration """
100 """ Send an event to an integration """
103 handler = self.get_integration_handler(integration)
101 handler = self.get_integration_handler(integration)
104 if handler:
102 if handler:
105 log.debug(
103 log.debug(
106 'events: sending event %s on integration %s using handler %s',
104 'events: sending event %s on integration %s using handler %s',
107 event, integration, handler)
105 event, integration, handler)
108 handler.send_event(event)
106 handler.send_event(event)
109
107
110 def get_integrations(self, scope, IntegrationType=None):
108 def get_integrations(self, scope, IntegrationType=None):
111 """
109 """
112 Return integrations for a scope, which must be one of:
110 Return integrations for a scope, which must be one of:
113
111
114 'all' - every integration, global/repogroup/repo
112 'all' - every integration, global/repogroup/repo
115 'global' - global integrations only
113 'global' - global integrations only
116 <Repository> instance - integrations for this repo only
114 <Repository> instance - integrations for this repo only
117 <RepoGroup> instance - integrations for this repogroup only
115 <RepoGroup> instance - integrations for this repogroup only
118 """
116 """
119
117
120 if isinstance(scope, Repository):
118 if isinstance(scope, Repository):
121 query = self.sa.query(Integration).filter(
119 query = self.sa.query(Integration).filter(
122 Integration.repo == scope)
120 Integration.repo == scope)
123 elif isinstance(scope, RepoGroup):
121 elif isinstance(scope, RepoGroup):
124 query = self.sa.query(Integration).filter(
122 query = self.sa.query(Integration).filter(
125 Integration.repo_group == scope)
123 Integration.repo_group == scope)
126 elif scope == 'global':
124 elif scope == 'global':
127 # global integrations
125 # global integrations
128 query = self.sa.query(Integration).filter(
126 query = self.sa.query(Integration).filter(
129 and_(Integration.repo_id == None, Integration.repo_group_id == None)
127 and_(Integration.repo_id == None, Integration.repo_group_id == None)
130 )
128 )
131 elif scope == 'root-repos':
129 elif scope == 'root-repos':
132 query = self.sa.query(Integration).filter(
130 query = self.sa.query(Integration).filter(
133 and_(Integration.repo_id == None,
131 and_(Integration.repo_id == None,
134 Integration.repo_group_id == None,
132 Integration.repo_group_id == None,
135 Integration.child_repos_only == true())
133 Integration.child_repos_only == true())
136 )
134 )
137 elif scope == 'all':
135 elif scope == 'all':
138 query = self.sa.query(Integration)
136 query = self.sa.query(Integration)
139 else:
137 else:
140 raise Exception(
138 raise Exception(
141 "invalid `scope`, must be one of: "
139 "invalid `scope`, must be one of: "
142 "['global', 'all', <Repository>, <RepoGroup>]")
140 "['global', 'all', <Repository>, <RepoGroup>]")
143
141
144 if IntegrationType is not None:
142 if IntegrationType is not None:
145 query = query.filter(
143 query = query.filter(
146 Integration.integration_type==IntegrationType.key)
144 Integration.integration_type==IntegrationType.key)
147
145
148 result = []
146 result = []
149 for integration in query.all():
147 for integration in query.all():
150 IntType = integration_type_registry.get(integration.integration_type)
148 IntType = integration_type_registry.get(integration.integration_type)
151 result.append((IntType, integration))
149 result.append((IntType, integration))
152 return result
150 return result
153
151
154 def get_for_event(self, event, cache=False):
152 def get_for_event(self, event, cache=False):
155 """
153 """
156 Get integrations that match an event
154 Get integrations that match an event
157 """
155 """
158 # base query
156 # base query
159 query = self.sa.query(
157 query = self.sa.query(
160 Integration
158 Integration
161 ).filter(
159 ).filter(
162 Integration.enabled == true()
160 Integration.enabled == true()
163 )
161 )
164
162
165 global_integrations_filter = and_(
163 global_integrations_filter = and_(
166 Integration.repo_id == None,
164 Integration.repo_id == null(),
167 Integration.repo_group_id == None,
165 Integration.repo_group_id == null(),
168 Integration.child_repos_only == false(),
166 Integration.child_repos_only == false(),
169 )
167 )
170
168
171 if isinstance(event, events.RepoEvent):
169 if isinstance(event, events.RepoEvent):
172 root_repos_integrations_filter = and_(
170 root_repos_integrations_filter = and_(
173 Integration.repo_id == None,
171 Integration.repo_id == null(),
174 Integration.repo_group_id == None,
172 Integration.repo_group_id == null(),
175 Integration.child_repos_only == true(),
173 Integration.child_repos_only == true(),
176 )
174 )
177
175
178 clauses = [
176 clauses = [
179 global_integrations_filter,
177 global_integrations_filter,
180 ]
178 ]
181 cases = [
179 cases = [
182 (global_integrations_filter, 1),
180 (global_integrations_filter, 1),
183 (root_repos_integrations_filter, 2),
181 (root_repos_integrations_filter, 2),
184 ]
182 ]
185
183
186 # repo group integrations
184 # repo group integrations
187 if event.repo.group:
185 if event.repo.group:
188 # repo group with only root level repos
186 # repo group with only root level repos
189 group_child_repos_filter = and_(
187 group_child_repos_filter = and_(
190 Integration.repo_group_id == event.repo.group.group_id,
188 Integration.repo_group_id == event.repo.group.group_id,
191 Integration.child_repos_only == true()
189 Integration.child_repos_only == true()
192 )
190 )
193
191
194 clauses.append(group_child_repos_filter)
192 clauses.append(group_child_repos_filter)
195 cases.append(
193 cases.append(
196 (group_child_repos_filter, 3),
194 (group_child_repos_filter, 3),
197 )
195 )
198
196
199 # repo group cascade to kids
197 # repo group cascade to kids
200 group_recursive_repos_filter = and_(
198 group_recursive_repos_filter = and_(
201 Integration.repo_group_id.in_(
199 Integration.repo_group_id.in_(
202 [group.group_id for group in event.repo.groups_with_parents]
200 [group.group_id for group in event.repo.groups_with_parents]
203 ),
201 ),
204 Integration.child_repos_only == false()
202 Integration.child_repos_only == false()
205 )
203 )
206 clauses.append(group_recursive_repos_filter)
204 clauses.append(group_recursive_repos_filter)
207 cases.append(
205 cases.append(
208 (group_recursive_repos_filter, 4),
206 (group_recursive_repos_filter, 4),
209 )
207 )
210
208
211 if not event.repo.group: # root repo
209 if not event.repo.group: # root repo
212 clauses.append(root_repos_integrations_filter)
210 clauses.append(root_repos_integrations_filter)
213
211
214 # repo integrations
212 # repo integrations
215 if event.repo.repo_id: # pre create events dont have a repo_id yet
213 if event.repo.repo_id: # pre create events dont have a repo_id yet
216 specific_repo_filter = Integration.repo_id == event.repo.repo_id
214 specific_repo_filter = Integration.repo_id == event.repo.repo_id
217 clauses.append(specific_repo_filter)
215 clauses.append(specific_repo_filter)
218 cases.append(
216 cases.append(
219 (specific_repo_filter, 5),
217 (specific_repo_filter, 5),
220 )
218 )
221
219
222 order_by_criterion = case(cases)
220 order_by_criterion = case(cases)
223
221
224 query = query.filter(or_(*clauses))
222 query = query.filter(or_(*clauses))
225 query = query.order_by(order_by_criterion)
223 query = query.order_by(order_by_criterion)
226
224
227 if cache:
225 if cache:
228 cache_key = "get_enabled_repo_integrations_%i" % event.repo.repo_id
226 cache_key = f"get_enabled_repo_integrations_{event.repo.repo_id}"
229 query = query.options(
227 query = query.options(
230 FromCache("sql_cache_short", cache_key))
228 FromCache("sql_cache_short", cache_key))
231 else: # only global integrations
229 else: # only global integrations
232 order_by_criterion = Integration.integration_id
230 order_by_criterion = Integration.integration_id
233
231
234 query = query.filter(global_integrations_filter)
232 query = query.filter(global_integrations_filter)
235 query = query.order_by(order_by_criterion)
233 query = query.order_by(order_by_criterion)
236 if cache:
234 if cache:
237 query = query.options(
235 query = query.options(
238 FromCache("sql_cache_short", "get_enabled_global_integrations"))
236 FromCache("sql_cache_short", "get_enabled_global_integrations"))
239
237
240 result = query.all()
238 result = query.all()
241 return result
239 return result
@@ -1,453 +1,457 b''
1
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 Model for notifications
23 Model for notifications
24 """
24 """
25
25
26 import logging
26 import logging
27 import traceback
27 import traceback
28
28
29 import premailer
29 import premailer
30 from pyramid.threadlocal import get_current_request
30 from pyramid.threadlocal import get_current_request
31 from sqlalchemy.sql.expression import false, true
31 from sqlalchemy.sql.expression import false, true
32
32
33 import rhodecode
33 import rhodecode
34 from rhodecode.lib import helpers as h
34 from rhodecode.lib import helpers as h
35 from rhodecode.model import BaseModel
35 from rhodecode.model import BaseModel
36 from rhodecode.model.db import Notification, User, UserNotification
36 from rhodecode.model.db import Notification, User, UserNotification
37 from rhodecode.model.meta import Session
37 from rhodecode.model.meta import Session
38 from rhodecode.translation import TranslationString
38 from rhodecode.translation import TranslationString
39
39
40 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
41
41
42
42
43 class NotificationModel(BaseModel):
43 class NotificationModel(BaseModel):
44
44
45 cls = Notification
45 cls = Notification
46
46
47 def __get_notification(self, notification):
47 def __get_notification(self, notification):
48 if isinstance(notification, Notification):
48 if isinstance(notification, Notification):
49 return notification
49 return notification
50 elif isinstance(notification, int):
50 elif isinstance(notification, int):
51 return Notification.get(notification)
51 return Notification.get(notification)
52 else:
52 else:
53 if notification:
53 if notification:
54 raise Exception('notification must be int or Instance'
54 raise Exception('notification must be int or Instance'
55 ' of Notification got %s' % type(notification))
55 ' of Notification got %s' % type(notification))
56
56
57 def create(
57 def create(
58 self, created_by, notification_subject='', notification_body='',
58 self, created_by, notification_subject='', notification_body='',
59 notification_type=Notification.TYPE_MESSAGE, recipients=None,
59 notification_type=Notification.TYPE_MESSAGE, recipients=None,
60 mention_recipients=None, with_email=True, email_kwargs=None):
60 mention_recipients=None, with_email=True, email_kwargs=None):
61 """
61 """
62
62
63 Creates notification of given type
63 Creates notification of given type
64
64
65 :param created_by: int, str or User instance. User who created this
65 :param created_by: int, str or User instance. User who created this
66 notification
66 notification
67 :param notification_subject: subject of notification itself,
67 :param notification_subject: subject of notification itself,
68 it will be generated automatically from notification_type if not specified
68 it will be generated automatically from notification_type if not specified
69 :param notification_body: body of notification text
69 :param notification_body: body of notification text
70 it will be generated automatically from notification_type if not specified
70 it will be generated automatically from notification_type if not specified
71 :param notification_type: type of notification, based on that we
71 :param notification_type: type of notification, based on that we
72 pick templates
72 pick templates
73 :param recipients: list of int, str or User objects, when None
73 :param recipients: list of int, str or User objects, when None
74 is given send to all admins
74 is given send to all admins
75 :param mention_recipients: list of int, str or User objects,
75 :param mention_recipients: list of int, str or User objects,
76 that were mentioned
76 that were mentioned
77 :param with_email: send email with this notification
77 :param with_email: send email with this notification
78 :param email_kwargs: dict with arguments to generate email
78 :param email_kwargs: dict with arguments to generate email
79 """
79 """
80
80
81 from rhodecode.lib.celerylib import tasks, run_task
81 from rhodecode.lib.celerylib import tasks, run_task
82
82
83 if recipients and not getattr(recipients, '__iter__', False):
83 if recipients and not getattr(recipients, '__iter__', False):
84 raise Exception('recipients must be an iterable object')
84 raise Exception('recipients must be an iterable object')
85
85
86 if not (notification_subject and notification_body) and not notification_type:
86 if not (notification_subject and notification_body) and not notification_type:
87 raise ValueError('notification_subject, and notification_body '
87 raise ValueError('notification_subject, and notification_body '
88 'cannot be empty when notification_type is not specified')
88 'cannot be empty when notification_type is not specified')
89
89
90 created_by_obj = self._get_user(created_by)
90 created_by_obj = self._get_user(created_by)
91
91
92 if not created_by_obj:
92 if not created_by_obj:
93 raise Exception('unknown user %s' % created_by)
93 raise Exception('unknown user %s' % created_by)
94
94
95 # default MAIN body if not given
95 # default MAIN body if not given
96 email_kwargs = email_kwargs or {'body': notification_body}
96 email_kwargs = email_kwargs or {'body': notification_body}
97 mention_recipients = mention_recipients or set()
97 mention_recipients = mention_recipients or set()
98
98
99 if recipients is None:
99 if recipients is None:
100 # recipients is None means to all admins
100 # recipients is None means to all admins
101 recipients_objs = User.query().filter(User.admin == true()).all()
101 recipients_objs = User.query().filter(User.admin == true()).all()
102 log.debug('sending notifications %s to admins: %s',
102 log.debug('sending notifications %s to admins: %s',
103 notification_type, recipients_objs)
103 notification_type, recipients_objs)
104 else:
104 else:
105 recipients_objs = set()
105 recipients_objs = set()
106 for u in recipients:
106 for u in recipients:
107 obj = self._get_user(u)
107 obj = self._get_user(u)
108 if obj:
108 if obj:
109 recipients_objs.add(obj)
109 recipients_objs.add(obj)
110 else: # we didn't find this user, log the error and carry on
110 else: # we didn't find this user, log the error and carry on
111 log.error('cannot notify unknown user %r', u)
111 log.error('cannot notify unknown user %r', u)
112
112
113 if not recipients_objs:
113 if not recipients_objs:
114 raise Exception('no valid recipients specified')
114 raise Exception('no valid recipients specified')
115
115
116 log.debug('sending notifications %s to %s',
116 log.debug('sending notifications %s to %s',
117 notification_type, recipients_objs)
117 notification_type, recipients_objs)
118
118
119 # add mentioned users into recipients
119 # add mentioned users into recipients
120 final_recipients = set(recipients_objs).union(mention_recipients)
120 final_recipients = set(recipients_objs).union(mention_recipients)
121
121
122 (subject, email_body, email_body_plaintext) = \
122 (subject, email_body, email_body_plaintext) = \
123 EmailNotificationModel().render_email(notification_type, **email_kwargs)
123 EmailNotificationModel().render_email(notification_type, **email_kwargs)
124
124
125 if not notification_subject:
125 if not notification_subject:
126 notification_subject = subject
126 notification_subject = subject
127
127
128 if not notification_body:
128 if not notification_body:
129 notification_body = email_body_plaintext
129 notification_body = email_body_plaintext
130
130
131 notification = Notification.create(
131 notification = Notification.create(
132 created_by=created_by_obj, subject=notification_subject,
132 created_by=created_by_obj, subject=notification_subject,
133 body=notification_body, recipients=final_recipients,
133 body=notification_body, recipients=final_recipients,
134 type_=notification_type
134 type_=notification_type
135 )
135 )
136
136
137 if not with_email: # skip sending email, and just create notification
137 if not with_email: # skip sending email, and just create notification
138 return notification
138 return notification
139
139
140 # don't send email to person who created this comment
140 # don't send email to person who created this comment
141 rec_objs = set(recipients_objs).difference({created_by_obj})
141 rec_objs = set(recipients_objs).difference({created_by_obj})
142
142
143 # now notify all recipients in question
143 # now notify all recipients in question
144
144
145 for recipient in rec_objs.union(mention_recipients):
145 for recipient in rec_objs.union(mention_recipients):
146 # inject current recipient
146 # inject current recipient
147 email_kwargs['recipient'] = recipient
147 email_kwargs['recipient'] = recipient
148 email_kwargs['mention'] = recipient in mention_recipients
148 email_kwargs['mention'] = recipient in mention_recipients
149 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
149 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
150 notification_type, **email_kwargs)
150 notification_type, **email_kwargs)
151
151
152 extra_headers = None
152 extra_headers = None
153 if 'thread_ids' in email_kwargs:
153 if 'thread_ids' in email_kwargs:
154 extra_headers = {'thread_ids': email_kwargs.pop('thread_ids')}
154 extra_headers = {'thread_ids': email_kwargs.pop('thread_ids')}
155
155
156 log.debug('Creating notification email task for user:`%s`', recipient)
156 log.debug('Creating notification email task for user:`%s`', recipient)
157 task = run_task(tasks.send_email, recipient.email, subject,
157 task = run_task(tasks.send_email, recipient.email, subject,
158 email_body_plaintext, email_body, extra_headers=extra_headers)
158 email_body_plaintext, email_body, extra_headers=extra_headers)
159 log.debug('Created email task: %s', task)
159 log.debug('Created email task: %s', task)
160
160
161 return notification
161 return notification
162
162
163 def delete(self, user, notification):
163 def delete(self, user, notification):
164 # we don't want to remove actual notification just the assignment
164 # we don't want to remove actual notification just the assignment
165 try:
165 try:
166 notification = self.__get_notification(notification)
166 notification = self.__get_notification(notification)
167 user = self._get_user(user)
167 user = self._get_user(user)
168 if notification and user:
168 if notification and user:
169 obj = UserNotification.query()\
169 obj = UserNotification.query()\
170 .filter(UserNotification.user == user)\
170 .filter(UserNotification.user == user)\
171 .filter(UserNotification.notification == notification)\
171 .filter(UserNotification.notification == notification)\
172 .one()
172 .one()
173 Session().delete(obj)
173 Session().delete(obj)
174 return True
174 return True
175 except Exception:
175 except Exception:
176 log.error(traceback.format_exc())
176 log.error(traceback.format_exc())
177 raise
177 raise
178
178
179 def get_for_user(self, user, filter_=None):
179 def get_for_user(self, user, filter_=None):
180 """
180 """
181 Get mentions for given user, filter them if filter dict is given
181 Get mentions for given user, filter them if filter dict is given
182 """
182 """
183 user = self._get_user(user)
183 user = self._get_user(user)
184
184
185 q = UserNotification.query()\
185 q = UserNotification.query()\
186 .filter(UserNotification.user == user)\
186 .filter(UserNotification.user == user)\
187 .join((
187 .join((
188 Notification, UserNotification.notification_id ==
188 Notification, UserNotification.notification_id ==
189 Notification.notification_id))
189 Notification.notification_id))
190 if filter_ == ['all']:
190 if filter_ == ['all']:
191 q = q # no filter
191 q = q # no filter
192 elif filter_ == ['unread']:
192 elif filter_ == ['unread']:
193 q = q.filter(UserNotification.read == false())
193 q = q.filter(UserNotification.read == false())
194 elif filter_:
194 elif filter_:
195 q = q.filter(Notification.type_.in_(filter_))
195 q = q.filter(Notification.type_.in_(filter_))
196
196
197 return q
197 return q
198
198
199 def mark_read(self, user, notification):
199 def mark_read(self, user, notification):
200 try:
200 try:
201 notification = self.__get_notification(notification)
201 notification = self.__get_notification(notification)
202 user = self._get_user(user)
202 user = self._get_user(user)
203 if notification and user:
203 if notification and user:
204 obj = UserNotification.query()\
204 obj = UserNotification.query()\
205 .filter(UserNotification.user == user)\
205 .filter(UserNotification.user == user)\
206 .filter(UserNotification.notification == notification)\
206 .filter(UserNotification.notification == notification)\
207 .one()
207 .one()
208 obj.read = True
208 obj.read = True
209 Session().add(obj)
209 Session().add(obj)
210 return True
210 return True
211 except Exception:
211 except Exception:
212 log.error(traceback.format_exc())
212 log.error(traceback.format_exc())
213 raise
213 raise
214
214
215 def mark_all_read_for_user(self, user, filter_=None):
215 def mark_all_read_for_user(self, user, filter_=None):
216 user = self._get_user(user)
216 user = self._get_user(user)
217 q = UserNotification.query()\
217 q = UserNotification.query()\
218 .filter(UserNotification.user == user)\
218 .filter(UserNotification.user == user)\
219 .filter(UserNotification.read == false())\
219 .filter(UserNotification.read == false())\
220 .join((
220 .join((
221 Notification, UserNotification.notification_id ==
221 Notification, UserNotification.notification_id ==
222 Notification.notification_id))
222 Notification.notification_id))
223 if filter_ == ['unread']:
223 if filter_ == ['unread']:
224 q = q.filter(UserNotification.read == false())
224 q = q.filter(UserNotification.read == false())
225 elif filter_:
225 elif filter_:
226 q = q.filter(Notification.type_.in_(filter_))
226 q = q.filter(Notification.type_.in_(filter_))
227
227
228 # this is a little inefficient but sqlalchemy doesn't support
228 # this is a little inefficient but sqlalchemy doesn't support
229 # update on joined tables :(
229 # update on joined tables :(
230 for obj in q.all():
230 for obj in q.all():
231 obj.read = True
231 obj.read = True
232 Session().add(obj)
232 Session().add(obj)
233
233
234 def get_unread_cnt_for_user(self, user):
234 def get_unread_cnt_for_user(self, user):
235 user = self._get_user(user)
235 user = self._get_user(user)
236 return UserNotification.query()\
236 return UserNotification.query()\
237 .filter(UserNotification.read == false())\
237 .filter(UserNotification.read == false())\
238 .filter(UserNotification.user == user).count()
238 .filter(UserNotification.user == user).count()
239
239
240 def get_unread_for_user(self, user):
240 def get_unread_for_user(self, user):
241 user = self._get_user(user)
241 user = self._get_user(user)
242 return [x.notification for x in UserNotification.query()
242 return [x.notification for x in UserNotification.query()
243 .filter(UserNotification.read == false())
243 .filter(UserNotification.read == false())
244 .filter(UserNotification.user == user).all()]
244 .filter(UserNotification.user == user).all()]
245
245
246 def get_user_notification(self, user, notification):
246 def get_user_notification(self, user, notification):
247 user = self._get_user(user)
247 user = self._get_user(user)
248 notification = self.__get_notification(notification)
248 notification = self.__get_notification(notification)
249
249
250 return UserNotification.query()\
250 return UserNotification.query()\
251 .filter(UserNotification.notification == notification)\
251 .filter(UserNotification.notification == notification)\
252 .filter(UserNotification.user == user).scalar()
252 .filter(UserNotification.user == user).scalar()
253
253
254 def make_description(self, notification, translate, show_age=True):
254 def make_description(self, notification, translate, show_age=True):
255 """
255 """
256 Creates a human readable description based on properties
256 Creates a human readable description based on properties
257 of notification object
257 of notification object
258 """
258 """
259 _ = translate
259 _ = translate
260 _map = {
260 _map = {
261 notification.TYPE_CHANGESET_COMMENT: [
261 notification.TYPE_CHANGESET_COMMENT: [
262 _('%(user)s commented on commit %(date_or_age)s'),
262 _('%(user)s commented on commit %(date_or_age)s'),
263 _('%(user)s commented on commit at %(date_or_age)s'),
263 _('%(user)s commented on commit at %(date_or_age)s'),
264 ],
264 ],
265 notification.TYPE_MESSAGE: [
265 notification.TYPE_MESSAGE: [
266 _('%(user)s sent message %(date_or_age)s'),
266 _('%(user)s sent message %(date_or_age)s'),
267 _('%(user)s sent message at %(date_or_age)s'),
267 _('%(user)s sent message at %(date_or_age)s'),
268 ],
268 ],
269 notification.TYPE_MENTION: [
269 notification.TYPE_MENTION: [
270 _('%(user)s mentioned you %(date_or_age)s'),
270 _('%(user)s mentioned you %(date_or_age)s'),
271 _('%(user)s mentioned you at %(date_or_age)s'),
271 _('%(user)s mentioned you at %(date_or_age)s'),
272 ],
272 ],
273 notification.TYPE_REGISTRATION: [
273 notification.TYPE_REGISTRATION: [
274 _('%(user)s registered in RhodeCode %(date_or_age)s'),
274 _('%(user)s registered in RhodeCode %(date_or_age)s'),
275 _('%(user)s registered in RhodeCode at %(date_or_age)s'),
275 _('%(user)s registered in RhodeCode at %(date_or_age)s'),
276 ],
276 ],
277 notification.TYPE_PULL_REQUEST: [
277 notification.TYPE_PULL_REQUEST: [
278 _('%(user)s opened new pull request %(date_or_age)s'),
278 _('%(user)s opened new pull request %(date_or_age)s'),
279 _('%(user)s opened new pull request at %(date_or_age)s'),
279 _('%(user)s opened new pull request at %(date_or_age)s'),
280 ],
280 ],
281 notification.TYPE_PULL_REQUEST_UPDATE: [
281 notification.TYPE_PULL_REQUEST_UPDATE: [
282 _('%(user)s updated pull request %(date_or_age)s'),
282 _('%(user)s updated pull request %(date_or_age)s'),
283 _('%(user)s updated pull request at %(date_or_age)s'),
283 _('%(user)s updated pull request at %(date_or_age)s'),
284 ],
284 ],
285 notification.TYPE_PULL_REQUEST_COMMENT: [
285 notification.TYPE_PULL_REQUEST_COMMENT: [
286 _('%(user)s commented on pull request %(date_or_age)s'),
286 _('%(user)s commented on pull request %(date_or_age)s'),
287 _('%(user)s commented on pull request at %(date_or_age)s'),
287 _('%(user)s commented on pull request at %(date_or_age)s'),
288 ],
288 ],
289 }
289 }
290
290
291 templates = _map[notification.type_]
291 templates = _map[notification.type_]
292
292
293 if show_age:
293 if show_age:
294 template = templates[0]
294 template = templates[0]
295 date_or_age = h.age(notification.created_on)
295 date_or_age = h.age(notification.created_on)
296 if translate:
296 if translate:
297 date_or_age = translate(date_or_age)
297 date_or_age = translate(date_or_age)
298
298
299 if isinstance(date_or_age, TranslationString):
299 if isinstance(date_or_age, TranslationString):
300 date_or_age = date_or_age.interpolate()
300 date_or_age = date_or_age.interpolate()
301
301
302 else:
302 else:
303 template = templates[1]
303 template = templates[1]
304 date_or_age = h.format_date(notification.created_on)
304 date_or_age = h.format_date(notification.created_on)
305
305
306 return template % {
306 return template % {
307 'user': notification.created_by_user.username,
307 'user': notification.created_by_user.username,
308 'date_or_age': date_or_age,
308 'date_or_age': date_or_age,
309 }
309 }
310
310
311
311
312 # Templates for Titles, that could be overwritten by rcextensions
312 # Templates for Titles, that could be overwritten by rcextensions
313 # Title of email for pull-request update
313 # Title of email for pull-request update
314 EMAIL_PR_UPDATE_SUBJECT_TEMPLATE = ''
314 EMAIL_PR_UPDATE_SUBJECT_TEMPLATE = ''
315 # Title of email for request for pull request review
315 # Title of email for request for pull request review
316 EMAIL_PR_REVIEW_SUBJECT_TEMPLATE = ''
316 EMAIL_PR_REVIEW_SUBJECT_TEMPLATE = ''
317
317
318 # Title of email for general comment on pull request
318 # Title of email for general comment on pull request
319 EMAIL_PR_COMMENT_SUBJECT_TEMPLATE = ''
319 EMAIL_PR_COMMENT_SUBJECT_TEMPLATE = ''
320 # Title of email for general comment which includes status change on pull request
320 # Title of email for general comment which includes status change on pull request
321 EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = ''
321 EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = ''
322 # Title of email for inline comment on a file in pull request
322 # Title of email for inline comment on a file in pull request
323 EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE = ''
323 EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE = ''
324
324
325 # Title of email for general comment on commit
325 # Title of email for general comment on commit
326 EMAIL_COMMENT_SUBJECT_TEMPLATE = ''
326 EMAIL_COMMENT_SUBJECT_TEMPLATE = ''
327 # Title of email for general comment which includes status change on commit
327 # Title of email for general comment which includes status change on commit
328 EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = ''
328 EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = ''
329 # Title of email for inline comment on a file in commit
329 # Title of email for inline comment on a file in commit
330 EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE = ''
330 EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE = ''
331
331
332 import cssutils
332 import cssutils
333 # hijack css utils logger and replace with ours
333 # hijack css utils logger and replace with ours
334 log = logging.getLogger('rhodecode.cssutils.premailer')
334 log = logging.getLogger('rhodecode.cssutils.premailer')
335 log.setLevel(logging.INFO)
335 cssutils.log.setLog(log)
336 cssutils.log.setLog(log)
336
337
337
338
338 class EmailNotificationModel(BaseModel):
339 class EmailNotificationModel(BaseModel):
339 TYPE_COMMIT_COMMENT = Notification.TYPE_CHANGESET_COMMENT
340 TYPE_COMMIT_COMMENT = Notification.TYPE_CHANGESET_COMMENT
340 TYPE_REGISTRATION = Notification.TYPE_REGISTRATION
341 TYPE_REGISTRATION = Notification.TYPE_REGISTRATION
341 TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST
342 TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST
342 TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT
343 TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT
343 TYPE_PULL_REQUEST_UPDATE = Notification.TYPE_PULL_REQUEST_UPDATE
344 TYPE_PULL_REQUEST_UPDATE = Notification.TYPE_PULL_REQUEST_UPDATE
344 TYPE_MAIN = Notification.TYPE_MESSAGE
345 TYPE_MAIN = Notification.TYPE_MESSAGE
345
346
346 TYPE_PASSWORD_RESET = 'password_reset'
347 TYPE_PASSWORD_RESET = 'password_reset'
347 TYPE_PASSWORD_RESET_CONFIRMATION = 'password_reset_confirmation'
348 TYPE_PASSWORD_RESET_CONFIRMATION = 'password_reset_confirmation'
348 TYPE_EMAIL_TEST = 'email_test'
349 TYPE_EMAIL_TEST = 'email_test'
349 TYPE_EMAIL_EXCEPTION = 'exception'
350 TYPE_EMAIL_EXCEPTION = 'exception'
350 TYPE_UPDATE_AVAILABLE = 'update_available'
351 TYPE_UPDATE_AVAILABLE = 'update_available'
351 TYPE_TEST = 'test'
352 TYPE_TEST = 'test'
352
353
353 email_types = {
354 email_types = {
354 TYPE_MAIN:
355 TYPE_MAIN:
355 'rhodecode:templates/email_templates/main.mako',
356 'rhodecode:templates/email_templates/main.mako',
356 TYPE_TEST:
357 TYPE_TEST:
357 'rhodecode:templates/email_templates/test.mako',
358 'rhodecode:templates/email_templates/test.mako',
358 TYPE_EMAIL_EXCEPTION:
359 TYPE_EMAIL_EXCEPTION:
359 'rhodecode:templates/email_templates/exception_tracker.mako',
360 'rhodecode:templates/email_templates/exception_tracker.mako',
360 TYPE_UPDATE_AVAILABLE:
361 TYPE_UPDATE_AVAILABLE:
361 'rhodecode:templates/email_templates/update_available.mako',
362 'rhodecode:templates/email_templates/update_available.mako',
362 TYPE_EMAIL_TEST:
363 TYPE_EMAIL_TEST:
363 'rhodecode:templates/email_templates/email_test.mako',
364 'rhodecode:templates/email_templates/email_test.mako',
364 TYPE_REGISTRATION:
365 TYPE_REGISTRATION:
365 'rhodecode:templates/email_templates/user_registration.mako',
366 'rhodecode:templates/email_templates/user_registration.mako',
366 TYPE_PASSWORD_RESET:
367 TYPE_PASSWORD_RESET:
367 'rhodecode:templates/email_templates/password_reset.mako',
368 'rhodecode:templates/email_templates/password_reset.mako',
368 TYPE_PASSWORD_RESET_CONFIRMATION:
369 TYPE_PASSWORD_RESET_CONFIRMATION:
369 'rhodecode:templates/email_templates/password_reset_confirmation.mako',
370 'rhodecode:templates/email_templates/password_reset_confirmation.mako',
370 TYPE_COMMIT_COMMENT:
371 TYPE_COMMIT_COMMENT:
371 'rhodecode:templates/email_templates/commit_comment.mako',
372 'rhodecode:templates/email_templates/commit_comment.mako',
372 TYPE_PULL_REQUEST:
373 TYPE_PULL_REQUEST:
373 'rhodecode:templates/email_templates/pull_request_review.mako',
374 'rhodecode:templates/email_templates/pull_request_review.mako',
374 TYPE_PULL_REQUEST_COMMENT:
375 TYPE_PULL_REQUEST_COMMENT:
375 'rhodecode:templates/email_templates/pull_request_comment.mako',
376 'rhodecode:templates/email_templates/pull_request_comment.mako',
376 TYPE_PULL_REQUEST_UPDATE:
377 TYPE_PULL_REQUEST_UPDATE:
377 'rhodecode:templates/email_templates/pull_request_update.mako',
378 'rhodecode:templates/email_templates/pull_request_update.mako',
378 }
379 }
379
380
380 premailer_instance = premailer.Premailer()
381 premailer_instance = premailer.Premailer(
382 #cssutils_logging_handler=log.handlers[0],
383 #cssutils_logging_level=logging.INFO
384 )
381
385
382 def __init__(self):
386 def __init__(self):
383 """
387 """
384 Example usage::
388 Example usage::
385
389
386 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
390 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
387 EmailNotificationModel.TYPE_TEST, **email_kwargs)
391 EmailNotificationModel.TYPE_TEST, **email_kwargs)
388
392
389 """
393 """
390 super(EmailNotificationModel, self).__init__()
394 super(EmailNotificationModel, self).__init__()
391 self.rhodecode_instance_name = rhodecode.CONFIG.get('rhodecode_title')
395 self.rhodecode_instance_name = rhodecode.CONFIG.get('rhodecode_title')
392
396
393 def _update_kwargs_for_render(self, kwargs):
397 def _update_kwargs_for_render(self, kwargs):
394 """
398 """
395 Inject params required for Mako rendering
399 Inject params required for Mako rendering
396
400
397 :param kwargs:
401 :param kwargs:
398 """
402 """
399
403
400 kwargs['rhodecode_instance_name'] = self.rhodecode_instance_name
404 kwargs['rhodecode_instance_name'] = self.rhodecode_instance_name
401 kwargs['rhodecode_version'] = rhodecode.__version__
405 kwargs['rhodecode_version'] = rhodecode.__version__
402 instance_url = h.route_url('home')
406 instance_url = h.route_url('home')
403 _kwargs = {
407 _kwargs = {
404 'instance_url': instance_url,
408 'instance_url': instance_url,
405 'whitespace_filter': self.whitespace_filter,
409 'whitespace_filter': self.whitespace_filter,
406 'email_pr_update_subject_template': EMAIL_PR_UPDATE_SUBJECT_TEMPLATE,
410 'email_pr_update_subject_template': EMAIL_PR_UPDATE_SUBJECT_TEMPLATE,
407 'email_pr_review_subject_template': EMAIL_PR_REVIEW_SUBJECT_TEMPLATE,
411 'email_pr_review_subject_template': EMAIL_PR_REVIEW_SUBJECT_TEMPLATE,
408 'email_pr_comment_subject_template': EMAIL_PR_COMMENT_SUBJECT_TEMPLATE,
412 'email_pr_comment_subject_template': EMAIL_PR_COMMENT_SUBJECT_TEMPLATE,
409 'email_pr_comment_status_change_subject_template': EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE,
413 'email_pr_comment_status_change_subject_template': EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE,
410 'email_pr_comment_file_subject_template': EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE,
414 'email_pr_comment_file_subject_template': EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE,
411 'email_comment_subject_template': EMAIL_COMMENT_SUBJECT_TEMPLATE,
415 'email_comment_subject_template': EMAIL_COMMENT_SUBJECT_TEMPLATE,
412 'email_comment_status_change_subject_template': EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE,
416 'email_comment_status_change_subject_template': EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE,
413 'email_comment_file_subject_template': EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE,
417 'email_comment_file_subject_template': EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE,
414 }
418 }
415 _kwargs.update(kwargs)
419 _kwargs.update(kwargs)
416 return _kwargs
420 return _kwargs
417
421
418 def whitespace_filter(self, text):
422 def whitespace_filter(self, text):
419 return text.replace('\n', '').replace('\t', '')
423 return text.replace('\n', '').replace('\t', '')
420
424
421 def get_renderer(self, type_, request):
425 def get_renderer(self, type_, request):
422 template_name = self.email_types[type_]
426 template_name = self.email_types[type_]
423 return request.get_partial_renderer(template_name)
427 return request.get_partial_renderer(template_name)
424
428
425 def render_email(self, type_, **kwargs):
429 def render_email(self, type_, **kwargs):
426 """
430 """
427 renders template for email, and returns a tuple of
431 renders template for email, and returns a tuple of
428 (subject, email_headers, email_html_body, email_plaintext_body)
432 (subject, email_headers, email_html_body, email_plaintext_body)
429 """
433 """
430 request = get_current_request()
434 request = get_current_request()
431
435
432 # translator and helpers inject
436 # translator and helpers inject
433 _kwargs = self._update_kwargs_for_render(kwargs)
437 _kwargs = self._update_kwargs_for_render(kwargs)
434 email_template = self.get_renderer(type_, request=request)
438 email_template = self.get_renderer(type_, request=request)
435 subject = email_template.render('subject', **_kwargs)
439 subject = email_template.render('subject', **_kwargs)
436
440
437 try:
441 try:
438 body_plaintext = email_template.render('body_plaintext', **_kwargs)
442 body_plaintext = email_template.render('body_plaintext', **_kwargs)
439 except AttributeError:
443 except AttributeError:
440 # it's not defined in template, ok we can skip it
444 # it's not defined in template, ok we can skip it
441 body_plaintext = ''
445 body_plaintext = ''
442
446
443 # render WHOLE template
447 # render WHOLE template
444 body = email_template.render(None, **_kwargs)
448 body = email_template.render(None, **_kwargs)
445
449
446 try:
450 try:
447 # Inline CSS styles and conversion
451 # Inline CSS styles and conversion
448 body = self.premailer_instance.transform(body)
452 body = self.premailer_instance.transform(body)
449 except Exception:
453 except Exception:
450 log.exception('Failed to parse body with premailer')
454 log.exception('Failed to parse body with premailer')
451 pass
455 pass
452
456
453 return subject, body, body_plaintext
457 return subject, body, body_plaintext
@@ -1,599 +1,607 b''
1
1
2 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 """
20 """
21 permissions model for RhodeCode
21 permissions model for RhodeCode
22 """
22 """
23 import collections
23 import collections
24 import logging
24 import logging
25 import traceback
25 import traceback
26
26
27 from sqlalchemy.exc import DatabaseError
27 from sqlalchemy.exc import DatabaseError
28
28
29 from rhodecode import events
29 from rhodecode import events
30 from rhodecode.model import BaseModel
30 from rhodecode.model import BaseModel
31 from rhodecode.model.db import (
31 from rhodecode.model.db import (
32 User, Permission, UserToPerm, UserRepoToPerm, UserRepoGroupToPerm,
32 User, Permission, UserToPerm, UserRepoToPerm, UserRepoGroupToPerm,
33 UserUserGroupToPerm, UserGroup, UserGroupToPerm, UserToRepoBranchPermission)
33 UserUserGroupToPerm, UserGroup, UserGroupToPerm, UserToRepoBranchPermission)
34 from rhodecode.lib.utils2 import str2bool, safe_int
34 from rhodecode.lib.utils2 import str2bool, safe_int
35
35
36 log = logging.getLogger(__name__)
36 log = logging.getLogger(__name__)
37
37
38
38
39 class PermissionModel(BaseModel):
39 class PermissionModel(BaseModel):
40 """
40 """
41 Permissions model for RhodeCode
41 Permissions model for RhodeCode
42 """
42 """
43 FORKING_DISABLED = 'hg.fork.none'
43 FORKING_DISABLED = 'hg.fork.none'
44 FORKING_ENABLED = 'hg.fork.repository'
44 FORKING_ENABLED = 'hg.fork.repository'
45
45
46 cls = Permission
46 cls = Permission
47 global_perms = {
47 global_perms = {
48 'default_repo_create': None,
48 'default_repo_create': None,
49 # special case for create repos on write access to group
49 # special case for create repos on write access to group
50 'default_repo_create_on_write': None,
50 'default_repo_create_on_write': None,
51 'default_repo_group_create': None,
51 'default_repo_group_create': None,
52 'default_user_group_create': None,
52 'default_user_group_create': None,
53 'default_fork_create': None,
53 'default_fork_create': None,
54 'default_inherit_default_permissions': None,
54 'default_inherit_default_permissions': None,
55 'default_register': None,
55 'default_register': None,
56 'default_password_reset': None,
56 'default_password_reset': None,
57 'default_extern_activate': None,
57 'default_extern_activate': None,
58
58
59 # object permissions below
59 # object permissions below
60 'default_repo_perm': None,
60 'default_repo_perm': None,
61 'default_group_perm': None,
61 'default_group_perm': None,
62 'default_user_group_perm': None,
62 'default_user_group_perm': None,
63
63
64 # branch
64 # branch
65 'default_branch_perm': None,
65 'default_branch_perm': None,
66 }
66 }
67
67
68 def set_global_permission_choices(self, c_obj, gettext_translator):
68 def set_global_permission_choices(self, c_obj, gettext_translator):
69 _ = gettext_translator
69 _ = gettext_translator
70
70
71 c_obj.repo_perms_choices = [
71 c_obj.repo_perms_choices = [
72 ('repository.none', _('None'),),
72 ('repository.none', _('None'),),
73 ('repository.read', _('Read'),),
73 ('repository.read', _('Read'),),
74 ('repository.write', _('Write'),),
74 ('repository.write', _('Write'),),
75 ('repository.admin', _('Admin'),)]
75 ('repository.admin', _('Admin'),)]
76
76
77 c_obj.group_perms_choices = [
77 c_obj.group_perms_choices = [
78 ('group.none', _('None'),),
78 ('group.none', _('None'),),
79 ('group.read', _('Read'),),
79 ('group.read', _('Read'),),
80 ('group.write', _('Write'),),
80 ('group.write', _('Write'),),
81 ('group.admin', _('Admin'),)]
81 ('group.admin', _('Admin'),)]
82
82
83 c_obj.user_group_perms_choices = [
83 c_obj.user_group_perms_choices = [
84 ('usergroup.none', _('None'),),
84 ('usergroup.none', _('None'),),
85 ('usergroup.read', _('Read'),),
85 ('usergroup.read', _('Read'),),
86 ('usergroup.write', _('Write'),),
86 ('usergroup.write', _('Write'),),
87 ('usergroup.admin', _('Admin'),)]
87 ('usergroup.admin', _('Admin'),)]
88
88
89 c_obj.branch_perms_choices = [
89 c_obj.branch_perms_choices = [
90 ('branch.none', _('Protected/No Access'),),
90 ('branch.none', _('Protected/No Access'),),
91 ('branch.merge', _('Web merge'),),
91 ('branch.merge', _('Web merge'),),
92 ('branch.push', _('Push'),),
92 ('branch.push', _('Push'),),
93 ('branch.push_force', _('Force Push'),)]
93 ('branch.push_force', _('Force Push'),)]
94
94
95 c_obj.register_choices = [
95 c_obj.register_choices = [
96 ('hg.register.none', _('Disabled')),
96 ('hg.register.none', _('Disabled')),
97 ('hg.register.manual_activate', _('Allowed with manual account activation')),
97 ('hg.register.manual_activate', _('Allowed with manual account activation')),
98 ('hg.register.auto_activate', _('Allowed with automatic account activation'))]
98 ('hg.register.auto_activate', _('Allowed with automatic account activation'))]
99
99
100 c_obj.password_reset_choices = [
100 c_obj.password_reset_choices = [
101 ('hg.password_reset.enabled', _('Allow password recovery')),
101 ('hg.password_reset.enabled', _('Allow password recovery')),
102 ('hg.password_reset.hidden', _('Hide password recovery link')),
102 ('hg.password_reset.hidden', _('Hide password recovery link')),
103 ('hg.password_reset.disabled', _('Disable password recovery'))]
103 ('hg.password_reset.disabled', _('Disable password recovery'))]
104
104
105 c_obj.extern_activate_choices = [
105 c_obj.extern_activate_choices = [
106 ('hg.extern_activate.manual', _('Manual activation of external account')),
106 ('hg.extern_activate.manual', _('Manual activation of external account')),
107 ('hg.extern_activate.auto', _('Automatic activation of external account'))]
107 ('hg.extern_activate.auto', _('Automatic activation of external account'))]
108
108
109 c_obj.repo_create_choices = [
109 c_obj.repo_create_choices = [
110 ('hg.create.none', _('Disabled')),
110 ('hg.create.none', _('Disabled')),
111 ('hg.create.repository', _('Enabled'))]
111 ('hg.create.repository', _('Enabled'))]
112
112
113 c_obj.repo_create_on_write_choices = [
113 c_obj.repo_create_on_write_choices = [
114 ('hg.create.write_on_repogroup.false', _('Disabled')),
114 ('hg.create.write_on_repogroup.false', _('Disabled')),
115 ('hg.create.write_on_repogroup.true', _('Enabled'))]
115 ('hg.create.write_on_repogroup.true', _('Enabled'))]
116
116
117 c_obj.user_group_create_choices = [
117 c_obj.user_group_create_choices = [
118 ('hg.usergroup.create.false', _('Disabled')),
118 ('hg.usergroup.create.false', _('Disabled')),
119 ('hg.usergroup.create.true', _('Enabled'))]
119 ('hg.usergroup.create.true', _('Enabled'))]
120
120
121 c_obj.repo_group_create_choices = [
121 c_obj.repo_group_create_choices = [
122 ('hg.repogroup.create.false', _('Disabled')),
122 ('hg.repogroup.create.false', _('Disabled')),
123 ('hg.repogroup.create.true', _('Enabled'))]
123 ('hg.repogroup.create.true', _('Enabled'))]
124
124
125 c_obj.fork_choices = [
125 c_obj.fork_choices = [
126 (self.FORKING_DISABLED, _('Disabled')),
126 (self.FORKING_DISABLED, _('Disabled')),
127 (self.FORKING_ENABLED, _('Enabled'))]
127 (self.FORKING_ENABLED, _('Enabled'))]
128
128
129 c_obj.inherit_default_permission_choices = [
129 c_obj.inherit_default_permission_choices = [
130 ('hg.inherit_default_perms.false', _('Disabled')),
130 ('hg.inherit_default_perms.false', _('Disabled')),
131 ('hg.inherit_default_perms.true', _('Enabled'))]
131 ('hg.inherit_default_perms.true', _('Enabled'))]
132
132
133 def get_default_perms(self, object_perms, suffix):
133 def get_default_perms(self, object_perms, suffix):
134 defaults = {}
134 defaults = {}
135 for perm in object_perms:
135 for perm in object_perms:
136 # perms
136 # perms
137 if perm.permission.permission_name.startswith('repository.'):
137 if perm.permission.permission_name.startswith('repository.'):
138 defaults['default_repo_perm' + suffix] = perm.permission.permission_name
138 defaults['default_repo_perm' + suffix] = perm.permission.permission_name
139
139
140 if perm.permission.permission_name.startswith('group.'):
140 if perm.permission.permission_name.startswith('group.'):
141 defaults['default_group_perm' + suffix] = perm.permission.permission_name
141 defaults['default_group_perm' + suffix] = perm.permission.permission_name
142
142
143 if perm.permission.permission_name.startswith('usergroup.'):
143 if perm.permission.permission_name.startswith('usergroup.'):
144 defaults['default_user_group_perm' + suffix] = perm.permission.permission_name
144 defaults['default_user_group_perm' + suffix] = perm.permission.permission_name
145
145
146 # branch
146 # branch
147 if perm.permission.permission_name.startswith('branch.'):
147 if perm.permission.permission_name.startswith('branch.'):
148 defaults['default_branch_perm' + suffix] = perm.permission.permission_name
148 defaults['default_branch_perm' + suffix] = perm.permission.permission_name
149
149
150 # creation of objects
150 # creation of objects
151 if perm.permission.permission_name.startswith('hg.create.write_on_repogroup'):
151 if perm.permission.permission_name.startswith('hg.create.write_on_repogroup'):
152 defaults['default_repo_create_on_write' + suffix] = perm.permission.permission_name
152 defaults['default_repo_create_on_write' + suffix] = perm.permission.permission_name
153
153
154 elif perm.permission.permission_name.startswith('hg.create.'):
154 elif perm.permission.permission_name.startswith('hg.create.'):
155 defaults['default_repo_create' + suffix] = perm.permission.permission_name
155 defaults['default_repo_create' + suffix] = perm.permission.permission_name
156
156
157 if perm.permission.permission_name.startswith('hg.fork.'):
157 if perm.permission.permission_name.startswith('hg.fork.'):
158 defaults['default_fork_create' + suffix] = perm.permission.permission_name
158 defaults['default_fork_create' + suffix] = perm.permission.permission_name
159
159
160 if perm.permission.permission_name.startswith('hg.inherit_default_perms.'):
160 if perm.permission.permission_name.startswith('hg.inherit_default_perms.'):
161 defaults['default_inherit_default_permissions' + suffix] = perm.permission.permission_name
161 defaults['default_inherit_default_permissions' + suffix] = perm.permission.permission_name
162
162
163 if perm.permission.permission_name.startswith('hg.repogroup.'):
163 if perm.permission.permission_name.startswith('hg.repogroup.'):
164 defaults['default_repo_group_create' + suffix] = perm.permission.permission_name
164 defaults['default_repo_group_create' + suffix] = perm.permission.permission_name
165
165
166 if perm.permission.permission_name.startswith('hg.usergroup.'):
166 if perm.permission.permission_name.startswith('hg.usergroup.'):
167 defaults['default_user_group_create' + suffix] = perm.permission.permission_name
167 defaults['default_user_group_create' + suffix] = perm.permission.permission_name
168
168
169 # registration and external account activation
169 # registration and external account activation
170 if perm.permission.permission_name.startswith('hg.register.'):
170 if perm.permission.permission_name.startswith('hg.register.'):
171 defaults['default_register' + suffix] = perm.permission.permission_name
171 defaults['default_register' + suffix] = perm.permission.permission_name
172
172
173 if perm.permission.permission_name.startswith('hg.password_reset.'):
173 if perm.permission.permission_name.startswith('hg.password_reset.'):
174 defaults['default_password_reset' + suffix] = perm.permission.permission_name
174 defaults['default_password_reset' + suffix] = perm.permission.permission_name
175
175
176 if perm.permission.permission_name.startswith('hg.extern_activate.'):
176 if perm.permission.permission_name.startswith('hg.extern_activate.'):
177 defaults['default_extern_activate' + suffix] = perm.permission.permission_name
177 defaults['default_extern_activate' + suffix] = perm.permission.permission_name
178
178
179 return defaults
179 return defaults
180
180
181 def _make_new_user_perm(self, user, perm_name):
181 def _make_new_user_perm(self, user, perm_name):
182 log.debug('Creating new user permission:%s', perm_name)
182 log.debug('Creating new user permission:%s', perm_name)
183 new_perm = Permission.get_by_key(perm_name)
184 if not new_perm:
185 raise ValueError(f'permission with name {perm_name} not found')
186
183 new = UserToPerm()
187 new = UserToPerm()
184 new.user = user
188 new.user = user
185 new.permission = Permission.get_by_key(perm_name)
189 new.permission = new_perm
186 return new
190 return new
187
191
188 def _make_new_user_group_perm(self, user_group, perm_name):
192 def _make_new_user_group_perm(self, user_group, perm_name):
189 log.debug('Creating new user group permission:%s', perm_name)
193 log.debug('Creating new user group permission:%s', perm_name)
194 new_perm = Permission.get_by_key(perm_name)
195 if not new_perm:
196 raise ValueError(f'permission with name {perm_name} not found')
197
190 new = UserGroupToPerm()
198 new = UserGroupToPerm()
191 new.users_group = user_group
199 new.users_group = user_group
192 new.permission = Permission.get_by_key(perm_name)
200 new.permission = new_perm
193 return new
201 return new
194
202
195 def _keep_perm(self, perm_name, keep_fields):
203 def _keep_perm(self, perm_name, keep_fields):
196 def get_pat(field_name):
204 def get_pat(field_name):
197 return {
205 return {
198 # global perms
206 # global perms
199 'default_repo_create': 'hg.create.',
207 'default_repo_create': 'hg.create.',
200 # special case for create repos on write access to group
208 # special case for create repos on write access to group
201 'default_repo_create_on_write': 'hg.create.write_on_repogroup.',
209 'default_repo_create_on_write': 'hg.create.write_on_repogroup.',
202 'default_repo_group_create': 'hg.repogroup.create.',
210 'default_repo_group_create': 'hg.repogroup.create.',
203 'default_user_group_create': 'hg.usergroup.create.',
211 'default_user_group_create': 'hg.usergroup.create.',
204 'default_fork_create': 'hg.fork.',
212 'default_fork_create': 'hg.fork.',
205 'default_inherit_default_permissions': 'hg.inherit_default_perms.',
213 'default_inherit_default_permissions': 'hg.inherit_default_perms.',
206
214
207 # application perms
215 # application perms
208 'default_register': 'hg.register.',
216 'default_register': 'hg.register.',
209 'default_password_reset': 'hg.password_reset.',
217 'default_password_reset': 'hg.password_reset.',
210 'default_extern_activate': 'hg.extern_activate.',
218 'default_extern_activate': 'hg.extern_activate.',
211
219
212 # object permissions below
220 # object permissions below
213 'default_repo_perm': 'repository.',
221 'default_repo_perm': 'repository.',
214 'default_group_perm': 'group.',
222 'default_group_perm': 'group.',
215 'default_user_group_perm': 'usergroup.',
223 'default_user_group_perm': 'usergroup.',
216 # branch
224 # branch
217 'default_branch_perm': 'branch.',
225 'default_branch_perm': 'branch.',
218
226
219 }[field_name]
227 }[field_name]
220 for field in keep_fields:
228 for field in keep_fields:
221 pat = get_pat(field)
229 pat = get_pat(field)
222 if perm_name.startswith(pat):
230 if perm_name.startswith(pat):
223 return True
231 return True
224 return False
232 return False
225
233
226 def _clear_object_perm(self, object_perms, preserve=None):
234 def _clear_object_perm(self, object_perms, preserve=None):
227 preserve = preserve or []
235 preserve = preserve or []
228 _deleted = []
236 _deleted = []
229 for perm in object_perms:
237 for perm in object_perms:
230 perm_name = perm.permission.permission_name
238 perm_name = perm.permission.permission_name
231 if not self._keep_perm(perm_name, keep_fields=preserve):
239 if not self._keep_perm(perm_name, keep_fields=preserve):
232 _deleted.append(perm_name)
240 _deleted.append(perm_name)
233 self.sa.delete(perm)
241 self.sa.delete(perm)
234 return _deleted
242 return _deleted
235
243
236 def _clear_user_perms(self, user_id, preserve=None):
244 def _clear_user_perms(self, user_id, preserve=None):
237 perms = self.sa.query(UserToPerm)\
245 perms = self.sa.query(UserToPerm)\
238 .filter(UserToPerm.user_id == user_id)\
246 .filter(UserToPerm.user_id == user_id)\
239 .all()
247 .all()
240 return self._clear_object_perm(perms, preserve=preserve)
248 return self._clear_object_perm(perms, preserve=preserve)
241
249
242 def _clear_user_group_perms(self, user_group_id, preserve=None):
250 def _clear_user_group_perms(self, user_group_id, preserve=None):
243 perms = self.sa.query(UserGroupToPerm)\
251 perms = self.sa.query(UserGroupToPerm)\
244 .filter(UserGroupToPerm.users_group_id == user_group_id)\
252 .filter(UserGroupToPerm.users_group_id == user_group_id)\
245 .all()
253 .all()
246 return self._clear_object_perm(perms, preserve=preserve)
254 return self._clear_object_perm(perms, preserve=preserve)
247
255
248 def _set_new_object_perms(self, obj_type, to_object, form_result, preserve=None):
256 def _set_new_object_perms(self, obj_type, to_object, form_result, preserve=None):
249 # clear current entries, to make this function idempotent
257 # clear current entries, to make this function idempotent
250 # it will fix even if we define more permissions or permissions
258 # it will fix even if we define more permissions or permissions
251 # are somehow missing
259 # are somehow missing
252 preserve = preserve or []
260 preserve = preserve or []
253 _global_perms = self.global_perms.copy()
261 _global_perms = self.global_perms.copy()
254 if obj_type not in ['user', 'user_group']:
262 if obj_type not in ['user', 'user_group']:
255 raise ValueError("obj_type must be on of 'user' or 'user_group'")
263 raise ValueError("obj_type must be on of 'user' or 'user_group'")
256 global_perms = len(_global_perms)
264 global_perms = len(_global_perms)
257 default_user_perms = len(Permission.DEFAULT_USER_PERMISSIONS)
265 default_user_perms = len(Permission.DEFAULT_USER_PERMISSIONS)
258 if global_perms != default_user_perms:
266 if global_perms != default_user_perms:
259 raise Exception(
267 raise Exception(
260 'Inconsistent permissions definition. Got {} vs {}'.format(
268 'Inconsistent permissions definition. Got {} vs {}'.format(
261 global_perms, default_user_perms))
269 global_perms, default_user_perms))
262
270
263 if obj_type == 'user':
271 if obj_type == 'user':
264 self._clear_user_perms(to_object.user_id, preserve)
272 self._clear_user_perms(to_object.user_id, preserve)
265 if obj_type == 'user_group':
273 if obj_type == 'user_group':
266 self._clear_user_group_perms(to_object.users_group_id, preserve)
274 self._clear_user_group_perms(to_object.users_group_id, preserve)
267
275
268 # now kill the keys that we want to preserve from the form.
276 # now kill the keys that we want to preserve from the form.
269 for key in preserve:
277 for key in preserve:
270 del _global_perms[key]
278 del _global_perms[key]
271
279
272 for k in _global_perms.copy():
280 for k in _global_perms.copy():
273 _global_perms[k] = form_result[k]
281 _global_perms[k] = form_result[k]
274
282
275 # at that stage we validate all are passed inside form_result
283 # at that stage we validate all are passed inside form_result
276 for _perm_key, perm_value in _global_perms.items():
284 for _perm_key, perm_value in _global_perms.items():
277 if perm_value is None:
285 if perm_value is None:
278 raise ValueError('Missing permission for %s' % (_perm_key,))
286 raise ValueError('Missing permission for %s' % (_perm_key,))
279
287
280 if obj_type == 'user':
288 if obj_type == 'user':
281 p = self._make_new_user_perm(object, perm_value)
289 p = self._make_new_user_perm(to_object, perm_value)
282 self.sa.add(p)
290 self.sa.add(p)
283 if obj_type == 'user_group':
291 if obj_type == 'user_group':
284 p = self._make_new_user_group_perm(object, perm_value)
292 p = self._make_new_user_group_perm(to_object, perm_value)
285 self.sa.add(p)
293 self.sa.add(p)
286
294
287 def _set_new_user_perms(self, user, form_result, preserve=None):
295 def _set_new_user_perms(self, user, form_result, preserve=None):
288 return self._set_new_object_perms(
296 return self._set_new_object_perms(
289 'user', user, form_result, preserve)
297 'user', user, form_result, preserve)
290
298
291 def _set_new_user_group_perms(self, user_group, form_result, preserve=None):
299 def _set_new_user_group_perms(self, user_group, form_result, preserve=None):
292 return self._set_new_object_perms(
300 return self._set_new_object_perms(
293 'user_group', user_group, form_result, preserve)
301 'user_group', user_group, form_result, preserve)
294
302
295 def set_new_user_perms(self, user, form_result):
303 def set_new_user_perms(self, user, form_result):
296 # calculate what to preserve from what is given in form_result
304 # calculate what to preserve from what is given in form_result
297 preserve = set(self.global_perms.keys()).difference(set(form_result.keys()))
305 preserve = set(self.global_perms.keys()).difference(set(form_result.keys()))
298 return self._set_new_user_perms(user, form_result, preserve)
306 return self._set_new_user_perms(user, form_result, preserve)
299
307
300 def set_new_user_group_perms(self, user_group, form_result):
308 def set_new_user_group_perms(self, user_group, form_result):
301 # calculate what to preserve from what is given in form_result
309 # calculate what to preserve from what is given in form_result
302 preserve = set(self.global_perms.keys()).difference(set(form_result.keys()))
310 preserve = set(self.global_perms.keys()).difference(set(form_result.keys()))
303 return self._set_new_user_group_perms(user_group, form_result, preserve)
311 return self._set_new_user_group_perms(user_group, form_result, preserve)
304
312
305 def create_permissions(self):
313 def create_permissions(self):
306 """
314 """
307 Create permissions for whole system
315 Create permissions for whole system
308 """
316 """
309 for p in Permission.PERMS:
317 for p in Permission.PERMS:
310 if not Permission.get_by_key(p[0]):
318 if not Permission.get_by_key(p[0]):
311 new_perm = Permission()
319 new_perm = Permission()
312 new_perm.permission_name = p[0]
320 new_perm.permission_name = p[0]
313 new_perm.permission_longname = p[0] # translation err with p[1]
321 new_perm.permission_longname = p[0] # translation err with p[1]
314 self.sa.add(new_perm)
322 self.sa.add(new_perm)
315
323
316 def _create_default_object_permission(self, obj_type, obj, obj_perms,
324 def _create_default_object_permission(self, obj_type, obj, obj_perms,
317 force=False):
325 force=False):
318 if obj_type not in ['user', 'user_group']:
326 if obj_type not in ['user', 'user_group']:
319 raise ValueError("obj_type must be on of 'user' or 'user_group'")
327 raise ValueError("obj_type must be on of 'user' or 'user_group'")
320
328
321 def _get_group(perm_name):
329 def _get_group(perm_name):
322 return '.'.join(perm_name.split('.')[:1])
330 return '.'.join(perm_name.split('.')[:1])
323
331
324 defined_perms_groups = map(
332 defined_perms_groups = list(map(
325 _get_group, (x.permission.permission_name for x in obj_perms))
333 _get_group, (x.permission.permission_name for x in obj_perms)))
326 log.debug('GOT ALREADY DEFINED:%s', obj_perms)
334 log.debug('GOT ALREADY DEFINED:%s', obj_perms)
327
335
328 if force:
336 if force:
329 self._clear_object_perm(obj_perms)
337 self._clear_object_perm(obj_perms)
330 self.sa.commit()
338 self.sa.commit()
331 defined_perms_groups = []
339 defined_perms_groups = []
332 # for every default permission that needs to be created, we check if
340 # for every default permission that needs to be created, we check if
333 # it's group is already defined, if it's not we create default perm
341 # it's group is already defined, if it's not we create default perm
334 for perm_name in Permission.DEFAULT_USER_PERMISSIONS:
342 for perm_name in Permission.DEFAULT_USER_PERMISSIONS:
335 gr = _get_group(perm_name)
343 gr = _get_group(perm_name)
336 if gr not in defined_perms_groups:
344 if gr not in defined_perms_groups:
337 log.debug('GR:%s not found, creating permission %s',
345 log.debug('GR:%s not found, creating permission %s',
338 gr, perm_name)
346 gr, perm_name)
339 if obj_type == 'user':
347 if obj_type == 'user':
340 new_perm = self._make_new_user_perm(obj, perm_name)
348 new_perm = self._make_new_user_perm(obj, perm_name)
341 self.sa.add(new_perm)
349 self.sa.add(new_perm)
342 if obj_type == 'user_group':
350 if obj_type == 'user_group':
343 new_perm = self._make_new_user_group_perm(obj, perm_name)
351 new_perm = self._make_new_user_group_perm(obj, perm_name)
344 self.sa.add(new_perm)
352 self.sa.add(new_perm)
345
353
346 def create_default_user_permissions(self, user, force=False):
354 def create_default_user_permissions(self, user, force=False):
347 """
355 """
348 Creates only missing default permissions for user, if force is set it
356 Creates only missing default permissions for user, if force is set it
349 resets the default permissions for that user
357 resets the default permissions for that user
350
358
351 :param user:
359 :param user:
352 :param force:
360 :param force:
353 """
361 """
354 user = self._get_user(user)
362 user = self._get_user(user)
355 obj_perms = UserToPerm.query().filter(UserToPerm.user == user).all()
363 obj_perms = UserToPerm.query().filter(UserToPerm.user == user).all()
356 return self._create_default_object_permission(
364 return self._create_default_object_permission(
357 'user', user, obj_perms, force)
365 'user', user, obj_perms, force)
358
366
359 def create_default_user_group_permissions(self, user_group, force=False):
367 def create_default_user_group_permissions(self, user_group, force=False):
360 """
368 """
361 Creates only missing default permissions for user group, if force is
369 Creates only missing default permissions for user group, if force is
362 set it resets the default permissions for that user group
370 set it resets the default permissions for that user group
363
371
364 :param user_group:
372 :param user_group:
365 :param force:
373 :param force:
366 """
374 """
367 user_group = self._get_user_group(user_group)
375 user_group = self._get_user_group(user_group)
368 obj_perms = UserToPerm.query().filter(UserGroupToPerm.users_group == user_group).all()
376 obj_perms = UserToPerm.query().filter(UserGroupToPerm.users_group == user_group).all()
369 return self._create_default_object_permission(
377 return self._create_default_object_permission(
370 'user_group', user_group, obj_perms, force)
378 'user_group', user_group, obj_perms, force)
371
379
372 def update_application_permissions(self, form_result):
380 def update_application_permissions(self, form_result):
373 if 'perm_user_id' in form_result:
381 if 'perm_user_id' in form_result:
374 perm_user = User.get(safe_int(form_result['perm_user_id']))
382 perm_user = User.get(safe_int(form_result['perm_user_id']))
375 else:
383 else:
376 # used mostly to do lookup for default user
384 # used mostly to do lookup for default user
377 perm_user = User.get_by_username(form_result['perm_user_name'])
385 perm_user = User.get_by_username(form_result['perm_user_name'])
378
386
379 try:
387 try:
380 # stage 1 set anonymous access
388 # stage 1 set anonymous access
381 if perm_user.username == User.DEFAULT_USER:
389 if perm_user.username == User.DEFAULT_USER:
382 perm_user.active = str2bool(form_result['anonymous'])
390 perm_user.active = str2bool(form_result['anonymous'])
383 self.sa.add(perm_user)
391 self.sa.add(perm_user)
384
392
385 # stage 2 reset defaults and set them from form data
393 # stage 2 reset defaults and set them from form data
386 self._set_new_user_perms(perm_user, form_result, preserve=[
394 self._set_new_user_perms(perm_user, form_result, preserve=[
387 'default_repo_perm',
395 'default_repo_perm',
388 'default_group_perm',
396 'default_group_perm',
389 'default_user_group_perm',
397 'default_user_group_perm',
390 'default_branch_perm',
398 'default_branch_perm',
391
399
392 'default_repo_group_create',
400 'default_repo_group_create',
393 'default_user_group_create',
401 'default_user_group_create',
394 'default_repo_create_on_write',
402 'default_repo_create_on_write',
395 'default_repo_create',
403 'default_repo_create',
396 'default_fork_create',
404 'default_fork_create',
397 'default_inherit_default_permissions'])
405 'default_inherit_default_permissions'])
398
406
399 self.sa.commit()
407 self.sa.commit()
400 except (DatabaseError,):
408 except (DatabaseError,):
401 log.error(traceback.format_exc())
409 log.error(traceback.format_exc())
402 self.sa.rollback()
410 self.sa.rollback()
403 raise
411 raise
404
412
405 def update_user_permissions(self, form_result):
413 def update_user_permissions(self, form_result):
406 if 'perm_user_id' in form_result:
414 if 'perm_user_id' in form_result:
407 perm_user = User.get(safe_int(form_result['perm_user_id']))
415 perm_user = User.get(safe_int(form_result['perm_user_id']))
408 else:
416 else:
409 # used mostly to do lookup for default user
417 # used mostly to do lookup for default user
410 perm_user = User.get_by_username(form_result['perm_user_name'])
418 perm_user = User.get_by_username(form_result['perm_user_name'])
411 try:
419 try:
412 # stage 2 reset defaults and set them from form data
420 # stage 2 reset defaults and set them from form data
413 self._set_new_user_perms(perm_user, form_result, preserve=[
421 self._set_new_user_perms(perm_user, form_result, preserve=[
414 'default_repo_perm',
422 'default_repo_perm',
415 'default_group_perm',
423 'default_group_perm',
416 'default_user_group_perm',
424 'default_user_group_perm',
417 'default_branch_perm',
425 'default_branch_perm',
418
426
419 'default_register',
427 'default_register',
420 'default_password_reset',
428 'default_password_reset',
421 'default_extern_activate'])
429 'default_extern_activate'])
422 self.sa.commit()
430 self.sa.commit()
423 except (DatabaseError,):
431 except (DatabaseError,):
424 log.error(traceback.format_exc())
432 log.error(traceback.format_exc())
425 self.sa.rollback()
433 self.sa.rollback()
426 raise
434 raise
427
435
428 def update_user_group_permissions(self, form_result):
436 def update_user_group_permissions(self, form_result):
429 if 'perm_user_group_id' in form_result:
437 if 'perm_user_group_id' in form_result:
430 perm_user_group = UserGroup.get(safe_int(form_result['perm_user_group_id']))
438 perm_user_group = UserGroup.get(safe_int(form_result['perm_user_group_id']))
431 else:
439 else:
432 # used mostly to do lookup for default user
440 # used mostly to do lookup for default user
433 perm_user_group = UserGroup.get_by_group_name(form_result['perm_user_group_name'])
441 perm_user_group = UserGroup.get_by_group_name(form_result['perm_user_group_name'])
434 try:
442 try:
435 # stage 2 reset defaults and set them from form data
443 # stage 2 reset defaults and set them from form data
436 self._set_new_user_group_perms(perm_user_group, form_result, preserve=[
444 self._set_new_user_group_perms(perm_user_group, form_result, preserve=[
437 'default_repo_perm',
445 'default_repo_perm',
438 'default_group_perm',
446 'default_group_perm',
439 'default_user_group_perm',
447 'default_user_group_perm',
440 'default_branch_perm',
448 'default_branch_perm',
441
449
442 'default_register',
450 'default_register',
443 'default_password_reset',
451 'default_password_reset',
444 'default_extern_activate'])
452 'default_extern_activate'])
445 self.sa.commit()
453 self.sa.commit()
446 except (DatabaseError,):
454 except (DatabaseError,):
447 log.error(traceback.format_exc())
455 log.error(traceback.format_exc())
448 self.sa.rollback()
456 self.sa.rollback()
449 raise
457 raise
450
458
451 def update_object_permissions(self, form_result):
459 def update_object_permissions(self, form_result):
452 if 'perm_user_id' in form_result:
460 if 'perm_user_id' in form_result:
453 perm_user = User.get(safe_int(form_result['perm_user_id']))
461 perm_user = User.get(safe_int(form_result['perm_user_id']))
454 else:
462 else:
455 # used mostly to do lookup for default user
463 # used mostly to do lookup for default user
456 perm_user = User.get_by_username(form_result['perm_user_name'])
464 perm_user = User.get_by_username(form_result['perm_user_name'])
457 try:
465 try:
458
466
459 # stage 2 reset defaults and set them from form data
467 # stage 2 reset defaults and set them from form data
460 self._set_new_user_perms(perm_user, form_result, preserve=[
468 self._set_new_user_perms(perm_user, form_result, preserve=[
461 'default_repo_group_create',
469 'default_repo_group_create',
462 'default_user_group_create',
470 'default_user_group_create',
463 'default_repo_create_on_write',
471 'default_repo_create_on_write',
464 'default_repo_create',
472 'default_repo_create',
465 'default_fork_create',
473 'default_fork_create',
466 'default_inherit_default_permissions',
474 'default_inherit_default_permissions',
467 'default_branch_perm',
475 'default_branch_perm',
468
476
469 'default_register',
477 'default_register',
470 'default_password_reset',
478 'default_password_reset',
471 'default_extern_activate'])
479 'default_extern_activate'])
472
480
473 # overwrite default repo permissions
481 # overwrite default repo permissions
474 if form_result['overwrite_default_repo']:
482 if form_result['overwrite_default_repo']:
475 _def_name = form_result['default_repo_perm'].split('repository.')[-1]
483 _def_name = form_result['default_repo_perm'].split('repository.')[-1]
476 _def = Permission.get_by_key('repository.' + _def_name)
484 _def = Permission.get_by_key('repository.' + _def_name)
477 for r2p in self.sa.query(UserRepoToPerm)\
485 for r2p in self.sa.query(UserRepoToPerm)\
478 .filter(UserRepoToPerm.user == perm_user)\
486 .filter(UserRepoToPerm.user == perm_user)\
479 .all():
487 .all():
480 # don't reset PRIVATE repositories
488 # don't reset PRIVATE repositories
481 if not r2p.repository.private:
489 if not r2p.repository.private:
482 r2p.permission = _def
490 r2p.permission = _def
483 self.sa.add(r2p)
491 self.sa.add(r2p)
484
492
485 # overwrite default repo group permissions
493 # overwrite default repo group permissions
486 if form_result['overwrite_default_group']:
494 if form_result['overwrite_default_group']:
487 _def_name = form_result['default_group_perm'].split('group.')[-1]
495 _def_name = form_result['default_group_perm'].split('group.')[-1]
488 _def = Permission.get_by_key('group.' + _def_name)
496 _def = Permission.get_by_key('group.' + _def_name)
489 for g2p in self.sa.query(UserRepoGroupToPerm)\
497 for g2p in self.sa.query(UserRepoGroupToPerm)\
490 .filter(UserRepoGroupToPerm.user == perm_user)\
498 .filter(UserRepoGroupToPerm.user == perm_user)\
491 .all():
499 .all():
492 g2p.permission = _def
500 g2p.permission = _def
493 self.sa.add(g2p)
501 self.sa.add(g2p)
494
502
495 # overwrite default user group permissions
503 # overwrite default user group permissions
496 if form_result['overwrite_default_user_group']:
504 if form_result['overwrite_default_user_group']:
497 _def_name = form_result['default_user_group_perm'].split('usergroup.')[-1]
505 _def_name = form_result['default_user_group_perm'].split('usergroup.')[-1]
498 # user groups
506 # user groups
499 _def = Permission.get_by_key('usergroup.' + _def_name)
507 _def = Permission.get_by_key('usergroup.' + _def_name)
500 for g2p in self.sa.query(UserUserGroupToPerm)\
508 for g2p in self.sa.query(UserUserGroupToPerm)\
501 .filter(UserUserGroupToPerm.user == perm_user)\
509 .filter(UserUserGroupToPerm.user == perm_user)\
502 .all():
510 .all():
503 g2p.permission = _def
511 g2p.permission = _def
504 self.sa.add(g2p)
512 self.sa.add(g2p)
505
513
506 # COMMIT
514 # COMMIT
507 self.sa.commit()
515 self.sa.commit()
508 except (DatabaseError,):
516 except (DatabaseError,):
509 log.exception('Failed to set default object permissions')
517 log.exception('Failed to set default object permissions')
510 self.sa.rollback()
518 self.sa.rollback()
511 raise
519 raise
512
520
513 def update_branch_permissions(self, form_result):
521 def update_branch_permissions(self, form_result):
514 if 'perm_user_id' in form_result:
522 if 'perm_user_id' in form_result:
515 perm_user = User.get(safe_int(form_result['perm_user_id']))
523 perm_user = User.get(safe_int(form_result['perm_user_id']))
516 else:
524 else:
517 # used mostly to do lookup for default user
525 # used mostly to do lookup for default user
518 perm_user = User.get_by_username(form_result['perm_user_name'])
526 perm_user = User.get_by_username(form_result['perm_user_name'])
519 try:
527 try:
520
528
521 # stage 2 reset defaults and set them from form data
529 # stage 2 reset defaults and set them from form data
522 self._set_new_user_perms(perm_user, form_result, preserve=[
530 self._set_new_user_perms(perm_user, form_result, preserve=[
523 'default_repo_perm',
531 'default_repo_perm',
524 'default_group_perm',
532 'default_group_perm',
525 'default_user_group_perm',
533 'default_user_group_perm',
526
534
527 'default_repo_group_create',
535 'default_repo_group_create',
528 'default_user_group_create',
536 'default_user_group_create',
529 'default_repo_create_on_write',
537 'default_repo_create_on_write',
530 'default_repo_create',
538 'default_repo_create',
531 'default_fork_create',
539 'default_fork_create',
532 'default_inherit_default_permissions',
540 'default_inherit_default_permissions',
533
541
534 'default_register',
542 'default_register',
535 'default_password_reset',
543 'default_password_reset',
536 'default_extern_activate'])
544 'default_extern_activate'])
537
545
538 # overwrite default branch permissions
546 # overwrite default branch permissions
539 if form_result['overwrite_default_branch']:
547 if form_result['overwrite_default_branch']:
540 _def_name = \
548 _def_name = \
541 form_result['default_branch_perm'].split('branch.')[-1]
549 form_result['default_branch_perm'].split('branch.')[-1]
542
550
543 _def = Permission.get_by_key('branch.' + _def_name)
551 _def = Permission.get_by_key('branch.' + _def_name)
544
552
545 user_perms = UserToRepoBranchPermission.query()\
553 user_perms = UserToRepoBranchPermission.query()\
546 .join(UserToRepoBranchPermission.user_repo_to_perm)\
554 .join(UserToRepoBranchPermission.user_repo_to_perm)\
547 .filter(UserRepoToPerm.user == perm_user).all()
555 .filter(UserRepoToPerm.user == perm_user).all()
548
556
549 for g2p in user_perms:
557 for g2p in user_perms:
550 g2p.permission = _def
558 g2p.permission = _def
551 self.sa.add(g2p)
559 self.sa.add(g2p)
552
560
553 # COMMIT
561 # COMMIT
554 self.sa.commit()
562 self.sa.commit()
555 except (DatabaseError,):
563 except (DatabaseError,):
556 log.exception('Failed to set default branch permissions')
564 log.exception('Failed to set default branch permissions')
557 self.sa.rollback()
565 self.sa.rollback()
558 raise
566 raise
559
567
560 def get_users_with_repo_write(self, db_repo):
568 def get_users_with_repo_write(self, db_repo):
561 write_plus = ['repository.write', 'repository.admin']
569 write_plus = ['repository.write', 'repository.admin']
562 default_user_id = User.get_default_user_id()
570 default_user_id = User.get_default_user_id()
563 user_write_permissions = collections.OrderedDict()
571 user_write_permissions = collections.OrderedDict()
564
572
565 # write or higher and DEFAULT user for inheritance
573 # write or higher and DEFAULT user for inheritance
566 for perm in db_repo.permissions():
574 for perm in db_repo.permissions():
567 if perm.permission in write_plus or perm.user_id == default_user_id:
575 if perm.permission in write_plus or perm.user_id == default_user_id:
568 user_write_permissions[perm.user_id] = perm
576 user_write_permissions[perm.user_id] = perm
569 return user_write_permissions
577 return user_write_permissions
570
578
571 def get_user_groups_with_repo_write(self, db_repo):
579 def get_user_groups_with_repo_write(self, db_repo):
572 write_plus = ['repository.write', 'repository.admin']
580 write_plus = ['repository.write', 'repository.admin']
573 user_group_write_permissions = collections.OrderedDict()
581 user_group_write_permissions = collections.OrderedDict()
574
582
575 # write or higher and DEFAULT user for inheritance
583 # write or higher and DEFAULT user for inheritance
576 for p in db_repo.permission_user_groups():
584 for p in db_repo.permission_user_groups():
577 if p.permission in write_plus:
585 if p.permission in write_plus:
578 user_group_write_permissions[p.users_group_id] = p
586 user_group_write_permissions[p.users_group_id] = p
579 return user_group_write_permissions
587 return user_group_write_permissions
580
588
581 def trigger_permission_flush(self, affected_user_ids=None):
589 def trigger_permission_flush(self, affected_user_ids=None):
582 affected_user_ids = affected_user_ids or User.get_all_user_ids()
590 affected_user_ids = affected_user_ids or User.get_all_user_ids()
583 events.trigger(events.UserPermissionsChange(affected_user_ids))
591 events.trigger(events.UserPermissionsChange(affected_user_ids))
584
592
585 def flush_user_permission_caches(self, changes, affected_user_ids=None):
593 def flush_user_permission_caches(self, changes, affected_user_ids=None):
586 affected_user_ids = affected_user_ids or []
594 affected_user_ids = affected_user_ids or []
587
595
588 for change in changes['added'] + changes['updated'] + changes['deleted']:
596 for change in changes['added'] + changes['updated'] + changes['deleted']:
589 if change['type'] == 'user':
597 if change['type'] == 'user':
590 affected_user_ids.append(change['id'])
598 affected_user_ids.append(change['id'])
591 if change['type'] == 'user_group':
599 if change['type'] == 'user_group':
592 user_group = UserGroup.get(safe_int(change['id']))
600 user_group = UserGroup.get(safe_int(change['id']))
593 if user_group:
601 if user_group:
594 group_members_ids = [x.user_id for x in user_group.members]
602 group_members_ids = [x.user_id for x in user_group.members]
595 affected_user_ids.extend(group_members_ids)
603 affected_user_ids.extend(group_members_ids)
596
604
597 self.trigger_permission_flush(affected_user_ids)
605 self.trigger_permission_flush(affected_user_ids)
598
606
599 return affected_user_ids
607 return affected_user_ids
@@ -1,2380 +1,2394 b''
1
1
2
2
3 # Copyright (C) 2012-2020 RhodeCode GmbH
3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 pull request model for RhodeCode
23 pull request model for RhodeCode
24 """
24 """
25
25
26
27 import json
28 import logging
26 import logging
29 import os
27 import os
30
28
31 import datetime
29 import datetime
32 import urllib.request, urllib.parse, urllib.error
30 import urllib.request
31 import urllib.parse
32 import urllib.error
33 import collections
33 import collections
34
34
35 import dataclasses as dataclasses
35 from pyramid.threadlocal import get_current_request
36 from pyramid.threadlocal import get_current_request
36
37
37 from rhodecode.lib.vcs.nodes import FileNode
38 from rhodecode.lib.vcs.nodes import FileNode
38 from rhodecode.translation import lazy_ugettext
39 from rhodecode.translation import lazy_ugettext
39 from rhodecode.lib import helpers as h, hooks_utils, diffs
40 from rhodecode.lib import helpers as h, hooks_utils, diffs
40 from rhodecode.lib import audit_logger
41 from rhodecode.lib import audit_logger
41 from collections import OrderedDict
42 from collections import OrderedDict
42 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 from rhodecode.lib.ext_json import sjson as json
43 from rhodecode.lib.markup_renderer import (
45 from rhodecode.lib.markup_renderer import (
44 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
46 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 from rhodecode.lib.utils2 import (
47 from rhodecode.lib.hash_utils import md5_safe
46 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
48 from rhodecode.lib.str_utils import safe_str
47 get_current_rhodecode_user)
49 from rhodecode.lib.utils2 import AttributeDict, get_current_rhodecode_user
48 from rhodecode.lib.vcs.backends.base import (
50 from rhodecode.lib.vcs.backends.base import (
49 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
51 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
50 TargetRefMissing, SourceRefMissing)
52 TargetRefMissing, SourceRefMissing)
51 from rhodecode.lib.vcs.conf import settings as vcs_settings
53 from rhodecode.lib.vcs.conf import settings as vcs_settings
52 from rhodecode.lib.vcs.exceptions import (
54 from rhodecode.lib.vcs.exceptions import (
53 CommitDoesNotExistError, EmptyRepositoryError)
55 CommitDoesNotExistError, EmptyRepositoryError)
54 from rhodecode.model import BaseModel
56 from rhodecode.model import BaseModel
55 from rhodecode.model.changeset_status import ChangesetStatusModel
57 from rhodecode.model.changeset_status import ChangesetStatusModel
56 from rhodecode.model.comment import CommentsModel
58 from rhodecode.model.comment import CommentsModel
57 from rhodecode.model.db import (
59 from rhodecode.model.db import (
58 aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 aliased, null, lazyload, and_, or_, select, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
59 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
61 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
60 from rhodecode.model.meta import Session
62 from rhodecode.model.meta import Session
61 from rhodecode.model.notification import NotificationModel, \
63 from rhodecode.model.notification import NotificationModel, \
62 EmailNotificationModel
64 EmailNotificationModel
63 from rhodecode.model.scm import ScmModel
65 from rhodecode.model.scm import ScmModel
64 from rhodecode.model.settings import VcsSettingsModel
66 from rhodecode.model.settings import VcsSettingsModel
65
67
66
68
67 log = logging.getLogger(__name__)
69 log = logging.getLogger(__name__)
68
70
69
71
70 # Data structure to hold the response data when updating commits during a pull
72 # Data structure to hold the response data when updating commits during a pull
71 # request update.
73 # request update.
72 class UpdateResponse(object):
74 class UpdateResponse(object):
73
75
74 def __init__(self, executed, reason, new, old, common_ancestor_id,
76 def __init__(self, executed, reason, new, old, common_ancestor_id,
75 commit_changes, source_changed, target_changed):
77 commit_changes, source_changed, target_changed):
76
78
77 self.executed = executed
79 self.executed = executed
78 self.reason = reason
80 self.reason = reason
79 self.new = new
81 self.new = new
80 self.old = old
82 self.old = old
81 self.common_ancestor_id = common_ancestor_id
83 self.common_ancestor_id = common_ancestor_id
82 self.changes = commit_changes
84 self.changes = commit_changes
83 self.source_changed = source_changed
85 self.source_changed = source_changed
84 self.target_changed = target_changed
86 self.target_changed = target_changed
85
87
86
88
87 def get_diff_info(
89 def get_diff_info(
88 source_repo, source_ref, target_repo, target_ref, get_authors=False,
90 source_repo, source_ref, target_repo, target_ref, get_authors=False,
89 get_commit_authors=True):
91 get_commit_authors=True):
90 """
92 """
91 Calculates detailed diff information for usage in preview of creation of a pull-request.
93 Calculates detailed diff information for usage in preview of creation of a pull-request.
92 This is also used for default reviewers logic
94 This is also used for default reviewers logic
93 """
95 """
94
96
95 source_scm = source_repo.scm_instance()
97 source_scm = source_repo.scm_instance()
96 target_scm = target_repo.scm_instance()
98 target_scm = target_repo.scm_instance()
97
99
98 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
100 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
99 if not ancestor_id:
101 if not ancestor_id:
100 raise ValueError(
102 raise ValueError(
101 'cannot calculate diff info without a common ancestor. '
103 'cannot calculate diff info without a common ancestor. '
102 'Make sure both repositories are related, and have a common forking commit.')
104 'Make sure both repositories are related, and have a common forking commit.')
103
105
104 # case here is that want a simple diff without incoming commits,
106 # case here is that want a simple diff without incoming commits,
105 # previewing what will be merged based only on commits in the source.
107 # previewing what will be merged based only on commits in the source.
106 log.debug('Using ancestor %s as source_ref instead of %s',
108 log.debug('Using ancestor %s as source_ref instead of %s',
107 ancestor_id, source_ref)
109 ancestor_id, source_ref)
108
110
109 # source of changes now is the common ancestor
111 # source of changes now is the common ancestor
110 source_commit = source_scm.get_commit(commit_id=ancestor_id)
112 source_commit = source_scm.get_commit(commit_id=ancestor_id)
111 # target commit becomes the source ref as it is the last commit
113 # target commit becomes the source ref as it is the last commit
112 # for diff generation this logic gives proper diff
114 # for diff generation this logic gives proper diff
113 target_commit = source_scm.get_commit(commit_id=source_ref)
115 target_commit = source_scm.get_commit(commit_id=source_ref)
114
116
115 vcs_diff = \
117 vcs_diff = \
116 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
118 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
117 ignore_whitespace=False, context=3)
119 ignore_whitespace=False, context=3)
118
120
119 diff_processor = diffs.DiffProcessor(
121 diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff',
120 vcs_diff, format='newdiff', diff_limit=None,
122 diff_limit=0, file_limit=0, show_full_diff=True)
121 file_limit=None, show_full_diff=True)
122
123
123 _parsed = diff_processor.prepare()
124 _parsed = diff_processor.prepare()
124
125
125 all_files = []
126 all_files = []
126 all_files_changes = []
127 all_files_changes = []
127 changed_lines = {}
128 changed_lines = {}
128 stats = [0, 0]
129 stats = [0, 0]
129 for f in _parsed:
130 for f in _parsed:
130 all_files.append(f['filename'])
131 all_files.append(f['filename'])
131 all_files_changes.append({
132 all_files_changes.append({
132 'filename': f['filename'],
133 'filename': f['filename'],
133 'stats': f['stats']
134 'stats': f['stats']
134 })
135 })
135 stats[0] += f['stats']['added']
136 stats[0] += f['stats']['added']
136 stats[1] += f['stats']['deleted']
137 stats[1] += f['stats']['deleted']
137
138
138 changed_lines[f['filename']] = []
139 changed_lines[f['filename']] = []
139 if len(f['chunks']) < 2:
140 if len(f['chunks']) < 2:
140 continue
141 continue
141 # first line is "context" information
142 # first line is "context" information
142 for chunks in f['chunks'][1:]:
143 for chunks in f['chunks'][1:]:
143 for chunk in chunks['lines']:
144 for chunk in chunks['lines']:
144 if chunk['action'] not in ('del', 'mod'):
145 if chunk['action'] not in ('del', 'mod'):
145 continue
146 continue
146 changed_lines[f['filename']].append(chunk['old_lineno'])
147 changed_lines[f['filename']].append(chunk['old_lineno'])
147
148
148 commit_authors = []
149 commit_authors = []
149 user_counts = {}
150 user_counts = {}
150 email_counts = {}
151 email_counts = {}
151 author_counts = {}
152 author_counts = {}
152 _commit_cache = {}
153 _commit_cache = {}
153
154
154 commits = []
155 commits = []
155 if get_commit_authors:
156 if get_commit_authors:
156 log.debug('Obtaining commit authors from set of commits')
157 log.debug('Obtaining commit authors from set of commits')
157 _compare_data = target_scm.compare(
158 _compare_data = target_scm.compare(
158 target_ref, source_ref, source_scm, merge=True,
159 target_ref, source_ref, source_scm, merge=True,
159 pre_load=["author", "date", "message"]
160 pre_load=["author", "date", "message"]
160 )
161 )
161
162
162 for commit in _compare_data:
163 for commit in _compare_data:
163 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
164 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on data returned
164 # at this function which is later called via JSON serialization
165 # at this function which is later called via JSON serialization
165 serialized_commit = dict(
166 serialized_commit = dict(
166 author=commit.author,
167 author=commit.author,
167 date=commit.date,
168 date=commit.date,
168 message=commit.message,
169 message=commit.message,
169 commit_id=commit.raw_id,
170 commit_id=commit.raw_id,
170 raw_id=commit.raw_id
171 raw_id=commit.raw_id
171 )
172 )
172 commits.append(serialized_commit)
173 commits.append(serialized_commit)
173 user = User.get_from_cs_author(serialized_commit['author'])
174 user = User.get_from_cs_author(serialized_commit['author'])
174 if user and user not in commit_authors:
175 if user and user not in commit_authors:
175 commit_authors.append(user)
176 commit_authors.append(user)
176
177
177 # lines
178 # lines
178 if get_authors:
179 if get_authors:
179 log.debug('Calculating authors of changed files')
180 log.debug('Calculating authors of changed files')
180 target_commit = source_repo.get_commit(ancestor_id)
181 target_commit = source_repo.get_commit(ancestor_id)
181
182
182 for fname, lines in changed_lines.items():
183 for fname, lines in changed_lines.items():
183
184
184 try:
185 try:
185 node = target_commit.get_node(fname, pre_load=["is_binary"])
186 node = target_commit.get_node(fname, pre_load=["is_binary"])
186 except Exception:
187 except Exception:
187 log.exception("Failed to load node with path %s", fname)
188 log.exception("Failed to load node with path %s", fname)
188 continue
189 continue
189
190
190 if not isinstance(node, FileNode):
191 if not isinstance(node, FileNode):
191 continue
192 continue
192
193
193 # NOTE(marcink): for binary node we don't do annotation, just use last author
194 # NOTE(marcink): for binary node we don't do annotation, just use last author
194 if node.is_binary:
195 if node.is_binary:
195 author = node.last_commit.author
196 author = node.last_commit.author
196 email = node.last_commit.author_email
197 email = node.last_commit.author_email
197
198
198 user = User.get_from_cs_author(author)
199 user = User.get_from_cs_author(author)
199 if user:
200 if user:
200 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
201 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
201 author_counts[author] = author_counts.get(author, 0) + 1
202 author_counts[author] = author_counts.get(author, 0) + 1
202 email_counts[email] = email_counts.get(email, 0) + 1
203 email_counts[email] = email_counts.get(email, 0) + 1
203
204
204 continue
205 continue
205
206
206 for annotation in node.annotate:
207 for annotation in node.annotate:
207 line_no, commit_id, get_commit_func, line_text = annotation
208 line_no, commit_id, get_commit_func, line_text = annotation
208 if line_no in lines:
209 if line_no in lines:
209 if commit_id not in _commit_cache:
210 if commit_id not in _commit_cache:
210 _commit_cache[commit_id] = get_commit_func()
211 _commit_cache[commit_id] = get_commit_func()
211 commit = _commit_cache[commit_id]
212 commit = _commit_cache[commit_id]
212 author = commit.author
213 author = commit.author
213 email = commit.author_email
214 email = commit.author_email
214 user = User.get_from_cs_author(author)
215 user = User.get_from_cs_author(author)
215 if user:
216 if user:
216 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
217 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
217 author_counts[author] = author_counts.get(author, 0) + 1
218 author_counts[author] = author_counts.get(author, 0) + 1
218 email_counts[email] = email_counts.get(email, 0) + 1
219 email_counts[email] = email_counts.get(email, 0) + 1
219
220
220 log.debug('Default reviewers processing finished')
221 log.debug('Default reviewers processing finished')
221
222
222 return {
223 return {
223 'commits': commits,
224 'commits': commits,
224 'files': all_files_changes,
225 'files': all_files_changes,
225 'stats': stats,
226 'stats': stats,
226 'ancestor': ancestor_id,
227 'ancestor': ancestor_id,
227 # original authors of modified files
228 # original authors of modified files
228 'original_authors': {
229 'original_authors': {
229 'users': user_counts,
230 'users': user_counts,
230 'authors': author_counts,
231 'authors': author_counts,
231 'emails': email_counts,
232 'emails': email_counts,
232 },
233 },
233 'commit_authors': commit_authors
234 'commit_authors': commit_authors
234 }
235 }
235
236
236
237
237 class PullRequestModel(BaseModel):
238 class PullRequestModel(BaseModel):
238
239
239 cls = PullRequest
240 cls = PullRequest
240
241
241 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
242 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
242
243
243 UPDATE_STATUS_MESSAGES = {
244 UPDATE_STATUS_MESSAGES = {
244 UpdateFailureReason.NONE: lazy_ugettext(
245 UpdateFailureReason.NONE: lazy_ugettext(
245 'Pull request update successful.'),
246 'Pull request update successful.'),
246 UpdateFailureReason.UNKNOWN: lazy_ugettext(
247 UpdateFailureReason.UNKNOWN: lazy_ugettext(
247 'Pull request update failed because of an unknown error.'),
248 'Pull request update failed because of an unknown error.'),
248 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
249 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
249 'No update needed because the source and target have not changed.'),
250 'No update needed because the source and target have not changed.'),
250 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
251 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
251 'Pull request cannot be updated because the reference type is '
252 'Pull request cannot be updated because the reference type is '
252 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
253 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
253 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
254 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
254 'This pull request cannot be updated because the target '
255 'This pull request cannot be updated because the target '
255 'reference is missing.'),
256 'reference is missing.'),
256 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
257 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
257 'This pull request cannot be updated because the source '
258 'This pull request cannot be updated because the source '
258 'reference is missing.'),
259 'reference is missing.'),
259 }
260 }
260 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
261 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
261 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
262 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
262
263
263 def __get_pull_request(self, pull_request):
264 def __get_pull_request(self, pull_request):
264 return self._get_instance((
265 return self._get_instance((
265 PullRequest, PullRequestVersion), pull_request)
266 PullRequest, PullRequestVersion), pull_request)
266
267
267 def _check_perms(self, perms, pull_request, user, api=False):
268 def _check_perms(self, perms, pull_request, user, api=False):
268 if not api:
269 if not api:
269 return h.HasRepoPermissionAny(*perms)(
270 return h.HasRepoPermissionAny(*perms)(
270 user=user, repo_name=pull_request.target_repo.repo_name)
271 user=user, repo_name=pull_request.target_repo.repo_name)
271 else:
272 else:
272 return h.HasRepoPermissionAnyApi(*perms)(
273 return h.HasRepoPermissionAnyApi(*perms)(
273 user=user, repo_name=pull_request.target_repo.repo_name)
274 user=user, repo_name=pull_request.target_repo.repo_name)
274
275
275 def check_user_read(self, pull_request, user, api=False):
276 def check_user_read(self, pull_request, user, api=False):
276 _perms = ('repository.admin', 'repository.write', 'repository.read',)
277 _perms = ('repository.admin', 'repository.write', 'repository.read',)
277 return self._check_perms(_perms, pull_request, user, api)
278 return self._check_perms(_perms, pull_request, user, api)
278
279
279 def check_user_merge(self, pull_request, user, api=False):
280 def check_user_merge(self, pull_request, user, api=False):
280 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
281 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
281 return self._check_perms(_perms, pull_request, user, api)
282 return self._check_perms(_perms, pull_request, user, api)
282
283
283 def check_user_update(self, pull_request, user, api=False):
284 def check_user_update(self, pull_request, user, api=False):
284 owner = user.user_id == pull_request.user_id
285 owner = user.user_id == pull_request.user_id
285 return self.check_user_merge(pull_request, user, api) or owner
286 return self.check_user_merge(pull_request, user, api) or owner
286
287
287 def check_user_delete(self, pull_request, user):
288 def check_user_delete(self, pull_request, user):
288 owner = user.user_id == pull_request.user_id
289 owner = user.user_id == pull_request.user_id
289 _perms = ('repository.admin',)
290 _perms = ('repository.admin',)
290 return self._check_perms(_perms, pull_request, user) or owner
291 return self._check_perms(_perms, pull_request, user) or owner
291
292
292 def is_user_reviewer(self, pull_request, user):
293 def is_user_reviewer(self, pull_request, user):
293 return user.user_id in [
294 return user.user_id in [
294 x.user_id for x in
295 x.user_id for x in
295 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
296 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
296 if x.user
297 if x.user
297 ]
298 ]
298
299
299 def check_user_change_status(self, pull_request, user, api=False):
300 def check_user_change_status(self, pull_request, user, api=False):
300 return self.check_user_update(pull_request, user, api) \
301 return self.check_user_update(pull_request, user, api) \
301 or self.is_user_reviewer(pull_request, user)
302 or self.is_user_reviewer(pull_request, user)
302
303
303 def check_user_comment(self, pull_request, user):
304 def check_user_comment(self, pull_request, user):
304 owner = user.user_id == pull_request.user_id
305 owner = user.user_id == pull_request.user_id
305 return self.check_user_read(pull_request, user) or owner
306 return self.check_user_read(pull_request, user) or owner
306
307
307 def get(self, pull_request):
308 def get(self, pull_request):
308 return self.__get_pull_request(pull_request)
309 return self.__get_pull_request(pull_request)
309
310
310 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
311 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
311 statuses=None, opened_by=None, order_by=None,
312 statuses=None, opened_by=None, order_by=None,
312 order_dir='desc', only_created=False):
313 order_dir='desc', only_created=False):
313 repo = None
314 repo = None
314 if repo_name:
315 if repo_name:
315 repo = self._get_repo(repo_name)
316 repo = self._get_repo(repo_name)
316
317
317 q = PullRequest.query()
318 q = PullRequest.query()
318
319
319 if search_q:
320 if search_q:
320 like_expression = u'%{}%'.format(safe_unicode(search_q))
321 like_expression = u'%{}%'.format(safe_str(search_q))
321 q = q.join(User, User.user_id == PullRequest.user_id)
322 q = q.join(User, User.user_id == PullRequest.user_id)
322 q = q.filter(or_(
323 q = q.filter(or_(
323 cast(PullRequest.pull_request_id, String).ilike(like_expression),
324 cast(PullRequest.pull_request_id, String).ilike(like_expression),
324 User.username.ilike(like_expression),
325 User.username.ilike(like_expression),
325 PullRequest.title.ilike(like_expression),
326 PullRequest.title.ilike(like_expression),
326 PullRequest.description.ilike(like_expression),
327 PullRequest.description.ilike(like_expression),
327 ))
328 ))
328
329
329 # source or target
330 # source or target
330 if repo and source:
331 if repo and source:
331 q = q.filter(PullRequest.source_repo == repo)
332 q = q.filter(PullRequest.source_repo == repo)
332 elif repo:
333 elif repo:
333 q = q.filter(PullRequest.target_repo == repo)
334 q = q.filter(PullRequest.target_repo == repo)
334
335
335 # closed,opened
336 # closed,opened
336 if statuses:
337 if statuses:
337 q = q.filter(PullRequest.status.in_(statuses))
338 q = q.filter(PullRequest.status.in_(statuses))
338
339
339 # opened by filter
340 # opened by filter
340 if opened_by:
341 if opened_by:
341 q = q.filter(PullRequest.user_id.in_(opened_by))
342 q = q.filter(PullRequest.user_id.in_(opened_by))
342
343
343 # only get those that are in "created" state
344 # only get those that are in "created" state
344 if only_created:
345 if only_created:
345 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
346 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
346
347
347 order_map = {
348 order_map = {
348 'name_raw': PullRequest.pull_request_id,
349 'name_raw': PullRequest.pull_request_id,
349 'id': PullRequest.pull_request_id,
350 'id': PullRequest.pull_request_id,
350 'title': PullRequest.title,
351 'title': PullRequest.title,
351 'updated_on_raw': PullRequest.updated_on,
352 'updated_on_raw': PullRequest.updated_on,
352 'target_repo': PullRequest.target_repo_id
353 'target_repo': PullRequest.target_repo_id
353 }
354 }
354 if order_by and order_by in order_map:
355 if order_by and order_by in order_map:
355 if order_dir == 'asc':
356 if order_dir == 'asc':
356 q = q.order_by(order_map[order_by].asc())
357 q = q.order_by(order_map[order_by].asc())
357 else:
358 else:
358 q = q.order_by(order_map[order_by].desc())
359 q = q.order_by(order_map[order_by].desc())
359
360
360 return q
361 return q
361
362
362 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
363 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
363 opened_by=None):
364 opened_by=None):
364 """
365 """
365 Count the number of pull requests for a specific repository.
366 Count the number of pull requests for a specific repository.
366
367
367 :param repo_name: target or source repo
368 :param repo_name: target or source repo
368 :param search_q: filter by text
369 :param search_q: filter by text
369 :param source: boolean flag to specify if repo_name refers to source
370 :param source: boolean flag to specify if repo_name refers to source
370 :param statuses: list of pull request statuses
371 :param statuses: list of pull request statuses
371 :param opened_by: author user of the pull request
372 :param opened_by: author user of the pull request
372 :returns: int number of pull requests
373 :returns: int number of pull requests
373 """
374 """
374 q = self._prepare_get_all_query(
375 q = self._prepare_get_all_query(
375 repo_name, search_q=search_q, source=source, statuses=statuses,
376 repo_name, search_q=search_q, source=source, statuses=statuses,
376 opened_by=opened_by)
377 opened_by=opened_by)
377
378
378 return q.count()
379 return q.count()
379
380
380 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
381 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
381 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
382 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
382 """
383 """
383 Get all pull requests for a specific repository.
384 Get all pull requests for a specific repository.
384
385
385 :param repo_name: target or source repo
386 :param repo_name: target or source repo
386 :param search_q: filter by text
387 :param search_q: filter by text
387 :param source: boolean flag to specify if repo_name refers to source
388 :param source: boolean flag to specify if repo_name refers to source
388 :param statuses: list of pull request statuses
389 :param statuses: list of pull request statuses
389 :param opened_by: author user of the pull request
390 :param opened_by: author user of the pull request
390 :param offset: pagination offset
391 :param offset: pagination offset
391 :param length: length of returned list
392 :param length: length of returned list
392 :param order_by: order of the returned list
393 :param order_by: order of the returned list
393 :param order_dir: 'asc' or 'desc' ordering direction
394 :param order_dir: 'asc' or 'desc' ordering direction
394 :returns: list of pull requests
395 :returns: list of pull requests
395 """
396 """
396 q = self._prepare_get_all_query(
397 q = self._prepare_get_all_query(
397 repo_name, search_q=search_q, source=source, statuses=statuses,
398 repo_name, search_q=search_q, source=source, statuses=statuses,
398 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
399 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
399
400
400 if length:
401 if length:
401 pull_requests = q.limit(length).offset(offset).all()
402 pull_requests = q.limit(length).offset(offset).all()
402 else:
403 else:
403 pull_requests = q.all()
404 pull_requests = q.all()
404
405
405 return pull_requests
406 return pull_requests
406
407
407 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
408 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
408 """
409 """
409 Count the number of pull requests for a specific repository that are
410 Count the number of pull requests for a specific repository that are
410 awaiting review.
411 awaiting review.
411
412
412 :param repo_name: target or source repo
413 :param repo_name: target or source repo
413 :param search_q: filter by text
414 :param search_q: filter by text
414 :param statuses: list of pull request statuses
415 :param statuses: list of pull request statuses
415 :returns: int number of pull requests
416 :returns: int number of pull requests
416 """
417 """
417 pull_requests = self.get_awaiting_review(
418 pull_requests = self.get_awaiting_review(
418 repo_name, search_q=search_q, statuses=statuses)
419 repo_name, search_q=search_q, statuses=statuses)
419
420
420 return len(pull_requests)
421 return len(pull_requests)
421
422
422 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
423 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
423 offset=0, length=None, order_by=None, order_dir='desc'):
424 offset=0, length=None, order_by=None, order_dir='desc'):
424 """
425 """
425 Get all pull requests for a specific repository that are awaiting
426 Get all pull requests for a specific repository that are awaiting
426 review.
427 review.
427
428
428 :param repo_name: target or source repo
429 :param repo_name: target or source repo
429 :param search_q: filter by text
430 :param search_q: filter by text
430 :param statuses: list of pull request statuses
431 :param statuses: list of pull request statuses
431 :param offset: pagination offset
432 :param offset: pagination offset
432 :param length: length of returned list
433 :param length: length of returned list
433 :param order_by: order of the returned list
434 :param order_by: order of the returned list
434 :param order_dir: 'asc' or 'desc' ordering direction
435 :param order_dir: 'asc' or 'desc' ordering direction
435 :returns: list of pull requests
436 :returns: list of pull requests
436 """
437 """
437 pull_requests = self.get_all(
438 pull_requests = self.get_all(
438 repo_name, search_q=search_q, statuses=statuses,
439 repo_name, search_q=search_q, statuses=statuses,
439 order_by=order_by, order_dir=order_dir)
440 order_by=order_by, order_dir=order_dir)
440
441
441 _filtered_pull_requests = []
442 _filtered_pull_requests = []
442 for pr in pull_requests:
443 for pr in pull_requests:
443 status = pr.calculated_review_status()
444 status = pr.calculated_review_status()
444 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
445 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
445 ChangesetStatus.STATUS_UNDER_REVIEW]:
446 ChangesetStatus.STATUS_UNDER_REVIEW]:
446 _filtered_pull_requests.append(pr)
447 _filtered_pull_requests.append(pr)
447 if length:
448 if length:
448 return _filtered_pull_requests[offset:offset+length]
449 return _filtered_pull_requests[offset:offset+length]
449 else:
450 else:
450 return _filtered_pull_requests
451 return _filtered_pull_requests
451
452
452 def _prepare_awaiting_my_review_review_query(
453 def _prepare_awaiting_my_review_review_query(
453 self, repo_name, user_id, search_q=None, statuses=None,
454 self, repo_name, user_id, search_q=None, statuses=None,
454 order_by=None, order_dir='desc'):
455 order_by=None, order_dir='desc'):
455
456
456 for_review_statuses = [
457 for_review_statuses = [
457 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
458 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
458 ]
459 ]
459
460
460 pull_request_alias = aliased(PullRequest)
461 pull_request_alias = aliased(PullRequest)
461 status_alias = aliased(ChangesetStatus)
462 status_alias = aliased(ChangesetStatus)
462 reviewers_alias = aliased(PullRequestReviewers)
463 reviewers_alias = aliased(PullRequestReviewers)
463 repo_alias = aliased(Repository)
464 repo_alias = aliased(Repository)
464
465
465 last_ver_subq = Session()\
466 last_ver_subq = Session()\
466 .query(func.min(ChangesetStatus.version)) \
467 .query(func.min(ChangesetStatus.version)) \
467 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
468 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
468 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
469 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
469 .subquery()
470 .subquery()
470
471
471 q = Session().query(pull_request_alias) \
472 q = Session().query(pull_request_alias) \
472 .options(lazyload(pull_request_alias.author)) \
473 .options(lazyload(pull_request_alias.author)) \
473 .join(reviewers_alias,
474 .join(reviewers_alias,
474 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
475 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
475 .join(repo_alias,
476 .join(repo_alias,
476 repo_alias.repo_id == pull_request_alias.target_repo_id) \
477 repo_alias.repo_id == pull_request_alias.target_repo_id) \
477 .outerjoin(status_alias,
478 .outerjoin(status_alias,
478 and_(status_alias.user_id == reviewers_alias.user_id,
479 and_(status_alias.user_id == reviewers_alias.user_id,
479 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
480 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
480 .filter(or_(status_alias.version == null(),
481 .filter(or_(status_alias.version == null(),
481 status_alias.version == last_ver_subq)) \
482 status_alias.version == last_ver_subq)) \
482 .filter(reviewers_alias.user_id == user_id) \
483 .filter(reviewers_alias.user_id == user_id) \
483 .filter(repo_alias.repo_name == repo_name) \
484 .filter(repo_alias.repo_name == repo_name) \
484 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
485 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
485 .group_by(pull_request_alias)
486 .group_by(pull_request_alias)
486
487
487 # closed,opened
488 # closed,opened
488 if statuses:
489 if statuses:
489 q = q.filter(pull_request_alias.status.in_(statuses))
490 q = q.filter(pull_request_alias.status.in_(statuses))
490
491
491 if search_q:
492 if search_q:
492 like_expression = u'%{}%'.format(safe_unicode(search_q))
493 like_expression = u'%{}%'.format(safe_str(search_q))
493 q = q.join(User, User.user_id == pull_request_alias.user_id)
494 q = q.join(User, User.user_id == pull_request_alias.user_id)
494 q = q.filter(or_(
495 q = q.filter(or_(
495 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
496 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
496 User.username.ilike(like_expression),
497 User.username.ilike(like_expression),
497 pull_request_alias.title.ilike(like_expression),
498 pull_request_alias.title.ilike(like_expression),
498 pull_request_alias.description.ilike(like_expression),
499 pull_request_alias.description.ilike(like_expression),
499 ))
500 ))
500
501
501 order_map = {
502 order_map = {
502 'name_raw': pull_request_alias.pull_request_id,
503 'name_raw': pull_request_alias.pull_request_id,
503 'title': pull_request_alias.title,
504 'title': pull_request_alias.title,
504 'updated_on_raw': pull_request_alias.updated_on,
505 'updated_on_raw': pull_request_alias.updated_on,
505 'target_repo': pull_request_alias.target_repo_id
506 'target_repo': pull_request_alias.target_repo_id
506 }
507 }
507 if order_by and order_by in order_map:
508 if order_by and order_by in order_map:
508 if order_dir == 'asc':
509 if order_dir == 'asc':
509 q = q.order_by(order_map[order_by].asc())
510 q = q.order_by(order_map[order_by].asc())
510 else:
511 else:
511 q = q.order_by(order_map[order_by].desc())
512 q = q.order_by(order_map[order_by].desc())
512
513
513 return q
514 return q
514
515
515 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
516 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
516 """
517 """
517 Count the number of pull requests for a specific repository that are
518 Count the number of pull requests for a specific repository that are
518 awaiting review from a specific user.
519 awaiting review from a specific user.
519
520
520 :param repo_name: target or source repo
521 :param repo_name: target or source repo
521 :param user_id: reviewer user of the pull request
522 :param user_id: reviewer user of the pull request
522 :param search_q: filter by text
523 :param search_q: filter by text
523 :param statuses: list of pull request statuses
524 :param statuses: list of pull request statuses
524 :returns: int number of pull requests
525 :returns: int number of pull requests
525 """
526 """
526 q = self._prepare_awaiting_my_review_review_query(
527 q = self._prepare_awaiting_my_review_review_query(
527 repo_name, user_id, search_q=search_q, statuses=statuses)
528 repo_name, user_id, search_q=search_q, statuses=statuses)
528 return q.count()
529 return q.count()
529
530
530 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
531 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
531 offset=0, length=None, order_by=None, order_dir='desc'):
532 offset=0, length=None, order_by=None, order_dir='desc'):
532 """
533 """
533 Get all pull requests for a specific repository that are awaiting
534 Get all pull requests for a specific repository that are awaiting
534 review from a specific user.
535 review from a specific user.
535
536
536 :param repo_name: target or source repo
537 :param repo_name: target or source repo
537 :param user_id: reviewer user of the pull request
538 :param user_id: reviewer user of the pull request
538 :param search_q: filter by text
539 :param search_q: filter by text
539 :param statuses: list of pull request statuses
540 :param statuses: list of pull request statuses
540 :param offset: pagination offset
541 :param offset: pagination offset
541 :param length: length of returned list
542 :param length: length of returned list
542 :param order_by: order of the returned list
543 :param order_by: order of the returned list
543 :param order_dir: 'asc' or 'desc' ordering direction
544 :param order_dir: 'asc' or 'desc' ordering direction
544 :returns: list of pull requests
545 :returns: list of pull requests
545 """
546 """
546
547
547 q = self._prepare_awaiting_my_review_review_query(
548 q = self._prepare_awaiting_my_review_review_query(
548 repo_name, user_id, search_q=search_q, statuses=statuses,
549 repo_name, user_id, search_q=search_q, statuses=statuses,
549 order_by=order_by, order_dir=order_dir)
550 order_by=order_by, order_dir=order_dir)
550
551
551 if length:
552 if length:
552 pull_requests = q.limit(length).offset(offset).all()
553 pull_requests = q.limit(length).offset(offset).all()
553 else:
554 else:
554 pull_requests = q.all()
555 pull_requests = q.all()
555
556
556 return pull_requests
557 return pull_requests
557
558
558 def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
559 def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
559 order_by=None, order_dir='desc'):
560 order_by=None, order_dir='desc'):
560 """
561 """
561 return a query of pull-requests user is an creator, or he's added as a reviewer
562 return a query of pull-requests user is an creator, or he's added as a reviewer
562 """
563 """
563 q = PullRequest.query()
564 q = PullRequest.query()
564 if user_id:
565 if user_id:
565 reviewers_subquery = Session().query(
566
566 PullRequestReviewers.pull_request_id).filter(
567 base_query = select(PullRequestReviewers)\
567 PullRequestReviewers.user_id == user_id).subquery()
568 .where(PullRequestReviewers.user_id == user_id)\
569 .with_only_columns(PullRequestReviewers.pull_request_id)
570
568 user_filter = or_(
571 user_filter = or_(
569 PullRequest.user_id == user_id,
572 PullRequest.user_id == user_id,
570 PullRequest.pull_request_id.in_(reviewers_subquery)
573 PullRequest.pull_request_id.in_(base_query)
571 )
574 )
572 q = PullRequest.query().filter(user_filter)
575 q = PullRequest.query().filter(user_filter)
573
576
574 # closed,opened
577 # closed,opened
575 if statuses:
578 if statuses:
576 q = q.filter(PullRequest.status.in_(statuses))
579 q = q.filter(PullRequest.status.in_(statuses))
577
580
578 if query:
581 if query:
579 like_expression = u'%{}%'.format(safe_unicode(query))
582 like_expression = u'%{}%'.format(safe_str(query))
580 q = q.join(User, User.user_id == PullRequest.user_id)
583 q = q.join(User, User.user_id == PullRequest.user_id)
581 q = q.filter(or_(
584 q = q.filter(or_(
582 cast(PullRequest.pull_request_id, String).ilike(like_expression),
585 cast(PullRequest.pull_request_id, String).ilike(like_expression),
583 User.username.ilike(like_expression),
586 User.username.ilike(like_expression),
584 PullRequest.title.ilike(like_expression),
587 PullRequest.title.ilike(like_expression),
585 PullRequest.description.ilike(like_expression),
588 PullRequest.description.ilike(like_expression),
586 ))
589 ))
587
590
588 order_map = {
591 order_map = {
589 'name_raw': PullRequest.pull_request_id,
592 'name_raw': PullRequest.pull_request_id,
590 'title': PullRequest.title,
593 'title': PullRequest.title,
591 'updated_on_raw': PullRequest.updated_on,
594 'updated_on_raw': PullRequest.updated_on,
592 'target_repo': PullRequest.target_repo_id
595 'target_repo': PullRequest.target_repo_id
593 }
596 }
594 if order_by and order_by in order_map:
597 if order_by and order_by in order_map:
595 if order_dir == 'asc':
598 if order_dir == 'asc':
596 q = q.order_by(order_map[order_by].asc())
599 q = q.order_by(order_map[order_by].asc())
597 else:
600 else:
598 q = q.order_by(order_map[order_by].desc())
601 q = q.order_by(order_map[order_by].desc())
599
602
600 return q
603 return q
601
604
602 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
605 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
603 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
606 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
604 return q.count()
607 return q.count()
605
608
606 def get_im_participating_in(
609 def get_im_participating_in(
607 self, user_id=None, statuses=None, query='', offset=0,
610 self, user_id=None, statuses=None, query='', offset=0,
608 length=None, order_by=None, order_dir='desc'):
611 length=None, order_by=None, order_dir='desc'):
609 """
612 """
610 Get all Pull requests that i'm participating in as a reviewer, or i have opened
613 Get all Pull requests that i'm participating in as a reviewer, or i have opened
611 """
614 """
612
615
613 q = self._prepare_im_participating_query(
616 q = self._prepare_im_participating_query(
614 user_id, statuses=statuses, query=query, order_by=order_by,
617 user_id, statuses=statuses, query=query, order_by=order_by,
615 order_dir=order_dir)
618 order_dir=order_dir)
616
619
617 if length:
620 if length:
618 pull_requests = q.limit(length).offset(offset).all()
621 pull_requests = q.limit(length).offset(offset).all()
619 else:
622 else:
620 pull_requests = q.all()
623 pull_requests = q.all()
621
624
622 return pull_requests
625 return pull_requests
623
626
624 def _prepare_participating_in_for_review_query(
627 def _prepare_participating_in_for_review_query(
625 self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
628 self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
626
629
627 for_review_statuses = [
630 for_review_statuses = [
628 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
631 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
629 ]
632 ]
630
633
631 pull_request_alias = aliased(PullRequest)
634 pull_request_alias = aliased(PullRequest)
632 status_alias = aliased(ChangesetStatus)
635 status_alias = aliased(ChangesetStatus)
633 reviewers_alias = aliased(PullRequestReviewers)
636 reviewers_alias = aliased(PullRequestReviewers)
634
637
635 last_ver_subq = Session()\
638 last_ver_subq = Session()\
636 .query(func.min(ChangesetStatus.version)) \
639 .query(func.min(ChangesetStatus.version)) \
637 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
640 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
638 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
641 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
639 .subquery()
642 .subquery()
640
643
641 q = Session().query(pull_request_alias) \
644 q = Session().query(pull_request_alias) \
642 .options(lazyload(pull_request_alias.author)) \
645 .options(lazyload(pull_request_alias.author)) \
643 .join(reviewers_alias,
646 .join(reviewers_alias,
644 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
647 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
645 .outerjoin(status_alias,
648 .outerjoin(status_alias,
646 and_(status_alias.user_id == reviewers_alias.user_id,
649 and_(status_alias.user_id == reviewers_alias.user_id,
647 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
650 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
648 .filter(or_(status_alias.version == null(),
651 .filter(or_(status_alias.version == null(),
649 status_alias.version == last_ver_subq)) \
652 status_alias.version == last_ver_subq)) \
650 .filter(reviewers_alias.user_id == user_id) \
653 .filter(reviewers_alias.user_id == user_id) \
651 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
654 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
652 .group_by(pull_request_alias)
655 .group_by(pull_request_alias)
653
656
654 # closed,opened
657 # closed,opened
655 if statuses:
658 if statuses:
656 q = q.filter(pull_request_alias.status.in_(statuses))
659 q = q.filter(pull_request_alias.status.in_(statuses))
657
660
658 if query:
661 if query:
659 like_expression = u'%{}%'.format(safe_unicode(query))
662 like_expression = u'%{}%'.format(safe_str(query))
660 q = q.join(User, User.user_id == pull_request_alias.user_id)
663 q = q.join(User, User.user_id == pull_request_alias.user_id)
661 q = q.filter(or_(
664 q = q.filter(or_(
662 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
665 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
663 User.username.ilike(like_expression),
666 User.username.ilike(like_expression),
664 pull_request_alias.title.ilike(like_expression),
667 pull_request_alias.title.ilike(like_expression),
665 pull_request_alias.description.ilike(like_expression),
668 pull_request_alias.description.ilike(like_expression),
666 ))
669 ))
667
670
668 order_map = {
671 order_map = {
669 'name_raw': pull_request_alias.pull_request_id,
672 'name_raw': pull_request_alias.pull_request_id,
670 'title': pull_request_alias.title,
673 'title': pull_request_alias.title,
671 'updated_on_raw': pull_request_alias.updated_on,
674 'updated_on_raw': pull_request_alias.updated_on,
672 'target_repo': pull_request_alias.target_repo_id
675 'target_repo': pull_request_alias.target_repo_id
673 }
676 }
674 if order_by and order_by in order_map:
677 if order_by and order_by in order_map:
675 if order_dir == 'asc':
678 if order_dir == 'asc':
676 q = q.order_by(order_map[order_by].asc())
679 q = q.order_by(order_map[order_by].asc())
677 else:
680 else:
678 q = q.order_by(order_map[order_by].desc())
681 q = q.order_by(order_map[order_by].desc())
679
682
680 return q
683 return q
681
684
682 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
685 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
683 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
686 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
684 return q.count()
687 return q.count()
685
688
686 def get_im_participating_in_for_review(
689 def get_im_participating_in_for_review(
687 self, user_id, statuses=None, query='', offset=0,
690 self, user_id, statuses=None, query='', offset=0,
688 length=None, order_by=None, order_dir='desc'):
691 length=None, order_by=None, order_dir='desc'):
689 """
692 """
690 Get all Pull requests that needs user approval or rejection
693 Get all Pull requests that needs user approval or rejection
691 """
694 """
692
695
693 q = self._prepare_participating_in_for_review_query(
696 q = self._prepare_participating_in_for_review_query(
694 user_id, statuses=statuses, query=query, order_by=order_by,
697 user_id, statuses=statuses, query=query, order_by=order_by,
695 order_dir=order_dir)
698 order_dir=order_dir)
696
699
697 if length:
700 if length:
698 pull_requests = q.limit(length).offset(offset).all()
701 pull_requests = q.limit(length).offset(offset).all()
699 else:
702 else:
700 pull_requests = q.all()
703 pull_requests = q.all()
701
704
702 return pull_requests
705 return pull_requests
703
706
704 def get_versions(self, pull_request):
707 def get_versions(self, pull_request):
705 """
708 """
706 returns version of pull request sorted by ID descending
709 returns version of pull request sorted by ID descending
707 """
710 """
708 return PullRequestVersion.query()\
711 return PullRequestVersion.query()\
709 .filter(PullRequestVersion.pull_request == pull_request)\
712 .filter(PullRequestVersion.pull_request == pull_request)\
710 .order_by(PullRequestVersion.pull_request_version_id.asc())\
713 .order_by(PullRequestVersion.pull_request_version_id.asc())\
711 .all()
714 .all()
712
715
713 def get_pr_version(self, pull_request_id, version=None):
716 def get_pr_version(self, pull_request_id, version=None):
714 at_version = None
717 at_version = None
715
718
716 if version and version == 'latest':
719 if version and version == 'latest':
717 pull_request_ver = PullRequest.get(pull_request_id)
720 pull_request_ver = PullRequest.get(pull_request_id)
718 pull_request_obj = pull_request_ver
721 pull_request_obj = pull_request_ver
719 _org_pull_request_obj = pull_request_obj
722 _org_pull_request_obj = pull_request_obj
720 at_version = 'latest'
723 at_version = 'latest'
721 elif version:
724 elif version:
722 pull_request_ver = PullRequestVersion.get_or_404(version)
725 pull_request_ver = PullRequestVersion.get_or_404(version)
723 pull_request_obj = pull_request_ver
726 pull_request_obj = pull_request_ver
724 _org_pull_request_obj = pull_request_ver.pull_request
727 _org_pull_request_obj = pull_request_ver.pull_request
725 at_version = pull_request_ver.pull_request_version_id
728 at_version = pull_request_ver.pull_request_version_id
726 else:
729 else:
727 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
730 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
728 pull_request_id)
731 pull_request_id)
729
732
730 pull_request_display_obj = PullRequest.get_pr_display_object(
733 pull_request_display_obj = PullRequest.get_pr_display_object(
731 pull_request_obj, _org_pull_request_obj)
734 pull_request_obj, _org_pull_request_obj)
732
735
733 return _org_pull_request_obj, pull_request_obj, \
736 return _org_pull_request_obj, pull_request_obj, \
734 pull_request_display_obj, at_version
737 pull_request_display_obj, at_version
735
738
736 def pr_commits_versions(self, versions):
739 def pr_commits_versions(self, versions):
737 """
740 """
738 Maps the pull-request commits into all known PR versions. This way we can obtain
741 Maps the pull-request commits into all known PR versions. This way we can obtain
739 each pr version the commit was introduced in.
742 each pr version the commit was introduced in.
740 """
743 """
741 commit_versions = collections.defaultdict(list)
744 commit_versions = collections.defaultdict(list)
742 num_versions = [x.pull_request_version_id for x in versions]
745 num_versions = [x.pull_request_version_id for x in versions]
743 for ver in versions:
746 for ver in versions:
744 for commit_id in ver.revisions:
747 for commit_id in ver.revisions:
745 ver_idx = ChangesetComment.get_index_from_version(
748 ver_idx = ChangesetComment.get_index_from_version(
746 ver.pull_request_version_id, num_versions=num_versions)
749 ver.pull_request_version_id, num_versions=num_versions)
747 commit_versions[commit_id].append(ver_idx)
750 commit_versions[commit_id].append(ver_idx)
748 return commit_versions
751 return commit_versions
749
752
    def create(self, created_by, source_repo, source_ref, target_repo,
               target_ref, revisions, reviewers, observers, title, description=None,
               common_ancestor_id=None,
               description_renderer=None,
               reviewer_data=None, translator=None, auth_user=None):
        """
        Create a new pull request with reviewers/observers, set initial
        commit statuses, and run an initial merge simulation.

        :param created_by: user (or user id) creating the pull request
        :param source_repo: source repository (object, id or name)
        :param source_ref: source reference string
        :param target_repo: target repository (object, id or name)
        :param target_ref: target reference string
        :param revisions: list of commit ids included in the pull request
        :param reviewers: iterable of (user_id, reasons, mandatory, role, rules)
        :param observers: iterable shaped like ``reviewers``
        :param title: pull request title
        :param description: optional description text
        :param common_ancestor_id: optional pre-computed ancestor commit id
        :param description_renderer: renderer used for the description
        :param reviewer_data: raw reviewer rule data stored on the PR
        :param translator: translation function; defaults to current request's
        :param auth_user: acting AuthUser; defaults to the creator's
        :return: the created :class:`PullRequest`
        """
        translator = translator or get_current_request().translate

        created_by_user = self._get_user(created_by)
        auth_user = auth_user or created_by_user.AuthUser()
        source_repo = self._get_repo(source_repo)
        target_repo = self._get_repo(target_repo)

        pull_request = PullRequest()
        pull_request.source_repo = source_repo
        pull_request.source_ref = source_ref
        pull_request.target_repo = target_repo
        pull_request.target_ref = target_ref
        pull_request.revisions = revisions
        pull_request.title = title
        pull_request.description = description
        pull_request.description_renderer = description_renderer
        pull_request.author = created_by_user
        pull_request.reviewer_data = reviewer_data
        # start in CREATING; flipped to CREATED after merge simulation below
        pull_request.pull_request_state = pull_request.STATE_CREATING
        pull_request.common_ancestor_id = common_ancestor_id

        # flush so pull_request gets its id for the reviewer rows below
        Session().add(pull_request)
        Session().flush()

        reviewer_ids = set()
        # members / reviewers
        for reviewer_object in reviewers:
            user_id, reasons, mandatory, role, rules = reviewer_object
            user = self._get_user(user_id)

            # skip duplicates
            if user.user_id in reviewer_ids:
                continue

            reviewer_ids.add(user.user_id)

            reviewer = PullRequestReviewers()
            reviewer.user = user
            reviewer.pull_request = pull_request
            reviewer.reasons = reasons
            reviewer.mandatory = mandatory
            reviewer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                review_group = rule.user_group_vote_rule(user_id)
                # we check if this particular reviewer is member of a voting group
                if review_group:
                    # NOTE(marcink):
                    # can be that user is member of more but we pick the first same,
                    # same as default reviewers algo
                    review_group = review_group[0]

                    rule_data = {
                        'rule_name':
                            rule.review_rule_name,
                        'rule_user_group_entry_id':
                            review_group.repo_review_rule_users_group_id,
                        'rule_user_group_name':
                            review_group.users_group.users_group_name,
                        'rule_user_group_members':
                            [x.user.username for x in review_group.users_group.members],
                        'rule_user_group_members_id':
                            [x.user.user_id for x in review_group.users_group.members],
                    }
                    # e.g {'vote_rule': -1, 'mandatory': True}
                    rule_data.update(review_group.rule_data())

                    reviewer.rule_data = rule_data

            Session().add(reviewer)
            Session().flush()

        # observers are stored in the same table, distinguished by role
        for observer_object in observers:
            user_id, reasons, mandatory, role, rules = observer_object
            user = self._get_user(user_id)

            # skip duplicates from reviewers
            if user.user_id in reviewer_ids:
                continue

            #reviewer_ids.add(user.user_id)

            observer = PullRequestReviewers()
            observer.user = user
            observer.pull_request = pull_request
            observer.reasons = reasons
            observer.mandatory = mandatory
            observer.role = role

            # NOTE(marcink): pick only first rule for now
            rule_id = list(rules)[0] if rules else None
            rule = RepoReviewRule.get(rule_id) if rule_id else None
            if rule:
                # TODO(marcink): do we need this for observers ??
                pass

            Session().add(observer)
            Session().flush()

        # Set approval status to "Under Review" for all commits which are
        # part of this pull request.
        ChangesetStatusModel().set_status(
            repo=target_repo,
            status=ChangesetStatus.STATUS_UNDER_REVIEW,
            user=created_by_user,
            pull_request=pull_request
        )
        # we commit early at this point. This has to do with a fact
        # that before queries do some row-locking. And because of that
        # we need to commit and finish transaction before below validate call
        # that for large repos could be long resulting in long row locks
        Session().commit()

        # prepare workspace, and run initial merge simulation. Set state during that
        # operation
        pull_request = PullRequest.get(pull_request.pull_request_id)

        # set as merging, for merge simulation, and if finished to created so we mark
        # simulation is working fine
        with pull_request.set_state(PullRequest.STATE_MERGING,
                                    final_state=PullRequest.STATE_CREATED) as state_obj:
            # NOTE(review): state_obj is unused; kept for the context-manager protocol
            MergeCheck.validate(
                pull_request, auth_user=auth_user, translator=translator)

        self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
        self.trigger_pull_request_hook(pull_request, created_by_user, 'create')

        creation_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.create', {'data': creation_data},
            auth_user, pull_request)

        return pull_request
891
894
892 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
895 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
893 pull_request = self.__get_pull_request(pull_request)
896 pull_request = self.__get_pull_request(pull_request)
894 target_scm = pull_request.target_repo.scm_instance()
897 target_scm = pull_request.target_repo.scm_instance()
895 if action == 'create':
898 if action == 'create':
896 trigger_hook = hooks_utils.trigger_create_pull_request_hook
899 trigger_hook = hooks_utils.trigger_create_pull_request_hook
897 elif action == 'merge':
900 elif action == 'merge':
898 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
901 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
899 elif action == 'close':
902 elif action == 'close':
900 trigger_hook = hooks_utils.trigger_close_pull_request_hook
903 trigger_hook = hooks_utils.trigger_close_pull_request_hook
901 elif action == 'review_status_change':
904 elif action == 'review_status_change':
902 trigger_hook = hooks_utils.trigger_review_pull_request_hook
905 trigger_hook = hooks_utils.trigger_review_pull_request_hook
903 elif action == 'update':
906 elif action == 'update':
904 trigger_hook = hooks_utils.trigger_update_pull_request_hook
907 trigger_hook = hooks_utils.trigger_update_pull_request_hook
905 elif action == 'comment':
908 elif action == 'comment':
906 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
909 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
907 elif action == 'comment_edit':
910 elif action == 'comment_edit':
908 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
911 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
909 else:
912 else:
910 return
913 return
911
914
912 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
915 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
913 pull_request, action, trigger_hook)
916 pull_request, action, trigger_hook)
914 trigger_hook(
917 trigger_hook(
915 username=user.username,
918 username=user.username,
916 repo_name=pull_request.target_repo.repo_name,
919 repo_name=pull_request.target_repo.repo_name,
917 repo_type=target_scm.alias,
920 repo_type=target_scm.alias,
918 pull_request=pull_request,
921 pull_request=pull_request,
919 data=data)
922 data=data)
920
923
921 def _get_commit_ids(self, pull_request):
924 def _get_commit_ids(self, pull_request):
922 """
925 """
923 Return the commit ids of the merged pull request.
926 Return the commit ids of the merged pull request.
924
927
925 This method is not dealing correctly yet with the lack of autoupdates
928 This method is not dealing correctly yet with the lack of autoupdates
926 nor with the implicit target updates.
929 nor with the implicit target updates.
927 For example: if a commit in the source repo is already in the target it
930 For example: if a commit in the source repo is already in the target it
928 will be reported anyways.
931 will be reported anyways.
929 """
932 """
930 merge_rev = pull_request.merge_rev
933 merge_rev = pull_request.merge_rev
931 if merge_rev is None:
934 if merge_rev is None:
932 raise ValueError('This pull request was not merged yet')
935 raise ValueError('This pull request was not merged yet')
933
936
934 commit_ids = list(pull_request.revisions)
937 commit_ids = list(pull_request.revisions)
935 if merge_rev not in commit_ids:
938 if merge_rev not in commit_ids:
936 commit_ids.append(merge_rev)
939 commit_ids.append(merge_rev)
937
940
938 return commit_ids
941 return commit_ids
939
942
940 def merge_repo(self, pull_request, user, extras):
943 def merge_repo(self, pull_request, user, extras):
941 repo_type = pull_request.source_repo.repo_type
944 repo_type = pull_request.source_repo.repo_type
942 log.debug("Merging pull request %s", pull_request.pull_request_id)
945 log.debug("Merging pull request %s", pull_request)
946
943 extras['user_agent'] = '{}/internal-merge'.format(repo_type)
947 extras['user_agent'] = '{}/internal-merge'.format(repo_type)
944 merge_state = self._merge_pull_request(pull_request, user, extras)
948 merge_state = self._merge_pull_request(pull_request, user, extras)
945 if merge_state.executed:
949 if merge_state.executed:
946 log.debug("Merge was successful, updating the pull request comments.")
950 log.debug("Merge was successful, updating the pull request comments.")
947 self._comment_and_close_pr(pull_request, user, merge_state)
951 self._comment_and_close_pr(pull_request, user, merge_state)
948
952
949 self._log_audit_action(
953 self._log_audit_action(
950 'repo.pull_request.merge',
954 'repo.pull_request.merge',
951 {'merge_state': merge_state.__dict__},
955 {'merge_state': merge_state.__dict__},
952 user, pull_request)
956 user, pull_request)
953
957
954 else:
958 else:
955 log.warn("Merge failed, not updating the pull request.")
959 log.warning("Merge failed, not updating the pull request.")
956 return merge_state
960 return merge_state
957
961
    def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
        """
        Perform the low-level VCS merge of *pull_request* into its target.

        :param pull_request: pull request being merged
        :param user: user performing the merge (supplies name/email)
        :param extras: hook-environment dict, extended with callback data
        :param merge_msg: optional commit-message template overriding
            ``vcs_settings.MERGE_MESSAGE_TMPL``
        :return: merge state object returned by the VCS backend
        """
        target_vcs = pull_request.target_repo.scm_instance()
        source_vcs = pull_request.source_repo.scm_instance()

        # render the merge commit message from the (possibly custom) template
        message = safe_str(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
            pr_id=pull_request.pull_request_id,
            pr_title=pull_request.title,
            pr_desc=pull_request.description,
            source_repo=source_vcs.name,
            source_ref_name=pull_request.source_ref_parts.name,
            target_repo=target_vcs.name,
            target_ref_name=pull_request.target_ref_parts.name,
        )

        workspace_id = self._workspace_id(pull_request)
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        user_name = self._user_name_for_merging(pull_request, user)

        # make sure we merge against the current target tip
        target_ref = self._refresh_reference(
            pull_request.target_ref_parts, target_vcs)

        # hooks fired by the backend during the merge call back into this
        # process through the daemon prepared here
        callback_daemon, extras = prepare_callback_daemon(
            extras, protocol=vcs_settings.HOOKS_PROTOCOL,
            host=vcs_settings.HOOKS_HOST,
            use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)

        with callback_daemon:
            # TODO: johbo: Implement a clean way to run a config_override
            # for a single call.
            target_vcs.config.set(
                'rhodecode', 'RC_SCM_DATA', json.dumps(extras))

            merge_state = target_vcs.merge(
                repo_id, workspace_id, target_ref, source_vcs,
                pull_request.source_ref_parts,
                user_name=user_name, user_email=user.email,
                message=message, use_rebase=use_rebase,
                close_branch=close_branch)

        return merge_state
999
1004
    def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
        """
        After a successful merge: record the merge revision, leave a closing
        comment, invalidate the target repo caches and fire the merge hook.

        :param pull_request: the merged pull request
        :param user: user who performed the merge
        :param merge_state: merge result carrying the merge commit reference
        :param close_msg: optional closing comment text
        """
        pull_request.merge_rev = merge_state.merge_ref.commit_id
        pull_request.updated_on = datetime.datetime.now()
        close_msg = close_msg or 'Pull request merged and closed'

        # creating this comment with closing_pr=True also closes the PR
        CommentsModel().create(
            text=safe_str(close_msg),
            repo=pull_request.target_repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            f_path=None,
            line_no=None,
            closing_pr=True
        )

        Session().add(pull_request)
        Session().flush()
        # TODO: paris: replace invalidation with less radical solution
        ScmModel().mark_for_invalidation(
            pull_request.target_repo.repo_name)
        self.trigger_pull_request_hook(pull_request, user, 'merge')
1021
1026
1022 def has_valid_update_type(self, pull_request):
1027 def has_valid_update_type(self, pull_request):
1023 source_ref_type = pull_request.source_ref_parts.type
1028 source_ref_type = pull_request.source_ref_parts.type
1024 return source_ref_type in self.REF_TYPES
1029 return source_ref_type in self.REF_TYPES
1025
1030
1026 def get_flow_commits(self, pull_request):
1031 def get_flow_commits(self, pull_request):
1027
1032
1028 # source repo
1033 # source repo
1029 source_ref_name = pull_request.source_ref_parts.name
1034 source_ref_name = pull_request.source_ref_parts.name
1030 source_ref_type = pull_request.source_ref_parts.type
1035 source_ref_type = pull_request.source_ref_parts.type
1031 source_ref_id = pull_request.source_ref_parts.commit_id
1036 source_ref_id = pull_request.source_ref_parts.commit_id
1032 source_repo = pull_request.source_repo.scm_instance()
1037 source_repo = pull_request.source_repo.scm_instance()
1033
1038
1034 try:
1039 try:
1035 if source_ref_type in self.REF_TYPES:
1040 if source_ref_type in self.REF_TYPES:
1036 source_commit = source_repo.get_commit(
1041 source_commit = source_repo.get_commit(
1037 source_ref_name, reference_obj=pull_request.source_ref_parts)
1042 source_ref_name, reference_obj=pull_request.source_ref_parts)
1038 else:
1043 else:
1039 source_commit = source_repo.get_commit(source_ref_id)
1044 source_commit = source_repo.get_commit(source_ref_id)
1040 except CommitDoesNotExistError:
1045 except CommitDoesNotExistError:
1041 raise SourceRefMissing()
1046 raise SourceRefMissing()
1042
1047
1043 # target repo
1048 # target repo
1044 target_ref_name = pull_request.target_ref_parts.name
1049 target_ref_name = pull_request.target_ref_parts.name
1045 target_ref_type = pull_request.target_ref_parts.type
1050 target_ref_type = pull_request.target_ref_parts.type
1046 target_ref_id = pull_request.target_ref_parts.commit_id
1051 target_ref_id = pull_request.target_ref_parts.commit_id
1047 target_repo = pull_request.target_repo.scm_instance()
1052 target_repo = pull_request.target_repo.scm_instance()
1048
1053
1049 try:
1054 try:
1050 if target_ref_type in self.REF_TYPES:
1055 if target_ref_type in self.REF_TYPES:
1051 target_commit = target_repo.get_commit(
1056 target_commit = target_repo.get_commit(
1052 target_ref_name, reference_obj=pull_request.target_ref_parts)
1057 target_ref_name, reference_obj=pull_request.target_ref_parts)
1053 else:
1058 else:
1054 target_commit = target_repo.get_commit(target_ref_id)
1059 target_commit = target_repo.get_commit(target_ref_id)
1055 except CommitDoesNotExistError:
1060 except CommitDoesNotExistError:
1056 raise TargetRefMissing()
1061 raise TargetRefMissing()
1057
1062
1058 return source_commit, target_commit
1063 return source_commit, target_commit
1059
1064
1060 def update_commits(self, pull_request, updating_user):
1065 def update_commits(self, pull_request, updating_user):
1061 """
1066 """
1062 Get the updated list of commits for the pull request
1067 Get the updated list of commits for the pull request
1063 and return the new pull request version and the list
1068 and return the new pull request version and the list
1064 of commits processed by this update action
1069 of commits processed by this update action
1065
1070
1066 updating_user is the user_object who triggered the update
1071 updating_user is the user_object who triggered the update
1067 """
1072 """
1068 pull_request = self.__get_pull_request(pull_request)
1073 pull_request = self.__get_pull_request(pull_request)
1069 source_ref_type = pull_request.source_ref_parts.type
1074 source_ref_type = pull_request.source_ref_parts.type
1070 source_ref_name = pull_request.source_ref_parts.name
1075 source_ref_name = pull_request.source_ref_parts.name
1071 source_ref_id = pull_request.source_ref_parts.commit_id
1076 source_ref_id = pull_request.source_ref_parts.commit_id
1072
1077
1073 target_ref_type = pull_request.target_ref_parts.type
1078 target_ref_type = pull_request.target_ref_parts.type
1074 target_ref_name = pull_request.target_ref_parts.name
1079 target_ref_name = pull_request.target_ref_parts.name
1075 target_ref_id = pull_request.target_ref_parts.commit_id
1080 target_ref_id = pull_request.target_ref_parts.commit_id
1076
1081
1077 if not self.has_valid_update_type(pull_request):
1082 if not self.has_valid_update_type(pull_request):
1078 log.debug("Skipping update of pull request %s due to ref type: %s",
1083 log.debug("Skipping update of pull request %s due to ref type: %s",
1079 pull_request, source_ref_type)
1084 pull_request, source_ref_type)
1080 return UpdateResponse(
1085 return UpdateResponse(
1081 executed=False,
1086 executed=False,
1082 reason=UpdateFailureReason.WRONG_REF_TYPE,
1087 reason=UpdateFailureReason.WRONG_REF_TYPE,
1083 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1088 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1084 source_changed=False, target_changed=False)
1089 source_changed=False, target_changed=False)
1085
1090
1086 try:
1091 try:
1087 source_commit, target_commit = self.get_flow_commits(pull_request)
1092 source_commit, target_commit = self.get_flow_commits(pull_request)
1088 except SourceRefMissing:
1093 except SourceRefMissing:
1089 return UpdateResponse(
1094 return UpdateResponse(
1090 executed=False,
1095 executed=False,
1091 reason=UpdateFailureReason.MISSING_SOURCE_REF,
1096 reason=UpdateFailureReason.MISSING_SOURCE_REF,
1092 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1097 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1093 source_changed=False, target_changed=False)
1098 source_changed=False, target_changed=False)
1094 except TargetRefMissing:
1099 except TargetRefMissing:
1095 return UpdateResponse(
1100 return UpdateResponse(
1096 executed=False,
1101 executed=False,
1097 reason=UpdateFailureReason.MISSING_TARGET_REF,
1102 reason=UpdateFailureReason.MISSING_TARGET_REF,
1098 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1103 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1099 source_changed=False, target_changed=False)
1104 source_changed=False, target_changed=False)
1100
1105
1101 source_changed = source_ref_id != source_commit.raw_id
1106 source_changed = source_ref_id != source_commit.raw_id
1102 target_changed = target_ref_id != target_commit.raw_id
1107 target_changed = target_ref_id != target_commit.raw_id
1103
1108
1104 if not (source_changed or target_changed):
1109 if not (source_changed or target_changed):
1105 log.debug("Nothing changed in pull request %s", pull_request)
1110 log.debug("Nothing changed in pull request %s", pull_request)
1106 return UpdateResponse(
1111 return UpdateResponse(
1107 executed=False,
1112 executed=False,
1108 reason=UpdateFailureReason.NO_CHANGE,
1113 reason=UpdateFailureReason.NO_CHANGE,
1109 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1114 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1110 source_changed=target_changed, target_changed=source_changed)
1115 source_changed=target_changed, target_changed=source_changed)
1111
1116
1112 change_in_found = 'target repo' if target_changed else 'source repo'
1117 change_in_found = 'target repo' if target_changed else 'source repo'
1113 log.debug('Updating pull request because of change in %s detected',
1118 log.debug('Updating pull request because of change in %s detected',
1114 change_in_found)
1119 change_in_found)
1115
1120
1116 # Finally there is a need for an update, in case of source change
1121 # Finally there is a need for an update, in case of source change
1117 # we create a new version, else just an update
1122 # we create a new version, else just an update
1118 if source_changed:
1123 if source_changed:
1119 pull_request_version = self._create_version_from_snapshot(pull_request)
1124 pull_request_version = self._create_version_from_snapshot(pull_request)
1120 self._link_comments_to_version(pull_request_version)
1125 self._link_comments_to_version(pull_request_version)
1121 else:
1126 else:
1122 try:
1127 try:
1123 ver = pull_request.versions[-1]
1128 ver = pull_request.versions[-1]
1124 except IndexError:
1129 except IndexError:
1125 ver = None
1130 ver = None
1126
1131
1127 pull_request.pull_request_version_id = \
1132 pull_request.pull_request_version_id = \
1128 ver.pull_request_version_id if ver else None
1133 ver.pull_request_version_id if ver else None
1129 pull_request_version = pull_request
1134 pull_request_version = pull_request
1130
1135
1131 source_repo = pull_request.source_repo.scm_instance()
1136 source_repo = pull_request.source_repo.scm_instance()
1132 target_repo = pull_request.target_repo.scm_instance()
1137 target_repo = pull_request.target_repo.scm_instance()
1133
1138
1134 # re-compute commit ids
1139 # re-compute commit ids
1135 old_commit_ids = pull_request.revisions
1140 old_commit_ids = pull_request.revisions
1136 pre_load = ["author", "date", "message", "branch"]
1141 pre_load = ["author", "date", "message", "branch"]
1137 commit_ranges = target_repo.compare(
1142 commit_ranges = target_repo.compare(
1138 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1143 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1139 pre_load=pre_load)
1144 pre_load=pre_load)
1140
1145
1141 target_ref = target_commit.raw_id
1146 target_ref = target_commit.raw_id
1142 source_ref = source_commit.raw_id
1147 source_ref = source_commit.raw_id
1143 ancestor_commit_id = target_repo.get_common_ancestor(
1148 ancestor_commit_id = target_repo.get_common_ancestor(
1144 target_ref, source_ref, source_repo)
1149 target_ref, source_ref, source_repo)
1145
1150
1146 if not ancestor_commit_id:
1151 if not ancestor_commit_id:
1147 raise ValueError(
1152 raise ValueError(
1148 'cannot calculate diff info without a common ancestor. '
1153 'cannot calculate diff info without a common ancestor. '
1149 'Make sure both repositories are related, and have a common forking commit.')
1154 'Make sure both repositories are related, and have a common forking commit.')
1150
1155
1151 pull_request.common_ancestor_id = ancestor_commit_id
1156 pull_request.common_ancestor_id = ancestor_commit_id
1152
1157
1153 pull_request.source_ref = '%s:%s:%s' % (
1158 pull_request.source_ref = '%s:%s:%s' % (
1154 source_ref_type, source_ref_name, source_commit.raw_id)
1159 source_ref_type, source_ref_name, source_commit.raw_id)
1155 pull_request.target_ref = '%s:%s:%s' % (
1160 pull_request.target_ref = '%s:%s:%s' % (
1156 target_ref_type, target_ref_name, ancestor_commit_id)
1161 target_ref_type, target_ref_name, ancestor_commit_id)
1157
1162
1158 pull_request.revisions = [
1163 pull_request.revisions = [
1159 commit.raw_id for commit in reversed(commit_ranges)]
1164 commit.raw_id for commit in reversed(commit_ranges)]
1160 pull_request.updated_on = datetime.datetime.now()
1165 pull_request.updated_on = datetime.datetime.now()
1161 Session().add(pull_request)
1166 Session().add(pull_request)
1162 new_commit_ids = pull_request.revisions
1167 new_commit_ids = pull_request.revisions
1163
1168
1164 old_diff_data, new_diff_data = self._generate_update_diffs(
1169 old_diff_data, new_diff_data = self._generate_update_diffs(
1165 pull_request, pull_request_version)
1170 pull_request, pull_request_version)
1166
1171
1167 # calculate commit and file changes
1172 # calculate commit and file changes
1168 commit_changes = self._calculate_commit_id_changes(
1173 commit_changes = self._calculate_commit_id_changes(
1169 old_commit_ids, new_commit_ids)
1174 old_commit_ids, new_commit_ids)
1170 file_changes = self._calculate_file_changes(
1175 file_changes = self._calculate_file_changes(
1171 old_diff_data, new_diff_data)
1176 old_diff_data, new_diff_data)
1172
1177
1173 # set comments as outdated if DIFFS changed
1178 # set comments as outdated if DIFFS changed
1174 CommentsModel().outdate_comments(
1179 CommentsModel().outdate_comments(
1175 pull_request, old_diff_data=old_diff_data,
1180 pull_request, old_diff_data=old_diff_data,
1176 new_diff_data=new_diff_data)
1181 new_diff_data=new_diff_data)
1177
1182
1178 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1183 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1179 file_node_changes = (
1184 file_node_changes = (
1180 file_changes.added or file_changes.modified or file_changes.removed)
1185 file_changes.added or file_changes.modified or file_changes.removed)
1181 pr_has_changes = valid_commit_changes or file_node_changes
1186 pr_has_changes = valid_commit_changes or file_node_changes
1182
1187
1183 # Add an automatic comment to the pull request, in case
1188 # Add an automatic comment to the pull request, in case
1184 # anything has changed
1189 # anything has changed
1185 if pr_has_changes:
1190 if pr_has_changes:
1186 update_comment = CommentsModel().create(
1191 update_comment = CommentsModel().create(
1187 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1192 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1188 repo=pull_request.target_repo,
1193 repo=pull_request.target_repo,
1189 user=pull_request.author,
1194 user=pull_request.author,
1190 pull_request=pull_request,
1195 pull_request=pull_request,
1191 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1196 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1192
1197
1193 # Update status to "Under Review" for added commits
1198 # Update status to "Under Review" for added commits
1194 for commit_id in commit_changes.added:
1199 for commit_id in commit_changes.added:
1195 ChangesetStatusModel().set_status(
1200 ChangesetStatusModel().set_status(
1196 repo=pull_request.source_repo,
1201 repo=pull_request.source_repo,
1197 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1202 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1198 comment=update_comment,
1203 comment=update_comment,
1199 user=pull_request.author,
1204 user=pull_request.author,
1200 pull_request=pull_request,
1205 pull_request=pull_request,
1201 revision=commit_id)
1206 revision=commit_id)
1202
1207
1203 # initial commit
1208 # initial commit
1204 Session().commit()
1209 Session().commit()
1205
1210
1206 if pr_has_changes:
1211 if pr_has_changes:
1207 # send update email to users
1212 # send update email to users
1208 try:
1213 try:
1209 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1214 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1210 ancestor_commit_id=ancestor_commit_id,
1215 ancestor_commit_id=ancestor_commit_id,
1211 commit_changes=commit_changes,
1216 commit_changes=commit_changes,
1212 file_changes=file_changes)
1217 file_changes=file_changes)
1213 Session().commit()
1218 Session().commit()
1214 except Exception:
1219 except Exception:
1215 log.exception('Failed to send email notification to users')
1220 log.exception('Failed to send email notification to users')
1216 Session().rollback()
1221 Session().rollback()
1217
1222
1218 log.debug(
1223 log.debug(
1219 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1224 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1220 'removed_ids: %s', pull_request.pull_request_id,
1225 'removed_ids: %s', pull_request.pull_request_id,
1221 commit_changes.added, commit_changes.common, commit_changes.removed)
1226 commit_changes.added, commit_changes.common, commit_changes.removed)
1222 log.debug(
1227 log.debug(
1223 'Updated pull request with the following file changes: %s',
1228 'Updated pull request with the following file changes: %s',
1224 file_changes)
1229 file_changes)
1225
1230
1226 log.info(
1231 log.info(
1227 "Updated pull request %s from commit %s to commit %s, "
1232 "Updated pull request %s from commit %s to commit %s, "
1228 "stored new version %s of this pull request.",
1233 "stored new version %s of this pull request.",
1229 pull_request.pull_request_id, source_ref_id,
1234 pull_request.pull_request_id, source_ref_id,
1230 pull_request.source_ref_parts.commit_id,
1235 pull_request.source_ref_parts.commit_id,
1231 pull_request_version.pull_request_version_id)
1236 pull_request_version.pull_request_version_id)
1232
1237
1233 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1238 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1234
1239
1235 return UpdateResponse(
1240 return UpdateResponse(
1236 executed=True, reason=UpdateFailureReason.NONE,
1241 executed=True, reason=UpdateFailureReason.NONE,
1237 old=pull_request, new=pull_request_version,
1242 old=pull_request, new=pull_request_version,
1238 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1243 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1239 source_changed=source_changed, target_changed=target_changed)
1244 source_changed=source_changed, target_changed=target_changed)
1240
1245
1241 def _create_version_from_snapshot(self, pull_request):
1246 def _create_version_from_snapshot(self, pull_request):
1242 version = PullRequestVersion()
1247 version = PullRequestVersion()
1243 version.title = pull_request.title
1248 version.title = pull_request.title
1244 version.description = pull_request.description
1249 version.description = pull_request.description
1245 version.status = pull_request.status
1250 version.status = pull_request.status
1246 version.pull_request_state = pull_request.pull_request_state
1251 version.pull_request_state = pull_request.pull_request_state
1247 version.created_on = datetime.datetime.now()
1252 version.created_on = datetime.datetime.now()
1248 version.updated_on = pull_request.updated_on
1253 version.updated_on = pull_request.updated_on
1249 version.user_id = pull_request.user_id
1254 version.user_id = pull_request.user_id
1250 version.source_repo = pull_request.source_repo
1255 version.source_repo = pull_request.source_repo
1251 version.source_ref = pull_request.source_ref
1256 version.source_ref = pull_request.source_ref
1252 version.target_repo = pull_request.target_repo
1257 version.target_repo = pull_request.target_repo
1253 version.target_ref = pull_request.target_ref
1258 version.target_ref = pull_request.target_ref
1254
1259
1255 version._last_merge_source_rev = pull_request._last_merge_source_rev
1260 version._last_merge_source_rev = pull_request._last_merge_source_rev
1256 version._last_merge_target_rev = pull_request._last_merge_target_rev
1261 version._last_merge_target_rev = pull_request._last_merge_target_rev
1257 version.last_merge_status = pull_request.last_merge_status
1262 version.last_merge_status = pull_request.last_merge_status
1258 version.last_merge_metadata = pull_request.last_merge_metadata
1263 version.last_merge_metadata = pull_request.last_merge_metadata
1259 version.shadow_merge_ref = pull_request.shadow_merge_ref
1264 version.shadow_merge_ref = pull_request.shadow_merge_ref
1260 version.merge_rev = pull_request.merge_rev
1265 version.merge_rev = pull_request.merge_rev
1261 version.reviewer_data = pull_request.reviewer_data
1266 version.reviewer_data = pull_request.reviewer_data
1262
1267
1263 version.revisions = pull_request.revisions
1268 version.revisions = pull_request.revisions
1264 version.common_ancestor_id = pull_request.common_ancestor_id
1269 version.common_ancestor_id = pull_request.common_ancestor_id
1265 version.pull_request = pull_request
1270 version.pull_request = pull_request
1266 Session().add(version)
1271 Session().add(version)
1267 Session().flush()
1272 Session().flush()
1268
1273
1269 return version
1274 return version
1270
1275
1271 def _generate_update_diffs(self, pull_request, pull_request_version):
1276 def _generate_update_diffs(self, pull_request, pull_request_version):
1272
1277
1273 diff_context = (
1278 diff_context = (
1274 self.DIFF_CONTEXT +
1279 self.DIFF_CONTEXT +
1275 CommentsModel.needed_extra_diff_context())
1280 CommentsModel.needed_extra_diff_context())
1276 hide_whitespace_changes = False
1281 hide_whitespace_changes = False
1277 source_repo = pull_request_version.source_repo
1282 source_repo = pull_request_version.source_repo
1278 source_ref_id = pull_request_version.source_ref_parts.commit_id
1283 source_ref_id = pull_request_version.source_ref_parts.commit_id
1279 target_ref_id = pull_request_version.target_ref_parts.commit_id
1284 target_ref_id = pull_request_version.target_ref_parts.commit_id
1280 old_diff = self._get_diff_from_pr_or_version(
1285 old_diff = self._get_diff_from_pr_or_version(
1281 source_repo, source_ref_id, target_ref_id,
1286 source_repo, source_ref_id, target_ref_id,
1282 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1287 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1283
1288
1284 source_repo = pull_request.source_repo
1289 source_repo = pull_request.source_repo
1285 source_ref_id = pull_request.source_ref_parts.commit_id
1290 source_ref_id = pull_request.source_ref_parts.commit_id
1286 target_ref_id = pull_request.target_ref_parts.commit_id
1291 target_ref_id = pull_request.target_ref_parts.commit_id
1287
1292
1288 new_diff = self._get_diff_from_pr_or_version(
1293 new_diff = self._get_diff_from_pr_or_version(
1289 source_repo, source_ref_id, target_ref_id,
1294 source_repo, source_ref_id, target_ref_id,
1290 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1295 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1291
1296
1292 old_diff_data = diffs.DiffProcessor(old_diff)
1297 # NOTE: this was using diff_format='gitdiff'
1298 old_diff_data = diffs.DiffProcessor(old_diff, diff_format='newdiff')
1293 old_diff_data.prepare()
1299 old_diff_data.prepare()
1294 new_diff_data = diffs.DiffProcessor(new_diff)
1300 new_diff_data = diffs.DiffProcessor(new_diff, diff_format='newdiff')
1295 new_diff_data.prepare()
1301 new_diff_data.prepare()
1296
1302
1297 return old_diff_data, new_diff_data
1303 return old_diff_data, new_diff_data
1298
1304
1299 def _link_comments_to_version(self, pull_request_version):
1305 def _link_comments_to_version(self, pull_request_version):
1300 """
1306 """
1301 Link all unlinked comments of this pull request to the given version.
1307 Link all unlinked comments of this pull request to the given version.
1302
1308
1303 :param pull_request_version: The `PullRequestVersion` to which
1309 :param pull_request_version: The `PullRequestVersion` to which
1304 the comments shall be linked.
1310 the comments shall be linked.
1305
1311
1306 """
1312 """
1307 pull_request = pull_request_version.pull_request
1313 pull_request = pull_request_version.pull_request
1308 comments = ChangesetComment.query()\
1314 comments = ChangesetComment.query()\
1309 .filter(
1315 .filter(
1310 # TODO: johbo: Should we query for the repo at all here?
1316 # TODO: johbo: Should we query for the repo at all here?
1311 # Pending decision on how comments of PRs are to be related
1317 # Pending decision on how comments of PRs are to be related
1312 # to either the source repo, the target repo or no repo at all.
1318 # to either the source repo, the target repo or no repo at all.
1313 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1319 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1314 ChangesetComment.pull_request == pull_request,
1320 ChangesetComment.pull_request == pull_request,
1315 ChangesetComment.pull_request_version == None)\
1321 ChangesetComment.pull_request_version == None)\
1316 .order_by(ChangesetComment.comment_id.asc())
1322 .order_by(ChangesetComment.comment_id.asc())
1317
1323
1318 # TODO: johbo: Find out why this breaks if it is done in a bulk
1324 # TODO: johbo: Find out why this breaks if it is done in a bulk
1319 # operation.
1325 # operation.
1320 for comment in comments:
1326 for comment in comments:
1321 comment.pull_request_version_id = (
1327 comment.pull_request_version_id = (
1322 pull_request_version.pull_request_version_id)
1328 pull_request_version.pull_request_version_id)
1323 Session().add(comment)
1329 Session().add(comment)
1324
1330
1325 def _calculate_commit_id_changes(self, old_ids, new_ids):
1331 def _calculate_commit_id_changes(self, old_ids, new_ids):
1326 added = [x for x in new_ids if x not in old_ids]
1332 added = [x for x in new_ids if x not in old_ids]
1327 common = [x for x in new_ids if x in old_ids]
1333 common = [x for x in new_ids if x in old_ids]
1328 removed = [x for x in old_ids if x not in new_ids]
1334 removed = [x for x in old_ids if x not in new_ids]
1329 total = new_ids
1335 total = new_ids
1330 return ChangeTuple(added, common, removed, total)
1336 return ChangeTuple(added, common, removed, total)
1331
1337
1332 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1338 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1333
1339
1334 old_files = OrderedDict()
1340 old_files = OrderedDict()
1335 for diff_data in old_diff_data.parsed_diff:
1341 for diff_data in old_diff_data.parsed_diff:
1336 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1342 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1337
1343
1338 added_files = []
1344 added_files = []
1339 modified_files = []
1345 modified_files = []
1340 removed_files = []
1346 removed_files = []
1341 for diff_data in new_diff_data.parsed_diff:
1347 for diff_data in new_diff_data.parsed_diff:
1342 new_filename = diff_data['filename']
1348 new_filename = diff_data['filename']
1343 new_hash = md5_safe(diff_data['raw_diff'])
1349 new_hash = md5_safe(diff_data['raw_diff'])
1344
1350
1345 old_hash = old_files.get(new_filename)
1351 old_hash = old_files.get(new_filename)
1346 if not old_hash:
1352 if not old_hash:
1347 # file is not present in old diff, we have to figure out from parsed diff
1353 # file is not present in old diff, we have to figure out from parsed diff
1348 # operation ADD/REMOVE
1354 # operation ADD/REMOVE
1349 operations_dict = diff_data['stats']['ops']
1355 operations_dict = diff_data['stats']['ops']
1350 if diffs.DEL_FILENODE in operations_dict:
1356 if diffs.DEL_FILENODE in operations_dict:
1351 removed_files.append(new_filename)
1357 removed_files.append(new_filename)
1352 else:
1358 else:
1353 added_files.append(new_filename)
1359 added_files.append(new_filename)
1354 else:
1360 else:
1355 if new_hash != old_hash:
1361 if new_hash != old_hash:
1356 modified_files.append(new_filename)
1362 modified_files.append(new_filename)
1357 # now remove a file from old, since we have seen it already
1363 # now remove a file from old, since we have seen it already
1358 del old_files[new_filename]
1364 del old_files[new_filename]
1359
1365
1360 # removed files is when there are present in old, but not in NEW,
1366 # removed files is when there are present in old, but not in NEW,
1361 # since we remove old files that are present in new diff, left-overs
1367 # since we remove old files that are present in new diff, left-overs
1362 # if any should be the removed files
1368 # if any should be the removed files
1363 removed_files.extend(old_files.keys())
1369 removed_files.extend(old_files.keys())
1364
1370
1365 return FileChangeTuple(added_files, modified_files, removed_files)
1371 return FileChangeTuple(added_files, modified_files, removed_files)
1366
1372
1367 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1373 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1368 """
1374 """
1369 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1375 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1370 so it's always looking the same disregarding on which default
1376 so it's always looking the same disregarding on which default
1371 renderer system is using.
1377 renderer system is using.
1372
1378
1373 :param ancestor_commit_id: ancestor raw_id
1379 :param ancestor_commit_id: ancestor raw_id
1374 :param changes: changes named tuple
1380 :param changes: changes named tuple
1375 :param file_changes: file changes named tuple
1381 :param file_changes: file changes named tuple
1376
1382
1377 """
1383 """
1378 new_status = ChangesetStatus.get_status_lbl(
1384 new_status = ChangesetStatus.get_status_lbl(
1379 ChangesetStatus.STATUS_UNDER_REVIEW)
1385 ChangesetStatus.STATUS_UNDER_REVIEW)
1380
1386
1381 changed_files = (
1387 changed_files = (
1382 file_changes.added + file_changes.modified + file_changes.removed)
1388 file_changes.added + file_changes.modified + file_changes.removed)
1383
1389
1384 params = {
1390 params = {
1385 'under_review_label': new_status,
1391 'under_review_label': new_status,
1386 'added_commits': changes.added,
1392 'added_commits': changes.added,
1387 'removed_commits': changes.removed,
1393 'removed_commits': changes.removed,
1388 'changed_files': changed_files,
1394 'changed_files': changed_files,
1389 'added_files': file_changes.added,
1395 'added_files': file_changes.added,
1390 'modified_files': file_changes.modified,
1396 'modified_files': file_changes.modified,
1391 'removed_files': file_changes.removed,
1397 'removed_files': file_changes.removed,
1392 'ancestor_commit_id': ancestor_commit_id
1398 'ancestor_commit_id': ancestor_commit_id
1393 }
1399 }
1394 renderer = RstTemplateRenderer()
1400 renderer = RstTemplateRenderer()
1395 return renderer.render('pull_request_update.mako', **params)
1401 return renderer.render('pull_request_update.mako', **params)
1396
1402
1397 def edit(self, pull_request, title, description, description_renderer, user):
1403 def edit(self, pull_request, title, description, description_renderer, user):
1398 pull_request = self.__get_pull_request(pull_request)
1404 pull_request = self.__get_pull_request(pull_request)
1399 old_data = pull_request.get_api_data(with_merge_state=False)
1405 old_data = pull_request.get_api_data(with_merge_state=False)
1400 if pull_request.is_closed():
1406 if pull_request.is_closed():
1401 raise ValueError('This pull request is closed')
1407 raise ValueError('This pull request is closed')
1402 if title:
1408 if title:
1403 pull_request.title = title
1409 pull_request.title = title
1404 pull_request.description = description
1410 pull_request.description = description
1405 pull_request.updated_on = datetime.datetime.now()
1411 pull_request.updated_on = datetime.datetime.now()
1406 pull_request.description_renderer = description_renderer
1412 pull_request.description_renderer = description_renderer
1407 Session().add(pull_request)
1413 Session().add(pull_request)
1408 self._log_audit_action(
1414 self._log_audit_action(
1409 'repo.pull_request.edit', {'old_data': old_data},
1415 'repo.pull_request.edit', {'old_data': old_data},
1410 user, pull_request)
1416 user, pull_request)
1411
1417
1412 def update_reviewers(self, pull_request, reviewer_data, user):
1418 def update_reviewers(self, pull_request, reviewer_data, user):
1413 """
1419 """
1414 Update the reviewers in the pull request
1420 Update the reviewers in the pull request
1415
1421
1416 :param pull_request: the pr to update
1422 :param pull_request: the pr to update
1417 :param reviewer_data: list of tuples
1423 :param reviewer_data: list of tuples
1418 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1424 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1419 :param user: current use who triggers this action
1425 :param user: current use who triggers this action
1420 """
1426 """
1421
1427
1422 pull_request = self.__get_pull_request(pull_request)
1428 pull_request = self.__get_pull_request(pull_request)
1423 if pull_request.is_closed():
1429 if pull_request.is_closed():
1424 raise ValueError('This pull request is closed')
1430 raise ValueError('This pull request is closed')
1425
1431
1426 reviewers = {}
1432 reviewers = {}
1427 for user_id, reasons, mandatory, role, rules in reviewer_data:
1433 for user_id, reasons, mandatory, role, rules in reviewer_data:
1428 if isinstance(user_id, (int, str)):
1434 if isinstance(user_id, (int, str)):
1429 user_id = self._get_user(user_id).user_id
1435 user_id = self._get_user(user_id).user_id
1430 reviewers[user_id] = {
1436 reviewers[user_id] = {
1431 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1437 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1432
1438
1433 reviewers_ids = set(reviewers.keys())
1439 reviewers_ids = set(reviewers.keys())
1434 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1440 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1435 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1441 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1436
1442
1437 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1443 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1438
1444
1439 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1445 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1440 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1446 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1441
1447
1442 log.debug("Adding %s reviewers", ids_to_add)
1448 log.debug("Adding %s reviewers", ids_to_add)
1443 log.debug("Removing %s reviewers", ids_to_remove)
1449 log.debug("Removing %s reviewers", ids_to_remove)
1444 changed = False
1450 changed = False
1445 added_audit_reviewers = []
1451 added_audit_reviewers = []
1446 removed_audit_reviewers = []
1452 removed_audit_reviewers = []
1447
1453
1448 for uid in ids_to_add:
1454 for uid in ids_to_add:
1449 changed = True
1455 changed = True
1450 _usr = self._get_user(uid)
1456 _usr = self._get_user(uid)
1451 reviewer = PullRequestReviewers()
1457 reviewer = PullRequestReviewers()
1452 reviewer.user = _usr
1458 reviewer.user = _usr
1453 reviewer.pull_request = pull_request
1459 reviewer.pull_request = pull_request
1454 reviewer.reasons = reviewers[uid]['reasons']
1460 reviewer.reasons = reviewers[uid]['reasons']
1455 # NOTE(marcink): mandatory shouldn't be changed now
1461 # NOTE(marcink): mandatory shouldn't be changed now
1456 # reviewer.mandatory = reviewers[uid]['reasons']
1462 # reviewer.mandatory = reviewers[uid]['reasons']
1457 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1463 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1458 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1464 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1459 Session().add(reviewer)
1465 Session().add(reviewer)
1460 added_audit_reviewers.append(reviewer.get_dict())
1466 added_audit_reviewers.append(reviewer.get_dict())
1461
1467
1462 for uid in ids_to_remove:
1468 for uid in ids_to_remove:
1463 changed = True
1469 changed = True
1464 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1470 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1465 # This is an edge case that handles previous state of having the same reviewer twice.
1471 # This is an edge case that handles previous state of having the same reviewer twice.
1466 # this CAN happen due to the lack of DB checks
1472 # this CAN happen due to the lack of DB checks
1467 reviewers = PullRequestReviewers.query()\
1473 reviewers = PullRequestReviewers.query()\
1468 .filter(PullRequestReviewers.user_id == uid,
1474 .filter(PullRequestReviewers.user_id == uid,
1469 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1475 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1470 PullRequestReviewers.pull_request == pull_request)\
1476 PullRequestReviewers.pull_request == pull_request)\
1471 .all()
1477 .all()
1472
1478
1473 for obj in reviewers:
1479 for obj in reviewers:
1474 added_audit_reviewers.append(obj.get_dict())
1480 added_audit_reviewers.append(obj.get_dict())
1475 Session().delete(obj)
1481 Session().delete(obj)
1476
1482
1477 if changed:
1483 if changed:
1478 Session().expire_all()
1484 Session().expire_all()
1479 pull_request.updated_on = datetime.datetime.now()
1485 pull_request.updated_on = datetime.datetime.now()
1480 Session().add(pull_request)
1486 Session().add(pull_request)
1481
1487
1482 # finally store audit logs
1488 # finally store audit logs
1483 for user_data in added_audit_reviewers:
1489 for user_data in added_audit_reviewers:
1484 self._log_audit_action(
1490 self._log_audit_action(
1485 'repo.pull_request.reviewer.add', {'data': user_data},
1491 'repo.pull_request.reviewer.add', {'data': user_data},
1486 user, pull_request)
1492 user, pull_request)
1487 for user_data in removed_audit_reviewers:
1493 for user_data in removed_audit_reviewers:
1488 self._log_audit_action(
1494 self._log_audit_action(
1489 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1495 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1490 user, pull_request)
1496 user, pull_request)
1491
1497
1492 self.notify_reviewers(pull_request, ids_to_add, user)
1498 self.notify_reviewers(pull_request, ids_to_add, user)
1493 return ids_to_add, ids_to_remove
1499 return ids_to_add, ids_to_remove
1494
1500
1495 def update_observers(self, pull_request, observer_data, user):
1501 def update_observers(self, pull_request, observer_data, user):
1496 """
1502 """
1497 Update the observers in the pull request
1503 Update the observers in the pull request
1498
1504
1499 :param pull_request: the pr to update
1505 :param pull_request: the pr to update
1500 :param observer_data: list of tuples
1506 :param observer_data: list of tuples
1501 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1507 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1502 :param user: current use who triggers this action
1508 :param user: current use who triggers this action
1503 """
1509 """
1504 pull_request = self.__get_pull_request(pull_request)
1510 pull_request = self.__get_pull_request(pull_request)
1505 if pull_request.is_closed():
1511 if pull_request.is_closed():
1506 raise ValueError('This pull request is closed')
1512 raise ValueError('This pull request is closed')
1507
1513
1508 observers = {}
1514 observers = {}
1509 for user_id, reasons, mandatory, role, rules in observer_data:
1515 for user_id, reasons, mandatory, role, rules in observer_data:
1510 if isinstance(user_id, (int, str)):
1516 if isinstance(user_id, (int, str)):
1511 user_id = self._get_user(user_id).user_id
1517 user_id = self._get_user(user_id).user_id
1512 observers[user_id] = {
1518 observers[user_id] = {
1513 'reasons': reasons, 'observers': mandatory, 'role': role}
1519 'reasons': reasons, 'observers': mandatory, 'role': role}
1514
1520
1515 observers_ids = set(observers.keys())
1521 observers_ids = set(observers.keys())
1516 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1522 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1517 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1523 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1518
1524
1519 current_observers_ids = set([x.user.user_id for x in current_observers])
1525 current_observers_ids = set([x.user.user_id for x in current_observers])
1520
1526
1521 ids_to_add = observers_ids.difference(current_observers_ids)
1527 ids_to_add = observers_ids.difference(current_observers_ids)
1522 ids_to_remove = current_observers_ids.difference(observers_ids)
1528 ids_to_remove = current_observers_ids.difference(observers_ids)
1523
1529
1524 log.debug("Adding %s observer", ids_to_add)
1530 log.debug("Adding %s observer", ids_to_add)
1525 log.debug("Removing %s observer", ids_to_remove)
1531 log.debug("Removing %s observer", ids_to_remove)
1526 changed = False
1532 changed = False
1527 added_audit_observers = []
1533 added_audit_observers = []
1528 removed_audit_observers = []
1534 removed_audit_observers = []
1529
1535
1530 for uid in ids_to_add:
1536 for uid in ids_to_add:
1531 changed = True
1537 changed = True
1532 _usr = self._get_user(uid)
1538 _usr = self._get_user(uid)
1533 observer = PullRequestReviewers()
1539 observer = PullRequestReviewers()
1534 observer.user = _usr
1540 observer.user = _usr
1535 observer.pull_request = pull_request
1541 observer.pull_request = pull_request
1536 observer.reasons = observers[uid]['reasons']
1542 observer.reasons = observers[uid]['reasons']
1537 # NOTE(marcink): mandatory shouldn't be changed now
1543 # NOTE(marcink): mandatory shouldn't be changed now
1538 # observer.mandatory = observer[uid]['reasons']
1544 # observer.mandatory = observer[uid]['reasons']
1539
1545
1540 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1546 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1541 observer.role = PullRequestReviewers.ROLE_OBSERVER
1547 observer.role = PullRequestReviewers.ROLE_OBSERVER
1542 Session().add(observer)
1548 Session().add(observer)
1543 added_audit_observers.append(observer.get_dict())
1549 added_audit_observers.append(observer.get_dict())
1544
1550
1545 for uid in ids_to_remove:
1551 for uid in ids_to_remove:
1546 changed = True
1552 changed = True
1547 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1553 # NOTE(marcink): we fetch "ALL" reviewers objects using .all().
1548 # This is an edge case that handles previous state of having the same reviewer twice.
1554 # This is an edge case that handles previous state of having the same reviewer twice.
1549 # this CAN happen due to the lack of DB checks
1555 # this CAN happen due to the lack of DB checks
1550 observers = PullRequestReviewers.query()\
1556 observers = PullRequestReviewers.query()\
1551 .filter(PullRequestReviewers.user_id == uid,
1557 .filter(PullRequestReviewers.user_id == uid,
1552 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1558 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1553 PullRequestReviewers.pull_request == pull_request)\
1559 PullRequestReviewers.pull_request == pull_request)\
1554 .all()
1560 .all()
1555
1561
1556 for obj in observers:
1562 for obj in observers:
1557 added_audit_observers.append(obj.get_dict())
1563 added_audit_observers.append(obj.get_dict())
1558 Session().delete(obj)
1564 Session().delete(obj)
1559
1565
1560 if changed:
1566 if changed:
1561 Session().expire_all()
1567 Session().expire_all()
1562 pull_request.updated_on = datetime.datetime.now()
1568 pull_request.updated_on = datetime.datetime.now()
1563 Session().add(pull_request)
1569 Session().add(pull_request)
1564
1570
1565 # finally store audit logs
1571 # finally store audit logs
1566 for user_data in added_audit_observers:
1572 for user_data in added_audit_observers:
1567 self._log_audit_action(
1573 self._log_audit_action(
1568 'repo.pull_request.observer.add', {'data': user_data},
1574 'repo.pull_request.observer.add', {'data': user_data},
1569 user, pull_request)
1575 user, pull_request)
1570 for user_data in removed_audit_observers:
1576 for user_data in removed_audit_observers:
1571 self._log_audit_action(
1577 self._log_audit_action(
1572 'repo.pull_request.observer.delete', {'old_data': user_data},
1578 'repo.pull_request.observer.delete', {'old_data': user_data},
1573 user, pull_request)
1579 user, pull_request)
1574
1580
1575 self.notify_observers(pull_request, ids_to_add, user)
1581 self.notify_observers(pull_request, ids_to_add, user)
1576 return ids_to_add, ids_to_remove
1582 return ids_to_add, ids_to_remove
1577
1583
1578 def get_url(self, pull_request, request=None, permalink=False):
1584 def get_url(self, pull_request, request=None, permalink=False):
1579 if not request:
1585 if not request:
1580 request = get_current_request()
1586 request = get_current_request()
1581
1587
1582 if permalink:
1588 if permalink:
1583 return request.route_url(
1589 return request.route_url(
1584 'pull_requests_global',
1590 'pull_requests_global',
1585 pull_request_id=pull_request.pull_request_id,)
1591 pull_request_id=pull_request.pull_request_id,)
1586 else:
1592 else:
1587 return request.route_url('pullrequest_show',
1593 return request.route_url('pullrequest_show',
1588 repo_name=safe_str(pull_request.target_repo.repo_name),
1594 repo_name=safe_str(pull_request.target_repo.repo_name),
1589 pull_request_id=pull_request.pull_request_id,)
1595 pull_request_id=pull_request.pull_request_id,)
1590
1596
1591 def get_shadow_clone_url(self, pull_request, request=None):
1597 def get_shadow_clone_url(self, pull_request, request=None):
1592 """
1598 """
1593 Returns qualified url pointing to the shadow repository. If this pull
1599 Returns qualified url pointing to the shadow repository. If this pull
1594 request is closed there is no shadow repository and ``None`` will be
1600 request is closed there is no shadow repository and ``None`` will be
1595 returned.
1601 returned.
1596 """
1602 """
1597 if pull_request.is_closed():
1603 if pull_request.is_closed():
1598 return None
1604 return None
1599 else:
1605 else:
1600 pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
1606 pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
1601 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1607 return safe_str('{pr_url}/repository'.format(pr_url=pr_url))
1602
1608
    def _notify_reviewers(self, pull_request, user_ids, role, user):
        """
        Send pull-request notifications (in-app + email) to ``user_ids``.

        :param pull_request: the pull request the notification is about.
        :param user_ids: recipient user ids; no-op when empty.
        :param role: reviewer/observer role string, passed to the email
            template as ``user_role``.
        :param user: the acting user, recorded as the notification creator.
        """
        # notification to reviewers/observers
        if not user_ids:
            return

        log.debug('Notify following %s users about pull-request %s', role, user_ids)

        pull_request_obj = pull_request
        # get the current participants of this pull request
        recipients = user_ids
        notification_type = EmailNotificationModel.TYPE_PULL_REQUEST

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        # pull request specifics: (commit_id, message) pairs for the email body
        pull_request_commits = [
            (x.raw_id, x.message)
            for x in map(pr_source_repo.get_commit, pull_request.revisions)]

        current_rhodecode_user = user
        kwargs = {
            'user': current_rhodecode_user,
            'pull_request_author': pull_request.author,
            'pull_request': pull_request_obj,
            'pull_request_commits': pull_request_commits,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,
            'thread_ids': [pr_url],
            'user_role': role
        }

        # create notification objects, and emails
        NotificationModel().create(
            created_by=current_rhodecode_user,
            notification_subject='',  # Filled in based on the notification_type
            notification_body='',  # Filled in based on the notification_type
            notification_type=notification_type,
            recipients=recipients,
            email_kwargs=kwargs,
        )
1661
1667
1662 def notify_reviewers(self, pull_request, reviewers_ids, user):
1668 def notify_reviewers(self, pull_request, reviewers_ids, user):
1663 return self._notify_reviewers(pull_request, reviewers_ids,
1669 return self._notify_reviewers(pull_request, reviewers_ids,
1664 PullRequestReviewers.ROLE_REVIEWER, user)
1670 PullRequestReviewers.ROLE_REVIEWER, user)
1665
1671
1666 def notify_observers(self, pull_request, observers_ids, user):
1672 def notify_observers(self, pull_request, observers_ids, user):
1667 return self._notify_reviewers(pull_request, observers_ids,
1673 return self._notify_reviewers(pull_request, observers_ids,
1668 PullRequestReviewers.ROLE_OBSERVER, user)
1674 PullRequestReviewers.ROLE_OBSERVER, user)
1669
1675
    def notify_users(self, pull_request, updating_user, ancestor_commit_id,
                     commit_changes, file_changes):
        """
        Notify all current reviewers — except the user who performed the
        update — that the pull request received new commits/file changes.

        :param updating_user: user who updated the PR; excluded from recipients.
        :param ancestor_commit_id: new common ancestor after the update.
        :param commit_changes: object with ``added``/``removed`` commit lists.
        :param file_changes: object with ``added``/``modified``/``removed``
            file lists.
        """
        updating_user_id = updating_user.user_id
        reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
        # NOTE(marcink): send notification to all other users except to
        # person who updated the PR
        recipients = reviewers.difference(set([updating_user_id]))

        log.debug('Notify following recipients about pull-request update %s', recipients)

        pull_request_obj = pull_request

        # send email about the update
        changed_files = (
            file_changes.added + file_changes.modified + file_changes.removed)

        pr_source_repo = pull_request_obj.source_repo
        pr_target_repo = pull_request_obj.target_repo

        pr_url = h.route_url('pullrequest_show',
                             repo_name=pr_target_repo.repo_name,
                             pull_request_id=pull_request_obj.pull_request_id,)

        # set some variables for email notification
        pr_target_repo_url = h.route_url(
            'repo_summary', repo_name=pr_target_repo.repo_name)

        pr_source_repo_url = h.route_url(
            'repo_summary', repo_name=pr_source_repo.repo_name)

        email_kwargs = {
            'date': datetime.datetime.now(),
            'updating_user': updating_user,

            'pull_request': pull_request_obj,

            'pull_request_target_repo': pr_target_repo,
            'pull_request_target_repo_url': pr_target_repo_url,

            'pull_request_source_repo': pr_source_repo,
            'pull_request_source_repo_url': pr_source_repo_url,

            'pull_request_url': pr_url,

            'ancestor_commit_id': ancestor_commit_id,
            'added_commits': commit_changes.added,
            'removed_commits': commit_changes.removed,
            'changed_files': changed_files,
            'added_files': file_changes.added,
            'modified_files': file_changes.modified,
            'removed_files': file_changes.removed,
            'thread_ids': [pr_url],
        }

        # create notification objects, and emails
        NotificationModel().create(
            created_by=updating_user,
            notification_subject='',  # Filled in based on the notification_type
            notification_body='',  # Filled in based on the notification_type
            notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
            recipients=recipients,
            email_kwargs=email_kwargs,
        )
1734
1740
    def delete(self, pull_request, user=None):
        """
        Delete ``pull_request`` and its merge workspace, recording an
        audit-log entry with the pre-delete API data.

        :param user: acting user for the audit log; falls back to the
            current request user's username when not given.
        """
        if not user:
            user = getattr(get_current_rhodecode_user(), 'username', None)

        pull_request = self.__get_pull_request(pull_request)
        # capture data before deletion for the audit log
        old_data = pull_request.get_api_data(with_merge_state=False)
        self._cleanup_merge_workspace(pull_request)
        self._log_audit_action(
            'repo.pull_request.delete', {'old_data': old_data},
            user, pull_request)
        Session().delete(pull_request)
1746
1752
    def close_pull_request(self, pull_request, user):
        """
        Mark ``pull_request`` as closed: clean up its merge workspace, set
        the CLOSED status, fire the 'close' hook and write an audit entry.
        """
        pull_request = self.__get_pull_request(pull_request)
        self._cleanup_merge_workspace(pull_request)
        pull_request.status = PullRequest.STATUS_CLOSED
        pull_request.updated_on = datetime.datetime.now()
        Session().add(pull_request)
        self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')

        pr_data = pull_request.get_api_data(with_merge_state=False)
        self._log_audit_action(
            'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1758
1764
    def close_pull_request_with_comment(
            self, pull_request, user, repo, message=None, auth_user=None):
        """
        Close ``pull_request`` leaving a status-changing comment.

        The changeset status becomes APPROVED only when the calculated
        review status is approved (voting consent); otherwise REJECTED.

        :param message: optional comment text; a default close message is
            generated when omitted.
        :returns: tuple of ``(comment, status)``.
        """
        pull_request_review_status = pull_request.calculated_review_status()

        if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
            # approved only if we have voting consent
            status = ChangesetStatus.STATUS_APPROVED
        else:
            status = ChangesetStatus.STATUS_REJECTED
        status_lbl = ChangesetStatus.get_status_lbl(status)

        default_message = (
            'Closing with status change {transition_icon} {status}.'
        ).format(transition_icon='>', status=status_lbl)
        text = message or default_message

        # create a comment, and link it to new status
        comment = CommentsModel().create(
            text=text,
            repo=repo.repo_id,
            user=user.user_id,
            pull_request=pull_request.pull_request_id,
            status_change=status_lbl,
            status_change_type=status,
            closing_pr=True,
            auth_user=auth_user,
        )

        # calculate old status before we change it
        old_calculated_status = pull_request.calculated_review_status()
        ChangesetStatusModel().set_status(
            repo.repo_id,
            status,
            user.user_id,
            comment=comment,
            pull_request=pull_request.pull_request_id
        )

        Session().flush()

        self.trigger_pull_request_hook(pull_request, user, 'comment',
                                       data={'comment': comment})

        # we now calculate the status of pull request again, and based on that
        # calculation trigger status change. This might happen in cases
        # that non-reviewer admin closes a pr, which means his vote doesn't
        # change the status, while if he's a reviewer this might change it.
        calculated_status = pull_request.calculated_review_status()
        if old_calculated_status != calculated_status:
            self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
                                           data={'status': calculated_status})

        # finally close the PR
        PullRequestModel().close_pull_request(pull_request.pull_request_id, user)

        return comment, status
1816
1822
    def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
        """
        Compute whether ``pull_request`` can be merged server-side.

        :param translator: translation function; falls back to the current
            request's translator.
        :param force_shadow_repo_refresh: force a fresh dry-run merge instead
            of reusing the cached merge state.
        :returns: tuple ``(merge_response or None, merge_possible, message)``.
        """
        _ = translator or get_current_request().translate

        if not self._is_merge_enabled(pull_request):
            return None, False, _('Server-side pull request merging is disabled.')

        if pull_request.is_closed():
            return None, False, _('This pull request is closed.')

        # e.g. largefiles compatibility between source and target
        merge_possible, msg = self._check_repo_requirements(
            target=pull_request.target_repo, source=pull_request.source_repo,
            translator=_)
        if not merge_possible:
            return None, merge_possible, msg

        try:
            merge_response = self._try_merge(
                pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
            log.debug("Merge response: %s", merge_response)
            return merge_response, merge_response.possible, merge_response.merge_status_message
        except NotImplementedError:
            # backend without server-side merge support
            return None, False, _('Pull request merging is not supported.')
1839
1845
1840 def _check_repo_requirements(self, target, source, translator):
1846 def _check_repo_requirements(self, target, source, translator):
1841 """
1847 """
1842 Check if `target` and `source` have compatible requirements.
1848 Check if `target` and `source` have compatible requirements.
1843
1849
1844 Currently this is just checking for largefiles.
1850 Currently this is just checking for largefiles.
1845 """
1851 """
1846 _ = translator
1852 _ = translator
1847 target_has_largefiles = self._has_largefiles(target)
1853 target_has_largefiles = self._has_largefiles(target)
1848 source_has_largefiles = self._has_largefiles(source)
1854 source_has_largefiles = self._has_largefiles(source)
1849 merge_possible = True
1855 merge_possible = True
1850 message = u''
1856 message = u''
1851
1857
1852 if target_has_largefiles != source_has_largefiles:
1858 if target_has_largefiles != source_has_largefiles:
1853 merge_possible = False
1859 merge_possible = False
1854 if source_has_largefiles:
1860 if source_has_largefiles:
1855 message = _(
1861 message = _(
1856 'Target repository large files support is disabled.')
1862 'Target repository large files support is disabled.')
1857 else:
1863 else:
1858 message = _(
1864 message = _(
1859 'Source repository large files support is disabled.')
1865 'Source repository large files support is disabled.')
1860
1866
1861 return merge_possible, message
1867 return merge_possible, message
1862
1868
1863 def _has_largefiles(self, repo):
1869 def _has_largefiles(self, repo):
1864 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1870 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1865 'extensions', 'largefiles')
1871 'extensions', 'largefiles')
1866 return largefiles_ui and largefiles_ui[0].active
1872 return largefiles_ui and largefiles_ui[0].active
1867
1873
    def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
        """
        Try to merge the pull request and return the merge status.

        Reuses the merge state cached on the pull request unless the source
        or target moved (or ``force_shadow_repo_refresh`` is set), in which
        case a fresh dry-run merge is performed.

        :returns: a ``MergeResponse``.
        """
        log.debug(
            "Trying out if the pull request %s can be merged. Force_refresh=%s",
            pull_request.pull_request_id, force_shadow_repo_refresh)
        target_vcs = pull_request.target_repo.scm_instance()
        # Refresh the target reference.
        try:
            target_ref = self._refresh_reference(
                pull_request.target_ref_parts, target_vcs)
        except CommitDoesNotExistError:
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.MISSING_TARGET_REF,
                metadata={'target_ref': pull_request.target_ref_parts})
            return merge_state

        target_locked = pull_request.target_repo.locked
        if target_locked and target_locked[0]:
            locked_by = 'user:{}'.format(target_locked[0])
            log.debug("The target repository is locked by %s.", locked_by)
            merge_state = MergeResponse(
                False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
                metadata={'locked_by': locked_by})
        elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
                pull_request, target_ref):
            log.debug("Refreshing the merge status of the repository.")
            merge_state = self._refresh_merge_state(
                pull_request, target_vcs, target_ref)
        else:
            # cached path: rebuild a MergeResponse from the stored state
            possible = pull_request.last_merge_status == MergeFailureReason.NONE
            metadata = {
                'unresolved_files': '',
                'target_ref': pull_request.target_ref_parts,
                'source_ref': pull_request.source_ref_parts,
            }
            if pull_request.last_merge_metadata:
                metadata.update(pull_request.last_merge_metadata_parsed)

            if not possible and target_ref.type == 'branch':
                # NOTE(marcink): case for mercurial multiple heads on branch
                heads = target_vcs._heads(target_ref.name)
                if len(heads) != 1:
                    heads = '\n,'.join(target_vcs._heads(target_ref.name))
                    metadata.update({
                        'heads': heads
                    })

            merge_state = MergeResponse(
                possible, False, None, pull_request.last_merge_status, metadata=metadata)

        return merge_state
1921
1927
1922 def _refresh_reference(self, reference, vcs_repository):
1928 def _refresh_reference(self, reference, vcs_repository):
1923 if reference.type in self.UPDATABLE_REF_TYPES:
1929 if reference.type in self.UPDATABLE_REF_TYPES:
1924 name_or_id = reference.name
1930 name_or_id = reference.name
1925 else:
1931 else:
1926 name_or_id = reference.commit_id
1932 name_or_id = reference.commit_id
1927
1933
1928 refreshed_commit = vcs_repository.get_commit(name_or_id)
1934 refreshed_commit = vcs_repository.get_commit(name_or_id)
1929 refreshed_reference = Reference(
1935 refreshed_reference = Reference(
1930 reference.type, reference.name, refreshed_commit.raw_id)
1936 reference.type, reference.name, refreshed_commit.raw_id)
1931 return refreshed_reference
1937 return refreshed_reference
1932
1938
1933 def _needs_merge_state_refresh(self, pull_request, target_reference):
1939 def _needs_merge_state_refresh(self, pull_request, target_reference):
1934 return not(
1940 return not(
1935 pull_request.revisions and
1941 pull_request.revisions and
1936 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1942 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1937 target_reference.commit_id == pull_request._last_merge_target_rev)
1943 target_reference.commit_id == pull_request._last_merge_target_rev)
1938
1944
    def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
        """
        Run a dry-run merge in the shadow workspace and cache the outcome
        (revisions, status, metadata, shadow ref) on the pull request.

        :returns: the ``MergeResponse`` from the backend.
        """
        workspace_id = self._workspace_id(pull_request)
        source_vcs = pull_request.source_repo.scm_instance()
        repo_id = pull_request.target_repo.repo_id
        use_rebase = self._use_rebase_for_merging(pull_request)
        close_branch = self._close_branch_before_merging(pull_request)
        merge_state = target_vcs.merge(
            repo_id, workspace_id,
            target_reference, source_vcs, pull_request.source_ref_parts,
            dry_run=True, use_rebase=use_rebase,
            close_branch=close_branch)

        # Do not store the response if there was an unknown error.
        if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
            pull_request._last_merge_source_rev = \
                pull_request.source_ref_parts.commit_id
            pull_request._last_merge_target_rev = target_reference.commit_id
            pull_request.last_merge_status = merge_state.failure_reason
            pull_request.last_merge_metadata = merge_state.metadata

            pull_request.shadow_merge_ref = merge_state.merge_ref
            Session().add(pull_request)
            Session().commit()

        return merge_state
1964
1970
1965 def _workspace_id(self, pull_request):
1971 def _workspace_id(self, pull_request):
1966 workspace_id = 'pr-%s' % pull_request.pull_request_id
1972 workspace_id = 'pr-%s' % pull_request.pull_request_id
1967 return workspace_id
1973 return workspace_id
1968
1974
    def generate_repo_data(self, repo, commit_id=None, branch=None,
                           bookmark=None, translator=None):
        """
        Build a serializable description of ``repo`` — owner info, refs and
        select2-ready ref groups — for the pull-request source/target
        selector widgets.

        :param commit_id: a commit that must appear in the ref list.
        :param branch: branch to pre-select.
        :param bookmark: bookmark to pre-select.
        :returns: dict with ``user``, ``name``, ``link``, ``description``
            and ``refs`` keys.
        """
        from rhodecode.model.repo import RepoModel

        all_refs, selected_ref = \
            self._get_repo_pullrequest_sources(
                repo.scm_instance(), commit_id=commit_id,
                branch=branch, bookmark=bookmark, translator=translator)

        # shape refs into the nested structure select2 expects
        refs_select2 = []
        for element in all_refs:
            children = [{'id': x[0], 'text': x[1]} for x in element[0]]
            refs_select2.append({'text': element[1], 'children': children})

        return {
            'user': {
                'user_id': repo.user.user_id,
                'username': repo.user.username,
                'firstname': repo.user.first_name,
                'lastname': repo.user.last_name,
                'gravatar_link': h.gravatar_url(repo.user.email, 14),
            },
            'name': repo.repo_name,
            'link': RepoModel().get_url(repo),
            'description': h.chop_at_smart(repo.description_safe, '\n'),
            'refs': {
                'all_refs': all_refs,
                'selected_ref': selected_ref,
                'select2_refs': refs_select2
            }
        }
2000
2006
2001 def generate_pullrequest_title(self, source, source_ref, target):
2007 def generate_pullrequest_title(self, source, source_ref, target):
2002 return u'{source}#{at_ref} to {target}'.format(
2008 return u'{source}#{at_ref} to {target}'.format(
2003 source=source,
2009 source=source,
2004 at_ref=source_ref,
2010 at_ref=source_ref,
2005 target=target,
2011 target=target,
2006 )
2012 )
2007
2013
2008 def _cleanup_merge_workspace(self, pull_request):
2014 def _cleanup_merge_workspace(self, pull_request):
2009 # Merging related cleanup
2015 # Merging related cleanup
2010 repo_id = pull_request.target_repo.repo_id
2016 repo_id = pull_request.target_repo.repo_id
2011 target_scm = pull_request.target_repo.scm_instance()
2017 target_scm = pull_request.target_repo.scm_instance()
2012 workspace_id = self._workspace_id(pull_request)
2018 workspace_id = self._workspace_id(pull_request)
2013
2019
2014 try:
2020 try:
2015 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2021 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2016 except NotImplementedError:
2022 except NotImplementedError:
2017 pass
2023 pass
2018
2024
2019 def _get_repo_pullrequest_sources(
2025 def _get_repo_pullrequest_sources(
2020 self, repo, commit_id=None, branch=None, bookmark=None,
2026 self, repo, commit_id=None, branch=None, bookmark=None,
2021 translator=None):
2027 translator=None):
2022 """
2028 """
2023 Return a structure with repo's interesting commits, suitable for
2029 Return a structure with repo's interesting commits, suitable for
2024 the selectors in pullrequest controller
2030 the selectors in pullrequest controller
2025
2031
2026 :param commit_id: a commit that must be in the list somehow
2032 :param commit_id: a commit that must be in the list somehow
2027 and selected by default
2033 and selected by default
2028 :param branch: a branch that must be in the list and selected
2034 :param branch: a branch that must be in the list and selected
2029 by default - even if closed
2035 by default - even if closed
2030 :param bookmark: a bookmark that must be in the list and selected
2036 :param bookmark: a bookmark that must be in the list and selected
2031 """
2037 """
2032 _ = translator or get_current_request().translate
2038 _ = translator or get_current_request().translate
2033
2039
2034 commit_id = safe_str(commit_id) if commit_id else None
2040 commit_id = safe_str(commit_id) if commit_id else None
2035 branch = safe_unicode(branch) if branch else None
2041 branch = safe_str(branch) if branch else None
2036 bookmark = safe_unicode(bookmark) if bookmark else None
2042 bookmark = safe_str(bookmark) if bookmark else None
2037
2043
2038 selected = None
2044 selected = None
2039
2045
2040 # order matters: first source that has commit_id in it will be selected
2046 # order matters: first source that has commit_id in it will be selected
2041 sources = []
2047 sources = []
2042 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
2048 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
2043 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
2049 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
2044
2050
2045 if commit_id:
2051 if commit_id:
2046 ref_commit = (h.short_id(commit_id), commit_id)
2052 ref_commit = (h.short_id(commit_id), commit_id)
2047 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
2053 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
2048
2054
2049 sources.append(
2055 sources.append(
2050 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
2056 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
2051 )
2057 )
2052
2058
2053 groups = []
2059 groups = []
2054
2060
2055 for group_key, ref_list, group_name, match in sources:
2061 for group_key, ref_list, group_name, match in sources:
2056 group_refs = []
2062 group_refs = []
2057 for ref_name, ref_id in ref_list:
2063 for ref_name, ref_id in ref_list:
2058 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
2064 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
2059 group_refs.append((ref_key, ref_name))
2065 group_refs.append((ref_key, ref_name))
2060
2066
2061 if not selected:
2067 if not selected:
2062 if set([commit_id, match]) & set([ref_id, ref_name]):
2068 if set([commit_id, match]) & set([ref_id, ref_name]):
2063 selected = ref_key
2069 selected = ref_key
2064
2070
2065 if group_refs:
2071 if group_refs:
2066 groups.append((group_refs, group_name))
2072 groups.append((group_refs, group_name))
2067
2073
2068 if not selected:
2074 if not selected:
2069 ref = commit_id or branch or bookmark
2075 ref = commit_id or branch or bookmark
2070 if ref:
2076 if ref:
2071 raise CommitDoesNotExistError(
2077 raise CommitDoesNotExistError(
2072 u'No commit refs could be found matching: {}'.format(ref))
2078 u'No commit refs could be found matching: {}'.format(ref))
2073 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2079 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2074 selected = u'branch:{}:{}'.format(
2080 selected = u'branch:{}:{}'.format(
2075 safe_unicode(repo.DEFAULT_BRANCH_NAME),
2081 safe_str(repo.DEFAULT_BRANCH_NAME),
2076 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
2082 safe_str(repo.branches[repo.DEFAULT_BRANCH_NAME])
2077 )
2083 )
2078 elif repo.commit_ids:
2084 elif repo.commit_ids:
2079 # make the user select in this case
2085 # make the user select in this case
2080 selected = None
2086 selected = None
2081 else:
2087 else:
2082 raise EmptyRepositoryError()
2088 raise EmptyRepositoryError()
2083 return groups, selected
2089 return groups, selected
2084
2090
2085 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2091 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2086 hide_whitespace_changes, diff_context):
2092 hide_whitespace_changes, diff_context):
2087
2093
2088 return self._get_diff_from_pr_or_version(
2094 return self._get_diff_from_pr_or_version(
2089 source_repo, source_ref_id, target_ref_id,
2095 source_repo, source_ref_id, target_ref_id,
2090 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2096 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2091
2097
2092 def _get_diff_from_pr_or_version(
2098 def _get_diff_from_pr_or_version(
2093 self, source_repo, source_ref_id, target_ref_id,
2099 self, source_repo, source_ref_id, target_ref_id,
2094 hide_whitespace_changes, diff_context):
2100 hide_whitespace_changes, diff_context):
2095
2101
2096 target_commit = source_repo.get_commit(
2102 target_commit = source_repo.get_commit(
2097 commit_id=safe_str(target_ref_id))
2103 commit_id=safe_str(target_ref_id))
2098 source_commit = source_repo.get_commit(
2104 source_commit = source_repo.get_commit(
2099 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
2105 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
2100 if isinstance(source_repo, Repository):
2106 if isinstance(source_repo, Repository):
2101 vcs_repo = source_repo.scm_instance()
2107 vcs_repo = source_repo.scm_instance()
2102 else:
2108 else:
2103 vcs_repo = source_repo
2109 vcs_repo = source_repo
2104
2110
2105 # TODO: johbo: In the context of an update, we cannot reach
2111 # TODO: johbo: In the context of an update, we cannot reach
2106 # the old commit anymore with our normal mechanisms. It needs
2112 # the old commit anymore with our normal mechanisms. It needs
2107 # some sort of special support in the vcs layer to avoid this
2113 # some sort of special support in the vcs layer to avoid this
2108 # workaround.
2114 # workaround.
2109 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
2115 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
2110 vcs_repo.alias == 'git'):
2116 vcs_repo.alias == 'git'):
2111 source_commit.raw_id = safe_str(source_ref_id)
2117 source_commit.raw_id = safe_str(source_ref_id)
2112
2118
2113 log.debug('calculating diff between '
2119 log.debug('calculating diff between '
2114 'source_ref:%s and target_ref:%s for repo `%s`',
2120 'source_ref:%s and target_ref:%s for repo `%s`',
2115 target_ref_id, source_ref_id,
2121 target_ref_id, source_ref_id,
2116 safe_unicode(vcs_repo.path))
2122 safe_str(vcs_repo.path))
2117
2123
2118 vcs_diff = vcs_repo.get_diff(
2124 vcs_diff = vcs_repo.get_diff(
2119 commit1=target_commit, commit2=source_commit,
2125 commit1=target_commit, commit2=source_commit,
2120 ignore_whitespace=hide_whitespace_changes, context=diff_context)
2126 ignore_whitespace=hide_whitespace_changes, context=diff_context)
2121 return vcs_diff
2127 return vcs_diff
2122
2128
2123 def _is_merge_enabled(self, pull_request):
2129 def _is_merge_enabled(self, pull_request):
2124 return self._get_general_setting(
2130 return self._get_general_setting(
2125 pull_request, 'rhodecode_pr_merge_enabled')
2131 pull_request, 'rhodecode_pr_merge_enabled')
2126
2132
2127 def _use_rebase_for_merging(self, pull_request):
2133 def _use_rebase_for_merging(self, pull_request):
2128 repo_type = pull_request.target_repo.repo_type
2134 repo_type = pull_request.target_repo.repo_type
2129 if repo_type == 'hg':
2135 if repo_type == 'hg':
2130 return self._get_general_setting(
2136 return self._get_general_setting(
2131 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2137 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2132 elif repo_type == 'git':
2138 elif repo_type == 'git':
2133 return self._get_general_setting(
2139 return self._get_general_setting(
2134 pull_request, 'rhodecode_git_use_rebase_for_merging')
2140 pull_request, 'rhodecode_git_use_rebase_for_merging')
2135
2141
2136 return False
2142 return False
2137
2143
2138 def _user_name_for_merging(self, pull_request, user):
2144 def _user_name_for_merging(self, pull_request, user):
2139 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2145 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2140 if env_user_name_attr and hasattr(user, env_user_name_attr):
2146 if env_user_name_attr and hasattr(user, env_user_name_attr):
2141 user_name_attr = env_user_name_attr
2147 user_name_attr = env_user_name_attr
2142 else:
2148 else:
2143 user_name_attr = 'short_contact'
2149 user_name_attr = 'short_contact'
2144
2150
2145 user_name = getattr(user, user_name_attr)
2151 user_name = getattr(user, user_name_attr)
2146 return user_name
2152 return user_name
2147
2153
2148 def _close_branch_before_merging(self, pull_request):
2154 def _close_branch_before_merging(self, pull_request):
2149 repo_type = pull_request.target_repo.repo_type
2155 repo_type = pull_request.target_repo.repo_type
2150 if repo_type == 'hg':
2156 if repo_type == 'hg':
2151 return self._get_general_setting(
2157 return self._get_general_setting(
2152 pull_request, 'rhodecode_hg_close_branch_before_merging')
2158 pull_request, 'rhodecode_hg_close_branch_before_merging')
2153 elif repo_type == 'git':
2159 elif repo_type == 'git':
2154 return self._get_general_setting(
2160 return self._get_general_setting(
2155 pull_request, 'rhodecode_git_close_branch_before_merging')
2161 pull_request, 'rhodecode_git_close_branch_before_merging')
2156
2162
2157 return False
2163 return False
2158
2164
2159 def _get_general_setting(self, pull_request, settings_key, default=False):
2165 def _get_general_setting(self, pull_request, settings_key, default=False):
2160 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2166 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2161 settings = settings_model.get_general_settings()
2167 settings = settings_model.get_general_settings()
2162 return settings.get(settings_key, default)
2168 return settings.get(settings_key, default)
2163
2169
2164 def _log_audit_action(self, action, action_data, user, pull_request):
2170 def _log_audit_action(self, action, action_data, user, pull_request):
2165 audit_logger.store(
2171 audit_logger.store(
2166 action=action,
2172 action=action,
2167 action_data=action_data,
2173 action_data=action_data,
2168 user=user,
2174 user=user,
2169 repo=pull_request.target_repo)
2175 repo=pull_request.target_repo)
2170
2176
2171 def get_reviewer_functions(self):
2177 def get_reviewer_functions(self):
2172 """
2178 """
2173 Fetches functions for validation and fetching default reviewers.
2179 Fetches functions for validation and fetching default reviewers.
2174 If available we use the EE package, else we fallback to CE
2180 If available we use the EE package, else we fallback to CE
2175 package functions
2181 package functions
2176 """
2182 """
2177 try:
2183 try:
2178 from rc_reviewers.utils import get_default_reviewers_data
2184 from rc_reviewers.utils import get_default_reviewers_data
2179 from rc_reviewers.utils import validate_default_reviewers
2185 from rc_reviewers.utils import validate_default_reviewers
2180 from rc_reviewers.utils import validate_observers
2186 from rc_reviewers.utils import validate_observers
2181 except ImportError:
2187 except ImportError:
2182 from rhodecode.apps.repository.utils import get_default_reviewers_data
2188 from rhodecode.apps.repository.utils import get_default_reviewers_data
2183 from rhodecode.apps.repository.utils import validate_default_reviewers
2189 from rhodecode.apps.repository.utils import validate_default_reviewers
2184 from rhodecode.apps.repository.utils import validate_observers
2190 from rhodecode.apps.repository.utils import validate_observers
2185
2191
2186 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2192 return get_default_reviewers_data, validate_default_reviewers, validate_observers
2187
2193
2188
2194
2189 class MergeCheck(object):
2195 class MergeCheck(object):
2190 """
2196 """
2191 Perform Merge Checks and returns a check object which stores information
2197 Perform Merge Checks and returns a check object which stores information
2192 about merge errors, and merge conditions
2198 about merge errors, and merge conditions
2193 """
2199 """
2194 TODO_CHECK = 'todo'
2200 TODO_CHECK = 'todo'
2195 PERM_CHECK = 'perm'
2201 PERM_CHECK = 'perm'
2196 REVIEW_CHECK = 'review'
2202 REVIEW_CHECK = 'review'
2197 MERGE_CHECK = 'merge'
2203 MERGE_CHECK = 'merge'
2198 WIP_CHECK = 'wip'
2204 WIP_CHECK = 'wip'
2199
2205
2200 def __init__(self):
2206 def __init__(self):
2201 self.review_status = None
2207 self.review_status = None
2202 self.merge_possible = None
2208 self.merge_possible = None
2203 self.merge_msg = ''
2209 self.merge_msg = ''
2204 self.merge_response = None
2210 self.merge_response = None
2205 self.failed = None
2211 self.failed = None
2206 self.errors = []
2212 self.errors = []
2207 self.error_details = OrderedDict()
2213 self.error_details = OrderedDict()
2208 self.source_commit = AttributeDict()
2214 self.source_commit = AttributeDict()
2209 self.target_commit = AttributeDict()
2215 self.target_commit = AttributeDict()
2210 self.reviewers_count = 0
2216 self.reviewers_count = 0
2211 self.observers_count = 0
2217 self.observers_count = 0
2212
2218
2213 def __repr__(self):
2219 def __repr__(self):
2214 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2220 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2215 self.merge_possible, self.failed, self.errors)
2221 self.merge_possible, self.failed, self.errors)
2216
2222
2217 def push_error(self, error_type, message, error_key, details):
2223 def push_error(self, error_type, message, error_key, details):
2218 self.failed = True
2224 self.failed = True
2219 self.errors.append([error_type, message])
2225 self.errors.append([error_type, message])
2220 self.error_details[error_key] = dict(
2226 self.error_details[error_key] = dict(
2221 details=details,
2227 details=details,
2222 error_type=error_type,
2228 error_type=error_type,
2223 message=message
2229 message=message
2224 )
2230 )
2225
2231
2226 @classmethod
2232 @classmethod
2227 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2233 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2228 force_shadow_repo_refresh=False):
2234 force_shadow_repo_refresh=False):
2229 _ = translator
2235 _ = translator
2230 merge_check = cls()
2236 merge_check = cls()
2231
2237
2232 # title has WIP:
2238 # title has WIP:
2233 if pull_request.work_in_progress:
2239 if pull_request.work_in_progress:
2234 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2240 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2235
2241
2236 msg = _('WIP marker in title prevents from accidental merge.')
2242 msg = _('WIP marker in title prevents from accidental merge.')
2237 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2243 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2238 if fail_early:
2244 if fail_early:
2239 return merge_check
2245 return merge_check
2240
2246
2241 # permissions to merge
2247 # permissions to merge
2242 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2248 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2243 if not user_allowed_to_merge:
2249 if not user_allowed_to_merge:
2244 log.debug("MergeCheck: cannot merge, approval is pending.")
2250 log.debug("MergeCheck: cannot merge, approval is pending.")
2245
2251
2246 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2252 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2247 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2253 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2248 if fail_early:
2254 if fail_early:
2249 return merge_check
2255 return merge_check
2250
2256
2251 # permission to merge into the target branch
2257 # permission to merge into the target branch
2252 target_commit_id = pull_request.target_ref_parts.commit_id
2258 target_commit_id = pull_request.target_ref_parts.commit_id
2253 if pull_request.target_ref_parts.type == 'branch':
2259 if pull_request.target_ref_parts.type == 'branch':
2254 branch_name = pull_request.target_ref_parts.name
2260 branch_name = pull_request.target_ref_parts.name
2255 else:
2261 else:
2256 # for mercurial we can always figure out the branch from the commit
2262 # for mercurial we can always figure out the branch from the commit
2257 # in case of bookmark
2263 # in case of bookmark
2258 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2264 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2259 branch_name = target_commit.branch
2265 branch_name = target_commit.branch
2260
2266
2261 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2267 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2262 pull_request.target_repo.repo_name, branch_name)
2268 pull_request.target_repo.repo_name, branch_name)
2263 if branch_perm and branch_perm == 'branch.none':
2269 if branch_perm and branch_perm == 'branch.none':
2264 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2270 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2265 branch_name, rule)
2271 branch_name, rule)
2266 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2272 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2267 if fail_early:
2273 if fail_early:
2268 return merge_check
2274 return merge_check
2269
2275
2270 # review status, must be always present
2276 # review status, must be always present
2271 review_status = pull_request.calculated_review_status()
2277 review_status = pull_request.calculated_review_status()
2272 merge_check.review_status = review_status
2278 merge_check.review_status = review_status
2273 merge_check.reviewers_count = pull_request.reviewers_count
2279 merge_check.reviewers_count = pull_request.reviewers_count
2274 merge_check.observers_count = pull_request.observers_count
2280 merge_check.observers_count = pull_request.observers_count
2275
2281
2276 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2282 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2277 if not status_approved and merge_check.reviewers_count:
2283 if not status_approved and merge_check.reviewers_count:
2278 log.debug("MergeCheck: cannot merge, approval is pending.")
2284 log.debug("MergeCheck: cannot merge, approval is pending.")
2279 msg = _('Pull request reviewer approval is pending.')
2285 msg = _('Pull request reviewer approval is pending.')
2280
2286
2281 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2287 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2282
2288
2283 if fail_early:
2289 if fail_early:
2284 return merge_check
2290 return merge_check
2285
2291
2286 # left over TODOs
2292 # left over TODOs
2287 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2293 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2288 if todos:
2294 if todos:
2289 log.debug("MergeCheck: cannot merge, {} "
2295 log.debug("MergeCheck: cannot merge, {} "
2290 "unresolved TODOs left.".format(len(todos)))
2296 "unresolved TODOs left.".format(len(todos)))
2291
2297
2292 if len(todos) == 1:
2298 if len(todos) == 1:
2293 msg = _('Cannot merge, {} TODO still not resolved.').format(
2299 msg = _('Cannot merge, {} TODO still not resolved.').format(
2294 len(todos))
2300 len(todos))
2295 else:
2301 else:
2296 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2302 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2297 len(todos))
2303 len(todos))
2298
2304
2299 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2305 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2300
2306
2301 if fail_early:
2307 if fail_early:
2302 return merge_check
2308 return merge_check
2303
2309
2304 # merge possible, here is the filesystem simulation + shadow repo
2310 # merge possible, here is the filesystem simulation + shadow repo
2305 merge_response, merge_status, msg = PullRequestModel().merge_status(
2311 merge_response, merge_status, msg = PullRequestModel().merge_status(
2306 pull_request, translator=translator,
2312 pull_request, translator=translator,
2307 force_shadow_repo_refresh=force_shadow_repo_refresh)
2313 force_shadow_repo_refresh=force_shadow_repo_refresh)
2308
2314
2309 merge_check.merge_possible = merge_status
2315 merge_check.merge_possible = merge_status
2310 merge_check.merge_msg = msg
2316 merge_check.merge_msg = msg
2311 merge_check.merge_response = merge_response
2317 merge_check.merge_response = merge_response
2312
2318
2313 source_ref_id = pull_request.source_ref_parts.commit_id
2319 source_ref_id = pull_request.source_ref_parts.commit_id
2314 target_ref_id = pull_request.target_ref_parts.commit_id
2320 target_ref_id = pull_request.target_ref_parts.commit_id
2315
2321
2316 try:
2322 try:
2317 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2323 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2318 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2324 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2319 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2325 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2320 merge_check.source_commit.current_raw_id = source_commit.raw_id
2326 merge_check.source_commit.current_raw_id = source_commit.raw_id
2321 merge_check.source_commit.previous_raw_id = source_ref_id
2327 merge_check.source_commit.previous_raw_id = source_ref_id
2322
2328
2323 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2329 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2324 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2330 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2325 merge_check.target_commit.current_raw_id = target_commit.raw_id
2331 merge_check.target_commit.current_raw_id = target_commit.raw_id
2326 merge_check.target_commit.previous_raw_id = target_ref_id
2332 merge_check.target_commit.previous_raw_id = target_ref_id
2327 except (SourceRefMissing, TargetRefMissing):
2333 except (SourceRefMissing, TargetRefMissing):
2328 pass
2334 pass
2329
2335
2330 if not merge_status:
2336 if not merge_status:
2331 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2337 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2332 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2338 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2333
2339
2334 if fail_early:
2340 if fail_early:
2335 return merge_check
2341 return merge_check
2336
2342
2337 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2343 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2338 return merge_check
2344 return merge_check
2339
2345
2340 @classmethod
2346 @classmethod
2341 def get_merge_conditions(cls, pull_request, translator):
2347 def get_merge_conditions(cls, pull_request, translator):
2342 _ = translator
2348 _ = translator
2343 merge_details = {}
2349 merge_details = {}
2344
2350
2345 model = PullRequestModel()
2351 model = PullRequestModel()
2346 use_rebase = model._use_rebase_for_merging(pull_request)
2352 use_rebase = model._use_rebase_for_merging(pull_request)
2347
2353
2348 if use_rebase:
2354 if use_rebase:
2349 merge_details['merge_strategy'] = dict(
2355 merge_details['merge_strategy'] = dict(
2350 details={},
2356 details={},
2351 message=_('Merge strategy: rebase')
2357 message=_('Merge strategy: rebase')
2352 )
2358 )
2353 else:
2359 else:
2354 merge_details['merge_strategy'] = dict(
2360 merge_details['merge_strategy'] = dict(
2355 details={},
2361 details={},
2356 message=_('Merge strategy: explicit merge commit')
2362 message=_('Merge strategy: explicit merge commit')
2357 )
2363 )
2358
2364
2359 close_branch = model._close_branch_before_merging(pull_request)
2365 close_branch = model._close_branch_before_merging(pull_request)
2360 if close_branch:
2366 if close_branch:
2361 repo_type = pull_request.target_repo.repo_type
2367 repo_type = pull_request.target_repo.repo_type
2362 close_msg = ''
2368 close_msg = ''
2363 if repo_type == 'hg':
2369 if repo_type == 'hg':
2364 close_msg = _('Source branch will be closed before the merge.')
2370 close_msg = _('Source branch will be closed before the merge.')
2365 elif repo_type == 'git':
2371 elif repo_type == 'git':
2366 close_msg = _('Source branch will be deleted after the merge.')
2372 close_msg = _('Source branch will be deleted after the merge.')
2367
2373
2368 merge_details['close_branch'] = dict(
2374 merge_details['close_branch'] = dict(
2369 details={},
2375 details={},
2370 message=close_msg
2376 message=close_msg
2371 )
2377 )
2372
2378
2373 return merge_details
2379 return merge_details
2374
2380
2375
2381
2376 ChangeTuple = collections.namedtuple(
2382 @dataclasses.dataclass
2377 'ChangeTuple', ['added', 'common', 'removed', 'total'])
2383 class ChangeTuple:
2384 added: list
2385 common: list
2386 removed: list
2387 total: list
2378
2388
2379 FileChangeTuple = collections.namedtuple(
2389
2380 'FileChangeTuple', ['added', 'modified', 'removed'])
2390 @dataclasses.dataclass
2391 class FileChangeTuple:
2392 added: list
2393 modified: list
2394 removed: list
@@ -1,1196 +1,1199 b''
1
1
2 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import os
20 import os
21 import re
21 import re
22 import shutil
22 import shutil
23 import time
23 import time
24 import logging
24 import logging
25 import traceback
25 import traceback
26 import datetime
26 import datetime
27
27
28 from pyramid.threadlocal import get_current_request
28 from pyramid.threadlocal import get_current_request
29 from sqlalchemy.orm import aliased
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30
31
31 from rhodecode import events
32 from rhodecode import events
32 from rhodecode.lib.auth import HasUserGroupPermissionAny
33 from rhodecode.lib.auth import HasUserGroupPermissionAny
33 from rhodecode.lib.caching_query import FromCache
34 from rhodecode.lib.caching_query import FromCache
34 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
35 from rhodecode.lib import hooks_base
36 from rhodecode.lib import hooks_base
36 from rhodecode.lib.user_log_filter import user_log_filter
37 from rhodecode.lib.user_log_filter import user_log_filter
37 from rhodecode.lib.utils import make_db_config
38 from rhodecode.lib.utils import make_db_config
38 from rhodecode.lib.utils2 import (
39 from rhodecode.lib.utils2 import (
39 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
40 safe_str, remove_prefix, obfuscate_url_pw,
40 get_current_rhodecode_user, safe_int, action_logger_generic)
41 get_current_rhodecode_user, safe_int, action_logger_generic)
41 from rhodecode.lib.vcs.backends import get_backend
42 from rhodecode.lib.vcs.backends import get_backend
42 from rhodecode.model import BaseModel
43 from rhodecode.model import BaseModel
43 from rhodecode.model.db import (
44 from rhodecode.model.db import (
44 _hash_key, func, case, joinedload, or_, in_filter_generator,
45 _hash_key, func, case, joinedload, or_, in_filter_generator,
45 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
46 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
47 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
48 from rhodecode.model.permission import PermissionModel
49 from rhodecode.model.permission import PermissionModel
49 from rhodecode.model.settings import VcsSettingsModel
50 from rhodecode.model.settings import VcsSettingsModel
50
51
51 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
52
53
53
54
54 class RepoModel(BaseModel):
55 class RepoModel(BaseModel):
55
56
56 cls = Repository
57 cls = Repository
57
58
58 def _get_user_group(self, users_group):
59 def _get_user_group(self, users_group):
59 return self._get_instance(UserGroup, users_group,
60 return self._get_instance(UserGroup, users_group,
60 callback=UserGroup.get_by_group_name)
61 callback=UserGroup.get_by_group_name)
61
62
62 def _get_repo_group(self, repo_group):
63 def _get_repo_group(self, repo_group):
63 return self._get_instance(RepoGroup, repo_group,
64 return self._get_instance(RepoGroup, repo_group,
64 callback=RepoGroup.get_by_group_name)
65 callback=RepoGroup.get_by_group_name)
65
66
66 def _create_default_perms(self, repository, private):
67 def _create_default_perms(self, repository, private):
67 # create default permission
68 # create default permission
68 default = 'repository.read'
69 default = 'repository.read'
69 def_user = User.get_default_user()
70 def_user = User.get_default_user()
70 for p in def_user.user_perms:
71 for p in def_user.user_perms:
71 if p.permission.permission_name.startswith('repository.'):
72 if p.permission.permission_name.startswith('repository.'):
72 default = p.permission.permission_name
73 default = p.permission.permission_name
73 break
74 break
74
75
75 default_perm = 'repository.none' if private else default
76 default_perm = 'repository.none' if private else default
76
77
77 repo_to_perm = UserRepoToPerm()
78 repo_to_perm = UserRepoToPerm()
78 repo_to_perm.permission = Permission.get_by_key(default_perm)
79 repo_to_perm.permission = Permission.get_by_key(default_perm)
79
80
80 repo_to_perm.repository = repository
81 repo_to_perm.repository = repository
81 repo_to_perm.user_id = def_user.user_id
82 repo_to_perm.user = def_user
82
83
83 return repo_to_perm
84 return repo_to_perm
84
85
85 @LazyProperty
86 @LazyProperty
86 def repos_path(self):
87 def repos_path(self):
87 """
88 """
88 Gets the repositories root path from database
89 Gets the repositories root path from database
89 """
90 """
90 settings_model = VcsSettingsModel(sa=self.sa)
91 settings_model = VcsSettingsModel(sa=self.sa)
91 return settings_model.get_repos_location()
92 return settings_model.get_repos_location()
92
93
93 def get(self, repo_id):
94 def get(self, repo_id):
94 repo = self.sa.query(Repository) \
95 repo = self.sa.query(Repository) \
95 .filter(Repository.repo_id == repo_id)
96 .filter(Repository.repo_id == repo_id)
96
97
97 return repo.scalar()
98 return repo.scalar()
98
99
99 def get_repo(self, repository):
100 def get_repo(self, repository):
100 return self._get_repo(repository)
101 return self._get_repo(repository)
101
102
102 def get_by_repo_name(self, repo_name, cache=False):
103 def get_by_repo_name(self, repo_name, cache=False):
103 repo = self.sa.query(Repository) \
104 repo = self.sa.query(Repository) \
104 .filter(Repository.repo_name == repo_name)
105 .filter(Repository.repo_name == repo_name)
105
106
106 if cache:
107 if cache:
107 name_key = _hash_key(repo_name)
108 name_key = _hash_key(repo_name)
108 repo = repo.options(
109 repo = repo.options(
109 FromCache("sql_cache_short", f"get_repo_{name_key}"))
110 FromCache("sql_cache_short", f"get_repo_{name_key}"))
110 return repo.scalar()
111 return repo.scalar()
111
112
112 def _extract_id_from_repo_name(self, repo_name):
113 def _extract_id_from_repo_name(self, repo_name):
113 if repo_name.startswith('/'):
114 if repo_name.startswith('/'):
114 repo_name = repo_name.lstrip('/')
115 repo_name = repo_name.lstrip('/')
115 by_id_match = re.match(r'^_(\d{1,})', repo_name)
116 by_id_match = re.match(r'^_(\d+)', repo_name)
116 if by_id_match:
117 if by_id_match:
117 return by_id_match.groups()[0]
118 return by_id_match.groups()[0]
118
119
119 def get_repo_by_id(self, repo_name):
120 def get_repo_by_id(self, repo_name):
120 """
121 """
121 Extracts repo_name by id from special urls.
122 Extracts repo_name by id from special urls.
122 Example url is _11/repo_name
123 Example url is _11/repo_name
123
124
124 :param repo_name:
125 :param repo_name:
125 :return: repo object if matched else None
126 :return: repo object if matched else None
126 """
127 """
127 _repo_id = None
128 _repo_id = None
128 try:
129 try:
129 _repo_id = self._extract_id_from_repo_name(repo_name)
130 _repo_id = self._extract_id_from_repo_name(repo_name)
130 if _repo_id:
131 if _repo_id:
131 return self.get(_repo_id)
132 return self.get(_repo_id)
132 except Exception:
133 except Exception:
133 log.exception('Failed to extract repo_name from URL')
134 log.exception('Failed to extract repo_name from URL')
134 if _repo_id:
135 if _repo_id:
135 Session().rollback()
136 Session().rollback()
136
137
137 return None
138 return None
138
139
139 def get_repos_for_root(self, root, traverse=False):
140 def get_repos_for_root(self, root, traverse=False):
140 if traverse:
141 if traverse:
141 like_expression = u'{}%'.format(safe_unicode(root))
142 like_expression = u'{}%'.format(safe_str(root))
142 repos = Repository.query().filter(
143 repos = Repository.query().filter(
143 Repository.repo_name.like(like_expression)).all()
144 Repository.repo_name.like(like_expression)).all()
144 else:
145 else:
145 if root and not isinstance(root, RepoGroup):
146 if root and not isinstance(root, RepoGroup):
146 raise ValueError(
147 raise ValueError(
147 'Root must be an instance '
148 'Root must be an instance '
148 'of RepoGroup, got:{} instead'.format(type(root)))
149 'of RepoGroup, got:{} instead'.format(type(root)))
149 repos = Repository.query().filter(Repository.group == root).all()
150 repos = Repository.query().filter(Repository.group == root).all()
150 return repos
151 return repos
151
152
152 def get_url(self, repo, request=None, permalink=False):
153 def get_url(self, repo, request=None, permalink=False):
153 if not request:
154 if not request:
154 request = get_current_request()
155 request = get_current_request()
155
156
156 if not request:
157 if not request:
157 return
158 return
158
159
159 if permalink:
160 if permalink:
160 return request.route_url(
161 return request.route_url(
161 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
162 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
162 else:
163 else:
163 return request.route_url(
164 return request.route_url(
164 'repo_summary', repo_name=safe_str(repo.repo_name))
165 'repo_summary', repo_name=safe_str(repo.repo_name))
165
166
166 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
167 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
167 if not request:
168 if not request:
168 request = get_current_request()
169 request = get_current_request()
169
170
170 if not request:
171 if not request:
171 return
172 return
172
173
173 if permalink:
174 if permalink:
174 return request.route_url(
175 return request.route_url(
175 'repo_commit', repo_name=safe_str(repo.repo_id),
176 'repo_commit', repo_name=safe_str(repo.repo_id),
176 commit_id=commit_id)
177 commit_id=commit_id)
177
178
178 else:
179 else:
179 return request.route_url(
180 return request.route_url(
180 'repo_commit', repo_name=safe_str(repo.repo_name),
181 'repo_commit', repo_name=safe_str(repo.repo_name),
181 commit_id=commit_id)
182 commit_id=commit_id)
182
183
183 def get_repo_log(self, repo, filter_term):
184 def get_repo_log(self, repo, filter_term):
184 repo_log = UserLog.query()\
185 repo_log = UserLog.query()\
185 .filter(or_(UserLog.repository_id == repo.repo_id,
186 .filter(or_(UserLog.repository_id == repo.repo_id,
186 UserLog.repository_name == repo.repo_name))\
187 UserLog.repository_name == repo.repo_name))\
187 .options(joinedload(UserLog.user))\
188 .options(joinedload(UserLog.user))\
188 .options(joinedload(UserLog.repository))\
189 .options(joinedload(UserLog.repository))\
189 .order_by(UserLog.action_date.desc())
190 .order_by(UserLog.action_date.desc())
190
191
191 repo_log = user_log_filter(repo_log, filter_term)
192 repo_log = user_log_filter(repo_log, filter_term)
192 return repo_log
193 return repo_log
193
194
194 @classmethod
195 @classmethod
195 def update_commit_cache(cls, repositories=None):
196 def update_commit_cache(cls, repositories=None):
196 if not repositories:
197 if not repositories:
197 repositories = Repository.getAll()
198 repositories = Repository.getAll()
198 for repo in repositories:
199 for repo in repositories:
199 repo.update_commit_cache()
200 repo.update_commit_cache()
200
201
201 def get_repos_as_dict(self, repo_list=None, admin=False,
202 def get_repos_as_dict(self, repo_list=None, admin=False,
202 super_user_actions=False, short_name=None):
203 super_user_actions=False, short_name=None):
203
204
204 _render = get_current_request().get_partial_renderer(
205 _render = get_current_request().get_partial_renderer(
205 'rhodecode:templates/data_table/_dt_elements.mako')
206 'rhodecode:templates/data_table/_dt_elements.mako')
206 c = _render.get_call_context()
207 c = _render.get_call_context()
207 h = _render.get_helpers()
208 h = _render.get_helpers()
208
209
209 def quick_menu(repo_name):
210 def quick_menu(repo_name):
210 return _render('quick_menu', repo_name)
211 return _render('quick_menu', repo_name)
211
212
212 def repo_lnk(name, rtype, rstate, private, archived, fork_of):
213 def repo_lnk(name, rtype, rstate, private, archived, fork_repo_name):
213 if short_name is not None:
214 if short_name is not None:
214 short_name_var = short_name
215 short_name_var = short_name
215 else:
216 else:
216 short_name_var = not admin
217 short_name_var = not admin
217 return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
218 return _render('repo_name', name, rtype, rstate, private, archived, fork_repo_name,
218 short_name=short_name_var, admin=False)
219 short_name=short_name_var, admin=False)
219
220
220 def last_change(last_change):
221 def last_change(last_change):
221 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
222 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
222 ts = time.time()
223 ts = time.time()
223 utc_offset = (datetime.datetime.fromtimestamp(ts)
224 utc_offset = (datetime.datetime.fromtimestamp(ts)
224 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
225 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
225 last_change = last_change + datetime.timedelta(seconds=utc_offset)
226 last_change = last_change + datetime.timedelta(seconds=utc_offset)
226
227
227 return _render("last_change", last_change)
228 return _render("last_change", last_change)
228
229
229 def rss_lnk(repo_name):
230 def rss_lnk(repo_name):
230 return _render("rss", repo_name)
231 return _render("rss", repo_name)
231
232
232 def atom_lnk(repo_name):
233 def atom_lnk(repo_name):
233 return _render("atom", repo_name)
234 return _render("atom", repo_name)
234
235
235 def last_rev(repo_name, cs_cache):
236 def last_rev(repo_name, cs_cache):
236 return _render('revision', repo_name, cs_cache.get('revision'),
237 return _render('revision', repo_name, cs_cache.get('revision'),
237 cs_cache.get('raw_id'), cs_cache.get('author'),
238 cs_cache.get('raw_id'), cs_cache.get('author'),
238 cs_cache.get('message'), cs_cache.get('date'))
239 cs_cache.get('message'), cs_cache.get('date'))
239
240
240 def desc(desc):
241 def desc(desc):
241 return _render('repo_desc', desc, c.visual.stylify_metatags)
242 return _render('repo_desc', desc, c.visual.stylify_metatags)
242
243
243 def state(repo_state):
244 def state(repo_state):
244 return _render("repo_state", repo_state)
245 return _render("repo_state", repo_state)
245
246
246 def repo_actions(repo_name):
247 def repo_actions(repo_name):
247 return _render('repo_actions', repo_name, super_user_actions)
248 return _render('repo_actions', repo_name, super_user_actions)
248
249
249 def user_profile(username):
250 def user_profile(username):
250 return _render('user_profile', username)
251 return _render('user_profile', username)
251
252
252 repos_data = []
253 repos_data = []
253 for repo in repo_list:
254 for repo in repo_list:
254 # NOTE(marcink): because we use only raw column we need to load it like that
255 # NOTE(marcink): because we use only raw column we need to load it like that
255 changeset_cache = Repository._load_changeset_cache(
256 changeset_cache = Repository._load_changeset_cache(
256 repo.repo_id, repo._changeset_cache)
257 repo.repo_id, repo._changeset_cache)
257
258
258 row = {
259 row = {
259 "menu": quick_menu(repo.repo_name),
260 "menu": quick_menu(repo.repo_name),
260
261
261 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
262 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
262 repo.private, repo.archived, repo.fork),
263 repo.private, repo.archived, repo.fork_repo_name),
263
264
264 "desc": desc(h.escape(repo.description)),
265 "desc": desc(h.escape(repo.description)),
265
266
266 "last_change": last_change(repo.updated_on),
267 "last_change": last_change(repo.updated_on),
267
268
268 "last_changeset": last_rev(repo.repo_name, changeset_cache),
269 "last_changeset": last_rev(repo.repo_name, changeset_cache),
269 "last_changeset_raw": changeset_cache.get('revision'),
270 "last_changeset_raw": changeset_cache.get('revision'),
270
271
271 "owner": user_profile(repo.User.username),
272 "owner": user_profile(repo.owner_username),
272
273
273 "state": state(repo.repo_state),
274 "state": state(repo.repo_state),
274 "rss": rss_lnk(repo.repo_name),
275 "rss": rss_lnk(repo.repo_name),
275 "atom": atom_lnk(repo.repo_name),
276 "atom": atom_lnk(repo.repo_name),
276 }
277 }
277 if admin:
278 if admin:
278 row.update({
279 row.update({
279 "action": repo_actions(repo.repo_name),
280 "action": repo_actions(repo.repo_name),
280 })
281 })
281 repos_data.append(row)
282 repos_data.append(row)
282
283
283 return repos_data
284 return repos_data
284
285
285 def get_repos_data_table(
286 def get_repos_data_table(
286 self, draw, start, limit,
287 self, draw, start, limit,
287 search_q, order_by, order_dir,
288 search_q, order_by, order_dir,
288 auth_user, repo_group_id):
289 auth_user, repo_group_id):
289 from rhodecode.model.scm import RepoList
290 from rhodecode.model.scm import RepoList
290
291
291 _perms = ['repository.read', 'repository.write', 'repository.admin']
292 _perms = ['repository.read', 'repository.write', 'repository.admin']
292
293
293 repos = Repository.query() \
294 repos = Repository.query() \
294 .filter(Repository.group_id == repo_group_id) \
295 .filter(Repository.group_id == repo_group_id) \
295 .all()
296 .all()
296 auth_repo_list = RepoList(
297 auth_repo_list = RepoList(
297 repos, perm_set=_perms,
298 repos, perm_set=_perms,
298 extra_kwargs=dict(user=auth_user))
299 extra_kwargs=dict(user=auth_user))
299
300
300 allowed_ids = [-1]
301 allowed_ids = [-1]
301 for repo in auth_repo_list:
302 for repo in auth_repo_list:
302 allowed_ids.append(repo.repo_id)
303 allowed_ids.append(repo.repo_id)
303
304
304 repos_data_total_count = Repository.query() \
305 repos_data_total_count = Repository.query() \
305 .filter(Repository.group_id == repo_group_id) \
306 .filter(Repository.group_id == repo_group_id) \
306 .filter(or_(
307 .filter(or_(
307 # generate multiple IN to fix limitation problems
308 # generate multiple IN to fix limitation problems
308 *in_filter_generator(Repository.repo_id, allowed_ids))
309 *in_filter_generator(Repository.repo_id, allowed_ids))
309 ) \
310 ) \
310 .count()
311 .count()
311
312
313 RepoFork = aliased(Repository)
314 OwnerUser = aliased(User)
312 base_q = Session.query(
315 base_q = Session.query(
313 Repository.repo_id,
316 Repository.repo_id,
314 Repository.repo_name,
317 Repository.repo_name,
315 Repository.description,
318 Repository.description,
316 Repository.repo_type,
319 Repository.repo_type,
317 Repository.repo_state,
320 Repository.repo_state,
318 Repository.private,
321 Repository.private,
319 Repository.archived,
322 Repository.archived,
320 Repository.fork,
321 Repository.updated_on,
323 Repository.updated_on,
322 Repository._changeset_cache,
324 Repository._changeset_cache,
323 User,
325 RepoFork.repo_name.label('fork_repo_name'),
326 OwnerUser.username.label('owner_username'),
324 ) \
327 ) \
325 .filter(Repository.group_id == repo_group_id) \
328 .filter(Repository.group_id == repo_group_id) \
326 .filter(or_(
329 .filter(or_(
327 # generate multiple IN to fix limitation problems
330 # generate multiple IN to fix limitation problems
328 *in_filter_generator(Repository.repo_id, allowed_ids))
331 *in_filter_generator(Repository.repo_id, allowed_ids))
329 ) \
332 ) \
330 .join(User, User.user_id == Repository.user_id) \
333 .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id) \
331 .group_by(Repository, User)
334 .join(OwnerUser, Repository.user_id == OwnerUser.user_id)
332
335
333 repos_data_total_filtered_count = base_q.count()
336 repos_data_total_filtered_count = base_q.count()
334
337
335 sort_defined = False
338 sort_defined = False
336 if order_by == 'repo_name':
339 if order_by == 'repo_name':
337 sort_col = func.lower(Repository.repo_name)
340 sort_col = func.lower(Repository.repo_name)
338 sort_defined = True
341 sort_defined = True
339 elif order_by == 'user_username':
342 elif order_by == 'user_username':
340 sort_col = User.username
343 sort_col = User.username
341 else:
344 else:
342 sort_col = getattr(Repository, order_by, None)
345 sort_col = getattr(Repository, order_by, None)
343
346
344 if sort_defined or sort_col:
347 if sort_defined or sort_col:
345 if order_dir == 'asc':
348 if order_dir == 'asc':
346 sort_col = sort_col.asc()
349 sort_col = sort_col.asc()
347 else:
350 else:
348 sort_col = sort_col.desc()
351 sort_col = sort_col.desc()
349
352
350 base_q = base_q.order_by(sort_col)
353 base_q = base_q.order_by(sort_col)
351 base_q = base_q.offset(start).limit(limit)
354 base_q = base_q.offset(start).limit(limit)
352
355
353 repos_list = base_q.all()
356 repos_list = base_q.all()
354
357
355 repos_data = RepoModel().get_repos_as_dict(
358 repos_data = RepoModel().get_repos_as_dict(
356 repo_list=repos_list, admin=False)
359 repo_list=repos_list, admin=False)
357
360
358 data = ({
361 data = ({
359 'draw': draw,
362 'draw': draw,
360 'data': repos_data,
363 'data': repos_data,
361 'recordsTotal': repos_data_total_count,
364 'recordsTotal': repos_data_total_count,
362 'recordsFiltered': repos_data_total_filtered_count,
365 'recordsFiltered': repos_data_total_filtered_count,
363 })
366 })
364 return data
367 return data
365
368
366 def _get_defaults(self, repo_name):
369 def _get_defaults(self, repo_name):
367 """
370 """
368 Gets information about repository, and returns a dict for
371 Gets information about repository, and returns a dict for
369 usage in forms
372 usage in forms
370
373
371 :param repo_name:
374 :param repo_name:
372 """
375 """
373
376
374 repo_info = Repository.get_by_repo_name(repo_name)
377 repo_info = Repository.get_by_repo_name(repo_name)
375
378
376 if repo_info is None:
379 if repo_info is None:
377 return None
380 return None
378
381
379 defaults = repo_info.get_dict()
382 defaults = repo_info.get_dict()
380 defaults['repo_name'] = repo_info.just_name
383 defaults['repo_name'] = repo_info.just_name
381
384
382 groups = repo_info.groups_with_parents
385 groups = repo_info.groups_with_parents
383 parent_group = groups[-1] if groups else None
386 parent_group = groups[-1] if groups else None
384
387
385 # we use -1 as this is how in HTML, we mark an empty group
388 # we use -1 as this is how in HTML, we mark an empty group
386 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
389 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
387
390
388 keys_to_process = (
391 keys_to_process = (
389 {'k': 'repo_type', 'strip': False},
392 {'k': 'repo_type', 'strip': False},
390 {'k': 'repo_enable_downloads', 'strip': True},
393 {'k': 'repo_enable_downloads', 'strip': True},
391 {'k': 'repo_description', 'strip': True},
394 {'k': 'repo_description', 'strip': True},
392 {'k': 'repo_enable_locking', 'strip': True},
395 {'k': 'repo_enable_locking', 'strip': True},
393 {'k': 'repo_landing_rev', 'strip': True},
396 {'k': 'repo_landing_rev', 'strip': True},
394 {'k': 'clone_uri', 'strip': False},
397 {'k': 'clone_uri', 'strip': False},
395 {'k': 'push_uri', 'strip': False},
398 {'k': 'push_uri', 'strip': False},
396 {'k': 'repo_private', 'strip': True},
399 {'k': 'repo_private', 'strip': True},
397 {'k': 'repo_enable_statistics', 'strip': True}
400 {'k': 'repo_enable_statistics', 'strip': True}
398 )
401 )
399
402
400 for item in keys_to_process:
403 for item in keys_to_process:
401 attr = item['k']
404 attr = item['k']
402 if item['strip']:
405 if item['strip']:
403 attr = remove_prefix(item['k'], 'repo_')
406 attr = remove_prefix(item['k'], 'repo_')
404
407
405 val = defaults[attr]
408 val = defaults[attr]
406 if item['k'] == 'repo_landing_rev':
409 if item['k'] == 'repo_landing_rev':
407 val = ':'.join(defaults[attr])
410 val = ':'.join(defaults[attr])
408 defaults[item['k']] = val
411 defaults[item['k']] = val
409 if item['k'] == 'clone_uri':
412 if item['k'] == 'clone_uri':
410 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
413 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
411 if item['k'] == 'push_uri':
414 if item['k'] == 'push_uri':
412 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
415 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
413
416
414 # fill owner
417 # fill owner
415 if repo_info.user:
418 if repo_info.user:
416 defaults.update({'user': repo_info.user.username})
419 defaults.update({'user': repo_info.user.username})
417 else:
420 else:
418 replacement_user = User.get_first_super_admin().username
421 replacement_user = User.get_first_super_admin().username
419 defaults.update({'user': replacement_user})
422 defaults.update({'user': replacement_user})
420
423
421 return defaults
424 return defaults
422
425
423 def update(self, repo, **kwargs):
426 def update(self, repo, **kwargs):
424 try:
427 try:
425 cur_repo = self._get_repo(repo)
428 cur_repo = self._get_repo(repo)
426 source_repo_name = cur_repo.repo_name
429 source_repo_name = cur_repo.repo_name
427
430
428 affected_user_ids = []
431 affected_user_ids = []
429 if 'user' in kwargs:
432 if 'user' in kwargs:
430 old_owner_id = cur_repo.user.user_id
433 old_owner_id = cur_repo.user.user_id
431 new_owner = User.get_by_username(kwargs['user'])
434 new_owner = User.get_by_username(kwargs['user'])
432 cur_repo.user = new_owner
435 cur_repo.user = new_owner
433
436
434 if old_owner_id != new_owner.user_id:
437 if old_owner_id != new_owner.user_id:
435 affected_user_ids = [new_owner.user_id, old_owner_id]
438 affected_user_ids = [new_owner.user_id, old_owner_id]
436
439
437 if 'repo_group' in kwargs:
440 if 'repo_group' in kwargs:
438 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
441 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
439 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
442 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
440
443
441 update_keys = [
444 update_keys = [
442 (1, 'repo_description'),
445 (1, 'repo_description'),
443 (1, 'repo_landing_rev'),
446 (1, 'repo_landing_rev'),
444 (1, 'repo_private'),
447 (1, 'repo_private'),
445 (1, 'repo_enable_downloads'),
448 (1, 'repo_enable_downloads'),
446 (1, 'repo_enable_locking'),
449 (1, 'repo_enable_locking'),
447 (1, 'repo_enable_statistics'),
450 (1, 'repo_enable_statistics'),
448 (0, 'clone_uri'),
451 (0, 'clone_uri'),
449 (0, 'push_uri'),
452 (0, 'push_uri'),
450 (0, 'fork_id')
453 (0, 'fork_id')
451 ]
454 ]
452 for strip, k in update_keys:
455 for strip, k in update_keys:
453 if k in kwargs:
456 if k in kwargs:
454 val = kwargs[k]
457 val = kwargs[k]
455 if strip:
458 if strip:
456 k = remove_prefix(k, 'repo_')
459 k = remove_prefix(k, 'repo_')
457
460
458 setattr(cur_repo, k, val)
461 setattr(cur_repo, k, val)
459
462
460 new_name = cur_repo.get_new_name(kwargs['repo_name'])
463 new_name = cur_repo.get_new_name(kwargs['repo_name'])
461 cur_repo.repo_name = new_name
464 cur_repo.repo_name = new_name
462
465
463 # if private flag is set, reset default permission to NONE
466 # if private flag is set, reset default permission to NONE
464 if kwargs.get('repo_private'):
467 if kwargs.get('repo_private'):
465 EMPTY_PERM = 'repository.none'
468 EMPTY_PERM = 'repository.none'
466 RepoModel().grant_user_permission(
469 RepoModel().grant_user_permission(
467 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
470 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
468 )
471 )
469 if kwargs.get('repo_landing_rev'):
472 if kwargs.get('repo_landing_rev'):
470 landing_rev_val = kwargs['repo_landing_rev']
473 landing_rev_val = kwargs['repo_landing_rev']
471 RepoModel().set_landing_rev(cur_repo, landing_rev_val)
474 RepoModel().set_landing_rev(cur_repo, landing_rev_val)
472
475
473 # handle extra fields
476 # handle extra fields
474 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
477 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
475 k = RepositoryField.un_prefix_key(field)
478 k = RepositoryField.un_prefix_key(field)
476 ex_field = RepositoryField.get_by_key_name(
479 ex_field = RepositoryField.get_by_key_name(
477 key=k, repo=cur_repo)
480 key=k, repo=cur_repo)
478 if ex_field:
481 if ex_field:
479 ex_field.field_value = kwargs[field]
482 ex_field.field_value = kwargs[field]
480 self.sa.add(ex_field)
483 self.sa.add(ex_field)
481
484
482 self.sa.add(cur_repo)
485 self.sa.add(cur_repo)
483
486
484 if source_repo_name != new_name:
487 if source_repo_name != new_name:
485 # rename repository
488 # rename repository
486 self._rename_filesystem_repo(
489 self._rename_filesystem_repo(
487 old=source_repo_name, new=new_name)
490 old=source_repo_name, new=new_name)
488
491
489 if affected_user_ids:
492 if affected_user_ids:
490 PermissionModel().trigger_permission_flush(affected_user_ids)
493 PermissionModel().trigger_permission_flush(affected_user_ids)
491
494
492 return cur_repo
495 return cur_repo
493 except Exception:
496 except Exception:
494 log.error(traceback.format_exc())
497 log.error(traceback.format_exc())
495 raise
498 raise
496
499
497 def _create_repo(self, repo_name, repo_type, description, owner,
500 def _create_repo(self, repo_name, repo_type, description, owner,
498 private=False, clone_uri=None, repo_group=None,
501 private=False, clone_uri=None, repo_group=None,
499 landing_rev=None, fork_of=None,
502 landing_rev=None, fork_of=None,
500 copy_fork_permissions=False, enable_statistics=False,
503 copy_fork_permissions=False, enable_statistics=False,
501 enable_locking=False, enable_downloads=False,
504 enable_locking=False, enable_downloads=False,
502 copy_group_permissions=False,
505 copy_group_permissions=False,
503 state=Repository.STATE_PENDING):
506 state=Repository.STATE_PENDING):
504 """
507 """
505 Create repository inside database with PENDING state, this should be
508 Create repository inside database with PENDING state, this should be
506 only executed by create() repo. With exception of importing existing
509 only executed by create() repo. With exception of importing existing
507 repos
510 repos
508 """
511 """
509 from rhodecode.model.scm import ScmModel
512 from rhodecode.model.scm import ScmModel
510
513
511 owner = self._get_user(owner)
514 owner = self._get_user(owner)
512 fork_of = self._get_repo(fork_of)
515 fork_of = self._get_repo(fork_of)
513 repo_group = self._get_repo_group(safe_int(repo_group))
516 repo_group = self._get_repo_group(safe_int(repo_group))
514 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
517 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
515 landing_rev = landing_rev or default_landing_ref
518 landing_rev = landing_rev or default_landing_ref
516
519
517 try:
520 try:
518 repo_name = safe_unicode(repo_name)
521 repo_name = safe_str(repo_name)
519 description = safe_unicode(description)
522 description = safe_str(description)
520 # repo name is just a name of repository
523 # repo name is just a name of repository
521 # while repo_name_full is a full qualified name that is combined
524 # while repo_name_full is a full qualified name that is combined
522 # with name and path of group
525 # with name and path of group
523 repo_name_full = repo_name
526 repo_name_full = repo_name
524 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
527 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
525
528
526 new_repo = Repository()
529 new_repo = Repository()
527 new_repo.repo_state = state
530 new_repo.repo_state = state
528 new_repo.enable_statistics = False
531 new_repo.enable_statistics = False
529 new_repo.repo_name = repo_name_full
532 new_repo.repo_name = repo_name_full
530 new_repo.repo_type = repo_type
533 new_repo.repo_type = repo_type
531 new_repo.user = owner
534 new_repo.user = owner
532 new_repo.group = repo_group
535 new_repo.group = repo_group
533 new_repo.description = description or repo_name
536 new_repo.description = description or repo_name
534 new_repo.private = private
537 new_repo.private = private
535 new_repo.archived = False
538 new_repo.archived = False
536 new_repo.clone_uri = clone_uri
539 new_repo.clone_uri = clone_uri
537 new_repo.landing_rev = landing_rev
540 new_repo.landing_rev = landing_rev
538
541
539 new_repo.enable_statistics = enable_statistics
542 new_repo.enable_statistics = enable_statistics
540 new_repo.enable_locking = enable_locking
543 new_repo.enable_locking = enable_locking
541 new_repo.enable_downloads = enable_downloads
544 new_repo.enable_downloads = enable_downloads
542
545
543 if repo_group:
546 if repo_group:
544 new_repo.enable_locking = repo_group.enable_locking
547 new_repo.enable_locking = repo_group.enable_locking
545
548
546 if fork_of:
549 if fork_of:
547 parent_repo = fork_of
550 parent_repo = fork_of
548 new_repo.fork = parent_repo
551 new_repo.fork = parent_repo
549
552
550 events.trigger(events.RepoPreCreateEvent(new_repo))
553 events.trigger(events.RepoPreCreateEvent(new_repo))
551
554
552 self.sa.add(new_repo)
555 self.sa.add(new_repo)
553
556
554 EMPTY_PERM = 'repository.none'
557 EMPTY_PERM = 'repository.none'
555 if fork_of and copy_fork_permissions:
558 if fork_of and copy_fork_permissions:
556 repo = fork_of
559 repo = fork_of
557 user_perms = UserRepoToPerm.query() \
560 user_perms = UserRepoToPerm.query() \
558 .filter(UserRepoToPerm.repository == repo).all()
561 .filter(UserRepoToPerm.repository == repo).all()
559 group_perms = UserGroupRepoToPerm.query() \
562 group_perms = UserGroupRepoToPerm.query() \
560 .filter(UserGroupRepoToPerm.repository == repo).all()
563 .filter(UserGroupRepoToPerm.repository == repo).all()
561
564
562 for perm in user_perms:
565 for perm in user_perms:
563 UserRepoToPerm.create(
566 UserRepoToPerm.create(
564 perm.user, new_repo, perm.permission)
567 perm.user, new_repo, perm.permission)
565
568
566 for perm in group_perms:
569 for perm in group_perms:
567 UserGroupRepoToPerm.create(
570 UserGroupRepoToPerm.create(
568 perm.users_group, new_repo, perm.permission)
571 perm.users_group, new_repo, perm.permission)
569 # in case we copy permissions and also set this repo to private
572 # in case we copy permissions and also set this repo to private
570 # override the default user permission to make it a private repo
573 # override the default user permission to make it a private repo
571 if private:
574 if private:
572 RepoModel(self.sa).grant_user_permission(
575 RepoModel(self.sa).grant_user_permission(
573 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
576 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
574
577
575 elif repo_group and copy_group_permissions:
578 elif repo_group and copy_group_permissions:
576 user_perms = UserRepoGroupToPerm.query() \
579 user_perms = UserRepoGroupToPerm.query() \
577 .filter(UserRepoGroupToPerm.group == repo_group).all()
580 .filter(UserRepoGroupToPerm.group == repo_group).all()
578
581
579 group_perms = UserGroupRepoGroupToPerm.query() \
582 group_perms = UserGroupRepoGroupToPerm.query() \
580 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
583 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
581
584
582 for perm in user_perms:
585 for perm in user_perms:
583 perm_name = perm.permission.permission_name.replace(
586 perm_name = perm.permission.permission_name.replace(
584 'group.', 'repository.')
587 'group.', 'repository.')
585 perm_obj = Permission.get_by_key(perm_name)
588 perm_obj = Permission.get_by_key(perm_name)
586 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
589 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
587
590
588 for perm in group_perms:
591 for perm in group_perms:
589 perm_name = perm.permission.permission_name.replace(
592 perm_name = perm.permission.permission_name.replace(
590 'group.', 'repository.')
593 'group.', 'repository.')
591 perm_obj = Permission.get_by_key(perm_name)
594 perm_obj = Permission.get_by_key(perm_name)
592 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
595 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
593
596
594 if private:
597 if private:
595 RepoModel(self.sa).grant_user_permission(
598 RepoModel(self.sa).grant_user_permission(
596 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
599 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
597
600
598 else:
601 else:
599 perm_obj = self._create_default_perms(new_repo, private)
602 perm_obj = self._create_default_perms(new_repo, private)
600 self.sa.add(perm_obj)
603 self.sa.add(perm_obj)
601
604
602 # now automatically start following this repository as owner
605 # now automatically start following this repository as owner
603 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
606 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
604
607
605 # we need to flush here, in order to check if database won't
608 # we need to flush here, in order to check if database won't
606 # throw any exceptions, create filesystem dirs at the very end
609 # throw any exceptions, create filesystem dirs at the very end
607 self.sa.flush()
610 self.sa.flush()
608 events.trigger(events.RepoCreateEvent(new_repo))
611 events.trigger(events.RepoCreateEvent(new_repo))
609 return new_repo
612 return new_repo
610
613
611 except Exception:
614 except Exception:
612 log.error(traceback.format_exc())
615 log.error(traceback.format_exc())
613 raise
616 raise
614
617
615 def create(self, form_data, cur_user):
618 def create(self, form_data, cur_user):
616 """
619 """
617 Create repository using celery tasks
620 Create repository using celery tasks
618
621
619 :param form_data:
622 :param form_data:
620 :param cur_user:
623 :param cur_user:
621 """
624 """
622 from rhodecode.lib.celerylib import tasks, run_task
625 from rhodecode.lib.celerylib import tasks, run_task
623 return run_task(tasks.create_repo, form_data, cur_user)
626 return run_task(tasks.create_repo, form_data, cur_user)
624
627
625 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
628 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
626 perm_deletions=None, check_perms=True,
629 perm_deletions=None, check_perms=True,
627 cur_user=None):
630 cur_user=None):
628 if not perm_additions:
631 if not perm_additions:
629 perm_additions = []
632 perm_additions = []
630 if not perm_updates:
633 if not perm_updates:
631 perm_updates = []
634 perm_updates = []
632 if not perm_deletions:
635 if not perm_deletions:
633 perm_deletions = []
636 perm_deletions = []
634
637
635 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
638 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
636
639
637 changes = {
640 changes = {
638 'added': [],
641 'added': [],
639 'updated': [],
642 'updated': [],
640 'deleted': [],
643 'deleted': [],
641 'default_user_changed': None
644 'default_user_changed': None
642 }
645 }
643
646
644 repo = self._get_repo(repo)
647 repo = self._get_repo(repo)
645
648
646 # update permissions
649 # update permissions
647 for member_id, perm, member_type in perm_updates:
650 for member_id, perm, member_type in perm_updates:
648 member_id = int(member_id)
651 member_id = int(member_id)
649 if member_type == 'user':
652 if member_type == 'user':
650 member_name = User.get(member_id).username
653 member_name = User.get(member_id).username
651 if member_name == User.DEFAULT_USER:
654 if member_name == User.DEFAULT_USER:
652 # NOTE(dan): detect if we changed permissions for default user
655 # NOTE(dan): detect if we changed permissions for default user
653 perm_obj = self.sa.query(UserRepoToPerm) \
656 perm_obj = self.sa.query(UserRepoToPerm) \
654 .filter(UserRepoToPerm.user_id == member_id) \
657 .filter(UserRepoToPerm.user_id == member_id) \
655 .filter(UserRepoToPerm.repository == repo) \
658 .filter(UserRepoToPerm.repository == repo) \
656 .scalar()
659 .scalar()
657 if perm_obj and perm_obj.permission.permission_name != perm:
660 if perm_obj and perm_obj.permission.permission_name != perm:
658 changes['default_user_changed'] = True
661 changes['default_user_changed'] = True
659
662
660 # this updates also current one if found
663 # this updates also current one if found
661 self.grant_user_permission(
664 self.grant_user_permission(
662 repo=repo, user=member_id, perm=perm)
665 repo=repo, user=member_id, perm=perm)
663 elif member_type == 'user_group':
666 elif member_type == 'user_group':
664 # check if we have permissions to alter this usergroup
667 # check if we have permissions to alter this usergroup
665 member_name = UserGroup.get(member_id).users_group_name
668 member_name = UserGroup.get(member_id).users_group_name
666 if not check_perms or HasUserGroupPermissionAny(
669 if not check_perms or HasUserGroupPermissionAny(
667 *req_perms)(member_name, user=cur_user):
670 *req_perms)(member_name, user=cur_user):
668 self.grant_user_group_permission(
671 self.grant_user_group_permission(
669 repo=repo, group_name=member_id, perm=perm)
672 repo=repo, group_name=member_id, perm=perm)
670 else:
673 else:
671 raise ValueError("member_type must be 'user' or 'user_group' "
674 raise ValueError("member_type must be 'user' or 'user_group' "
672 "got {} instead".format(member_type))
675 "got {} instead".format(member_type))
673 changes['updated'].append({'type': member_type, 'id': member_id,
676 changes['updated'].append({'type': member_type, 'id': member_id,
674 'name': member_name, 'new_perm': perm})
677 'name': member_name, 'new_perm': perm})
675
678
676 # set new permissions
679 # set new permissions
677 for member_id, perm, member_type in perm_additions:
680 for member_id, perm, member_type in perm_additions:
678 member_id = int(member_id)
681 member_id = int(member_id)
679 if member_type == 'user':
682 if member_type == 'user':
680 member_name = User.get(member_id).username
683 member_name = User.get(member_id).username
681 self.grant_user_permission(
684 self.grant_user_permission(
682 repo=repo, user=member_id, perm=perm)
685 repo=repo, user=member_id, perm=perm)
683 elif member_type == 'user_group':
686 elif member_type == 'user_group':
684 # check if we have permissions to alter this usergroup
687 # check if we have permissions to alter this usergroup
685 member_name = UserGroup.get(member_id).users_group_name
688 member_name = UserGroup.get(member_id).users_group_name
686 if not check_perms or HasUserGroupPermissionAny(
689 if not check_perms or HasUserGroupPermissionAny(
687 *req_perms)(member_name, user=cur_user):
690 *req_perms)(member_name, user=cur_user):
688 self.grant_user_group_permission(
691 self.grant_user_group_permission(
689 repo=repo, group_name=member_id, perm=perm)
692 repo=repo, group_name=member_id, perm=perm)
690 else:
693 else:
691 raise ValueError("member_type must be 'user' or 'user_group' "
694 raise ValueError("member_type must be 'user' or 'user_group' "
692 "got {} instead".format(member_type))
695 "got {} instead".format(member_type))
693
696
694 changes['added'].append({'type': member_type, 'id': member_id,
697 changes['added'].append({'type': member_type, 'id': member_id,
695 'name': member_name, 'new_perm': perm})
698 'name': member_name, 'new_perm': perm})
696 # delete permissions
699 # delete permissions
697 for member_id, perm, member_type in perm_deletions:
700 for member_id, perm, member_type in perm_deletions:
698 member_id = int(member_id)
701 member_id = int(member_id)
699 if member_type == 'user':
702 if member_type == 'user':
700 member_name = User.get(member_id).username
703 member_name = User.get(member_id).username
701 self.revoke_user_permission(repo=repo, user=member_id)
704 self.revoke_user_permission(repo=repo, user=member_id)
702 elif member_type == 'user_group':
705 elif member_type == 'user_group':
703 # check if we have permissions to alter this usergroup
706 # check if we have permissions to alter this usergroup
704 member_name = UserGroup.get(member_id).users_group_name
707 member_name = UserGroup.get(member_id).users_group_name
705 if not check_perms or HasUserGroupPermissionAny(
708 if not check_perms or HasUserGroupPermissionAny(
706 *req_perms)(member_name, user=cur_user):
709 *req_perms)(member_name, user=cur_user):
707 self.revoke_user_group_permission(
710 self.revoke_user_group_permission(
708 repo=repo, group_name=member_id)
711 repo=repo, group_name=member_id)
709 else:
712 else:
710 raise ValueError("member_type must be 'user' or 'user_group' "
713 raise ValueError("member_type must be 'user' or 'user_group' "
711 "got {} instead".format(member_type))
714 "got {} instead".format(member_type))
712
715
713 changes['deleted'].append({'type': member_type, 'id': member_id,
716 changes['deleted'].append({'type': member_type, 'id': member_id,
714 'name': member_name, 'new_perm': perm})
717 'name': member_name, 'new_perm': perm})
715 return changes
718 return changes
716
719
717 def create_fork(self, form_data, cur_user):
720 def create_fork(self, form_data, cur_user):
718 """
721 """
719 Simple wrapper into executing celery task for fork creation
722 Simple wrapper into executing celery task for fork creation
720
723
721 :param form_data:
724 :param form_data:
722 :param cur_user:
725 :param cur_user:
723 """
726 """
724 from rhodecode.lib.celerylib import tasks, run_task
727 from rhodecode.lib.celerylib import tasks, run_task
725 return run_task(tasks.create_repo_fork, form_data, cur_user)
728 return run_task(tasks.create_repo_fork, form_data, cur_user)
726
729
727 def archive(self, repo):
730 def archive(self, repo):
728 """
731 """
729 Archive given repository. Set archive flag.
732 Archive given repository. Set archive flag.
730
733
731 :param repo:
734 :param repo:
732 """
735 """
733 repo = self._get_repo(repo)
736 repo = self._get_repo(repo)
734 if repo:
737 if repo:
735
738
736 try:
739 try:
737 repo.archived = True
740 repo.archived = True
738 self.sa.add(repo)
741 self.sa.add(repo)
739 self.sa.commit()
742 self.sa.commit()
740 except Exception:
743 except Exception:
741 log.error(traceback.format_exc())
744 log.error(traceback.format_exc())
742 raise
745 raise
743
746
744 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
747 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
745 """
748 """
746 Delete given repository, forks parameter defines what do do with
749 Delete given repository, forks parameter defines what do do with
747 attached forks. Throws AttachedForksError if deleted repo has attached
750 attached forks. Throws AttachedForksError if deleted repo has attached
748 forks
751 forks
749
752
750 :param repo:
753 :param repo:
751 :param forks: str 'delete' or 'detach'
754 :param forks: str 'delete' or 'detach'
752 :param pull_requests: str 'delete' or None
755 :param pull_requests: str 'delete' or None
753 :param fs_remove: remove(archive) repo from filesystem
756 :param fs_remove: remove(archive) repo from filesystem
754 """
757 """
755 if not cur_user:
758 if not cur_user:
756 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
759 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
757 repo = self._get_repo(repo)
760 repo = self._get_repo(repo)
758 if repo:
761 if repo:
759 if forks == 'detach':
762 if forks == 'detach':
760 for r in repo.forks:
763 for r in repo.forks:
761 r.fork = None
764 r.fork = None
762 self.sa.add(r)
765 self.sa.add(r)
763 elif forks == 'delete':
766 elif forks == 'delete':
764 for r in repo.forks:
767 for r in repo.forks:
765 self.delete(r, forks='delete')
768 self.delete(r, forks='delete')
766 elif [f for f in repo.forks]:
769 elif [f for f in repo.forks]:
767 raise AttachedForksError()
770 raise AttachedForksError()
768
771
769 # check for pull requests
772 # check for pull requests
770 pr_sources = repo.pull_requests_source
773 pr_sources = repo.pull_requests_source
771 pr_targets = repo.pull_requests_target
774 pr_targets = repo.pull_requests_target
772 if pull_requests != 'delete' and (pr_sources or pr_targets):
775 if pull_requests != 'delete' and (pr_sources or pr_targets):
773 raise AttachedPullRequestsError()
776 raise AttachedPullRequestsError()
774
777
775 old_repo_dict = repo.get_dict()
778 old_repo_dict = repo.get_dict()
776 events.trigger(events.RepoPreDeleteEvent(repo))
779 events.trigger(events.RepoPreDeleteEvent(repo))
777 try:
780 try:
778 self.sa.delete(repo)
781 self.sa.delete(repo)
779 if fs_remove:
782 if fs_remove:
780 self._delete_filesystem_repo(repo)
783 self._delete_filesystem_repo(repo)
781 else:
784 else:
782 log.debug('skipping removal from filesystem')
785 log.debug('skipping removal from filesystem')
783 old_repo_dict.update({
786 old_repo_dict.update({
784 'deleted_by': cur_user,
787 'deleted_by': cur_user,
785 'deleted_on': time.time(),
788 'deleted_on': time.time(),
786 })
789 })
787 hooks_base.delete_repository(**old_repo_dict)
790 hooks_base.delete_repository(**old_repo_dict)
788 events.trigger(events.RepoDeleteEvent(repo))
791 events.trigger(events.RepoDeleteEvent(repo))
789 except Exception:
792 except Exception:
790 log.error(traceback.format_exc())
793 log.error(traceback.format_exc())
791 raise
794 raise
792
795
793 def grant_user_permission(self, repo, user, perm):
796 def grant_user_permission(self, repo, user, perm):
794 """
797 """
795 Grant permission for user on given repository, or update existing one
798 Grant permission for user on given repository, or update existing one
796 if found
799 if found
797
800
798 :param repo: Instance of Repository, repository_id, or repository name
801 :param repo: Instance of Repository, repository_id, or repository name
799 :param user: Instance of User, user_id or username
802 :param user: Instance of User, user_id or username
800 :param perm: Instance of Permission, or permission_name
803 :param perm: Instance of Permission, or permission_name
801 """
804 """
802 user = self._get_user(user)
805 user = self._get_user(user)
803 repo = self._get_repo(repo)
806 repo = self._get_repo(repo)
804 permission = self._get_perm(perm)
807 permission = self._get_perm(perm)
805
808
806 # check if we have that permission already
809 # check if we have that permission already
807 obj = self.sa.query(UserRepoToPerm) \
810 obj = self.sa.query(UserRepoToPerm) \
808 .filter(UserRepoToPerm.user == user) \
811 .filter(UserRepoToPerm.user == user) \
809 .filter(UserRepoToPerm.repository == repo) \
812 .filter(UserRepoToPerm.repository == repo) \
810 .scalar()
813 .scalar()
811 if obj is None:
814 if obj is None:
812 # create new !
815 # create new !
813 obj = UserRepoToPerm()
816 obj = UserRepoToPerm()
814 obj.repository = repo
817 obj.repository = repo
815 obj.user = user
818 obj.user = user
816 obj.permission = permission
819 obj.permission = permission
817 self.sa.add(obj)
820 self.sa.add(obj)
818 log.debug('Granted perm %s to %s on %s', perm, user, repo)
821 log.debug('Granted perm %s to %s on %s', perm, user, repo)
819 action_logger_generic(
822 action_logger_generic(
820 'granted permission: {} to user: {} on repo: {}'.format(
823 'granted permission: {} to user: {} on repo: {}'.format(
821 perm, user, repo), namespace='security.repo')
824 perm, user, repo), namespace='security.repo')
822 return obj
825 return obj
823
826
824 def revoke_user_permission(self, repo, user):
827 def revoke_user_permission(self, repo, user):
825 """
828 """
826 Revoke permission for user on given repository
829 Revoke permission for user on given repository
827
830
828 :param repo: Instance of Repository, repository_id, or repository name
831 :param repo: Instance of Repository, repository_id, or repository name
829 :param user: Instance of User, user_id or username
832 :param user: Instance of User, user_id or username
830 """
833 """
831
834
832 user = self._get_user(user)
835 user = self._get_user(user)
833 repo = self._get_repo(repo)
836 repo = self._get_repo(repo)
834
837
835 obj = self.sa.query(UserRepoToPerm) \
838 obj = self.sa.query(UserRepoToPerm) \
836 .filter(UserRepoToPerm.repository == repo) \
839 .filter(UserRepoToPerm.repository == repo) \
837 .filter(UserRepoToPerm.user == user) \
840 .filter(UserRepoToPerm.user == user) \
838 .scalar()
841 .scalar()
839 if obj:
842 if obj:
840 self.sa.delete(obj)
843 self.sa.delete(obj)
841 log.debug('Revoked perm on %s on %s', repo, user)
844 log.debug('Revoked perm on %s on %s', repo, user)
842 action_logger_generic(
845 action_logger_generic(
843 'revoked permission from user: {} on repo: {}'.format(
846 'revoked permission from user: {} on repo: {}'.format(
844 user, repo), namespace='security.repo')
847 user, repo), namespace='security.repo')
845
848
846 def grant_user_group_permission(self, repo, group_name, perm):
849 def grant_user_group_permission(self, repo, group_name, perm):
847 """
850 """
848 Grant permission for user group on given repository, or update
851 Grant permission for user group on given repository, or update
849 existing one if found
852 existing one if found
850
853
851 :param repo: Instance of Repository, repository_id, or repository name
854 :param repo: Instance of Repository, repository_id, or repository name
852 :param group_name: Instance of UserGroup, users_group_id,
855 :param group_name: Instance of UserGroup, users_group_id,
853 or user group name
856 or user group name
854 :param perm: Instance of Permission, or permission_name
857 :param perm: Instance of Permission, or permission_name
855 """
858 """
856 repo = self._get_repo(repo)
859 repo = self._get_repo(repo)
857 group_name = self._get_user_group(group_name)
860 group_name = self._get_user_group(group_name)
858 permission = self._get_perm(perm)
861 permission = self._get_perm(perm)
859
862
860 # check if we have that permission already
863 # check if we have that permission already
861 obj = self.sa.query(UserGroupRepoToPerm) \
864 obj = self.sa.query(UserGroupRepoToPerm) \
862 .filter(UserGroupRepoToPerm.users_group == group_name) \
865 .filter(UserGroupRepoToPerm.users_group == group_name) \
863 .filter(UserGroupRepoToPerm.repository == repo) \
866 .filter(UserGroupRepoToPerm.repository == repo) \
864 .scalar()
867 .scalar()
865
868
866 if obj is None:
869 if obj is None:
867 # create new
870 # create new
868 obj = UserGroupRepoToPerm()
871 obj = UserGroupRepoToPerm()
869
872
870 obj.repository = repo
873 obj.repository = repo
871 obj.users_group = group_name
874 obj.users_group = group_name
872 obj.permission = permission
875 obj.permission = permission
873 self.sa.add(obj)
876 self.sa.add(obj)
874 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
877 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
875 action_logger_generic(
878 action_logger_generic(
876 'granted permission: {} to usergroup: {} on repo: {}'.format(
879 'granted permission: {} to usergroup: {} on repo: {}'.format(
877 perm, group_name, repo), namespace='security.repo')
880 perm, group_name, repo), namespace='security.repo')
878
881
879 return obj
882 return obj
880
883
881 def revoke_user_group_permission(self, repo, group_name):
884 def revoke_user_group_permission(self, repo, group_name):
882 """
885 """
883 Revoke permission for user group on given repository
886 Revoke permission for user group on given repository
884
887
885 :param repo: Instance of Repository, repository_id, or repository name
888 :param repo: Instance of Repository, repository_id, or repository name
886 :param group_name: Instance of UserGroup, users_group_id,
889 :param group_name: Instance of UserGroup, users_group_id,
887 or user group name
890 or user group name
888 """
891 """
889 repo = self._get_repo(repo)
892 repo = self._get_repo(repo)
890 group_name = self._get_user_group(group_name)
893 group_name = self._get_user_group(group_name)
891
894
892 obj = self.sa.query(UserGroupRepoToPerm) \
895 obj = self.sa.query(UserGroupRepoToPerm) \
893 .filter(UserGroupRepoToPerm.repository == repo) \
896 .filter(UserGroupRepoToPerm.repository == repo) \
894 .filter(UserGroupRepoToPerm.users_group == group_name) \
897 .filter(UserGroupRepoToPerm.users_group == group_name) \
895 .scalar()
898 .scalar()
896 if obj:
899 if obj:
897 self.sa.delete(obj)
900 self.sa.delete(obj)
898 log.debug('Revoked perm to %s on %s', repo, group_name)
901 log.debug('Revoked perm to %s on %s', repo, group_name)
899 action_logger_generic(
902 action_logger_generic(
900 'revoked permission from usergroup: {} on repo: {}'.format(
903 'revoked permission from usergroup: {} on repo: {}'.format(
901 group_name, repo), namespace='security.repo')
904 group_name, repo), namespace='security.repo')
902
905
903 def delete_stats(self, repo_name):
906 def delete_stats(self, repo_name):
904 """
907 """
905 removes stats for given repo
908 removes stats for given repo
906
909
907 :param repo_name:
910 :param repo_name:
908 """
911 """
909 repo = self._get_repo(repo_name)
912 repo = self._get_repo(repo_name)
910 try:
913 try:
911 obj = self.sa.query(Statistics) \
914 obj = self.sa.query(Statistics) \
912 .filter(Statistics.repository == repo).scalar()
915 .filter(Statistics.repository == repo).scalar()
913 if obj:
916 if obj:
914 self.sa.delete(obj)
917 self.sa.delete(obj)
915 except Exception:
918 except Exception:
916 log.error(traceback.format_exc())
919 log.error(traceback.format_exc())
917 raise
920 raise
918
921
919 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
922 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
920 field_type='str', field_desc=''):
923 field_type='str', field_desc=''):
921
924
922 repo = self._get_repo(repo_name)
925 repo = self._get_repo(repo_name)
923
926
924 new_field = RepositoryField()
927 new_field = RepositoryField()
925 new_field.repository = repo
928 new_field.repository = repo
926 new_field.field_key = field_key
929 new_field.field_key = field_key
927 new_field.field_type = field_type # python type
930 new_field.field_type = field_type # python type
928 new_field.field_value = field_value
931 new_field.field_value = field_value
929 new_field.field_desc = field_desc
932 new_field.field_desc = field_desc
930 new_field.field_label = field_label
933 new_field.field_label = field_label
931 self.sa.add(new_field)
934 self.sa.add(new_field)
932 return new_field
935 return new_field
933
936
934 def delete_repo_field(self, repo_name, field_key):
937 def delete_repo_field(self, repo_name, field_key):
935 repo = self._get_repo(repo_name)
938 repo = self._get_repo(repo_name)
936 field = RepositoryField.get_by_key_name(field_key, repo)
939 field = RepositoryField.get_by_key_name(field_key, repo)
937 if field:
940 if field:
938 self.sa.delete(field)
941 self.sa.delete(field)
939
942
940 def set_landing_rev(self, repo, landing_rev_name):
943 def set_landing_rev(self, repo, landing_rev_name):
941 if landing_rev_name.startswith('branch:'):
944 if landing_rev_name.startswith('branch:'):
942 landing_rev_name = landing_rev_name.split('branch:')[-1]
945 landing_rev_name = landing_rev_name.split('branch:')[-1]
943 scm_instance = repo.scm_instance()
946 scm_instance = repo.scm_instance()
944 if scm_instance:
947 if scm_instance:
945 return scm_instance._remote.set_head_ref(landing_rev_name)
948 return scm_instance._remote.set_head_ref(landing_rev_name)
946
949
947 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
950 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
948 clone_uri=None, repo_store_location=None,
951 clone_uri=None, repo_store_location=None,
949 use_global_config=False, install_hooks=True):
952 use_global_config=False, install_hooks=True):
950 """
953 """
951 makes repository on filesystem. It's group aware means it'll create
954 makes repository on filesystem. It's group aware means it'll create
952 a repository within a group, and alter the paths accordingly of
955 a repository within a group, and alter the paths accordingly of
953 group location
956 group location
954
957
955 :param repo_name:
958 :param repo_name:
956 :param alias:
959 :param alias:
957 :param parent:
960 :param parent:
958 :param clone_uri:
961 :param clone_uri:
959 :param repo_store_location:
962 :param repo_store_location:
960 """
963 """
961 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
964 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
962 from rhodecode.model.scm import ScmModel
965 from rhodecode.model.scm import ScmModel
963
966
964 if Repository.NAME_SEP in repo_name:
967 if Repository.NAME_SEP in repo_name:
965 raise ValueError(
968 raise ValueError(
966 'repo_name must not contain groups got `%s`' % repo_name)
969 'repo_name must not contain groups got `%s`' % repo_name)
967
970
968 if isinstance(repo_group, RepoGroup):
971 if isinstance(repo_group, RepoGroup):
969 new_parent_path = os.sep.join(repo_group.full_path_splitted)
972 new_parent_path = os.sep.join(repo_group.full_path_splitted)
970 else:
973 else:
971 new_parent_path = repo_group or ''
974 new_parent_path = repo_group or ''
972
975
973 if repo_store_location:
976 if repo_store_location:
974 _paths = [repo_store_location]
977 _paths = [repo_store_location]
975 else:
978 else:
976 _paths = [self.repos_path, new_parent_path, repo_name]
979 _paths = [self.repos_path, new_parent_path, repo_name]
977 # we need to make it str for mercurial
980 # we need to make it str for mercurial
978 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
981 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
979
982
980 # check if this path is not a repository
983 # check if this path is not a repository
981 if is_valid_repo(repo_path, self.repos_path):
984 if is_valid_repo(repo_path, self.repos_path):
982 raise Exception('This path %s is a valid repository' % repo_path)
985 raise Exception(f'This path {repo_path} is a valid repository')
983
986
984 # check if this path is a group
987 # check if this path is a group
985 if is_valid_repo_group(repo_path, self.repos_path):
988 if is_valid_repo_group(repo_path, self.repos_path):
986 raise Exception('This path %s is a valid group' % repo_path)
989 raise Exception(f'This path {repo_path} is a valid group')
987
990
988 log.info('creating repo %s in %s from url: `%s`',
991 log.info('creating repo %s in %s from url: `%s`',
989 repo_name, safe_unicode(repo_path),
992 repo_name, safe_str(repo_path),
990 obfuscate_url_pw(clone_uri))
993 obfuscate_url_pw(clone_uri))
991
994
992 backend = get_backend(repo_type)
995 backend = get_backend(repo_type)
993
996
994 config_repo = None if use_global_config else repo_name
997 config_repo = None if use_global_config else repo_name
995 if config_repo and new_parent_path:
998 if config_repo and new_parent_path:
996 config_repo = Repository.NAME_SEP.join(
999 config_repo = Repository.NAME_SEP.join(
997 (new_parent_path, config_repo))
1000 (new_parent_path, config_repo))
998 config = make_db_config(clear_session=False, repo=config_repo)
1001 config = make_db_config(clear_session=False, repo=config_repo)
999 config.set('extensions', 'largefiles', '')
1002 config.set('extensions', 'largefiles', '')
1000
1003
1001 # patch and reset hooks section of UI config to not run any
1004 # patch and reset hooks section of UI config to not run any
1002 # hooks on creating remote repo
1005 # hooks on creating remote repo
1003 config.clear_section('hooks')
1006 config.clear_section('hooks')
1004
1007
1005 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
1008 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
1006 if repo_type == 'git':
1009 if repo_type == 'git':
1007 repo = backend(
1010 repo = backend(
1008 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
1011 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
1009 with_wire={"cache": False})
1012 with_wire={"cache": False})
1010 else:
1013 else:
1011 repo = backend(
1014 repo = backend(
1012 repo_path, config=config, create=True, src_url=clone_uri,
1015 repo_path, config=config, create=True, src_url=clone_uri,
1013 with_wire={"cache": False})
1016 with_wire={"cache": False})
1014
1017
1015 if install_hooks:
1018 if install_hooks:
1016 repo.install_hooks()
1019 repo.install_hooks()
1017
1020
1018 log.debug('Created repo %s with %s backend',
1021 log.debug('Created repo %s with %s backend',
1019 safe_unicode(repo_name), safe_unicode(repo_type))
1022 safe_str(repo_name), safe_str(repo_type))
1020 return repo
1023 return repo
1021
1024
1022 def _rename_filesystem_repo(self, old, new):
1025 def _rename_filesystem_repo(self, old, new):
1023 """
1026 """
1024 renames repository on filesystem
1027 renames repository on filesystem
1025
1028
1026 :param old: old name
1029 :param old: old name
1027 :param new: new name
1030 :param new: new name
1028 """
1031 """
1029 log.info('renaming repo from %s to %s', old, new)
1032 log.info('renaming repo from %s to %s', old, new)
1030
1033
1031 old_path = os.path.join(self.repos_path, old)
1034 old_path = os.path.join(self.repos_path, old)
1032 new_path = os.path.join(self.repos_path, new)
1035 new_path = os.path.join(self.repos_path, new)
1033 if os.path.isdir(new_path):
1036 if os.path.isdir(new_path):
1034 raise Exception(
1037 raise Exception(
1035 'Was trying to rename to already existing dir %s' % new_path
1038 'Was trying to rename to already existing dir %s' % new_path
1036 )
1039 )
1037 shutil.move(old_path, new_path)
1040 shutil.move(old_path, new_path)
1038
1041
1039 def _delete_filesystem_repo(self, repo):
1042 def _delete_filesystem_repo(self, repo):
1040 """
1043 """
1041 removes repo from filesystem, the removal is acctually made by
1044 removes repo from filesystem, the removal is actually made by
1042 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
1045 added rm__ prefix into dir, and rename internal .hg/.git dirs so this
1043 repository is no longer valid for rhodecode, can be undeleted later on
1046 repository is no longer valid for rhodecode, can be undeleted later on
1044 by reverting the renames on this repository
1047 by reverting the renames on this repository
1045
1048
1046 :param repo: repo object
1049 :param repo: repo object
1047 """
1050 """
1048 rm_path = os.path.join(self.repos_path, repo.repo_name)
1051 rm_path = os.path.join(self.repos_path, repo.repo_name)
1049 repo_group = repo.group
1052 repo_group = repo.group
1050 log.info("Removing repository %s", rm_path)
1053 log.info("delete_filesystem_repo: removing repository %s", rm_path)
1051 # disable hg/git internal that it doesn't get detected as repo
1054 # disable hg/git internal that it doesn't get detected as repo
1052 alias = repo.repo_type
1055 alias = repo.repo_type
1053
1056
1054 config = make_db_config(clear_session=False)
1057 config = make_db_config(clear_session=False)
1055 config.set('extensions', 'largefiles', '')
1058 config.set('extensions', 'largefiles', '')
1056 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1059 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1057
1060
1058 # skip this for bare git repos
1061 # skip this for bare git repos
1059 if not bare:
1062 if not bare:
1060 # disable VCS repo
1063 # disable VCS repo
1061 vcs_path = os.path.join(rm_path, '.%s' % alias)
1064 vcs_path = os.path.join(rm_path, '.%s' % alias)
1062 if os.path.exists(vcs_path):
1065 if os.path.exists(vcs_path):
1063 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1066 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1064
1067
1065 _now = datetime.datetime.now()
1068 _now = datetime.datetime.now()
1066 _ms = str(_now.microsecond).rjust(6, '0')
1069 _ms = str(_now.microsecond).rjust(6, '0')
1067 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1070 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1068 repo.just_name)
1071 repo.just_name)
1069 if repo_group:
1072 if repo_group:
1070 # if repository is in group, prefix the removal path with the group
1073 # if repository is in group, prefix the removal path with the group
1071 args = repo_group.full_path_splitted + [_d]
1074 args = repo_group.full_path_splitted + [_d]
1072 _d = os.path.join(*args)
1075 _d = os.path.join(*args)
1073
1076
1074 if os.path.isdir(rm_path):
1077 if os.path.isdir(rm_path):
1075 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1078 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1076
1079
1077 # finally cleanup diff-cache if it exists
1080 # finally cleanup diff-cache if it exists
1078 cached_diffs_dir = repo.cached_diffs_dir
1081 cached_diffs_dir = repo.cached_diffs_dir
1079 if os.path.isdir(cached_diffs_dir):
1082 if os.path.isdir(cached_diffs_dir):
1080 shutil.rmtree(cached_diffs_dir)
1083 shutil.rmtree(cached_diffs_dir)
1081
1084
1082
1085
1083 class ReadmeFinder:
1086 class ReadmeFinder:
1084 """
1087 """
1085 Utility which knows how to find a readme for a specific commit.
1088 Utility which knows how to find a readme for a specific commit.
1086
1089
1087 The main idea is that this is a configurable algorithm. When creating an
1090 The main idea is that this is a configurable algorithm. When creating an
1088 instance you can define parameters, currently only the `default_renderer`.
1091 instance you can define parameters, currently only the `default_renderer`.
1089 Based on this configuration the method :meth:`search` behaves slightly
1092 Based on this configuration the method :meth:`search` behaves slightly
1090 different.
1093 different.
1091 """
1094 """
1092
1095
1093 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
1096 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
1094 path_re = re.compile(r'^docs?', re.IGNORECASE)
1097 path_re = re.compile(r'^docs?', re.IGNORECASE)
1095
1098
1096 default_priorities = {
1099 default_priorities = {
1097 None: 0,
1100 None: 0,
1098 '.text': 2,
1101 '.rst': 1,
1099 '.txt': 3,
1102 '.md': 1,
1100 '.rst': 1,
1103 '.rest': 2,
1101 '.rest': 2,
1104 '.mkdn': 2,
1102 '.md': 1,
1105 '.text': 2,
1103 '.mkdn': 2,
1106 '.txt': 3,
1104 '.mdown': 3,
1107 '.mdown': 3,
1105 '.markdown': 4,
1108 '.markdown': 4,
1106 }
1109 }
1107
1110
1108 path_priority = {
1111 path_priority = {
1109 'doc': 0,
1112 'doc': 0,
1110 'docs': 1,
1113 'docs': 1,
1111 }
1114 }
1112
1115
1113 FALLBACK_PRIORITY = 99
1116 FALLBACK_PRIORITY = 99
1114
1117
1115 RENDERER_TO_EXTENSION = {
1118 RENDERER_TO_EXTENSION = {
1116 'rst': ['.rst', '.rest'],
1119 'rst': ['.rst', '.rest'],
1117 'markdown': ['.md', 'mkdn', '.mdown', '.markdown'],
1120 'markdown': ['.md', 'mkdn', '.mdown', '.markdown'],
1118 }
1121 }
1119
1122
1120 def __init__(self, default_renderer=None):
1123 def __init__(self, default_renderer=None):
1121 self._default_renderer = default_renderer
1124 self._default_renderer = default_renderer
1122 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
1125 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
1123 default_renderer, [])
1126 default_renderer, [])
1124
1127
1125 def search(self, commit, path=u'/'):
1128 def search(self, commit, path='/'):
1126 """
1129 """
1127 Find a readme in the given `commit`.
1130 Find a readme in the given `commit`.
1128 """
1131 """
1129 nodes = commit.get_nodes(path)
1132 nodes = commit.get_nodes(path)
1130 matches = self._match_readmes(nodes)
1133 matches = self._match_readmes(nodes)
1131 matches = self._sort_according_to_priority(matches)
1134 matches = self._sort_according_to_priority(matches)
1132 if matches:
1135 if matches:
1133 return matches[0].node
1136 return matches[0].node
1134
1137
1135 paths = self._match_paths(nodes)
1138 paths = self._match_paths(nodes)
1136 paths = self._sort_paths_according_to_priority(paths)
1139 paths = self._sort_paths_according_to_priority(paths)
1137 for path in paths:
1140 for path in paths:
1138 match = self.search(commit, path=path)
1141 match = self.search(commit, path=path)
1139 if match:
1142 if match:
1140 return match
1143 return match
1141
1144
1142 return None
1145 return None
1143
1146
1144 def _match_readmes(self, nodes):
1147 def _match_readmes(self, nodes):
1145 for node in nodes:
1148 for node in nodes:
1146 if not node.is_file():
1149 if not node.is_file():
1147 continue
1150 continue
1148 path = node.path.rsplit('/', 1)[-1]
1151 path = node.path.rsplit('/', 1)[-1]
1149 match = self.readme_re.match(path)
1152 match = self.readme_re.match(path)
1150 if match:
1153 if match:
1151 extension = match.group(1)
1154 extension = match.group(1)
1152 yield ReadmeMatch(node, match, self._priority(extension))
1155 yield ReadmeMatch(node, match, self._priority(extension))
1153
1156
1154 def _match_paths(self, nodes):
1157 def _match_paths(self, nodes):
1155 for node in nodes:
1158 for node in nodes:
1156 if not node.is_dir():
1159 if not node.is_dir():
1157 continue
1160 continue
1158 match = self.path_re.match(node.path)
1161 match = self.path_re.match(node.path)
1159 if match:
1162 if match:
1160 yield node.path
1163 yield node.path
1161
1164
1162 def _priority(self, extension):
1165 def _priority(self, extension):
1163 renderer_priority = (
1166 renderer_priority = (
1164 0 if extension in self._renderer_extensions else 1)
1167 0 if extension in self._renderer_extensions else 1)
1165 extension_priority = self.default_priorities.get(
1168 extension_priority = self.default_priorities.get(
1166 extension, self.FALLBACK_PRIORITY)
1169 extension, self.FALLBACK_PRIORITY)
1167 return (renderer_priority, extension_priority)
1170 return (renderer_priority, extension_priority)
1168
1171
1169 def _sort_according_to_priority(self, matches):
1172 def _sort_according_to_priority(self, matches):
1170
1173
1171 def priority_and_path(match):
1174 def priority_and_path(match):
1172 return (match.priority, match.path)
1175 return (match.priority, match.path)
1173
1176
1174 return sorted(matches, key=priority_and_path)
1177 return sorted(matches, key=priority_and_path)
1175
1178
1176 def _sort_paths_according_to_priority(self, paths):
1179 def _sort_paths_according_to_priority(self, paths):
1177
1180
1178 def priority_and_path(path):
1181 def priority_and_path(path):
1179 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1182 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1180
1183
1181 return sorted(paths, key=priority_and_path)
1184 return sorted(paths, key=priority_and_path)
1182
1185
1183
1186
1184 class ReadmeMatch:
1187 class ReadmeMatch:
1185
1188
1186 def __init__(self, node, match, priority):
1189 def __init__(self, node, match, priority):
1187 self.node = node
1190 self.node = node
1188 self._match = match
1191 self._match = match
1189 self.priority = priority
1192 self.priority = priority
1190
1193
1191 @property
1194 @property
1192 def path(self):
1195 def path(self):
1193 return self.node.path
1196 return self.node.path
1194
1197
1195 def __repr__(self):
1198 def __repr__(self):
1196 return '<ReadmeMatch {} priority={}'.format(self.path, self.priority)
1199 return '<ReadmeMatch {} priority={}'.format(self.path, self.priority)
@@ -1,897 +1,897 b''
1
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21
21
22 """
22 """
23 repo group model for RhodeCode
23 repo group model for RhodeCode
24 """
24 """
25
25
26 import os
26 import os
27 import datetime
27 import datetime
28 import itertools
28 import itertools
29 import logging
29 import logging
30 import shutil
30 import shutil
31 import time
31 import time
32 import traceback
32 import traceback
33 import string
33 import string
34
34
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 from rhodecode import events
37 from rhodecode import events
38 from rhodecode.model import BaseModel
38 from rhodecode.model import BaseModel
39 from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator,
39 from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator,
40 Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
40 Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
41 UserGroup, Repository)
41 UserGroup, Repository)
42 from rhodecode.model.permission import PermissionModel
42 from rhodecode.model.permission import PermissionModel
43 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
43 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
44 from rhodecode.lib.caching_query import FromCache
44 from rhodecode.lib.caching_query import FromCache
45 from rhodecode.lib.utils2 import action_logger_generic
45 from rhodecode.lib.utils2 import action_logger_generic
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 class RepoGroupModel(BaseModel):
50 class RepoGroupModel(BaseModel):
51
51
52 cls = RepoGroup
52 cls = RepoGroup
53 PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
53 PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
54 PERSONAL_GROUP_PATTERN = '${username}' # default
54 PERSONAL_GROUP_PATTERN = '${username}' # default
55
55
56 def _get_user_group(self, users_group):
56 def _get_user_group(self, users_group):
57 return self._get_instance(UserGroup, users_group,
57 return self._get_instance(UserGroup, users_group,
58 callback=UserGroup.get_by_group_name)
58 callback=UserGroup.get_by_group_name)
59
59
60 def _get_repo_group(self, repo_group):
60 def _get_repo_group(self, repo_group):
61 return self._get_instance(RepoGroup, repo_group,
61 return self._get_instance(RepoGroup, repo_group,
62 callback=RepoGroup.get_by_group_name)
62 callback=RepoGroup.get_by_group_name)
63
63
64 def get_repo_group(self, repo_group):
64 def get_repo_group(self, repo_group):
65 return self._get_repo_group(repo_group)
65 return self._get_repo_group(repo_group)
66
66
67 @LazyProperty
67 @LazyProperty
68 def repos_path(self):
68 def repos_path(self):
69 """
69 """
70 Gets the repositories root path from database
70 Gets the repositories root path from database
71 """
71 """
72
72
73 settings_model = VcsSettingsModel(sa=self.sa)
73 settings_model = VcsSettingsModel(sa=self.sa)
74 return settings_model.get_repos_location()
74 return settings_model.get_repos_location()
75
75
76 def get_by_group_name(self, repo_group_name, cache=None):
76 def get_by_group_name(self, repo_group_name, cache=None):
77 repo = self.sa.query(RepoGroup) \
77 repo = self.sa.query(RepoGroup) \
78 .filter(RepoGroup.group_name == repo_group_name)
78 .filter(RepoGroup.group_name == repo_group_name)
79
79
80 if cache:
80 if cache:
81 name_key = _hash_key(repo_group_name)
81 name_key = _hash_key(repo_group_name)
82 repo = repo.options(
82 repo = repo.options(
83 FromCache("sql_cache_short", f"get_repo_group_{name_key}"))
83 FromCache("sql_cache_short", f"get_repo_group_{name_key}"))
84 return repo.scalar()
84 return repo.scalar()
85
85
86 def get_default_create_personal_repo_group(self):
86 def get_default_create_personal_repo_group(self):
87 value = SettingsModel().get_setting_by_name(
87 value = SettingsModel().get_setting_by_name(
88 'create_personal_repo_group')
88 'create_personal_repo_group')
89 return value.app_settings_value if value else None or False
89 return value.app_settings_value if value else None or False
90
90
91 def get_personal_group_name_pattern(self):
91 def get_personal_group_name_pattern(self):
92 value = SettingsModel().get_setting_by_name(
92 value = SettingsModel().get_setting_by_name(
93 'personal_repo_group_pattern')
93 'personal_repo_group_pattern')
94 val = value.app_settings_value if value else None
94 val = value.app_settings_value if value else None
95 group_template = val or self.PERSONAL_GROUP_PATTERN
95 group_template = val or self.PERSONAL_GROUP_PATTERN
96
96
97 group_template = group_template.lstrip('/')
97 group_template = group_template.lstrip('/')
98 return group_template
98 return group_template
99
99
100 def get_personal_group_name(self, user):
100 def get_personal_group_name(self, user):
101 template = self.get_personal_group_name_pattern()
101 template = self.get_personal_group_name_pattern()
102 return string.Template(template).safe_substitute(
102 return string.Template(template).safe_substitute(
103 username=user.username,
103 username=user.username,
104 user_id=user.user_id,
104 user_id=user.user_id,
105 first_name=user.first_name,
105 first_name=user.first_name,
106 last_name=user.last_name,
106 last_name=user.last_name,
107 )
107 )
108
108
109 def create_personal_repo_group(self, user, commit_early=True):
109 def create_personal_repo_group(self, user, commit_early=True):
110 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
110 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
111 personal_repo_group_name = self.get_personal_group_name(user)
111 personal_repo_group_name = self.get_personal_group_name(user)
112
112
113 # create a new one
113 # create a new one
114 RepoGroupModel().create(
114 RepoGroupModel().create(
115 group_name=personal_repo_group_name,
115 group_name=personal_repo_group_name,
116 group_description=desc,
116 group_description=desc,
117 owner=user.username,
117 owner=user.username,
118 personal=True,
118 personal=True,
119 commit_early=commit_early)
119 commit_early=commit_early)
120
120
121 def _create_default_perms(self, new_group):
121 def _create_default_perms(self, new_group):
122 # create default permission
122 # create default permission
123 default_perm = 'group.read'
123 default_perm = 'group.read'
124 def_user = User.get_default_user()
124 def_user = User.get_default_user()
125 for p in def_user.user_perms:
125 for p in def_user.user_perms:
126 if p.permission.permission_name.startswith('group.'):
126 if p.permission.permission_name.startswith('group.'):
127 default_perm = p.permission.permission_name
127 default_perm = p.permission.permission_name
128 break
128 break
129
129
130 repo_group_to_perm = UserRepoGroupToPerm()
130 repo_group_to_perm = UserRepoGroupToPerm()
131 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
131 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
132
132
133 repo_group_to_perm.group = new_group
133 repo_group_to_perm.group = new_group
134 repo_group_to_perm.user_id = def_user.user_id
134 repo_group_to_perm.user = def_user
135 return repo_group_to_perm
135 return repo_group_to_perm
136
136
137 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
137 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
138 get_object=False):
138 get_object=False):
139 """
139 """
140 Get's the group name and a parent group name from given group name.
140 Get's the group name and a parent group name from given group name.
141 If repo_in_path is set to truth, we asume the full path also includes
141 If repo_in_path is set to truth, we asume the full path also includes
142 repo name, in such case we clean the last element.
142 repo name, in such case we clean the last element.
143
143
144 :param group_name_full:
144 :param group_name_full:
145 """
145 """
146 split_paths = 1
146 split_paths = 1
147 if repo_in_path:
147 if repo_in_path:
148 split_paths = 2
148 split_paths = 2
149 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
149 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
150
150
151 if repo_in_path and len(_parts) > 1:
151 if repo_in_path and len(_parts) > 1:
152 # such case last element is the repo_name
152 # such case last element is the repo_name
153 _parts.pop(-1)
153 _parts.pop(-1)
154 group_name_cleaned = _parts[-1] # just the group name
154 group_name_cleaned = _parts[-1] # just the group name
155 parent_repo_group_name = None
155 parent_repo_group_name = None
156
156
157 if len(_parts) > 1:
157 if len(_parts) > 1:
158 parent_repo_group_name = _parts[0]
158 parent_repo_group_name = _parts[0]
159
159
160 parent_group = None
160 parent_group = None
161 if parent_repo_group_name:
161 if parent_repo_group_name:
162 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
162 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
163
163
164 if get_object:
164 if get_object:
165 return group_name_cleaned, parent_repo_group_name, parent_group
165 return group_name_cleaned, parent_repo_group_name, parent_group
166
166
167 return group_name_cleaned, parent_repo_group_name
167 return group_name_cleaned, parent_repo_group_name
168
168
169 def check_exist_filesystem(self, group_name, exc_on_failure=True):
169 def check_exist_filesystem(self, group_name, exc_on_failure=True):
170 create_path = os.path.join(self.repos_path, group_name)
170 create_path = os.path.join(self.repos_path, group_name)
171 log.debug('creating new group in %s', create_path)
171 log.debug('creating new group in %s', create_path)
172
172
173 if os.path.isdir(create_path):
173 if os.path.isdir(create_path):
174 if exc_on_failure:
174 if exc_on_failure:
175 abs_create_path = os.path.abspath(create_path)
175 abs_create_path = os.path.abspath(create_path)
176 raise Exception('Directory `{}` already exists !'.format(abs_create_path))
176 raise Exception('Directory `{}` already exists !'.format(abs_create_path))
177 return False
177 return False
178 return True
178 return True
179
179
180 def _create_group(self, group_name):
180 def _create_group(self, group_name):
181 """
181 """
182 makes repository group on filesystem
182 makes repository group on filesystem
183
183
184 :param repo_name:
184 :param repo_name:
185 :param parent_id:
185 :param parent_id:
186 """
186 """
187
187
188 self.check_exist_filesystem(group_name)
188 self.check_exist_filesystem(group_name)
189 create_path = os.path.join(self.repos_path, group_name)
189 create_path = os.path.join(self.repos_path, group_name)
190 log.debug('creating new group in %s', create_path)
190 log.debug('creating new group in %s', create_path)
191 os.makedirs(create_path, mode=0o755)
191 os.makedirs(create_path, mode=0o755)
192 log.debug('created group in %s', create_path)
192 log.debug('created group in %s', create_path)
193
193
194 def _rename_group(self, old, new):
194 def _rename_group(self, old, new):
195 """
195 """
196 Renames a group on filesystem
196 Renames a group on filesystem
197
197
198 :param group_name:
198 :param group_name:
199 """
199 """
200
200
201 if old == new:
201 if old == new:
202 log.debug('skipping group rename')
202 log.debug('skipping group rename')
203 return
203 return
204
204
205 log.debug('renaming repository group from %s to %s', old, new)
205 log.debug('renaming repository group from %s to %s', old, new)
206
206
207 old_path = os.path.join(self.repos_path, old)
207 old_path = os.path.join(self.repos_path, old)
208 new_path = os.path.join(self.repos_path, new)
208 new_path = os.path.join(self.repos_path, new)
209
209
210 log.debug('renaming repos paths from %s to %s', old_path, new_path)
210 log.debug('renaming repos paths from %s to %s', old_path, new_path)
211
211
212 if os.path.isdir(new_path):
212 if os.path.isdir(new_path):
213 raise Exception('Was trying to rename to already '
213 raise Exception('Was trying to rename to already '
214 'existing dir %s' % new_path)
214 'existing dir %s' % new_path)
215 shutil.move(old_path, new_path)
215 shutil.move(old_path, new_path)
216
216
217 def _delete_filesystem_group(self, group, force_delete=False):
217 def _delete_filesystem_group(self, group, force_delete=False):
218 """
218 """
219 Deletes a group from a filesystem
219 Deletes a group from a filesystem
220
220
221 :param group: instance of group from database
221 :param group: instance of group from database
222 :param force_delete: use shutil rmtree to remove all objects
222 :param force_delete: use shutil rmtree to remove all objects
223 """
223 """
224 paths = group.full_path.split(RepoGroup.url_sep())
224 paths = group.full_path.split(RepoGroup.url_sep())
225 paths = os.sep.join(paths)
225 paths = os.sep.join(paths)
226
226
227 rm_path = os.path.join(self.repos_path, paths)
227 rm_path = os.path.join(self.repos_path, paths)
228 log.info("Removing group %s", rm_path)
228 log.info("Removing group %s", rm_path)
229 # delete only if that path really exists
229 # delete only if that path really exists
230 if os.path.isdir(rm_path):
230 if os.path.isdir(rm_path):
231 if force_delete:
231 if force_delete:
232 shutil.rmtree(rm_path)
232 shutil.rmtree(rm_path)
233 else:
233 else:
234 # archive that group`
234 # archive that group`
235 _now = datetime.datetime.now()
235 _now = datetime.datetime.now()
236 _ms = str(_now.microsecond).rjust(6, '0')
236 _ms = str(_now.microsecond).rjust(6, '0')
237 _d = 'rm__%s_GROUP_%s' % (
237 _d = 'rm__%s_GROUP_%s' % (
238 _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
238 _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
239 shutil.move(rm_path, os.path.join(self.repos_path, _d))
239 shutil.move(rm_path, os.path.join(self.repos_path, _d))
240
240
241 def create(self, group_name, group_description, owner, just_db=False,
241 def create(self, group_name, group_description, owner, just_db=False,
242 copy_permissions=False, personal=None, commit_early=True):
242 copy_permissions=False, personal=None, commit_early=True):
243
243
244 (group_name_cleaned,
244 (group_name_cleaned,
245 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)
245 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)
246
246
247 parent_group = None
247 parent_group = None
248 if parent_group_name:
248 if parent_group_name:
249 parent_group = self._get_repo_group(parent_group_name)
249 parent_group = self._get_repo_group(parent_group_name)
250 if not parent_group:
250 if not parent_group:
251 # we tried to create a nested group, but the parent is not
251 # we tried to create a nested group, but the parent is not
252 # existing
252 # existing
253 raise ValueError(
253 raise ValueError(
254 'Parent group `%s` given in `%s` group name '
254 'Parent group `%s` given in `%s` group name '
255 'is not yet existing.' % (parent_group_name, group_name))
255 'is not yet existing.' % (parent_group_name, group_name))
256
256
257 # because we are doing a cleanup, we need to check if such directory
257 # because we are doing a cleanup, we need to check if such directory
258 # already exists. If we don't do that we can accidentally delete
258 # already exists. If we don't do that we can accidentally delete
259 # existing directory via cleanup that can cause data issues, since
259 # existing directory via cleanup that can cause data issues, since
260 # delete does a folder rename to special syntax later cleanup
260 # delete does a folder rename to special syntax later cleanup
261 # functions can delete this
261 # functions can delete this
262 cleanup_group = self.check_exist_filesystem(group_name,
262 cleanup_group = self.check_exist_filesystem(group_name,
263 exc_on_failure=False)
263 exc_on_failure=False)
264 user = self._get_user(owner)
264 user = self._get_user(owner)
265 if not user:
265 if not user:
266 raise ValueError('Owner %s not found as rhodecode user', owner)
266 raise ValueError('Owner %s not found as rhodecode user', owner)
267
267
268 try:
268 try:
269 new_repo_group = RepoGroup()
269 new_repo_group = RepoGroup()
270 new_repo_group.user = user
270 new_repo_group.user = user
271 new_repo_group.group_description = group_description or group_name
271 new_repo_group.group_description = group_description or group_name
272 new_repo_group.parent_group = parent_group
272 new_repo_group.parent_group = parent_group
273 new_repo_group.group_name = group_name
273 new_repo_group.group_name = group_name
274 new_repo_group.personal = personal
274 new_repo_group.personal = personal
275
275
276 self.sa.add(new_repo_group)
276 self.sa.add(new_repo_group)
277
277
278 # create an ADMIN permission for owner except if we're super admin,
278 # create an ADMIN permission for owner except if we're super admin,
279 # later owner should go into the owner field of groups
279 # later owner should go into the owner field of groups
280 if not user.is_admin:
280 if not user.is_admin:
281 self.grant_user_permission(repo_group=new_repo_group,
281 self.grant_user_permission(repo_group=new_repo_group,
282 user=owner, perm='group.admin')
282 user=owner, perm='group.admin')
283
283
284 if parent_group and copy_permissions:
284 if parent_group and copy_permissions:
285 # copy permissions from parent
285 # copy permissions from parent
286 user_perms = UserRepoGroupToPerm.query() \
286 user_perms = UserRepoGroupToPerm.query() \
287 .filter(UserRepoGroupToPerm.group == parent_group).all()
287 .filter(UserRepoGroupToPerm.group == parent_group).all()
288
288
289 group_perms = UserGroupRepoGroupToPerm.query() \
289 group_perms = UserGroupRepoGroupToPerm.query() \
290 .filter(UserGroupRepoGroupToPerm.group == parent_group).all()
290 .filter(UserGroupRepoGroupToPerm.group == parent_group).all()
291
291
292 for perm in user_perms:
292 for perm in user_perms:
293 # don't copy over the permission for user who is creating
293 # don't copy over the permission for user who is creating
294 # this group, if he is not super admin he get's admin
294 # this group, if he is not super admin he get's admin
295 # permission set above
295 # permission set above
296 if perm.user != user or user.is_admin:
296 if perm.user != user or user.is_admin:
297 UserRepoGroupToPerm.create(
297 UserRepoGroupToPerm.create(
298 perm.user, new_repo_group, perm.permission)
298 perm.user, new_repo_group, perm.permission)
299
299
300 for perm in group_perms:
300 for perm in group_perms:
301 UserGroupRepoGroupToPerm.create(
301 UserGroupRepoGroupToPerm.create(
302 perm.users_group, new_repo_group, perm.permission)
302 perm.users_group, new_repo_group, perm.permission)
303 else:
303 else:
304 perm_obj = self._create_default_perms(new_repo_group)
304 perm_obj = self._create_default_perms(new_repo_group)
305 self.sa.add(perm_obj)
305 self.sa.add(perm_obj)
306
306
307 # now commit the changes, earlier so we are sure everything is in
307 # now commit the changes, earlier so we are sure everything is in
308 # the database.
308 # the database.
309 if commit_early:
309 if commit_early:
310 self.sa.commit()
310 self.sa.commit()
311 if not just_db:
311 if not just_db:
312 self._create_group(new_repo_group.group_name)
312 self._create_group(new_repo_group.group_name)
313
313
314 # trigger the post hook
314 # trigger the post hook
315 from rhodecode.lib import hooks_base
315 from rhodecode.lib import hooks_base
316 repo_group = RepoGroup.get_by_group_name(group_name)
316 repo_group = RepoGroup.get_by_group_name(group_name)
317
317
318 # update repo group commit caches initially
318 # update repo group commit caches initially
319 repo_group.update_commit_cache()
319 repo_group.update_commit_cache()
320
320
321 hooks_base.create_repository_group(
321 hooks_base.create_repository_group(
322 created_by=user.username, **repo_group.get_dict())
322 created_by=user.username, **repo_group.get_dict())
323
323
324 # Trigger create event.
324 # Trigger create event.
325 events.trigger(events.RepoGroupCreateEvent(repo_group))
325 events.trigger(events.RepoGroupCreateEvent(repo_group))
326
326
327 return new_repo_group
327 return new_repo_group
328 except Exception:
328 except Exception:
329 self.sa.rollback()
329 self.sa.rollback()
330 log.exception('Exception occurred when creating repository group, '
330 log.exception('Exception occurred when creating repository group, '
331 'doing cleanup...')
331 'doing cleanup...')
332 # rollback things manually !
332 # rollback things manually !
333 repo_group = RepoGroup.get_by_group_name(group_name)
333 repo_group = RepoGroup.get_by_group_name(group_name)
334 if repo_group:
334 if repo_group:
335 RepoGroup.delete(repo_group.group_id)
335 RepoGroup.delete(repo_group.group_id)
336 self.sa.commit()
336 self.sa.commit()
337 if cleanup_group:
337 if cleanup_group:
338 RepoGroupModel()._delete_filesystem_group(repo_group)
338 RepoGroupModel()._delete_filesystem_group(repo_group)
339 raise
339 raise
340
340
    def update_permissions(
            self, repo_group, perm_additions=None, perm_updates=None,
            perm_deletions=None, recursive=None, check_perms=True,
            cur_user=None):
        """
        Apply permission additions, updates and deletions to a repository
        group, optionally cascading the changes onto child repo groups
        and/or repositories.

        :param repo_group: RepoGroup instance whose permissions are changed
        :param perm_additions: iterable of (member_id, perm, member_type)
            tuples to add; member_type is 'user' or 'user_group'
        :param perm_updates: iterable of (member_id, perm, member_type)
            tuples for existing permissions to change
        :param perm_deletions: iterable of (member_id, perm, member_type)
            tuples for permissions to revoke
        :param recursive: cascade mode: 'none' (default, this group only),
            'repos', 'groups', or 'all'
        :param check_perms: when True, changes involving a user group require
            `cur_user` to hold one of the usergroup read/write/admin perms
        :param cur_user: user performing the operation, used for the
            user-group permission checks
        :return: dict with 'added'/'updated'/'deleted' change records and a
            'default_user_changed' flag (True when the default user's
            permission on this group itself was altered)
        """
        from rhodecode.model.repo import RepoModel
        from rhodecode.lib.auth import HasUserGroupPermissionAny

        # normalize missing inputs to empty lists so the loops below are safe
        if not perm_additions:
            perm_additions = []
        if not perm_updates:
            perm_updates = []
        if not perm_deletions:
            perm_deletions = []

        req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

        changes = {
            'added': [],
            'updated': [],
            'deleted': [],
            'default_user_changed': None
        }

        # helpers below dispatch on the iterated object type: repo groups go
        # through this model, repositories through RepoModel with the perm
        # name translated from 'group.*' to 'repository.*'

        def _set_perm_user(obj, user, perm):
            # grant/update a user permission on either a RepoGroup or a Repository
            if isinstance(obj, RepoGroup):
                self.grant_user_permission(
                    repo_group=obj, user=user, perm=perm)
            elif isinstance(obj, Repository):
                # private repos will not allow to change the default
                # permissions using recursive mode
                if obj.private and user == User.DEFAULT_USER:
                    return

                # we set group permission but we have to switch to repo
                # permission
                perm = perm.replace('group.', 'repository.')
                RepoModel().grant_user_permission(
                    repo=obj, user=user, perm=perm)

        def _set_perm_group(obj, users_group, perm):
            # grant/update a user-group permission on a RepoGroup or Repository
            if isinstance(obj, RepoGroup):
                self.grant_user_group_permission(
                    repo_group=obj, group_name=users_group, perm=perm)
            elif isinstance(obj, Repository):
                # we set group permission but we have to switch to repo
                # permission
                perm = perm.replace('group.', 'repository.')
                RepoModel().grant_user_group_permission(
                    repo=obj, group_name=users_group, perm=perm)

        def _revoke_perm_user(obj, user):
            # drop a user permission from a RepoGroup or Repository
            if isinstance(obj, RepoGroup):
                self.revoke_user_permission(repo_group=obj, user=user)
            elif isinstance(obj, Repository):
                RepoModel().revoke_user_permission(repo=obj, user=user)

        def _revoke_perm_group(obj, user_group):
            # drop a user-group permission from a RepoGroup or Repository
            if isinstance(obj, RepoGroup):
                self.revoke_user_group_permission(
                    repo_group=obj, group_name=user_group)
            elif isinstance(obj, Repository):
                RepoModel().revoke_user_group_permission(
                    repo=obj, group_name=user_group)

        # start updates
        log.debug('Now updating permissions for %s in recursive mode:%s',
                  repo_group, recursive)

        # initialize check function, we'll call that multiple times
        has_group_perm = HasUserGroupPermissionAny(*req_perms)

        for obj in repo_group.recursive_groups_and_repos():
            # iterated obj is an instance of a repos group or repository in
            # that group, recursive option can be: none, repos, groups, all
            if recursive == 'all':
                obj = obj
            elif recursive == 'repos':
                # skip groups, other than this one
                if isinstance(obj, RepoGroup) and not obj == repo_group:
                    continue
            elif recursive == 'groups':
                # skip repos
                if isinstance(obj, Repository):
                    continue
            else:  # recursive == 'none':
                # DEFAULT option - don't apply to iterated objects
                # also we do a break at the end of this loop. if we are not
                # in recursive mode
                obj = repo_group

            change_obj = obj.get_api_data()

            # update permissions
            for member_id, perm, member_type in perm_updates:
                member_id = int(member_id)
                if member_type == 'user':
                    member_name = User.get(member_id).username
                    if isinstance(obj, RepoGroup) and obj == repo_group and member_name == User.DEFAULT_USER:
                        # NOTE(dan): detect if we changed permissions for default user
                        perm_obj = self.sa.query(UserRepoGroupToPerm) \
                            .filter(UserRepoGroupToPerm.user_id == member_id) \
                            .filter(UserRepoGroupToPerm.group == repo_group) \
                            .scalar()
                        if perm_obj and perm_obj.permission.permission_name != perm:
                            changes['default_user_changed'] = True

                    # this updates also current one if found
                    _set_perm_user(obj, user=member_id, perm=perm)
                elif member_type == 'user_group':
                    member_name = UserGroup.get(member_id).users_group_name
                    if not check_perms or has_group_perm(member_name,
                                                         user=cur_user):
                        _set_perm_group(obj, users_group=member_id, perm=perm)
                else:
                    raise ValueError("member_type must be 'user' or 'user_group' "
                                     "got {} instead".format(member_type))

                changes['updated'].append(
                    {'change_obj': change_obj, 'type': member_type,
                     'id': member_id, 'name': member_name, 'new_perm': perm})

            # set new permissions
            for member_id, perm, member_type in perm_additions:
                member_id = int(member_id)
                if member_type == 'user':
                    member_name = User.get(member_id).username
                    _set_perm_user(obj, user=member_id, perm=perm)
                elif member_type == 'user_group':
                    # check if we have permissions to alter this usergroup
                    member_name = UserGroup.get(member_id).users_group_name
                    if not check_perms or has_group_perm(member_name,
                                                         user=cur_user):
                        _set_perm_group(obj, users_group=member_id, perm=perm)
                else:
                    raise ValueError("member_type must be 'user' or 'user_group' "
                                     "got {} instead".format(member_type))

                changes['added'].append(
                    {'change_obj': change_obj, 'type': member_type,
                     'id': member_id, 'name': member_name, 'new_perm': perm})

            # delete permissions
            for member_id, perm, member_type in perm_deletions:
                member_id = int(member_id)
                if member_type == 'user':
                    member_name = User.get(member_id).username
                    _revoke_perm_user(obj, user=member_id)
                elif member_type == 'user_group':
                    # check if we have permissions to alter this usergroup
                    member_name = UserGroup.get(member_id).users_group_name
                    if not check_perms or has_group_perm(member_name,
                                                         user=cur_user):
                        _revoke_perm_group(obj, user_group=member_id)
                else:
                    raise ValueError("member_type must be 'user' or 'user_group' "
                                     "got {} instead".format(member_type))

                changes['deleted'].append(
                    {'change_obj': change_obj, 'type': member_type,
                     'id': member_id, 'name': member_name, 'new_perm': perm})

            # if it's not recursive call for all,repos,groups
            # break the loop and don't proceed with other changes
            if recursive not in ['all', 'repos', 'groups']:
                break

        return changes
508
508
    def update(self, repo_group, form_data):
        """
        Update properties of a repository group from a form-data dict and
        propagate renames/locking to all children.

        Recognized ``form_data`` keys: ``group_description``,
        ``enable_locking``, ``group_parent_id``, ``group_name`` and ``user``
        (new owner username). Any rename is applied to every child repo
        group and repository, and mirrored on the filesystem via
        ``_rename_group``.

        :param repo_group: RepoGroup instance, id or name (resolved via
            ``_get_repo_group``)
        :param form_data: dict of properties to update
        :return: the updated RepoGroup instance
        :raises Exception: re-raises any failure after logging the traceback
        """
        try:
            repo_group = self._get_repo_group(repo_group)
            # remember the old path so the filesystem rename below knows
            # the source location
            old_path = repo_group.full_path

            # change properties
            if 'group_description' in form_data:
                repo_group.group_description = form_data['group_description']

            if 'enable_locking' in form_data:
                repo_group.enable_locking = form_data['enable_locking']

            if 'group_parent_id' in form_data:
                parent_group = (
                    self._get_repo_group(form_data['group_parent_id']))
                repo_group.group_parent_id = (
                    parent_group.group_id if parent_group else None)
                repo_group.parent_group = parent_group

            # mikhail: to update the full_path, we have to explicitly
            # update group_name
            group_name = form_data.get('group_name', repo_group.name)
            repo_group.group_name = repo_group.get_new_name(group_name)

            new_path = repo_group.full_path

            affected_user_ids = []
            if 'user' in form_data:
                # owner change: remember both ids so their cached
                # permissions can be flushed after the update
                old_owner_id = repo_group.user.user_id
                new_owner = User.get_by_username(form_data['user'])
                repo_group.user = new_owner

                if old_owner_id != new_owner.user_id:
                    affected_user_ids = [new_owner.user_id, old_owner_id]

            self.sa.add(repo_group)

            # iterate over all members of this groups and do fixes
            # set locking if given
            # if obj is a repoGroup also fix the name of the group according
            # to the parent
            # if obj is a Repo fix it's name
            # this can be potentially heavy operation
            for obj in repo_group.recursive_groups_and_repos():
                # set the value from it's parent
                obj.enable_locking = repo_group.enable_locking
                if isinstance(obj, RepoGroup):
                    new_name = obj.get_new_name(obj.name)
                    log.debug('Fixing group %s to new name %s',
                              obj.group_name, new_name)
                    obj.group_name = new_name

                elif isinstance(obj, Repository):
                    # we need to get all repositories from this new group and
                    # rename them accordingly to new group path
                    new_name = obj.get_new_name(obj.just_name)
                    log.debug('Fixing repo %s to new name %s',
                              obj.repo_name, new_name)
                    obj.repo_name = new_name

                self.sa.add(obj)

            self._rename_group(old_path, new_path)

            # Trigger update event.
            events.trigger(events.RepoGroupUpdateEvent(repo_group))

            if affected_user_ids:
                PermissionModel().trigger_permission_flush(affected_user_ids)

            return repo_group
        except Exception:
            log.error(traceback.format_exc())
            raise
583
583
584 def delete(self, repo_group, force_delete=False, fs_remove=True):
584 def delete(self, repo_group, force_delete=False, fs_remove=True):
585 repo_group = self._get_repo_group(repo_group)
585 repo_group = self._get_repo_group(repo_group)
586 if not repo_group:
586 if not repo_group:
587 return False
587 return False
588 try:
588 try:
589 self.sa.delete(repo_group)
589 self.sa.delete(repo_group)
590 if fs_remove:
590 if fs_remove:
591 self._delete_filesystem_group(repo_group, force_delete)
591 self._delete_filesystem_group(repo_group, force_delete)
592 else:
592 else:
593 log.debug('skipping removal from filesystem')
593 log.debug('skipping removal from filesystem')
594
594
595 # Trigger delete event.
595 # Trigger delete event.
596 events.trigger(events.RepoGroupDeleteEvent(repo_group))
596 events.trigger(events.RepoGroupDeleteEvent(repo_group))
597 return True
597 return True
598
598
599 except Exception:
599 except Exception:
600 log.error('Error removing repo_group %s', repo_group)
600 log.error('Error removing repo_group %s', repo_group)
601 raise
601 raise
602
602
603 def grant_user_permission(self, repo_group, user, perm):
603 def grant_user_permission(self, repo_group, user, perm):
604 """
604 """
605 Grant permission for user on given repository group, or update
605 Grant permission for user on given repository group, or update
606 existing one if found
606 existing one if found
607
607
608 :param repo_group: Instance of RepoGroup, repositories_group_id,
608 :param repo_group: Instance of RepoGroup, repositories_group_id,
609 or repositories_group name
609 or repositories_group name
610 :param user: Instance of User, user_id or username
610 :param user: Instance of User, user_id or username
611 :param perm: Instance of Permission, or permission_name
611 :param perm: Instance of Permission, or permission_name
612 """
612 """
613
613
614 repo_group = self._get_repo_group(repo_group)
614 repo_group = self._get_repo_group(repo_group)
615 user = self._get_user(user)
615 user = self._get_user(user)
616 permission = self._get_perm(perm)
616 permission = self._get_perm(perm)
617
617
618 # check if we have that permission already
618 # check if we have that permission already
619 obj = self.sa.query(UserRepoGroupToPerm)\
619 obj = self.sa.query(UserRepoGroupToPerm)\
620 .filter(UserRepoGroupToPerm.user == user)\
620 .filter(UserRepoGroupToPerm.user == user)\
621 .filter(UserRepoGroupToPerm.group == repo_group)\
621 .filter(UserRepoGroupToPerm.group == repo_group)\
622 .scalar()
622 .scalar()
623 if obj is None:
623 if obj is None:
624 # create new !
624 # create new !
625 obj = UserRepoGroupToPerm()
625 obj = UserRepoGroupToPerm()
626 obj.group = repo_group
626 obj.group = repo_group
627 obj.user = user
627 obj.user = user
628 obj.permission = permission
628 obj.permission = permission
629 self.sa.add(obj)
629 self.sa.add(obj)
630 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
630 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
631 action_logger_generic(
631 action_logger_generic(
632 'granted permission: {} to user: {} on repogroup: {}'.format(
632 'granted permission: {} to user: {} on repogroup: {}'.format(
633 perm, user, repo_group), namespace='security.repogroup')
633 perm, user, repo_group), namespace='security.repogroup')
634 return obj
634 return obj
635
635
636 def revoke_user_permission(self, repo_group, user):
636 def revoke_user_permission(self, repo_group, user):
637 """
637 """
638 Revoke permission for user on given repository group
638 Revoke permission for user on given repository group
639
639
640 :param repo_group: Instance of RepoGroup, repositories_group_id,
640 :param repo_group: Instance of RepoGroup, repositories_group_id,
641 or repositories_group name
641 or repositories_group name
642 :param user: Instance of User, user_id or username
642 :param user: Instance of User, user_id or username
643 """
643 """
644
644
645 repo_group = self._get_repo_group(repo_group)
645 repo_group = self._get_repo_group(repo_group)
646 user = self._get_user(user)
646 user = self._get_user(user)
647
647
648 obj = self.sa.query(UserRepoGroupToPerm)\
648 obj = self.sa.query(UserRepoGroupToPerm)\
649 .filter(UserRepoGroupToPerm.user == user)\
649 .filter(UserRepoGroupToPerm.user == user)\
650 .filter(UserRepoGroupToPerm.group == repo_group)\
650 .filter(UserRepoGroupToPerm.group == repo_group)\
651 .scalar()
651 .scalar()
652 if obj:
652 if obj:
653 self.sa.delete(obj)
653 self.sa.delete(obj)
654 log.debug('Revoked perm on %s on %s', repo_group, user)
654 log.debug('Revoked perm on %s on %s', repo_group, user)
655 action_logger_generic(
655 action_logger_generic(
656 'revoked permission from user: {} on repogroup: {}'.format(
656 'revoked permission from user: {} on repogroup: {}'.format(
657 user, repo_group), namespace='security.repogroup')
657 user, repo_group), namespace='security.repogroup')
658
658
659 def grant_user_group_permission(self, repo_group, group_name, perm):
659 def grant_user_group_permission(self, repo_group, group_name, perm):
660 """
660 """
661 Grant permission for user group on given repository group, or update
661 Grant permission for user group on given repository group, or update
662 existing one if found
662 existing one if found
663
663
664 :param repo_group: Instance of RepoGroup, repositories_group_id,
664 :param repo_group: Instance of RepoGroup, repositories_group_id,
665 or repositories_group name
665 or repositories_group name
666 :param group_name: Instance of UserGroup, users_group_id,
666 :param group_name: Instance of UserGroup, users_group_id,
667 or user group name
667 or user group name
668 :param perm: Instance of Permission, or permission_name
668 :param perm: Instance of Permission, or permission_name
669 """
669 """
670 repo_group = self._get_repo_group(repo_group)
670 repo_group = self._get_repo_group(repo_group)
671 group_name = self._get_user_group(group_name)
671 group_name = self._get_user_group(group_name)
672 permission = self._get_perm(perm)
672 permission = self._get_perm(perm)
673
673
674 # check if we have that permission already
674 # check if we have that permission already
675 obj = self.sa.query(UserGroupRepoGroupToPerm)\
675 obj = self.sa.query(UserGroupRepoGroupToPerm)\
676 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
676 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
677 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
677 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
678 .scalar()
678 .scalar()
679
679
680 if obj is None:
680 if obj is None:
681 # create new
681 # create new
682 obj = UserGroupRepoGroupToPerm()
682 obj = UserGroupRepoGroupToPerm()
683
683
684 obj.group = repo_group
684 obj.group = repo_group
685 obj.users_group = group_name
685 obj.users_group = group_name
686 obj.permission = permission
686 obj.permission = permission
687 self.sa.add(obj)
687 self.sa.add(obj)
688 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
688 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
689 action_logger_generic(
689 action_logger_generic(
690 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
690 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
691 perm, group_name, repo_group), namespace='security.repogroup')
691 perm, group_name, repo_group), namespace='security.repogroup')
692 return obj
692 return obj
693
693
694 def revoke_user_group_permission(self, repo_group, group_name):
694 def revoke_user_group_permission(self, repo_group, group_name):
695 """
695 """
696 Revoke permission for user group on given repository group
696 Revoke permission for user group on given repository group
697
697
698 :param repo_group: Instance of RepoGroup, repositories_group_id,
698 :param repo_group: Instance of RepoGroup, repositories_group_id,
699 or repositories_group name
699 or repositories_group name
700 :param group_name: Instance of UserGroup, users_group_id,
700 :param group_name: Instance of UserGroup, users_group_id,
701 or user group name
701 or user group name
702 """
702 """
703 repo_group = self._get_repo_group(repo_group)
703 repo_group = self._get_repo_group(repo_group)
704 group_name = self._get_user_group(group_name)
704 group_name = self._get_user_group(group_name)
705
705
706 obj = self.sa.query(UserGroupRepoGroupToPerm)\
706 obj = self.sa.query(UserGroupRepoGroupToPerm)\
707 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
707 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
708 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
708 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
709 .scalar()
709 .scalar()
710 if obj:
710 if obj:
711 self.sa.delete(obj)
711 self.sa.delete(obj)
712 log.debug('Revoked perm to %s on %s', repo_group, group_name)
712 log.debug('Revoked perm to %s on %s', repo_group, group_name)
713 action_logger_generic(
713 action_logger_generic(
714 'revoked permission from usergroup: {} on repogroup: {}'.format(
714 'revoked permission from usergroup: {} on repogroup: {}'.format(
715 group_name, repo_group), namespace='security.repogroup')
715 group_name, repo_group), namespace='security.repogroup')
716
716
717 @classmethod
717 @classmethod
718 def update_commit_cache(cls, repo_groups=None):
718 def update_commit_cache(cls, repo_groups=None):
719 if not repo_groups:
719 if not repo_groups:
720 repo_groups = RepoGroup.getAll()
720 repo_groups = RepoGroup.getAll()
721 for repo_group in repo_groups:
721 for repo_group in repo_groups:
722 repo_group.update_commit_cache()
722 repo_group.update_commit_cache()
723
723
724 def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
724 def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
725 super_user_actions=False):
725 super_user_actions=False):
726
726
727 from pyramid.threadlocal import get_current_request
727 from pyramid.threadlocal import get_current_request
728 _render = get_current_request().get_partial_renderer(
728 _render = get_current_request().get_partial_renderer(
729 'rhodecode:templates/data_table/_dt_elements.mako')
729 'rhodecode:templates/data_table/_dt_elements.mako')
730 c = _render.get_call_context()
730 c = _render.get_call_context()
731 h = _render.get_helpers()
731 h = _render.get_helpers()
732
732
733 def quick_menu(repo_group_name):
733 def quick_menu(repo_group_name):
734 return _render('quick_repo_group_menu', repo_group_name)
734 return _render('quick_repo_group_menu', repo_group_name)
735
735
736 def repo_group_lnk(repo_group_name):
736 def repo_group_lnk(repo_group_name):
737 return _render('repo_group_name', repo_group_name)
737 return _render('repo_group_name', repo_group_name)
738
738
739 def last_change(last_change):
739 def last_change(last_change):
740 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
740 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
741 ts = time.time()
741 ts = time.time()
742 utc_offset = (datetime.datetime.fromtimestamp(ts)
742 utc_offset = (datetime.datetime.fromtimestamp(ts)
743 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
743 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
744 last_change = last_change + datetime.timedelta(seconds=utc_offset)
744 last_change = last_change + datetime.timedelta(seconds=utc_offset)
745 return _render("last_change", last_change)
745 return _render("last_change", last_change)
746
746
747 def desc(desc, personal):
747 def desc(desc, personal):
748 return _render(
748 return _render(
749 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
749 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
750
750
751 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
751 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
752 return _render(
752 return _render(
753 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
753 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
754
754
755 def repo_group_name(repo_group_name, children_groups):
755 def repo_group_name(repo_group_name, children_groups):
756 return _render("repo_group_name", repo_group_name, children_groups)
756 return _render("repo_group_name", repo_group_name, children_groups)
757
757
758 def user_profile(username):
758 def user_profile(username):
759 return _render('user_profile', username)
759 return _render('user_profile', username)
760
760
761 repo_group_data = []
761 repo_group_data = []
762 for group in repo_group_list:
762 for group in repo_group_list:
763 # NOTE(marcink): because we use only raw column we need to load it like that
763 # NOTE(marcink): because we use only raw column we need to load it like that
764 changeset_cache = RepoGroup._load_changeset_cache(
764 changeset_cache = RepoGroup._load_changeset_cache(
765 '', group._changeset_cache)
765 '', group._changeset_cache)
766 last_commit_change = RepoGroup._load_commit_change(changeset_cache)
766 last_commit_change = RepoGroup._load_commit_change(changeset_cache)
767 row = {
767 row = {
768 "menu": quick_menu(group.group_name),
768 "menu": quick_menu(group.group_name),
769 "name": repo_group_lnk(group.group_name),
769 "name": repo_group_lnk(group.group_name),
770 "name_raw": group.group_name,
770 "name_raw": group.group_name,
771
771
772 "last_change": last_change(last_commit_change),
772 "last_change": last_change(last_commit_change),
773
773
774 "last_changeset": "",
774 "last_changeset": "",
775 "last_changeset_raw": "",
775 "last_changeset_raw": "",
776
776
777 "desc": desc(h.escape(group.group_description), group.personal),
777 "desc": desc(h.escape(group.group_description), group.personal),
778 "top_level_repos": 0,
778 "top_level_repos": 0,
779 "owner": user_profile(group.User.username)
779 "owner": user_profile(group.User.username)
780 }
780 }
781 if admin:
781 if admin:
782 repo_count = group.repositories.count()
782 repo_count = group.repositories.count()
783 children_groups = map(
783 children_groups = list(map(
784 h.safe_unicode,
784 h.safe_str,
785 itertools.chain((g.name for g in group.parents),
785 itertools.chain((g.name for g in group.parents),
786 (x.name for x in [group])))
786 (x.name for x in [group]))))
787 row.update({
787 row.update({
788 "action": repo_group_actions(
788 "action": repo_group_actions(
789 group.group_id, group.group_name, repo_count),
789 group.group_id, group.group_name, repo_count),
790 "top_level_repos": repo_count,
790 "top_level_repos": repo_count,
791 "name": repo_group_name(group.group_name, children_groups),
791 "name": repo_group_name(group.group_name, children_groups),
792
792
793 })
793 })
794 repo_group_data.append(row)
794 repo_group_data.append(row)
795
795
796 return repo_group_data
796 return repo_group_data
797
797
798 def get_repo_groups_data_table(
798 def get_repo_groups_data_table(
799 self, draw, start, limit,
799 self, draw, start, limit,
800 search_q, order_by, order_dir,
800 search_q, order_by, order_dir,
801 auth_user, repo_group_id):
801 auth_user, repo_group_id):
802 from rhodecode.model.scm import RepoGroupList
802 from rhodecode.model.scm import RepoGroupList
803
803
804 _perms = ['group.read', 'group.write', 'group.admin']
804 _perms = ['group.read', 'group.write', 'group.admin']
805 repo_groups = RepoGroup.query() \
805 repo_groups = RepoGroup.query() \
806 .filter(RepoGroup.group_parent_id == repo_group_id) \
806 .filter(RepoGroup.group_parent_id == repo_group_id) \
807 .all()
807 .all()
808 auth_repo_group_list = RepoGroupList(
808 auth_repo_group_list = RepoGroupList(
809 repo_groups, perm_set=_perms,
809 repo_groups, perm_set=_perms,
810 extra_kwargs=dict(user=auth_user))
810 extra_kwargs=dict(user=auth_user))
811
811
812 allowed_ids = [-1]
812 allowed_ids = [-1]
813 for repo_group in auth_repo_group_list:
813 for repo_group in auth_repo_group_list:
814 allowed_ids.append(repo_group.group_id)
814 allowed_ids.append(repo_group.group_id)
815
815
816 repo_groups_data_total_count = RepoGroup.query() \
816 repo_groups_data_total_count = RepoGroup.query() \
817 .filter(RepoGroup.group_parent_id == repo_group_id) \
817 .filter(RepoGroup.group_parent_id == repo_group_id) \
818 .filter(or_(
818 .filter(or_(
819 # generate multiple IN to fix limitation problems
819 # generate multiple IN to fix limitation problems
820 *in_filter_generator(RepoGroup.group_id, allowed_ids))
820 *in_filter_generator(RepoGroup.group_id, allowed_ids))
821 ) \
821 ) \
822 .count()
822 .count()
823
823
824 base_q = Session.query(
824 base_q = Session.query(
825 RepoGroup.group_name,
825 RepoGroup.group_name,
826 RepoGroup.group_name_hash,
826 RepoGroup.group_name_hash,
827 RepoGroup.group_description,
827 RepoGroup.group_description,
828 RepoGroup.group_id,
828 RepoGroup.group_id,
829 RepoGroup.personal,
829 RepoGroup.personal,
830 RepoGroup.updated_on,
830 RepoGroup.updated_on,
831 RepoGroup._changeset_cache,
831 RepoGroup._changeset_cache,
832 User,
832 User,
833 ) \
833 ) \
834 .filter(RepoGroup.group_parent_id == repo_group_id) \
834 .filter(RepoGroup.group_parent_id == repo_group_id) \
835 .filter(or_(
835 .filter(or_(
836 # generate multiple IN to fix limitation problems
836 # generate multiple IN to fix limitation problems
837 *in_filter_generator(RepoGroup.group_id, allowed_ids))
837 *in_filter_generator(RepoGroup.group_id, allowed_ids))
838 ) \
838 ) \
839 .join(User, User.user_id == RepoGroup.user_id) \
839 .join(User, User.user_id == RepoGroup.user_id) \
840 .group_by(RepoGroup, User)
840 .group_by(RepoGroup, User)
841
841
842 repo_groups_data_total_filtered_count = base_q.count()
842 repo_groups_data_total_filtered_count = base_q.count()
843
843
844 sort_defined = False
844 sort_defined = False
845
845
846 if order_by == 'group_name':
846 if order_by == 'group_name':
847 sort_col = func.lower(RepoGroup.group_name)
847 sort_col = func.lower(RepoGroup.group_name)
848 sort_defined = True
848 sort_defined = True
849 elif order_by == 'user_username':
849 elif order_by == 'user_username':
850 sort_col = User.username
850 sort_col = User.username
851 else:
851 else:
852 sort_col = getattr(RepoGroup, order_by, None)
852 sort_col = getattr(RepoGroup, order_by, None)
853
853
854 if sort_defined or sort_col:
854 if sort_defined or sort_col:
855 if order_dir == 'asc':
855 if order_dir == 'asc':
856 sort_col = sort_col.asc()
856 sort_col = sort_col.asc()
857 else:
857 else:
858 sort_col = sort_col.desc()
858 sort_col = sort_col.desc()
859
859
860 base_q = base_q.order_by(sort_col)
860 base_q = base_q.order_by(sort_col)
861 base_q = base_q.offset(start).limit(limit)
861 base_q = base_q.offset(start).limit(limit)
862
862
863 repo_group_list = base_q.all()
863 repo_group_list = base_q.all()
864
864
865 repo_groups_data = RepoGroupModel().get_repo_groups_as_dict(
865 repo_groups_data = RepoGroupModel().get_repo_groups_as_dict(
866 repo_group_list=repo_group_list, admin=False)
866 repo_group_list=repo_group_list, admin=False)
867
867
868 data = ({
868 data = ({
869 'draw': draw,
869 'draw': draw,
870 'data': repo_groups_data,
870 'data': repo_groups_data,
871 'recordsTotal': repo_groups_data_total_count,
871 'recordsTotal': repo_groups_data_total_count,
872 'recordsFiltered': repo_groups_data_total_filtered_count,
872 'recordsFiltered': repo_groups_data_total_filtered_count,
873 })
873 })
874 return data
874 return data
875
875
876 def _get_defaults(self, repo_group_name):
876 def _get_defaults(self, repo_group_name):
877 repo_group = RepoGroup.get_by_group_name(repo_group_name)
877 repo_group = RepoGroup.get_by_group_name(repo_group_name)
878
878
879 if repo_group is None:
879 if repo_group is None:
880 return None
880 return None
881
881
882 defaults = repo_group.get_dict()
882 defaults = repo_group.get_dict()
883 defaults['repo_group_name'] = repo_group.name
883 defaults['repo_group_name'] = repo_group.name
884 defaults['repo_group_description'] = repo_group.group_description
884 defaults['repo_group_description'] = repo_group.group_description
885 defaults['repo_group_enable_locking'] = repo_group.enable_locking
885 defaults['repo_group_enable_locking'] = repo_group.enable_locking
886
886
887 # we use -1 as this is how in HTML, we mark an empty group
887 # we use -1 as this is how in HTML, we mark an empty group
888 defaults['repo_group'] = defaults['group_parent_id'] or -1
888 defaults['repo_group'] = defaults['group_parent_id'] or -1
889
889
890 # fill owner
890 # fill owner
891 if repo_group.user:
891 if repo_group.user:
892 defaults.update({'user': repo_group.user.username})
892 defaults.update({'user': repo_group.user.username})
893 else:
893 else:
894 replacement_user = User.get_first_super_admin().username
894 replacement_user = User.get_first_super_admin().username
895 defaults.update({'user': replacement_user})
895 defaults.update({'user': replacement_user})
896
896
897 return defaults
897 return defaults
@@ -1,1027 +1,1042 b''
1
1
2 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 """
20 """
21 Scm model for RhodeCode
21 Scm model for RhodeCode
22 """
22 """
23
23
24 import os.path
24 import os.path
25 import traceback
25 import traceback
26 import logging
26 import logging
27 import io
27 import io
28
28
29 from sqlalchemy import func
29 from sqlalchemy import func
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 import rhodecode
32 import rhodecode
33 from rhodecode.lib.str_utils import safe_bytes
33 from rhodecode.lib.vcs import get_backend
34 from rhodecode.lib.vcs import get_backend
34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib import helpers as h, rc_cache
38 from rhodecode.lib import helpers as h, rc_cache
38 from rhodecode.lib.auth import (
39 from rhodecode.lib.auth import (
39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 HasUserGroupPermissionAny)
41 HasUserGroupPermissionAny)
41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 from rhodecode.lib import hooks_utils
43 from rhodecode.lib import hooks_utils
43 from rhodecode.lib.utils import (
44 from rhodecode.lib.utils import (
44 get_filesystem_repos, make_db_config)
45 get_filesystem_repos, make_db_config)
45 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
46 from rhodecode.lib.str_utils import safe_str
46 from rhodecode.lib.system_info import get_system_info
47 from rhodecode.lib.system_info import get_system_info
47 from rhodecode.model import BaseModel
48 from rhodecode.model import BaseModel
48 from rhodecode.model.db import (
49 from rhodecode.model.db import (
49 or_, false,
50 or_, false,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 PullRequest, FileStore)
52 PullRequest, FileStore)
52 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54
55
55 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
56
57
57
58
58 class UserTemp(object):
59 class UserTemp(object):
59 def __init__(self, user_id):
60 def __init__(self, user_id):
60 self.user_id = user_id
61 self.user_id = user_id
61
62
62 def __repr__(self):
63 def __repr__(self):
63 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
64 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
64
65
65
66
66 class RepoTemp(object):
67 class RepoTemp(object):
67 def __init__(self, repo_id):
68 def __init__(self, repo_id):
68 self.repo_id = repo_id
69 self.repo_id = repo_id
69
70
70 def __repr__(self):
71 def __repr__(self):
71 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
72 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
72
73
73
74
74 class SimpleCachedRepoList(object):
75 class SimpleCachedRepoList(object):
75 """
76 """
76 Lighter version of of iteration of repos without the scm initialisation,
77 Lighter version of of iteration of repos without the scm initialisation,
77 and with cache usage
78 and with cache usage
78 """
79 """
79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 self.db_repo_list = db_repo_list
81 self.db_repo_list = db_repo_list
81 self.repos_path = repos_path
82 self.repos_path = repos_path
82 self.order_by = order_by
83 self.order_by = order_by
83 self.reversed = (order_by or '').startswith('-')
84 self.reversed = (order_by or '').startswith('-')
84 if not perm_set:
85 if not perm_set:
85 perm_set = ['repository.read', 'repository.write',
86 perm_set = ['repository.read', 'repository.write',
86 'repository.admin']
87 'repository.admin']
87 self.perm_set = perm_set
88 self.perm_set = perm_set
88
89
89 def __len__(self):
90 def __len__(self):
90 return len(self.db_repo_list)
91 return len(self.db_repo_list)
91
92
92 def __repr__(self):
93 def __repr__(self):
93 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
94 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
94
95
95 def __iter__(self):
96 def __iter__(self):
96 for dbr in self.db_repo_list:
97 for dbr in self.db_repo_list:
97 # check permission at this level
98 # check permission at this level
98 has_perm = HasRepoPermissionAny(*self.perm_set)(
99 has_perm = HasRepoPermissionAny(*self.perm_set)(
99 dbr.repo_name, 'SimpleCachedRepoList check')
100 dbr.repo_name, 'SimpleCachedRepoList check')
100 if not has_perm:
101 if not has_perm:
101 continue
102 continue
102
103
103 tmp_d = {
104 tmp_d = {
104 'name': dbr.repo_name,
105 'name': dbr.repo_name,
105 'dbrepo': dbr.get_dict(),
106 'dbrepo': dbr.get_dict(),
106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
107 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
107 }
108 }
108 yield tmp_d
109 yield tmp_d
109
110
110
111
111 class _PermCheckIterator(object):
112 class _PermCheckIterator(object):
112
113
113 def __init__(
114 def __init__(
114 self, obj_list, obj_attr, perm_set, perm_checker,
115 self, obj_list, obj_attr, perm_set, perm_checker,
115 extra_kwargs=None):
116 extra_kwargs=None):
116 """
117 """
117 Creates iterator from given list of objects, additionally
118 Creates iterator from given list of objects, additionally
118 checking permission for them from perm_set var
119 checking permission for them from perm_set var
119
120
120 :param obj_list: list of db objects
121 :param obj_list: list of db objects
121 :param obj_attr: attribute of object to pass into perm_checker
122 :param obj_attr: attribute of object to pass into perm_checker
122 :param perm_set: list of permissions to check
123 :param perm_set: list of permissions to check
123 :param perm_checker: callable to check permissions against
124 :param perm_checker: callable to check permissions against
124 """
125 """
125 self.obj_list = obj_list
126 self.obj_list = obj_list
126 self.obj_attr = obj_attr
127 self.obj_attr = obj_attr
127 self.perm_set = perm_set
128 self.perm_set = perm_set
128 self.perm_checker = perm_checker(*self.perm_set)
129 self.perm_checker = perm_checker(*self.perm_set)
129 self.extra_kwargs = extra_kwargs or {}
130 self.extra_kwargs = extra_kwargs or {}
130
131
131 def __len__(self):
132 def __len__(self):
132 return len(self.obj_list)
133 return len(self.obj_list)
133
134
134 def __repr__(self):
135 def __repr__(self):
135 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
136 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
136
137
137 def __iter__(self):
138 def __iter__(self):
138 for db_obj in self.obj_list:
139 for db_obj in self.obj_list:
139 # check permission at this level
140 # check permission at this level
140 # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
141 # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
141 name = db_obj.__dict__.get(self.obj_attr, None)
142 name = db_obj.__dict__.get(self.obj_attr, None)
142 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
143 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
143 continue
144 continue
144
145
145 yield db_obj
146 yield db_obj
146
147
147
148
148 class RepoList(_PermCheckIterator):
149 class RepoList(_PermCheckIterator):
149
150
150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
151 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
151 if not perm_set:
152 if not perm_set:
152 perm_set = ['repository.read', 'repository.write', 'repository.admin']
153 perm_set = ['repository.read', 'repository.write', 'repository.admin']
153
154
154 super(RepoList, self).__init__(
155 super(RepoList, self).__init__(
155 obj_list=db_repo_list,
156 obj_list=db_repo_list,
156 obj_attr='_repo_name', perm_set=perm_set,
157 obj_attr='_repo_name', perm_set=perm_set,
157 perm_checker=HasRepoPermissionAny,
158 perm_checker=HasRepoPermissionAny,
158 extra_kwargs=extra_kwargs)
159 extra_kwargs=extra_kwargs)
159
160
160
161
161 class RepoGroupList(_PermCheckIterator):
162 class RepoGroupList(_PermCheckIterator):
162
163
163 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
164 if not perm_set:
165 if not perm_set:
165 perm_set = ['group.read', 'group.write', 'group.admin']
166 perm_set = ['group.read', 'group.write', 'group.admin']
166
167
167 super(RepoGroupList, self).__init__(
168 super(RepoGroupList, self).__init__(
168 obj_list=db_repo_group_list,
169 obj_list=db_repo_group_list,
169 obj_attr='_group_name', perm_set=perm_set,
170 obj_attr='_group_name', perm_set=perm_set,
170 perm_checker=HasRepoGroupPermissionAny,
171 perm_checker=HasRepoGroupPermissionAny,
171 extra_kwargs=extra_kwargs)
172 extra_kwargs=extra_kwargs)
172
173
173
174
174 class UserGroupList(_PermCheckIterator):
175 class UserGroupList(_PermCheckIterator):
175
176
176 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
177 if not perm_set:
178 if not perm_set:
178 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
179
180
180 super(UserGroupList, self).__init__(
181 super(UserGroupList, self).__init__(
181 obj_list=db_user_group_list,
182 obj_list=db_user_group_list,
182 obj_attr='users_group_name', perm_set=perm_set,
183 obj_attr='users_group_name', perm_set=perm_set,
183 perm_checker=HasUserGroupPermissionAny,
184 perm_checker=HasUserGroupPermissionAny,
184 extra_kwargs=extra_kwargs)
185 extra_kwargs=extra_kwargs)
185
186
186
187
187 class ScmModel(BaseModel):
188 class ScmModel(BaseModel):
188 """
189 """
189 Generic Scm Model
190 Generic Scm Model
190 """
191 """
191
192
192 @LazyProperty
193 @LazyProperty
193 def repos_path(self):
194 def repos_path(self):
194 """
195 """
195 Gets the repositories root path from database
196 Gets the repositories root path from database
196 """
197 """
197
198
198 settings_model = VcsSettingsModel(sa=self.sa)
199 settings_model = VcsSettingsModel(sa=self.sa)
199 return settings_model.get_repos_location()
200 return settings_model.get_repos_location()
200
201
201 def repo_scan(self, repos_path=None):
202 def repo_scan(self, repos_path=None):
202 """
203 """
203 Listing of repositories in given path. This path should not be a
204 Listing of repositories in given path. This path should not be a
204 repository itself. Return a dictionary of repository objects
205 repository itself. Return a dictionary of repository objects
205
206
206 :param repos_path: path to directory containing repositories
207 :param repos_path: path to directory containing repositories
207 """
208 """
208
209
209 if repos_path is None:
210 if repos_path is None:
210 repos_path = self.repos_path
211 repos_path = self.repos_path
211
212
212 log.info('scanning for repositories in %s', repos_path)
213 log.info('scanning for repositories in %s', repos_path)
213
214
214 config = make_db_config()
215 config = make_db_config()
215 config.set('extensions', 'largefiles', '')
216 config.set('extensions', 'largefiles', '')
216 repos = {}
217 repos = {}
217
218
218 for name, path in get_filesystem_repos(repos_path, recursive=True):
219 for name, path in get_filesystem_repos(repos_path, recursive=True):
219 # name need to be decomposed and put back together using the /
220 # name need to be decomposed and put back together using the /
220 # since this is internal storage separator for rhodecode
221 # since this is internal storage separator for rhodecode
221 name = Repository.normalize_repo_name(name)
222 name = Repository.normalize_repo_name(name)
222
223
223 try:
224 try:
224 if name in repos:
225 if name in repos:
225 raise RepositoryError('Duplicate repository name %s '
226 raise RepositoryError('Duplicate repository name %s '
226 'found in %s' % (name, path))
227 'found in %s' % (name, path))
227 elif path[0] in rhodecode.BACKENDS:
228 elif path[0] in rhodecode.BACKENDS:
228 backend = get_backend(path[0])
229 backend = get_backend(path[0])
229 repos[name] = backend(path[1], config=config,
230 repos[name] = backend(path[1], config=config,
230 with_wire={"cache": False})
231 with_wire={"cache": False})
231 except OSError:
232 except OSError:
232 continue
233 continue
233 except RepositoryError:
234 except RepositoryError:
234 log.exception('Failed to create a repo')
235 log.exception('Failed to create a repo')
235 continue
236 continue
236
237
237 log.debug('found %s paths with repositories', len(repos))
238 log.debug('found %s paths with repositories', len(repos))
238 return repos
239 return repos
239
240
240 def get_repos(self, all_repos=None, sort_key=None):
241 def get_repos(self, all_repos=None, sort_key=None):
241 """
242 """
242 Get all repositories from db and for each repo create it's
243 Get all repositories from db and for each repo create it's
243 backend instance and fill that backed with information from database
244 backend instance and fill that backed with information from database
244
245
245 :param all_repos: list of repository names as strings
246 :param all_repos: list of repository names as strings
246 give specific repositories list, good for filtering
247 give specific repositories list, good for filtering
247
248
248 :param sort_key: initial sorting of repositories
249 :param sort_key: initial sorting of repositories
249 """
250 """
250 if all_repos is None:
251 if all_repos is None:
251 all_repos = self.sa.query(Repository)\
252 all_repos = self.sa.query(Repository)\
252 .filter(Repository.group_id == None)\
253 .filter(Repository.group_id == None)\
253 .order_by(func.lower(Repository.repo_name)).all()
254 .order_by(func.lower(Repository.repo_name)).all()
254 repo_iter = SimpleCachedRepoList(
255 repo_iter = SimpleCachedRepoList(
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
256 all_repos, repos_path=self.repos_path, order_by=sort_key)
256 return repo_iter
257 return repo_iter
257
258
258 def get_repo_groups(self, all_groups=None):
259 def get_repo_groups(self, all_groups=None):
259 if all_groups is None:
260 if all_groups is None:
260 all_groups = RepoGroup.query()\
261 all_groups = RepoGroup.query()\
261 .filter(RepoGroup.group_parent_id == None).all()
262 .filter(RepoGroup.group_parent_id == None).all()
262 return [x for x in RepoGroupList(all_groups)]
263 return [x for x in RepoGroupList(all_groups)]
263
264
264 def mark_for_invalidation(self, repo_name, delete=False):
265 def mark_for_invalidation(self, repo_name, delete=False):
265 """
266 """
266 Mark caches of this repo invalid in the database. `delete` flag
267 Mark caches of this repo invalid in the database. `delete` flag
267 removes the cache entries
268 removes the cache entries
268
269
269 :param repo_name: the repo_name for which caches should be marked
270 :param repo_name: the repo_name for which caches should be marked
270 invalid, or deleted
271 invalid, or deleted
271 :param delete: delete the entry keys instead of setting bool
272 :param delete: delete the entry keys instead of setting bool
272 flag on them, and also purge caches used by the dogpile
273 flag on them, and also purge caches used by the dogpile
273 """
274 """
274 repo = Repository.get_by_repo_name(repo_name)
275 repo = Repository.get_by_repo_name(repo_name)
275
276
276 if repo:
277 if repo:
277 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
278 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
278 repo_id=repo.repo_id)
279 repo_id=repo.repo_id)
279 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
280 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
280
281
281 repo_id = repo.repo_id
282 repo_id = repo.repo_id
282 config = repo._config
283 config = repo._config
283 config.set('extensions', 'largefiles', '')
284 config.set('extensions', 'largefiles', '')
284 repo.update_commit_cache(config=config, cs_cache=None)
285 repo.update_commit_cache(config=config, cs_cache=None)
285 if delete:
286 if delete:
286 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
287 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
287 rc_cache.clear_cache_namespace(
288 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
288 'cache_repo', cache_namespace_uid, invalidate=True)
289
289
290 def toggle_following_repo(self, follow_repo_id, user_id):
290 def toggle_following_repo(self, follow_repo_id, user_id):
291
291
292 f = self.sa.query(UserFollowing)\
292 f = self.sa.query(UserFollowing)\
293 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
293 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
294 .filter(UserFollowing.user_id == user_id).scalar()
294 .filter(UserFollowing.user_id == user_id).scalar()
295
295
296 if f is not None:
296 if f is not None:
297 try:
297 try:
298 self.sa.delete(f)
298 self.sa.delete(f)
299 return
299 return
300 except Exception:
300 except Exception:
301 log.error(traceback.format_exc())
301 log.error(traceback.format_exc())
302 raise
302 raise
303
303
304 try:
304 try:
305 f = UserFollowing()
305 f = UserFollowing()
306 f.user_id = user_id
306 f.user_id = user_id
307 f.follows_repo_id = follow_repo_id
307 f.follows_repo_id = follow_repo_id
308 self.sa.add(f)
308 self.sa.add(f)
309 except Exception:
309 except Exception:
310 log.error(traceback.format_exc())
310 log.error(traceback.format_exc())
311 raise
311 raise
312
312
313 def toggle_following_user(self, follow_user_id, user_id):
313 def toggle_following_user(self, follow_user_id, user_id):
314 f = self.sa.query(UserFollowing)\
314 f = self.sa.query(UserFollowing)\
315 .filter(UserFollowing.follows_user_id == follow_user_id)\
315 .filter(UserFollowing.follows_user_id == follow_user_id)\
316 .filter(UserFollowing.user_id == user_id).scalar()
316 .filter(UserFollowing.user_id == user_id).scalar()
317
317
318 if f is not None:
318 if f is not None:
319 try:
319 try:
320 self.sa.delete(f)
320 self.sa.delete(f)
321 return
321 return
322 except Exception:
322 except Exception:
323 log.error(traceback.format_exc())
323 log.error(traceback.format_exc())
324 raise
324 raise
325
325
326 try:
326 try:
327 f = UserFollowing()
327 f = UserFollowing()
328 f.user_id = user_id
328 f.user_id = user_id
329 f.follows_user_id = follow_user_id
329 f.follows_user_id = follow_user_id
330 self.sa.add(f)
330 self.sa.add(f)
331 except Exception:
331 except Exception:
332 log.error(traceback.format_exc())
332 log.error(traceback.format_exc())
333 raise
333 raise
334
334
335 def is_following_repo(self, repo_name, user_id, cache=False):
335 def is_following_repo(self, repo_name, user_id, cache=False):
336 r = self.sa.query(Repository)\
336 r = self.sa.query(Repository)\
337 .filter(Repository.repo_name == repo_name).scalar()
337 .filter(Repository.repo_name == repo_name).scalar()
338
338
339 f = self.sa.query(UserFollowing)\
339 f = self.sa.query(UserFollowing)\
340 .filter(UserFollowing.follows_repository == r)\
340 .filter(UserFollowing.follows_repository == r)\
341 .filter(UserFollowing.user_id == user_id).scalar()
341 .filter(UserFollowing.user_id == user_id).scalar()
342
342
343 return f is not None
343 return f is not None
344
344
345 def is_following_user(self, username, user_id, cache=False):
345 def is_following_user(self, username, user_id, cache=False):
346 u = User.get_by_username(username)
346 u = User.get_by_username(username)
347
347
348 f = self.sa.query(UserFollowing)\
348 f = self.sa.query(UserFollowing)\
349 .filter(UserFollowing.follows_user == u)\
349 .filter(UserFollowing.follows_user == u)\
350 .filter(UserFollowing.user_id == user_id).scalar()
350 .filter(UserFollowing.user_id == user_id).scalar()
351
351
352 return f is not None
352 return f is not None
353
353
354 def get_followers(self, repo):
354 def get_followers(self, repo):
355 repo = self._get_repo(repo)
355 repo = self._get_repo(repo)
356
356
357 return self.sa.query(UserFollowing)\
357 return self.sa.query(UserFollowing)\
358 .filter(UserFollowing.follows_repository == repo).count()
358 .filter(UserFollowing.follows_repository == repo).count()
359
359
360 def get_forks(self, repo):
360 def get_forks(self, repo):
361 repo = self._get_repo(repo)
361 repo = self._get_repo(repo)
362 return self.sa.query(Repository)\
362 return self.sa.query(Repository)\
363 .filter(Repository.fork == repo).count()
363 .filter(Repository.fork == repo).count()
364
364
365 def get_pull_requests(self, repo):
365 def get_pull_requests(self, repo):
366 repo = self._get_repo(repo)
366 repo = self._get_repo(repo)
367 return self.sa.query(PullRequest)\
367 return self.sa.query(PullRequest)\
368 .filter(PullRequest.target_repo == repo)\
368 .filter(PullRequest.target_repo == repo)\
369 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
369 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
370
370
371 def get_artifacts(self, repo):
371 def get_artifacts(self, repo):
372 repo = self._get_repo(repo)
372 repo = self._get_repo(repo)
373 return self.sa.query(FileStore)\
373 return self.sa.query(FileStore)\
374 .filter(FileStore.repo == repo)\
374 .filter(FileStore.repo == repo)\
375 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
375 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
376
376
377 def mark_as_fork(self, repo, fork, user):
377 def mark_as_fork(self, repo, fork, user):
378 repo = self._get_repo(repo)
378 repo = self._get_repo(repo)
379 fork = self._get_repo(fork)
379 fork = self._get_repo(fork)
380 if fork and repo.repo_id == fork.repo_id:
380 if fork and repo.repo_id == fork.repo_id:
381 raise Exception("Cannot set repository as fork of itself")
381 raise Exception("Cannot set repository as fork of itself")
382
382
383 if fork and repo.repo_type != fork.repo_type:
383 if fork and repo.repo_type != fork.repo_type:
384 raise RepositoryError(
384 raise RepositoryError(
385 "Cannot set repository as fork of repository with other type")
385 "Cannot set repository as fork of repository with other type")
386
386
387 repo.fork = fork
387 repo.fork = fork
388 self.sa.add(repo)
388 self.sa.add(repo)
389 return repo
389 return repo
390
390
391 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
391 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
392 dbrepo = self._get_repo(repo)
392 dbrepo = self._get_repo(repo)
393 remote_uri = remote_uri or dbrepo.clone_uri
393 remote_uri = remote_uri or dbrepo.clone_uri
394 if not remote_uri:
394 if not remote_uri:
395 raise Exception("This repository doesn't have a clone uri")
395 raise Exception("This repository doesn't have a clone uri")
396
396
397 repo = dbrepo.scm_instance(cache=False)
397 repo = dbrepo.scm_instance(cache=False)
398 repo.config.clear_section('hooks')
398 repo.config.clear_section('hooks')
399
399
400 try:
400 try:
401 # NOTE(marcink): add extra validation so we skip invalid urls
401 # NOTE(marcink): add extra validation so we skip invalid urls
402 # this is due this tasks can be executed via scheduler without
402 # this is due this tasks can be executed via scheduler without
403 # proper validation of remote_uri
403 # proper validation of remote_uri
404 if validate_uri:
404 if validate_uri:
405 config = make_db_config(clear_session=False)
405 config = make_db_config(clear_session=False)
406 url_validator(remote_uri, dbrepo.repo_type, config)
406 url_validator(remote_uri, dbrepo.repo_type, config)
407 except InvalidCloneUrl:
407 except InvalidCloneUrl:
408 raise
408 raise
409
409
410 repo_name = dbrepo.repo_name
410 repo_name = dbrepo.repo_name
411 try:
411 try:
412 # TODO: we need to make sure those operations call proper hooks !
412 # TODO: we need to make sure those operations call proper hooks !
413 repo.fetch(remote_uri)
413 repo.fetch(remote_uri)
414
414
415 self.mark_for_invalidation(repo_name)
415 self.mark_for_invalidation(repo_name)
416 except Exception:
416 except Exception:
417 log.error(traceback.format_exc())
417 log.error(traceback.format_exc())
418 raise
418 raise
419
419
420 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
420 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
421 dbrepo = self._get_repo(repo)
421 dbrepo = self._get_repo(repo)
422 remote_uri = remote_uri or dbrepo.push_uri
422 remote_uri = remote_uri or dbrepo.push_uri
423 if not remote_uri:
423 if not remote_uri:
424 raise Exception("This repository doesn't have a clone uri")
424 raise Exception("This repository doesn't have a clone uri")
425
425
426 repo = dbrepo.scm_instance(cache=False)
426 repo = dbrepo.scm_instance(cache=False)
427 repo.config.clear_section('hooks')
427 repo.config.clear_section('hooks')
428
428
429 try:
429 try:
430 # NOTE(marcink): add extra validation so we skip invalid urls
430 # NOTE(marcink): add extra validation so we skip invalid urls
431 # this is due this tasks can be executed via scheduler without
431 # this is due this tasks can be executed via scheduler without
432 # proper validation of remote_uri
432 # proper validation of remote_uri
433 if validate_uri:
433 if validate_uri:
434 config = make_db_config(clear_session=False)
434 config = make_db_config(clear_session=False)
435 url_validator(remote_uri, dbrepo.repo_type, config)
435 url_validator(remote_uri, dbrepo.repo_type, config)
436 except InvalidCloneUrl:
436 except InvalidCloneUrl:
437 raise
437 raise
438
438
439 try:
439 try:
440 repo.push(remote_uri)
440 repo.push(remote_uri)
441 except Exception:
441 except Exception:
442 log.error(traceback.format_exc())
442 log.error(traceback.format_exc())
443 raise
443 raise
444
444
445 def commit_change(self, repo, repo_name, commit, user, author, message,
445 def commit_change(self, repo, repo_name, commit, user, author, message,
446 content, f_path):
446 content: bytes, f_path: bytes):
447 """
447 """
448 Commits changes
448 Commits changes
449
450 :param repo: SCM instance
451
452 """
449 """
453 user = self._get_user(user)
450 user = self._get_user(user)
454
451
455 # decoding here will force that we have proper encoded values
456 # in any other case this will throw exceptions and deny commit
457 content = safe_str(content)
458 path = safe_str(f_path)
459 # message and author needs to be unicode
452 # message and author needs to be unicode
460 # proper backend should then translate that into required type
453 # proper backend should then translate that into required type
461 message = safe_unicode(message)
454 message = safe_str(message)
462 author = safe_unicode(author)
455 author = safe_str(author)
463 imc = repo.in_memory_commit
456 imc = repo.in_memory_commit
464 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
457 imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
465 try:
458 try:
466 # TODO: handle pre-push action !
459 # TODO: handle pre-push action !
467 tip = imc.commit(
460 tip = imc.commit(
468 message=message, author=author, parents=[commit],
461 message=message, author=author, parents=[commit],
469 branch=commit.branch)
462 branch=commit.branch)
470 except Exception as e:
463 except Exception as e:
471 log.error(traceback.format_exc())
464 log.error(traceback.format_exc())
472 raise IMCCommitError(str(e))
465 raise IMCCommitError(str(e))
473 finally:
466 finally:
474 # always clear caches, if commit fails we want fresh object also
467 # always clear caches, if commit fails we want fresh object also
475 self.mark_for_invalidation(repo_name)
468 self.mark_for_invalidation(repo_name)
476
469
477 # We trigger the post-push action
470 # We trigger the post-push action
478 hooks_utils.trigger_post_push_hook(
471 hooks_utils.trigger_post_push_hook(
479 username=user.username, action='push_local', hook_type='post_push',
472 username=user.username, action='push_local', hook_type='post_push',
480 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
473 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
481 return tip
474 return tip
482
475
483 def _sanitize_path(self, f_path):
476 def _sanitize_path(self, f_path: bytes):
484 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
477 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
485 raise NonRelativePathError('%s is not an relative path' % f_path)
478 raise NonRelativePathError(b'%b is not an relative path' % f_path)
486 if f_path:
479 if f_path:
487 f_path = os.path.normpath(f_path)
480 f_path = os.path.normpath(f_path)
488 return f_path
481 return f_path
489
482
490 def get_dirnode_metadata(self, request, commit, dir_node):
483 def get_dirnode_metadata(self, request, commit, dir_node):
491 if not dir_node.is_dir():
484 if not dir_node.is_dir():
492 return []
485 return []
493
486
494 data = []
487 data = []
495 for node in dir_node:
488 for node in dir_node:
496 if not node.is_file():
489 if not node.is_file():
497 # we skip file-nodes
490 # we skip file-nodes
498 continue
491 continue
499
492
500 last_commit = node.last_commit
493 last_commit = node.last_commit
501 last_commit_date = last_commit.date
494 last_commit_date = last_commit.date
502 data.append({
495 data.append({
503 'name': node.name,
496 'name': node.name,
504 'size': h.format_byte_size_binary(node.size),
497 'size': h.format_byte_size_binary(node.size),
505 'modified_at': h.format_date(last_commit_date),
498 'modified_at': h.format_date(last_commit_date),
506 'modified_ts': last_commit_date.isoformat(),
499 'modified_ts': last_commit_date.isoformat(),
507 'revision': last_commit.revision,
500 'revision': last_commit.revision,
508 'short_id': last_commit.short_id,
501 'short_id': last_commit.short_id,
509 'message': h.escape(last_commit.message),
502 'message': h.escape(last_commit.message),
510 'author': h.escape(last_commit.author),
503 'author': h.escape(last_commit.author),
511 'user_profile': h.gravatar_with_user(
504 'user_profile': h.gravatar_with_user(
512 request, last_commit.author),
505 request, last_commit.author),
513 })
506 })
514
507
515 return data
508 return data
516
509
517 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
510 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
518 extended_info=False, content=False, max_file_bytes=None):
511 extended_info=False, content=False, max_file_bytes=None):
519 """
512 """
520 recursive walk in root dir and return a set of all path in that dir
513 recursive walk in root dir and return a set of all path in that dir
521 based on repository walk function
514 based on repository walk function
522
515
523 :param repo_name: name of repository
516 :param repo_name: name of repository
524 :param commit_id: commit id for which to list nodes
517 :param commit_id: commit id for which to list nodes
525 :param root_path: root path to list
518 :param root_path: root path to list
526 :param flat: return as a list, if False returns a dict with description
519 :param flat: return as a list, if False returns a dict with description
527 :param extended_info: show additional info such as md5, binary, size etc
520 :param extended_info: show additional info such as md5, binary, size etc
528 :param content: add nodes content to the return data
521 :param content: add nodes content to the return data
529 :param max_file_bytes: will not return file contents over this limit
522 :param max_file_bytes: will not return file contents over this limit
530
523
531 """
524 """
532 _files = list()
525 _files = list()
533 _dirs = list()
526 _dirs = list()
527
534 try:
528 try:
535 _repo = self._get_repo(repo_name)
529 _repo = self._get_repo(repo_name)
536 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
530 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
537 root_path = root_path.lstrip('/')
531 root_path = root_path.lstrip('/')
538 for __, dirs, files in commit.walk(root_path):
532
533 # get RootNode, inject pre-load options before walking
534 top_node = commit.get_node(root_path)
535 extended_info_pre_load = []
536 if extended_info:
537 extended_info_pre_load += ['md5']
538 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
539
540 for __, dirs, files in commit.walk(top_node):
539
541
540 for f in files:
542 for f in files:
541 _content = None
543 _content = None
542 _data = f_name = f.unicode_path
544 _data = f_name = f.str_path
543
545
544 if not flat:
546 if not flat:
545 _data = {
547 _data = {
546 "name": h.escape(f_name),
548 "name": h.escape(f_name),
547 "type": "file",
549 "type": "file",
548 }
550 }
549 if extended_info:
551 if extended_info:
550 _data.update({
552 _data.update({
551 "md5": f.md5,
553 "md5": f.md5,
552 "binary": f.is_binary,
554 "binary": f.is_binary,
553 "size": f.size,
555 "size": f.size,
554 "extension": f.extension,
556 "extension": f.extension,
555 "mimetype": f.mimetype,
557 "mimetype": f.mimetype,
556 "lines": f.lines()[0]
558 "lines": f.lines()[0]
557 })
559 })
558
560
559 if content:
561 if content:
560 over_size_limit = (max_file_bytes is not None
562 over_size_limit = (max_file_bytes is not None
561 and f.size > max_file_bytes)
563 and f.size > max_file_bytes)
562 full_content = None
564 full_content = None
563 if not f.is_binary and not over_size_limit:
565 if not f.is_binary and not over_size_limit:
564 full_content = safe_str(f.content)
566 full_content = f.str_content
565
567
566 _data.update({
568 _data.update({
567 "content": full_content,
569 "content": full_content,
568 })
570 })
569 _files.append(_data)
571 _files.append(_data)
570
572
571 for d in dirs:
573 for d in dirs:
572 _data = d_name = d.unicode_path
574 _data = d_name = d.str_path
573 if not flat:
575 if not flat:
574 _data = {
576 _data = {
575 "name": h.escape(d_name),
577 "name": h.escape(d_name),
576 "type": "dir",
578 "type": "dir",
577 }
579 }
578 if extended_info:
580 if extended_info:
579 _data.update({
581 _data.update({
580 "md5": None,
582 "md5": "",
581 "binary": None,
583 "binary": False,
582 "size": None,
584 "size": 0,
583 "extension": None,
585 "extension": "",
584 })
586 })
585 if content:
587 if content:
586 _data.update({
588 _data.update({
587 "content": None
589 "content": None
588 })
590 })
589 _dirs.append(_data)
591 _dirs.append(_data)
590 except RepositoryError:
592 except RepositoryError:
591 log.exception("Exception in get_nodes")
593 log.exception("Exception in get_nodes")
592 raise
594 raise
593
595
594 return _dirs, _files
596 return _dirs, _files
595
597
596 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
598 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
597 """
599 """
598 Generate files for quick filter in files view
600 Generate files for quick filter in files view
599 """
601 """
600
602
601 _files = list()
603 _files = list()
602 _dirs = list()
604 _dirs = list()
603 try:
605 try:
604 _repo = self._get_repo(repo_name)
606 _repo = self._get_repo(repo_name)
605 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
607 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
606 root_path = root_path.lstrip('/')
608 root_path = root_path.lstrip('/')
607 for __, dirs, files in commit.walk(root_path):
609 for __, dirs, files in commit.walk(root_path):
608
610
609 for f in files:
611 for f in files:
610
612
611 _data = {
613 _data = {
612 "name": h.escape(f.unicode_path),
614 "name": h.escape(f.str_path),
613 "type": "file",
615 "type": "file",
614 }
616 }
615
617
616 _files.append(_data)
618 _files.append(_data)
617
619
618 for d in dirs:
620 for d in dirs:
619
621
620 _data = {
622 _data = {
621 "name": h.escape(d.unicode_path),
623 "name": h.escape(d.str_path),
622 "type": "dir",
624 "type": "dir",
623 }
625 }
624
626
625 _dirs.append(_data)
627 _dirs.append(_data)
626 except RepositoryError:
628 except RepositoryError:
627 log.exception("Exception in get_quick_filter_nodes")
629 log.exception("Exception in get_quick_filter_nodes")
628 raise
630 raise
629
631
630 return _dirs, _files
632 return _dirs, _files
631
633
632 def get_node(self, repo_name, commit_id, file_path,
634 def get_node(self, repo_name, commit_id, file_path,
633 extended_info=False, content=False, max_file_bytes=None, cache=True):
635 extended_info=False, content=False, max_file_bytes=None, cache=True):
634 """
636 """
635 retrieve single node from commit
637 retrieve single node from commit
636 """
638 """
639
637 try:
640 try:
638
641
639 _repo = self._get_repo(repo_name)
642 _repo = self._get_repo(repo_name)
640 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
643 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
641
644
642 file_node = commit.get_node(file_path)
645 file_node = commit.get_node(file_path)
643 if file_node.is_dir():
646 if file_node.is_dir():
644 raise RepositoryError('The given path is a directory')
647 raise RepositoryError('The given path is a directory')
645
648
646 _content = None
649 _content = None
647 f_name = file_node.unicode_path
650 f_name = file_node.str_path
648
651
649 file_data = {
652 file_data = {
650 "name": h.escape(f_name),
653 "name": h.escape(f_name),
651 "type": "file",
654 "type": "file",
652 }
655 }
653
656
654 if extended_info:
657 if extended_info:
655 file_data.update({
658 file_data.update({
656 "extension": file_node.extension,
659 "extension": file_node.extension,
657 "mimetype": file_node.mimetype,
660 "mimetype": file_node.mimetype,
658 })
661 })
659
662
660 if cache:
663 if cache:
661 md5 = file_node.md5
664 md5 = file_node.md5
662 is_binary = file_node.is_binary
665 is_binary = file_node.is_binary
663 size = file_node.size
666 size = file_node.size
664 else:
667 else:
665 is_binary, md5, size, _content = file_node.metadata_uncached()
668 is_binary, md5, size, _content = file_node.metadata_uncached()
666
669
667 file_data.update({
670 file_data.update({
668 "md5": md5,
671 "md5": md5,
669 "binary": is_binary,
672 "binary": is_binary,
670 "size": size,
673 "size": size,
671 })
674 })
672
675
673 if content and cache:
676 if content and cache:
674 # get content + cache
677 # get content + cache
675 size = file_node.size
678 size = file_node.size
676 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
679 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
677 full_content = None
680 full_content = None
678 all_lines = 0
681 all_lines = 0
679 if not file_node.is_binary and not over_size_limit:
682 if not file_node.is_binary and not over_size_limit:
680 full_content = safe_unicode(file_node.content)
683 full_content = safe_str(file_node.content)
681 all_lines, empty_lines = file_node.count_lines(full_content)
684 all_lines, empty_lines = file_node.count_lines(full_content)
682
685
683 file_data.update({
686 file_data.update({
684 "content": full_content,
687 "content": full_content,
685 "lines": all_lines
688 "lines": all_lines
686 })
689 })
687 elif content:
690 elif content:
688 # get content *without* cache
691 # get content *without* cache
689 if _content is None:
692 if _content is None:
690 is_binary, md5, size, _content = file_node.metadata_uncached()
693 is_binary, md5, size, _content = file_node.metadata_uncached()
691
694
692 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
695 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
693 full_content = None
696 full_content = None
694 all_lines = 0
697 all_lines = 0
695 if not is_binary and not over_size_limit:
698 if not is_binary and not over_size_limit:
696 full_content = safe_unicode(_content)
699 full_content = safe_str(_content)
697 all_lines, empty_lines = file_node.count_lines(full_content)
700 all_lines, empty_lines = file_node.count_lines(full_content)
698
701
699 file_data.update({
702 file_data.update({
700 "content": full_content,
703 "content": full_content,
701 "lines": all_lines
704 "lines": all_lines
702 })
705 })
703
706
704 except RepositoryError:
707 except RepositoryError:
705 log.exception("Exception in get_node")
708 log.exception("Exception in get_node")
706 raise
709 raise
707
710
708 return file_data
711 return file_data
709
712
710 def get_fts_data(self, repo_name, commit_id, root_path='/'):
713 def get_fts_data(self, repo_name, commit_id, root_path='/'):
711 """
714 """
712 Fetch node tree for usage in full text search
715 Fetch node tree for usage in full text search
713 """
716 """
714
717
715 tree_info = list()
718 tree_info = list()
716
719
717 try:
720 try:
718 _repo = self._get_repo(repo_name)
721 _repo = self._get_repo(repo_name)
719 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
722 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
720 root_path = root_path.lstrip('/')
723 root_path = root_path.lstrip('/')
721 for __, dirs, files in commit.walk(root_path):
724 top_node = commit.get_node(root_path)
725 top_node.default_pre_load = []
726
727 for __, dirs, files in commit.walk(top_node):
722
728
723 for f in files:
729 for f in files:
724 is_binary, md5, size, _content = f.metadata_uncached()
730 is_binary, md5, size, _content = f.metadata_uncached()
725 _data = {
731 _data = {
726 "name": f.unicode_path,
732 "name": f.str_path,
727 "md5": md5,
733 "md5": md5,
728 "extension": f.extension,
734 "extension": f.extension,
729 "binary": is_binary,
735 "binary": is_binary,
730 "size": size
736 "size": size
731 }
737 }
732
738
733 tree_info.append(_data)
739 tree_info.append(_data)
734
740
735 except RepositoryError:
741 except RepositoryError:
736 log.exception("Exception in get_nodes")
742 log.exception("Exception in get_nodes")
737 raise
743 raise
738
744
739 return tree_info
745 return tree_info
740
746
741 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
747 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
742 author=None, trigger_push_hook=True):
748 author=None, trigger_push_hook=True):
743 """
749 """
744 Commits given multiple nodes into repo
750 Commits given multiple nodes into repo
745
751
746 :param user: RhodeCode User object or user_id, the commiter
752 :param user: RhodeCode User object or user_id, the commiter
747 :param repo: RhodeCode Repository object
753 :param repo: RhodeCode Repository object
748 :param message: commit message
754 :param message: commit message
749 :param nodes: mapping {filename:{'content':content},...}
755 :param nodes: mapping {filename:{'content':content},...}
750 :param parent_commit: parent commit, can be empty than it's
756 :param parent_commit: parent commit, can be empty than it's
751 initial commit
757 initial commit
752 :param author: author of commit, cna be different that commiter
758 :param author: author of commit, cna be different that commiter
753 only for git
759 only for git
754 :param trigger_push_hook: trigger push hooks
760 :param trigger_push_hook: trigger push hooks
755
761
756 :returns: new committed commit
762 :returns: new committed commit
757 """
763 """
758
764
759 user = self._get_user(user)
765 user = self._get_user(user)
760 scm_instance = repo.scm_instance(cache=False)
766 scm_instance = repo.scm_instance(cache=False)
761
767
762 processed_nodes = []
768 message = safe_str(message)
763 for f_path in nodes:
764 f_path = self._sanitize_path(f_path)
765 content = nodes[f_path]['content']
766 f_path = safe_str(f_path)
767 # decoding here will force that we have proper encoded values
768 # in any other case this will throw exceptions and deny commit
769 if isinstance(content, (str,)):
770 content = safe_str(content)
771 elif isinstance(content, (file, cStringIO.OutputType,)):
772 content = content.read()
773 else:
774 raise Exception('Content is of unrecognized type %s' % (
775 type(content)
776 ))
777 processed_nodes.append((f_path, content))
778
779 message = safe_unicode(message)
780 commiter = user.full_contact
769 commiter = user.full_contact
781 author = safe_unicode(author) if author else commiter
770 author = safe_str(author) if author else commiter
782
771
783 imc = scm_instance.in_memory_commit
772 imc = scm_instance.in_memory_commit
784
773
785 if not parent_commit:
774 if not parent_commit:
786 parent_commit = EmptyCommit(alias=scm_instance.alias)
775 parent_commit = EmptyCommit(alias=scm_instance.alias)
787
776
788 if isinstance(parent_commit, EmptyCommit):
777 if isinstance(parent_commit, EmptyCommit):
789 # EmptyCommit means we we're editing empty repository
778 # EmptyCommit means we're editing empty repository
790 parents = None
779 parents = None
791 else:
780 else:
792 parents = [parent_commit]
781 parents = [parent_commit]
782
783 upload_file_types = (io.BytesIO, io.BufferedRandom)
784 processed_nodes = []
785 for filename, content_dict in nodes.items():
786 if not isinstance(filename, bytes):
787 raise ValueError(f'filename key in nodes needs to be bytes , or {upload_file_types}')
788 content = content_dict['content']
789 if not isinstance(content, upload_file_types + (bytes,)):
790 raise ValueError('content key value in nodes needs to be bytes')
791
792 for f_path in nodes:
793 f_path = self._sanitize_path(f_path)
794 content = nodes[f_path]['content']
795
796 # decoding here will force that we have proper encoded values
797 # in any other case this will throw exceptions and deny commit
798
799 if isinstance(content, bytes):
800 pass
801 elif isinstance(content, upload_file_types):
802 content = content.read()
803 else:
804 raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
805 processed_nodes.append((f_path, content))
806
793 # add multiple nodes
807 # add multiple nodes
794 for path, content in processed_nodes:
808 for path, content in processed_nodes:
795 imc.add(FileNode(path, content=content))
809 imc.add(FileNode(path, content=content))
810
796 # TODO: handle pre push scenario
811 # TODO: handle pre push scenario
797 tip = imc.commit(message=message,
812 tip = imc.commit(message=message,
798 author=author,
813 author=author,
799 parents=parents,
814 parents=parents,
800 branch=parent_commit.branch)
815 branch=parent_commit.branch)
801
816
802 self.mark_for_invalidation(repo.repo_name)
817 self.mark_for_invalidation(repo.repo_name)
803 if trigger_push_hook:
818 if trigger_push_hook:
804 hooks_utils.trigger_post_push_hook(
819 hooks_utils.trigger_post_push_hook(
805 username=user.username, action='push_local',
820 username=user.username, action='push_local',
806 repo_name=repo.repo_name, repo_type=scm_instance.alias,
821 repo_name=repo.repo_name, repo_type=scm_instance.alias,
807 hook_type='post_push',
822 hook_type='post_push',
808 commit_ids=[tip.raw_id])
823 commit_ids=[tip.raw_id])
809 return tip
824 return tip
810
825
811 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
826 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
812 author=None, trigger_push_hook=True):
827 author=None, trigger_push_hook=True):
813 user = self._get_user(user)
828 user = self._get_user(user)
814 scm_instance = repo.scm_instance(cache=False)
829 scm_instance = repo.scm_instance(cache=False)
815
830
816 message = safe_unicode(message)
831 message = safe_str(message)
817 commiter = user.full_contact
832 commiter = user.full_contact
818 author = safe_unicode(author) if author else commiter
833 author = safe_str(author) if author else commiter
819
834
820 imc = scm_instance.in_memory_commit
835 imc = scm_instance.in_memory_commit
821
836
822 if not parent_commit:
837 if not parent_commit:
823 parent_commit = EmptyCommit(alias=scm_instance.alias)
838 parent_commit = EmptyCommit(alias=scm_instance.alias)
824
839
825 if isinstance(parent_commit, EmptyCommit):
840 if isinstance(parent_commit, EmptyCommit):
826 # EmptyCommit means we we're editing empty repository
841 # EmptyCommit means we we're editing empty repository
827 parents = None
842 parents = None
828 else:
843 else:
829 parents = [parent_commit]
844 parents = [parent_commit]
830
845
831 # add multiple nodes
846 # add multiple nodes
832 for _filename, data in nodes.items():
847 for _filename, data in nodes.items():
833 # new filename, can be renamed from the old one, also sanitaze
848 # new filename, can be renamed from the old one, also sanitaze
834 # the path for any hack around relative paths like ../../ etc.
849 # the path for any hack around relative paths like ../../ etc.
835 filename = self._sanitize_path(data['filename'])
850 filename = self._sanitize_path(data['filename'])
836 old_filename = self._sanitize_path(_filename)
851 old_filename = self._sanitize_path(_filename)
837 content = data['content']
852 content = data['content']
838 file_mode = data.get('mode')
853 file_mode = data.get('mode')
839 filenode = FileNode(old_filename, content=content, mode=file_mode)
854 filenode = FileNode(old_filename, content=content, mode=file_mode)
840 op = data['op']
855 op = data['op']
841 if op == 'add':
856 if op == 'add':
842 imc.add(filenode)
857 imc.add(filenode)
843 elif op == 'del':
858 elif op == 'del':
844 imc.remove(filenode)
859 imc.remove(filenode)
845 elif op == 'mod':
860 elif op == 'mod':
846 if filename != old_filename:
861 if filename != old_filename:
847 # TODO: handle renames more efficient, needs vcs lib changes
862 # TODO: handle renames more efficient, needs vcs lib changes
848 imc.remove(filenode)
863 imc.remove(filenode)
849 imc.add(FileNode(filename, content=content, mode=file_mode))
864 imc.add(FileNode(filename, content=content, mode=file_mode))
850 else:
865 else:
851 imc.change(filenode)
866 imc.change(filenode)
852
867
853 try:
868 try:
854 # TODO: handle pre push scenario commit changes
869 # TODO: handle pre push scenario commit changes
855 tip = imc.commit(message=message,
870 tip = imc.commit(message=message,
856 author=author,
871 author=author,
857 parents=parents,
872 parents=parents,
858 branch=parent_commit.branch)
873 branch=parent_commit.branch)
859 except NodeNotChangedError:
874 except NodeNotChangedError:
860 raise
875 raise
861 except Exception as e:
876 except Exception as e:
862 log.exception("Unexpected exception during call to imc.commit")
877 log.exception("Unexpected exception during call to imc.commit")
863 raise IMCCommitError(str(e))
878 raise IMCCommitError(str(e))
864 finally:
879 finally:
865 # always clear caches, if commit fails we want fresh object also
880 # always clear caches, if commit fails we want fresh object also
866 self.mark_for_invalidation(repo.repo_name)
881 self.mark_for_invalidation(repo.repo_name)
867
882
868 if trigger_push_hook:
883 if trigger_push_hook:
869 hooks_utils.trigger_post_push_hook(
884 hooks_utils.trigger_post_push_hook(
870 username=user.username, action='push_local', hook_type='post_push',
885 username=user.username, action='push_local', hook_type='post_push',
871 repo_name=repo.repo_name, repo_type=scm_instance.alias,
886 repo_name=repo.repo_name, repo_type=scm_instance.alias,
872 commit_ids=[tip.raw_id])
887 commit_ids=[tip.raw_id])
873
888
874 return tip
889 return tip
875
890
876 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
891 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
877 author=None, trigger_push_hook=True):
892 author=None, trigger_push_hook=True):
878 """
893 """
879 Deletes given multiple nodes into `repo`
894 Deletes given multiple nodes into `repo`
880
895
881 :param user: RhodeCode User object or user_id, the committer
896 :param user: RhodeCode User object or user_id, the committer
882 :param repo: RhodeCode Repository object
897 :param repo: RhodeCode Repository object
883 :param message: commit message
898 :param message: commit message
884 :param nodes: mapping {filename:{'content':content},...}
899 :param nodes: mapping {filename:{'content':content},...}
885 :param parent_commit: parent commit, can be empty than it's initial
900 :param parent_commit: parent commit, can be empty than it's initial
886 commit
901 commit
887 :param author: author of commit, cna be different that commiter only
902 :param author: author of commit, cna be different that commiter only
888 for git
903 for git
889 :param trigger_push_hook: trigger push hooks
904 :param trigger_push_hook: trigger push hooks
890
905
891 :returns: new commit after deletion
906 :returns: new commit after deletion
892 """
907 """
893
908
894 user = self._get_user(user)
909 user = self._get_user(user)
895 scm_instance = repo.scm_instance(cache=False)
910 scm_instance = repo.scm_instance(cache=False)
896
911
897 processed_nodes = []
912 processed_nodes = []
898 for f_path in nodes:
913 for f_path in nodes:
899 f_path = self._sanitize_path(f_path)
914 f_path = self._sanitize_path(f_path)
900 # content can be empty but for compatabilty it allows same dicts
915 # content can be empty but for compatibility it allows same dicts
901 # structure as add_nodes
916 # structure as add_nodes
902 content = nodes[f_path].get('content')
917 content = nodes[f_path].get('content')
903 processed_nodes.append((f_path, content))
918 processed_nodes.append((safe_bytes(f_path), content))
904
919
905 message = safe_unicode(message)
920 message = safe_str(message)
906 commiter = user.full_contact
921 commiter = user.full_contact
907 author = safe_unicode(author) if author else commiter
922 author = safe_str(author) if author else commiter
908
923
909 imc = scm_instance.in_memory_commit
924 imc = scm_instance.in_memory_commit
910
925
911 if not parent_commit:
926 if not parent_commit:
912 parent_commit = EmptyCommit(alias=scm_instance.alias)
927 parent_commit = EmptyCommit(alias=scm_instance.alias)
913
928
914 if isinstance(parent_commit, EmptyCommit):
929 if isinstance(parent_commit, EmptyCommit):
915 # EmptyCommit means we we're editing empty repository
930 # EmptyCommit means we we're editing empty repository
916 parents = None
931 parents = None
917 else:
932 else:
918 parents = [parent_commit]
933 parents = [parent_commit]
919 # add multiple nodes
934 # add multiple nodes
920 for path, content in processed_nodes:
935 for path, content in processed_nodes:
921 imc.remove(FileNode(path, content=content))
936 imc.remove(FileNode(path, content=content))
922
937
923 # TODO: handle pre push scenario
938 # TODO: handle pre push scenario
924 tip = imc.commit(message=message,
939 tip = imc.commit(message=message,
925 author=author,
940 author=author,
926 parents=parents,
941 parents=parents,
927 branch=parent_commit.branch)
942 branch=parent_commit.branch)
928
943
929 self.mark_for_invalidation(repo.repo_name)
944 self.mark_for_invalidation(repo.repo_name)
930 if trigger_push_hook:
945 if trigger_push_hook:
931 hooks_utils.trigger_post_push_hook(
946 hooks_utils.trigger_post_push_hook(
932 username=user.username, action='push_local', hook_type='post_push',
947 username=user.username, action='push_local', hook_type='post_push',
933 repo_name=repo.repo_name, repo_type=scm_instance.alias,
948 repo_name=repo.repo_name, repo_type=scm_instance.alias,
934 commit_ids=[tip.raw_id])
949 commit_ids=[tip.raw_id])
935 return tip
950 return tip
936
951
937 def strip(self, repo, commit_id, branch):
952 def strip(self, repo, commit_id, branch):
938 scm_instance = repo.scm_instance(cache=False)
953 scm_instance = repo.scm_instance(cache=False)
939 scm_instance.config.clear_section('hooks')
954 scm_instance.config.clear_section('hooks')
940 scm_instance.strip(commit_id, branch)
955 scm_instance.strip(commit_id, branch)
941 self.mark_for_invalidation(repo.repo_name)
956 self.mark_for_invalidation(repo.repo_name)
942
957
943 def get_unread_journal(self):
958 def get_unread_journal(self):
944 return self.sa.query(UserLog).count()
959 return self.sa.query(UserLog).count()
945
960
946 @classmethod
961 @classmethod
947 def backend_landing_ref(cls, repo_type):
962 def backend_landing_ref(cls, repo_type):
948 """
963 """
949 Return a default landing ref based on a repository type.
964 Return a default landing ref based on a repository type.
950 """
965 """
951
966
952 landing_ref = {
967 landing_ref = {
953 'hg': ('branch:default', 'default'),
968 'hg': ('branch:default', 'default'),
954 'git': ('branch:master', 'master'),
969 'git': ('branch:master', 'master'),
955 'svn': ('rev:tip', 'latest tip'),
970 'svn': ('rev:tip', 'latest tip'),
956 'default': ('rev:tip', 'latest tip'),
971 'default': ('rev:tip', 'latest tip'),
957 }
972 }
958
973
959 return landing_ref.get(repo_type) or landing_ref['default']
974 return landing_ref.get(repo_type) or landing_ref['default']
960
975
961 def get_repo_landing_revs(self, translator, repo=None):
976 def get_repo_landing_revs(self, translator, repo=None):
962 """
977 """
963 Generates select option with tags branches and bookmarks (for hg only)
978 Generates select option with tags branches and bookmarks (for hg only)
964 grouped by type
979 grouped by type
965
980
966 :param repo:
981 :param repo:
967 """
982 """
968 from rhodecode.lib.vcs.backends.git import GitRepository
983 from rhodecode.lib.vcs.backends.git import GitRepository
969
984
970 _ = translator
985 _ = translator
971 repo = self._get_repo(repo)
986 repo = self._get_repo(repo)
972
987
973 if repo:
988 if repo:
974 repo_type = repo.repo_type
989 repo_type = repo.repo_type
975 else:
990 else:
976 repo_type = 'default'
991 repo_type = 'default'
977
992
978 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
993 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
979
994
980 default_ref_options = [
995 default_ref_options = [
981 [default_landing_ref, landing_ref_lbl]
996 [default_landing_ref, landing_ref_lbl]
982 ]
997 ]
983 default_choices = [
998 default_choices = [
984 default_landing_ref
999 default_landing_ref
985 ]
1000 ]
986
1001
987 if not repo:
1002 if not repo:
988 # presented at NEW repo creation
1003 # presented at NEW repo creation
989 return default_choices, default_ref_options
1004 return default_choices, default_ref_options
990
1005
991 repo = repo.scm_instance()
1006 repo = repo.scm_instance()
992
1007
993 ref_options = [(default_landing_ref, landing_ref_lbl)]
1008 ref_options = [(default_landing_ref, landing_ref_lbl)]
994 choices = [default_landing_ref]
1009 choices = [default_landing_ref]
995
1010
996 # branches
1011 # branches
997 branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
1012 branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
998 if not branch_group:
1013 if not branch_group:
999 # new repo, or without maybe a branch?
1014 # new repo, or without maybe a branch?
1000 branch_group = default_ref_options
1015 branch_group = default_ref_options
1001
1016
1002 branches_group = (branch_group, _("Branches"))
1017 branches_group = (branch_group, _("Branches"))
1003 ref_options.append(branches_group)
1018 ref_options.append(branches_group)
1004 choices.extend([x[0] for x in branches_group[0]])
1019 choices.extend([x[0] for x in branches_group[0]])
1005
1020
1006 # bookmarks for HG
1021 # bookmarks for HG
1007 if repo.alias == 'hg':
1022 if repo.alias == 'hg':
1008 bookmarks_group = (
1023 bookmarks_group = (
1009 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
1024 [(f'book:{safe_str(b)}', safe_str(b))
1010 for b in repo.bookmarks],
1025 for b in repo.bookmarks],
1011 _("Bookmarks"))
1026 _("Bookmarks"))
1012 ref_options.append(bookmarks_group)
1027 ref_options.append(bookmarks_group)
1013 choices.extend([x[0] for x in bookmarks_group[0]])
1028 choices.extend([x[0] for x in bookmarks_group[0]])
1014
1029
1015 # tags
1030 # tags
1016 tags_group = (
1031 tags_group = (
1017 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
1032 [(f'tag:{safe_str(t)}', safe_str(t))
1018 for t in repo.tags],
1033 for t in repo.tags],
1019 _("Tags"))
1034 _("Tags"))
1020 ref_options.append(tags_group)
1035 ref_options.append(tags_group)
1021 choices.extend([x[0] for x in tags_group[0]])
1036 choices.extend([x[0] for x in tags_group[0]])
1022
1037
1023 return choices, ref_options
1038 return choices, ref_options
1024
1039
1025 def get_server_info(self, environ=None):
1040 def get_server_info(self, environ=None):
1026 server_info = get_system_info(environ)
1041 server_info = get_system_info(environ)
1027 return server_info
1042 return server_info
@@ -1,918 +1,924 b''
1
1
2 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import os
20 import os
21 import re
21 import re
22 import hashlib
23 import logging
22 import logging
24 import time
23 import time
25 import functools
24 import functools
26 import bleach
25 import bleach
27 from collections import namedtuple
26 from collections import namedtuple
28
27
29 from pyramid.threadlocal import get_current_request, get_current_registry
28 from pyramid.threadlocal import get_current_request
30
29
31 from rhodecode.lib import rc_cache
30 from rhodecode.lib import rc_cache
31 from rhodecode.lib.hash_utils import sha1_safe
32 from rhodecode.lib.utils2 import (
32 from rhodecode.lib.utils2 import (
33 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
33 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
34 from rhodecode.lib.vcs.backends import base
34 from rhodecode.lib.vcs.backends import base
35 from rhodecode.lib.statsd_client import StatsdClient
35 from rhodecode.lib.statsd_client import StatsdClient
36 from rhodecode.model import BaseModel
36 from rhodecode.model import BaseModel
37 from rhodecode.model.db import (
37 from rhodecode.model.db import (
38 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting)
38 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting)
39 from rhodecode.model.meta import Session
39 from rhodecode.model.meta import Session
40
40
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44
44
45 UiSetting = namedtuple(
45 UiSetting = namedtuple(
46 'UiSetting', ['section', 'key', 'value', 'active'])
46 'UiSetting', ['section', 'key', 'value', 'active'])
47
47
48 SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
48 SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
49
49
50
50
51 class SettingNotFound(Exception):
51 class SettingNotFound(Exception):
52 def __init__(self, setting_id):
52 def __init__(self, setting_id):
53 msg = 'Setting `{}` is not found'.format(setting_id)
53 msg = 'Setting `{}` is not found'.format(setting_id)
54 super(SettingNotFound, self).__init__(msg)
54 super(SettingNotFound, self).__init__(msg)
55
55
56
56
57 class SettingsModel(BaseModel):
57 class SettingsModel(BaseModel):
58 BUILTIN_HOOKS = (
58 BUILTIN_HOOKS = (
59 RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
59 RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
60 RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
60 RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
61 RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
61 RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
62 RhodeCodeUi.HOOK_PUSH_KEY,)
62 RhodeCodeUi.HOOK_PUSH_KEY,)
63 HOOKS_SECTION = 'hooks'
63 HOOKS_SECTION = 'hooks'
64
64
65 def __init__(self, sa=None, repo=None):
65 def __init__(self, sa=None, repo=None):
66 self.repo = repo
66 self.repo = repo
67 self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
67 self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
68 self.SettingsDbModel = (
68 self.SettingsDbModel = (
69 RepoRhodeCodeSetting if repo else RhodeCodeSetting)
69 RepoRhodeCodeSetting if repo else RhodeCodeSetting)
70 super(SettingsModel, self).__init__(sa)
70 super(SettingsModel, self).__init__(sa)
71
71
72 def get_ui_by_key(self, key):
72 def get_ui_by_key(self, key):
73 q = self.UiDbModel.query()
73 q = self.UiDbModel.query()
74 q = q.filter(self.UiDbModel.ui_key == key)
74 q = q.filter(self.UiDbModel.ui_key == key)
75 q = self._filter_by_repo(RepoRhodeCodeUi, q)
75 q = self._filter_by_repo(RepoRhodeCodeUi, q)
76 return q.scalar()
76 return q.scalar()
77
77
78 def get_ui_by_section(self, section):
78 def get_ui_by_section(self, section):
79 q = self.UiDbModel.query()
79 q = self.UiDbModel.query()
80 q = q.filter(self.UiDbModel.ui_section == section)
80 q = q.filter(self.UiDbModel.ui_section == section)
81 q = self._filter_by_repo(RepoRhodeCodeUi, q)
81 q = self._filter_by_repo(RepoRhodeCodeUi, q)
82 return q.all()
82 return q.all()
83
83
84 def get_ui_by_section_and_key(self, section, key):
84 def get_ui_by_section_and_key(self, section, key):
85 q = self.UiDbModel.query()
85 q = self.UiDbModel.query()
86 q = q.filter(self.UiDbModel.ui_section == section)
86 q = q.filter(self.UiDbModel.ui_section == section)
87 q = q.filter(self.UiDbModel.ui_key == key)
87 q = q.filter(self.UiDbModel.ui_key == key)
88 q = self._filter_by_repo(RepoRhodeCodeUi, q)
88 q = self._filter_by_repo(RepoRhodeCodeUi, q)
89 return q.scalar()
89 return q.scalar()
90
90
91 def get_ui(self, section=None, key=None):
91 def get_ui(self, section=None, key=None):
92 q = self.UiDbModel.query()
92 q = self.UiDbModel.query()
93 q = self._filter_by_repo(RepoRhodeCodeUi, q)
93 q = self._filter_by_repo(RepoRhodeCodeUi, q)
94
94
95 if section:
95 if section:
96 q = q.filter(self.UiDbModel.ui_section == section)
96 q = q.filter(self.UiDbModel.ui_section == section)
97 if key:
97 if key:
98 q = q.filter(self.UiDbModel.ui_key == key)
98 q = q.filter(self.UiDbModel.ui_key == key)
99
99
100 # TODO: mikhail: add caching
100 # TODO: mikhail: add caching
101 result = [
101 result = [
102 UiSetting(
102 UiSetting(
103 section=safe_str(r.ui_section), key=safe_str(r.ui_key),
103 section=safe_str(r.ui_section), key=safe_str(r.ui_key),
104 value=safe_str(r.ui_value), active=r.ui_active
104 value=safe_str(r.ui_value), active=r.ui_active
105 )
105 )
106 for r in q.all()
106 for r in q.all()
107 ]
107 ]
108 return result
108 return result
109
109
110 def get_builtin_hooks(self):
110 def get_builtin_hooks(self):
111 q = self.UiDbModel.query()
111 q = self.UiDbModel.query()
112 q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
112 q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
113 return self._get_hooks(q)
113 return self._get_hooks(q)
114
114
115 def get_custom_hooks(self):
115 def get_custom_hooks(self):
116 q = self.UiDbModel.query()
116 q = self.UiDbModel.query()
117 q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
117 q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
118 return self._get_hooks(q)
118 return self._get_hooks(q)
119
119
120 def create_ui_section_value(self, section, val, key=None, active=True):
120 def create_ui_section_value(self, section, val, key=None, active=True):
121 new_ui = self.UiDbModel()
121 new_ui = self.UiDbModel()
122 new_ui.ui_section = section
122 new_ui.ui_section = section
123 new_ui.ui_value = val
123 new_ui.ui_value = val
124 new_ui.ui_active = active
124 new_ui.ui_active = active
125
125
126 repository_id = ''
126 repository_id = ''
127 if self.repo:
127 if self.repo:
128 repo = self._get_repo(self.repo)
128 repo = self._get_repo(self.repo)
129 repository_id = repo.repo_id
129 repository_id = repo.repo_id
130 new_ui.repository_id = repository_id
130 new_ui.repository_id = repository_id
131
131
132 if not key:
132 if not key:
133 # keys are unique so they need appended info
133 # keys are unique so they need appended info
134 if self.repo:
134 if self.repo:
135 key = hashlib.sha1(
135 key = sha1_safe(f'{section}{val}{repository_id}')
136 '{}{}{}'.format(section, val, repository_id)).hexdigest()
137 else:
136 else:
138 key = hashlib.sha1('{}{}'.format(section, val)).hexdigest()
137 key = sha1_safe(f'{section}{val}')
139
138
140 new_ui.ui_key = key
139 new_ui.ui_key = key
141
140
142 Session().add(new_ui)
141 Session().add(new_ui)
143 return new_ui
142 return new_ui
144
143
145 def create_or_update_hook(self, key, value):
144 def create_or_update_hook(self, key, value):
146 ui = (
145 ui = (
147 self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
146 self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
148 self.UiDbModel())
147 self.UiDbModel())
149 ui.ui_section = self.HOOKS_SECTION
148 ui.ui_section = self.HOOKS_SECTION
150 ui.ui_active = True
149 ui.ui_active = True
151 ui.ui_key = key
150 ui.ui_key = key
152 ui.ui_value = value
151 ui.ui_value = value
153
152
154 if self.repo:
153 if self.repo:
155 repo = self._get_repo(self.repo)
154 repo = self._get_repo(self.repo)
156 repository_id = repo.repo_id
155 repository_id = repo.repo_id
157 ui.repository_id = repository_id
156 ui.repository_id = repository_id
158
157
159 Session().add(ui)
158 Session().add(ui)
160 return ui
159 return ui
161
160
162 def delete_ui(self, id_):
161 def delete_ui(self, id_):
163 ui = self.UiDbModel.get(id_)
162 ui = self.UiDbModel.get(id_)
164 if not ui:
163 if not ui:
165 raise SettingNotFound(id_)
164 raise SettingNotFound(id_)
166 Session().delete(ui)
165 Session().delete(ui)
167
166
168 def get_setting_by_name(self, name):
167 def get_setting_by_name(self, name):
169 q = self._get_settings_query()
168 q = self._get_settings_query()
170 q = q.filter(self.SettingsDbModel.app_settings_name == name)
169 q = q.filter(self.SettingsDbModel.app_settings_name == name)
171 return q.scalar()
170 return q.scalar()
172
171
173 def create_or_update_setting(
172 def create_or_update_setting(
174 self, name, val=Optional(''), type_=Optional('unicode')):
173 self, name, val=Optional(''), type_=Optional('unicode')):
175 """
174 """
176 Creates or updates RhodeCode setting. If updates is triggered it will
175 Creates or updates RhodeCode setting. If updates is triggered it will
177 only update parameters that are explicitly set Optional instance will
176 only update parameters that are explicitly set Optional instance will
178 be skipped
177 be skipped
179
178
180 :param name:
179 :param name:
181 :param val:
180 :param val:
182 :param type_:
181 :param type_:
183 :return:
182 :return:
184 """
183 """
185
184
186 res = self.get_setting_by_name(name)
185 res = self.get_setting_by_name(name)
187 repo = self._get_repo(self.repo) if self.repo else None
186 repo = self._get_repo(self.repo) if self.repo else None
188
187
189 if not res:
188 if not res:
190 val = Optional.extract(val)
189 val = Optional.extract(val)
191 type_ = Optional.extract(type_)
190 type_ = Optional.extract(type_)
192
191
193 args = (
192 args = (
194 (repo.repo_id, name, val, type_)
193 (repo.repo_id, name, val, type_)
195 if repo else (name, val, type_))
194 if repo else (name, val, type_))
196 res = self.SettingsDbModel(*args)
195 res = self.SettingsDbModel(*args)
197
196
198 else:
197 else:
199 if self.repo:
198 if self.repo:
200 res.repository_id = repo.repo_id
199 res.repository_id = repo.repo_id
201
200
202 res.app_settings_name = name
201 res.app_settings_name = name
203 if not isinstance(type_, Optional):
202 if not isinstance(type_, Optional):
204 # update if set
203 # update if set
205 res.app_settings_type = type_
204 res.app_settings_type = type_
206 if not isinstance(val, Optional):
205 if not isinstance(val, Optional):
207 # update if set
206 # update if set
208 res.app_settings_value = val
207 res.app_settings_value = val
209
208
210 Session().add(res)
209 Session().add(res)
211 return res
210 return res
212
211
213 def get_cache_region(self):
212 def get_cache_region(self):
214 repo = self._get_repo(self.repo) if self.repo else None
213 repo = self._get_repo(self.repo) if self.repo else None
215 cache_key = "repo.{}".format(repo.repo_id) if repo else "general_settings"
214 cache_key = f"repo.{repo.repo_id}" if repo else "repo.ALL"
216 cache_namespace_uid = 'cache_settings.{}'.format(cache_key)
215 cache_namespace_uid = f'cache_settings.{cache_key}'
217 region = rc_cache.get_or_create_region('cache_general', cache_namespace_uid)
216 region = rc_cache.get_or_create_region('cache_general', cache_namespace_uid)
218 return region, cache_key
217 return region, cache_namespace_uid
219
220 def invalidate_settings_cache(self):
221 region, cache_key = self.get_cache_region()
222 log.debug('Invalidation cache region %s for cache_key: %s', region, cache_key)
223 region.invalidate()
224
218
225 def get_all_settings(self, cache=False, from_request=True):
219 def invalidate_settings_cache(self, hard=False):
226 # defines if we use GLOBAL, or PER_REPO
220 region, namespace_key = self.get_cache_region()
227 repo = self._get_repo(self.repo) if self.repo else None
221 log.debug('Invalidation cache [%s] region %s for cache_key: %s',
222 'invalidate_settings_cache', region, namespace_key)
228
223
229 # initially try the requests context, this is the fastest
224 # we use hard cleanup if invalidation is sent
230 # we only fetch global config
225 rc_cache.clear_cache_namespace(region, namespace_key, method=rc_cache.CLEAR_DELETE)
231 if from_request:
232 request = get_current_request()
233
226
234 if request and not repo and hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
227 def get_cache_call_method(self, cache=True):
235 rc_config = request.call_context.rc_config
236 if rc_config:
237 return rc_config
238
239 region, cache_key = self.get_cache_region()
228 region, cache_key = self.get_cache_region()
240
229
241 @region.conditional_cache_on_arguments(condition=cache)
230 @region.conditional_cache_on_arguments(condition=cache)
242 def _get_all_settings(name, key):
231 def _get_all_settings(name, key):
243 q = self._get_settings_query()
232 q = self._get_settings_query()
244 if not q:
233 if not q:
245 raise Exception('Could not get application settings !')
234 raise Exception('Could not get application settings !')
246
235
247 settings = {
236 settings = {
248 'rhodecode_' + res.app_settings_name: res.app_settings_value
237 f'rhodecode_{res.app_settings_name}': res.app_settings_value
249 for res in q
238 for res in q
250 }
239 }
251 return settings
240 return settings
241 return _get_all_settings
242
243 def get_all_settings(self, cache=False, from_request=True):
244 # defines if we use GLOBAL, or PER_REPO
245 repo = self._get_repo(self.repo) if self.repo else None
246
247 # initially try the requests context, this is the fastest
248 # we only fetch global config, NOT for repo-specific
249 if from_request and not repo:
250 request = get_current_request()
251
252 if request and hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
253 rc_config = request.call_context.rc_config
254 if rc_config:
255 return rc_config
256
257 _region, cache_key = self.get_cache_region()
258 _get_all_settings = self.get_cache_call_method(cache=cache)
252
259
253 start = time.time()
260 start = time.time()
254 result = _get_all_settings('rhodecode_settings', cache_key)
261 result = _get_all_settings('rhodecode_settings', cache_key)
255 compute_time = time.time() - start
262 compute_time = time.time() - start
256 log.debug('cached method:%s took %.4fs', _get_all_settings.__name__, compute_time)
263 log.debug('cached method:%s took %.4fs', _get_all_settings.__name__, compute_time)
257
264
258 statsd = StatsdClient.statsd
265 statsd = StatsdClient.statsd
259 if statsd:
266 if statsd:
260 elapsed_time_ms = round(1000.0 * compute_time) # use ms only
267 elapsed_time_ms = round(1000.0 * compute_time) # use ms only
261 statsd.timing("rhodecode_settings_timing.histogram", elapsed_time_ms,
268 statsd.timing("rhodecode_settings_timing.histogram", elapsed_time_ms,
262 use_decimals=False)
269 use_decimals=False)
263
270
264 log.debug('Fetching app settings for key: %s took: %.4fs: cache: %s', cache_key, compute_time, cache)
271 log.debug('Fetching app settings for key: %s took: %.4fs: cache: %s', cache_key, compute_time, cache)
265
272
266 return result
273 return result
267
274
268 def get_auth_settings(self):
275 def get_auth_settings(self):
269 q = self._get_settings_query()
276 q = self._get_settings_query()
270 q = q.filter(
277 q = q.filter(
271 self.SettingsDbModel.app_settings_name.startswith('auth_'))
278 self.SettingsDbModel.app_settings_name.startswith('auth_'))
272 rows = q.all()
279 rows = q.all()
273 auth_settings = {
280 auth_settings = {
274 row.app_settings_name: row.app_settings_value for row in rows}
281 row.app_settings_name: row.app_settings_value for row in rows}
275 return auth_settings
282 return auth_settings
276
283
277 def get_auth_plugins(self):
284 def get_auth_plugins(self):
278 auth_plugins = self.get_setting_by_name("auth_plugins")
285 auth_plugins = self.get_setting_by_name("auth_plugins")
279 return auth_plugins.app_settings_value
286 return auth_plugins.app_settings_value
280
287
281 def get_default_repo_settings(self, strip_prefix=False):
288 def get_default_repo_settings(self, strip_prefix=False):
282 q = self._get_settings_query()
289 q = self._get_settings_query()
283 q = q.filter(
290 q = q.filter(
284 self.SettingsDbModel.app_settings_name.startswith('default_'))
291 self.SettingsDbModel.app_settings_name.startswith('default_'))
285 rows = q.all()
292 rows = q.all()
286
293
287 result = {}
294 result = {}
288 for row in rows:
295 for row in rows:
289 key = row.app_settings_name
296 key = row.app_settings_name
290 if strip_prefix:
297 if strip_prefix:
291 key = remove_prefix(key, prefix='default_')
298 key = remove_prefix(key, prefix='default_')
292 result.update({key: row.app_settings_value})
299 result.update({key: row.app_settings_value})
293 return result
300 return result
294
301
295 def get_repo(self):
302 def get_repo(self):
296 repo = self._get_repo(self.repo)
303 repo = self._get_repo(self.repo)
297 if not repo:
304 if not repo:
298 raise Exception(
305 raise Exception(
299 'Repository `{}` cannot be found inside the database'.format(
306 'Repository `{}` cannot be found inside the database'.format(
300 self.repo))
307 self.repo))
301 return repo
308 return repo
302
309
303 def _filter_by_repo(self, model, query):
310 def _filter_by_repo(self, model, query):
304 if self.repo:
311 if self.repo:
305 repo = self.get_repo()
312 repo = self.get_repo()
306 query = query.filter(model.repository_id == repo.repo_id)
313 query = query.filter(model.repository_id == repo.repo_id)
307 return query
314 return query
308
315
309 def _get_hooks(self, query):
316 def _get_hooks(self, query):
310 query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
317 query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
311 query = self._filter_by_repo(RepoRhodeCodeUi, query)
318 query = self._filter_by_repo(RepoRhodeCodeUi, query)
312 return query.all()
319 return query.all()
313
320
314 def _get_settings_query(self):
321 def _get_settings_query(self):
315 q = self.SettingsDbModel.query()
322 q = self.SettingsDbModel.query()
316 return self._filter_by_repo(RepoRhodeCodeSetting, q)
323 return self._filter_by_repo(RepoRhodeCodeSetting, q)
317
324
318 def list_enabled_social_plugins(self, settings):
325 def list_enabled_social_plugins(self, settings):
319 enabled = []
326 enabled = []
320 for plug in SOCIAL_PLUGINS_LIST:
327 for plug in SOCIAL_PLUGINS_LIST:
321 if str2bool(settings.get('rhodecode_auth_{}_enabled'.format(plug)
328 if str2bool(settings.get(f'rhodecode_auth_{plug}_enabled')):
322 )):
323 enabled.append(plug)
329 enabled.append(plug)
324 return enabled
330 return enabled
325
331
326
332
327 def assert_repo_settings(func):
333 def assert_repo_settings(func):
328 @functools.wraps(func)
334 @functools.wraps(func)
329 def _wrapper(self, *args, **kwargs):
335 def _wrapper(self, *args, **kwargs):
330 if not self.repo_settings:
336 if not self.repo_settings:
331 raise Exception('Repository is not specified')
337 raise Exception('Repository is not specified')
332 return func(self, *args, **kwargs)
338 return func(self, *args, **kwargs)
333 return _wrapper
339 return _wrapper
334
340
335
341
336 class IssueTrackerSettingsModel(object):
342 class IssueTrackerSettingsModel(object):
337 INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
343 INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
338 SETTINGS_PREFIX = 'issuetracker_'
344 SETTINGS_PREFIX = 'issuetracker_'
339
345
340 def __init__(self, sa=None, repo=None):
346 def __init__(self, sa=None, repo=None):
341 self.global_settings = SettingsModel(sa=sa)
347 self.global_settings = SettingsModel(sa=sa)
342 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
348 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
343
349
344 @property
350 @property
345 def inherit_global_settings(self):
351 def inherit_global_settings(self):
346 if not self.repo_settings:
352 if not self.repo_settings:
347 return True
353 return True
348 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
354 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
349 return setting.app_settings_value if setting else True
355 return setting.app_settings_value if setting else True
350
356
351 @inherit_global_settings.setter
357 @inherit_global_settings.setter
352 def inherit_global_settings(self, value):
358 def inherit_global_settings(self, value):
353 if self.repo_settings:
359 if self.repo_settings:
354 settings = self.repo_settings.create_or_update_setting(
360 settings = self.repo_settings.create_or_update_setting(
355 self.INHERIT_SETTINGS, value, type_='bool')
361 self.INHERIT_SETTINGS, value, type_='bool')
356 Session().add(settings)
362 Session().add(settings)
357
363
358 def _get_keyname(self, key, uid, prefix=''):
364 def _get_keyname(self, key, uid, prefix=''):
359 return '{0}{1}{2}_{3}'.format(
365 return '{0}{1}{2}_{3}'.format(
360 prefix, self.SETTINGS_PREFIX, key, uid)
366 prefix, self.SETTINGS_PREFIX, key, uid)
361
367
362 def _make_dict_for_settings(self, qs):
368 def _make_dict_for_settings(self, qs):
363 prefix_match = self._get_keyname('pat', '', 'rhodecode_')
369 prefix_match = self._get_keyname('pat', '', 'rhodecode_')
364
370
365 issuetracker_entries = {}
371 issuetracker_entries = {}
366 # create keys
372 # create keys
367 for k, v in qs.items():
373 for k, v in qs.items():
368 if k.startswith(prefix_match):
374 if k.startswith(prefix_match):
369 uid = k[len(prefix_match):]
375 uid = k[len(prefix_match):]
370 issuetracker_entries[uid] = None
376 issuetracker_entries[uid] = None
371
377
372 def url_cleaner(input_str):
378 def url_cleaner(input_str):
373 input_str = input_str.replace('"', '').replace("'", '')
379 input_str = input_str.replace('"', '').replace("'", '')
374 input_str = bleach.clean(input_str, strip=True)
380 input_str = bleach.clean(input_str, strip=True)
375 return input_str
381 return input_str
376
382
377 # populate
383 # populate
378 for uid in issuetracker_entries:
384 for uid in issuetracker_entries:
379 url_data = qs.get(self._get_keyname('url', uid, 'rhodecode_'))
385 url_data = qs.get(self._get_keyname('url', uid, 'rhodecode_'))
380
386
381 pat = qs.get(self._get_keyname('pat', uid, 'rhodecode_'))
387 pat = qs.get(self._get_keyname('pat', uid, 'rhodecode_'))
382 try:
388 try:
383 pat_compiled = re.compile(r'%s' % pat)
389 pat_compiled = re.compile(r'%s' % pat)
384 except re.error:
390 except re.error:
385 pat_compiled = None
391 pat_compiled = None
386
392
387 issuetracker_entries[uid] = AttributeDict({
393 issuetracker_entries[uid] = AttributeDict({
388 'pat': pat,
394 'pat': pat,
389 'pat_compiled': pat_compiled,
395 'pat_compiled': pat_compiled,
390 'url': url_cleaner(
396 'url': url_cleaner(
391 qs.get(self._get_keyname('url', uid, 'rhodecode_')) or ''),
397 qs.get(self._get_keyname('url', uid, 'rhodecode_')) or ''),
392 'pref': bleach.clean(
398 'pref': bleach.clean(
393 qs.get(self._get_keyname('pref', uid, 'rhodecode_')) or ''),
399 qs.get(self._get_keyname('pref', uid, 'rhodecode_')) or ''),
394 'desc': qs.get(
400 'desc': qs.get(
395 self._get_keyname('desc', uid, 'rhodecode_')),
401 self._get_keyname('desc', uid, 'rhodecode_')),
396 })
402 })
397
403
398 return issuetracker_entries
404 return issuetracker_entries
399
405
400 def get_global_settings(self, cache=False):
406 def get_global_settings(self, cache=False):
401 """
407 """
402 Returns list of global issue tracker settings
408 Returns list of global issue tracker settings
403 """
409 """
404 defaults = self.global_settings.get_all_settings(cache=cache)
410 defaults = self.global_settings.get_all_settings(cache=cache)
405 settings = self._make_dict_for_settings(defaults)
411 settings = self._make_dict_for_settings(defaults)
406 return settings
412 return settings
407
413
408 def get_repo_settings(self, cache=False):
414 def get_repo_settings(self, cache=False):
409 """
415 """
410 Returns list of issue tracker settings per repository
416 Returns list of issue tracker settings per repository
411 """
417 """
412 if not self.repo_settings:
418 if not self.repo_settings:
413 raise Exception('Repository is not specified')
419 raise Exception('Repository is not specified')
414 all_settings = self.repo_settings.get_all_settings(cache=cache)
420 all_settings = self.repo_settings.get_all_settings(cache=cache)
415 settings = self._make_dict_for_settings(all_settings)
421 settings = self._make_dict_for_settings(all_settings)
416 return settings
422 return settings
417
423
418 def get_settings(self, cache=False):
424 def get_settings(self, cache=False):
419 if self.inherit_global_settings:
425 if self.inherit_global_settings:
420 return self.get_global_settings(cache=cache)
426 return self.get_global_settings(cache=cache)
421 else:
427 else:
422 return self.get_repo_settings(cache=cache)
428 return self.get_repo_settings(cache=cache)
423
429
424 def delete_entries(self, uid):
430 def delete_entries(self, uid):
425 if self.repo_settings:
431 if self.repo_settings:
426 all_patterns = self.get_repo_settings()
432 all_patterns = self.get_repo_settings()
427 settings_model = self.repo_settings
433 settings_model = self.repo_settings
428 else:
434 else:
429 all_patterns = self.get_global_settings()
435 all_patterns = self.get_global_settings()
430 settings_model = self.global_settings
436 settings_model = self.global_settings
431 entries = all_patterns.get(uid, [])
437 entries = all_patterns.get(uid, [])
432
438
433 for del_key in entries:
439 for del_key in entries:
434 setting_name = self._get_keyname(del_key, uid)
440 setting_name = self._get_keyname(del_key, uid)
435 entry = settings_model.get_setting_by_name(setting_name)
441 entry = settings_model.get_setting_by_name(setting_name)
436 if entry:
442 if entry:
437 Session().delete(entry)
443 Session().delete(entry)
438
444
439 Session().commit()
445 Session().commit()
440
446
441 def create_or_update_setting(
447 def create_or_update_setting(
442 self, name, val=Optional(''), type_=Optional('unicode')):
448 self, name, val=Optional(''), type_=Optional('unicode')):
443 if self.repo_settings:
449 if self.repo_settings:
444 setting = self.repo_settings.create_or_update_setting(
450 setting = self.repo_settings.create_or_update_setting(
445 name, val, type_)
451 name, val, type_)
446 else:
452 else:
447 setting = self.global_settings.create_or_update_setting(
453 setting = self.global_settings.create_or_update_setting(
448 name, val, type_)
454 name, val, type_)
449 return setting
455 return setting
450
456
451
457
452 class VcsSettingsModel(object):
458 class VcsSettingsModel(object):
453
459
454 INHERIT_SETTINGS = 'inherit_vcs_settings'
460 INHERIT_SETTINGS = 'inherit_vcs_settings'
455 GENERAL_SETTINGS = (
461 GENERAL_SETTINGS = (
456 'use_outdated_comments',
462 'use_outdated_comments',
457 'pr_merge_enabled',
463 'pr_merge_enabled',
458 'hg_use_rebase_for_merging',
464 'hg_use_rebase_for_merging',
459 'hg_close_branch_before_merging',
465 'hg_close_branch_before_merging',
460 'git_use_rebase_for_merging',
466 'git_use_rebase_for_merging',
461 'git_close_branch_before_merging',
467 'git_close_branch_before_merging',
462 'diff_cache',
468 'diff_cache',
463 )
469 )
464
470
465 HOOKS_SETTINGS = (
471 HOOKS_SETTINGS = (
466 ('hooks', 'changegroup.repo_size'),
472 ('hooks', 'changegroup.repo_size'),
467 ('hooks', 'changegroup.push_logger'),
473 ('hooks', 'changegroup.push_logger'),
468 ('hooks', 'outgoing.pull_logger'),
474 ('hooks', 'outgoing.pull_logger'),
469 )
475 )
470 HG_SETTINGS = (
476 HG_SETTINGS = (
471 ('extensions', 'largefiles'),
477 ('extensions', 'largefiles'),
472 ('phases', 'publish'),
478 ('phases', 'publish'),
473 ('extensions', 'evolve'),
479 ('extensions', 'evolve'),
474 ('extensions', 'topic'),
480 ('extensions', 'topic'),
475 ('experimental', 'evolution'),
481 ('experimental', 'evolution'),
476 ('experimental', 'evolution.exchange'),
482 ('experimental', 'evolution.exchange'),
477 )
483 )
478 GIT_SETTINGS = (
484 GIT_SETTINGS = (
479 ('vcs_git_lfs', 'enabled'),
485 ('vcs_git_lfs', 'enabled'),
480 )
486 )
481 GLOBAL_HG_SETTINGS = (
487 GLOBAL_HG_SETTINGS = (
482 ('extensions', 'largefiles'),
488 ('extensions', 'largefiles'),
483 ('largefiles', 'usercache'),
489 ('largefiles', 'usercache'),
484 ('phases', 'publish'),
490 ('phases', 'publish'),
485 ('extensions', 'hgsubversion'),
491 ('extensions', 'hgsubversion'),
486 ('extensions', 'evolve'),
492 ('extensions', 'evolve'),
487 ('extensions', 'topic'),
493 ('extensions', 'topic'),
488 ('experimental', 'evolution'),
494 ('experimental', 'evolution'),
489 ('experimental', 'evolution.exchange'),
495 ('experimental', 'evolution.exchange'),
490 )
496 )
491
497
492 GLOBAL_GIT_SETTINGS = (
498 GLOBAL_GIT_SETTINGS = (
493 ('vcs_git_lfs', 'enabled'),
499 ('vcs_git_lfs', 'enabled'),
494 ('vcs_git_lfs', 'store_location')
500 ('vcs_git_lfs', 'store_location')
495 )
501 )
496
502
497 GLOBAL_SVN_SETTINGS = (
503 GLOBAL_SVN_SETTINGS = (
498 ('vcs_svn_proxy', 'http_requests_enabled'),
504 ('vcs_svn_proxy', 'http_requests_enabled'),
499 ('vcs_svn_proxy', 'http_server_url')
505 ('vcs_svn_proxy', 'http_server_url')
500 )
506 )
501
507
502 SVN_BRANCH_SECTION = 'vcs_svn_branch'
508 SVN_BRANCH_SECTION = 'vcs_svn_branch'
503 SVN_TAG_SECTION = 'vcs_svn_tag'
509 SVN_TAG_SECTION = 'vcs_svn_tag'
504 SSL_SETTING = ('web', 'push_ssl')
510 SSL_SETTING = ('web', 'push_ssl')
505 PATH_SETTING = ('paths', '/')
511 PATH_SETTING = ('paths', '/')
506
512
507 def __init__(self, sa=None, repo=None):
513 def __init__(self, sa=None, repo=None):
508 self.global_settings = SettingsModel(sa=sa)
514 self.global_settings = SettingsModel(sa=sa)
509 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
515 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
510 self._ui_settings = (
516 self._ui_settings = (
511 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
517 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
512 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
518 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
513
519
514 @property
520 @property
515 @assert_repo_settings
521 @assert_repo_settings
516 def inherit_global_settings(self):
522 def inherit_global_settings(self):
517 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
523 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
518 return setting.app_settings_value if setting else True
524 return setting.app_settings_value if setting else True
519
525
520 @inherit_global_settings.setter
526 @inherit_global_settings.setter
521 @assert_repo_settings
527 @assert_repo_settings
522 def inherit_global_settings(self, value):
528 def inherit_global_settings(self, value):
523 self.repo_settings.create_or_update_setting(
529 self.repo_settings.create_or_update_setting(
524 self.INHERIT_SETTINGS, value, type_='bool')
530 self.INHERIT_SETTINGS, value, type_='bool')
525
531
526 def get_global_svn_branch_patterns(self):
532 def get_global_svn_branch_patterns(self):
527 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
533 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
528
534
529 @assert_repo_settings
535 @assert_repo_settings
530 def get_repo_svn_branch_patterns(self):
536 def get_repo_svn_branch_patterns(self):
531 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
537 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
532
538
533 def get_global_svn_tag_patterns(self):
539 def get_global_svn_tag_patterns(self):
534 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
540 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
535
541
536 @assert_repo_settings
542 @assert_repo_settings
537 def get_repo_svn_tag_patterns(self):
543 def get_repo_svn_tag_patterns(self):
538 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
544 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
539
545
540 def get_global_settings(self):
546 def get_global_settings(self):
541 return self._collect_all_settings(global_=True)
547 return self._collect_all_settings(global_=True)
542
548
543 @assert_repo_settings
549 @assert_repo_settings
544 def get_repo_settings(self):
550 def get_repo_settings(self):
545 return self._collect_all_settings(global_=False)
551 return self._collect_all_settings(global_=False)
546
552
547 @assert_repo_settings
553 @assert_repo_settings
548 def get_repo_settings_inherited(self):
554 def get_repo_settings_inherited(self):
549 global_settings = self.get_global_settings()
555 global_settings = self.get_global_settings()
550 global_settings.update(self.get_repo_settings())
556 global_settings.update(self.get_repo_settings())
551 return global_settings
557 return global_settings
552
558
553 @assert_repo_settings
559 @assert_repo_settings
554 def create_or_update_repo_settings(
560 def create_or_update_repo_settings(
555 self, data, inherit_global_settings=False):
561 self, data, inherit_global_settings=False):
556 from rhodecode.model.scm import ScmModel
562 from rhodecode.model.scm import ScmModel
557
563
558 self.inherit_global_settings = inherit_global_settings
564 self.inherit_global_settings = inherit_global_settings
559
565
560 repo = self.repo_settings.get_repo()
566 repo = self.repo_settings.get_repo()
561 if not inherit_global_settings:
567 if not inherit_global_settings:
562 if repo.repo_type == 'svn':
568 if repo.repo_type == 'svn':
563 self.create_repo_svn_settings(data)
569 self.create_repo_svn_settings(data)
564 else:
570 else:
565 self.create_or_update_repo_hook_settings(data)
571 self.create_or_update_repo_hook_settings(data)
566 self.create_or_update_repo_pr_settings(data)
572 self.create_or_update_repo_pr_settings(data)
567
573
568 if repo.repo_type == 'hg':
574 if repo.repo_type == 'hg':
569 self.create_or_update_repo_hg_settings(data)
575 self.create_or_update_repo_hg_settings(data)
570
576
571 if repo.repo_type == 'git':
577 if repo.repo_type == 'git':
572 self.create_or_update_repo_git_settings(data)
578 self.create_or_update_repo_git_settings(data)
573
579
574 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
580 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
575
581
576 @assert_repo_settings
582 @assert_repo_settings
577 def create_or_update_repo_hook_settings(self, data):
583 def create_or_update_repo_hook_settings(self, data):
578 for section, key in self.HOOKS_SETTINGS:
584 for section, key in self.HOOKS_SETTINGS:
579 data_key = self._get_form_ui_key(section, key)
585 data_key = self._get_form_ui_key(section, key)
580 if data_key not in data:
586 if data_key not in data:
581 raise ValueError(
587 raise ValueError(
582 'The given data does not contain {} key'.format(data_key))
588 'The given data does not contain {} key'.format(data_key))
583
589
584 active = data.get(data_key)
590 active = data.get(data_key)
585 repo_setting = self.repo_settings.get_ui_by_section_and_key(
591 repo_setting = self.repo_settings.get_ui_by_section_and_key(
586 section, key)
592 section, key)
587 if not repo_setting:
593 if not repo_setting:
588 global_setting = self.global_settings.\
594 global_setting = self.global_settings.\
589 get_ui_by_section_and_key(section, key)
595 get_ui_by_section_and_key(section, key)
590 self.repo_settings.create_ui_section_value(
596 self.repo_settings.create_ui_section_value(
591 section, global_setting.ui_value, key=key, active=active)
597 section, global_setting.ui_value, key=key, active=active)
592 else:
598 else:
593 repo_setting.ui_active = active
599 repo_setting.ui_active = active
594 Session().add(repo_setting)
600 Session().add(repo_setting)
595
601
596 def update_global_hook_settings(self, data):
602 def update_global_hook_settings(self, data):
597 for section, key in self.HOOKS_SETTINGS:
603 for section, key in self.HOOKS_SETTINGS:
598 data_key = self._get_form_ui_key(section, key)
604 data_key = self._get_form_ui_key(section, key)
599 if data_key not in data:
605 if data_key not in data:
600 raise ValueError(
606 raise ValueError(
601 'The given data does not contain {} key'.format(data_key))
607 'The given data does not contain {} key'.format(data_key))
602 active = data.get(data_key)
608 active = data.get(data_key)
603 repo_setting = self.global_settings.get_ui_by_section_and_key(
609 repo_setting = self.global_settings.get_ui_by_section_and_key(
604 section, key)
610 section, key)
605 repo_setting.ui_active = active
611 repo_setting.ui_active = active
606 Session().add(repo_setting)
612 Session().add(repo_setting)
607
613
608 @assert_repo_settings
614 @assert_repo_settings
609 def create_or_update_repo_pr_settings(self, data):
615 def create_or_update_repo_pr_settings(self, data):
610 return self._create_or_update_general_settings(
616 return self._create_or_update_general_settings(
611 self.repo_settings, data)
617 self.repo_settings, data)
612
618
613 def create_or_update_global_pr_settings(self, data):
619 def create_or_update_global_pr_settings(self, data):
614 return self._create_or_update_general_settings(
620 return self._create_or_update_general_settings(
615 self.global_settings, data)
621 self.global_settings, data)
616
622
617 @assert_repo_settings
623 @assert_repo_settings
618 def create_repo_svn_settings(self, data):
624 def create_repo_svn_settings(self, data):
619 return self._create_svn_settings(self.repo_settings, data)
625 return self._create_svn_settings(self.repo_settings, data)
620
626
621 def _set_evolution(self, settings, is_enabled):
627 def _set_evolution(self, settings, is_enabled):
622 if is_enabled:
628 if is_enabled:
623 # if evolve is active set evolution=all
629 # if evolve is active set evolution=all
624
630
625 self._create_or_update_ui(
631 self._create_or_update_ui(
626 settings, *('experimental', 'evolution'), value='all',
632 settings, *('experimental', 'evolution'), value='all',
627 active=True)
633 active=True)
628 self._create_or_update_ui(
634 self._create_or_update_ui(
629 settings, *('experimental', 'evolution.exchange'), value='yes',
635 settings, *('experimental', 'evolution.exchange'), value='yes',
630 active=True)
636 active=True)
631 # if evolve is active set topics server support
637 # if evolve is active set topics server support
632 self._create_or_update_ui(
638 self._create_or_update_ui(
633 settings, *('extensions', 'topic'), value='',
639 settings, *('extensions', 'topic'), value='',
634 active=True)
640 active=True)
635
641
636 else:
642 else:
637 self._create_or_update_ui(
643 self._create_or_update_ui(
638 settings, *('experimental', 'evolution'), value='',
644 settings, *('experimental', 'evolution'), value='',
639 active=False)
645 active=False)
640 self._create_or_update_ui(
646 self._create_or_update_ui(
641 settings, *('experimental', 'evolution.exchange'), value='no',
647 settings, *('experimental', 'evolution.exchange'), value='no',
642 active=False)
648 active=False)
643 self._create_or_update_ui(
649 self._create_or_update_ui(
644 settings, *('extensions', 'topic'), value='',
650 settings, *('extensions', 'topic'), value='',
645 active=False)
651 active=False)
646
652
647 @assert_repo_settings
653 @assert_repo_settings
648 def create_or_update_repo_hg_settings(self, data):
654 def create_or_update_repo_hg_settings(self, data):
649 largefiles, phases, evolve = \
655 largefiles, phases, evolve = \
650 self.HG_SETTINGS[:3]
656 self.HG_SETTINGS[:3]
651 largefiles_key, phases_key, evolve_key = \
657 largefiles_key, phases_key, evolve_key = \
652 self._get_settings_keys(self.HG_SETTINGS[:3], data)
658 self._get_settings_keys(self.HG_SETTINGS[:3], data)
653
659
654 self._create_or_update_ui(
660 self._create_or_update_ui(
655 self.repo_settings, *largefiles, value='',
661 self.repo_settings, *largefiles, value='',
656 active=data[largefiles_key])
662 active=data[largefiles_key])
657 self._create_or_update_ui(
663 self._create_or_update_ui(
658 self.repo_settings, *evolve, value='',
664 self.repo_settings, *evolve, value='',
659 active=data[evolve_key])
665 active=data[evolve_key])
660 self._set_evolution(self.repo_settings, is_enabled=data[evolve_key])
666 self._set_evolution(self.repo_settings, is_enabled=data[evolve_key])
661
667
662 self._create_or_update_ui(
668 self._create_or_update_ui(
663 self.repo_settings, *phases, value=safe_str(data[phases_key]))
669 self.repo_settings, *phases, value=safe_str(data[phases_key]))
664
670
665 def create_or_update_global_hg_settings(self, data):
671 def create_or_update_global_hg_settings(self, data):
666 largefiles, largefiles_store, phases, hgsubversion, evolve \
672 largefiles, largefiles_store, phases, hgsubversion, evolve \
667 = self.GLOBAL_HG_SETTINGS[:5]
673 = self.GLOBAL_HG_SETTINGS[:5]
668 largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \
674 largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \
669 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:5], data)
675 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:5], data)
670
676
671 self._create_or_update_ui(
677 self._create_or_update_ui(
672 self.global_settings, *largefiles, value='',
678 self.global_settings, *largefiles, value='',
673 active=data[largefiles_key])
679 active=data[largefiles_key])
674 self._create_or_update_ui(
680 self._create_or_update_ui(
675 self.global_settings, *largefiles_store, value=data[largefiles_store_key])
681 self.global_settings, *largefiles_store, value=data[largefiles_store_key])
676 self._create_or_update_ui(
682 self._create_or_update_ui(
677 self.global_settings, *phases, value=safe_str(data[phases_key]))
683 self.global_settings, *phases, value=safe_str(data[phases_key]))
678 self._create_or_update_ui(
684 self._create_or_update_ui(
679 self.global_settings, *hgsubversion, active=data[subversion_key])
685 self.global_settings, *hgsubversion, active=data[subversion_key])
680 self._create_or_update_ui(
686 self._create_or_update_ui(
681 self.global_settings, *evolve, value='',
687 self.global_settings, *evolve, value='',
682 active=data[evolve_key])
688 active=data[evolve_key])
683 self._set_evolution(self.global_settings, is_enabled=data[evolve_key])
689 self._set_evolution(self.global_settings, is_enabled=data[evolve_key])
684
690
685 def create_or_update_repo_git_settings(self, data):
691 def create_or_update_repo_git_settings(self, data):
686 # NOTE(marcink): # comma makes unpack work properly
692 # NOTE(marcink): # comma makes unpack work properly
687 lfs_enabled, \
693 lfs_enabled, \
688 = self.GIT_SETTINGS
694 = self.GIT_SETTINGS
689
695
690 lfs_enabled_key, \
696 lfs_enabled_key, \
691 = self._get_settings_keys(self.GIT_SETTINGS, data)
697 = self._get_settings_keys(self.GIT_SETTINGS, data)
692
698
693 self._create_or_update_ui(
699 self._create_or_update_ui(
694 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
700 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
695 active=data[lfs_enabled_key])
701 active=data[lfs_enabled_key])
696
702
697 def create_or_update_global_git_settings(self, data):
703 def create_or_update_global_git_settings(self, data):
698 lfs_enabled, lfs_store_location \
704 lfs_enabled, lfs_store_location \
699 = self.GLOBAL_GIT_SETTINGS
705 = self.GLOBAL_GIT_SETTINGS
700 lfs_enabled_key, lfs_store_location_key \
706 lfs_enabled_key, lfs_store_location_key \
701 = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)
707 = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)
702
708
703 self._create_or_update_ui(
709 self._create_or_update_ui(
704 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
710 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
705 active=data[lfs_enabled_key])
711 active=data[lfs_enabled_key])
706 self._create_or_update_ui(
712 self._create_or_update_ui(
707 self.global_settings, *lfs_store_location,
713 self.global_settings, *lfs_store_location,
708 value=data[lfs_store_location_key])
714 value=data[lfs_store_location_key])
709
715
710 def create_or_update_global_svn_settings(self, data):
716 def create_or_update_global_svn_settings(self, data):
711 # branch/tags patterns
717 # branch/tags patterns
712 self._create_svn_settings(self.global_settings, data)
718 self._create_svn_settings(self.global_settings, data)
713
719
714 http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS
720 http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS
715 http_requests_enabled_key, http_server_url_key = self._get_settings_keys(
721 http_requests_enabled_key, http_server_url_key = self._get_settings_keys(
716 self.GLOBAL_SVN_SETTINGS, data)
722 self.GLOBAL_SVN_SETTINGS, data)
717
723
718 self._create_or_update_ui(
724 self._create_or_update_ui(
719 self.global_settings, *http_requests_enabled,
725 self.global_settings, *http_requests_enabled,
720 value=safe_str(data[http_requests_enabled_key]))
726 value=safe_str(data[http_requests_enabled_key]))
721 self._create_or_update_ui(
727 self._create_or_update_ui(
722 self.global_settings, *http_server_url,
728 self.global_settings, *http_server_url,
723 value=data[http_server_url_key])
729 value=data[http_server_url_key])
724
730
725 def update_global_ssl_setting(self, value):
731 def update_global_ssl_setting(self, value):
726 self._create_or_update_ui(
732 self._create_or_update_ui(
727 self.global_settings, *self.SSL_SETTING, value=value)
733 self.global_settings, *self.SSL_SETTING, value=value)
728
734
729 def update_global_path_setting(self, value):
735 def update_global_path_setting(self, value):
730 self._create_or_update_ui(
736 self._create_or_update_ui(
731 self.global_settings, *self.PATH_SETTING, value=value)
737 self.global_settings, *self.PATH_SETTING, value=value)
732
738
733 @assert_repo_settings
739 @assert_repo_settings
734 def delete_repo_svn_pattern(self, id_):
740 def delete_repo_svn_pattern(self, id_):
735 ui = self.repo_settings.UiDbModel.get(id_)
741 ui = self.repo_settings.UiDbModel.get(id_)
736 if ui and ui.repository.repo_name == self.repo_settings.repo:
742 if ui and ui.repository.repo_name == self.repo_settings.repo:
737 # only delete if it's the same repo as initialized settings
743 # only delete if it's the same repo as initialized settings
738 self.repo_settings.delete_ui(id_)
744 self.repo_settings.delete_ui(id_)
739 else:
745 else:
740 # raise error as if we wouldn't find this option
746 # raise error as if we wouldn't find this option
741 self.repo_settings.delete_ui(-1)
747 self.repo_settings.delete_ui(-1)
742
748
743 def delete_global_svn_pattern(self, id_):
749 def delete_global_svn_pattern(self, id_):
744 self.global_settings.delete_ui(id_)
750 self.global_settings.delete_ui(id_)
745
751
746 @assert_repo_settings
752 @assert_repo_settings
747 def get_repo_ui_settings(self, section=None, key=None):
753 def get_repo_ui_settings(self, section=None, key=None):
748 global_uis = self.global_settings.get_ui(section, key)
754 global_uis = self.global_settings.get_ui(section, key)
749 repo_uis = self.repo_settings.get_ui(section, key)
755 repo_uis = self.repo_settings.get_ui(section, key)
750
756
751 filtered_repo_uis = self._filter_ui_settings(repo_uis)
757 filtered_repo_uis = self._filter_ui_settings(repo_uis)
752 filtered_repo_uis_keys = [
758 filtered_repo_uis_keys = [
753 (s.section, s.key) for s in filtered_repo_uis]
759 (s.section, s.key) for s in filtered_repo_uis]
754
760
755 def _is_global_ui_filtered(ui):
761 def _is_global_ui_filtered(ui):
756 return (
762 return (
757 (ui.section, ui.key) in filtered_repo_uis_keys
763 (ui.section, ui.key) in filtered_repo_uis_keys
758 or ui.section in self._svn_sections)
764 or ui.section in self._svn_sections)
759
765
760 filtered_global_uis = [
766 filtered_global_uis = [
761 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
767 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
762
768
763 return filtered_global_uis + filtered_repo_uis
769 return filtered_global_uis + filtered_repo_uis
764
770
765 def get_global_ui_settings(self, section=None, key=None):
771 def get_global_ui_settings(self, section=None, key=None):
766 return self.global_settings.get_ui(section, key)
772 return self.global_settings.get_ui(section, key)
767
773
768 def get_ui_settings_as_config_obj(self, section=None, key=None):
774 def get_ui_settings_as_config_obj(self, section=None, key=None):
769 config = base.Config()
775 config = base.Config()
770
776
771 ui_settings = self.get_ui_settings(section=section, key=key)
777 ui_settings = self.get_ui_settings(section=section, key=key)
772
778
773 for entry in ui_settings:
779 for entry in ui_settings:
774 config.set(entry.section, entry.key, entry.value)
780 config.set(entry.section, entry.key, entry.value)
775
781
776 return config
782 return config
777
783
778 def get_ui_settings(self, section=None, key=None):
784 def get_ui_settings(self, section=None, key=None):
779 if not self.repo_settings or self.inherit_global_settings:
785 if not self.repo_settings or self.inherit_global_settings:
780 return self.get_global_ui_settings(section, key)
786 return self.get_global_ui_settings(section, key)
781 else:
787 else:
782 return self.get_repo_ui_settings(section, key)
788 return self.get_repo_ui_settings(section, key)
783
789
784 def get_svn_patterns(self, section=None):
790 def get_svn_patterns(self, section=None):
785 if not self.repo_settings:
791 if not self.repo_settings:
786 return self.get_global_ui_settings(section)
792 return self.get_global_ui_settings(section)
787 else:
793 else:
788 return self.get_repo_ui_settings(section)
794 return self.get_repo_ui_settings(section)
789
795
790 @assert_repo_settings
796 @assert_repo_settings
791 def get_repo_general_settings(self):
797 def get_repo_general_settings(self):
792 global_settings = self.global_settings.get_all_settings()
798 global_settings = self.global_settings.get_all_settings()
793 repo_settings = self.repo_settings.get_all_settings()
799 repo_settings = self.repo_settings.get_all_settings()
794 filtered_repo_settings = self._filter_general_settings(repo_settings)
800 filtered_repo_settings = self._filter_general_settings(repo_settings)
795 global_settings.update(filtered_repo_settings)
801 global_settings.update(filtered_repo_settings)
796 return global_settings
802 return global_settings
797
803
798 def get_global_general_settings(self):
804 def get_global_general_settings(self):
799 return self.global_settings.get_all_settings()
805 return self.global_settings.get_all_settings()
800
806
801 def get_general_settings(self):
807 def get_general_settings(self):
802 if not self.repo_settings or self.inherit_global_settings:
808 if not self.repo_settings or self.inherit_global_settings:
803 return self.get_global_general_settings()
809 return self.get_global_general_settings()
804 else:
810 else:
805 return self.get_repo_general_settings()
811 return self.get_repo_general_settings()
806
812
807 def get_repos_location(self):
813 def get_repos_location(self):
808 return self.global_settings.get_ui_by_key('/').ui_value
814 return self.global_settings.get_ui_by_key('/').ui_value
809
815
810 def _filter_ui_settings(self, settings):
816 def _filter_ui_settings(self, settings):
811 filtered_settings = [
817 filtered_settings = [
812 s for s in settings if self._should_keep_setting(s)]
818 s for s in settings if self._should_keep_setting(s)]
813 return filtered_settings
819 return filtered_settings
814
820
815 def _should_keep_setting(self, setting):
821 def _should_keep_setting(self, setting):
816 keep = (
822 keep = (
817 (setting.section, setting.key) in self._ui_settings or
823 (setting.section, setting.key) in self._ui_settings or
818 setting.section in self._svn_sections)
824 setting.section in self._svn_sections)
819 return keep
825 return keep
820
826
821 def _filter_general_settings(self, settings):
827 def _filter_general_settings(self, settings):
822 keys = ['rhodecode_{}'.format(key) for key in self.GENERAL_SETTINGS]
828 keys = ['rhodecode_{}'.format(key) for key in self.GENERAL_SETTINGS]
823 return {
829 return {
824 k: settings[k]
830 k: settings[k]
825 for k in settings if k in keys}
831 for k in settings if k in keys}
826
832
def _collect_all_settings(self, global_=False):
    """
    Gather the current values of all known ui settings and general
    settings into one flat dict keyed by their form field names.

    :param global_: when True read from the global settings store,
        otherwise from the per-repository settings store
    :return: dict mapping form keys (see `_get_form_ui_key` and
        ``rhodecode_<name>``) to their stored values
    """
    settings = self.global_settings if global_ else self.repo_settings
    result = {}

    for section, key in self._ui_settings:
        ui = settings.get_ui_by_section_and_key(section, key)
        result_key = self._get_form_ui_key(section, key)

        if ui:
            if section in ('hooks', 'extensions'):
                # hooks/extensions entries are on/off toggles, expose
                # the active flag rather than the stored value
                result[result_key] = ui.ui_active
            elif result_key in ['vcs_git_lfs_enabled']:
                # lfs enablement is likewise a boolean toggle
                result[result_key] = ui.ui_active
            else:
                result[result_key] = ui.ui_value

    for name in self.GENERAL_SETTINGS:
        setting = settings.get_setting_by_name(name)
        if setting:
            result_key = 'rhodecode_{}'.format(name)
            result[result_key] = setting.app_settings_value

    return result
850
856
851 def _get_form_ui_key(self, section, key):
857 def _get_form_ui_key(self, section, key):
852 return '{section}_{key}'.format(
858 return '{section}_{key}'.format(
853 section=section, key=key.replace('.', '_'))
859 section=section, key=key.replace('.', '_'))
854
860
def _create_or_update_ui(
        self, settings, section, key, value=None, active=None):
    """
    Upsert a single ui setting in the given settings store.

    :param settings: settings store providing `get_ui_by_section_and_key`
        and `create_ui_section_value`
    :param section: ui section name
    :param key: ui key within the section
    :param value: new value; left untouched when None on update
    :param active: new active flag; new entries default to active when
        None, existing entries keep their flag when None
    """
    ui = settings.get_ui_by_section_and_key(section, key)
    if not ui:
        # brand-new entries default to active unless explicitly disabled
        active = True if active is None else active
        settings.create_ui_section_value(
            section, value, key=key, active=active)
    else:
        # only overwrite the fields the caller actually supplied
        if active is not None:
            ui.ui_active = active
        if value is not None:
            ui.ui_value = value
        Session().add(ui)
868
874
869 def _create_svn_settings(self, settings, data):
875 def _create_svn_settings(self, settings, data):
870 svn_settings = {
876 svn_settings = {
871 'new_svn_branch': self.SVN_BRANCH_SECTION,
877 'new_svn_branch': self.SVN_BRANCH_SECTION,
872 'new_svn_tag': self.SVN_TAG_SECTION
878 'new_svn_tag': self.SVN_TAG_SECTION
873 }
879 }
874 for key in svn_settings:
880 for key in svn_settings:
875 if data.get(key):
881 if data.get(key):
876 settings.create_ui_section_value(svn_settings[key], data[key])
882 settings.create_ui_section_value(svn_settings[key], data[key])
877
883
878 def _create_or_update_general_settings(self, settings, data):
884 def _create_or_update_general_settings(self, settings, data):
879 for name in self.GENERAL_SETTINGS:
885 for name in self.GENERAL_SETTINGS:
880 data_key = 'rhodecode_{}'.format(name)
886 data_key = 'rhodecode_{}'.format(name)
881 if data_key not in data:
887 if data_key not in data:
882 raise ValueError(
888 raise ValueError(
883 'The given data does not contain {} key'.format(data_key))
889 'The given data does not contain {} key'.format(data_key))
884 setting = settings.create_or_update_setting(
890 setting = settings.create_or_update_setting(
885 name, data[data_key], 'bool')
891 name, data[data_key], 'bool')
886 Session().add(setting)
892 Session().add(setting)
887
893
888 def _get_settings_keys(self, settings, data):
894 def _get_settings_keys(self, settings, data):
889 data_keys = [self._get_form_ui_key(*s) for s in settings]
895 data_keys = [self._get_form_ui_key(*s) for s in settings]
890 for data_key in data_keys:
896 for data_key in data_keys:
891 if data_key not in data:
897 if data_key not in data:
892 raise ValueError(
898 raise ValueError(
893 'The given data does not contain {} key'.format(data_key))
899 'The given data does not contain {} key'.format(data_key))
894 return data_keys
900 return data_keys
895
901
def create_largeobjects_dirs_if_needed(self, repo_store_path):
    """
    This is subscribed to the `pyramid.events.ApplicationCreated` event. It
    ensures the Mercurial largefiles and Git LFS storage directories exist
    under *repo_store_path*, creating them when missing.

    Creation failures are logged as warnings rather than raised, so a
    broken storage path does not abort application start-up.
    """

    from rhodecode.lib.vcs.backends.hg import largefiles_store
    from rhodecode.lib.vcs.backends.git import lfs_store

    paths = [
        largefiles_store(repo_store_path),
        lfs_store(repo_store_path)]

    for path in paths:
        if os.path.isfile(path):
            # a plain file squats on the expected directory name; leave it
            # alone — makedirs would fail on it anyway
            continue
        try:
            # exist_ok=True removes the TOCTOU race the old
            # isdir()-then-makedirs() dance had when several workers boot
            # concurrently; an already-existing directory is a no-op
            os.makedirs(path, exist_ok=True)
        except Exception:
            log.warning('Failed to create largefiles dir:%s', path)
@@ -1,144 +1,148 b''
1
1
2
2
3 # Copyright (C) 2013-2020 RhodeCode GmbH
3 # Copyright (C) 2013-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import traceback
22 import traceback
23
23
24 import sshpubkeys
24 import sshpubkeys
25 import sshpubkeys.exceptions
25 import sshpubkeys.exceptions
26
26
27 from cryptography.hazmat.primitives.asymmetric import rsa
27 from cryptography.hazmat.primitives.asymmetric import rsa
28 from cryptography.hazmat.primitives import serialization as crypto_serialization
28 from cryptography.hazmat.primitives import serialization as crypto_serialization
29 from cryptography.hazmat.backends import default_backend as crypto_default_backend
29 from cryptography.hazmat.backends import default_backend as crypto_default_backend
30
30
31 from rhodecode.lib.str_utils import safe_bytes, safe_str
31 from rhodecode.model import BaseModel
32 from rhodecode.model import BaseModel
32 from rhodecode.model.db import UserSshKeys
33 from rhodecode.model.db import UserSshKeys
33 from rhodecode.model.meta import Session
34 from rhodecode.model.meta import Session
34
35
35
36
36 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
37
38
38
39
class SshKeyModel(BaseModel):
    """Business logic for managing per-user SSH public keys."""
    # model class this BaseModel operates on
    cls = UserSshKeys
    # default serialization format for generated private keys
    DEFAULT_PRIVATE_KEY_FORMAT = 'pkcs8'

    def parse_key(self, key_data):
        """
        Parse and strictly validate a public SSH key string.

        The returned ``sshpubkeys.SSHKey`` object exposes, e.g.::

            print(ssh.bits)  # 768
            print(ssh.hash_md5())  # 56:84:1e:90:08:3b:60:c7:29:70:5f:5e:25:a6:3b:86
            print(ssh.hash_sha256())  # SHA256:xk3IEJIdIoR9MmSRXTP98rjDdZocmXJje/28ohMQEwM
            print(ssh.hash_sha512())  # SHA512:1C3lNBhjpDVQe39hnyy+xvlZYU3IPwzqK1rVneGavy6O3/ebjEQSFvmeWoyMTplIanmUK1hmr9nA8Skmj516HA
            print(ssh.comment)  # ojar@ojar-laptop
            print(ssh.options_raw)  # None (string of optional options at the beginning of public key)
            print(ssh.options)  # None (options as a dictionary, parsed and validated)

        :param key_data: public key material as text
        :return: parsed ``sshpubkeys.SSHKey`` instance
        :raises: re-raises any parse/validation error after logging it
        """
        ssh = sshpubkeys.SSHKey(strict_mode=True)
        try:
            ssh.parse(key_data)
            return ssh
        except sshpubkeys.exceptions.InvalidKeyException as err:
            log.error("Invalid key: %s", err)
            raise
        except NotImplementedError as err:
            log.error("Invalid key type: %s", err)
            raise
        except Exception as err:
            log.error("Key Parse error: %s", err)
            raise

    def generate_keypair(self, comment=None, private_format=DEFAULT_PRIVATE_KEY_FORMAT):
        """
        Generate a fresh 2048-bit RSA keypair.

        :param comment: optional comment appended to the public key line
        :param private_format: 'pkcs8' (default) selects the PKCS8 PEM
            format; any other value falls back to the legacy
            TraditionalOpenSSL PEM format
        :return: tuple ``(private_key, public_key)`` as strings
        """
        key = rsa.generate_private_key(
            backend=crypto_default_backend(),
            public_exponent=65537,
            key_size=2048
        )
        if private_format == self.DEFAULT_PRIVATE_KEY_FORMAT:
            private_format = crypto_serialization.PrivateFormat.PKCS8
        else:
            # legacy format that can be used by older systems, use if pkcs8 have
            # problems
            private_format = crypto_serialization.PrivateFormat.TraditionalOpenSSL

        # serialize the private key unencrypted as PEM, then decode bytes->str
        private_key = key.private_bytes(
            crypto_serialization.Encoding.PEM,
            private_format,
            crypto_serialization.NoEncryption())
        private_key = safe_str(private_key)

        public_key = key.public_key().public_bytes(
            crypto_serialization.Encoding.OpenSSH,
            crypto_serialization.PublicFormat.OpenSSH
        )
        public_key = safe_str(public_key)

        if comment:
            public_key = public_key + " " + comment
        return private_key, public_key

    def create(self, user, fingerprint, key_data, description):
        """
        Register a new SSH key for *user* and add it to the session.

        The session is not committed here; the caller is responsible for
        committing.

        :return: the new, uncommitted `UserSshKeys` instance
        """
        user = self._get_user(user)

        new_ssh_key = UserSshKeys()
        new_ssh_key.ssh_key_fingerprint = fingerprint
        new_ssh_key.ssh_key_data = key_data
        new_ssh_key.user_id = user.user_id
        new_ssh_key.description = description

        Session().add(new_ssh_key)

        return new_ssh_key

    def delete(self, ssh_key_id, user=None):
        """
        Deletes given api_key, if user is set it also filters the object for
        deletion by given user.

        Missing keys are silently ignored; deletion errors are logged and
        re-raised.
        """
        ssh_key = UserSshKeys.query().filter(
            UserSshKeys.ssh_key_id == ssh_key_id)

        if user:
            # restrict deletion to keys owned by the given user
            user = self._get_user(user)
            ssh_key = ssh_key.filter(UserSshKeys.user_id == user.user_id)
        ssh_key = ssh_key.scalar()

        if ssh_key:
            try:
                Session().delete(ssh_key)
            except Exception:
                log.error(traceback.format_exc())
                raise

    def get_ssh_keys(self, user):
        """Return a query of the user's SSH keys ordered by key id."""
        user = self._get_user(user)
        user_ssh_keys = UserSshKeys.query()\
            .filter(UserSshKeys.user_id == user.user_id)
        user_ssh_keys = user_ssh_keys.order_by(UserSshKeys.ssh_key_id)
        return user_ssh_keys

    def get_ssh_key_by_fingerprint(self, ssh_key_fingerprint):
        """Return the first key matching *ssh_key_fingerprint*, or None."""
        user_ssh_key = UserSshKeys.query()\
            .filter(UserSshKeys.ssh_key_fingerprint == ssh_key_fingerprint)\
            .first()

        return user_ssh_key
@@ -1,83 +1,85 b''
1
1
2
2
3 # Copyright (C) 2013-2020 RhodeCode GmbH
3 # Copyright (C) 2013-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import urllib.request, urllib.error, urllib.parse
22 import urllib.request
23 import urllib.error
24 import urllib.parse
23 from packaging.version import Version
25 from packaging.version import Version
24
26
25 import rhodecode
27 import rhodecode
26 from rhodecode.lib.ext_json import json
28 from rhodecode.lib.ext_json import json
27 from rhodecode.model import BaseModel
29 from rhodecode.model import BaseModel
28 from rhodecode.model.meta import Session
30 from rhodecode.model.meta import Session
29 from rhodecode.model.settings import SettingsModel
31 from rhodecode.model.settings import SettingsModel
30
32
31
33
32 log = logging.getLogger(__name__)
34 log = logging.getLogger(__name__)
33
35
34
36
class UpdateModel(BaseModel):
    """
    Queries a remote update server for newer RhodeCode releases and
    stores the latest known version in the application settings.
    """
    # settings key the latest known version is stored under
    UPDATE_SETTINGS_KEY = 'update_version'
    # settings key holding the update-server URL
    UPDATE_URL_SETTINGS_KEY = 'rhodecode_update_url'

    @staticmethod
    def get_update_data(update_url):
        """Return the JSON update data."""
        ver = rhodecode.__version__
        log.debug('Checking for upgrade on `%s` server', update_url)
        opener = urllib.request.build_opener()
        opener.addheaders = [('User-agent', 'RhodeCode-SCM/%s' % ver)]
        # read inside a context manager so the HTTP response (and its
        # socket) is always closed — the previous code leaked it
        with opener.open(update_url) as response:
            response_data = response.read()
        data = json.loads(response_data)
        log.debug('update server returned data')
        return data

    def get_update_url(self):
        """Return the configured update-server URL, or None if unset."""
        settings = SettingsModel().get_all_settings()
        return settings.get(self.UPDATE_URL_SETTINGS_KEY)

    def store_version(self, version):
        """Persist *version* as the latest known release and commit."""
        log.debug('Storing version %s into settings', version)
        setting = SettingsModel().create_or_update_setting(
            self.UPDATE_SETTINGS_KEY, version)
        Session().add(setting)
        Session().commit()

    def get_stored_version(self, fallback=None):
        """
        Return the stored latest-known version.

        :param fallback: returned when nothing is stored; defaults to
            the '0.0.0' sentinel
        """
        obj = SettingsModel().get_setting_by_name(self.UPDATE_SETTINGS_KEY)
        if obj:
            return obj.app_settings_value
        return fallback or '0.0.0'

    def _sanitize_version(self, version):
        """
        Cleanup our custom ver.
        e.g 4.11.0_20171204_204825_CE_default_EE_default to 4.11.0
        """
        return version.split('_')[0]

    def is_outdated(self, cur_version, latest_version=None):
        """
        Return True when *cur_version* is older than *latest_version*
        (or the stored latest version). Unparsable versions are treated
        as up-to-date.
        """
        latest_version = latest_version or self.get_stored_version()
        try:
            cur_version = self._sanitize_version(cur_version)
            return Version(latest_version) > Version(cur_version)
        except Exception:
            # could be invalid version, etc
            return False
@@ -1,1046 +1,1047 b''
1
1
2 # Copyright (C) 2010-2020 RhodeCode GmbH
2 # Copyright (C) 2010-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 """
20 """
21 users model for RhodeCode
21 users model for RhodeCode
22 """
22 """
23
23
24 import logging
24 import logging
25 import traceback
25 import traceback
26 import datetime
26 import datetime
27 import ipaddress
27 import ipaddress
28
28
29 from pyramid.threadlocal import get_current_request
29 from pyramid.threadlocal import get_current_request
30 from sqlalchemy.exc import DatabaseError
30 from sqlalchemy.exc import DatabaseError
31
31
32 from rhodecode import events
32 from rhodecode import events
33 from rhodecode.lib.user_log_filter import user_log_filter
33 from rhodecode.lib.user_log_filter import user_log_filter
34 from rhodecode.lib.utils2 import (
34 from rhodecode.lib.utils2 import (
35 safe_unicode, get_current_rhodecode_user, action_logger_generic,
35 get_current_rhodecode_user, action_logger_generic,
36 AttributeDict, str2bool)
36 AttributeDict, str2bool)
37 from rhodecode.lib.str_utils import safe_str
37 from rhodecode.lib.exceptions import (
38 from rhodecode.lib.exceptions import (
38 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
39 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
39 UserOwnsUserGroupsException, NotAllowedToCreateUserError,
40 UserOwnsUserGroupsException, NotAllowedToCreateUserError,
40 UserOwnsPullRequestsException, UserOwnsArtifactsException)
41 UserOwnsPullRequestsException, UserOwnsArtifactsException)
41 from rhodecode.lib.caching_query import FromCache
42 from rhodecode.lib.caching_query import FromCache
42 from rhodecode.model import BaseModel
43 from rhodecode.model import BaseModel
43 from rhodecode.model.db import (
44 from rhodecode.model.db import (
44 _hash_key, func, true, false, or_, joinedload, User, UserToPerm,
45 _hash_key, func, true, false, or_, joinedload, User, UserToPerm,
45 UserEmailMap, UserIpMap, UserLog)
46 UserEmailMap, UserIpMap, UserLog)
46 from rhodecode.model.meta import Session
47 from rhodecode.model.meta import Session
47 from rhodecode.model.auth_token import AuthTokenModel
48 from rhodecode.model.auth_token import AuthTokenModel
48 from rhodecode.model.repo_group import RepoGroupModel
49 from rhodecode.model.repo_group import RepoGroupModel
49
50
50 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
51
52
52
53
53 class UserModel(BaseModel):
54 class UserModel(BaseModel):
54 cls = User
55 cls = User
55
56
def get(self, user_id, cache=False):
    """
    Fetch a `User` by primary key.

    :param user_id: database id of the user
    :param cache: when True, cache the lookup in the short-lived SQL
        cache region under ``get_user_<id>``
    :return: `User` instance or None when not found
    """
    user = self.sa.query(User)
    if cache:
        user = user.options(
            FromCache("sql_cache_short", f"get_user_{user_id}"))
    return user.get(user_id)
62
63
def get_user(self, user):
    """Resolve the given user reference via the base-model helper."""
    resolved = self._get_user(user)
    return resolved
65
66
def _serialize_user(self, user):
    """
    Build a plain-dict representation of *user* as consumed by the
    user-autocomplete widgets (see `get_users`).
    """
    # imported lazily to avoid an import cycle with the helpers module
    import rhodecode.lib.helpers as h

    return {
        'id': user.user_id,
        'first_name': user.first_name,
        'last_name': user.last_name,
        'username': user.username,
        'email': user.email,
        'icon_link': h.gravatar_url(user.email, 30),  # 30px avatar
        'profile_link': h.link_to_user(user),
        'value_display': h.escape(h.person(user)),
        'value': user.username,
        'value_type': 'user',
        'active': user.active,
    }
82
83
def get_users(self, name_contains=None, limit=20, only_active=True):
    """
    Return serialized users for autocomplete-style lookups.

    :param name_contains: optional substring matched case-insensitively
        (ILIKE) against first name, last name and username
    :param limit: maximum number of users returned
    :param only_active: when True, inactive accounts are excluded
    :return: list of dicts as produced by `_serialize_user`
    """

    query = self.sa.query(User)
    if only_active:
        query = query.filter(User.active == true())

    if name_contains:
        ilike_expression = u'%{}%'.format(safe_str(name_contains))
        query = query.filter(
            or_(
                User.name.ilike(ilike_expression),
                User.lastname.ilike(ilike_expression),
                User.username.ilike(ilike_expression)
            )
        )
    # sort by len to have top most matches first
    query = query.order_by(func.length(User.username))\
        .order_by(User.username)
    query = query.limit(limit)

    users = query.all()

    _users = [
        self._serialize_user(user) for user in users
    ]
    return _users
109
110
def get_by_username(self, username, cache=False, case_insensitive=False):
    """
    Fetch a user by username.

    :param username: username to look up
    :param cache: when True, cache the lookup in the short-lived SQL
        cache region keyed by a hash of the username
    :param case_insensitive: match with ILIKE instead of exact equality
    :return: `User` instance or None
    """

    if case_insensitive:
        user = self.sa.query(User).filter(User.username.ilike(username))
    else:
        user = self.sa.query(User)\
            .filter(User.username == username)
    if cache:
        name_key = _hash_key(username)
        user = user.options(
            FromCache("sql_cache_short", f"get_user_{name_key}"))
    return user.scalar()
122
123
def get_by_email(self, email, cache=False, case_insensitive=False):
    """Look a user up by e-mail, delegating to `User.get_by_email`."""
    result = User.get_by_email(email, case_insensitive, cache)
    return result
125
126
def get_by_auth_token(self, auth_token, cache=False):
    """Look a user up by auth token, delegating to `User.get_by_auth_token`."""
    result = User.get_by_auth_token(auth_token, cache)
    return result
128
129
def get_active_user_count(self, cache=False):
    """
    Count active users, excluding the built-in default user.

    :param cache: when True, cache the count in the short-lived SQL
        cache region under ``get_active_users``
    :return: number of active non-default users
    """
    qry = User.query().filter(
        User.active == true()).filter(
        User.username != User.DEFAULT_USER)
    if cache:
        qry = qry.options(
            FromCache("sql_cache_short", "get_active_users"))
    return qry.count()
137
138
def create(self, form_data, cur_user=None):
    """
    Create a new user from validated registration/admin form data.

    :param form_data: dict with username, password, email, firstname,
        lastname, active, extern_type, extern_name and optionally
        create_repo_group / password_change flags
    :param cur_user: acting username; resolved from the current request
        context when not given
    :return: result of `create_or_update` (the new `User`)
    :raises: re-raises any creation error after logging it
    """
    if not cur_user:
        cur_user = getattr(get_current_rhodecode_user(), 'username', None)

    user_data = {
        'username': form_data['username'],
        'password': form_data['password'],
        'email': form_data['email'],
        'firstname': form_data['firstname'],
        'lastname': form_data['lastname'],
        'active': form_data['active'],
        'extern_type': form_data['extern_type'],
        'extern_name': form_data['extern_name'],
        # users created through this path are never admins
        'admin': False,
        'cur_user': cur_user
    }

    if 'create_repo_group' in form_data:
        user_data['create_repo_group'] = str2bool(
            form_data.get('create_repo_group'))

    try:
        if form_data.get('password_change'):
            # force a password change on first login
            user_data['force_password_change'] = True
        return UserModel().create_or_update(**user_data)
    except Exception:
        log.error(traceback.format_exc())
        raise
166
167
167 def update_user(self, user, skip_attrs=None, **kwargs):
168 def update_user(self, user, skip_attrs=None, **kwargs):
168 from rhodecode.lib.auth import get_crypt_password
169 from rhodecode.lib.auth import get_crypt_password
169
170
170 user = self._get_user(user)
171 user = self._get_user(user)
171 if user.username == User.DEFAULT_USER:
172 if user.username == User.DEFAULT_USER:
172 raise DefaultUserException(
173 raise DefaultUserException(
173 "You can't edit this user (`%(username)s`) since it's "
174 "You can't edit this user (`%(username)s`) since it's "
174 "crucial for entire application" % {
175 "crucial for entire application" % {
175 'username': user.username})
176 'username': user.username})
176
177
177 # first store only defaults
178 # first store only defaults
178 user_attrs = {
179 user_attrs = {
179 'updating_user_id': user.user_id,
180 'updating_user_id': user.user_id,
180 'username': user.username,
181 'username': user.username,
181 'password': user.password,
182 'password': user.password,
182 'email': user.email,
183 'email': user.email,
183 'firstname': user.name,
184 'firstname': user.name,
184 'lastname': user.lastname,
185 'lastname': user.lastname,
185 'description': user.description,
186 'description': user.description,
186 'active': user.active,
187 'active': user.active,
187 'admin': user.admin,
188 'admin': user.admin,
188 'extern_name': user.extern_name,
189 'extern_name': user.extern_name,
189 'extern_type': user.extern_type,
190 'extern_type': user.extern_type,
190 'language': user.user_data.get('language')
191 'language': user.user_data.get('language')
191 }
192 }
192
193
193 # in case there's new_password, that comes from form, use it to
194 # in case there's new_password, that comes from form, use it to
194 # store password
195 # store password
195 if kwargs.get('new_password'):
196 if kwargs.get('new_password'):
196 kwargs['password'] = kwargs['new_password']
197 kwargs['password'] = kwargs['new_password']
197
198
198 # cleanups, my_account password change form
199 # cleanups, my_account password change form
199 kwargs.pop('current_password', None)
200 kwargs.pop('current_password', None)
200 kwargs.pop('new_password', None)
201 kwargs.pop('new_password', None)
201
202
202 # cleanups, user edit password change form
203 # cleanups, user edit password change form
203 kwargs.pop('password_confirmation', None)
204 kwargs.pop('password_confirmation', None)
204 kwargs.pop('password_change', None)
205 kwargs.pop('password_change', None)
205
206
206 # create repo group on user creation
207 # create repo group on user creation
207 kwargs.pop('create_repo_group', None)
208 kwargs.pop('create_repo_group', None)
208
209
209 # legacy forms send name, which is the firstname
210 # legacy forms send name, which is the firstname
210 firstname = kwargs.pop('name', None)
211 firstname = kwargs.pop('name', None)
211 if firstname:
212 if firstname:
212 kwargs['firstname'] = firstname
213 kwargs['firstname'] = firstname
213
214
214 for k, v in kwargs.items():
215 for k, v in kwargs.items():
215 # skip if we don't want to update this
216 # skip if we don't want to update this
216 if skip_attrs and k in skip_attrs:
217 if skip_attrs and k in skip_attrs:
217 continue
218 continue
218
219
219 user_attrs[k] = v
220 user_attrs[k] = v
220
221
221 try:
222 try:
222 return self.create_or_update(**user_attrs)
223 return self.create_or_update(**user_attrs)
223 except Exception:
224 except Exception:
224 log.error(traceback.format_exc())
225 log.error(traceback.format_exc())
225 raise
226 raise
226
227
227 def create_or_update(
228 def create_or_update(
228 self, username, password, email, firstname='', lastname='',
229 self, username, password, email, firstname='', lastname='',
229 active=True, admin=False, extern_type=None, extern_name=None,
230 active=True, admin=False, extern_type=None, extern_name=None,
230 cur_user=None, plugin=None, force_password_change=False,
231 cur_user=None, plugin=None, force_password_change=False,
231 allow_to_create_user=True, create_repo_group=None,
232 allow_to_create_user=True, create_repo_group=None,
232 updating_user_id=None, language=None, description='',
233 updating_user_id=None, language=None, description='',
233 strict_creation_check=True):
234 strict_creation_check=True):
234 """
235 """
235 Creates a new instance if not found, or updates current one
236 Creates a new instance if not found, or updates current one
236
237
237 :param username:
238 :param username:
238 :param password:
239 :param password:
239 :param email:
240 :param email:
240 :param firstname:
241 :param firstname:
241 :param lastname:
242 :param lastname:
242 :param active:
243 :param active:
243 :param admin:
244 :param admin:
244 :param extern_type:
245 :param extern_type:
245 :param extern_name:
246 :param extern_name:
246 :param cur_user:
247 :param cur_user:
247 :param plugin: optional plugin this method was called from
248 :param plugin: optional plugin this method was called from
248 :param force_password_change: toggles new or existing user flag
249 :param force_password_change: toggles new or existing user flag
249 for password change
250 for password change
250 :param allow_to_create_user: Defines if the method can actually create
251 :param allow_to_create_user: Defines if the method can actually create
251 new users
252 new users
252 :param create_repo_group: Defines if the method should also
253 :param create_repo_group: Defines if the method should also
253 create an repo group with user name, and owner
254 create an repo group with user name, and owner
254 :param updating_user_id: if we set it up this is the user we want to
255 :param updating_user_id: if we set it up this is the user we want to
255 update this allows to editing username.
256 update this allows to editing username.
256 :param language: language of user from interface.
257 :param language: language of user from interface.
257 :param description: user description
258 :param description: user description
258 :param strict_creation_check: checks for allowed creation license wise etc.
259 :param strict_creation_check: checks for allowed creation license wise etc.
259
260
260 :returns: new User object with injected `is_new_user` attribute.
261 :returns: new User object with injected `is_new_user` attribute.
261 """
262 """
262
263
263 if not cur_user:
264 if not cur_user:
264 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
265 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
265
266
266 from rhodecode.lib.auth import (
267 from rhodecode.lib.auth import (
267 get_crypt_password, check_password)
268 get_crypt_password, check_password)
268 from rhodecode.lib import hooks_base
269 from rhodecode.lib import hooks_base
269
270
270 def _password_change(new_user, password):
271 def _password_change(new_user, password):
271 old_password = new_user.password or ''
272 old_password = new_user.password or ''
272 # empty password
273 # empty password
273 if not old_password:
274 if not old_password:
274 return False
275 return False
275
276
276 # password check is only needed for RhodeCode internal auth calls
277 # password check is only needed for RhodeCode internal auth calls
277 # in case it's a plugin we don't care
278 # in case it's a plugin we don't care
278 if not plugin:
279 if not plugin:
279
280
280 # first check if we gave crypted password back, and if it
281 # first check if we gave crypted password back, and if it
281 # matches it's not password change
282 # matches it's not password change
282 if new_user.password == password:
283 if new_user.password == password:
283 return False
284 return False
284
285
285 password_match = check_password(password, old_password)
286 password_match = check_password(password, old_password)
286 if not password_match:
287 if not password_match:
287 return True
288 return True
288
289
289 return False
290 return False
290
291
291 # read settings on default personal repo group creation
292 # read settings on default personal repo group creation
292 if create_repo_group is None:
293 if create_repo_group is None:
293 default_create_repo_group = RepoGroupModel()\
294 default_create_repo_group = RepoGroupModel()\
294 .get_default_create_personal_repo_group()
295 .get_default_create_personal_repo_group()
295 create_repo_group = default_create_repo_group
296 create_repo_group = default_create_repo_group
296
297
297 user_data = {
298 user_data = {
298 'username': username,
299 'username': username,
299 'password': password,
300 'password': password,
300 'email': email,
301 'email': email,
301 'firstname': firstname,
302 'firstname': firstname,
302 'lastname': lastname,
303 'lastname': lastname,
303 'active': active,
304 'active': active,
304 'admin': admin
305 'admin': admin
305 }
306 }
306
307
307 if updating_user_id:
308 if updating_user_id:
308 log.debug('Checking for existing account in RhodeCode '
309 log.debug('Checking for existing account in RhodeCode '
309 'database with user_id `%s` ', updating_user_id)
310 'database with user_id `%s` ', updating_user_id)
310 user = User.get(updating_user_id)
311 user = User.get(updating_user_id)
311 else:
312 else:
312 log.debug('Checking for existing account in RhodeCode '
313 log.debug('Checking for existing account in RhodeCode '
313 'database with username `%s` ', username)
314 'database with username `%s` ', username)
314 user = User.get_by_username(username, case_insensitive=True)
315 user = User.get_by_username(username, case_insensitive=True)
315
316
316 if user is None:
317 if user is None:
317 # we check internal flag if this method is actually allowed to
318 # we check internal flag if this method is actually allowed to
318 # create new user
319 # create new user
319 if not allow_to_create_user:
320 if not allow_to_create_user:
320 msg = ('Method wants to create new user, but it is not '
321 msg = ('Method wants to create new user, but it is not '
321 'allowed to do so')
322 'allowed to do so')
322 log.warning(msg)
323 log.warning(msg)
323 raise NotAllowedToCreateUserError(msg)
324 raise NotAllowedToCreateUserError(msg)
324
325
325 log.debug('Creating new user %s', username)
326 log.debug('Creating new user %s', username)
326
327
327 # only if we create user that is active
328 # only if we create user that is active
328 new_active_user = active
329 new_active_user = active
329 if new_active_user and strict_creation_check:
330 if new_active_user and strict_creation_check:
330 # raises UserCreationError if it's not allowed for any reason to
331 # raises UserCreationError if it's not allowed for any reason to
331 # create new active user, this also executes pre-create hooks
332 # create new active user, this also executes pre-create hooks
332 hooks_base.check_allowed_create_user(user_data, cur_user, strict_check=True)
333 hooks_base.check_allowed_create_user(user_data, cur_user, strict_check=True)
333 events.trigger(events.UserPreCreate(user_data))
334 events.trigger(events.UserPreCreate(user_data))
334 new_user = User()
335 new_user = User()
335 edit = False
336 edit = False
336 else:
337 else:
337 log.debug('updating user `%s`', username)
338 log.debug('updating user `%s`', username)
338 events.trigger(events.UserPreUpdate(user, user_data))
339 events.trigger(events.UserPreUpdate(user, user_data))
339 new_user = user
340 new_user = user
340 edit = True
341 edit = True
341
342
342 # we're not allowed to edit default user
343 # we're not allowed to edit default user
343 if user.username == User.DEFAULT_USER:
344 if user.username == User.DEFAULT_USER:
344 raise DefaultUserException(
345 raise DefaultUserException(
345 "You can't edit this user (`%(username)s`) since it's "
346 "You can't edit this user (`%(username)s`) since it's "
346 "crucial for entire application"
347 "crucial for entire application"
347 % {'username': user.username})
348 % {'username': user.username})
348
349
349 # inject special attribute that will tell us if User is new or old
350 # inject special attribute that will tell us if User is new or old
350 new_user.is_new_user = not edit
351 new_user.is_new_user = not edit
351 # for users that didn's specify auth type, we use RhodeCode built in
352 # for users that didn's specify auth type, we use RhodeCode built in
352 from rhodecode.authentication.plugins import auth_rhodecode
353 from rhodecode.authentication.plugins import auth_rhodecode
353 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.uid
354 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.uid
354 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.uid
355 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.uid
355
356
356 try:
357 try:
357 new_user.username = username
358 new_user.username = username
358 new_user.admin = admin
359 new_user.admin = admin
359 new_user.email = email
360 new_user.email = email
360 new_user.active = active
361 new_user.active = active
361 new_user.extern_name = safe_unicode(extern_name)
362 new_user.extern_name = safe_str(extern_name)
362 new_user.extern_type = safe_unicode(extern_type)
363 new_user.extern_type = safe_str(extern_type)
363 new_user.name = firstname
364 new_user.name = firstname
364 new_user.lastname = lastname
365 new_user.lastname = lastname
365 new_user.description = description
366 new_user.description = description
366
367
367 # set password only if creating an user or password is changed
368 # set password only if creating an user or password is changed
368 if not edit or _password_change(new_user, password):
369 if not edit or _password_change(new_user, password):
369 reason = 'new password' if edit else 'new user'
370 reason = 'new password' if edit else 'new user'
370 log.debug('Updating password reason=>%s', reason)
371 log.debug('Updating password reason=>%s', reason)
371 new_user.password = get_crypt_password(password) if password else None
372 new_user.password = get_crypt_password(password) if password else None
372
373
373 if force_password_change:
374 if force_password_change:
374 new_user.update_userdata(force_password_change=True)
375 new_user.update_userdata(force_password_change=True)
375 if language:
376 if language:
376 new_user.update_userdata(language=language)
377 new_user.update_userdata(language=language)
377 new_user.update_userdata(notification_status=True)
378 new_user.update_userdata(notification_status=True)
378
379
379 self.sa.add(new_user)
380 self.sa.add(new_user)
380
381
381 if not edit and create_repo_group:
382 if not edit and create_repo_group:
382 RepoGroupModel().create_personal_repo_group(
383 RepoGroupModel().create_personal_repo_group(
383 new_user, commit_early=False)
384 new_user, commit_early=False)
384
385
385 if not edit:
386 if not edit:
386 # add the RSS token
387 # add the RSS token
387 self.add_auth_token(
388 self.add_auth_token(
388 user=username, lifetime_minutes=-1,
389 user=username, lifetime_minutes=-1,
389 role=self.auth_token_role.ROLE_FEED,
390 role=self.auth_token_role.ROLE_FEED,
390 description=u'Generated feed token')
391 description=u'Generated feed token')
391
392
392 kwargs = new_user.get_dict()
393 kwargs = new_user.get_dict()
393 # backward compat, require api_keys present
394 # backward compat, require api_keys present
394 kwargs['api_keys'] = kwargs['auth_tokens']
395 kwargs['api_keys'] = kwargs['auth_tokens']
395 hooks_base.create_user(created_by=cur_user, **kwargs)
396 hooks_base.create_user(created_by=cur_user, **kwargs)
396 events.trigger(events.UserPostCreate(user_data))
397 events.trigger(events.UserPostCreate(user_data))
397 return new_user
398 return new_user
398 except (DatabaseError,):
399 except (DatabaseError,):
399 log.error(traceback.format_exc())
400 log.error(traceback.format_exc())
400 raise
401 raise
401
402
402 def create_registration(self, form_data,
403 def create_registration(self, form_data,
403 extern_name='rhodecode', extern_type='rhodecode'):
404 extern_name='rhodecode', extern_type='rhodecode'):
404 from rhodecode.model.notification import NotificationModel
405 from rhodecode.model.notification import NotificationModel
405 from rhodecode.model.notification import EmailNotificationModel
406 from rhodecode.model.notification import EmailNotificationModel
406
407
407 try:
408 try:
408 form_data['admin'] = False
409 form_data['admin'] = False
409 form_data['extern_name'] = extern_name
410 form_data['extern_name'] = extern_name
410 form_data['extern_type'] = extern_type
411 form_data['extern_type'] = extern_type
411 new_user = self.create(form_data)
412 new_user = self.create(form_data)
412
413
413 self.sa.add(new_user)
414 self.sa.add(new_user)
414 self.sa.flush()
415 self.sa.flush()
415
416
416 user_data = new_user.get_dict()
417 user_data = new_user.get_dict()
417 user_data.update({
418 user_data.update({
418 'first_name': user_data.get('firstname'),
419 'first_name': user_data.get('firstname'),
419 'last_name': user_data.get('lastname'),
420 'last_name': user_data.get('lastname'),
420 })
421 })
421 kwargs = {
422 kwargs = {
422 # use SQLALCHEMY safe dump of user data
423 # use SQLALCHEMY safe dump of user data
423 'user': AttributeDict(user_data),
424 'user': AttributeDict(user_data),
424 'date': datetime.datetime.now()
425 'date': datetime.datetime.now()
425 }
426 }
426 notification_type = EmailNotificationModel.TYPE_REGISTRATION
427 notification_type = EmailNotificationModel.TYPE_REGISTRATION
427
428
428 # create notification objects, and emails
429 # create notification objects, and emails
429 NotificationModel().create(
430 NotificationModel().create(
430 created_by=new_user,
431 created_by=new_user,
431 notification_subject='', # Filled in based on the notification_type
432 notification_subject='', # Filled in based on the notification_type
432 notification_body='', # Filled in based on the notification_type
433 notification_body='', # Filled in based on the notification_type
433 notification_type=notification_type,
434 notification_type=notification_type,
434 recipients=None, # all admins
435 recipients=None, # all admins
435 email_kwargs=kwargs,
436 email_kwargs=kwargs,
436 )
437 )
437
438
438 return new_user
439 return new_user
439 except Exception:
440 except Exception:
440 log.error(traceback.format_exc())
441 log.error(traceback.format_exc())
441 raise
442 raise
442
443
443 def _handle_user_repos(self, username, repositories, handle_user,
444 def _handle_user_repos(self, username, repositories, handle_user,
444 handle_mode=None):
445 handle_mode=None):
445
446
446 left_overs = True
447 left_overs = True
447
448
448 from rhodecode.model.repo import RepoModel
449 from rhodecode.model.repo import RepoModel
449
450
450 if handle_mode == 'detach':
451 if handle_mode == 'detach':
451 for obj in repositories:
452 for obj in repositories:
452 obj.user = handle_user
453 obj.user = handle_user
453 # set description we know why we super admin now owns
454 # set description we know why we super admin now owns
454 # additional repositories that were orphaned !
455 # additional repositories that were orphaned !
455 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
456 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
456 self.sa.add(obj)
457 self.sa.add(obj)
457 left_overs = False
458 left_overs = False
458 elif handle_mode == 'delete':
459 elif handle_mode == 'delete':
459 for obj in repositories:
460 for obj in repositories:
460 RepoModel().delete(obj, forks='detach')
461 RepoModel().delete(obj, forks='detach')
461 left_overs = False
462 left_overs = False
462
463
463 # if nothing is done we have left overs left
464 # if nothing is done we have left overs left
464 return left_overs
465 return left_overs
465
466
466 def _handle_user_repo_groups(self, username, repository_groups, handle_user,
467 def _handle_user_repo_groups(self, username, repository_groups, handle_user,
467 handle_mode=None):
468 handle_mode=None):
468
469
469 left_overs = True
470 left_overs = True
470
471
471 from rhodecode.model.repo_group import RepoGroupModel
472 from rhodecode.model.repo_group import RepoGroupModel
472
473
473 if handle_mode == 'detach':
474 if handle_mode == 'detach':
474 for r in repository_groups:
475 for r in repository_groups:
475 r.user = handle_user
476 r.user = handle_user
476 # set description we know why we super admin now owns
477 # set description we know why we super admin now owns
477 # additional repositories that were orphaned !
478 # additional repositories that were orphaned !
478 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
479 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
479 r.personal = False
480 r.personal = False
480 self.sa.add(r)
481 self.sa.add(r)
481 left_overs = False
482 left_overs = False
482 elif handle_mode == 'delete':
483 elif handle_mode == 'delete':
483 for r in repository_groups:
484 for r in repository_groups:
484 RepoGroupModel().delete(r)
485 RepoGroupModel().delete(r)
485 left_overs = False
486 left_overs = False
486
487
487 # if nothing is done we have left overs left
488 # if nothing is done we have left overs left
488 return left_overs
489 return left_overs
489
490
490 def _handle_user_user_groups(self, username, user_groups, handle_user,
491 def _handle_user_user_groups(self, username, user_groups, handle_user,
491 handle_mode=None):
492 handle_mode=None):
492
493
493 left_overs = True
494 left_overs = True
494
495
495 from rhodecode.model.user_group import UserGroupModel
496 from rhodecode.model.user_group import UserGroupModel
496
497
497 if handle_mode == 'detach':
498 if handle_mode == 'detach':
498 for r in user_groups:
499 for r in user_groups:
499 for user_user_group_to_perm in r.user_user_group_to_perm:
500 for user_user_group_to_perm in r.user_user_group_to_perm:
500 if user_user_group_to_perm.user.username == username:
501 if user_user_group_to_perm.user.username == username:
501 user_user_group_to_perm.user = handle_user
502 user_user_group_to_perm.user = handle_user
502 r.user = handle_user
503 r.user = handle_user
503 # set description we know why we super admin now owns
504 # set description we know why we super admin now owns
504 # additional repositories that were orphaned !
505 # additional repositories that were orphaned !
505 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
506 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
506 self.sa.add(r)
507 self.sa.add(r)
507 left_overs = False
508 left_overs = False
508 elif handle_mode == 'delete':
509 elif handle_mode == 'delete':
509 for r in user_groups:
510 for r in user_groups:
510 UserGroupModel().delete(r)
511 UserGroupModel().delete(r)
511 left_overs = False
512 left_overs = False
512
513
513 # if nothing is done we have left overs left
514 # if nothing is done we have left overs left
514 return left_overs
515 return left_overs
515
516
516 def _handle_user_pull_requests(self, username, pull_requests, handle_user,
517 def _handle_user_pull_requests(self, username, pull_requests, handle_user,
517 handle_mode=None):
518 handle_mode=None):
518 left_overs = True
519 left_overs = True
519
520
520 from rhodecode.model.pull_request import PullRequestModel
521 from rhodecode.model.pull_request import PullRequestModel
521
522
522 if handle_mode == 'detach':
523 if handle_mode == 'detach':
523 for pr in pull_requests:
524 for pr in pull_requests:
524 pr.user_id = handle_user.user_id
525 pr.user_id = handle_user.user_id
525 # set description we know why we super admin now owns
526 # set description we know why we super admin now owns
526 # additional repositories that were orphaned !
527 # additional repositories that were orphaned !
527 pr.description += ' \n::detached pull requests from deleted user: %s' % (username,)
528 pr.description += ' \n::detached pull requests from deleted user: %s' % (username,)
528 self.sa.add(pr)
529 self.sa.add(pr)
529 left_overs = False
530 left_overs = False
530 elif handle_mode == 'delete':
531 elif handle_mode == 'delete':
531 for pr in pull_requests:
532 for pr in pull_requests:
532 PullRequestModel().delete(pr)
533 PullRequestModel().delete(pr)
533
534
534 left_overs = False
535 left_overs = False
535
536
536 # if nothing is done we have left overs left
537 # if nothing is done we have leftovers left
537 return left_overs
538 return left_overs
538
539
539 def _handle_user_artifacts(self, username, artifacts, handle_user,
540 def _handle_user_artifacts(self, username, artifacts, handle_user,
540 handle_mode=None):
541 handle_mode=None):
541
542
542 left_overs = True
543 left_overs = True
543
544
544 if handle_mode == 'detach':
545 if handle_mode == 'detach':
545 for a in artifacts:
546 for a in artifacts:
546 a.upload_user = handle_user
547 a.upload_user = handle_user
547 # set description we know why we super admin now owns
548 # set description we know why we super admin now owns
548 # additional artifacts that were orphaned !
549 # additional artifacts that were orphaned !
549 a.file_description += ' \n::detached artifact from deleted user: %s' % (username,)
550 a.file_description += ' \n::detached artifact from deleted user: %s' % (username,)
550 self.sa.add(a)
551 self.sa.add(a)
551 left_overs = False
552 left_overs = False
552 elif handle_mode == 'delete':
553 elif handle_mode == 'delete':
553 from rhodecode.apps.file_store import utils as store_utils
554 from rhodecode.apps.file_store import utils as store_utils
554 request = get_current_request()
555 request = get_current_request()
555 storage = store_utils.get_file_storage(request.registry.settings)
556 storage = store_utils.get_file_storage(request.registry.settings)
556 for a in artifacts:
557 for a in artifacts:
557 file_uid = a.file_uid
558 file_uid = a.file_uid
558 storage.delete(file_uid)
559 storage.delete(file_uid)
559 self.sa.delete(a)
560 self.sa.delete(a)
560
561
561 left_overs = False
562 left_overs = False
562
563
563 # if nothing is done we have left overs left
564 # if nothing is done we have left overs left
564 return left_overs
565 return left_overs
565
566
566 def delete(self, user, cur_user=None, handle_repos=None,
567 def delete(self, user, cur_user=None, handle_repos=None,
567 handle_repo_groups=None, handle_user_groups=None,
568 handle_repo_groups=None, handle_user_groups=None,
568 handle_pull_requests=None, handle_artifacts=None, handle_new_owner=None):
569 handle_pull_requests=None, handle_artifacts=None, handle_new_owner=None):
569 from rhodecode.lib import hooks_base
570 from rhodecode.lib import hooks_base
570
571
571 if not cur_user:
572 if not cur_user:
572 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
573 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
573
574
574 user = self._get_user(user)
575 user = self._get_user(user)
575
576
576 try:
577 try:
577 if user.username == User.DEFAULT_USER:
578 if user.username == User.DEFAULT_USER:
578 raise DefaultUserException(
579 raise DefaultUserException(
579 u"You can't remove this user since it's"
580 u"You can't remove this user since it's"
580 u" crucial for entire application")
581 u" crucial for entire application")
581 handle_user = handle_new_owner or self.cls.get_first_super_admin()
582 handle_user = handle_new_owner or self.cls.get_first_super_admin()
582 log.debug('New detached objects owner %s', handle_user)
583 log.debug('New detached objects owner %s', handle_user)
583
584
584 left_overs = self._handle_user_repos(
585 left_overs = self._handle_user_repos(
585 user.username, user.repositories, handle_user, handle_repos)
586 user.username, user.repositories, handle_user, handle_repos)
586 if left_overs and user.repositories:
587 if left_overs and user.repositories:
587 repos = [x.repo_name for x in user.repositories]
588 repos = [x.repo_name for x in user.repositories]
588 raise UserOwnsReposException(
589 raise UserOwnsReposException(
589 u'user "%(username)s" still owns %(len_repos)s repositories and cannot be '
590 u'user "%(username)s" still owns %(len_repos)s repositories and cannot be '
590 u'removed. Switch owners or remove those repositories:%(list_repos)s'
591 u'removed. Switch owners or remove those repositories:%(list_repos)s'
591 % {'username': user.username, 'len_repos': len(repos),
592 % {'username': user.username, 'len_repos': len(repos),
592 'list_repos': ', '.join(repos)})
593 'list_repos': ', '.join(repos)})
593
594
594 left_overs = self._handle_user_repo_groups(
595 left_overs = self._handle_user_repo_groups(
595 user.username, user.repository_groups, handle_user, handle_repo_groups)
596 user.username, user.repository_groups, handle_user, handle_repo_groups)
596 if left_overs and user.repository_groups:
597 if left_overs and user.repository_groups:
597 repo_groups = [x.group_name for x in user.repository_groups]
598 repo_groups = [x.group_name for x in user.repository_groups]
598 raise UserOwnsRepoGroupsException(
599 raise UserOwnsRepoGroupsException(
599 u'user "%(username)s" still owns %(len_repo_groups)s repository groups and cannot be '
600 u'user "%(username)s" still owns %(len_repo_groups)s repository groups and cannot be '
600 u'removed. Switch owners or remove those repository groups:%(list_repo_groups)s'
601 u'removed. Switch owners or remove those repository groups:%(list_repo_groups)s'
601 % {'username': user.username, 'len_repo_groups': len(repo_groups),
602 % {'username': user.username, 'len_repo_groups': len(repo_groups),
602 'list_repo_groups': ', '.join(repo_groups)})
603 'list_repo_groups': ', '.join(repo_groups)})
603
604
604 left_overs = self._handle_user_user_groups(
605 left_overs = self._handle_user_user_groups(
605 user.username, user.user_groups, handle_user, handle_user_groups)
606 user.username, user.user_groups, handle_user, handle_user_groups)
606 if left_overs and user.user_groups:
607 if left_overs and user.user_groups:
607 user_groups = [x.users_group_name for x in user.user_groups]
608 user_groups = [x.users_group_name for x in user.user_groups]
608 raise UserOwnsUserGroupsException(
609 raise UserOwnsUserGroupsException(
609 u'user "%s" still owns %s user groups and cannot be '
610 u'user "%s" still owns %s user groups and cannot be '
610 u'removed. Switch owners or remove those user groups:%s'
611 u'removed. Switch owners or remove those user groups:%s'
611 % (user.username, len(user_groups), ', '.join(user_groups)))
612 % (user.username, len(user_groups), ', '.join(user_groups)))
612
613
613 left_overs = self._handle_user_pull_requests(
614 left_overs = self._handle_user_pull_requests(
614 user.username, user.user_pull_requests, handle_user, handle_pull_requests)
615 user.username, user.user_pull_requests, handle_user, handle_pull_requests)
615 if left_overs and user.user_pull_requests:
616 if left_overs and user.user_pull_requests:
616 pull_requests = ['!{}'.format(x.pull_request_id) for x in user.user_pull_requests]
617 pull_requests = ['!{}'.format(x.pull_request_id) for x in user.user_pull_requests]
617 raise UserOwnsPullRequestsException(
618 raise UserOwnsPullRequestsException(
618 u'user "%s" still owns %s pull requests and cannot be '
619 u'user "%s" still owns %s pull requests and cannot be '
619 u'removed. Switch owners or remove those pull requests:%s'
620 u'removed. Switch owners or remove those pull requests:%s'
620 % (user.username, len(pull_requests), ', '.join(pull_requests)))
621 % (user.username, len(pull_requests), ', '.join(pull_requests)))
621
622
622 left_overs = self._handle_user_artifacts(
623 left_overs = self._handle_user_artifacts(
623 user.username, user.artifacts, handle_user, handle_artifacts)
624 user.username, user.artifacts, handle_user, handle_artifacts)
624 if left_overs and user.artifacts:
625 if left_overs and user.artifacts:
625 artifacts = [x.file_uid for x in user.artifacts]
626 artifacts = [x.file_uid for x in user.artifacts]
626 raise UserOwnsArtifactsException(
627 raise UserOwnsArtifactsException(
627 u'user "%s" still owns %s artifacts and cannot be '
628 u'user "%s" still owns %s artifacts and cannot be '
628 u'removed. Switch owners or remove those artifacts:%s'
629 u'removed. Switch owners or remove those artifacts:%s'
629 % (user.username, len(artifacts), ', '.join(artifacts)))
630 % (user.username, len(artifacts), ', '.join(artifacts)))
630
631
631 user_data = user.get_dict() # fetch user data before expire
632 user_data = user.get_dict() # fetch user data before expire
632
633
633 # we might change the user data with detach/delete, make sure
634 # we might change the user data with detach/delete, make sure
634 # the object is marked as expired before actually deleting !
635 # the object is marked as expired before actually deleting !
635 self.sa.expire(user)
636 self.sa.expire(user)
636 self.sa.delete(user)
637 self.sa.delete(user)
637
638
638 hooks_base.delete_user(deleted_by=cur_user, **user_data)
639 hooks_base.delete_user(deleted_by=cur_user, **user_data)
639 except Exception:
640 except Exception:
640 log.error(traceback.format_exc())
641 log.error(traceback.format_exc())
641 raise
642 raise
642
643
643 def reset_password_link(self, data, pwd_reset_url):
644 def reset_password_link(self, data, pwd_reset_url):
644 from rhodecode.lib.celerylib import tasks, run_task
645 from rhodecode.lib.celerylib import tasks, run_task
645 from rhodecode.model.notification import EmailNotificationModel
646 from rhodecode.model.notification import EmailNotificationModel
646 user_email = data['email']
647 user_email = data['email']
647 try:
648 try:
648 user = User.get_by_email(user_email)
649 user = User.get_by_email(user_email)
649 if user:
650 if user:
650 log.debug('password reset user found %s', user)
651 log.debug('password reset user found %s', user)
651
652
652 email_kwargs = {
653 email_kwargs = {
653 'password_reset_url': pwd_reset_url,
654 'password_reset_url': pwd_reset_url,
654 'user': user,
655 'user': user,
655 'email': user_email,
656 'email': user_email,
656 'date': datetime.datetime.now(),
657 'date': datetime.datetime.now(),
657 'first_admin_email': User.get_first_super_admin().email
658 'first_admin_email': User.get_first_super_admin().email
658 }
659 }
659
660
660 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
661 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
661 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
662 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
662
663
663 recipients = [user_email]
664 recipients = [user_email]
664
665
665 action_logger_generic(
666 action_logger_generic(
666 'sending password reset email to user: {}'.format(
667 'sending password reset email to user: {}'.format(
667 user), namespace='security.password_reset')
668 user), namespace='security.password_reset')
668
669
669 run_task(tasks.send_email, recipients, subject,
670 run_task(tasks.send_email, recipients, subject,
670 email_body_plaintext, email_body)
671 email_body_plaintext, email_body)
671
672
672 else:
673 else:
673 log.debug("password reset email %s not found", user_email)
674 log.debug("password reset email %s not found", user_email)
674 except Exception:
675 except Exception:
675 log.error(traceback.format_exc())
676 log.error(traceback.format_exc())
676 return False
677 return False
677
678
678 return True
679 return True
679
680
680 def reset_password(self, data):
681 def reset_password(self, data):
681 from rhodecode.lib.celerylib import tasks, run_task
682 from rhodecode.lib.celerylib import tasks, run_task
682 from rhodecode.model.notification import EmailNotificationModel
683 from rhodecode.model.notification import EmailNotificationModel
683 from rhodecode.lib import auth
684 from rhodecode.lib import auth
684 user_email = data['email']
685 user_email = data['email']
685 pre_db = True
686 pre_db = True
686 try:
687 try:
687 user = User.get_by_email(user_email)
688 user = User.get_by_email(user_email)
688 new_passwd = auth.PasswordGenerator().gen_password(
689 new_passwd = auth.PasswordGenerator().gen_password(
689 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
690 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
690 if user:
691 if user:
691 user.password = auth.get_crypt_password(new_passwd)
692 user.password = auth.get_crypt_password(new_passwd)
692 # also force this user to reset his password !
693 # also force this user to reset his password !
693 user.update_userdata(force_password_change=True)
694 user.update_userdata(force_password_change=True)
694
695
695 Session().add(user)
696 Session().add(user)
696
697
697 # now delete the token in question
698 # now delete the token in question
698 UserApiKeys = AuthTokenModel.cls
699 UserApiKeys = AuthTokenModel.cls
699 UserApiKeys().query().filter(
700 UserApiKeys().query().filter(
700 UserApiKeys.api_key == data['token']).delete()
701 UserApiKeys.api_key == data['token']).delete()
701
702
702 Session().commit()
703 Session().commit()
703 log.info('successfully reset password for `%s`', user_email)
704 log.info('successfully reset password for `%s`', user_email)
704
705
705 if new_passwd is None:
706 if new_passwd is None:
706 raise Exception('unable to generate new password')
707 raise Exception('unable to generate new password')
707
708
708 pre_db = False
709 pre_db = False
709
710
710 email_kwargs = {
711 email_kwargs = {
711 'new_password': new_passwd,
712 'new_password': new_passwd,
712 'user': user,
713 'user': user,
713 'email': user_email,
714 'email': user_email,
714 'date': datetime.datetime.now(),
715 'date': datetime.datetime.now(),
715 'first_admin_email': User.get_first_super_admin().email
716 'first_admin_email': User.get_first_super_admin().email
716 }
717 }
717
718
718 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
719 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
719 EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION,
720 EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION,
720 **email_kwargs)
721 **email_kwargs)
721
722
722 recipients = [user_email]
723 recipients = [user_email]
723
724
724 action_logger_generic(
725 action_logger_generic(
725 'sent new password to user: {} with email: {}'.format(
726 'sent new password to user: {} with email: {}'.format(
726 user, user_email), namespace='security.password_reset')
727 user, user_email), namespace='security.password_reset')
727
728
728 run_task(tasks.send_email, recipients, subject,
729 run_task(tasks.send_email, recipients, subject,
729 email_body_plaintext, email_body)
730 email_body_plaintext, email_body)
730
731
731 except Exception:
732 except Exception:
732 log.error('Failed to update user password')
733 log.error('Failed to update user password')
733 log.error(traceback.format_exc())
734 log.error(traceback.format_exc())
734 if pre_db:
735 if pre_db:
735 # we rollback only if local db stuff fails. If it goes into
736 # we rollback only if local db stuff fails. If it goes into
736 # run_task, we're pass rollback state this wouldn't work then
737 # run_task, we're pass rollback state this wouldn't work then
737 Session().rollback()
738 Session().rollback()
738
739
739 return True
740 return True
740
741
741 def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
742 def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
742 """
743 """
743 Fetches auth_user by user_id,or api_key if present.
744 Fetches auth_user by user_id,or api_key if present.
744 Fills auth_user attributes with those taken from database.
745 Fills auth_user attributes with those taken from database.
745 Additionally set's is_authenitated if lookup fails
746 Additionally set's is_authenitated if lookup fails
746 present in database
747 present in database
747
748
748 :param auth_user: instance of user to set attributes
749 :param auth_user: instance of user to set attributes
749 :param user_id: user id to fetch by
750 :param user_id: user id to fetch by
750 :param api_key: api key to fetch by
751 :param api_key: api key to fetch by
751 :param username: username to fetch by
752 :param username: username to fetch by
752 """
753 """
753 def token_obfuscate(token):
754 def token_obfuscate(token):
754 if token:
755 if token:
755 return token[:4] + "****"
756 return token[:4] + "****"
756
757
757 if user_id is None and api_key is None and username is None:
758 if user_id is None and api_key is None and username is None:
758 raise Exception('You need to pass user_id, api_key or username')
759 raise Exception('You need to pass user_id, api_key or username')
759
760
760 log.debug(
761 log.debug(
761 'AuthUser: fill data execution based on: '
762 'AuthUser: fill data execution based on: '
762 'user_id:%s api_key:%s username:%s', user_id, api_key, username)
763 'user_id:%s api_key:%s username:%s', user_id, api_key, username)
763 try:
764 try:
764 dbuser = None
765 dbuser = None
765 if user_id:
766 if user_id:
766 dbuser = self.get(user_id)
767 dbuser = self.get(user_id)
767 elif api_key:
768 elif api_key:
768 dbuser = self.get_by_auth_token(api_key)
769 dbuser = self.get_by_auth_token(api_key)
769 elif username:
770 elif username:
770 dbuser = self.get_by_username(username)
771 dbuser = self.get_by_username(username)
771
772
772 if not dbuser:
773 if not dbuser:
773 log.warning(
774 log.warning(
774 'Unable to lookup user by id:%s api_key:%s username:%s',
775 'Unable to lookup user by id:%s api_key:%s username:%s',
775 user_id, token_obfuscate(api_key), username)
776 user_id, token_obfuscate(api_key), username)
776 return False
777 return False
777 if not dbuser.active:
778 if not dbuser.active:
778 log.debug('User `%s:%s` is inactive, skipping fill data',
779 log.debug('User `%s:%s` is inactive, skipping fill data',
779 username, user_id)
780 username, user_id)
780 return False
781 return False
781
782
782 log.debug('AuthUser: filling found user:%s data', dbuser)
783 log.debug('AuthUser: filling found user:%s data', dbuser)
783
784
784 attrs = {
785 attrs = {
785 'user_id': dbuser.user_id,
786 'user_id': dbuser.user_id,
786 'username': dbuser.username,
787 'username': dbuser.username,
787 'name': dbuser.name,
788 'name': dbuser.name,
788 'first_name': dbuser.first_name,
789 'first_name': dbuser.first_name,
789 'firstname': dbuser.firstname,
790 'firstname': dbuser.firstname,
790 'last_name': dbuser.last_name,
791 'last_name': dbuser.last_name,
791 'lastname': dbuser.lastname,
792 'lastname': dbuser.lastname,
792 'admin': dbuser.admin,
793 'admin': dbuser.admin,
793 'active': dbuser.active,
794 'active': dbuser.active,
794
795
795 'email': dbuser.email,
796 'email': dbuser.email,
796 'emails': dbuser.emails_cached(),
797 'emails': dbuser.emails_cached(),
797 'short_contact': dbuser.short_contact,
798 'short_contact': dbuser.short_contact,
798 'full_contact': dbuser.full_contact,
799 'full_contact': dbuser.full_contact,
799 'full_name': dbuser.full_name,
800 'full_name': dbuser.full_name,
800 'full_name_or_username': dbuser.full_name_or_username,
801 'full_name_or_username': dbuser.full_name_or_username,
801
802
802 '_api_key': dbuser._api_key,
803 '_api_key': dbuser._api_key,
803 '_user_data': dbuser._user_data,
804 '_user_data': dbuser._user_data,
804
805
805 'created_on': dbuser.created_on,
806 'created_on': dbuser.created_on,
806 'extern_name': dbuser.extern_name,
807 'extern_name': dbuser.extern_name,
807 'extern_type': dbuser.extern_type,
808 'extern_type': dbuser.extern_type,
808
809
809 'inherit_default_permissions': dbuser.inherit_default_permissions,
810 'inherit_default_permissions': dbuser.inherit_default_permissions,
810
811
811 'language': dbuser.language,
812 'language': dbuser.language,
812 'last_activity': dbuser.last_activity,
813 'last_activity': dbuser.last_activity,
813 'last_login': dbuser.last_login,
814 'last_login': dbuser.last_login,
814 'password': dbuser.password,
815 'password': dbuser.password,
815 }
816 }
816 auth_user.__dict__.update(attrs)
817 auth_user.__dict__.update(attrs)
817 except Exception:
818 except Exception:
818 log.error(traceback.format_exc())
819 log.error(traceback.format_exc())
819 auth_user.is_authenticated = False
820 auth_user.is_authenticated = False
820 return False
821 return False
821
822
822 return True
823 return True
823
824
824 def has_perm(self, user, perm):
825 def has_perm(self, user, perm):
825 perm = self._get_perm(perm)
826 perm = self._get_perm(perm)
826 user = self._get_user(user)
827 user = self._get_user(user)
827
828
828 return UserToPerm.query().filter(UserToPerm.user == user)\
829 return UserToPerm.query().filter(UserToPerm.user == user)\
829 .filter(UserToPerm.permission == perm).scalar() is not None
830 .filter(UserToPerm.permission == perm).scalar() is not None
830
831
831 def grant_perm(self, user, perm):
832 def grant_perm(self, user, perm):
832 """
833 """
833 Grant user global permissions
834 Grant user global permissions
834
835
835 :param user:
836 :param user:
836 :param perm:
837 :param perm:
837 """
838 """
838 user = self._get_user(user)
839 user = self._get_user(user)
839 perm = self._get_perm(perm)
840 perm = self._get_perm(perm)
840 # if this permission is already granted skip it
841 # if this permission is already granted skip it
841 _perm = UserToPerm.query()\
842 _perm = UserToPerm.query()\
842 .filter(UserToPerm.user == user)\
843 .filter(UserToPerm.user == user)\
843 .filter(UserToPerm.permission == perm)\
844 .filter(UserToPerm.permission == perm)\
844 .scalar()
845 .scalar()
845 if _perm:
846 if _perm:
846 return
847 return
847 new = UserToPerm()
848 new = UserToPerm()
848 new.user = user
849 new.user = user
849 new.permission = perm
850 new.permission = perm
850 self.sa.add(new)
851 self.sa.add(new)
851 return new
852 return new
852
853
853 def revoke_perm(self, user, perm):
854 def revoke_perm(self, user, perm):
854 """
855 """
855 Revoke users global permissions
856 Revoke users global permissions
856
857
857 :param user:
858 :param user:
858 :param perm:
859 :param perm:
859 """
860 """
860 user = self._get_user(user)
861 user = self._get_user(user)
861 perm = self._get_perm(perm)
862 perm = self._get_perm(perm)
862
863
863 obj = UserToPerm.query()\
864 obj = UserToPerm.query()\
864 .filter(UserToPerm.user == user)\
865 .filter(UserToPerm.user == user)\
865 .filter(UserToPerm.permission == perm)\
866 .filter(UserToPerm.permission == perm)\
866 .scalar()
867 .scalar()
867 if obj:
868 if obj:
868 self.sa.delete(obj)
869 self.sa.delete(obj)
869
870
870 def add_extra_email(self, user, email):
871 def add_extra_email(self, user, email):
871 """
872 """
872 Adds email address to UserEmailMap
873 Adds email address to UserEmailMap
873
874
874 :param user:
875 :param user:
875 :param email:
876 :param email:
876 """
877 """
877
878
878 user = self._get_user(user)
879 user = self._get_user(user)
879
880
880 obj = UserEmailMap()
881 obj = UserEmailMap()
881 obj.user = user
882 obj.user = user
882 obj.email = email
883 obj.email = email
883 self.sa.add(obj)
884 self.sa.add(obj)
884 return obj
885 return obj
885
886
886 def delete_extra_email(self, user, email_id):
887 def delete_extra_email(self, user, email_id):
887 """
888 """
888 Removes email address from UserEmailMap
889 Removes email address from UserEmailMap
889
890
890 :param user:
891 :param user:
891 :param email_id:
892 :param email_id:
892 """
893 """
893 user = self._get_user(user)
894 user = self._get_user(user)
894 obj = UserEmailMap.query().get(email_id)
895 obj = UserEmailMap.query().get(email_id)
895 if obj and obj.user_id == user.user_id:
896 if obj and obj.user_id == user.user_id:
896 self.sa.delete(obj)
897 self.sa.delete(obj)
897
898
898 def parse_ip_range(self, ip_range):
899 def parse_ip_range(self, ip_range):
899 ip_list = []
900 ip_list = []
900
901
901 def make_unique(value):
902 def make_unique(value):
902 seen = []
903 seen = []
903 return [c for c in value if not (c in seen or seen.append(c))]
904 return [c for c in value if not (c in seen or seen.append(c))]
904
905
905 # firsts split by commas
906 # firsts split by commas
906 for ip_range in ip_range.split(','):
907 for ip_range in ip_range.split(','):
907 if not ip_range:
908 if not ip_range:
908 continue
909 continue
909 ip_range = ip_range.strip()
910 ip_range = ip_range.strip()
910 if '-' in ip_range:
911 if '-' in ip_range:
911 start_ip, end_ip = ip_range.split('-', 1)
912 start_ip, end_ip = ip_range.split('-', 1)
912 start_ip = ipaddress.ip_address(safe_unicode(start_ip.strip()))
913 start_ip = ipaddress.ip_address(safe_str(start_ip.strip()))
913 end_ip = ipaddress.ip_address(safe_unicode(end_ip.strip()))
914 end_ip = ipaddress.ip_address(safe_str(end_ip.strip()))
914 parsed_ip_range = []
915 parsed_ip_range = []
915
916
916 for index in range(int(start_ip), int(end_ip) + 1):
917 for index in range(int(start_ip), int(end_ip) + 1):
917 new_ip = ipaddress.ip_address(index)
918 new_ip = ipaddress.ip_address(index)
918 parsed_ip_range.append(str(new_ip))
919 parsed_ip_range.append(str(new_ip))
919 ip_list.extend(parsed_ip_range)
920 ip_list.extend(parsed_ip_range)
920 else:
921 else:
921 ip_list.append(ip_range)
922 ip_list.append(ip_range)
922
923
923 return make_unique(ip_list)
924 return make_unique(ip_list)
924
925
925 def add_extra_ip(self, user, ip, description=None):
926 def add_extra_ip(self, user, ip, description=None):
926 """
927 """
927 Adds ip address to UserIpMap
928 Adds ip address to UserIpMap
928
929
929 :param user:
930 :param user:
930 :param ip:
931 :param ip:
931 """
932 """
932
933
933 user = self._get_user(user)
934 user = self._get_user(user)
934 obj = UserIpMap()
935 obj = UserIpMap()
935 obj.user = user
936 obj.user = user
936 obj.ip_addr = ip
937 obj.ip_addr = ip
937 obj.description = description
938 obj.description = description
938 self.sa.add(obj)
939 self.sa.add(obj)
939 return obj
940 return obj
940
941
941 auth_token_role = AuthTokenModel.cls
942 auth_token_role = AuthTokenModel.cls
942
943
943 def add_auth_token(self, user, lifetime_minutes, role, description=u'',
944 def add_auth_token(self, user, lifetime_minutes, role, description=u'',
944 scope_callback=None):
945 scope_callback=None):
945 """
946 """
946 Add AuthToken for user.
947 Add AuthToken for user.
947
948
948 :param user: username/user_id
949 :param user: username/user_id
949 :param lifetime_minutes: in minutes the lifetime for token, -1 equals no limit
950 :param lifetime_minutes: in minutes the lifetime for token, -1 equals no limit
950 :param role: one of AuthTokenModel.cls.ROLE_*
951 :param role: one of AuthTokenModel.cls.ROLE_*
951 :param description: optional string description
952 :param description: optional string description
952 """
953 """
953
954
954 token = AuthTokenModel().create(
955 token = AuthTokenModel().create(
955 user, description, lifetime_minutes, role)
956 user, description, lifetime_minutes, role)
956 if scope_callback and callable(scope_callback):
957 if scope_callback and callable(scope_callback):
957 # call the callback if we provide, used to attach scope for EE edition
958 # call the callback if we provide, used to attach scope for EE edition
958 scope_callback(token)
959 scope_callback(token)
959 return token
960 return token
960
961
961 def delete_extra_ip(self, user, ip_id):
962 def delete_extra_ip(self, user, ip_id):
962 """
963 """
963 Removes ip address from UserIpMap
964 Removes ip address from UserIpMap
964
965
965 :param user:
966 :param user:
966 :param ip_id:
967 :param ip_id:
967 """
968 """
968 user = self._get_user(user)
969 user = self._get_user(user)
969 obj = UserIpMap.query().get(ip_id)
970 obj = UserIpMap.query().get(ip_id)
970 if obj and obj.user_id == user.user_id:
971 if obj and obj.user_id == user.user_id:
971 self.sa.delete(obj)
972 self.sa.delete(obj)
972
973
973 def get_accounts_in_creation_order(self, current_user=None):
974 def get_accounts_in_creation_order(self, current_user=None):
974 """
975 """
975 Get accounts in order of creation for deactivation for license limits
976 Get accounts in order of creation for deactivation for license limits
976
977
977 pick currently logged in user, and append to the list in position 0
978 pick currently logged in user, and append to the list in position 0
978 pick all super-admins in order of creation date and add it to the list
979 pick all super-admins in order of creation date and add it to the list
979 pick all other accounts in order of creation and add it to the list.
980 pick all other accounts in order of creation and add it to the list.
980
981
981 Based on that list, the last accounts can be disabled as they are
982 Based on that list, the last accounts can be disabled as they are
982 created at the end and don't include any of the super admins as well
983 created at the end and don't include any of the super admins as well
983 as the current user.
984 as the current user.
984
985
985 :param current_user: optionally current user running this operation
986 :param current_user: optionally current user running this operation
986 """
987 """
987
988
988 if not current_user:
989 if not current_user:
989 current_user = get_current_rhodecode_user()
990 current_user = get_current_rhodecode_user()
990 active_super_admins = [
991 active_super_admins = [
991 x.user_id for x in User.query()
992 x.user_id for x in User.query()
992 .filter(User.user_id != current_user.user_id)
993 .filter(User.user_id != current_user.user_id)
993 .filter(User.active == true())
994 .filter(User.active == true())
994 .filter(User.admin == true())
995 .filter(User.admin == true())
995 .order_by(User.created_on.asc())]
996 .order_by(User.created_on.asc())]
996
997
997 active_regular_users = [
998 active_regular_users = [
998 x.user_id for x in User.query()
999 x.user_id for x in User.query()
999 .filter(User.user_id != current_user.user_id)
1000 .filter(User.user_id != current_user.user_id)
1000 .filter(User.active == true())
1001 .filter(User.active == true())
1001 .filter(User.admin == false())
1002 .filter(User.admin == false())
1002 .order_by(User.created_on.asc())]
1003 .order_by(User.created_on.asc())]
1003
1004
1004 list_of_accounts = [current_user.user_id]
1005 list_of_accounts = [current_user.user_id]
1005 list_of_accounts += active_super_admins
1006 list_of_accounts += active_super_admins
1006 list_of_accounts += active_regular_users
1007 list_of_accounts += active_regular_users
1007
1008
1008 return list_of_accounts
1009 return list_of_accounts
1009
1010
1010 def deactivate_last_users(self, expected_users, current_user=None):
1011 def deactivate_last_users(self, expected_users, current_user=None):
1011 """
1012 """
1012 Deactivate accounts that are over the license limits.
1013 Deactivate accounts that are over the license limits.
1013 Algorithm of which accounts to disabled is based on the formula:
1014 Algorithm of which accounts to disabled is based on the formula:
1014
1015
1015 Get current user, then super admins in creation order, then regular
1016 Get current user, then super admins in creation order, then regular
1016 active users in creation order.
1017 active users in creation order.
1017
1018
1018 Using that list we mark all accounts from the end of it as inactive.
1019 Using that list we mark all accounts from the end of it as inactive.
1019 This way we block only latest created accounts.
1020 This way we block only latest created accounts.
1020
1021
1021 :param expected_users: list of users in special order, we deactivate
1022 :param expected_users: list of users in special order, we deactivate
1022 the end N amount of users from that list
1023 the end N amount of users from that list
1023 """
1024 """
1024
1025
1025 list_of_accounts = self.get_accounts_in_creation_order(
1026 list_of_accounts = self.get_accounts_in_creation_order(
1026 current_user=current_user)
1027 current_user=current_user)
1027
1028
1028 for acc_id in list_of_accounts[expected_users + 1:]:
1029 for acc_id in list_of_accounts[expected_users + 1:]:
1029 user = User.get(acc_id)
1030 user = User.get(acc_id)
1030 log.info('Deactivating account %s for license unlock', user)
1031 log.info('Deactivating account %s for license unlock', user)
1031 user.active = False
1032 user.active = False
1032 Session().add(user)
1033 Session().add(user)
1033 Session().commit()
1034 Session().commit()
1034
1035
1035 return
1036 return
1036
1037
1037 def get_user_log(self, user, filter_term):
1038 def get_user_log(self, user, filter_term):
1038 user_log = UserLog.query()\
1039 user_log = UserLog.query()\
1039 .filter(or_(UserLog.user_id == user.user_id,
1040 .filter(or_(UserLog.user_id == user.user_id,
1040 UserLog.username == user.username))\
1041 UserLog.username == user.username))\
1041 .options(joinedload(UserLog.user))\
1042 .options(joinedload(UserLog.user))\
1042 .options(joinedload(UserLog.repository))\
1043 .options(joinedload(UserLog.repository))\
1043 .order_by(UserLog.action_date.desc())
1044 .order_by(UserLog.action_date.desc())
1044
1045
1045 user_log = user_log_filter(user_log, filter_term)
1046 user_log = user_log_filter(user_log, filter_term)
1046 return user_log
1047 return user_log
@@ -1,754 +1,754 b''
1
1
2
2
3 # Copyright (C) 2011-2020 RhodeCode GmbH
3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22 import traceback
22 import traceback
23
23
24 from rhodecode.lib.utils2 import safe_str, safe_unicode
24 from rhodecode.lib.utils2 import safe_str
25 from rhodecode.lib.exceptions import (
25 from rhodecode.lib.exceptions import (
26 UserGroupAssignedException, RepoGroupAssignmentError)
26 UserGroupAssignedException, RepoGroupAssignmentError)
27 from rhodecode.lib.utils2 import (
27 from rhodecode.lib.utils2 import (
28 get_current_rhodecode_user, action_logger_generic)
28 get_current_rhodecode_user, action_logger_generic)
29 from rhodecode.model import BaseModel
29 from rhodecode.model import BaseModel
30 from rhodecode.model.scm import UserGroupList
30 from rhodecode.model.scm import UserGroupList
31 from rhodecode.model.db import (
31 from rhodecode.model.db import (
32 joinedload, true, func, User, UserGroupMember, UserGroup,
32 joinedload, true, func, User, UserGroupMember, UserGroup,
33 UserGroupRepoToPerm, Permission, UserGroupToPerm, UserUserGroupToPerm,
33 UserGroupRepoToPerm, Permission, UserGroupToPerm, UserUserGroupToPerm,
34 UserGroupUserGroupToPerm, UserGroupRepoGroupToPerm)
34 UserGroupUserGroupToPerm, UserGroupRepoGroupToPerm)
35
35
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 class UserGroupModel(BaseModel):
40 class UserGroupModel(BaseModel):
41
41
42 cls = UserGroup
42 cls = UserGroup
43
43
44 def _get_user_group(self, user_group):
44 def _get_user_group(self, user_group):
45 return self._get_instance(UserGroup, user_group,
45 return self._get_instance(UserGroup, user_group,
46 callback=UserGroup.get_by_group_name)
46 callback=UserGroup.get_by_group_name)
47
47
48 def _create_default_perms(self, user_group):
48 def _create_default_perms(self, user_group):
49 # create default permission
49 # create default permission
50 default_perm = 'usergroup.read'
50 default_perm = 'usergroup.read'
51 def_user = User.get_default_user()
51 def_user = User.get_default_user()
52 for p in def_user.user_perms:
52 for p in def_user.user_perms:
53 if p.permission.permission_name.startswith('usergroup.'):
53 if p.permission.permission_name.startswith('usergroup.'):
54 default_perm = p.permission.permission_name
54 default_perm = p.permission.permission_name
55 break
55 break
56
56
57 user_group_to_perm = UserUserGroupToPerm()
57 user_group_to_perm = UserUserGroupToPerm()
58 user_group_to_perm.permission = Permission.get_by_key(default_perm)
58 user_group_to_perm.permission = Permission.get_by_key(default_perm)
59
59
60 user_group_to_perm.user_group = user_group
60 user_group_to_perm.user_group = user_group
61 user_group_to_perm.user_id = def_user.user_id
61 user_group_to_perm.user = def_user
62 return user_group_to_perm
62 return user_group_to_perm
63
63
64 def update_permissions(
64 def update_permissions(
65 self, user_group, perm_additions=None, perm_updates=None,
65 self, user_group, perm_additions=None, perm_updates=None,
66 perm_deletions=None, check_perms=True, cur_user=None):
66 perm_deletions=None, check_perms=True, cur_user=None):
67
67
68 from rhodecode.lib.auth import HasUserGroupPermissionAny
68 from rhodecode.lib.auth import HasUserGroupPermissionAny
69 if not perm_additions:
69 if not perm_additions:
70 perm_additions = []
70 perm_additions = []
71 if not perm_updates:
71 if not perm_updates:
72 perm_updates = []
72 perm_updates = []
73 if not perm_deletions:
73 if not perm_deletions:
74 perm_deletions = []
74 perm_deletions = []
75
75
76 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
76 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
77
77
78 changes = {
78 changes = {
79 'added': [],
79 'added': [],
80 'updated': [],
80 'updated': [],
81 'deleted': []
81 'deleted': []
82 }
82 }
83 change_obj = user_group.get_api_data()
83 change_obj = user_group.get_api_data()
84 # update permissions
84 # update permissions
85 for member_id, perm, member_type in perm_updates:
85 for member_id, perm, member_type in perm_updates:
86 member_id = int(member_id)
86 member_id = int(member_id)
87 if member_type == 'user':
87 if member_type == 'user':
88 member_name = User.get(member_id).username
88 member_name = User.get(member_id).username
89 # this updates existing one
89 # this updates existing one
90 self.grant_user_permission(
90 self.grant_user_permission(
91 user_group=user_group, user=member_id, perm=perm
91 user_group=user_group, user=member_id, perm=perm
92 )
92 )
93 elif member_type == 'user_group':
93 elif member_type == 'user_group':
94 # check if we have permissions to alter this usergroup
94 # check if we have permissions to alter this usergroup
95 member_name = UserGroup.get(member_id).users_group_name
95 member_name = UserGroup.get(member_id).users_group_name
96 if not check_perms or HasUserGroupPermissionAny(
96 if not check_perms or HasUserGroupPermissionAny(
97 *req_perms)(member_name, user=cur_user):
97 *req_perms)(member_name, user=cur_user):
98 self.grant_user_group_permission(
98 self.grant_user_group_permission(
99 target_user_group=user_group, user_group=member_id, perm=perm)
99 target_user_group=user_group, user_group=member_id, perm=perm)
100 else:
100 else:
101 raise ValueError("member_type must be 'user' or 'user_group' "
101 raise ValueError("member_type must be 'user' or 'user_group' "
102 "got {} instead".format(member_type))
102 "got {} instead".format(member_type))
103
103
104 changes['updated'].append({
104 changes['updated'].append({
105 'change_obj': change_obj,
105 'change_obj': change_obj,
106 'type': member_type, 'id': member_id,
106 'type': member_type, 'id': member_id,
107 'name': member_name, 'new_perm': perm})
107 'name': member_name, 'new_perm': perm})
108
108
109 # set new permissions
109 # set new permissions
110 for member_id, perm, member_type in perm_additions:
110 for member_id, perm, member_type in perm_additions:
111 member_id = int(member_id)
111 member_id = int(member_id)
112 if member_type == 'user':
112 if member_type == 'user':
113 member_name = User.get(member_id).username
113 member_name = User.get(member_id).username
114 self.grant_user_permission(
114 self.grant_user_permission(
115 user_group=user_group, user=member_id, perm=perm)
115 user_group=user_group, user=member_id, perm=perm)
116 elif member_type == 'user_group':
116 elif member_type == 'user_group':
117 # check if we have permissions to alter this usergroup
117 # check if we have permissions to alter this usergroup
118 member_name = UserGroup.get(member_id).users_group_name
118 member_name = UserGroup.get(member_id).users_group_name
119 if not check_perms or HasUserGroupPermissionAny(
119 if not check_perms or HasUserGroupPermissionAny(
120 *req_perms)(member_name, user=cur_user):
120 *req_perms)(member_name, user=cur_user):
121 self.grant_user_group_permission(
121 self.grant_user_group_permission(
122 target_user_group=user_group, user_group=member_id, perm=perm)
122 target_user_group=user_group, user_group=member_id, perm=perm)
123 else:
123 else:
124 raise ValueError("member_type must be 'user' or 'user_group' "
124 raise ValueError("member_type must be 'user' or 'user_group' "
125 "got {} instead".format(member_type))
125 "got {} instead".format(member_type))
126
126
127 changes['added'].append({
127 changes['added'].append({
128 'change_obj': change_obj,
128 'change_obj': change_obj,
129 'type': member_type, 'id': member_id,
129 'type': member_type, 'id': member_id,
130 'name': member_name, 'new_perm': perm})
130 'name': member_name, 'new_perm': perm})
131
131
132 # delete permissions
132 # delete permissions
133 for member_id, perm, member_type in perm_deletions:
133 for member_id, perm, member_type in perm_deletions:
134 member_id = int(member_id)
134 member_id = int(member_id)
135 if member_type == 'user':
135 if member_type == 'user':
136 member_name = User.get(member_id).username
136 member_name = User.get(member_id).username
137 self.revoke_user_permission(user_group=user_group, user=member_id)
137 self.revoke_user_permission(user_group=user_group, user=member_id)
138 elif member_type == 'user_group':
138 elif member_type == 'user_group':
139 # check if we have permissions to alter this usergroup
139 # check if we have permissions to alter this usergroup
140 member_name = UserGroup.get(member_id).users_group_name
140 member_name = UserGroup.get(member_id).users_group_name
141 if not check_perms or HasUserGroupPermissionAny(
141 if not check_perms or HasUserGroupPermissionAny(
142 *req_perms)(member_name, user=cur_user):
142 *req_perms)(member_name, user=cur_user):
143 self.revoke_user_group_permission(
143 self.revoke_user_group_permission(
144 target_user_group=user_group, user_group=member_id)
144 target_user_group=user_group, user_group=member_id)
145 else:
145 else:
146 raise ValueError("member_type must be 'user' or 'user_group' "
146 raise ValueError("member_type must be 'user' or 'user_group' "
147 "got {} instead".format(member_type))
147 "got {} instead".format(member_type))
148
148
149 changes['deleted'].append({
149 changes['deleted'].append({
150 'change_obj': change_obj,
150 'change_obj': change_obj,
151 'type': member_type, 'id': member_id,
151 'type': member_type, 'id': member_id,
152 'name': member_name, 'new_perm': perm})
152 'name': member_name, 'new_perm': perm})
153
153
154 return changes
154 return changes
155
155
156 def get(self, user_group_id, cache=False):
156 def get(self, user_group_id, cache=False):
157 return UserGroup.get(user_group_id)
157 return UserGroup.get(user_group_id)
158
158
159 def get_group(self, user_group):
159 def get_group(self, user_group):
160 return self._get_user_group(user_group)
160 return self._get_user_group(user_group)
161
161
162 def get_by_name(self, name, cache=False, case_insensitive=False):
162 def get_by_name(self, name, cache=False, case_insensitive=False):
163 return UserGroup.get_by_group_name(name, cache, case_insensitive)
163 return UserGroup.get_by_group_name(name, cache, case_insensitive)
164
164
165 def create(self, name, description, owner, active=True, group_data=None):
165 def create(self, name, description, owner, active=True, group_data=None):
166 try:
166 try:
167 new_user_group = UserGroup()
167 new_user_group = UserGroup()
168 new_user_group.user = self._get_user(owner)
168 new_user_group.user = self._get_user(owner)
169 new_user_group.users_group_name = name
169 new_user_group.users_group_name = name
170 new_user_group.user_group_description = description
170 new_user_group.user_group_description = description
171 new_user_group.users_group_active = active
171 new_user_group.users_group_active = active
172 if group_data:
172 if group_data:
173 new_user_group.group_data = group_data
173 new_user_group.group_data = group_data
174 self.sa.add(new_user_group)
174 self.sa.add(new_user_group)
175 perm_obj = self._create_default_perms(new_user_group)
175 perm_obj = self._create_default_perms(new_user_group)
176 self.sa.add(perm_obj)
176 self.sa.add(perm_obj)
177
177
178 self.grant_user_permission(user_group=new_user_group,
178 self.grant_user_permission(user_group=new_user_group,
179 user=owner, perm='usergroup.admin')
179 user=owner, perm='usergroup.admin')
180
180
181 return new_user_group
181 return new_user_group
182 except Exception:
182 except Exception:
183 log.error(traceback.format_exc())
183 log.error(traceback.format_exc())
184 raise
184 raise
185
185
186 def _get_memberships_for_user_ids(self, user_group, user_id_list):
186 def _get_memberships_for_user_ids(self, user_group, user_id_list):
187 members = []
187 members = []
188 for user_id in user_id_list:
188 for user_id in user_id_list:
189 member = self._get_membership(user_group.users_group_id, user_id)
189 member = self._get_membership(user_group.users_group_id, user_id)
190 members.append(member)
190 members.append(member)
191 return members
191 return members
192
192
193 def _get_added_and_removed_user_ids(self, user_group, user_id_list):
193 def _get_added_and_removed_user_ids(self, user_group, user_id_list):
194 current_members = user_group.members or []
194 current_members = user_group.members or []
195 current_members_ids = [m.user.user_id for m in current_members]
195 current_members_ids = [m.user.user_id for m in current_members]
196
196
197 added_members = [
197 added_members = [
198 user_id for user_id in user_id_list
198 user_id for user_id in user_id_list
199 if user_id not in current_members_ids]
199 if user_id not in current_members_ids]
200 if user_id_list == []:
200 if user_id_list == []:
201 # all members were deleted
201 # all members were deleted
202 deleted_members = current_members_ids
202 deleted_members = current_members_ids
203 else:
203 else:
204 deleted_members = [
204 deleted_members = [
205 user_id for user_id in current_members_ids
205 user_id for user_id in current_members_ids
206 if user_id not in user_id_list]
206 if user_id not in user_id_list]
207
207
208 return added_members, deleted_members
208 return added_members, deleted_members
209
209
210 def _set_users_as_members(self, user_group, user_ids):
210 def _set_users_as_members(self, user_group, user_ids):
211 user_group.members = []
211 user_group.members = []
212 self.sa.flush()
212 self.sa.flush()
213 members = self._get_memberships_for_user_ids(
213 members = self._get_memberships_for_user_ids(
214 user_group, user_ids)
214 user_group, user_ids)
215 user_group.members = members
215 user_group.members = members
216 self.sa.add(user_group)
216 self.sa.add(user_group)
217
217
218 def _update_members_from_user_ids(self, user_group, user_ids):
218 def _update_members_from_user_ids(self, user_group, user_ids):
219 added, removed = self._get_added_and_removed_user_ids(
219 added, removed = self._get_added_and_removed_user_ids(
220 user_group, user_ids)
220 user_group, user_ids)
221 self._set_users_as_members(user_group, user_ids)
221 self._set_users_as_members(user_group, user_ids)
222 self._log_user_changes('added to', user_group, added)
222 self._log_user_changes('added to', user_group, added)
223 self._log_user_changes('removed from', user_group, removed)
223 self._log_user_changes('removed from', user_group, removed)
224 return added, removed
224 return added, removed
225
225
226 def _clean_members_data(self, members_data):
226 def _clean_members_data(self, members_data):
227 if not members_data:
227 if not members_data:
228 members_data = []
228 members_data = []
229
229
230 members = []
230 members = []
231 for user in members_data:
231 for user in members_data:
232 uid = int(user['member_user_id'])
232 uid = int(user['member_user_id'])
233 if uid not in members and user['type'] in ['new', 'existing']:
233 if uid not in members and user['type'] in ['new', 'existing']:
234 members.append(uid)
234 members.append(uid)
235 return members
235 return members
236
236
237 def update(self, user_group, form_data, group_data=None):
237 def update(self, user_group, form_data, group_data=None):
238 user_group = self._get_user_group(user_group)
238 user_group = self._get_user_group(user_group)
239 if 'users_group_name' in form_data:
239 if 'users_group_name' in form_data:
240 user_group.users_group_name = form_data['users_group_name']
240 user_group.users_group_name = form_data['users_group_name']
241 if 'users_group_active' in form_data:
241 if 'users_group_active' in form_data:
242 user_group.users_group_active = form_data['users_group_active']
242 user_group.users_group_active = form_data['users_group_active']
243 if 'user_group_description' in form_data:
243 if 'user_group_description' in form_data:
244 user_group.user_group_description = form_data[
244 user_group.user_group_description = form_data[
245 'user_group_description']
245 'user_group_description']
246
246
247 # handle owner change
247 # handle owner change
248 if 'user' in form_data:
248 if 'user' in form_data:
249 owner = form_data['user']
249 owner = form_data['user']
250 if isinstance(owner, str):
250 if isinstance(owner, str):
251 owner = User.get_by_username(form_data['user'])
251 owner = User.get_by_username(form_data['user'])
252
252
253 if not isinstance(owner, User):
253 if not isinstance(owner, User):
254 raise ValueError(
254 raise ValueError(
255 'invalid owner for user group: %s' % form_data['user'])
255 'invalid owner for user group: %s' % form_data['user'])
256
256
257 user_group.user = owner
257 user_group.user = owner
258
258
259 added_user_ids = []
259 added_user_ids = []
260 removed_user_ids = []
260 removed_user_ids = []
261 if 'users_group_members' in form_data:
261 if 'users_group_members' in form_data:
262 members_id_list = self._clean_members_data(
262 members_id_list = self._clean_members_data(
263 form_data['users_group_members'])
263 form_data['users_group_members'])
264 added_user_ids, removed_user_ids = \
264 added_user_ids, removed_user_ids = \
265 self._update_members_from_user_ids(user_group, members_id_list)
265 self._update_members_from_user_ids(user_group, members_id_list)
266
266
267 if group_data:
267 if group_data:
268 new_group_data = {}
268 new_group_data = {}
269 new_group_data.update(group_data)
269 new_group_data.update(group_data)
270 user_group.group_data = new_group_data
270 user_group.group_data = new_group_data
271
271
272 self.sa.add(user_group)
272 self.sa.add(user_group)
273 return user_group, added_user_ids, removed_user_ids
273 return user_group, added_user_ids, removed_user_ids
274
274
275 def delete(self, user_group, force=False):
275 def delete(self, user_group, force=False):
276 """
276 """
277 Deletes repository group, unless force flag is used
277 Deletes repository group, unless force flag is used
278 raises exception if there are members in that group, else deletes
278 raises exception if there are members in that group, else deletes
279 group and users
279 group and users
280
280
281 :param user_group:
281 :param user_group:
282 :param force:
282 :param force:
283 """
283 """
284 user_group = self._get_user_group(user_group)
284 user_group = self._get_user_group(user_group)
285 if not user_group:
285 if not user_group:
286 return
286 return
287
287
288 try:
288 try:
289 # check if this group is not assigned to repo
289 # check if this group is not assigned to repo
290 assigned_to_repo = [x.repository for x in UserGroupRepoToPerm.query()\
290 assigned_to_repo = [x.repository for x in UserGroupRepoToPerm.query()\
291 .filter(UserGroupRepoToPerm.users_group == user_group).all()]
291 .filter(UserGroupRepoToPerm.users_group == user_group).all()]
292 # check if this group is not assigned to repo
292 # check if this group is not assigned to repo
293 assigned_to_repo_group = [x.group for x in UserGroupRepoGroupToPerm.query()\
293 assigned_to_repo_group = [x.group for x in UserGroupRepoGroupToPerm.query()\
294 .filter(UserGroupRepoGroupToPerm.users_group == user_group).all()]
294 .filter(UserGroupRepoGroupToPerm.users_group == user_group).all()]
295
295
296 if (assigned_to_repo or assigned_to_repo_group) and not force:
296 if (assigned_to_repo or assigned_to_repo_group) and not force:
297 assigned = ','.join(map(safe_str,
297 assigned = ','.join(map(safe_str,
298 assigned_to_repo+assigned_to_repo_group))
298 assigned_to_repo+assigned_to_repo_group))
299
299
300 raise UserGroupAssignedException(
300 raise UserGroupAssignedException(
301 'UserGroup assigned to %s' % (assigned,))
301 'UserGroup assigned to %s' % (assigned,))
302 self.sa.delete(user_group)
302 self.sa.delete(user_group)
303 except Exception:
303 except Exception:
304 log.error(traceback.format_exc())
304 log.error(traceback.format_exc())
305 raise
305 raise
306
306
307 def _log_user_changes(self, action, user_group, user_or_users):
307 def _log_user_changes(self, action, user_group, user_or_users):
308 users = user_or_users
308 users = user_or_users
309 if not isinstance(users, (list, tuple)):
309 if not isinstance(users, (list, tuple)):
310 users = [users]
310 users = [users]
311
311
312 group_name = user_group.users_group_name
312 group_name = user_group.users_group_name
313
313
314 for user_or_user_id in users:
314 for user_or_user_id in users:
315 user = self._get_user(user_or_user_id)
315 user = self._get_user(user_or_user_id)
316 log_text = 'User {user} {action} {group}'.format(
316 log_text = 'User {user} {action} {group}'.format(
317 action=action, user=user.username, group=group_name)
317 action=action, user=user.username, group=group_name)
318 action_logger_generic(log_text)
318 action_logger_generic(log_text)
319
319
320 def _find_user_in_group(self, user, user_group):
320 def _find_user_in_group(self, user, user_group):
321 user_group_member = None
321 user_group_member = None
322 for m in user_group.members:
322 for m in user_group.members:
323 if m.user_id == user.user_id:
323 if m.user_id == user.user_id:
324 # Found this user's membership row
324 # Found this user's membership row
325 user_group_member = m
325 user_group_member = m
326 break
326 break
327
327
328 return user_group_member
328 return user_group_member
329
329
330 def _get_membership(self, user_group_id, user_id):
330 def _get_membership(self, user_group_id, user_id):
331 user_group_member = UserGroupMember(user_group_id, user_id)
331 user_group_member = UserGroupMember(user_group_id, user_id)
332 return user_group_member
332 return user_group_member
333
333
334 def add_user_to_group(self, user_group, user):
334 def add_user_to_group(self, user_group, user):
335 user_group = self._get_user_group(user_group)
335 user_group = self._get_user_group(user_group)
336 user = self._get_user(user)
336 user = self._get_user(user)
337 user_member = self._find_user_in_group(user, user_group)
337 user_member = self._find_user_in_group(user, user_group)
338 if user_member:
338 if user_member:
339 # user already in the group, skip
339 # user already in the group, skip
340 return True
340 return True
341
341
342 member = self._get_membership(
342 member = self._get_membership(
343 user_group.users_group_id, user.user_id)
343 user_group.users_group_id, user.user_id)
344 user_group.members.append(member)
344 user_group.members.append(member)
345
345
346 try:
346 try:
347 self.sa.add(member)
347 self.sa.add(member)
348 except Exception:
348 except Exception:
349 # what could go wrong here?
349 # what could go wrong here?
350 log.error(traceback.format_exc())
350 log.error(traceback.format_exc())
351 raise
351 raise
352
352
353 self._log_user_changes('added to', user_group, user)
353 self._log_user_changes('added to', user_group, user)
354 return member
354 return member
355
355
356 def remove_user_from_group(self, user_group, user):
356 def remove_user_from_group(self, user_group, user):
357 user_group = self._get_user_group(user_group)
357 user_group = self._get_user_group(user_group)
358 user = self._get_user(user)
358 user = self._get_user(user)
359 user_group_member = self._find_user_in_group(user, user_group)
359 user_group_member = self._find_user_in_group(user, user_group)
360
360
361 if not user_group_member:
361 if not user_group_member:
362 # User isn't in that group
362 # User isn't in that group
363 return False
363 return False
364
364
365 try:
365 try:
366 self.sa.delete(user_group_member)
366 self.sa.delete(user_group_member)
367 except Exception:
367 except Exception:
368 log.error(traceback.format_exc())
368 log.error(traceback.format_exc())
369 raise
369 raise
370
370
371 self._log_user_changes('removed from', user_group, user)
371 self._log_user_changes('removed from', user_group, user)
372 return True
372 return True
373
373
374 def has_perm(self, user_group, perm):
374 def has_perm(self, user_group, perm):
375 user_group = self._get_user_group(user_group)
375 user_group = self._get_user_group(user_group)
376 perm = self._get_perm(perm)
376 perm = self._get_perm(perm)
377
377
378 return UserGroupToPerm.query()\
378 return UserGroupToPerm.query()\
379 .filter(UserGroupToPerm.users_group == user_group)\
379 .filter(UserGroupToPerm.users_group == user_group)\
380 .filter(UserGroupToPerm.permission == perm).scalar() is not None
380 .filter(UserGroupToPerm.permission == perm).scalar() is not None
381
381
382 def grant_perm(self, user_group, perm):
382 def grant_perm(self, user_group, perm):
383 user_group = self._get_user_group(user_group)
383 user_group = self._get_user_group(user_group)
384 perm = self._get_perm(perm)
384 perm = self._get_perm(perm)
385
385
386 # if this permission is already granted skip it
386 # if this permission is already granted skip it
387 _perm = UserGroupToPerm.query()\
387 _perm = UserGroupToPerm.query()\
388 .filter(UserGroupToPerm.users_group == user_group)\
388 .filter(UserGroupToPerm.users_group == user_group)\
389 .filter(UserGroupToPerm.permission == perm)\
389 .filter(UserGroupToPerm.permission == perm)\
390 .scalar()
390 .scalar()
391 if _perm:
391 if _perm:
392 return
392 return
393
393
394 new = UserGroupToPerm()
394 new = UserGroupToPerm()
395 new.users_group = user_group
395 new.users_group = user_group
396 new.permission = perm
396 new.permission = perm
397 self.sa.add(new)
397 self.sa.add(new)
398 return new
398 return new
399
399
400 def revoke_perm(self, user_group, perm):
400 def revoke_perm(self, user_group, perm):
401 user_group = self._get_user_group(user_group)
401 user_group = self._get_user_group(user_group)
402 perm = self._get_perm(perm)
402 perm = self._get_perm(perm)
403
403
404 obj = UserGroupToPerm.query()\
404 obj = UserGroupToPerm.query()\
405 .filter(UserGroupToPerm.users_group == user_group)\
405 .filter(UserGroupToPerm.users_group == user_group)\
406 .filter(UserGroupToPerm.permission == perm).scalar()
406 .filter(UserGroupToPerm.permission == perm).scalar()
407 if obj:
407 if obj:
408 self.sa.delete(obj)
408 self.sa.delete(obj)
409
409
410 def grant_user_permission(self, user_group, user, perm):
410 def grant_user_permission(self, user_group, user, perm):
411 """
411 """
412 Grant permission for user on given user group, or update
412 Grant permission for user on given user group, or update
413 existing one if found
413 existing one if found
414
414
415 :param user_group: Instance of UserGroup, users_group_id,
415 :param user_group: Instance of UserGroup, users_group_id,
416 or users_group_name
416 or users_group_name
417 :param user: Instance of User, user_id or username
417 :param user: Instance of User, user_id or username
418 :param perm: Instance of Permission, or permission_name
418 :param perm: Instance of Permission, or permission_name
419 """
419 """
420 changes = {
420 changes = {
421 'added': [],
421 'added': [],
422 'updated': [],
422 'updated': [],
423 'deleted': []
423 'deleted': []
424 }
424 }
425
425
426 user_group = self._get_user_group(user_group)
426 user_group = self._get_user_group(user_group)
427 user = self._get_user(user)
427 user = self._get_user(user)
428 permission = self._get_perm(perm)
428 permission = self._get_perm(perm)
429 perm_name = permission.permission_name
429 perm_name = permission.permission_name
430 member_id = user.user_id
430 member_id = user.user_id
431 member_name = user.username
431 member_name = user.username
432
432
433 # check if we have that permission already
433 # check if we have that permission already
434 obj = self.sa.query(UserUserGroupToPerm)\
434 obj = self.sa.query(UserUserGroupToPerm)\
435 .filter(UserUserGroupToPerm.user == user)\
435 .filter(UserUserGroupToPerm.user == user)\
436 .filter(UserUserGroupToPerm.user_group == user_group)\
436 .filter(UserUserGroupToPerm.user_group == user_group)\
437 .scalar()
437 .scalar()
438 if obj is None:
438 if obj is None:
439 # create new !
439 # create new !
440 obj = UserUserGroupToPerm()
440 obj = UserUserGroupToPerm()
441 obj.user_group = user_group
441 obj.user_group = user_group
442 obj.user = user
442 obj.user = user
443 obj.permission = permission
443 obj.permission = permission
444 self.sa.add(obj)
444 self.sa.add(obj)
445 log.debug('Granted perm %s to %s on %s', perm, user, user_group)
445 log.debug('Granted perm %s to %s on %s', perm, user, user_group)
446 action_logger_generic(
446 action_logger_generic(
447 'granted permission: {} to user: {} on usergroup: {}'.format(
447 'granted permission: {} to user: {} on usergroup: {}'.format(
448 perm, user, user_group), namespace='security.usergroup')
448 perm, user, user_group), namespace='security.usergroup')
449
449
450 changes['added'].append({
450 changes['added'].append({
451 'change_obj': user_group.get_api_data(),
451 'change_obj': user_group.get_api_data(),
452 'type': 'user', 'id': member_id,
452 'type': 'user', 'id': member_id,
453 'name': member_name, 'new_perm': perm_name})
453 'name': member_name, 'new_perm': perm_name})
454
454
455 return changes
455 return changes
456
456
457 def revoke_user_permission(self, user_group, user):
457 def revoke_user_permission(self, user_group, user):
458 """
458 """
459 Revoke permission for user on given user group
459 Revoke permission for user on given user group
460
460
461 :param user_group: Instance of UserGroup, users_group_id,
461 :param user_group: Instance of UserGroup, users_group_id,
462 or users_group name
462 or users_group name
463 :param user: Instance of User, user_id or username
463 :param user: Instance of User, user_id or username
464 """
464 """
465 changes = {
465 changes = {
466 'added': [],
466 'added': [],
467 'updated': [],
467 'updated': [],
468 'deleted': []
468 'deleted': []
469 }
469 }
470
470
471 user_group = self._get_user_group(user_group)
471 user_group = self._get_user_group(user_group)
472 user = self._get_user(user)
472 user = self._get_user(user)
473 perm_name = 'usergroup.none'
473 perm_name = 'usergroup.none'
474 member_id = user.user_id
474 member_id = user.user_id
475 member_name = user.username
475 member_name = user.username
476
476
477 obj = self.sa.query(UserUserGroupToPerm)\
477 obj = self.sa.query(UserUserGroupToPerm)\
478 .filter(UserUserGroupToPerm.user == user)\
478 .filter(UserUserGroupToPerm.user == user)\
479 .filter(UserUserGroupToPerm.user_group == user_group)\
479 .filter(UserUserGroupToPerm.user_group == user_group)\
480 .scalar()
480 .scalar()
481 if obj:
481 if obj:
482 self.sa.delete(obj)
482 self.sa.delete(obj)
483 log.debug('Revoked perm on %s on %s', user_group, user)
483 log.debug('Revoked perm on %s on %s', user_group, user)
484 action_logger_generic(
484 action_logger_generic(
485 'revoked permission from user: {} on usergroup: {}'.format(
485 'revoked permission from user: {} on usergroup: {}'.format(
486 user, user_group), namespace='security.usergroup')
486 user, user_group), namespace='security.usergroup')
487
487
488 changes['deleted'].append({
488 changes['deleted'].append({
489 'change_obj': user_group.get_api_data(),
489 'change_obj': user_group.get_api_data(),
490 'type': 'user', 'id': member_id,
490 'type': 'user', 'id': member_id,
491 'name': member_name, 'new_perm': perm_name})
491 'name': member_name, 'new_perm': perm_name})
492
492
493 return changes
493 return changes
494
494
495 def grant_user_group_permission(self, target_user_group, user_group, perm):
495 def grant_user_group_permission(self, target_user_group, user_group, perm):
496 """
496 """
497 Grant user group permission for given target_user_group
497 Grant user group permission for given target_user_group
498
498
499 :param target_user_group:
499 :param target_user_group:
500 :param user_group:
500 :param user_group:
501 :param perm:
501 :param perm:
502 """
502 """
503 changes = {
503 changes = {
504 'added': [],
504 'added': [],
505 'updated': [],
505 'updated': [],
506 'deleted': []
506 'deleted': []
507 }
507 }
508
508
509 target_user_group = self._get_user_group(target_user_group)
509 target_user_group = self._get_user_group(target_user_group)
510 user_group = self._get_user_group(user_group)
510 user_group = self._get_user_group(user_group)
511 permission = self._get_perm(perm)
511 permission = self._get_perm(perm)
512 perm_name = permission.permission_name
512 perm_name = permission.permission_name
513 member_id = user_group.users_group_id
513 member_id = user_group.users_group_id
514 member_name = user_group.users_group_name
514 member_name = user_group.users_group_name
515
515
516 # forbid assigning same user group to itself
516 # forbid assigning same user group to itself
517 if target_user_group == user_group:
517 if target_user_group == user_group:
518 raise RepoGroupAssignmentError('target repo:%s cannot be '
518 raise RepoGroupAssignmentError('target repo:%s cannot be '
519 'assigned to itself' % target_user_group)
519 'assigned to itself' % target_user_group)
520
520
521 # check if we have that permission already
521 # check if we have that permission already
522 obj = self.sa.query(UserGroupUserGroupToPerm)\
522 obj = self.sa.query(UserGroupUserGroupToPerm)\
523 .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group)\
523 .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group)\
524 .filter(UserGroupUserGroupToPerm.user_group == user_group)\
524 .filter(UserGroupUserGroupToPerm.user_group == user_group)\
525 .scalar()
525 .scalar()
526 if obj is None:
526 if obj is None:
527 # create new !
527 # create new !
528 obj = UserGroupUserGroupToPerm()
528 obj = UserGroupUserGroupToPerm()
529 obj.user_group = user_group
529 obj.user_group = user_group
530 obj.target_user_group = target_user_group
530 obj.target_user_group = target_user_group
531 obj.permission = permission
531 obj.permission = permission
532 self.sa.add(obj)
532 self.sa.add(obj)
533 log.debug(
533 log.debug(
534 'Granted perm %s to %s on %s', perm, target_user_group, user_group)
534 'Granted perm %s to %s on %s', perm, target_user_group, user_group)
535 action_logger_generic(
535 action_logger_generic(
536 'granted permission: {} to usergroup: {} on usergroup: {}'.format(
536 'granted permission: {} to usergroup: {} on usergroup: {}'.format(
537 perm, user_group, target_user_group),
537 perm, user_group, target_user_group),
538 namespace='security.usergroup')
538 namespace='security.usergroup')
539
539
540 changes['added'].append({
540 changes['added'].append({
541 'change_obj': target_user_group.get_api_data(),
541 'change_obj': target_user_group.get_api_data(),
542 'type': 'user_group', 'id': member_id,
542 'type': 'user_group', 'id': member_id,
543 'name': member_name, 'new_perm': perm_name})
543 'name': member_name, 'new_perm': perm_name})
544
544
545 return changes
545 return changes
546
546
547 def revoke_user_group_permission(self, target_user_group, user_group):
547 def revoke_user_group_permission(self, target_user_group, user_group):
548 """
548 """
549 Revoke user group permission for given target_user_group
549 Revoke user group permission for given target_user_group
550
550
551 :param target_user_group:
551 :param target_user_group:
552 :param user_group:
552 :param user_group:
553 """
553 """
554 changes = {
554 changes = {
555 'added': [],
555 'added': [],
556 'updated': [],
556 'updated': [],
557 'deleted': []
557 'deleted': []
558 }
558 }
559
559
560 target_user_group = self._get_user_group(target_user_group)
560 target_user_group = self._get_user_group(target_user_group)
561 user_group = self._get_user_group(user_group)
561 user_group = self._get_user_group(user_group)
562 perm_name = 'usergroup.none'
562 perm_name = 'usergroup.none'
563 member_id = user_group.users_group_id
563 member_id = user_group.users_group_id
564 member_name = user_group.users_group_name
564 member_name = user_group.users_group_name
565
565
566 obj = self.sa.query(UserGroupUserGroupToPerm)\
566 obj = self.sa.query(UserGroupUserGroupToPerm)\
567 .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group)\
567 .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group)\
568 .filter(UserGroupUserGroupToPerm.user_group == user_group)\
568 .filter(UserGroupUserGroupToPerm.user_group == user_group)\
569 .scalar()
569 .scalar()
570 if obj:
570 if obj:
571 self.sa.delete(obj)
571 self.sa.delete(obj)
572 log.debug(
572 log.debug(
573 'Revoked perm on %s on %s', target_user_group, user_group)
573 'Revoked perm on %s on %s', target_user_group, user_group)
574 action_logger_generic(
574 action_logger_generic(
575 'revoked permission from usergroup: {} on usergroup: {}'.format(
575 'revoked permission from usergroup: {} on usergroup: {}'.format(
576 user_group, target_user_group),
576 user_group, target_user_group),
577 namespace='security.repogroup')
577 namespace='security.repogroup')
578
578
579 changes['deleted'].append({
579 changes['deleted'].append({
580 'change_obj': target_user_group.get_api_data(),
580 'change_obj': target_user_group.get_api_data(),
581 'type': 'user_group', 'id': member_id,
581 'type': 'user_group', 'id': member_id,
582 'name': member_name, 'new_perm': perm_name})
582 'name': member_name, 'new_perm': perm_name})
583
583
584 return changes
584 return changes
585
585
586 def get_perms_summary(self, user_group_id):
586 def get_perms_summary(self, user_group_id):
587 permissions = {
587 permissions = {
588 'repositories': {},
588 'repositories': {},
589 'repositories_groups': {},
589 'repositories_groups': {},
590 }
590 }
591 ugroup_repo_perms = UserGroupRepoToPerm.query()\
591 ugroup_repo_perms = UserGroupRepoToPerm.query()\
592 .options(joinedload(UserGroupRepoToPerm.permission))\
592 .options(joinedload(UserGroupRepoToPerm.permission))\
593 .options(joinedload(UserGroupRepoToPerm.repository))\
593 .options(joinedload(UserGroupRepoToPerm.repository))\
594 .filter(UserGroupRepoToPerm.users_group_id == user_group_id)\
594 .filter(UserGroupRepoToPerm.users_group_id == user_group_id)\
595 .all()
595 .all()
596
596
597 for gr in ugroup_repo_perms:
597 for gr in ugroup_repo_perms:
598 permissions['repositories'][gr.repository.repo_name] \
598 permissions['repositories'][gr.repository.repo_name] \
599 = gr.permission.permission_name
599 = gr.permission.permission_name
600
600
601 ugroup_group_perms = UserGroupRepoGroupToPerm.query()\
601 ugroup_group_perms = UserGroupRepoGroupToPerm.query()\
602 .options(joinedload(UserGroupRepoGroupToPerm.permission))\
602 .options(joinedload(UserGroupRepoGroupToPerm.permission))\
603 .options(joinedload(UserGroupRepoGroupToPerm.group))\
603 .options(joinedload(UserGroupRepoGroupToPerm.group))\
604 .filter(UserGroupRepoGroupToPerm.users_group_id == user_group_id)\
604 .filter(UserGroupRepoGroupToPerm.users_group_id == user_group_id)\
605 .all()
605 .all()
606
606
607 for gr in ugroup_group_perms:
607 for gr in ugroup_group_perms:
608 permissions['repositories_groups'][gr.group.group_name] \
608 permissions['repositories_groups'][gr.group.group_name] \
609 = gr.permission.permission_name
609 = gr.permission.permission_name
610 return permissions
610 return permissions
611
611
612 def enforce_groups(self, user, groups, extern_type=None):
612 def enforce_groups(self, user, groups, extern_type=None):
613 user = self._get_user(user)
613 user = self._get_user(user)
614 current_groups = user.group_member
614 current_groups = user.group_member
615
615
616 # find the external created groups, i.e automatically created
616 # find the external created groups, i.e automatically created
617 log.debug('Enforcing user group set `%s` on user %s', groups, user)
617 log.debug('Enforcing user group set `%s` on user %s', groups, user)
618 # calculate from what groups user should be removed
618 # calculate from what groups user should be removed
619 # external_groups that are not in groups
619 # external_groups that are not in groups
620 for gr in [x.users_group for x in current_groups]:
620 for gr in [x.users_group for x in current_groups]:
621 managed = gr.group_data.get('extern_type')
621 managed = gr.group_data.get('extern_type')
622 if managed:
622 if managed:
623 if gr.users_group_name not in groups:
623 if gr.users_group_name not in groups:
624 log.debug('Removing user %s from user group %s. '
624 log.debug('Removing user %s from user group %s. '
625 'Group sync managed by: %s', user, gr, managed)
625 'Group sync managed by: %s', user, gr, managed)
626 self.remove_user_from_group(gr, user)
626 self.remove_user_from_group(gr, user)
627 else:
627 else:
628 log.debug('Skipping removal from group %s since it is '
628 log.debug('Skipping removal from group %s since it is '
629 'not set to be automatically synchronized', gr)
629 'not set to be automatically synchronized', gr)
630
630
631 # now we calculate in which groups user should be == groups params
631 # now we calculate in which groups user should be == groups params
632 owner = User.get_first_super_admin().username
632 owner = User.get_first_super_admin().username
633 for gr in set(groups):
633 for gr in set(groups):
634 existing_group = UserGroup.get_by_group_name(gr)
634 existing_group = UserGroup.get_by_group_name(gr)
635 if not existing_group:
635 if not existing_group:
636 desc = 'Automatically created from plugin:%s' % extern_type
636 desc = 'Automatically created from plugin:%s' % extern_type
637 # we use first admin account to set the owner of the group
637 # we use first admin account to set the owner of the group
638 existing_group = UserGroupModel().create(
638 existing_group = UserGroupModel().create(
639 gr, desc, owner, group_data={'extern_type': extern_type})
639 gr, desc, owner, group_data={'extern_type': extern_type})
640
640
641 # we can only add users to groups which have set sync flag via
641 # we can only add users to groups which have set sync flag via
642 # extern_type attribute.
642 # extern_type attribute.
643 # This is either set and created via plugins, or manually
643 # This is either set and created via plugins, or manually
644 managed = existing_group.group_data.get('extern_type')
644 managed = existing_group.group_data.get('extern_type')
645 if managed:
645 if managed:
646 log.debug('Adding user %s to user group %s', user, gr)
646 log.debug('Adding user %s to user group %s', user, gr)
647 UserGroupModel().add_user_to_group(existing_group, user)
647 UserGroupModel().add_user_to_group(existing_group, user)
648 else:
648 else:
649 log.debug('Skipping addition to group %s since it is '
649 log.debug('Skipping addition to group %s since it is '
650 'not set to be automatically synchronized', gr)
650 'not set to be automatically synchronized', gr)
651
651
652 def change_groups(self, user, groups):
652 def change_groups(self, user, groups):
653 """
653 """
654 This method changes user group assignment
654 This method changes user group assignment
655 :param user: User
655 :param user: User
656 :param groups: array of UserGroupModel
656 :param groups: array of UserGroupModel
657 """
657 """
658 user = self._get_user(user)
658 user = self._get_user(user)
659 log.debug('Changing user(%s) assignment to groups(%s)', user, groups)
659 log.debug('Changing user(%s) assignment to groups(%s)', user, groups)
660 current_groups = user.group_member
660 current_groups = user.group_member
661 current_groups = [x.users_group for x in current_groups]
661 current_groups = [x.users_group for x in current_groups]
662
662
663 # calculate from what groups user should be removed/add
663 # calculate from what groups user should be removed/add
664 groups = set(groups)
664 groups = set(groups)
665 current_groups = set(current_groups)
665 current_groups = set(current_groups)
666
666
667 groups_to_remove = current_groups - groups
667 groups_to_remove = current_groups - groups
668 groups_to_add = groups - current_groups
668 groups_to_add = groups - current_groups
669
669
670 removed_from_groups = []
670 removed_from_groups = []
671 added_to_groups = []
671 added_to_groups = []
672 for gr in groups_to_remove:
672 for gr in groups_to_remove:
673 log.debug('Removing user %s from user group %s',
673 log.debug('Removing user %s from user group %s',
674 user.username, gr.users_group_name)
674 user.username, gr.users_group_name)
675 removed_from_groups.append(gr.users_group_id)
675 removed_from_groups.append(gr.users_group_id)
676 self.remove_user_from_group(gr.users_group_name, user.username)
676 self.remove_user_from_group(gr.users_group_name, user.username)
677 for gr in groups_to_add:
677 for gr in groups_to_add:
678 log.debug('Adding user %s to user group %s',
678 log.debug('Adding user %s to user group %s',
679 user.username, gr.users_group_name)
679 user.username, gr.users_group_name)
680 added_to_groups.append(gr.users_group_id)
680 added_to_groups.append(gr.users_group_id)
681 UserGroupModel().add_user_to_group(
681 UserGroupModel().add_user_to_group(
682 gr.users_group_name, user.username)
682 gr.users_group_name, user.username)
683
683
684 return added_to_groups, removed_from_groups
684 return added_to_groups, removed_from_groups
685
685
686 def _serialize_user_group(self, user_group):
686 def _serialize_user_group(self, user_group):
687 import rhodecode.lib.helpers as h
687 import rhodecode.lib.helpers as h
688 return {
688 return {
689 'id': user_group.users_group_id,
689 'id': user_group.users_group_id,
690 # TODO: marcink figure out a way to generate the url for the
690 # TODO: marcink figure out a way to generate the url for the
691 # icon
691 # icon
692 'icon_link': '',
692 'icon_link': '',
693 'value_display': 'Group: %s (%d members)' % (
693 'value_display': 'Group: %s (%d members)' % (
694 user_group.users_group_name, len(user_group.members),),
694 user_group.users_group_name, len(user_group.members),),
695 'value': user_group.users_group_name,
695 'value': user_group.users_group_name,
696 'description': user_group.user_group_description,
696 'description': user_group.user_group_description,
697 'owner': user_group.user.username,
697 'owner': user_group.user.username,
698
698
699 'owner_icon': h.gravatar_url(user_group.user.email, 30),
699 'owner_icon': h.gravatar_url(user_group.user.email, 30),
700 'value_display_owner': h.person(user_group.user.email),
700 'value_display_owner': h.person(user_group.user.email),
701
701
702 'value_type': 'user_group',
702 'value_type': 'user_group',
703 'active': user_group.users_group_active,
703 'active': user_group.users_group_active,
704 }
704 }
705
705
706 def get_user_groups(self, name_contains=None, limit=20, only_active=True,
706 def get_user_groups(self, name_contains=None, limit=20, only_active=True,
707 expand_groups=False):
707 expand_groups=False):
708 query = self.sa.query(UserGroup)
708 query = self.sa.query(UserGroup)
709 if only_active:
709 if only_active:
710 query = query.filter(UserGroup.users_group_active == true())
710 query = query.filter(UserGroup.users_group_active == true())
711
711
712 if name_contains:
712 if name_contains:
713 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
713 ilike_expression = u'%{}%'.format(safe_str(name_contains))
714 query = query.filter(
714 query = query.filter(
715 UserGroup.users_group_name.ilike(ilike_expression))\
715 UserGroup.users_group_name.ilike(ilike_expression))\
716 .order_by(func.length(UserGroup.users_group_name))\
716 .order_by(func.length(UserGroup.users_group_name))\
717 .order_by(UserGroup.users_group_name)
717 .order_by(UserGroup.users_group_name)
718
718
719 query = query.limit(limit)
719 query = query.limit(limit)
720 user_groups = query.all()
720 user_groups = query.all()
721 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
721 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
722 user_groups = UserGroupList(user_groups, perm_set=perm_set)
722 user_groups = UserGroupList(user_groups, perm_set=perm_set)
723
723
724 # store same serialize method to extract data from User
724 # store same serialize method to extract data from User
725 from rhodecode.model.user import UserModel
725 from rhodecode.model.user import UserModel
726 serialize_user = UserModel()._serialize_user
726 serialize_user = UserModel()._serialize_user
727
727
728 _groups = []
728 _groups = []
729 for group in user_groups:
729 for group in user_groups:
730 entry = self._serialize_user_group(group)
730 entry = self._serialize_user_group(group)
731 if expand_groups:
731 if expand_groups:
732 expanded_members = []
732 expanded_members = []
733 for member in group.members:
733 for member in group.members:
734 expanded_members.append(serialize_user(member.user))
734 expanded_members.append(serialize_user(member.user))
735 entry['members'] = expanded_members
735 entry['members'] = expanded_members
736 _groups.append(entry)
736 _groups.append(entry)
737 return _groups
737 return _groups
738
738
739 @staticmethod
739 @staticmethod
740 def get_user_groups_as_dict(user_group):
740 def get_user_groups_as_dict(user_group):
741 import rhodecode.lib.helpers as h
741 import rhodecode.lib.helpers as h
742
742
743 data = {
743 data = {
744 'users_group_id': user_group.users_group_id,
744 'users_group_id': user_group.users_group_id,
745 'group_name': h.link_to_group(user_group.users_group_name),
745 'group_name': h.link_to_group(user_group.users_group_name),
746 'group_description': user_group.user_group_description,
746 'group_description': user_group.user_group_description,
747 'active': user_group.users_group_active,
747 'active': user_group.users_group_active,
748 "owner": user_group.user.username,
748 "owner": user_group.user.username,
749 'owner_icon': h.gravatar_url(user_group.user.email, 30),
749 'owner_icon': h.gravatar_url(user_group.user.email, 30),
750 "owner_data": {
750 "owner_data": {
751 'owner': user_group.user.username,
751 'owner': user_group.user.username,
752 'owner_icon': h.gravatar_url(user_group.user.email, 30)}
752 'owner_icon': h.gravatar_url(user_group.user.email, 30)}
753 }
753 }
754 return data
754 return data
General Comments 0
You need to be logged in to leave comments. Login now