models: major update for python3,...
super-admin | r5070:175fe6cb | default
@@ -1,402 +1,403 b''
1 1
2 2 # Copyright (C) 2010-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20
21 21 import itertools
22 22 import logging
23 23 import collections
24 24
25 25 from rhodecode.model import BaseModel
26 26 from rhodecode.model.db import (
27 27 ChangesetStatus, ChangesetComment, PullRequest, PullRequestReviewers, Session)
28 28 from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError
29 29 from rhodecode.lib.markup_renderer import (
30 30 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
31 31
32 32 log = logging.getLogger(__name__)
33 33
34 34
35 35 class ChangesetStatusModel(BaseModel):
36 36
37 37 cls = ChangesetStatus
38 38
39 39 def __get_changeset_status(self, changeset_status):
40 40 return self._get_instance(ChangesetStatus, changeset_status)
41 41
42 42 def __get_pull_request(self, pull_request):
43 43 return self._get_instance(PullRequest, pull_request)
44 44
45 45 def _get_status_query(self, repo, revision, pull_request,
46 46 with_revisions=False):
47 47 repo = self._get_repo(repo)
48 48
49 49 q = ChangesetStatus.query()\
50 50 .filter(ChangesetStatus.repo == repo)
51 51 if not with_revisions:
52 52 q = q.filter(ChangesetStatus.version == 0)
53 53
54 54 if revision:
55 55 q = q.filter(ChangesetStatus.revision == revision)
56 56 elif pull_request:
57 57 pull_request = self.__get_pull_request(pull_request)
58 58 # TODO: johbo: Think about the impact of this join, there must
59 59 # be a reason why ChangesetStatus and ChangesetComment are linked
60 60 # to the pull request. Might be that we want to do the same for
61 61 # the pull_request_version_id.
62 62 q = q.join(ChangesetComment).filter(
63 63 ChangesetStatus.pull_request == pull_request,
64 64 ChangesetComment.pull_request_version_id == None)
65 65 else:
66 66 raise Exception('Please specify revision or pull_request')
67 67 q = q.order_by(ChangesetStatus.version.asc())
68 68 return q
69 69
70 70 def calculate_group_vote(self, group_id, group_statuses_by_reviewers,
71 71 trim_votes=True):
72 72 """
73 73 Calculate status based on given group members, and voting rule
74 74
75 75
76 76 group1 - 4 members, 3 required for approval
77 77 user1 - approved
78 78 user2 - rejected
79 79 user3 - approved
80 80 user4 - rejected
81 81
82 82 final_state: rejected, reason: did not get at least 3 approvals
83 83
84 84
85 85 group1 - 4 members, 2 required for approval
86 86 user1 - approved
87 87 user2 - rejected
88 88 user3 - approved
89 89 user4 - rejected
90 90
91 91 final_state: approved, reason: got at least 2 approvals
92 92
93 93 group1 - 4 members, ALL required for approval
94 94 user1 - approved
95 95 user2 - rejected
96 96 user3 - approved
97 97 user4 - rejected
98 98
99 99 final_state: rejected, reason: not all members approved
100 100
101 101
102 102 group1 - 4 members, ALL required for approval
103 103 user1 - approved
104 104 user2 - approved
105 105 user3 - approved
106 106 user4 - approved
107 107
108 108 final_state: approved, reason: all approvals received
109 109
110 110 group1 - 4 members, 5 required for approval
111 111 (required approvals are trimmed to the number of actual members)
112 112
113 113 user1 - approved
114 114 user2 - approved
115 115 user3 - approved
116 116 user4 - approved
117 117
118 118 final_state: approved, reason: all approvals received
119 119
120 120 """
121 121 group_vote_data = {}
122 122 got_rule = False
123 123 members = collections.OrderedDict()
124 124 for review_obj, user, reasons, mandatory, statuses \
125 125 in group_statuses_by_reviewers:
126 126
127 127 if not got_rule:
128 128 group_vote_data = review_obj.rule_user_group_data()
129 129 got_rule = bool(group_vote_data)
130 130
131 131 members[user.user_id] = statuses
132 132
133 133 if not group_vote_data:
134 134 return []
135 135
136 136 required_votes = group_vote_data['vote_rule']
137 137 if required_votes == -1:
138 138 # -1 means all required, so we replace it with the number of
139 139 # members in the group
140 140 required_votes = len(members)
141 141
142 142 if trim_votes and required_votes > len(members):
143 143 # we require more votes than we have members in the group
144 144 # in this case we trim the required votes to the number of members
145 145 required_votes = len(members)
146 146
147 147 approvals = sum([
148 148 1 for statuses in members.values()
149 149 if statuses and
150 150 statuses[0][1].status == ChangesetStatus.STATUS_APPROVED])
151 151
152 152 calculated_votes = []
153 153 # we have all votes from users, now check if we have enough votes
154 154 # to fill in the remaining ones
155 155 fill_in = ChangesetStatus.STATUS_UNDER_REVIEW
156 156 if approvals >= required_votes:
157 157 fill_in = ChangesetStatus.STATUS_APPROVED
158 158
159 159 for member, statuses in members.items():
160 160 if statuses:
161 161 ver, latest = statuses[0]
162 162 if fill_in == ChangesetStatus.STATUS_APPROVED:
163 163 calculated_votes.append(fill_in)
164 164 else:
165 165 calculated_votes.append(latest.status)
166 166 else:
167 167 calculated_votes.append(fill_in)
168 168
169 169 return calculated_votes
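# A standalone sketch (plain dicts and strings, not the model API) of the
# voting rule documented above: required votes are trimmed to the group
# size, and once enough approvals arrive every member's vote is filled in
# as approved; otherwise each member's own latest status is kept.
def sketch_group_vote(latest_status_by_member, required_votes):
    members = list(latest_status_by_member)
    if required_votes == -1 or required_votes > len(members):
        required_votes = len(members)  # "ALL" / over-subscribed rules trim here
    approvals = sum(1 for s in latest_status_by_member.values() if s == 'approved')
    fill_in = 'approved' if approvals >= required_votes else 'under_review'
    return [fill_in if fill_in == 'approved' else latest_status_by_member[m]
            for m in members]

# 4 members, 3 required, only 2 approvals -> votes are kept as cast
assert sketch_group_vote(
    {'u1': 'approved', 'u2': 'rejected', 'u3': 'approved', 'u4': 'rejected'},
    required_votes=3) == ['approved', 'rejected', 'approved', 'rejected']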
170 170
171 171 def calculate_status(self, statuses_by_reviewers):
172 172 """
173 173 Given the approval statuses from reviewers, calculates final approval
174 174 status. There can only be 3 results: all approved, all rejected, or,
175 175 if there is no consensus, the PR stays under review.
176 176
177 177 :param statuses_by_reviewers:
178 178 """
179 179
180 180 def group_rule(element):
181 review_obj = element[0]
182 rule_data = review_obj.rule_user_group_data()
181 _review_obj = element[0]
182 rule_data = _review_obj.rule_user_group_data()
183 183 if rule_data and rule_data['id']:
184 184 return rule_data['id']
185 # don't return None, as we can't compare it during sorting
186 return 0
185 187
186 voting_groups = itertools.groupby(
187 sorted(statuses_by_reviewers, key=group_rule), group_rule)
188 voting_groups = itertools.groupby(sorted(statuses_by_reviewers, key=group_rule), group_rule)
188 189
189 190 voting_by_groups = [(x, list(y)) for x, y in voting_groups]
190 191
191 192 reviewers_number = len(statuses_by_reviewers)
192 193 votes = collections.defaultdict(int)
193 194 for group, group_statuses_by_reviewers in voting_by_groups:
194 195 if group:
195 196 # calculate how the "group" voted
196 197 for vote_status in self.calculate_group_vote(
197 198 group, group_statuses_by_reviewers):
198 199 votes[vote_status] += 1
199 200 else:
200 201
201 202 for review_obj, user, reasons, mandatory, statuses \
202 203 in group_statuses_by_reviewers:
203 204 # individual vote
204 205 if statuses:
205 206 ver, latest = statuses[0]
206 207 votes[latest.status] += 1
207 208
208 209 approved_votes_count = votes[ChangesetStatus.STATUS_APPROVED]
209 210 rejected_votes_count = votes[ChangesetStatus.STATUS_REJECTED]
210 211
211 212 # TODO(marcink): with group voting, how does rejected work,
212 213 # do we ever get rejected state ?
213 214
214 215 if approved_votes_count and (approved_votes_count == reviewers_number):
215 216 return ChangesetStatus.STATUS_APPROVED
216 217
217 218 if rejected_votes_count and (rejected_votes_count == reviewers_number):
218 219 return ChangesetStatus.STATUS_REJECTED
219 220
220 221 return ChangesetStatus.STATUS_UNDER_REVIEW
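# A hedged sketch of the consensus rule implemented above: the result is
# approved/rejected only on a unanimous vote, anything else stays under
# review. Status labels are illustrative strings, not ChangesetStatus
# constants.
import collections

def sketch_calculate_status(votes):
    n, counts = len(votes), collections.Counter(votes)
    if n and counts['approved'] == n:
        return 'approved'
    if n and counts['rejected'] == n:
        return 'rejected'
    return 'under_review'

assert sketch_calculate_status(['approved', 'approved']) == 'approved'
assert sketch_calculate_status(['approved', 'rejected']) == 'under_review'
assert sketch_calculate_status([]) == 'under_review'  # nobody voted yet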
221 222
222 223 def get_statuses(self, repo, revision=None, pull_request=None,
223 224 with_revisions=False):
224 225 q = self._get_status_query(repo, revision, pull_request,
225 226 with_revisions)
226 227 return q.all()
227 228
228 229 def get_status(self, repo, revision=None, pull_request=None, as_str=True):
229 230 """
230 231 Returns latest status of changeset for given revision or for given
231 232 pull request. Statuses are versioned inside the table itself and
232 233 version == 0 is always the current one
233 234
234 235 :param repo:
235 236 :param revision: 40char hash or None
236 237 :param pull_request: pull_request reference
237 238 :param as_str: return status as a string, not an object
238 239 """
239 240 q = self._get_status_query(repo, revision, pull_request)
240 241
241 242 # need to use first here since there can be multiple statuses
242 243 # returned from pull_request
243 244 status = q.first()
244 245 if as_str:
245 246 status = status.status if status else status
246 247 st = status or ChangesetStatus.DEFAULT
247 248 return str(st)
248 249 return status
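# Illustration of the versioning convention get_status() relies on: version
# 0 is always the current status and older entries keep growing versions,
# so filtering on version == 0 returns the latest status directly. Plain
# dicts stand in for ChangesetStatus rows here.
_history = [
    {'status': 'approved', 'version': 0},      # current status
    {'status': 'under_review', 'version': 1},  # bumped by a later change
]
current = [row for row in _history if row['version'] == 0][0]
assert current['status'] == 'approved'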
249 250
250 251 def _render_auto_status_message(
251 252 self, status, commit_id=None, pull_request=None):
252 253 """
253 254 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
254 255 so it always looks the same regardless of which default renderer
255 256 the system is using.
256 257
257 258 :param status: status text to change into
258 259 :param commit_id: the commit_id we change the status for
259 260 :param pull_request: the pull request we change the status for
260 261 """
261 262
262 263 new_status = ChangesetStatus.get_status_lbl(status)
263 264
264 265 params = {
265 266 'new_status_label': new_status,
266 267 'pull_request': pull_request,
267 268 'commit_id': commit_id,
268 269 }
269 270 renderer = RstTemplateRenderer()
270 271 return renderer.render('auto_status_change.mako', **params)
271 272
272 273 def set_status(self, repo, status, user, comment=None, revision=None,
273 274 pull_request=None, dont_allow_on_closed_pull_request=False):
274 275 """
275 276 Creates new status for changeset or updates the old ones bumping their
276 277 version, leaving the current status at version 0
277 278
278 279 :param repo:
279 280 :param revision:
280 281 :param status:
281 282 :param user:
282 283 :param comment:
283 284 :param dont_allow_on_closed_pull_request: don't allow a status change
284 285 if the last status was for a pull request and it's closed. We
285 286 shouldn't mess with this manually
286 287 """
287 288 repo = self._get_repo(repo)
288 289
289 290 q = ChangesetStatus.query()
290 291
291 292 if revision:
292 293 q = q.filter(ChangesetStatus.repo == repo)
293 294 q = q.filter(ChangesetStatus.revision == revision)
294 295 elif pull_request:
295 296 pull_request = self.__get_pull_request(pull_request)
296 297 q = q.filter(ChangesetStatus.repo == pull_request.source_repo)
297 298 q = q.filter(ChangesetStatus.revision.in_(pull_request.revisions))
298 299 cur_statuses = q.all()
299 300
300 301 # if statuses exist and the last one is associated with a closed pull request
301 302 # we need to check if we can allow this status change
302 303 if (dont_allow_on_closed_pull_request and cur_statuses
303 304 and getattr(cur_statuses[0].pull_request, 'status', '')
304 305 == PullRequest.STATUS_CLOSED):
305 306 raise StatusChangeOnClosedPullRequestError(
306 307 'Changing status on closed pull request is not allowed'
307 308 )
308 309
309 310 # update all current statuses with older version
310 311 if cur_statuses:
311 312 for st in cur_statuses:
312 313 st.version += 1
313 314 Session().add(st)
314 315 Session().flush()
315 316
316 317 def _create_status(user, repo, status, comment, revision, pull_request):
317 318 new_status = ChangesetStatus()
318 319 new_status.author = self._get_user(user)
319 320 new_status.repo = self._get_repo(repo)
320 321 new_status.status = status
321 322 new_status.comment = comment
322 323 new_status.revision = revision
323 324 new_status.pull_request = pull_request
324 325 return new_status
325 326
326 327 if not comment:
327 328 from rhodecode.model.comment import CommentsModel
328 329 comment = CommentsModel().create(
329 330 text=self._render_auto_status_message(
330 331 status, commit_id=revision, pull_request=pull_request),
331 332 repo=repo,
332 333 user=user,
333 334 pull_request=pull_request,
334 335 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER
335 336 )
336 337
337 338 if revision:
338 339 new_status = _create_status(
339 340 user=user, repo=repo, status=status, comment=comment,
340 341 revision=revision, pull_request=pull_request)
341 342 Session().add(new_status)
342 343 return new_status
343 344 elif pull_request:
344 345 # pull request can have more than one revision associated with it
345 346 # we need to create new version for each one
346 347 new_statuses = []
347 348 repo = pull_request.source_repo
348 349 for rev in pull_request.revisions:
349 350 new_status = _create_status(
350 351 user=user, repo=repo, status=status, comment=comment,
351 352 revision=rev, pull_request=pull_request)
352 353 new_statuses.append(new_status)
353 354 Session().add(new_status)
354 355 return new_statuses
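# A hedged sketch of the version bump performed by set_status(): every
# existing status gets its version incremented and the new status is
# inserted at version 0, so full history is preserved. Dicts stand in for
# ChangesetStatus rows.
def sketch_set_status(history, new_status):
    for row in history:
        row['version'] += 1  # mirrors "st.version += 1" above
    history.insert(0, {'status': new_status, 'version': 0})
    return history

h = sketch_set_status([{'status': 'under_review', 'version': 0}], 'approved')
assert h[0] == {'status': 'approved', 'version': 0}
assert h[1] == {'status': 'under_review', 'version': 1}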
355 356
356 357 def aggregate_votes_by_user(self, commit_statuses, reviewers_data, user=None):
357 358
358 359 commit_statuses_map = collections.defaultdict(list)
359 360 for st in commit_statuses:
360 361 commit_statuses_map[st.author.username] += [st]
361 362
362 363 reviewers = []
363 364
364 365 def version(commit_status):
365 366 return commit_status.version
366 367
367 368 for obj in reviewers_data:
368 369 if not obj.user:
369 370 continue
370 371 if user and obj.user.username != user.username:
371 372 # single user filter
372 373 continue
373 374
374 375 statuses = commit_statuses_map.get(obj.user.username, None)
375 376 if statuses:
376 377 status_groups = itertools.groupby(
377 378 sorted(statuses, key=version), version)
378 379 statuses = [(x, list(y)[0]) for x, y in status_groups]
379 380
380 381 reviewers.append((obj, obj.user, obj.reasons, obj.mandatory, statuses))
381 382
382 383 if user:
383 384 return reviewers[0] if reviewers else reviewers
384 385 else:
385 386 return reviewers
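# Sketch of the groupby step used above: statuses are sorted by version and
# only the first entry per version group is kept, so index 0 holds the
# current (version 0) status. (version, label) tuples stand in for
# ChangesetStatus objects.
import itertools

statuses = [(1, 'under_review'), (0, 'approved'), (0, 'approved')]
status_groups = itertools.groupby(
    sorted(statuses, key=lambda s: s[0]), lambda s: s[0])
latest_per_version = [(ver, list(items)[0]) for ver, items in status_groups]
assert latest_per_version == [(0, (0, 'approved')), (1, (1, 'under_review'))]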
386 387
387 388 def reviewers_statuses(self, pull_request, user=None):
388 389 _commit_statuses = self.get_statuses(
389 390 pull_request.source_repo,
390 391 pull_request=pull_request,
391 392 with_revisions=True)
392 393 reviewers = pull_request.get_pull_request_reviewers(
393 394 role=PullRequestReviewers.ROLE_REVIEWER)
394 395 return self.aggregate_votes_by_user(_commit_statuses, reviewers, user=user)
395 396
396 397 def calculated_review_status(self, pull_request):
397 398 """
398 399 calculate pull request status based on reviewers; takes the list of
399 400 reviewer tuples produced by reviewers_statuses().
400 401 """
401 402 reviewers = self.reviewers_statuses(pull_request)
402 403 return self.calculate_status(reviewers)
@@ -1,857 +1,854 b''
1 1
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 comments model for RhodeCode
23 23 """
24 24 import datetime
25 25
26 26 import logging
27 27 import traceback
28 28 import collections
29 29
30 30 from pyramid.threadlocal import get_current_registry, get_current_request
31 31 from sqlalchemy.sql.expression import null
32 32 from sqlalchemy.sql.functions import coalesce
33 33
34 34 from rhodecode.lib import helpers as h, diffs, channelstream, hooks_utils
35 35 from rhodecode.lib import audit_logger
36 36 from rhodecode.lib.exceptions import CommentVersionMismatch
37 37 from rhodecode.lib.utils2 import extract_mentioned_users, safe_str, safe_int
38 38 from rhodecode.model import BaseModel
39 39 from rhodecode.model.db import (
40 40 false, true,
41 41 ChangesetComment,
42 42 User,
43 43 Notification,
44 44 PullRequest,
45 45 AttributeDict,
46 46 ChangesetCommentHistory,
47 47 )
48 48 from rhodecode.model.notification import NotificationModel
49 49 from rhodecode.model.meta import Session
50 50 from rhodecode.model.settings import VcsSettingsModel
51 51 from rhodecode.model.notification import EmailNotificationModel
52 52 from rhodecode.model.validation_schema.schemas import comment_schema
53 53
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 class CommentsModel(BaseModel):
59 59
60 60 cls = ChangesetComment
61 61
62 62 DIFF_CONTEXT_BEFORE = 3
63 63 DIFF_CONTEXT_AFTER = 3
64 64
65 65 def __get_commit_comment(self, changeset_comment):
66 66 return self._get_instance(ChangesetComment, changeset_comment)
67 67
68 68 def __get_pull_request(self, pull_request):
69 69 return self._get_instance(PullRequest, pull_request)
70 70
71 71 def _extract_mentions(self, s):
72 72 user_objects = []
73 73 for username in extract_mentioned_users(s):
74 74 user_obj = User.get_by_username(username, case_insensitive=True)
75 75 if user_obj:
76 76 user_objects.append(user_obj)
77 77 return user_objects
78 78
79 79 def _get_renderer(self, global_renderer='rst', request=None):
80 80 request = request or get_current_request()
81 81
82 82 try:
83 83 global_renderer = request.call_context.visual.default_renderer
84 84 except AttributeError:
85 85 log.debug("Renderer not set, falling back "
86 86 "to default renderer '%s'", global_renderer)
87 87 except Exception:
88 88 log.error(traceback.format_exc())
89 89 return global_renderer
90 90
91 91 def aggregate_comments(self, comments, versions, show_version, inline=False):
92 92 # group comments by version; track 'at', cumulative 'until', and 'display' objects
93 93
94 94 comment_groups = collections.defaultdict(list)
95 95 for _co in comments: comment_groups[_co.pull_request_version_id].append(_co)
96 96
97 97 def yield_comments(pos):
98 98 for co in comment_groups[pos]:
99 99 yield co
100 100
101 101 comment_versions = collections.defaultdict(
102 102 lambda: collections.defaultdict(list))
103 103 prev_prvid = -1
104 104 # fake last entry with None, to aggregate on "latest" version which
105 105 # doesn't have a pull_request_version_id
106 106 for ver in versions + [AttributeDict({'pull_request_version_id': None})]:
107 107 prvid = ver.pull_request_version_id
108 108 if prev_prvid == -1:
109 109 prev_prvid = prvid
110 110
111 111 for co in yield_comments(prvid):
112 112 comment_versions[prvid]['at'].append(co)
113 113
114 114 # save until
115 115 current = comment_versions[prvid]['at']
116 116 prev_until = comment_versions[prev_prvid]['until']
117 117 cur_until = prev_until + current
118 118 comment_versions[prvid]['until'].extend(cur_until)
119 119
120 120 # save outdated
121 121 if inline:
122 122 outdated = [x for x in cur_until
123 123 if x.outdated_at_version(show_version)]
124 124 else:
125 125 outdated = [x for x in cur_until
126 126 if x.older_than_version(show_version)]
127 127 display = [x for x in cur_until if x not in outdated]
128 128
129 129 comment_versions[prvid]['outdated'] = outdated
130 130 comment_versions[prvid]['display'] = display
131 131
132 132 prev_prvid = prvid
133 133
134 134 return comment_versions
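# A hedged sketch of the 'at'/'until' aggregation above: each comment lands
# in the bucket of the version it was made at, while 'until' accumulates
# everything made up to and including that version; None stands for the
# latest version, which has no pull_request_version_id.
import collections

comments_at = {1: ['c1'], 2: ['c2', 'c3'], None: ['c4']}
until, cumulative = [], collections.OrderedDict()
for version in [1, 2, None]:
    until = until + comments_at.get(version, [])
    cumulative[version] = list(until)
assert cumulative[2] == ['c1', 'c2', 'c3']
assert cumulative[None] == ['c1', 'c2', 'c3', 'c4']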
135 135
136 136 def get_repository_comments(self, repo, comment_type=None, user=None, commit_id=None):
137 137 qry = Session().query(ChangesetComment) \
138 138 .filter(ChangesetComment.repo == repo)
139 139
140 140 if comment_type and comment_type in ChangesetComment.COMMENT_TYPES:
141 141 qry = qry.filter(ChangesetComment.comment_type == comment_type)
142 142
143 143 if user:
144 144 user = self._get_user(user)
145 145 if user:
146 146 qry = qry.filter(ChangesetComment.user_id == user.user_id)
147 147
148 148 if commit_id:
149 149 qry = qry.filter(ChangesetComment.revision == commit_id)
150 150
151 151 qry = qry.order_by(ChangesetComment.created_on)
152 152 return qry.all()
153 153
154 154 def get_repository_unresolved_todos(self, repo):
155 155 todos = Session().query(ChangesetComment) \
156 156 .filter(ChangesetComment.repo == repo) \
157 157 .filter(ChangesetComment.resolved_by == None) \
158 158 .filter(ChangesetComment.comment_type
159 159 == ChangesetComment.COMMENT_TYPE_TODO)
160 160 todos = todos.all()
161 161
162 162 return todos
163 163
164 164 def get_pull_request_unresolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
165 165
166 166 todos = Session().query(ChangesetComment) \
167 167 .filter(ChangesetComment.pull_request == pull_request) \
168 168 .filter(ChangesetComment.resolved_by == None) \
169 169 .filter(ChangesetComment.comment_type
170 170 == ChangesetComment.COMMENT_TYPE_TODO)
171 171
172 172 if not include_drafts:
173 173 todos = todos.filter(ChangesetComment.draft == false())
174 174
175 175 if not show_outdated:
176 176 todos = todos.filter(
177 177 coalesce(ChangesetComment.display_state, '') !=
178 178 ChangesetComment.COMMENT_OUTDATED)
179 179
180 180 todos = todos.all()
181 181
182 182 return todos
183 183
184 184 def get_pull_request_resolved_todos(self, pull_request, show_outdated=True, include_drafts=True):
185 185
186 186 todos = Session().query(ChangesetComment) \
187 187 .filter(ChangesetComment.pull_request == pull_request) \
188 188 .filter(ChangesetComment.resolved_by != None) \
189 189 .filter(ChangesetComment.comment_type
190 190 == ChangesetComment.COMMENT_TYPE_TODO)
191 191
192 192 if not include_drafts:
193 193 todos = todos.filter(ChangesetComment.draft == false())
194 194
195 195 if not show_outdated:
196 196 todos = todos.filter(
197 197 coalesce(ChangesetComment.display_state, '') !=
198 198 ChangesetComment.COMMENT_OUTDATED)
199 199
200 200 todos = todos.all()
201 201
202 202 return todos
203 203
204 204 def get_pull_request_drafts(self, user_id, pull_request):
205 205 drafts = Session().query(ChangesetComment) \
206 206 .filter(ChangesetComment.pull_request == pull_request) \
207 207 .filter(ChangesetComment.user_id == user_id) \
208 208 .filter(ChangesetComment.draft == true())
209 209 return drafts.all()
210 210
211 211 def get_commit_unresolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
212 212
213 213 todos = Session().query(ChangesetComment) \
214 214 .filter(ChangesetComment.revision == commit_id) \
215 215 .filter(ChangesetComment.resolved_by == None) \
216 216 .filter(ChangesetComment.comment_type
217 217 == ChangesetComment.COMMENT_TYPE_TODO)
218 218
219 219 if not include_drafts:
220 220 todos = todos.filter(ChangesetComment.draft == false())
221 221
222 222 if not show_outdated:
223 223 todos = todos.filter(
224 224 coalesce(ChangesetComment.display_state, '') !=
225 225 ChangesetComment.COMMENT_OUTDATED)
226 226
227 227 todos = todos.all()
228 228
229 229 return todos
230 230
231 231 def get_commit_resolved_todos(self, commit_id, show_outdated=True, include_drafts=True):
232 232
233 233 todos = Session().query(ChangesetComment) \
234 234 .filter(ChangesetComment.revision == commit_id) \
235 235 .filter(ChangesetComment.resolved_by != None) \
236 236 .filter(ChangesetComment.comment_type
237 237 == ChangesetComment.COMMENT_TYPE_TODO)
238 238
239 239 if not include_drafts:
240 240 todos = todos.filter(ChangesetComment.draft == false())
241 241
242 242 if not show_outdated:
243 243 todos = todos.filter(
244 244 coalesce(ChangesetComment.display_state, '') !=
245 245 ChangesetComment.COMMENT_OUTDATED)
246 246
247 247 todos = todos.all()
248 248
249 249 return todos
250 250
251 251 def get_commit_inline_comments(self, commit_id, include_drafts=True):
252 252 inline_comments = Session().query(ChangesetComment) \
253 253 .filter(ChangesetComment.line_no != None) \
254 254 .filter(ChangesetComment.f_path != None) \
255 255 .filter(ChangesetComment.revision == commit_id)
256 256
257 257 if not include_drafts:
258 258 inline_comments = inline_comments.filter(ChangesetComment.draft == false())
259 259
260 260 inline_comments = inline_comments.all()
261 261 return inline_comments
262 262
263 263 def _log_audit_action(self, action, action_data, auth_user, comment):
264 264 audit_logger.store(
265 265 action=action,
266 266 action_data=action_data,
267 267 user=auth_user,
268 268 repo=comment.repo)
269 269
270 270 def create(self, text, repo, user, commit_id=None, pull_request=None,
271 271 f_path=None, line_no=None, status_change=None,
272 272 status_change_type=None, comment_type=None, is_draft=False,
273 273 resolves_comment_id=None, closing_pr=False, send_email=True,
274 274 renderer=None, auth_user=None, extra_recipients=None):
275 275 """
276 276 Creates new comment for commit or pull request.
277 277 If status_change is not None, this comment is associated with a
278 278 status change of a commit, or of a commit associated with a pull request
279 279
280 280 :param text:
281 281 :param repo:
282 282 :param user:
283 283 :param commit_id:
284 284 :param pull_request:
285 285 :param f_path:
286 286 :param line_no:
287 287 :param status_change: Label for status change
288 288 :param comment_type: Type of comment
289 289 :param is_draft: is comment a draft only
290 290 :param resolves_comment_id: id of comment which this one will resolve
291 291 :param status_change_type: type of status change
292 292 :param closing_pr:
293 293 :param send_email:
294 294 :param renderer: pick renderer for this comment
295 295 :param auth_user: current authenticated user calling this method
296 296 :param extra_recipients: list of extra users to be added to recipients
297 297 """
298 298
299 if not text:
300 log.warning('Missing text for comment, skipping...')
301 return
302 299 request = get_current_request()
303 300 _ = request.translate
304 301
305 302 if not renderer:
306 303 renderer = self._get_renderer(request=request)
307 304
308 305 repo = self._get_repo(repo)
309 306 user = self._get_user(user)
310 307 auth_user = auth_user or user
311 308
312 309 schema = comment_schema.CommentSchema()
313 310 validated_kwargs = schema.deserialize(dict(
314 311 comment_body=text,
315 312 comment_type=comment_type,
316 313 is_draft=is_draft,
317 314 comment_file=f_path,
318 315 comment_line=line_no,
319 316 renderer_type=renderer,
320 317 status_change=status_change_type,
321 318 resolves_comment_id=resolves_comment_id,
322 319 repo=repo.repo_id,
323 320 user=user.user_id,
324 321 ))
325 322 is_draft = validated_kwargs['is_draft']
326 323
327 324 comment = ChangesetComment()
328 325 comment.renderer = validated_kwargs['renderer_type']
329 326 comment.text = validated_kwargs['comment_body']
330 327 comment.f_path = validated_kwargs['comment_file']
331 328 comment.line_no = validated_kwargs['comment_line']
332 329 comment.comment_type = validated_kwargs['comment_type']
333 330 comment.draft = is_draft
334 331
335 332 comment.repo = repo
336 333 comment.author = user
337 334 resolved_comment = self.__get_commit_comment(
338 335 validated_kwargs['resolves_comment_id'])
339 336
340 337 # check if the comment actually belongs to this PR
341 338 if resolved_comment and resolved_comment.pull_request and \
342 339 resolved_comment.pull_request != pull_request:
343 340 log.warning('Comment tried to resolve an unrelated todo comment: %s',
344 341 resolved_comment)
345 342 # comment not bound to this pull request, forbid
346 343 resolved_comment = None
347 344
348 345 elif resolved_comment and resolved_comment.repo and \
349 346 resolved_comment.repo != repo:
350 347 log.warning('Comment tried to resolve an unrelated todo comment: %s',
351 348 resolved_comment)
352 349 # comment not bound to this repo, forbid
353 350 resolved_comment = None
354 351
355 352 if resolved_comment and resolved_comment.resolved_by:
356 353 # if this comment is already resolved, don't mark it again!
357 354 resolved_comment = None
358 355
359 356 comment.resolved_comment = resolved_comment
360 357
361 358 pull_request_id = pull_request
362 359
363 360 commit_obj = None
364 361 pull_request_obj = None
365 362
366 363 if commit_id:
367 364 notification_type = EmailNotificationModel.TYPE_COMMIT_COMMENT
368 365 # do a lookup, so we don't pass something bad here
369 366 commit_obj = repo.scm_instance().get_commit(commit_id=commit_id)
370 367 comment.revision = commit_obj.raw_id
371 368
372 369 elif pull_request_id:
373 370 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST_COMMENT
374 371 pull_request_obj = self.__get_pull_request(pull_request_id)
375 372 comment.pull_request = pull_request_obj
376 373 else:
377 374 raise Exception('Please specify commit or pull_request_id')
378 375
379 376 Session().add(comment)
380 377 Session().flush()
381 378 kwargs = {
382 379 'user': user,
383 380 'renderer_type': renderer,
384 381 'repo_name': repo.repo_name,
385 382 'status_change': status_change,
386 383 'status_change_type': status_change_type,
387 384 'comment_body': text,
388 385 'comment_file': f_path,
389 386 'comment_line': line_no,
390 387 'comment_type': comment_type or 'note',
391 388 'comment_id': comment.comment_id
392 389 }
393 390
394 391 if commit_obj:
395 392 recipients = ChangesetComment.get_users(
396 393 revision=commit_obj.raw_id)
397 394 # add commit author if it's in RhodeCode system
398 395 cs_author = User.get_from_cs_author(commit_obj.author)
399 396 if not cs_author:
400 397 # use repo owner if we cannot extract the author correctly
401 398 cs_author = repo.user
402 399 recipients += [cs_author]
403 400
404 401 commit_comment_url = self.get_url(comment, request=request)
405 402 commit_comment_reply_url = self.get_url(
406 403 comment, request=request,
407 404 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
408 405
409 406 target_repo_url = h.link_to(
410 407 repo.repo_name,
411 408 h.route_url('repo_summary', repo_name=repo.repo_name))
412 409
413 410 commit_url = h.route_url('repo_commit', repo_name=repo.repo_name,
414 411 commit_id=commit_id)
415 412
416 413 # commit specifics
417 414 kwargs.update({
418 415 'commit': commit_obj,
419 416 'commit_message': commit_obj.message,
420 417 'commit_target_repo_url': target_repo_url,
421 418 'commit_comment_url': commit_comment_url,
422 419 'commit_comment_reply_url': commit_comment_reply_url,
423 420 'commit_url': commit_url,
424 421 'thread_ids': [commit_url, commit_comment_url],
425 422 })
426 423
427 424 elif pull_request_obj:
428 425 # get the current participants of this pull request
429 426 recipients = ChangesetComment.get_users(
430 427 pull_request_id=pull_request_obj.pull_request_id)
431 428 # add pull request author
432 429 recipients += [pull_request_obj.author]
433 430
434 431 # add the reviewers to notification
435 432 recipients += [x.user for x in pull_request_obj.get_pull_request_reviewers()]
436 433
437 434 pr_target_repo = pull_request_obj.target_repo
438 435 pr_source_repo = pull_request_obj.source_repo
439 436
440 437 pr_comment_url = self.get_url(comment, request=request)
441 438 pr_comment_reply_url = self.get_url(
442 439 comment, request=request,
443 440 anchor='comment-{}/?/ReplyToComment'.format(comment.comment_id))
444 441
445 442 pr_url = h.route_url(
446 443 'pullrequest_show',
447 444 repo_name=pr_target_repo.repo_name,
448 445 pull_request_id=pull_request_obj.pull_request_id, )
449 446
450 447 # set some variables for email notification
451 448 pr_target_repo_url = h.route_url(
452 449 'repo_summary', repo_name=pr_target_repo.repo_name)
453 450
454 451 pr_source_repo_url = h.route_url(
455 452 'repo_summary', repo_name=pr_source_repo.repo_name)
456 453
457 454 # pull request specifics
458 455 kwargs.update({
459 456 'pull_request': pull_request_obj,
460 457 'pr_id': pull_request_obj.pull_request_id,
461 458 'pull_request_url': pr_url,
462 459 'pull_request_target_repo': pr_target_repo,
463 460 'pull_request_target_repo_url': pr_target_repo_url,
464 461 'pull_request_source_repo': pr_source_repo,
465 462 'pull_request_source_repo_url': pr_source_repo_url,
466 463 'pr_comment_url': pr_comment_url,
467 464 'pr_comment_reply_url': pr_comment_reply_url,
468 465 'pr_closing': closing_pr,
469 466 'thread_ids': [pr_url, pr_comment_url],
470 467 })
471 468
472 469 if send_email:
473 470 recipients += [self._get_user(u) for u in (extra_recipients or [])]
474 471
475 472 mention_recipients = set(
476 473 self._extract_mentions(text)).difference(recipients)
477 474
478 475 # create notification objects, and emails
479 476 NotificationModel().create(
480 477 created_by=user,
481 478 notification_subject='', # Filled in based on the notification_type
482 479 notification_body='', # Filled in based on the notification_type
483 480 notification_type=notification_type,
484 481 recipients=recipients,
485 482 mention_recipients=mention_recipients,
486 483 email_kwargs=kwargs,
487 484 )
488 485
489 486 Session().flush()
490 487 if comment.pull_request:
491 488 action = 'repo.pull_request.comment.create'
492 489 else:
493 490 action = 'repo.commit.comment.create'
494 491
495 492 if not is_draft:
496 493 comment_data = comment.get_api_data()
497 494
498 495 self._log_audit_action(
499 496 action, {'data': comment_data}, auth_user, comment)
500 497
501 498 return comment
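# A hedged usage sketch of create(), assuming an active request/session as
# set up by the web layer; 'my-repo' and 'admin' are placeholder values for
# anything accepted by _get_repo()/_get_user():
#
#   comment = CommentsModel().create(
#       text='Looks good to me',
#       repo='my-repo', user='admin',
#       commit_id=commit_hash,  # a full 40-char commit hash
#       comment_type='note', send_email=False)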
502 499
503 500 def edit(self, comment_id, text, auth_user, version):
504 501 """
505 502 Change existing comment for commit or pull request.
506 503
507 504 :param comment_id:
508 505 :param text:
509 506 :param auth_user: current authenticated user calling this method
510 507 :param version: last comment version
511 508 """
512 509 if not text:
513 510 log.warning('Missing text for comment, skipping...')
514 511 return
515 512
516 513 comment = ChangesetComment.get(comment_id)
517 514 old_comment_text = comment.text
518 515 comment.text = text
519 516 comment.modified_at = datetime.datetime.now()
520 517 version = safe_int(version)
521 518
522 519 # NOTE(marcink): this returns initial comment + edits, so v2 from ui
523 520 # would return 3 here
524 521 comment_version = ChangesetCommentHistory.get_version(comment_id)
525 522
526 523 if isinstance(version, int) and (comment_version - version) != 1:
527 524 log.warning(
528 525 'Version mismatch comment_version {} submitted {}, skipping'.format(
529 526 comment_version-1, # -1 since note above
530 527 version
531 528 )
532 529 )
533 530 raise CommentVersionMismatch()
534 531
535 532 comment_history = ChangesetCommentHistory()
536 533 comment_history.comment_id = comment_id
537 534 comment_history.version = comment_version
538 535 comment_history.created_by_user_id = auth_user.user_id
539 536 comment_history.text = old_comment_text
540 537 # TODO add email notification
541 538 Session().add(comment_history)
542 539 Session().add(comment)
543 540 Session().flush()
544 541
545 542 if comment.pull_request:
546 543 action = 'repo.pull_request.comment.edit'
547 544 else:
548 545 action = 'repo.commit.comment.edit'
549 546
550 547 comment_data = comment.get_api_data()
551 548 comment_data['old_comment_text'] = old_comment_text
552 549 self._log_audit_action(
553 550 action, {'data': comment_data}, auth_user, comment)
554 551
555 552 return comment_history
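# Sketch of the optimistic-lock check in edit(): the history count already
# includes the initial comment plus all edits, so the version last seen by
# the UI must be exactly one behind the stored count, otherwise
# CommentVersionMismatch is raised.
def sketch_version_ok(stored_comment_version, submitted_version):
    return (stored_comment_version - submitted_version) == 1

assert sketch_version_ok(stored_comment_version=3, submitted_version=2)
assert not sketch_version_ok(stored_comment_version=3, submitted_version=1)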
556 553
557 554 def delete(self, comment, auth_user):
558 555 """
559 556 Deletes given comment
560 557 """
561 558 comment = self.__get_commit_comment(comment)
562 559 old_data = comment.get_api_data()
563 560 Session().delete(comment)
564 561
565 562 if comment.pull_request:
566 563 action = 'repo.pull_request.comment.delete'
567 564 else:
568 565 action = 'repo.commit.comment.delete'
569 566
570 567 self._log_audit_action(
571 568 action, {'old_data': old_data}, auth_user, comment)
572 569
573 570 return comment
574 571
575 572 def get_all_comments(self, repo_id, revision=None, pull_request=None,
576 573 include_drafts=True, count_only=False):
577 574 q = ChangesetComment.query()\
578 575 .filter(ChangesetComment.repo_id == repo_id)
579 576 if revision:
580 577 q = q.filter(ChangesetComment.revision == revision)
581 578 elif pull_request:
582 579 pull_request = self.__get_pull_request(pull_request)
583 580 q = q.filter(ChangesetComment.pull_request_id == pull_request.pull_request_id)
584 581 else:
585 582 raise Exception('Please specify commit or pull_request')
586 583 if not include_drafts:
587 584 q = q.filter(ChangesetComment.draft == false())
588 585 q = q.order_by(ChangesetComment.created_on)
589 586 if count_only:
590 587 return q.count()
591 588
592 589 return q.all()
593 590
594 591 def get_url(self, comment, request=None, permalink=False, anchor=None):
595 592 if not request:
596 593 request = get_current_request()
597 594
598 595 comment = self.__get_commit_comment(comment)
599 596 if anchor is None:
600 597 anchor = 'comment-{}'.format(comment.comment_id)
601 598
602 599 if comment.pull_request:
603 600 pull_request = comment.pull_request
604 601 if permalink:
605 602 return request.route_url(
606 603 'pull_requests_global',
607 604 pull_request_id=pull_request.pull_request_id,
608 605 _anchor=anchor)
609 606 else:
610 607 return request.route_url(
611 608 'pullrequest_show',
612 609 repo_name=safe_str(pull_request.target_repo.repo_name),
613 610 pull_request_id=pull_request.pull_request_id,
614 611 _anchor=anchor)
615 612
616 613 else:
617 614 repo = comment.repo
618 615 commit_id = comment.revision
619 616
620 617 if permalink:
621 618 return request.route_url(
622 619 'repo_commit', repo_name=safe_str(repo.repo_id),
623 620 commit_id=commit_id,
624 621 _anchor=anchor)
625 622
626 623 else:
627 624 return request.route_url(
628 625 'repo_commit', repo_name=safe_str(repo.repo_name),
629 626 commit_id=commit_id,
630 627 _anchor=anchor)
631 628
632 629 def get_comments(self, repo_id, revision=None, pull_request=None):
633 630 """
634 631 Gets main comments based on revision or pull_request_id
635 632
636 633 :param repo_id:
637 634 :param revision:
638 635 :param pull_request:
639 636 """
640 637
641 638 q = ChangesetComment.query()\
642 639 .filter(ChangesetComment.repo_id == repo_id)\
643 640 .filter(ChangesetComment.line_no == None)\
644 641 .filter(ChangesetComment.f_path == None)
645 642 if revision:
646 643 q = q.filter(ChangesetComment.revision == revision)
647 644 elif pull_request:
648 645 pull_request = self.__get_pull_request(pull_request)
649 646 q = q.filter(ChangesetComment.pull_request == pull_request)
650 647 else:
651 648 raise Exception('Please specify commit or pull_request')
652 649 q = q.order_by(ChangesetComment.created_on)
653 650 return q.all()
654 651
655 652 def get_inline_comments(self, repo_id, revision=None, pull_request=None):
656 653 q = self._get_inline_comments_query(repo_id, revision, pull_request)
657 654 return self._group_comments_by_path_and_line_number(q)
658 655
659 656 def get_inline_comments_as_list(self, inline_comments, skip_outdated=True,
660 657 version=None):
661 658 inline_comms = []
662 659 for fname, per_line_comments in inline_comments.items():
663 660 for lno, comments in per_line_comments.items():
664 661 for comm in comments:
665 662 if not comm.outdated_at_version(version) and skip_outdated:
666 663 inline_comms.append(comm)
667 664
668 665 return inline_comms
669 666
670 667 def get_outdated_comments(self, repo_id, pull_request):
671 668 # TODO: johbo: Remove `repo_id`, it is not needed to find the comments
672 669 # of a pull request.
673 670 q = self._all_inline_comments_of_pull_request(pull_request)
674 671 q = q.filter(
675 672 ChangesetComment.display_state ==
676 673 ChangesetComment.COMMENT_OUTDATED
677 674 ).order_by(ChangesetComment.comment_id.asc())
678 675
679 676 return self._group_comments_by_path_and_line_number(q)
680 677
681 678 def _get_inline_comments_query(self, repo_id, revision, pull_request):
682 679 # TODO: johbo: Split this into two methods: One for PR and one for
683 680 # commit.
684 681 if revision:
685 682 q = Session().query(ChangesetComment).filter(
686 683 ChangesetComment.repo_id == repo_id,
687 684 ChangesetComment.line_no != null(),
688 685 ChangesetComment.f_path != null(),
689 686 ChangesetComment.revision == revision)
690 687
691 688 elif pull_request:
692 689 pull_request = self.__get_pull_request(pull_request)
693 690 if not CommentsModel.use_outdated_comments(pull_request):
694 691 q = self._visible_inline_comments_of_pull_request(pull_request)
695 692 else:
696 693 q = self._all_inline_comments_of_pull_request(pull_request)
697 694
698 695 else:
699 696 raise Exception('Please specify commit or pull_request_id')
700 697 q = q.order_by(ChangesetComment.comment_id.asc())
701 698 return q
702 699
703 700 def _group_comments_by_path_and_line_number(self, q):
704 701 comments = q.all()
705 702 paths = collections.defaultdict(lambda: collections.defaultdict(list))
706 703 for co in comments:
707 704 paths[co.f_path][co.line_no].append(co)
708 705 return paths
709 706
710 707 @classmethod
711 708 def needed_extra_diff_context(cls):
712 709 return max(cls.DIFF_CONTEXT_BEFORE, cls.DIFF_CONTEXT_AFTER)
713 710
714 711 def outdate_comments(self, pull_request, old_diff_data, new_diff_data):
715 712 if not CommentsModel.use_outdated_comments(pull_request):
716 713 return
717 714
718 715 comments = self._visible_inline_comments_of_pull_request(pull_request)
719 716 comments_to_outdate = comments.all()
720 717
721 718 for comment in comments_to_outdate:
722 719 self._outdate_one_comment(comment, old_diff_data, new_diff_data)
723 720
724 721 def _outdate_one_comment(self, comment, old_diff_proc, new_diff_proc):
725 722 diff_line = _parse_comment_line_number(comment.line_no)
726 723
727 724 try:
728 725 old_context = old_diff_proc.get_context_of_line(
729 726 path=comment.f_path, diff_line=diff_line)
730 727 new_context = new_diff_proc.get_context_of_line(
731 728 path=comment.f_path, diff_line=diff_line)
732 729 except (diffs.LineNotInDiffException,
733 730 diffs.FileNotInDiffException):
734 731 if not comment.draft:
735 732 comment.display_state = ChangesetComment.COMMENT_OUTDATED
736 733 return
737 734
738 735 if old_context == new_context:
739 736 return
740 737
741 738 if self._should_relocate_diff_line(diff_line):
742 739 new_diff_lines = new_diff_proc.find_context(
743 740 path=comment.f_path, context=old_context,
744 741 offset=self.DIFF_CONTEXT_BEFORE)
745 742 if not new_diff_lines and not comment.draft:
746 743 comment.display_state = ChangesetComment.COMMENT_OUTDATED
747 744 else:
748 745 new_diff_line = self._choose_closest_diff_line(
749 746 diff_line, new_diff_lines)
750 747 comment.line_no = _diff_to_comment_line_number(new_diff_line)
751 748 else:
752 749 if not comment.draft:
753 750 comment.display_state = ChangesetComment.COMMENT_OUTDATED
754 751
755 752 def _should_relocate_diff_line(self, diff_line):
756 753 """
757 754 Checks if relocation shall be tried for the given `diff_line`.
758 755
759 756 If a comment points into the first lines, then we can have a situation
760 757 where, after an update, another line has been added on top. In this
761 758 case we would still find the context and move the comment around. This
762 759 would be wrong.
763 760 """
764 761 should_relocate = (
765 762 (diff_line.new and diff_line.new > self.DIFF_CONTEXT_BEFORE) or
766 763 (diff_line.old and diff_line.old > self.DIFF_CONTEXT_BEFORE))
767 764 return should_relocate
768 765
769 766 def _choose_closest_diff_line(self, diff_line, new_diff_lines):
770 767 candidate = new_diff_lines[0]
771 768 best_delta = _diff_line_delta(diff_line, candidate)
772 769 for new_diff_line in new_diff_lines[1:]:
773 770 delta = _diff_line_delta(diff_line, new_diff_line)
774 771 if delta < best_delta:
775 772 candidate = new_diff_line
776 773 best_delta = delta
777 774 return candidate
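# Sketch of the relocation choice above: among candidate context matches in
# the new diff, the line numerically closest to the old comment line wins.
# A namedtuple stands in for diffs.DiffLineNumber here.
import collections

Line = collections.namedtuple('Line', 'old new')

def sketch_closest(target, candidates):
    return min(candidates, key=lambda c: abs(c.new - target.new))

assert sketch_closest(
    Line(old=None, new=14),
    [Line(None, 3), Line(None, 12), Line(None, 40)]).new == 12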
778 775
779 776 def _visible_inline_comments_of_pull_request(self, pull_request):
780 777 comments = self._all_inline_comments_of_pull_request(pull_request)
781 778 comments = comments.filter(
782 779 coalesce(ChangesetComment.display_state, '') !=
783 780 ChangesetComment.COMMENT_OUTDATED)
784 781 return comments
785 782
786 783 def _all_inline_comments_of_pull_request(self, pull_request):
787 784 comments = Session().query(ChangesetComment)\
788 785 .filter(ChangesetComment.line_no != None)\
789 786 .filter(ChangesetComment.f_path != None)\
790 787 .filter(ChangesetComment.pull_request == pull_request)
791 788 return comments
792 789
793 790 def _all_general_comments_of_pull_request(self, pull_request):
794 791 comments = Session().query(ChangesetComment)\
795 792 .filter(ChangesetComment.line_no == None)\
796 793 .filter(ChangesetComment.f_path == None)\
797 794 .filter(ChangesetComment.pull_request == pull_request)
798 795
799 796 return comments
800 797
801 798 @staticmethod
802 799 def use_outdated_comments(pull_request):
803 800 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
804 801 settings = settings_model.get_general_settings()
805 802 return settings.get('rhodecode_use_outdated_comments', False)
806 803
807 804 def trigger_commit_comment_hook(self, repo, user, action, data=None):
808 805 repo = self._get_repo(repo)
809 806 target_scm = repo.scm_instance()
810 807 if action == 'create':
811 808 trigger_hook = hooks_utils.trigger_comment_commit_hooks
812 809 elif action == 'edit':
813 810 trigger_hook = hooks_utils.trigger_comment_commit_edit_hooks
814 811 else:
815 812 return
816 813
817 814 log.debug('Handling repo %s trigger_commit_comment_hook with action %s: %s',
818 815 repo, action, trigger_hook)
819 816 trigger_hook(
820 817 username=user.username,
821 818 repo_name=repo.repo_name,
822 819 repo_type=target_scm.alias,
823 820 repo=repo,
824 821 data=data)
825 822
826 823
827 824 def _parse_comment_line_number(line_no):
828 825 """
829 826 Parses line numbers of the form "(o|n)\d+" and returns them in a tuple.
830 827 """
831 828 old_line = None
832 829 new_line = None
833 830 if line_no.startswith('o'):
834 831 old_line = int(line_no[1:])
835 832 elif line_no.startswith('n'):
836 833 new_line = int(line_no[1:])
837 834 else:
838 835 raise ValueError("Comment lines have to start with either 'o' or 'n'.")
839 836 return diffs.DiffLineNumber(old_line, new_line)
840 837
841 838
842 839 def _diff_to_comment_line_number(diff_line):
843 840 if diff_line.new is not None:
844 841 return u'n{}'.format(diff_line.new)
845 842 elif diff_line.old is not None:
846 843 return u'o{}'.format(diff_line.old)
847 844 return u''
848 845
849 846
850 847 def _diff_line_delta(a, b):
851 848 if None not in (a.new, b.new):
852 849 return abs(a.new - b.new)
853 850 elif None not in (a.old, b.old):
854 851 return abs(a.old - b.old)
855 852 else:
856 853 raise ValueError(
857 854 "Cannot compute delta between {} and {}".format(a, b))
@@ -1,256 +1,256 b''
1 1
2 2
3 3 # Copyright (C) 2013-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 gist model for RhodeCode
23 23 """
24 24
25 25 import os
26 26 import time
27 27 import logging
28 28 import traceback
29 29 import shutil
30 30
31 31 from pyramid.threadlocal import get_current_request
32 32
33 33 from rhodecode.lib.utils2 import (
34 safe_unicode, unique_id, safe_int, time_to_datetime, AttributeDict)
34 unique_id, safe_int, safe_str, time_to_datetime, AttributeDict)
35 35 from rhodecode.lib.ext_json import json
36 36 from rhodecode.lib.vcs import VCSError
37 37 from rhodecode.model import BaseModel
38 38 from rhodecode.model.db import Gist
39 39 from rhodecode.model.repo import RepoModel
40 40 from rhodecode.model.scm import ScmModel
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44 GIST_STORE_LOC = '.rc_gist_store'
45 45 GIST_METADATA_FILE = '.rc_gist_metadata'
46 46
47 47
48 48 class GistModel(BaseModel):
49 49 cls = Gist
50 50 vcs_backend = 'hg'
51 51
52 52 def _get_gist(self, gist):
53 53 """
54 54 Helper method to get gist by ID, or gist_access_id as a fallback
55 55
56 56 :param gist: GistID, gist_access_id, or Gist instance
57 57 """
58 58 return self._get_instance(Gist, gist, callback=Gist.get_by_access_id)
59 59
60 60 def __delete_gist(self, gist):
61 61 """
62 62 removes gist from filesystem
63 63
64 64 :param gist: gist object
65 65 """
66 66 root_path = RepoModel().repos_path
67 67 rm_path = os.path.join(root_path, GIST_STORE_LOC, gist.gist_access_id)
68 68 log.info("Removing %s", rm_path)
69 69 shutil.rmtree(rm_path)
70 70
71 71 def _store_metadata(self, repo, gist_id, gist_access_id, user_id, username,
72 72 gist_type, gist_expires, gist_acl_level):
73 73 """
74 74 store metadata inside the gist repo; this can later be used for imports
75 75 or gist identification. Currently we use this inside RhodeCode tools
76 76 to do cleanup of gists that are in storage but not in database.
77 77 """
78 78 metadata = {
79 79 'metadata_version': '2',
80 80 'gist_db_id': gist_id,
81 81 'gist_access_id': gist_access_id,
82 82 'gist_owner_id': user_id,
83 83 'gist_owner_username': username,
84 84 'gist_type': gist_type,
85 85 'gist_expires': gist_expires,
86 86 'gist_updated': time.time(),
87 87 'gist_acl_level': gist_acl_level,
88 88 }
89 89 metadata_file = os.path.join(repo.path, '.hg', GIST_METADATA_FILE)
90 90 with open(metadata_file, 'wb') as f:
91 91 f.write(json.dumps(metadata))
92 92
93 93 def get_gist(self, gist):
94 94 return self._get_gist(gist)
95 95
96 96 def get_gist_files(self, gist_access_id, revision=None):
97 97 """
98 98 Get files for given gist
99 99
100 100 :param gist_access_id:
101 101 """
102 102 repo = Gist.get_by_access_id(gist_access_id)
103 103 vcs_repo = repo.scm_instance()
104 104 if not vcs_repo:
105 105 raise VCSError('Failed to load gist repository for {}'.format(repo))
106 106
107 107 commit = vcs_repo.get_commit(commit_id=revision)
108 108 return commit, [n for n in commit.get_node('/')]
109 109
110 110 def create(self, description, owner, gist_mapping,
111 111 gist_type=Gist.GIST_PUBLIC, lifetime=-1, gist_id=None,
112 112 gist_acl_level=Gist.ACL_LEVEL_PRIVATE):
113 113 """
114 114 Create a gist
115 115
116 116 :param description: description of the gist
117 117 :param owner: user who created this gist
118 118 :param gist_mapping: mapping [{'filename': 'file1.txt', 'content': content}, ...]
119 119 :param gist_type: type of gist private/public
120 120 :param lifetime: in minutes, -1 == forever
121 121 :param gist_acl_level: acl level for this gist
122 122 """
123 123 owner = self._get_user(owner)
124 gist_id = safe_unicode(gist_id or unique_id(20))
124 gist_id = safe_str(gist_id or unique_id(20))
125 125 lifetime = safe_int(lifetime, -1)
126 126 gist_expires = time.time() + (lifetime * 60) if lifetime != -1 else -1
127 127 expiration = (time_to_datetime(gist_expires)
128 128 if gist_expires != -1 else 'forever')
129 129 log.debug('set GIST expiration date to: %s', expiration)
130 130 # create the Database version
131 131 gist = Gist()
132 132 gist.gist_description = description
133 133 gist.gist_access_id = gist_id
134 134 gist.gist_owner = owner.user_id
135 135 gist.gist_expires = gist_expires
136 gist.gist_type = safe_unicode(gist_type)
136 gist.gist_type = safe_str(gist_type)
137 137 gist.acl_level = gist_acl_level
138 138 self.sa.add(gist)
139 139 self.sa.flush()
140 140 if gist_type == Gist.GIST_PUBLIC:
141 141 # use DB ID for easy to use GIST ID
142 gist_id = safe_unicode(gist.gist_id)
142 gist_id = safe_str(gist.gist_id)
143 143 gist.gist_access_id = gist_id
144 144 self.sa.add(gist)
145 145
146 146 gist_repo_path = os.path.join(GIST_STORE_LOC, gist_id)
147 147 log.debug('Creating new %s GIST repo in %s', gist_type, gist_repo_path)
148 148 repo = RepoModel()._create_filesystem_repo(
149 149 repo_name=gist_id, repo_type=self.vcs_backend, repo_group=GIST_STORE_LOC,
150 150 use_global_config=True)
151 151
152 152 # now create single multifile commit
153 153 message = 'added file'
154 154 message += 's: ' if len(gist_mapping) > 1 else ': '
155 message += ', '.join([x for x in gist_mapping])
155 message += ', '.join([safe_str(x) for x in gist_mapping])
156 156
157 157 # fake RhodeCode Repository object
158 158 fake_repo = AttributeDict({
159 159 'repo_name': gist_repo_path,
160 160 'scm_instance': lambda *args, **kwargs: repo,
161 161 })
162 162
163 163 ScmModel().create_nodes(
164 164 user=owner.user_id, repo=fake_repo,
165 165 message=message,
166 166 nodes=gist_mapping,
167 167 trigger_push_hook=False
168 168 )
169 169
170 170 self._store_metadata(repo, gist.gist_id, gist.gist_access_id,
171 171 owner.user_id, owner.username, gist.gist_type,
172 172 gist.gist_expires, gist_acl_level)
173 173 return gist
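# A hedged usage sketch; 'admin' is a placeholder owner and the mapping
# shape (filename -> {'content': ...}) is inferred from how create() passes
# gist_mapping to ScmModel().create_nodes() and joins its keys into the
# commit message:
#
#   gist = GistModel().create(
#       description='hello-world snippet', owner='admin',
#       gist_mapping={'file1.txt': {'content': 'print("hi")'}},
#       gist_type=Gist.GIST_PUBLIC, lifetime=60)  # expires in one hour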
174 174
175 175 def delete(self, gist, fs_remove=True):
176 176 gist = self._get_gist(gist)
177 177 try:
178 178 self.sa.delete(gist)
179 179 if fs_remove:
180 180 self.__delete_gist(gist)
181 181 else:
182 182 log.debug('skipping removal from filesystem')
183 183 except Exception:
184 184 log.error(traceback.format_exc())
185 185 raise
186 186
187 187 def update(self, gist, description, owner, gist_mapping, lifetime,
188 188 gist_acl_level):
189 189 gist = self._get_gist(gist)
190 190 gist_repo = gist.scm_instance()
191 191
192 192 if lifetime == 0: # preserve old value
193 193 gist_expires = gist.gist_expires
194 194 else:
195 195 gist_expires = (
196 196 time.time() + (lifetime * 60) if lifetime != -1 else -1)
197 197
198 198 # calculate operation type based on given data
199 199 gist_mapping_op = {}
200 200 for k, v in gist_mapping.items():
201 201 # add, mod, del
202 202 if not v['filename_org'] and v['filename']:
203 203 op = 'add'
204 204 elif v['filename_org'] and not v['filename']:
205 205 op = 'del'
206 206 else:
207 207 op = 'mod'
208 208
209 209 v['op'] = op
210 210 gist_mapping_op[k] = v
211 211
212 212 gist.gist_description = description
213 213 gist.gist_expires = gist_expires
214 214 gist.owner = owner
215 215 gist.acl_level = gist_acl_level
216 216 self.sa.add(gist)
217 217 self.sa.flush()
218 218
219 219 message = 'updated file'
220 220 message += 's: ' if len(gist_mapping) > 1 else ': '
221 message += ', '.join([x for x in gist_mapping])
221 message += ', '.join([safe_str(x) for x in gist_mapping])
222 222
223 223 # fake RhodeCode Repository object
224 224 fake_repo = AttributeDict({
225 225 'repo_name': gist_repo.path,
226 226 'scm_instance': lambda *args, **kwargs: gist_repo,
227 227 })
228 228
229 229 self._store_metadata(gist_repo, gist.gist_id, gist.gist_access_id,
230 230 owner.user_id, owner.username, gist.gist_type,
231 231 gist.gist_expires, gist_acl_level)
232 232
233 233 # this can throw NodeNotChangedError, if changes we're trying to commit
234 234 # are not actually changes...
235 235 ScmModel().update_nodes(
236 236 user=owner.user_id,
237 237 repo=fake_repo,
238 238 message=message,
239 239 nodes=gist_mapping_op,
240 240 trigger_push_hook=False
241 241 )
242 242
243 243 return gist
244 244
245 245 def get_url(self, gist, request=None):
246 246 import rhodecode
247 247
248 248 if not request:
249 249 request = get_current_request()
250 250
251 251 alias_url = rhodecode.CONFIG.get('gist_alias_url')
252 252 if alias_url:
253 253 return alias_url.replace('{gistid}', gist.gist_access_id)
254 254
255 255 return request.route_url('gist_show', gist_id=gist.gist_access_id)
256 256
@@ -1,241 +1,239 b''
1 1
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Model for integrations
24 24 """
25 25
26 26
27 27 import logging
28 28
29 29 from sqlalchemy import or_, and_
30 30
31 import rhodecode
32 31 from rhodecode import events
33 32 from rhodecode.integrations.types.base import EEIntegration
34 33 from rhodecode.lib.caching_query import FromCache
35 34 from rhodecode.model import BaseModel
36 from rhodecode.model.db import Integration, Repository, RepoGroup, true, false, case
35 from rhodecode.model.db import Integration, Repository, RepoGroup, true, false, case, null
37 36 from rhodecode.integrations import integration_type_registry
38 37
39 38 log = logging.getLogger(__name__)
40 39
41 40
42 41 class IntegrationModel(BaseModel):
43 42
44 43 cls = Integration
45 44
46 45 def __get_integration(self, integration):
47 46 if isinstance(integration, Integration):
48 47 return integration
49 48 elif isinstance(integration, int):
50 49 return self.sa.query(Integration).get(integration)
51 50 else:
52 51 if integration:
53 52 raise Exception('integration must be an int or an instance'
54 53 ' of Integration, got %s' % type(integration))
55 54
56 def create(self, IntegrationType, name, enabled, repo, repo_group,
57 child_repos_only, settings):
55 def create(self, IntegrationType, name, enabled, repo, repo_group, child_repos_only, settings):
58 56 """ Create an IntegrationType integration """
59 57 integration = Integration()
60 58 integration.integration_type = IntegrationType.key
61 59 self.sa.add(integration)
62 60 self.update_integration(integration, name, enabled, repo, repo_group,
63 61 child_repos_only, settings)
64 62 self.sa.commit()
65 63 return integration
66 64
67 65 def update_integration(self, integration, name, enabled, repo, repo_group,
68 66 child_repos_only, settings):
69 67 integration = self.__get_integration(integration)
70 68
71 69 integration.repo = repo
72 70 integration.repo_group = repo_group
73 71 integration.child_repos_only = child_repos_only
74 72 integration.name = name
75 73 integration.enabled = enabled
76 74 integration.settings = settings
77 75
78 76 return integration
79 77
80 78 def delete(self, integration):
81 79 integration = self.__get_integration(integration)
82 80 if integration:
83 81 self.sa.delete(integration)
84 82 return True
85 83 return False
86 84
87 85 def get_integration_handler(self, integration):
88 86 TypeClass = integration_type_registry.get(integration.integration_type)
89 87 if not TypeClass:
90 88 log.error('No class could be found for integration type: {}'.format(
91 89 integration.integration_type))
92 90 return None
93 91 elif isinstance(TypeClass, EEIntegration) or issubclass(TypeClass, EEIntegration):
94 92 log.error('EE integration cannot be '
95 93 'executed for integration type: {}'.format(
96 94 integration.integration_type))
97 95 return None
98 96
99 97 return TypeClass(integration.settings)
100 98
101 99 def send_event(self, integration, event):
102 100 """ Send an event to an integration """
103 101 handler = self.get_integration_handler(integration)
104 102 if handler:
105 103 log.debug(
106 104 'events: sending event %s on integration %s using handler %s',
107 105 event, integration, handler)
108 106 handler.send_event(event)
109 107
110 108 def get_integrations(self, scope, IntegrationType=None):
111 109 """
112 110 Return integrations for a scope, which must be one of:
113 111
114 112 'all' - every integration, global/repogroup/repo
115 113 'global' - global integrations only
116 114 <Repository> instance - integrations for this repo only
117 115 <RepoGroup> instance - integrations for this repogroup only
118 116 """
119 117
120 118 if isinstance(scope, Repository):
121 119 query = self.sa.query(Integration).filter(
122 120 Integration.repo == scope)
123 121 elif isinstance(scope, RepoGroup):
124 122 query = self.sa.query(Integration).filter(
125 123 Integration.repo_group == scope)
126 124 elif scope == 'global':
127 125 # global integrations
128 126 query = self.sa.query(Integration).filter(
129 127 and_(Integration.repo_id == None, Integration.repo_group_id == None)
130 128 )
131 129 elif scope == 'root-repos':
132 130 query = self.sa.query(Integration).filter(
133 131 and_(Integration.repo_id == None,
134 132 Integration.repo_group_id == None,
135 133 Integration.child_repos_only == true())
136 134 )
137 135 elif scope == 'all':
138 136 query = self.sa.query(Integration)
139 137 else:
140 138 raise Exception(
141 139 "invalid `scope`, must be one of: "
142 140 "['global', 'all', <Repository>, <RepoGroup>]")
143 141
144 142 if IntegrationType is not None:
145 143 query = query.filter(
146 144 Integration.integration_type==IntegrationType.key)
147 145
148 146 result = []
149 147 for integration in query.all():
150 148 IntType = integration_type_registry.get(integration.integration_type)
151 149 result.append((IntType, integration))
152 150 return result
153 151
154 152 def get_for_event(self, event, cache=False):
155 153 """
156 154 Get integrations that match an event
157 155 """
158 156 # base query
159 157 query = self.sa.query(
160 158 Integration
161 159 ).filter(
162 160 Integration.enabled == true()
163 161 )
164 162
165 163 global_integrations_filter = and_(
166 Integration.repo_id == None,
167 Integration.repo_group_id == None,
164 Integration.repo_id == null(),
165 Integration.repo_group_id == null(),
168 166 Integration.child_repos_only == false(),
169 167 )
170 168
171 169 if isinstance(event, events.RepoEvent):
172 170 root_repos_integrations_filter = and_(
173 Integration.repo_id == None,
174 Integration.repo_group_id == None,
171 Integration.repo_id == null(),
172 Integration.repo_group_id == null(),
175 173 Integration.child_repos_only == true(),
176 174 )
177 175
178 176 clauses = [
179 177 global_integrations_filter,
180 178 ]
181 179 cases = [
182 180 (global_integrations_filter, 1),
183 181 (root_repos_integrations_filter, 2),
184 182 ]
185 183
186 184 # repo group integrations
187 185 if event.repo.group:
188 186 # repo group with only root level repos
189 187 group_child_repos_filter = and_(
190 188 Integration.repo_group_id == event.repo.group.group_id,
191 189 Integration.child_repos_only == true()
192 190 )
193 191
194 192 clauses.append(group_child_repos_filter)
195 193 cases.append(
196 194 (group_child_repos_filter, 3),
197 195 )
198 196
199 197 # repo group cascade to kids
200 198 group_recursive_repos_filter = and_(
201 199 Integration.repo_group_id.in_(
202 200 [group.group_id for group in event.repo.groups_with_parents]
203 201 ),
204 202 Integration.child_repos_only == false()
205 203 )
206 204 clauses.append(group_recursive_repos_filter)
207 205 cases.append(
208 206 (group_recursive_repos_filter, 4),
209 207 )
210 208
211 209 if not event.repo.group: # root repo
212 210 clauses.append(root_repos_integrations_filter)
213 211
214 212 # repo integrations
215 213 if event.repo.repo_id: # pre-create events don't have a repo_id yet
216 214 specific_repo_filter = Integration.repo_id == event.repo.repo_id
217 215 clauses.append(specific_repo_filter)
218 216 cases.append(
219 217 (specific_repo_filter, 5),
220 218 )
221 219
222 220 order_by_criterion = case(cases)
223 221
224 222 query = query.filter(or_(*clauses))
225 223 query = query.order_by(order_by_criterion)
226 224
227 225 if cache:
228 cache_key = "get_enabled_repo_integrations_%i" % event.repo.repo_id
226 cache_key = f"get_enabled_repo_integrations_{event.repo.repo_id}"
229 227 query = query.options(
230 228 FromCache("sql_cache_short", cache_key))
231 229 else: # only global integrations
232 230 order_by_criterion = Integration.integration_id
233 231
234 232 query = query.filter(global_integrations_filter)
235 233 query = query.order_by(order_by_criterion)
236 234 if cache:
237 235 query = query.options(
238 236 FromCache("sql_cache_short", "get_enabled_global_integrations"))
239 237
240 238 result = query.all()
241 239 return result
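The SQL `case(...)` above effectively ranks matched integrations from most generic to most specific. A pure-Python sketch of that ordering, with hypothetical integrations:

PRECEDENCE = {
    'global': 1,           # repo_id and repo_group_id are NULL
    'root_repos': 2,       # NULL scope, child_repos_only set
    'group_child': 3,      # exact repo group, root-level repos only
    'group_recursive': 4,  # any parent repo group, cascading to children
    'repo': 5,             # bound to this exact repository
}
matches = [('repo', 'jenkins'), ('global', 'slack'), ('group_recursive', 'webhook')]
for scope, name in sorted(matches, key=lambda m: PRECEDENCE[m[0]]):
    print(scope, name)     # slack (global) first, jenkins (repo-specific) last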
@@ -1,453 +1,457 b''
1 1
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 Model for notifications
24 24 """
25 25
26 26 import logging
27 27 import traceback
28 28
29 29 import premailer
30 30 from pyramid.threadlocal import get_current_request
31 31 from sqlalchemy.sql.expression import false, true
32 32
33 33 import rhodecode
34 34 from rhodecode.lib import helpers as h
35 35 from rhodecode.model import BaseModel
36 36 from rhodecode.model.db import Notification, User, UserNotification
37 37 from rhodecode.model.meta import Session
38 38 from rhodecode.translation import TranslationString
39 39
40 40 log = logging.getLogger(__name__)
41 41
42 42
43 43 class NotificationModel(BaseModel):
44 44
45 45 cls = Notification
46 46
47 47 def __get_notification(self, notification):
48 48 if isinstance(notification, Notification):
49 49 return notification
50 50 elif isinstance(notification, int):
51 51 return Notification.get(notification)
52 52 else:
53 53 if notification:
54 54 raise Exception('notification must be an int or an instance'
55 55 ' of Notification, got %s' % type(notification))
56 56
57 57 def create(
58 58 self, created_by, notification_subject='', notification_body='',
59 59 notification_type=Notification.TYPE_MESSAGE, recipients=None,
60 60 mention_recipients=None, with_email=True, email_kwargs=None):
61 61 """
62 62
63 63 Creates notification of given type
64 64
65 65 :param created_by: int, str or User instance. User who created this
66 66 notification
67 67 :param notification_subject: subject of notification itself,
68 68 it will be generated automatically from notification_type if not specified
69 69 :param notification_body: body of notification text
70 70 it will be generated automatically from notification_type if not specified
71 71 :param notification_type: type of notification, based on that we
72 72 pick templates
73 73 :param recipients: list of int, str or User objects, when None
74 74 is given send to all admins
75 75 :param mention_recipients: list of int, str or User objects,
76 76 that were mentioned
77 77 :param with_email: send email with this notification
78 78 :param email_kwargs: dict with arguments to generate email
79 79 """
80 80
81 81 from rhodecode.lib.celerylib import tasks, run_task
82 82
83 83 if recipients and not getattr(recipients, '__iter__', False):
84 84 raise Exception('recipients must be an iterable object')
85 85
86 86 if not (notification_subject and notification_body) and not notification_type:
87 87 raise ValueError('notification_subject and notification_body '
88 88 'cannot be empty when notification_type is not specified')
89 89
90 90 created_by_obj = self._get_user(created_by)
91 91
92 92 if not created_by_obj:
93 93 raise Exception('unknown user %s' % created_by)
94 94
95 95 # default MAIN body if not given
96 96 email_kwargs = email_kwargs or {'body': notification_body}
97 97 mention_recipients = mention_recipients or set()
98 98
99 99 if recipients is None:
100 100 # recipients is None means to all admins
101 101 recipients_objs = User.query().filter(User.admin == true()).all()
102 102 log.debug('sending notifications %s to admins: %s',
103 103 notification_type, recipients_objs)
104 104 else:
105 105 recipients_objs = set()
106 106 for u in recipients:
107 107 obj = self._get_user(u)
108 108 if obj:
109 109 recipients_objs.add(obj)
110 110 else: # we didn't find this user, log the error and carry on
111 111 log.error('cannot notify unknown user %r', u)
112 112
113 113 if not recipients_objs:
114 114 raise Exception('no valid recipients specified')
115 115
116 116 log.debug('sending notifications %s to %s',
117 117 notification_type, recipients_objs)
118 118
119 119 # add mentioned users into recipients
120 120 final_recipients = set(recipients_objs).union(mention_recipients)
121 121
122 122 (subject, email_body, email_body_plaintext) = \
123 123 EmailNotificationModel().render_email(notification_type, **email_kwargs)
124 124
125 125 if not notification_subject:
126 126 notification_subject = subject
127 127
128 128 if not notification_body:
129 129 notification_body = email_body_plaintext
130 130
131 131 notification = Notification.create(
132 132 created_by=created_by_obj, subject=notification_subject,
133 133 body=notification_body, recipients=final_recipients,
134 134 type_=notification_type
135 135 )
136 136
137 137 if not with_email: # skip sending email, and just create notification
138 138 return notification
139 139
140 140 # don't send email to the person who created this comment
141 141 rec_objs = set(recipients_objs).difference({created_by_obj})
142 142
143 143 # now notify all recipients in question
144 144
145 145 for recipient in rec_objs.union(mention_recipients):
146 146 # inject current recipient
147 147 email_kwargs['recipient'] = recipient
148 148 email_kwargs['mention'] = recipient in mention_recipients
149 149 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
150 150 notification_type, **email_kwargs)
151 151
152 152 extra_headers = None
153 153 if 'thread_ids' in email_kwargs:
154 154 extra_headers = {'thread_ids': email_kwargs.pop('thread_ids')}
155 155
156 156 log.debug('Creating notification email task for user:`%s`', recipient)
157 157 task = run_task(tasks.send_email, recipient.email, subject,
158 158 email_body_plaintext, email_body, extra_headers=extra_headers)
159 159 log.debug('Created email task: %s', task)
160 160
161 161 return notification
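A standalone sketch of the recipient set algebra used above, with hypothetical usernames: mentioned users are always added, and the comment author is excluded from the email fan-out.

recipients = {'ada', 'bob', 'carol'}
mentioned = {'dave'}
author = 'bob'

final_recipients = recipients | mentioned             # stored on the notification
email_targets = (recipients - {author}) | mentioned   # the author gets no email
print(sorted(email_targets))                          # ['ada', 'carol', 'dave']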
162 162
163 163 def delete(self, user, notification):
164 164 # we don't want to remove actual notification just the assignment
165 165 try:
166 166 notification = self.__get_notification(notification)
167 167 user = self._get_user(user)
168 168 if notification and user:
169 169 obj = UserNotification.query()\
170 170 .filter(UserNotification.user == user)\
171 171 .filter(UserNotification.notification == notification)\
172 172 .one()
173 173 Session().delete(obj)
174 174 return True
175 175 except Exception:
176 176 log.error(traceback.format_exc())
177 177 raise
178 178
179 179 def get_for_user(self, user, filter_=None):
180 180 """
181 181 Get mentions for given user, filter them if filter dict is given
182 182 """
183 183 user = self._get_user(user)
184 184
185 185 q = UserNotification.query()\
186 186 .filter(UserNotification.user == user)\
187 187 .join((
188 188 Notification, UserNotification.notification_id ==
189 189 Notification.notification_id))
190 190 if filter_ == ['all']:
191 191 q = q # no filter
192 192 elif filter_ == ['unread']:
193 193 q = q.filter(UserNotification.read == false())
194 194 elif filter_:
195 195 q = q.filter(Notification.type_.in_(filter_))
196 196
197 197 return q
198 198
199 199 def mark_read(self, user, notification):
200 200 try:
201 201 notification = self.__get_notification(notification)
202 202 user = self._get_user(user)
203 203 if notification and user:
204 204 obj = UserNotification.query()\
205 205 .filter(UserNotification.user == user)\
206 206 .filter(UserNotification.notification == notification)\
207 207 .one()
208 208 obj.read = True
209 209 Session().add(obj)
210 210 return True
211 211 except Exception:
212 212 log.error(traceback.format_exc())
213 213 raise
214 214
215 215 def mark_all_read_for_user(self, user, filter_=None):
216 216 user = self._get_user(user)
217 217 q = UserNotification.query()\
218 218 .filter(UserNotification.user == user)\
219 219 .filter(UserNotification.read == false())\
220 220 .join((
221 221 Notification, UserNotification.notification_id ==
222 222 Notification.notification_id))
223 223 if filter_ == ['unread']:
224 224 q = q.filter(UserNotification.read == false())
225 225 elif filter_:
226 226 q = q.filter(Notification.type_.in_(filter_))
227 227
228 228 # this is a little inefficient but sqlalchemy doesn't support
229 229 # update on joined tables :(
230 230 for obj in q.all():
231 231 obj.read = True
232 232 Session().add(obj)
233 233
234 234 def get_unread_cnt_for_user(self, user):
235 235 user = self._get_user(user)
236 236 return UserNotification.query()\
237 237 .filter(UserNotification.read == false())\
238 238 .filter(UserNotification.user == user).count()
239 239
240 240 def get_unread_for_user(self, user):
241 241 user = self._get_user(user)
242 242 return [x.notification for x in UserNotification.query()
243 243 .filter(UserNotification.read == false())
244 244 .filter(UserNotification.user == user).all()]
245 245
246 246 def get_user_notification(self, user, notification):
247 247 user = self._get_user(user)
248 248 notification = self.__get_notification(notification)
249 249
250 250 return UserNotification.query()\
251 251 .filter(UserNotification.notification == notification)\
252 252 .filter(UserNotification.user == user).scalar()
253 253
254 254 def make_description(self, notification, translate, show_age=True):
255 255 """
256 256 Creates a human readable description based on properties
257 257 of notification object
258 258 """
259 259 _ = translate
260 260 _map = {
261 261 notification.TYPE_CHANGESET_COMMENT: [
262 262 _('%(user)s commented on commit %(date_or_age)s'),
263 263 _('%(user)s commented on commit at %(date_or_age)s'),
264 264 ],
265 265 notification.TYPE_MESSAGE: [
266 266 _('%(user)s sent message %(date_or_age)s'),
267 267 _('%(user)s sent message at %(date_or_age)s'),
268 268 ],
269 269 notification.TYPE_MENTION: [
270 270 _('%(user)s mentioned you %(date_or_age)s'),
271 271 _('%(user)s mentioned you at %(date_or_age)s'),
272 272 ],
273 273 notification.TYPE_REGISTRATION: [
274 274 _('%(user)s registered in RhodeCode %(date_or_age)s'),
275 275 _('%(user)s registered in RhodeCode at %(date_or_age)s'),
276 276 ],
277 277 notification.TYPE_PULL_REQUEST: [
278 278 _('%(user)s opened new pull request %(date_or_age)s'),
279 279 _('%(user)s opened new pull request at %(date_or_age)s'),
280 280 ],
281 281 notification.TYPE_PULL_REQUEST_UPDATE: [
282 282 _('%(user)s updated pull request %(date_or_age)s'),
283 283 _('%(user)s updated pull request at %(date_or_age)s'),
284 284 ],
285 285 notification.TYPE_PULL_REQUEST_COMMENT: [
286 286 _('%(user)s commented on pull request %(date_or_age)s'),
287 287 _('%(user)s commented on pull request at %(date_or_age)s'),
288 288 ],
289 289 }
290 290
291 291 templates = _map[notification.type_]
292 292
293 293 if show_age:
294 294 template = templates[0]
295 295 date_or_age = h.age(notification.created_on)
296 296 if translate:
297 297 date_or_age = translate(date_or_age)
298 298
299 299 if isinstance(date_or_age, TranslationString):
300 300 date_or_age = date_or_age.interpolate()
301 301
302 302 else:
303 303 template = templates[1]
304 304 date_or_age = h.format_date(notification.created_on)
305 305
306 306 return template % {
307 307 'user': notification.created_by_user.username,
308 308 'date_or_age': date_or_age,
309 309 }
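A minimal sketch of the template selection above: index 0 carries a relative age, index 1 an absolute date (hypothetical values):

templates = ['%(user)s sent message %(date_or_age)s',
             '%(user)s sent message at %(date_or_age)s']
show_age = True
template = templates[0] if show_age else templates[1]
print(template % {'user': 'marcin', 'date_or_age': '2 hours ago'})
# -> marcin sent message 2 hours ago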
310 310
311 311
312 312 # Templates for Titles, that could be overwritten by rcextensions
313 313 # Title of email for pull-request update
314 314 EMAIL_PR_UPDATE_SUBJECT_TEMPLATE = ''
315 315 # Title of email for request for pull request review
316 316 EMAIL_PR_REVIEW_SUBJECT_TEMPLATE = ''
317 317
318 318 # Title of email for general comment on pull request
319 319 EMAIL_PR_COMMENT_SUBJECT_TEMPLATE = ''
320 320 # Title of email for general comment which includes status change on pull request
321 321 EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = ''
322 322 # Title of email for inline comment on a file in pull request
323 323 EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE = ''
324 324
325 325 # Title of email for general comment on commit
326 326 EMAIL_COMMENT_SUBJECT_TEMPLATE = ''
327 327 # Title of email for general comment which includes status change on commit
328 328 EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE = ''
329 329 # Title of email for inline comment on a file in commit
330 330 EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE = ''
331 331
332 332 import cssutils
333 333 # hijack css utils logger and replace with ours
334 334 log = logging.getLogger('rhodecode.cssutils.premailer')
335 log.setLevel(logging.INFO)
335 336 cssutils.log.setLog(log)
336 337
337 338
338 339 class EmailNotificationModel(BaseModel):
339 340 TYPE_COMMIT_COMMENT = Notification.TYPE_CHANGESET_COMMENT
340 341 TYPE_REGISTRATION = Notification.TYPE_REGISTRATION
341 342 TYPE_PULL_REQUEST = Notification.TYPE_PULL_REQUEST
342 343 TYPE_PULL_REQUEST_COMMENT = Notification.TYPE_PULL_REQUEST_COMMENT
343 344 TYPE_PULL_REQUEST_UPDATE = Notification.TYPE_PULL_REQUEST_UPDATE
344 345 TYPE_MAIN = Notification.TYPE_MESSAGE
345 346
346 347 TYPE_PASSWORD_RESET = 'password_reset'
347 348 TYPE_PASSWORD_RESET_CONFIRMATION = 'password_reset_confirmation'
348 349 TYPE_EMAIL_TEST = 'email_test'
349 350 TYPE_EMAIL_EXCEPTION = 'exception'
350 351 TYPE_UPDATE_AVAILABLE = 'update_available'
351 352 TYPE_TEST = 'test'
352 353
353 354 email_types = {
354 355 TYPE_MAIN:
355 356 'rhodecode:templates/email_templates/main.mako',
356 357 TYPE_TEST:
357 358 'rhodecode:templates/email_templates/test.mako',
358 359 TYPE_EMAIL_EXCEPTION:
359 360 'rhodecode:templates/email_templates/exception_tracker.mako',
360 361 TYPE_UPDATE_AVAILABLE:
361 362 'rhodecode:templates/email_templates/update_available.mako',
362 363 TYPE_EMAIL_TEST:
363 364 'rhodecode:templates/email_templates/email_test.mako',
364 365 TYPE_REGISTRATION:
365 366 'rhodecode:templates/email_templates/user_registration.mako',
366 367 TYPE_PASSWORD_RESET:
367 368 'rhodecode:templates/email_templates/password_reset.mako',
368 369 TYPE_PASSWORD_RESET_CONFIRMATION:
369 370 'rhodecode:templates/email_templates/password_reset_confirmation.mako',
370 371 TYPE_COMMIT_COMMENT:
371 372 'rhodecode:templates/email_templates/commit_comment.mako',
372 373 TYPE_PULL_REQUEST:
373 374 'rhodecode:templates/email_templates/pull_request_review.mako',
374 375 TYPE_PULL_REQUEST_COMMENT:
375 376 'rhodecode:templates/email_templates/pull_request_comment.mako',
376 377 TYPE_PULL_REQUEST_UPDATE:
377 378 'rhodecode:templates/email_templates/pull_request_update.mako',
378 379 }
379 380
380 premailer_instance = premailer.Premailer()
381 premailer_instance = premailer.Premailer(
382 #cssutils_logging_handler=log.handlers[0],
383 #cssutils_logging_level=logging.INFO
384 )
381 385
382 386 def __init__(self):
383 387 """
384 388 Example usage::
385 389
386 390 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
387 391 EmailNotificationModel.TYPE_TEST, **email_kwargs)
388 392
389 393 """
390 394 super(EmailNotificationModel, self).__init__()
391 395 self.rhodecode_instance_name = rhodecode.CONFIG.get('rhodecode_title')
392 396
393 397 def _update_kwargs_for_render(self, kwargs):
394 398 """
395 399 Inject params required for Mako rendering
396 400
397 401 :param kwargs:
398 402 """
399 403
400 404 kwargs['rhodecode_instance_name'] = self.rhodecode_instance_name
401 405 kwargs['rhodecode_version'] = rhodecode.__version__
402 406 instance_url = h.route_url('home')
403 407 _kwargs = {
404 408 'instance_url': instance_url,
405 409 'whitespace_filter': self.whitespace_filter,
406 410 'email_pr_update_subject_template': EMAIL_PR_UPDATE_SUBJECT_TEMPLATE,
407 411 'email_pr_review_subject_template': EMAIL_PR_REVIEW_SUBJECT_TEMPLATE,
408 412 'email_pr_comment_subject_template': EMAIL_PR_COMMENT_SUBJECT_TEMPLATE,
409 413 'email_pr_comment_status_change_subject_template': EMAIL_PR_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE,
410 414 'email_pr_comment_file_subject_template': EMAIL_PR_COMMENT_FILE_SUBJECT_TEMPLATE,
411 415 'email_comment_subject_template': EMAIL_COMMENT_SUBJECT_TEMPLATE,
412 416 'email_comment_status_change_subject_template': EMAIL_COMMENT_STATUS_CHANGE_SUBJECT_TEMPLATE,
413 417 'email_comment_file_subject_template': EMAIL_COMMENT_FILE_SUBJECT_TEMPLATE,
414 418 }
415 419 _kwargs.update(kwargs)
416 420 return _kwargs
417 421
418 422 def whitespace_filter(self, text):
419 423 return text.replace('\n', '').replace('\t', '')
420 424
421 425 def get_renderer(self, type_, request):
422 426 template_name = self.email_types[type_]
423 427 return request.get_partial_renderer(template_name)
424 428
425 429 def render_email(self, type_, **kwargs):
426 430 """
427 431 renders template for email, and returns a tuple of
428 432 (subject, email_html_body, email_plaintext_body)
429 433 """
430 434 request = get_current_request()
431 435
432 436 # translator and helpers inject
433 437 _kwargs = self._update_kwargs_for_render(kwargs)
434 438 email_template = self.get_renderer(type_, request=request)
435 439 subject = email_template.render('subject', **_kwargs)
436 440
437 441 try:
438 442 body_plaintext = email_template.render('body_plaintext', **_kwargs)
439 443 except AttributeError:
440 444 # it's not defined in the template, so we can skip it
441 445 body_plaintext = ''
442 446
443 447 # render WHOLE template
444 448 body = email_template.render(None, **_kwargs)
445 449
446 450 try:
447 451 # Inline CSS styles and conversion
448 452 body = self.premailer_instance.transform(body)
449 453 except Exception:
450 454 log.exception('Failed to parse body with premailer')
451 455 pass
452 456
453 457 return subject, body, body_plaintext
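A small sketch of the CSS inlining step, using premailer's module-level helper (assumes the premailer package is installed):

import premailer

html = '<html><head><style>p { color: red; }</style></head><body><p>Hi</p></body></html>'
print(premailer.transform(html))
# the <style> rules are inlined, e.g. <p style="color:red">Hi</p>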
@@ -1,599 +1,607 b''
1 1
2 2 # Copyright (C) 2010-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 """
21 21 permissions model for RhodeCode
22 22 """
23 23 import collections
24 24 import logging
25 25 import traceback
26 26
27 27 from sqlalchemy.exc import DatabaseError
28 28
29 29 from rhodecode import events
30 30 from rhodecode.model import BaseModel
31 31 from rhodecode.model.db import (
32 32 User, Permission, UserToPerm, UserRepoToPerm, UserRepoGroupToPerm,
33 33 UserUserGroupToPerm, UserGroup, UserGroupToPerm, UserToRepoBranchPermission)
34 34 from rhodecode.lib.utils2 import str2bool, safe_int
35 35
36 36 log = logging.getLogger(__name__)
37 37
38 38
39 39 class PermissionModel(BaseModel):
40 40 """
41 41 Permissions model for RhodeCode
42 42 """
43 43 FORKING_DISABLED = 'hg.fork.none'
44 44 FORKING_ENABLED = 'hg.fork.repository'
45 45
46 46 cls = Permission
47 47 global_perms = {
48 48 'default_repo_create': None,
49 49 # special case for create repos on write access to group
50 50 'default_repo_create_on_write': None,
51 51 'default_repo_group_create': None,
52 52 'default_user_group_create': None,
53 53 'default_fork_create': None,
54 54 'default_inherit_default_permissions': None,
55 55 'default_register': None,
56 56 'default_password_reset': None,
57 57 'default_extern_activate': None,
58 58
59 59 # object permissions below
60 60 'default_repo_perm': None,
61 61 'default_group_perm': None,
62 62 'default_user_group_perm': None,
63 63
64 64 # branch
65 65 'default_branch_perm': None,
66 66 }
67 67
68 68 def set_global_permission_choices(self, c_obj, gettext_translator):
69 69 _ = gettext_translator
70 70
71 71 c_obj.repo_perms_choices = [
72 72 ('repository.none', _('None'),),
73 73 ('repository.read', _('Read'),),
74 74 ('repository.write', _('Write'),),
75 75 ('repository.admin', _('Admin'),)]
76 76
77 77 c_obj.group_perms_choices = [
78 78 ('group.none', _('None'),),
79 79 ('group.read', _('Read'),),
80 80 ('group.write', _('Write'),),
81 81 ('group.admin', _('Admin'),)]
82 82
83 83 c_obj.user_group_perms_choices = [
84 84 ('usergroup.none', _('None'),),
85 85 ('usergroup.read', _('Read'),),
86 86 ('usergroup.write', _('Write'),),
87 87 ('usergroup.admin', _('Admin'),)]
88 88
89 89 c_obj.branch_perms_choices = [
90 90 ('branch.none', _('Protected/No Access'),),
91 91 ('branch.merge', _('Web merge'),),
92 92 ('branch.push', _('Push'),),
93 93 ('branch.push_force', _('Force Push'),)]
94 94
95 95 c_obj.register_choices = [
96 96 ('hg.register.none', _('Disabled')),
97 97 ('hg.register.manual_activate', _('Allowed with manual account activation')),
98 98 ('hg.register.auto_activate', _('Allowed with automatic account activation'))]
99 99
100 100 c_obj.password_reset_choices = [
101 101 ('hg.password_reset.enabled', _('Allow password recovery')),
102 102 ('hg.password_reset.hidden', _('Hide password recovery link')),
103 103 ('hg.password_reset.disabled', _('Disable password recovery'))]
104 104
105 105 c_obj.extern_activate_choices = [
106 106 ('hg.extern_activate.manual', _('Manual activation of external account')),
107 107 ('hg.extern_activate.auto', _('Automatic activation of external account'))]
108 108
109 109 c_obj.repo_create_choices = [
110 110 ('hg.create.none', _('Disabled')),
111 111 ('hg.create.repository', _('Enabled'))]
112 112
113 113 c_obj.repo_create_on_write_choices = [
114 114 ('hg.create.write_on_repogroup.false', _('Disabled')),
115 115 ('hg.create.write_on_repogroup.true', _('Enabled'))]
116 116
117 117 c_obj.user_group_create_choices = [
118 118 ('hg.usergroup.create.false', _('Disabled')),
119 119 ('hg.usergroup.create.true', _('Enabled'))]
120 120
121 121 c_obj.repo_group_create_choices = [
122 122 ('hg.repogroup.create.false', _('Disabled')),
123 123 ('hg.repogroup.create.true', _('Enabled'))]
124 124
125 125 c_obj.fork_choices = [
126 126 (self.FORKING_DISABLED, _('Disabled')),
127 127 (self.FORKING_ENABLED, _('Enabled'))]
128 128
129 129 c_obj.inherit_default_permission_choices = [
130 130 ('hg.inherit_default_perms.false', _('Disabled')),
131 131 ('hg.inherit_default_perms.true', _('Enabled'))]
132 132
133 133 def get_default_perms(self, object_perms, suffix):
134 134 defaults = {}
135 135 for perm in object_perms:
136 136 # perms
137 137 if perm.permission.permission_name.startswith('repository.'):
138 138 defaults['default_repo_perm' + suffix] = perm.permission.permission_name
139 139
140 140 if perm.permission.permission_name.startswith('group.'):
141 141 defaults['default_group_perm' + suffix] = perm.permission.permission_name
142 142
143 143 if perm.permission.permission_name.startswith('usergroup.'):
144 144 defaults['default_user_group_perm' + suffix] = perm.permission.permission_name
145 145
146 146 # branch
147 147 if perm.permission.permission_name.startswith('branch.'):
148 148 defaults['default_branch_perm' + suffix] = perm.permission.permission_name
149 149
150 150 # creation of objects
151 151 if perm.permission.permission_name.startswith('hg.create.write_on_repogroup'):
152 152 defaults['default_repo_create_on_write' + suffix] = perm.permission.permission_name
153 153
154 154 elif perm.permission.permission_name.startswith('hg.create.'):
155 155 defaults['default_repo_create' + suffix] = perm.permission.permission_name
156 156
157 157 if perm.permission.permission_name.startswith('hg.fork.'):
158 158 defaults['default_fork_create' + suffix] = perm.permission.permission_name
159 159
160 160 if perm.permission.permission_name.startswith('hg.inherit_default_perms.'):
161 161 defaults['default_inherit_default_permissions' + suffix] = perm.permission.permission_name
162 162
163 163 if perm.permission.permission_name.startswith('hg.repogroup.'):
164 164 defaults['default_repo_group_create' + suffix] = perm.permission.permission_name
165 165
166 166 if perm.permission.permission_name.startswith('hg.usergroup.'):
167 167 defaults['default_user_group_create' + suffix] = perm.permission.permission_name
168 168
169 169 # registration and external account activation
170 170 if perm.permission.permission_name.startswith('hg.register.'):
171 171 defaults['default_register' + suffix] = perm.permission.permission_name
172 172
173 173 if perm.permission.permission_name.startswith('hg.password_reset.'):
174 174 defaults['default_password_reset' + suffix] = perm.permission.permission_name
175 175
176 176 if perm.permission.permission_name.startswith('hg.extern_activate.'):
177 177 defaults['default_extern_activate' + suffix] = perm.permission.permission_name
178 178
179 179 return defaults
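A standalone sketch of the prefix-based bucketing above, with a few hypothetical permission names; note that the more specific hg.create.write_on_repogroup prefix must be tested before the general hg.create. prefix:

perm_names = ['repository.read',
              'hg.create.write_on_repogroup.true',
              'hg.register.manual_activate']
defaults = {}
for name in perm_names:
    if name.startswith('repository.'):
        defaults['default_repo_perm'] = name
    elif name.startswith('hg.create.write_on_repogroup'):
        defaults['default_repo_create_on_write'] = name
    elif name.startswith('hg.create.'):
        defaults['default_repo_create'] = name
    elif name.startswith('hg.register.'):
        defaults['default_register'] = name
print(defaults)
# {'default_repo_perm': 'repository.read',
#  'default_repo_create_on_write': 'hg.create.write_on_repogroup.true',
#  'default_register': 'hg.register.manual_activate'}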
180 180
181 181 def _make_new_user_perm(self, user, perm_name):
182 182 log.debug('Creating new user permission:%s', perm_name)
183 new_perm = Permission.get_by_key(perm_name)
184 if not new_perm:
185 raise ValueError(f'permission with name {perm_name} not found')
186
183 187 new = UserToPerm()
184 188 new.user = user
185 new.permission = Permission.get_by_key(perm_name)
189 new.permission = new_perm
186 190 return new
187 191
188 192 def _make_new_user_group_perm(self, user_group, perm_name):
189 193 log.debug('Creating new user group permission:%s', perm_name)
194 new_perm = Permission.get_by_key(perm_name)
195 if not new_perm:
196 raise ValueError(f'permission with name {perm_name} not found')
197
190 198 new = UserGroupToPerm()
191 199 new.users_group = user_group
192 new.permission = Permission.get_by_key(perm_name)
200 new.permission = new_perm
193 201 return new
194 202
195 203 def _keep_perm(self, perm_name, keep_fields):
196 204 def get_pat(field_name):
197 205 return {
198 206 # global perms
199 207 'default_repo_create': 'hg.create.',
200 208 # special case for create repos on write access to group
201 209 'default_repo_create_on_write': 'hg.create.write_on_repogroup.',
202 210 'default_repo_group_create': 'hg.repogroup.create.',
203 211 'default_user_group_create': 'hg.usergroup.create.',
204 212 'default_fork_create': 'hg.fork.',
205 213 'default_inherit_default_permissions': 'hg.inherit_default_perms.',
206 214
207 215 # application perms
208 216 'default_register': 'hg.register.',
209 217 'default_password_reset': 'hg.password_reset.',
210 218 'default_extern_activate': 'hg.extern_activate.',
211 219
212 220 # object permissions below
213 221 'default_repo_perm': 'repository.',
214 222 'default_group_perm': 'group.',
215 223 'default_user_group_perm': 'usergroup.',
216 224 # branch
217 225 'default_branch_perm': 'branch.',
218 226
219 227 }[field_name]
220 228 for field in keep_fields:
221 229 pat = get_pat(field)
222 230 if perm_name.startswith(pat):
223 231 return True
224 232 return False
225 233
226 234 def _clear_object_perm(self, object_perms, preserve=None):
227 235 preserve = preserve or []
228 236 _deleted = []
229 237 for perm in object_perms:
230 238 perm_name = perm.permission.permission_name
231 239 if not self._keep_perm(perm_name, keep_fields=preserve):
232 240 _deleted.append(perm_name)
233 241 self.sa.delete(perm)
234 242 return _deleted
235 243
236 244 def _clear_user_perms(self, user_id, preserve=None):
237 245 perms = self.sa.query(UserToPerm)\
238 246 .filter(UserToPerm.user_id == user_id)\
239 247 .all()
240 248 return self._clear_object_perm(perms, preserve=preserve)
241 249
242 250 def _clear_user_group_perms(self, user_group_id, preserve=None):
243 251 perms = self.sa.query(UserGroupToPerm)\
244 252 .filter(UserGroupToPerm.users_group_id == user_group_id)\
245 253 .all()
246 254 return self._clear_object_perm(perms, preserve=preserve)
247 255
248 256 def _set_new_object_perms(self, obj_type, to_object, form_result, preserve=None):
249 257 # clear current entries, to make this function idempotent
250 258 # it will fix even if we define more permissions or permissions
251 259 # are somehow missing
252 260 preserve = preserve or []
253 261 _global_perms = self.global_perms.copy()
254 262 if obj_type not in ['user', 'user_group']:
255 263 raise ValueError("obj_type must be one of 'user' or 'user_group'")
256 264 global_perms = len(_global_perms)
257 265 default_user_perms = len(Permission.DEFAULT_USER_PERMISSIONS)
258 266 if global_perms != default_user_perms:
259 267 raise Exception(
260 268 'Inconsistent permissions definition. Got {} vs {}'.format(
261 269 global_perms, default_user_perms))
262 270
263 271 if obj_type == 'user':
264 272 self._clear_user_perms(to_object.user_id, preserve)
265 273 if obj_type == 'user_group':
266 274 self._clear_user_group_perms(to_object.users_group_id, preserve)
267 275
268 276 # now kill the keys that we want to preserve from the form.
269 277 for key in preserve:
270 278 del _global_perms[key]
271 279
272 280 for k in _global_perms.copy():
273 281 _global_perms[k] = form_result[k]
274 282
275 283 # at that stage we validate all are passed inside form_result
276 284 for _perm_key, perm_value in _global_perms.items():
277 285 if perm_value is None:
278 286 raise ValueError('Missing permission for %s' % (_perm_key,))
279 287
280 288 if obj_type == 'user':
281 p = self._make_new_user_perm(object, perm_value)
289 p = self._make_new_user_perm(to_object, perm_value)
282 290 self.sa.add(p)
283 291 if obj_type == 'user_group':
284 p = self._make_new_user_group_perm(object, perm_value)
292 p = self._make_new_user_group_perm(to_object, perm_value)
285 293 self.sa.add(p)
286 294
287 295 def _set_new_user_perms(self, user, form_result, preserve=None):
288 296 return self._set_new_object_perms(
289 297 'user', user, form_result, preserve)
290 298
291 299 def _set_new_user_group_perms(self, user_group, form_result, preserve=None):
292 300 return self._set_new_object_perms(
293 301 'user_group', user_group, form_result, preserve)
294 302
295 303 def set_new_user_perms(self, user, form_result):
296 304 # calculate what to preserve from what is given in form_result
297 305 preserve = set(self.global_perms.keys()).difference(set(form_result.keys()))
298 306 return self._set_new_user_perms(user, form_result, preserve)
299 307
300 308 def set_new_user_group_perms(self, user_group, form_result):
301 309 # calculate what to preserve from what is given in form_result
302 310 preserve = set(self.global_perms.keys()).difference(set(form_result.keys()))
303 311 return self._set_new_user_group_perms(user_group, form_result, preserve)
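A sketch of the `preserve` computation used by both methods above: any globally defined key absent from the submitted form is kept untouched (hypothetical values):

global_perms = {'default_repo_perm', 'default_register', 'default_fork_create'}
form_result = {'default_repo_perm': 'repository.read'}
preserve = global_perms.difference(form_result.keys())
print(sorted(preserve))   # ['default_fork_create', 'default_register']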
304 312
305 313 def create_permissions(self):
306 314 """
307 315 Create permissions for whole system
308 316 """
309 317 for p in Permission.PERMS:
310 318 if not Permission.get_by_key(p[0]):
311 319 new_perm = Permission()
312 320 new_perm.permission_name = p[0]
313 321 new_perm.permission_longname = p[0] # translation err with p[1]
314 322 self.sa.add(new_perm)
315 323
316 324 def _create_default_object_permission(self, obj_type, obj, obj_perms,
317 325 force=False):
318 326 if obj_type not in ['user', 'user_group']:
319 327 raise ValueError("obj_type must be one of 'user' or 'user_group'")
320 328
321 329 def _get_group(perm_name):
322 330 return '.'.join(perm_name.split('.')[:1])
323 331
324 defined_perms_groups = map(
325 _get_group, (x.permission.permission_name for x in obj_perms))
332 defined_perms_groups = list(map(
333 _get_group, (x.permission.permission_name for x in obj_perms)))
326 334 log.debug('GOT ALREADY DEFINED:%s', obj_perms)
327 335
328 336 if force:
329 337 self._clear_object_perm(obj_perms)
330 338 self.sa.commit()
331 339 defined_perms_groups = []
332 340 # for every default permission that needs to be created, we check if
333 341 # its group is already defined; if it's not, we create the default perm
334 342 for perm_name in Permission.DEFAULT_USER_PERMISSIONS:
335 343 gr = _get_group(perm_name)
336 344 if gr not in defined_perms_groups:
337 345 log.debug('GR:%s not found, creating permission %s',
338 346 gr, perm_name)
339 347 if obj_type == 'user':
340 348 new_perm = self._make_new_user_perm(obj, perm_name)
341 349 self.sa.add(new_perm)
342 350 if obj_type == 'user_group':
343 351 new_perm = self._make_new_user_group_perm(obj, perm_name)
344 352 self.sa.add(new_perm)
345 353
346 354 def create_default_user_permissions(self, user, force=False):
347 355 """
348 356 Creates only missing default permissions for user, if force is set it
349 357 resets the default permissions for that user
350 358
351 359 :param user:
352 360 :param force:
353 361 """
354 362 user = self._get_user(user)
355 363 obj_perms = UserToPerm.query().filter(UserToPerm.user == user).all()
356 364 return self._create_default_object_permission(
357 365 'user', user, obj_perms, force)
358 366
359 367 def create_default_user_group_permissions(self, user_group, force=False):
360 368 """
361 369 Creates only missing default permissions for user group, if force is
362 370 set it resets the default permissions for that user group
363 371
364 372 :param user_group:
365 373 :param force:
366 374 """
367 375 user_group = self._get_user_group(user_group)
368 376 obj_perms = UserGroupToPerm.query().filter(UserGroupToPerm.users_group == user_group).all()
369 377 return self._create_default_object_permission(
370 378 'user_group', user_group, obj_perms, force)
371 379
372 380 def update_application_permissions(self, form_result):
373 381 if 'perm_user_id' in form_result:
374 382 perm_user = User.get(safe_int(form_result['perm_user_id']))
375 383 else:
376 384 # used mostly to do lookup for default user
377 385 perm_user = User.get_by_username(form_result['perm_user_name'])
378 386
379 387 try:
380 388 # stage 1 set anonymous access
381 389 if perm_user.username == User.DEFAULT_USER:
382 390 perm_user.active = str2bool(form_result['anonymous'])
383 391 self.sa.add(perm_user)
384 392
385 393 # stage 2 reset defaults and set them from form data
386 394 self._set_new_user_perms(perm_user, form_result, preserve=[
387 395 'default_repo_perm',
388 396 'default_group_perm',
389 397 'default_user_group_perm',
390 398 'default_branch_perm',
391 399
392 400 'default_repo_group_create',
393 401 'default_user_group_create',
394 402 'default_repo_create_on_write',
395 403 'default_repo_create',
396 404 'default_fork_create',
397 405 'default_inherit_default_permissions'])
398 406
399 407 self.sa.commit()
400 408 except (DatabaseError,):
401 409 log.error(traceback.format_exc())
402 410 self.sa.rollback()
403 411 raise
404 412
405 413 def update_user_permissions(self, form_result):
406 414 if 'perm_user_id' in form_result:
407 415 perm_user = User.get(safe_int(form_result['perm_user_id']))
408 416 else:
409 417 # used mostly to do lookup for default user
410 418 perm_user = User.get_by_username(form_result['perm_user_name'])
411 419 try:
412 420 # stage 2 reset defaults and set them from form data
413 421 self._set_new_user_perms(perm_user, form_result, preserve=[
414 422 'default_repo_perm',
415 423 'default_group_perm',
416 424 'default_user_group_perm',
417 425 'default_branch_perm',
418 426
419 427 'default_register',
420 428 'default_password_reset',
421 429 'default_extern_activate'])
422 430 self.sa.commit()
423 431 except (DatabaseError,):
424 432 log.error(traceback.format_exc())
425 433 self.sa.rollback()
426 434 raise
427 435
428 436 def update_user_group_permissions(self, form_result):
429 437 if 'perm_user_group_id' in form_result:
430 438 perm_user_group = UserGroup.get(safe_int(form_result['perm_user_group_id']))
431 439 else:
432 440 # used mostly to do lookup for default user
433 441 perm_user_group = UserGroup.get_by_group_name(form_result['perm_user_group_name'])
434 442 try:
435 443 # stage 2 reset defaults and set them from form data
436 444 self._set_new_user_group_perms(perm_user_group, form_result, preserve=[
437 445 'default_repo_perm',
438 446 'default_group_perm',
439 447 'default_user_group_perm',
440 448 'default_branch_perm',
441 449
442 450 'default_register',
443 451 'default_password_reset',
444 452 'default_extern_activate'])
445 453 self.sa.commit()
446 454 except (DatabaseError,):
447 455 log.error(traceback.format_exc())
448 456 self.sa.rollback()
449 457 raise
450 458
451 459 def update_object_permissions(self, form_result):
452 460 if 'perm_user_id' in form_result:
453 461 perm_user = User.get(safe_int(form_result['perm_user_id']))
454 462 else:
455 463 # used mostly to do lookup for default user
456 464 perm_user = User.get_by_username(form_result['perm_user_name'])
457 465 try:
458 466
459 467 # stage 2 reset defaults and set them from form data
460 468 self._set_new_user_perms(perm_user, form_result, preserve=[
461 469 'default_repo_group_create',
462 470 'default_user_group_create',
463 471 'default_repo_create_on_write',
464 472 'default_repo_create',
465 473 'default_fork_create',
466 474 'default_inherit_default_permissions',
467 475 'default_branch_perm',
468 476
469 477 'default_register',
470 478 'default_password_reset',
471 479 'default_extern_activate'])
472 480
473 481 # overwrite default repo permissions
474 482 if form_result['overwrite_default_repo']:
475 483 _def_name = form_result['default_repo_perm'].split('repository.')[-1]
476 484 _def = Permission.get_by_key('repository.' + _def_name)
477 485 for r2p in self.sa.query(UserRepoToPerm)\
478 486 .filter(UserRepoToPerm.user == perm_user)\
479 487 .all():
480 488 # don't reset PRIVATE repositories
481 489 if not r2p.repository.private:
482 490 r2p.permission = _def
483 491 self.sa.add(r2p)
484 492
485 493 # overwrite default repo group permissions
486 494 if form_result['overwrite_default_group']:
487 495 _def_name = form_result['default_group_perm'].split('group.')[-1]
488 496 _def = Permission.get_by_key('group.' + _def_name)
489 497 for g2p in self.sa.query(UserRepoGroupToPerm)\
490 498 .filter(UserRepoGroupToPerm.user == perm_user)\
491 499 .all():
492 500 g2p.permission = _def
493 501 self.sa.add(g2p)
494 502
495 503 # overwrite default user group permissions
496 504 if form_result['overwrite_default_user_group']:
497 505 _def_name = form_result['default_user_group_perm'].split('usergroup.')[-1]
498 506 # user groups
499 507 _def = Permission.get_by_key('usergroup.' + _def_name)
500 508 for g2p in self.sa.query(UserUserGroupToPerm)\
501 509 .filter(UserUserGroupToPerm.user == perm_user)\
502 510 .all():
503 511 g2p.permission = _def
504 512 self.sa.add(g2p)
505 513
506 514 # COMMIT
507 515 self.sa.commit()
508 516 except (DatabaseError,):
509 517 log.exception('Failed to set default object permissions')
510 518 self.sa.rollback()
511 519 raise
512 520
513 521 def update_branch_permissions(self, form_result):
514 522 if 'perm_user_id' in form_result:
515 523 perm_user = User.get(safe_int(form_result['perm_user_id']))
516 524 else:
517 525 # used mostly to do lookup for default user
518 526 perm_user = User.get_by_username(form_result['perm_user_name'])
519 527 try:
520 528
521 529 # stage 2 reset defaults and set them from form data
522 530 self._set_new_user_perms(perm_user, form_result, preserve=[
523 531 'default_repo_perm',
524 532 'default_group_perm',
525 533 'default_user_group_perm',
526 534
527 535 'default_repo_group_create',
528 536 'default_user_group_create',
529 537 'default_repo_create_on_write',
530 538 'default_repo_create',
531 539 'default_fork_create',
532 540 'default_inherit_default_permissions',
533 541
534 542 'default_register',
535 543 'default_password_reset',
536 544 'default_extern_activate'])
537 545
538 546 # overwrite default branch permissions
539 547 if form_result['overwrite_default_branch']:
540 548 _def_name = \
541 549 form_result['default_branch_perm'].split('branch.')[-1]
542 550
543 551 _def = Permission.get_by_key('branch.' + _def_name)
544 552
545 553 user_perms = UserToRepoBranchPermission.query()\
546 554 .join(UserToRepoBranchPermission.user_repo_to_perm)\
547 555 .filter(UserRepoToPerm.user == perm_user).all()
548 556
549 557 for g2p in user_perms:
550 558 g2p.permission = _def
551 559 self.sa.add(g2p)
552 560
553 561 # COMMIT
554 562 self.sa.commit()
555 563 except (DatabaseError,):
556 564 log.exception('Failed to set default branch permissions')
557 565 self.sa.rollback()
558 566 raise
559 567
560 568 def get_users_with_repo_write(self, db_repo):
561 569 write_plus = ['repository.write', 'repository.admin']
562 570 default_user_id = User.get_default_user_id()
563 571 user_write_permissions = collections.OrderedDict()
564 572
565 573 # write or higher and DEFAULT user for inheritance
566 574 for perm in db_repo.permissions():
567 575 if perm.permission in write_plus or perm.user_id == default_user_id:
568 576 user_write_permissions[perm.user_id] = perm
569 577 return user_write_permissions
570 578
571 579 def get_user_groups_with_repo_write(self, db_repo):
572 580 write_plus = ['repository.write', 'repository.admin']
573 581 user_group_write_permissions = collections.OrderedDict()
574 582
575 583 # write or higher and DEFAULT user for inheritance
576 584 for p in db_repo.permission_user_groups():
577 585 if p.permission in write_plus:
578 586 user_group_write_permissions[p.users_group_id] = p
579 587 return user_group_write_permissions
580 588
581 589 def trigger_permission_flush(self, affected_user_ids=None):
582 590 affected_user_ids = affected_user_ids or User.get_all_user_ids()
583 591 events.trigger(events.UserPermissionsChange(affected_user_ids))
584 592
585 593 def flush_user_permission_caches(self, changes, affected_user_ids=None):
586 594 affected_user_ids = affected_user_ids or []
587 595
588 596 for change in changes['added'] + changes['updated'] + changes['deleted']:
589 597 if change['type'] == 'user':
590 598 affected_user_ids.append(change['id'])
591 599 if change['type'] == 'user_group':
592 600 user_group = UserGroup.get(safe_int(change['id']))
593 601 if user_group:
594 602 group_members_ids = [x.user_id for x in user_group.members]
595 603 affected_user_ids.extend(group_members_ids)
596 604
597 605 self.trigger_permission_flush(affected_user_ids)
598 606
599 607 return affected_user_ids
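A standalone sketch of the cache-flush fan-out above: user changes contribute their own id, user-group changes contribute every member's id (group membership is a hypothetical lookup here, standing in for UserGroup.get(...).members):

changes = {'added': [{'type': 'user', 'id': 7}],
           'updated': [{'type': 'user_group', 'id': 3}],
           'deleted': []}
group_members = {3: [10, 11, 12]}   # assumed membership lookup

affected = []
for change in changes['added'] + changes['updated'] + changes['deleted']:
    if change['type'] == 'user':
        affected.append(change['id'])
    if change['type'] == 'user_group':
        affected.extend(group_members.get(change['id'], []))
print(affected)   # [7, 10, 11, 12]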
@@ -1,2380 +1,2394 b''
1 1
2 2
3 3 # Copyright (C) 2012-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 pull request model for RhodeCode
24 24 """
25 25
26
27 import json
28 26 import logging
29 27 import os
30 28
31 29 import datetime
32 import urllib.request, urllib.parse, urllib.error
30 import urllib.request
31 import urllib.parse
32 import urllib.error
33 33 import collections
34 34
35 import dataclasses as dataclasses
35 36 from pyramid.threadlocal import get_current_request
36 37
37 38 from rhodecode.lib.vcs.nodes import FileNode
38 39 from rhodecode.translation import lazy_ugettext
39 40 from rhodecode.lib import helpers as h, hooks_utils, diffs
40 41 from rhodecode.lib import audit_logger
41 42 from collections import OrderedDict
42 43 from rhodecode.lib.hooks_daemon import prepare_callback_daemon
44 from rhodecode.lib.ext_json import sjson as json
43 45 from rhodecode.lib.markup_renderer import (
44 46 DEFAULT_COMMENTS_RENDERER, RstTemplateRenderer)
45 from rhodecode.lib.utils2 import (
46 safe_unicode, safe_str, md5_safe, AttributeDict, safe_int,
47 get_current_rhodecode_user)
47 from rhodecode.lib.hash_utils import md5_safe
48 from rhodecode.lib.str_utils import safe_str
49 from rhodecode.lib.utils2 import AttributeDict, get_current_rhodecode_user
48 50 from rhodecode.lib.vcs.backends.base import (
49 51 Reference, MergeResponse, MergeFailureReason, UpdateFailureReason,
50 52 TargetRefMissing, SourceRefMissing)
51 53 from rhodecode.lib.vcs.conf import settings as vcs_settings
52 54 from rhodecode.lib.vcs.exceptions import (
53 55 CommitDoesNotExistError, EmptyRepositoryError)
54 56 from rhodecode.model import BaseModel
55 57 from rhodecode.model.changeset_status import ChangesetStatusModel
56 58 from rhodecode.model.comment import CommentsModel
57 59 from rhodecode.model.db import (
58 aliased, null, lazyload, and_, or_, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
60 aliased, null, lazyload, and_, or_, select, func, String, cast, PullRequest, PullRequestReviewers, ChangesetStatus,
59 61 PullRequestVersion, ChangesetComment, Repository, RepoReviewRule, User)
60 62 from rhodecode.model.meta import Session
61 63 from rhodecode.model.notification import NotificationModel, \
62 64 EmailNotificationModel
63 65 from rhodecode.model.scm import ScmModel
64 66 from rhodecode.model.settings import VcsSettingsModel
65 67
66 68
67 69 log = logging.getLogger(__name__)
68 70
69 71
70 72 # Data structure to hold the response data when updating commits during a pull
71 73 # request update.
72 74 class UpdateResponse(object):
73 75
74 76 def __init__(self, executed, reason, new, old, common_ancestor_id,
75 77 commit_changes, source_changed, target_changed):
76 78
77 79 self.executed = executed
78 80 self.reason = reason
79 81 self.new = new
80 82 self.old = old
81 83 self.common_ancestor_id = common_ancestor_id
82 84 self.changes = commit_changes
83 85 self.source_changed = source_changed
84 86 self.target_changed = target_changed
85 87
86 88
87 89 def get_diff_info(
88 90 source_repo, source_ref, target_repo, target_ref, get_authors=False,
89 91 get_commit_authors=True):
90 92 """
91 93 Calculates detailed diff information for usage in preview of creation of a pull-request.
92 94 This is also used for default reviewers logic
93 95 """
94 96
95 97 source_scm = source_repo.scm_instance()
96 98 target_scm = target_repo.scm_instance()
97 99
98 100 ancestor_id = target_scm.get_common_ancestor(target_ref, source_ref, source_scm)
99 101 if not ancestor_id:
100 102 raise ValueError(
101 103 'cannot calculate diff info without a common ancestor. '
102 104 'Make sure both repositories are related, and have a common forking commit.')
103 105
104 106 # the case here is that we want a simple diff without incoming commits,
105 107 # previewing what will be merged based only on commits in the source.
106 108 log.debug('Using ancestor %s as source_ref instead of %s',
107 109 ancestor_id, source_ref)
108 110
109 111 # source of changes now is the common ancestor
110 112 source_commit = source_scm.get_commit(commit_id=ancestor_id)
111 113 # target commit becomes the source ref as it is the last commit
112 114 # for diff generation this logic gives proper diff
113 115 target_commit = source_scm.get_commit(commit_id=source_ref)
114 116
115 117 vcs_diff = \
116 118 source_scm.get_diff(commit1=source_commit, commit2=target_commit,
117 119 ignore_whitespace=False, context=3)
118 120
119 diff_processor = diffs.DiffProcessor(
120 vcs_diff, format='newdiff', diff_limit=None,
121 file_limit=None, show_full_diff=True)
121 diff_processor = diffs.DiffProcessor(vcs_diff, diff_format='newdiff',
122 diff_limit=0, file_limit=0, show_full_diff=True)
122 123
123 124 _parsed = diff_processor.prepare()
124 125
125 126 all_files = []
126 127 all_files_changes = []
127 128 changed_lines = {}
128 129 stats = [0, 0]
129 130 for f in _parsed:
130 131 all_files.append(f['filename'])
131 132 all_files_changes.append({
132 133 'filename': f['filename'],
133 134 'stats': f['stats']
134 135 })
135 136 stats[0] += f['stats']['added']
136 137 stats[1] += f['stats']['deleted']
137 138
138 139 changed_lines[f['filename']] = []
139 140 if len(f['chunks']) < 2:
140 141 continue
141 142 # the first chunk is "context" information
142 143 for chunks in f['chunks'][1:]:
143 144 for chunk in chunks['lines']:
144 145 if chunk['action'] not in ('del', 'mod'):
145 146 continue
146 147 changed_lines[f['filename']].append(chunk['old_lineno'])
147 148
148 149 commit_authors = []
149 150 user_counts = {}
150 151 email_counts = {}
151 152 author_counts = {}
152 153 _commit_cache = {}
153 154
154 155 commits = []
155 156 if get_commit_authors:
156 157 log.debug('Obtaining commit authors from set of commits')
157 158 _compare_data = target_scm.compare(
158 159 target_ref, source_ref, source_scm, merge=True,
159 160 pre_load=["author", "date", "message"]
160 161 )
161 162
162 163 for commit in _compare_data:
163 164 # NOTE(marcink): we serialize here, so we don't produce more vcsserver calls on the data returned
164 165 # from this function, which is later passed through JSON serialization
165 166 serialized_commit = dict(
166 167 author=commit.author,
167 168 date=commit.date,
168 169 message=commit.message,
169 170 commit_id=commit.raw_id,
170 171 raw_id=commit.raw_id
171 172 )
172 173 commits.append(serialized_commit)
173 174 user = User.get_from_cs_author(serialized_commit['author'])
174 175 if user and user not in commit_authors:
175 176 commit_authors.append(user)
176 177
177 178 # authors of changed lines
178 179 if get_authors:
179 180 log.debug('Calculating authors of changed files')
180 181 target_commit = source_repo.get_commit(ancestor_id)
181 182
182 183 for fname, lines in changed_lines.items():
183 184
184 185 try:
185 186 node = target_commit.get_node(fname, pre_load=["is_binary"])
186 187 except Exception:
187 188 log.exception("Failed to load node with path %s", fname)
188 189 continue
189 190
190 191 if not isinstance(node, FileNode):
191 192 continue
192 193
193 194 # NOTE(marcink): for binary node we don't do annotation, just use last author
194 195 if node.is_binary:
195 196 author = node.last_commit.author
196 197 email = node.last_commit.author_email
197 198
198 199 user = User.get_from_cs_author(author)
199 200 if user:
200 201 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
201 202 author_counts[author] = author_counts.get(author, 0) + 1
202 203 email_counts[email] = email_counts.get(email, 0) + 1
203 204
204 205 continue
205 206
206 207 for annotation in node.annotate:
207 208 line_no, commit_id, get_commit_func, line_text = annotation
208 209 if line_no in lines:
209 210 if commit_id not in _commit_cache:
210 211 _commit_cache[commit_id] = get_commit_func()
211 212 commit = _commit_cache[commit_id]
212 213 author = commit.author
213 214 email = commit.author_email
214 215 user = User.get_from_cs_author(author)
215 216 if user:
216 217 user_counts[user.user_id] = user_counts.get(user.user_id, 0) + 1
217 218 author_counts[author] = author_counts.get(author, 0) + 1
218 219 email_counts[email] = email_counts.get(email, 0) + 1
219 220
220 221 log.debug('Default reviewers processing finished')
221 222
222 223 return {
223 224 'commits': commits,
224 225 'files': all_files_changes,
225 226 'stats': stats,
226 227 'ancestor': ancestor_id,
227 228 # original authors of modified files
228 229 'original_authors': {
229 230 'users': user_counts,
230 231 'authors': author_counts,
231 232 'emails': email_counts,
232 233 },
233 234 'commit_authors': commit_authors
234 235 }
235 236
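# Illustrative usage sketch (not part of the original module; the repo
# objects and refs are assumed to be valid) of the dict returned above:
#
#   info = get_diff_info(source_repo, source_ref, target_repo, target_ref,
#                        get_authors=True)
#   added, deleted = info['stats']                 # aggregated line counts
#   touched_files = [f['filename'] for f in info['files']]
#   candidate_reviewers = info['commit_authors']   # User objects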
236 237
237 238 class PullRequestModel(BaseModel):
238 239
239 240 cls = PullRequest
240 241
241 242 DIFF_CONTEXT = diffs.DEFAULT_CONTEXT
242 243
243 244 UPDATE_STATUS_MESSAGES = {
244 245 UpdateFailureReason.NONE: lazy_ugettext(
245 246 'Pull request update successful.'),
246 247 UpdateFailureReason.UNKNOWN: lazy_ugettext(
247 248 'Pull request update failed because of an unknown error.'),
248 249 UpdateFailureReason.NO_CHANGE: lazy_ugettext(
249 250 'No update needed because the source and target have not changed.'),
250 251 UpdateFailureReason.WRONG_REF_TYPE: lazy_ugettext(
251 252 'Pull request cannot be updated because the reference type is '
252 253 'not supported for an update. Only Branch, Tag or Bookmark is allowed.'),
253 254 UpdateFailureReason.MISSING_TARGET_REF: lazy_ugettext(
254 255 'This pull request cannot be updated because the target '
255 256 'reference is missing.'),
256 257 UpdateFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
257 258 'This pull request cannot be updated because the source '
258 259 'reference is missing.'),
259 260 }
260 261 REF_TYPES = ['bookmark', 'book', 'tag', 'branch']
261 262 UPDATABLE_REF_TYPES = ['bookmark', 'book', 'branch']
262 263
263 264 def __get_pull_request(self, pull_request):
264 265 return self._get_instance((
265 266 PullRequest, PullRequestVersion), pull_request)
266 267
267 268 def _check_perms(self, perms, pull_request, user, api=False):
268 269 if not api:
269 270 return h.HasRepoPermissionAny(*perms)(
270 271 user=user, repo_name=pull_request.target_repo.repo_name)
271 272 else:
272 273 return h.HasRepoPermissionAnyApi(*perms)(
273 274 user=user, repo_name=pull_request.target_repo.repo_name)
274 275
275 276 def check_user_read(self, pull_request, user, api=False):
276 277 _perms = ('repository.admin', 'repository.write', 'repository.read',)
277 278 return self._check_perms(_perms, pull_request, user, api)
278 279
279 280 def check_user_merge(self, pull_request, user, api=False):
280 281 _perms = ('repository.admin', 'repository.write', 'hg.admin',)
281 282 return self._check_perms(_perms, pull_request, user, api)
282 283
283 284 def check_user_update(self, pull_request, user, api=False):
284 285 owner = user.user_id == pull_request.user_id
285 286 return self.check_user_merge(pull_request, user, api) or owner
286 287
287 288 def check_user_delete(self, pull_request, user):
288 289 owner = user.user_id == pull_request.user_id
289 290 _perms = ('repository.admin',)
290 291 return self._check_perms(_perms, pull_request, user) or owner
291 292
292 293 def is_user_reviewer(self, pull_request, user):
293 294 return user.user_id in [
294 295 x.user_id for x in
295 296 pull_request.get_pull_request_reviewers(PullRequestReviewers.ROLE_REVIEWER)
296 297 if x.user
297 298 ]
298 299
299 300 def check_user_change_status(self, pull_request, user, api=False):
300 301 return self.check_user_update(pull_request, user, api) \
301 302 or self.is_user_reviewer(pull_request, user)
302 303
303 304 def check_user_comment(self, pull_request, user):
304 305 owner = user.user_id == pull_request.user_id
305 306 return self.check_user_read(pull_request, user) or owner
306 307
307 308 def get(self, pull_request):
308 309 return self.__get_pull_request(pull_request)
309 310
310 311 def _prepare_get_all_query(self, repo_name, search_q=None, source=False,
311 312 statuses=None, opened_by=None, order_by=None,
312 313 order_dir='desc', only_created=False):
313 314 repo = None
314 315 if repo_name:
315 316 repo = self._get_repo(repo_name)
316 317
317 318 q = PullRequest.query()
318 319
319 320 if search_q:
320 like_expression = u'%{}%'.format(safe_unicode(search_q))
321 like_expression = u'%{}%'.format(safe_str(search_q))
321 322 q = q.join(User, User.user_id == PullRequest.user_id)
322 323 q = q.filter(or_(
323 324 cast(PullRequest.pull_request_id, String).ilike(like_expression),
324 325 User.username.ilike(like_expression),
325 326 PullRequest.title.ilike(like_expression),
326 327 PullRequest.description.ilike(like_expression),
327 328 ))
328 329
329 330 # source or target
330 331 if repo and source:
331 332 q = q.filter(PullRequest.source_repo == repo)
332 333 elif repo:
333 334 q = q.filter(PullRequest.target_repo == repo)
334 335
335 336 # closed,opened
336 337 if statuses:
337 338 q = q.filter(PullRequest.status.in_(statuses))
338 339
339 340 # opened by filter
340 341 if opened_by:
341 342 q = q.filter(PullRequest.user_id.in_(opened_by))
342 343
343 344 # only get those that are in "created" state
344 345 if only_created:
345 346 q = q.filter(PullRequest.pull_request_state == PullRequest.STATE_CREATED)
346 347
347 348 order_map = {
348 349 'name_raw': PullRequest.pull_request_id,
349 350 'id': PullRequest.pull_request_id,
350 351 'title': PullRequest.title,
351 352 'updated_on_raw': PullRequest.updated_on,
352 353 'target_repo': PullRequest.target_repo_id
353 354 }
354 355 if order_by and order_by in order_map:
355 356 if order_dir == 'asc':
356 357 q = q.order_by(order_map[order_by].asc())
357 358 else:
358 359 q = q.order_by(order_map[order_by].desc())
359 360
360 361 return q
361 362
362 363 def count_all(self, repo_name, search_q=None, source=False, statuses=None,
363 364 opened_by=None):
364 365 """
365 366 Count the number of pull requests for a specific repository.
366 367
367 368 :param repo_name: target or source repo
368 369 :param search_q: filter by text
369 370 :param source: boolean flag to specify if repo_name refers to source
370 371 :param statuses: list of pull request statuses
371 372 :param opened_by: author user of the pull request
372 373 :returns: int number of pull requests
373 374 """
374 375 q = self._prepare_get_all_query(
375 376 repo_name, search_q=search_q, source=source, statuses=statuses,
376 377 opened_by=opened_by)
377 378
378 379 return q.count()
379 380
380 381 def get_all(self, repo_name, search_q=None, source=False, statuses=None,
381 382 opened_by=None, offset=0, length=None, order_by=None, order_dir='desc'):
382 383 """
383 384 Get all pull requests for a specific repository.
384 385
385 386 :param repo_name: target or source repo
386 387 :param search_q: filter by text
387 388 :param source: boolean flag to specify if repo_name refers to source
388 389 :param statuses: list of pull request statuses
389 390 :param opened_by: author user of the pull request
390 391 :param offset: pagination offset
391 392 :param length: length of returned list
392 393 :param order_by: order of the returned list
393 394 :param order_dir: 'asc' or 'desc' ordering direction
394 395 :returns: list of pull requests
395 396 """
396 397 q = self._prepare_get_all_query(
397 398 repo_name, search_q=search_q, source=source, statuses=statuses,
398 399 opened_by=opened_by, order_by=order_by, order_dir=order_dir)
399 400
400 401 if length:
401 402 pull_requests = q.limit(length).offset(offset).all()
402 403 else:
403 404 pull_requests = q.all()
404 405
405 406 return pull_requests
406 407
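# Illustrative usage sketch (repo name assumed, parameters as documented
# above): paginating pull requests targeting a repository:
#
#   model = PullRequestModel()
#   total = model.count_all('some/repo', search_q='fix')
#   page = model.get_all('some/repo', search_q='fix', offset=0, length=20,
#                        order_by='updated_on_raw', order_dir='desc')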
407 408 def count_awaiting_review(self, repo_name, search_q=None, statuses=None):
408 409 """
409 410 Count the number of pull requests for a specific repository that are
410 411 awaiting review.
411 412
412 413 :param repo_name: target or source repo
413 414 :param search_q: filter by text
414 415 :param statuses: list of pull request statuses
415 416 :returns: int number of pull requests
416 417 """
417 418 pull_requests = self.get_awaiting_review(
418 419 repo_name, search_q=search_q, statuses=statuses)
419 420
420 421 return len(pull_requests)
421 422
422 423 def get_awaiting_review(self, repo_name, search_q=None, statuses=None,
423 424 offset=0, length=None, order_by=None, order_dir='desc'):
424 425 """
425 426 Get all pull requests for a specific repository that are awaiting
426 427 review.
427 428
428 429 :param repo_name: target or source repo
429 430 :param search_q: filter by text
430 431 :param statuses: list of pull request statuses
431 432 :param offset: pagination offset
432 433 :param length: length of returned list
433 434 :param order_by: order of the returned list
434 435 :param order_dir: 'asc' or 'desc' ordering direction
435 436 :returns: list of pull requests
436 437 """
437 438 pull_requests = self.get_all(
438 439 repo_name, search_q=search_q, statuses=statuses,
439 440 order_by=order_by, order_dir=order_dir)
440 441
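# NOTE: filtering and pagination happen in Python below, because the
# review status is calculated per pull request object rather than being
# expressible as a SQL filter here.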
441 442 _filtered_pull_requests = []
442 443 for pr in pull_requests:
443 444 status = pr.calculated_review_status()
444 445 if status in [ChangesetStatus.STATUS_NOT_REVIEWED,
445 446 ChangesetStatus.STATUS_UNDER_REVIEW]:
446 447 _filtered_pull_requests.append(pr)
447 448 if length:
448 449 return _filtered_pull_requests[offset:offset+length]
449 450 else:
450 451 return _filtered_pull_requests
451 452
452 453 def _prepare_awaiting_my_review_review_query(
453 454 self, repo_name, user_id, search_q=None, statuses=None,
454 455 order_by=None, order_dir='desc'):
455 456
456 457 for_review_statuses = [
457 458 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
458 459 ]
459 460
460 461 pull_request_alias = aliased(PullRequest)
461 462 status_alias = aliased(ChangesetStatus)
462 463 reviewers_alias = aliased(PullRequestReviewers)
463 464 repo_alias = aliased(Repository)
464 465
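# NOTE: ChangesetStatus entries are versioned per reviewer; the scalar
# subquery below picks the smallest version number per reviewer and
# pull request, which (version 0 appearing to be the current entry)
# selects that reviewer's latest recorded status.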
465 466 last_ver_subq = Session()\
466 467 .query(func.min(ChangesetStatus.version)) \
467 468 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
468 469 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
469 470 .subquery()
470 471
471 472 q = Session().query(pull_request_alias) \
472 473 .options(lazyload(pull_request_alias.author)) \
473 474 .join(reviewers_alias,
474 475 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
475 476 .join(repo_alias,
476 477 repo_alias.repo_id == pull_request_alias.target_repo_id) \
477 478 .outerjoin(status_alias,
478 479 and_(status_alias.user_id == reviewers_alias.user_id,
479 480 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
480 481 .filter(or_(status_alias.version == null(),
481 482 status_alias.version == last_ver_subq)) \
482 483 .filter(reviewers_alias.user_id == user_id) \
483 484 .filter(repo_alias.repo_name == repo_name) \
484 485 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
485 486 .group_by(pull_request_alias)
486 487
487 488 # closed,opened
488 489 if statuses:
489 490 q = q.filter(pull_request_alias.status.in_(statuses))
490 491
491 492 if search_q:
492 like_expression = u'%{}%'.format(safe_unicode(search_q))
493 like_expression = u'%{}%'.format(safe_str(search_q))
493 494 q = q.join(User, User.user_id == pull_request_alias.user_id)
494 495 q = q.filter(or_(
495 496 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
496 497 User.username.ilike(like_expression),
497 498 pull_request_alias.title.ilike(like_expression),
498 499 pull_request_alias.description.ilike(like_expression),
499 500 ))
500 501
501 502 order_map = {
502 503 'name_raw': pull_request_alias.pull_request_id,
503 504 'title': pull_request_alias.title,
504 505 'updated_on_raw': pull_request_alias.updated_on,
505 506 'target_repo': pull_request_alias.target_repo_id
506 507 }
507 508 if order_by and order_by in order_map:
508 509 if order_dir == 'asc':
509 510 q = q.order_by(order_map[order_by].asc())
510 511 else:
511 512 q = q.order_by(order_map[order_by].desc())
512 513
513 514 return q
514 515
515 516 def count_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None):
516 517 """
517 518 Count the number of pull requests for a specific repository that are
518 519 awaiting review from a specific user.
519 520
520 521 :param repo_name: target or source repo
521 522 :param user_id: reviewer user of the pull request
522 523 :param search_q: filter by text
523 524 :param statuses: list of pull request statuses
524 525 :returns: int number of pull requests
525 526 """
526 527 q = self._prepare_awaiting_my_review_review_query(
527 528 repo_name, user_id, search_q=search_q, statuses=statuses)
528 529 return q.count()
529 530
530 531 def get_awaiting_my_review(self, repo_name, user_id, search_q=None, statuses=None,
531 532 offset=0, length=None, order_by=None, order_dir='desc'):
532 533 """
533 534 Get all pull requests for a specific repository that are awaiting
534 535 review from a specific user.
535 536
536 537 :param repo_name: target or source repo
537 538 :param user_id: reviewer user of the pull request
538 539 :param search_q: filter by text
539 540 :param statuses: list of pull request statuses
540 541 :param offset: pagination offset
541 542 :param length: length of returned list
542 543 :param order_by: order of the returned list
543 544 :param order_dir: 'asc' or 'desc' ordering direction
544 545 :returns: list of pull requests
545 546 """
546 547
547 548 q = self._prepare_awaiting_my_review_review_query(
548 549 repo_name, user_id, search_q=search_q, statuses=statuses,
549 550 order_by=order_by, order_dir=order_dir)
550 551
551 552 if length:
552 553 pull_requests = q.limit(length).offset(offset).all()
553 554 else:
554 555 pull_requests = q.all()
555 556
556 557 return pull_requests
557 558
558 559 def _prepare_im_participating_query(self, user_id=None, statuses=None, query='',
559 560 order_by=None, order_dir='desc'):
560 561 """
561 562 return a query of pull-requests the user is a creator of, or is added to as a reviewer
562 563 """
563 564 q = PullRequest.query()
564 565 if user_id:
565 reviewers_subquery = Session().query(
566 PullRequestReviewers.pull_request_id).filter(
567 PullRequestReviewers.user_id == user_id).subquery()
566
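# NOTE: 2.0-style SQLAlchemy select(); with_only_columns() narrows the
# statement to just pull_request_id so it can act as the subquery of
# the IN clause below.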
567 base_query = select(PullRequestReviewers)\
568 .where(PullRequestReviewers.user_id == user_id)\
569 .with_only_columns(PullRequestReviewers.pull_request_id)
570
568 571 user_filter = or_(
569 572 PullRequest.user_id == user_id,
570 PullRequest.pull_request_id.in_(reviewers_subquery)
573 PullRequest.pull_request_id.in_(base_query)
571 574 )
572 575 q = PullRequest.query().filter(user_filter)
573 576
574 577 # closed,opened
575 578 if statuses:
576 579 q = q.filter(PullRequest.status.in_(statuses))
577 580
578 581 if query:
579 like_expression = u'%{}%'.format(safe_unicode(query))
582 like_expression = u'%{}%'.format(safe_str(query))
580 583 q = q.join(User, User.user_id == PullRequest.user_id)
581 584 q = q.filter(or_(
582 585 cast(PullRequest.pull_request_id, String).ilike(like_expression),
583 586 User.username.ilike(like_expression),
584 587 PullRequest.title.ilike(like_expression),
585 588 PullRequest.description.ilike(like_expression),
586 589 ))
587 590
588 591 order_map = {
589 592 'name_raw': PullRequest.pull_request_id,
590 593 'title': PullRequest.title,
591 594 'updated_on_raw': PullRequest.updated_on,
592 595 'target_repo': PullRequest.target_repo_id
593 596 }
594 597 if order_by and order_by in order_map:
595 598 if order_dir == 'asc':
596 599 q = q.order_by(order_map[order_by].asc())
597 600 else:
598 601 q = q.order_by(order_map[order_by].desc())
599 602
600 603 return q
601 604
602 605 def count_im_participating_in(self, user_id=None, statuses=None, query=''):
603 606 q = self._prepare_im_participating_query(user_id, statuses=statuses, query=query)
604 607 return q.count()
605 608
606 609 def get_im_participating_in(
607 610 self, user_id=None, statuses=None, query='', offset=0,
608 611 length=None, order_by=None, order_dir='desc'):
609 612 """
610 613 Get all pull requests that I'm participating in as a reviewer, or that I have opened
611 614 """
612 615
613 616 q = self._prepare_im_participating_query(
614 617 user_id, statuses=statuses, query=query, order_by=order_by,
615 618 order_dir=order_dir)
616 619
617 620 if length:
618 621 pull_requests = q.limit(length).offset(offset).all()
619 622 else:
620 623 pull_requests = q.all()
621 624
622 625 return pull_requests
623 626
624 627 def _prepare_participating_in_for_review_query(
625 628 self, user_id, statuses=None, query='', order_by=None, order_dir='desc'):
626 629
627 630 for_review_statuses = [
628 631 ChangesetStatus.STATUS_UNDER_REVIEW, ChangesetStatus.STATUS_NOT_REVIEWED
629 632 ]
630 633
631 634 pull_request_alias = aliased(PullRequest)
632 635 status_alias = aliased(ChangesetStatus)
633 636 reviewers_alias = aliased(PullRequestReviewers)
634 637
635 638 last_ver_subq = Session()\
636 639 .query(func.min(ChangesetStatus.version)) \
637 640 .filter(ChangesetStatus.pull_request_id == reviewers_alias.pull_request_id)\
638 641 .filter(ChangesetStatus.user_id == reviewers_alias.user_id) \
639 642 .subquery()
640 643
641 644 q = Session().query(pull_request_alias) \
642 645 .options(lazyload(pull_request_alias.author)) \
643 646 .join(reviewers_alias,
644 647 reviewers_alias.pull_request_id == pull_request_alias.pull_request_id) \
645 648 .outerjoin(status_alias,
646 649 and_(status_alias.user_id == reviewers_alias.user_id,
647 650 status_alias.pull_request_id == reviewers_alias.pull_request_id)) \
648 651 .filter(or_(status_alias.version == null(),
649 652 status_alias.version == last_ver_subq)) \
650 653 .filter(reviewers_alias.user_id == user_id) \
651 654 .filter(or_(status_alias.status == null(), status_alias.status.in_(for_review_statuses))) \
652 655 .group_by(pull_request_alias)
653 656
654 657 # closed,opened
655 658 if statuses:
656 659 q = q.filter(pull_request_alias.status.in_(statuses))
657 660
658 661 if query:
659 like_expression = u'%{}%'.format(safe_unicode(query))
662 like_expression = u'%{}%'.format(safe_str(query))
660 663 q = q.join(User, User.user_id == pull_request_alias.user_id)
661 664 q = q.filter(or_(
662 665 cast(pull_request_alias.pull_request_id, String).ilike(like_expression),
663 666 User.username.ilike(like_expression),
664 667 pull_request_alias.title.ilike(like_expression),
665 668 pull_request_alias.description.ilike(like_expression),
666 669 ))
667 670
668 671 order_map = {
669 672 'name_raw': pull_request_alias.pull_request_id,
670 673 'title': pull_request_alias.title,
671 674 'updated_on_raw': pull_request_alias.updated_on,
672 675 'target_repo': pull_request_alias.target_repo_id
673 676 }
674 677 if order_by and order_by in order_map:
675 678 if order_dir == 'asc':
676 679 q = q.order_by(order_map[order_by].asc())
677 680 else:
678 681 q = q.order_by(order_map[order_by].desc())
679 682
680 683 return q
681 684
682 685 def count_im_participating_in_for_review(self, user_id, statuses=None, query=''):
683 686 q = self._prepare_participating_in_for_review_query(user_id, statuses=statuses, query=query)
684 687 return q.count()
685 688
686 689 def get_im_participating_in_for_review(
687 690 self, user_id, statuses=None, query='', offset=0,
688 691 length=None, order_by=None, order_dir='desc'):
689 692 """
690 693 Get all pull requests that need user approval or rejection
691 694 """
692 695
693 696 q = self._prepare_participating_in_for_review_query(
694 697 user_id, statuses=statuses, query=query, order_by=order_by,
695 698 order_dir=order_dir)
696 699
697 700 if length:
698 701 pull_requests = q.limit(length).offset(offset).all()
699 702 else:
700 703 pull_requests = q.all()
701 704
702 705 return pull_requests
703 706
704 707 def get_versions(self, pull_request):
705 708 """
706 709 returns versions of a pull request sorted by version ID ascending
707 710 """
708 711 return PullRequestVersion.query()\
709 712 .filter(PullRequestVersion.pull_request == pull_request)\
710 713 .order_by(PullRequestVersion.pull_request_version_id.asc())\
711 714 .all()
712 715
713 716 def get_pr_version(self, pull_request_id, version=None):
714 717 at_version = None
715 718
716 719 if version and version == 'latest':
717 720 pull_request_ver = PullRequest.get(pull_request_id)
718 721 pull_request_obj = pull_request_ver
719 722 _org_pull_request_obj = pull_request_obj
720 723 at_version = 'latest'
721 724 elif version:
722 725 pull_request_ver = PullRequestVersion.get_or_404(version)
723 726 pull_request_obj = pull_request_ver
724 727 _org_pull_request_obj = pull_request_ver.pull_request
725 728 at_version = pull_request_ver.pull_request_version_id
726 729 else:
727 730 _org_pull_request_obj = pull_request_obj = PullRequest.get_or_404(
728 731 pull_request_id)
729 732
730 733 pull_request_display_obj = PullRequest.get_pr_display_object(
731 734 pull_request_obj, _org_pull_request_obj)
732 735
733 736 return _org_pull_request_obj, pull_request_obj, \
734 737 pull_request_display_obj, at_version
735 738
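# Illustrative sketch of consuming get_pr_version() (names assumed):
#
#   org_pr, pr_obj, display_obj, at_version = \
#       PullRequestModel().get_pr_version(pr_id, version=None)
#   # org_pr: the underlying PullRequest; pr_obj: the requested version
#   # (or the PR itself); display_obj: merged display object;
#   # at_version: 'latest', a version id, or None.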
736 739 def pr_commits_versions(self, versions):
737 740 """
738 741 Maps the pull-request commits onto all known PR versions. This way we can obtain,
739 742 for each commit, the PR versions it was introduced in.
740 743 """
741 744 commit_versions = collections.defaultdict(list)
742 745 num_versions = [x.pull_request_version_id for x in versions]
743 746 for ver in versions:
744 747 for commit_id in ver.revisions:
745 748 ver_idx = ChangesetComment.get_index_from_version(
746 749 ver.pull_request_version_id, num_versions=num_versions)
747 750 commit_versions[commit_id].append(ver_idx)
748 751 return commit_versions
749 752
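# Worked example (illustrative): given version v1 with revisions
# ['a', 'b'] and version v2 with revisions ['b', 'c'],
# pr_commits_versions([v1, v2]) yields roughly
# {'a': [idx_v1], 'b': [idx_v1, idx_v2], 'c': [idx_v2]}, where each idx
# comes from ChangesetComment.get_index_from_version().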
750 753 def create(self, created_by, source_repo, source_ref, target_repo,
751 754 target_ref, revisions, reviewers, observers, title, description=None,
752 755 common_ancestor_id=None,
753 756 description_renderer=None,
754 757 reviewer_data=None, translator=None, auth_user=None):
755 758 translator = translator or get_current_request().translate
756 759
757 760 created_by_user = self._get_user(created_by)
758 761 auth_user = auth_user or created_by_user.AuthUser()
759 762 source_repo = self._get_repo(source_repo)
760 763 target_repo = self._get_repo(target_repo)
761 764
762 765 pull_request = PullRequest()
763 766 pull_request.source_repo = source_repo
764 767 pull_request.source_ref = source_ref
765 768 pull_request.target_repo = target_repo
766 769 pull_request.target_ref = target_ref
767 770 pull_request.revisions = revisions
768 771 pull_request.title = title
769 772 pull_request.description = description
770 773 pull_request.description_renderer = description_renderer
771 774 pull_request.author = created_by_user
772 775 pull_request.reviewer_data = reviewer_data
773 776 pull_request.pull_request_state = pull_request.STATE_CREATING
774 777 pull_request.common_ancestor_id = common_ancestor_id
775 778
776 779 Session().add(pull_request)
777 780 Session().flush()
778 781
779 782 reviewer_ids = set()
780 783 # members / reviewers
781 784 for reviewer_object in reviewers:
782 785 user_id, reasons, mandatory, role, rules = reviewer_object
783 786 user = self._get_user(user_id)
784 787
785 788 # skip duplicates
786 789 if user.user_id in reviewer_ids:
787 790 continue
788 791
789 792 reviewer_ids.add(user.user_id)
790 793
791 794 reviewer = PullRequestReviewers()
792 795 reviewer.user = user
793 796 reviewer.pull_request = pull_request
794 797 reviewer.reasons = reasons
795 798 reviewer.mandatory = mandatory
796 799 reviewer.role = role
797 800
798 801 # NOTE(marcink): pick only first rule for now
799 802 rule_id = list(rules)[0] if rules else None
800 803 rule = RepoReviewRule.get(rule_id) if rule_id else None
801 804 if rule:
802 805 review_group = rule.user_group_vote_rule(user_id)
803 806 # we check if this particular reviewer is a member of a voting group
804 807 if review_group:
805 808 # NOTE(marcink):
806 809 # it can be that the user is a member of more groups, but we pick the first,
807 810 # same as the default reviewers algo
808 811 review_group = review_group[0]
809 812
810 813 rule_data = {
811 814 'rule_name':
812 815 rule.review_rule_name,
813 816 'rule_user_group_entry_id':
814 817 review_group.repo_review_rule_users_group_id,
815 818 'rule_user_group_name':
816 819 review_group.users_group.users_group_name,
817 820 'rule_user_group_members':
818 821 [x.user.username for x in review_group.users_group.members],
819 822 'rule_user_group_members_id':
820 823 [x.user.user_id for x in review_group.users_group.members],
821 824 }
822 825 # e.g {'vote_rule': -1, 'mandatory': True}
823 826 rule_data.update(review_group.rule_data())
824 827
825 828 reviewer.rule_data = rule_data
826 829
827 830 Session().add(reviewer)
828 831 Session().flush()
829 832
830 833 for observer_object in observers:
831 834 user_id, reasons, mandatory, role, rules = observer_object
832 835 user = self._get_user(user_id)
833 836
834 837 # skip duplicates from reviewers
835 838 if user.user_id in reviewer_ids:
836 839 continue
837 840
838 841 #reviewer_ids.add(user.user_id)
839 842
840 843 observer = PullRequestReviewers()
841 844 observer.user = user
842 845 observer.pull_request = pull_request
843 846 observer.reasons = reasons
844 847 observer.mandatory = mandatory
845 848 observer.role = role
846 849
847 850 # NOTE(marcink): pick only first rule for now
848 851 rule_id = list(rules)[0] if rules else None
849 852 rule = RepoReviewRule.get(rule_id) if rule_id else None
850 853 if rule:
851 854 # TODO(marcink): do we need this for observers ??
852 855 pass
853 856
854 857 Session().add(observer)
855 858 Session().flush()
856 859
857 860 # Set approval status to "Under Review" for all commits which are
858 861 # part of this pull request.
859 862 ChangesetStatusModel().set_status(
860 863 repo=target_repo,
861 864 status=ChangesetStatus.STATUS_UNDER_REVIEW,
862 865 user=created_by_user,
863 866 pull_request=pull_request
864 867 )
865 868 # we commit early at this point. This has to do with the fact
866 869 # that the queries above do some row-locking, and because of that
867 870 # we need to commit and finish the transaction before the validate call
868 871 # below, which for large repos could take long, resulting in long row locks
869 872 Session().commit()
870 873
871 874 # prepare workspace, and run initial merge simulation. Set state during that
872 875 # operation
873 876 pull_request = PullRequest.get(pull_request.pull_request_id)
874 877
875 878 # set state to merging for the merge simulation, and when finished to created,
876 879 # to mark that the simulation worked fine
877 880 with pull_request.set_state(PullRequest.STATE_MERGING,
878 881 final_state=PullRequest.STATE_CREATED) as state_obj:
879 882 MergeCheck.validate(
880 883 pull_request, auth_user=auth_user, translator=translator)
881 884
882 885 self.notify_reviewers(pull_request, reviewer_ids, created_by_user)
883 886 self.trigger_pull_request_hook(pull_request, created_by_user, 'create')
884 887
885 888 creation_data = pull_request.get_api_data(with_merge_state=False)
886 889 self._log_audit_action(
887 890 'repo.pull_request.create', {'data': creation_data},
888 891 auth_user, pull_request)
889 892
890 893 return pull_request
891 894
892 895 def trigger_pull_request_hook(self, pull_request, user, action, data=None):
893 896 pull_request = self.__get_pull_request(pull_request)
894 897 target_scm = pull_request.target_repo.scm_instance()
895 898 if action == 'create':
896 899 trigger_hook = hooks_utils.trigger_create_pull_request_hook
897 900 elif action == 'merge':
898 901 trigger_hook = hooks_utils.trigger_merge_pull_request_hook
899 902 elif action == 'close':
900 903 trigger_hook = hooks_utils.trigger_close_pull_request_hook
901 904 elif action == 'review_status_change':
902 905 trigger_hook = hooks_utils.trigger_review_pull_request_hook
903 906 elif action == 'update':
904 907 trigger_hook = hooks_utils.trigger_update_pull_request_hook
905 908 elif action == 'comment':
906 909 trigger_hook = hooks_utils.trigger_comment_pull_request_hook
907 910 elif action == 'comment_edit':
908 911 trigger_hook = hooks_utils.trigger_comment_pull_request_edit_hook
909 912 else:
910 913 return
911 914
912 915 log.debug('Handling pull_request %s trigger_pull_request_hook with action %s and hook: %s',
913 916 pull_request, action, trigger_hook)
914 917 trigger_hook(
915 918 username=user.username,
916 919 repo_name=pull_request.target_repo.repo_name,
917 920 repo_type=target_scm.alias,
918 921 pull_request=pull_request,
919 922 data=data)
920 923
921 924 def _get_commit_ids(self, pull_request):
922 925 """
923 926 Return the commit ids of the merged pull request.
924 927
925 928 This method does not yet deal correctly with the lack of autoupdates
926 929 or with implicit target updates.
927 930 For example: if a commit in the source repo is already in the target, it
928 931 will be reported anyway.
929 932 """
930 933 merge_rev = pull_request.merge_rev
931 934 if merge_rev is None:
932 935 raise ValueError('This pull request was not merged yet')
933 936
934 937 commit_ids = list(pull_request.revisions)
935 938 if merge_rev not in commit_ids:
936 939 commit_ids.append(merge_rev)
937 940
938 941 return commit_ids
939 942
940 943 def merge_repo(self, pull_request, user, extras):
941 944 repo_type = pull_request.source_repo.repo_type
942 log.debug("Merging pull request %s", pull_request.pull_request_id)
945 log.debug("Merging pull request %s", pull_request)
946
943 947 extras['user_agent'] = '{}/internal-merge'.format(repo_type)
944 948 merge_state = self._merge_pull_request(pull_request, user, extras)
945 949 if merge_state.executed:
946 950 log.debug("Merge was successful, updating the pull request comments.")
947 951 self._comment_and_close_pr(pull_request, user, merge_state)
948 952
949 953 self._log_audit_action(
950 954 'repo.pull_request.merge',
951 955 {'merge_state': merge_state.__dict__},
952 956 user, pull_request)
953 957
954 958 else:
955 log.warn("Merge failed, not updating the pull request.")
959 log.warning("Merge failed, not updating the pull request.")
956 960 return merge_state
957 961
958 962 def _merge_pull_request(self, pull_request, user, extras, merge_msg=None):
959 963 target_vcs = pull_request.target_repo.scm_instance()
960 964 source_vcs = pull_request.source_repo.scm_instance()
961 965
962 message = safe_unicode(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
966 message = safe_str(merge_msg or vcs_settings.MERGE_MESSAGE_TMPL).format(
963 967 pr_id=pull_request.pull_request_id,
964 968 pr_title=pull_request.title,
965 969 pr_desc=pull_request.description,
966 970 source_repo=source_vcs.name,
967 971 source_ref_name=pull_request.source_ref_parts.name,
968 972 target_repo=target_vcs.name,
969 973 target_ref_name=pull_request.target_ref_parts.name,
970 974 )
971 975
972 976 workspace_id = self._workspace_id(pull_request)
973 977 repo_id = pull_request.target_repo.repo_id
974 978 use_rebase = self._use_rebase_for_merging(pull_request)
975 979 close_branch = self._close_branch_before_merging(pull_request)
976 980 user_name = self._user_name_for_merging(pull_request, user)
977 981
978 982 target_ref = self._refresh_reference(
979 983 pull_request.target_ref_parts, target_vcs)
980 984
981 985 callback_daemon, extras = prepare_callback_daemon(
982 986 extras, protocol=vcs_settings.HOOKS_PROTOCOL,
983 987 host=vcs_settings.HOOKS_HOST,
984 988 use_direct_calls=vcs_settings.HOOKS_DIRECT_CALLS)
985 989
986 990 with callback_daemon:
987 991 # TODO: johbo: Implement a clean way to run a config_override
988 992 # for a single call.
989 993 target_vcs.config.set(
990 994 'rhodecode', 'RC_SCM_DATA', json.dumps(extras))
991 995
992 996 merge_state = target_vcs.merge(
993 997 repo_id, workspace_id, target_ref, source_vcs,
994 998 pull_request.source_ref_parts,
995 999 user_name=user_name, user_email=user.email,
996 1000 message=message, use_rebase=use_rebase,
997 1001 close_branch=close_branch)
1002
998 1003 return merge_state
999 1004
1000 1005 def _comment_and_close_pr(self, pull_request, user, merge_state, close_msg=None):
1001 1006 pull_request.merge_rev = merge_state.merge_ref.commit_id
1002 1007 pull_request.updated_on = datetime.datetime.now()
1003 1008 close_msg = close_msg or 'Pull request merged and closed'
1004 1009
1005 1010 CommentsModel().create(
1006 text=safe_unicode(close_msg),
1011 text=safe_str(close_msg),
1007 1012 repo=pull_request.target_repo.repo_id,
1008 1013 user=user.user_id,
1009 1014 pull_request=pull_request.pull_request_id,
1010 1015 f_path=None,
1011 1016 line_no=None,
1012 1017 closing_pr=True
1013 1018 )
1014 1019
1015 1020 Session().add(pull_request)
1016 1021 Session().flush()
1017 1022 # TODO: paris: replace invalidation with a less radical solution
1018 1023 ScmModel().mark_for_invalidation(
1019 1024 pull_request.target_repo.repo_name)
1020 1025 self.trigger_pull_request_hook(pull_request, user, 'merge')
1021 1026
1022 1027 def has_valid_update_type(self, pull_request):
1023 1028 source_ref_type = pull_request.source_ref_parts.type
1024 1029 return source_ref_type in self.REF_TYPES
1025 1030
1026 1031 def get_flow_commits(self, pull_request):
1027 1032
1028 1033 # source repo
1029 1034 source_ref_name = pull_request.source_ref_parts.name
1030 1035 source_ref_type = pull_request.source_ref_parts.type
1031 1036 source_ref_id = pull_request.source_ref_parts.commit_id
1032 1037 source_repo = pull_request.source_repo.scm_instance()
1033 1038
1034 1039 try:
1035 1040 if source_ref_type in self.REF_TYPES:
1036 1041 source_commit = source_repo.get_commit(
1037 1042 source_ref_name, reference_obj=pull_request.source_ref_parts)
1038 1043 else:
1039 1044 source_commit = source_repo.get_commit(source_ref_id)
1040 1045 except CommitDoesNotExistError:
1041 1046 raise SourceRefMissing()
1042 1047
1043 1048 # target repo
1044 1049 target_ref_name = pull_request.target_ref_parts.name
1045 1050 target_ref_type = pull_request.target_ref_parts.type
1046 1051 target_ref_id = pull_request.target_ref_parts.commit_id
1047 1052 target_repo = pull_request.target_repo.scm_instance()
1048 1053
1049 1054 try:
1050 1055 if target_ref_type in self.REF_TYPES:
1051 1056 target_commit = target_repo.get_commit(
1052 1057 target_ref_name, reference_obj=pull_request.target_ref_parts)
1053 1058 else:
1054 1059 target_commit = target_repo.get_commit(target_ref_id)
1055 1060 except CommitDoesNotExistError:
1056 1061 raise TargetRefMissing()
1057 1062
1058 1063 return source_commit, target_commit
1059 1064
1060 1065 def update_commits(self, pull_request, updating_user):
1061 1066 """
1062 1067 Get the updated list of commits for the pull request
1063 1068 and return the new pull request version and the list
1064 1069 of commits processed by this update action
1065 1070
1066 1071 updating_user is the user_object who triggered the update
1067 1072 """
1068 1073 pull_request = self.__get_pull_request(pull_request)
1069 1074 source_ref_type = pull_request.source_ref_parts.type
1070 1075 source_ref_name = pull_request.source_ref_parts.name
1071 1076 source_ref_id = pull_request.source_ref_parts.commit_id
1072 1077
1073 1078 target_ref_type = pull_request.target_ref_parts.type
1074 1079 target_ref_name = pull_request.target_ref_parts.name
1075 1080 target_ref_id = pull_request.target_ref_parts.commit_id
1076 1081
1077 1082 if not self.has_valid_update_type(pull_request):
1078 1083 log.debug("Skipping update of pull request %s due to ref type: %s",
1079 1084 pull_request, source_ref_type)
1080 1085 return UpdateResponse(
1081 1086 executed=False,
1082 1087 reason=UpdateFailureReason.WRONG_REF_TYPE,
1083 1088 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1084 1089 source_changed=False, target_changed=False)
1085 1090
1086 1091 try:
1087 1092 source_commit, target_commit = self.get_flow_commits(pull_request)
1088 1093 except SourceRefMissing:
1089 1094 return UpdateResponse(
1090 1095 executed=False,
1091 1096 reason=UpdateFailureReason.MISSING_SOURCE_REF,
1092 1097 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1093 1098 source_changed=False, target_changed=False)
1094 1099 except TargetRefMissing:
1095 1100 return UpdateResponse(
1096 1101 executed=False,
1097 1102 reason=UpdateFailureReason.MISSING_TARGET_REF,
1098 1103 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1099 1104 source_changed=False, target_changed=False)
1100 1105
1101 1106 source_changed = source_ref_id != source_commit.raw_id
1102 1107 target_changed = target_ref_id != target_commit.raw_id
1103 1108
1104 1109 if not (source_changed or target_changed):
1105 1110 log.debug("Nothing changed in pull request %s", pull_request)
1106 1111 return UpdateResponse(
1107 1112 executed=False,
1108 1113 reason=UpdateFailureReason.NO_CHANGE,
1109 1114 old=pull_request, new=None, common_ancestor_id=None, commit_changes=None,
1110 1115 source_changed=source_changed, target_changed=target_changed)
1111 1116
1112 1117 change_in_found = 'target repo' if target_changed else 'source repo'
1113 1118 log.debug('Updating pull request because of change in %s detected',
1114 1119 change_in_found)
1115 1120
1116 1121 # Finally an update is needed; in case of a source change
1117 1122 # we create a new version, otherwise just an in-place update
1118 1123 if source_changed:
1119 1124 pull_request_version = self._create_version_from_snapshot(pull_request)
1120 1125 self._link_comments_to_version(pull_request_version)
1121 1126 else:
1122 1127 try:
1123 1128 ver = pull_request.versions[-1]
1124 1129 except IndexError:
1125 1130 ver = None
1126 1131
1127 1132 pull_request.pull_request_version_id = \
1128 1133 ver.pull_request_version_id if ver else None
1129 1134 pull_request_version = pull_request
1130 1135
1131 1136 source_repo = pull_request.source_repo.scm_instance()
1132 1137 target_repo = pull_request.target_repo.scm_instance()
1133 1138
1134 1139 # re-compute commit ids
1135 1140 old_commit_ids = pull_request.revisions
1136 1141 pre_load = ["author", "date", "message", "branch"]
1137 1142 commit_ranges = target_repo.compare(
1138 1143 target_commit.raw_id, source_commit.raw_id, source_repo, merge=True,
1139 1144 pre_load=pre_load)
1140 1145
1141 1146 target_ref = target_commit.raw_id
1142 1147 source_ref = source_commit.raw_id
1143 1148 ancestor_commit_id = target_repo.get_common_ancestor(
1144 1149 target_ref, source_ref, source_repo)
1145 1150
1146 1151 if not ancestor_commit_id:
1147 1152 raise ValueError(
1148 1153 'cannot calculate diff info without a common ancestor. '
1149 1154 'Make sure both repositories are related, and have a common forking commit.')
1150 1155
1151 1156 pull_request.common_ancestor_id = ancestor_commit_id
1152 1157
1153 1158 pull_request.source_ref = '%s:%s:%s' % (
1154 1159 source_ref_type, source_ref_name, source_commit.raw_id)
1155 1160 pull_request.target_ref = '%s:%s:%s' % (
1156 1161 target_ref_type, target_ref_name, ancestor_commit_id)
1157 1162
1158 1163 pull_request.revisions = [
1159 1164 commit.raw_id for commit in reversed(commit_ranges)]
1160 1165 pull_request.updated_on = datetime.datetime.now()
1161 1166 Session().add(pull_request)
1162 1167 new_commit_ids = pull_request.revisions
1163 1168
1164 1169 old_diff_data, new_diff_data = self._generate_update_diffs(
1165 1170 pull_request, pull_request_version)
1166 1171
1167 1172 # calculate commit and file changes
1168 1173 commit_changes = self._calculate_commit_id_changes(
1169 1174 old_commit_ids, new_commit_ids)
1170 1175 file_changes = self._calculate_file_changes(
1171 1176 old_diff_data, new_diff_data)
1172 1177
1173 1178 # set comments as outdated if DIFFS changed
1174 1179 CommentsModel().outdate_comments(
1175 1180 pull_request, old_diff_data=old_diff_data,
1176 1181 new_diff_data=new_diff_data)
1177 1182
1178 1183 valid_commit_changes = (commit_changes.added or commit_changes.removed)
1179 1184 file_node_changes = (
1180 1185 file_changes.added or file_changes.modified or file_changes.removed)
1181 1186 pr_has_changes = valid_commit_changes or file_node_changes
1182 1187
1183 1188 # Add an automatic comment to the pull request, in case
1184 1189 # anything has changed
1185 1190 if pr_has_changes:
1186 1191 update_comment = CommentsModel().create(
1187 1192 text=self._render_update_message(ancestor_commit_id, commit_changes, file_changes),
1188 1193 repo=pull_request.target_repo,
1189 1194 user=pull_request.author,
1190 1195 pull_request=pull_request,
1191 1196 send_email=False, renderer=DEFAULT_COMMENTS_RENDERER)
1192 1197
1193 1198 # Update status to "Under Review" for added commits
1194 1199 for commit_id in commit_changes.added:
1195 1200 ChangesetStatusModel().set_status(
1196 1201 repo=pull_request.source_repo,
1197 1202 status=ChangesetStatus.STATUS_UNDER_REVIEW,
1198 1203 comment=update_comment,
1199 1204 user=pull_request.author,
1200 1205 pull_request=pull_request,
1201 1206 revision=commit_id)
1202 1207
1203 1208 # initial DB commit of this update
1204 1209 Session().commit()
1205 1210
1206 1211 if pr_has_changes:
1207 1212 # send update email to users
1208 1213 try:
1209 1214 self.notify_users(pull_request=pull_request, updating_user=updating_user,
1210 1215 ancestor_commit_id=ancestor_commit_id,
1211 1216 commit_changes=commit_changes,
1212 1217 file_changes=file_changes)
1213 1218 Session().commit()
1214 1219 except Exception:
1215 1220 log.exception('Failed to send email notification to users')
1216 1221 Session().rollback()
1217 1222
1218 1223 log.debug(
1219 1224 'Updated pull request %s, added_ids: %s, common_ids: %s, '
1220 1225 'removed_ids: %s', pull_request.pull_request_id,
1221 1226 commit_changes.added, commit_changes.common, commit_changes.removed)
1222 1227 log.debug(
1223 1228 'Updated pull request with the following file changes: %s',
1224 1229 file_changes)
1225 1230
1226 1231 log.info(
1227 1232 "Updated pull request %s from commit %s to commit %s, "
1228 1233 "stored new version %s of this pull request.",
1229 1234 pull_request.pull_request_id, source_ref_id,
1230 1235 pull_request.source_ref_parts.commit_id,
1231 1236 pull_request_version.pull_request_version_id)
1232 1237
1233 1238 self.trigger_pull_request_hook(pull_request, pull_request.author, 'update')
1234 1239
1235 1240 return UpdateResponse(
1236 1241 executed=True, reason=UpdateFailureReason.NONE,
1237 1242 old=pull_request, new=pull_request_version,
1238 1243 common_ancestor_id=ancestor_commit_id, commit_changes=commit_changes,
1239 1244 source_changed=source_changed, target_changed=target_changed)
1240 1245
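# Illustrative sketch of consuming the UpdateResponse defined earlier in
# this module (names assumed):
#
#   resp = PullRequestModel().update_commits(pull_request, updating_user)
#   if resp.executed:
#       log.info('added commits: %s', resp.changes.added)
#   else:
#       log.info('update skipped, reason: %s', resp.reason)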
1241 1246 def _create_version_from_snapshot(self, pull_request):
1242 1247 version = PullRequestVersion()
1243 1248 version.title = pull_request.title
1244 1249 version.description = pull_request.description
1245 1250 version.status = pull_request.status
1246 1251 version.pull_request_state = pull_request.pull_request_state
1247 1252 version.created_on = datetime.datetime.now()
1248 1253 version.updated_on = pull_request.updated_on
1249 1254 version.user_id = pull_request.user_id
1250 1255 version.source_repo = pull_request.source_repo
1251 1256 version.source_ref = pull_request.source_ref
1252 1257 version.target_repo = pull_request.target_repo
1253 1258 version.target_ref = pull_request.target_ref
1254 1259
1255 1260 version._last_merge_source_rev = pull_request._last_merge_source_rev
1256 1261 version._last_merge_target_rev = pull_request._last_merge_target_rev
1257 1262 version.last_merge_status = pull_request.last_merge_status
1258 1263 version.last_merge_metadata = pull_request.last_merge_metadata
1259 1264 version.shadow_merge_ref = pull_request.shadow_merge_ref
1260 1265 version.merge_rev = pull_request.merge_rev
1261 1266 version.reviewer_data = pull_request.reviewer_data
1262 1267
1263 1268 version.revisions = pull_request.revisions
1264 1269 version.common_ancestor_id = pull_request.common_ancestor_id
1265 1270 version.pull_request = pull_request
1266 1271 Session().add(version)
1267 1272 Session().flush()
1268 1273
1269 1274 return version
1270 1275
1271 1276 def _generate_update_diffs(self, pull_request, pull_request_version):
1272 1277
1273 1278 diff_context = (
1274 1279 self.DIFF_CONTEXT +
1275 1280 CommentsModel.needed_extra_diff_context())
1276 1281 hide_whitespace_changes = False
1277 1282 source_repo = pull_request_version.source_repo
1278 1283 source_ref_id = pull_request_version.source_ref_parts.commit_id
1279 1284 target_ref_id = pull_request_version.target_ref_parts.commit_id
1280 1285 old_diff = self._get_diff_from_pr_or_version(
1281 1286 source_repo, source_ref_id, target_ref_id,
1282 1287 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1283 1288
1284 1289 source_repo = pull_request.source_repo
1285 1290 source_ref_id = pull_request.source_ref_parts.commit_id
1286 1291 target_ref_id = pull_request.target_ref_parts.commit_id
1287 1292
1288 1293 new_diff = self._get_diff_from_pr_or_version(
1289 1294 source_repo, source_ref_id, target_ref_id,
1290 1295 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
1291 1296
1292 old_diff_data = diffs.DiffProcessor(old_diff)
1297 # NOTE: this was using diff_format='gitdiff'
1298 old_diff_data = diffs.DiffProcessor(old_diff, diff_format='newdiff')
1293 1299 old_diff_data.prepare()
1294 new_diff_data = diffs.DiffProcessor(new_diff)
1300 new_diff_data = diffs.DiffProcessor(new_diff, diff_format='newdiff')
1295 1301 new_diff_data.prepare()
1296 1302
1297 1303 return old_diff_data, new_diff_data
1298 1304
1299 1305 def _link_comments_to_version(self, pull_request_version):
1300 1306 """
1301 1307 Link all unlinked comments of this pull request to the given version.
1302 1308
1303 1309 :param pull_request_version: The `PullRequestVersion` to which
1304 1310 the comments shall be linked.
1305 1311
1306 1312 """
1307 1313 pull_request = pull_request_version.pull_request
1308 1314 comments = ChangesetComment.query()\
1309 1315 .filter(
1310 1316 # TODO: johbo: Should we query for the repo at all here?
1311 1317 # Pending decision on how comments of PRs are to be related
1312 1318 # to either the source repo, the target repo or no repo at all.
1313 1319 ChangesetComment.repo_id == pull_request.target_repo.repo_id,
1314 1320 ChangesetComment.pull_request == pull_request,
1315 1321 ChangesetComment.pull_request_version == None)\
1316 1322 .order_by(ChangesetComment.comment_id.asc())
1317 1323
1318 1324 # TODO: johbo: Find out why this breaks if it is done in a bulk
1319 1325 # operation.
1320 1326 for comment in comments:
1321 1327 comment.pull_request_version_id = (
1322 1328 pull_request_version.pull_request_version_id)
1323 1329 Session().add(comment)
1324 1330
1325 1331 def _calculate_commit_id_changes(self, old_ids, new_ids):
1326 1332 added = [x for x in new_ids if x not in old_ids]
1327 1333 common = [x for x in new_ids if x in old_ids]
1328 1334 removed = [x for x in old_ids if x not in new_ids]
1329 1335 total = new_ids
1330 1336 return ChangeTuple(added, common, removed, total)
1331 1337
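# Worked example (illustrative): with old_ids=['a', 'b', 'c'] and
# new_ids=['b', 'c', 'd'] this returns
# ChangeTuple(added=['d'], common=['b', 'c'], removed=['a'],
#             total=['b', 'c', 'd']).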
1332 1338 def _calculate_file_changes(self, old_diff_data, new_diff_data):
1333 1339
1334 1340 old_files = OrderedDict()
1335 1341 for diff_data in old_diff_data.parsed_diff:
1336 1342 old_files[diff_data['filename']] = md5_safe(diff_data['raw_diff'])
1337 1343
1338 1344 added_files = []
1339 1345 modified_files = []
1340 1346 removed_files = []
1341 1347 for diff_data in new_diff_data.parsed_diff:
1342 1348 new_filename = diff_data['filename']
1343 1349 new_hash = md5_safe(diff_data['raw_diff'])
1344 1350
1345 1351 old_hash = old_files.get(new_filename)
1346 1352 if not old_hash:
1347 1353 # file is not present in the old diff, we have to figure out the
1348 1354 # operation (ADD/REMOVE) from the parsed diff
1349 1355 operations_dict = diff_data['stats']['ops']
1350 1356 if diffs.DEL_FILENODE in operations_dict:
1351 1357 removed_files.append(new_filename)
1352 1358 else:
1353 1359 added_files.append(new_filename)
1354 1360 else:
1355 1361 if new_hash != old_hash:
1356 1362 modified_files.append(new_filename)
1357 1363 # now remove the file from old, since we have seen it already
1358 1364 del old_files[new_filename]
1359 1365
1360 1366 # removed files are those present in old, but not in NEW;
1361 1367 # since we remove old files that are present in the new diff, any
1362 1368 # left-overs should be the removed files
1363 1369 removed_files.extend(old_files.keys())
1364 1370
1365 1371 return FileChangeTuple(added_files, modified_files, removed_files)
1366 1372
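# Worked example (illustrative): if the old diff touched a.py and b.py,
# and the new diff touches b.py (with different content) and c.py (not a
# file deletion), the result is FileChangeTuple(added=['c.py'],
# modified=['b.py'], removed=['a.py']).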
1367 1373 def _render_update_message(self, ancestor_commit_id, changes, file_changes):
1368 1374 """
1369 1375 render the message using DEFAULT_COMMENTS_RENDERER (RST renderer),
1370 1376 so it always looks the same regardless of which default
1371 1377 renderer the system is using.
1372 1378
1373 1379 :param ancestor_commit_id: ancestor raw_id
1374 1380 :param changes: changes named tuple
1375 1381 :param file_changes: file changes named tuple
1376 1382
1377 1383 """
1378 1384 new_status = ChangesetStatus.get_status_lbl(
1379 1385 ChangesetStatus.STATUS_UNDER_REVIEW)
1380 1386
1381 1387 changed_files = (
1382 1388 file_changes.added + file_changes.modified + file_changes.removed)
1383 1389
1384 1390 params = {
1385 1391 'under_review_label': new_status,
1386 1392 'added_commits': changes.added,
1387 1393 'removed_commits': changes.removed,
1388 1394 'changed_files': changed_files,
1389 1395 'added_files': file_changes.added,
1390 1396 'modified_files': file_changes.modified,
1391 1397 'removed_files': file_changes.removed,
1392 1398 'ancestor_commit_id': ancestor_commit_id
1393 1399 }
1394 1400 renderer = RstTemplateRenderer()
1395 1401 return renderer.render('pull_request_update.mako', **params)
1396 1402
1397 1403 def edit(self, pull_request, title, description, description_renderer, user):
1398 1404 pull_request = self.__get_pull_request(pull_request)
1399 1405 old_data = pull_request.get_api_data(with_merge_state=False)
1400 1406 if pull_request.is_closed():
1401 1407 raise ValueError('This pull request is closed')
1402 1408 if title:
1403 1409 pull_request.title = title
1404 1410 pull_request.description = description
1405 1411 pull_request.updated_on = datetime.datetime.now()
1406 1412 pull_request.description_renderer = description_renderer
1407 1413 Session().add(pull_request)
1408 1414 self._log_audit_action(
1409 1415 'repo.pull_request.edit', {'old_data': old_data},
1410 1416 user, pull_request)
1411 1417
1412 1418 def update_reviewers(self, pull_request, reviewer_data, user):
1413 1419 """
1414 1420 Update the reviewers in the pull request
1415 1421
1416 1422 :param pull_request: the pr to update
1417 1423 :param reviewer_data: list of tuples
1418 1424 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1419 1425 :param user: current user who triggers this action
1420 1426 """
1421 1427
1422 1428 pull_request = self.__get_pull_request(pull_request)
1423 1429 if pull_request.is_closed():
1424 1430 raise ValueError('This pull request is closed')
1425 1431
1426 1432 reviewers = {}
1427 1433 for user_id, reasons, mandatory, role, rules in reviewer_data:
1428 1434 if isinstance(user_id, (int, str)):
1429 1435 user_id = self._get_user(user_id).user_id
1430 1436 reviewers[user_id] = {
1431 1437 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1432 1438
1433 1439 reviewers_ids = set(reviewers.keys())
1434 1440 current_reviewers = PullRequestReviewers.get_pull_request_reviewers(
1435 1441 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_REVIEWER)
1436 1442
1437 1443 current_reviewers_ids = set([x.user.user_id for x in current_reviewers])
1438 1444
1439 1445 ids_to_add = reviewers_ids.difference(current_reviewers_ids)
1440 1446 ids_to_remove = current_reviewers_ids.difference(reviewers_ids)
1441 1447
1442 1448 log.debug("Adding %s reviewers", ids_to_add)
1443 1449 log.debug("Removing %s reviewers", ids_to_remove)
1444 1450 changed = False
1445 1451 added_audit_reviewers = []
1446 1452 removed_audit_reviewers = []
1447 1453
1448 1454 for uid in ids_to_add:
1449 1455 changed = True
1450 1456 _usr = self._get_user(uid)
1451 1457 reviewer = PullRequestReviewers()
1452 1458 reviewer.user = _usr
1453 1459 reviewer.pull_request = pull_request
1454 1460 reviewer.reasons = reviewers[uid]['reasons']
1455 1461 # NOTE(marcink): mandatory shouldn't be changed now
1456 1462 # reviewer.mandatory = reviewers[uid]['reasons']
1457 1463 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1458 1464 reviewer.role = PullRequestReviewers.ROLE_REVIEWER
1459 1465 Session().add(reviewer)
1460 1466 added_audit_reviewers.append(reviewer.get_dict())
1461 1467
1462 1468 for uid in ids_to_remove:
1463 1469 changed = True
1464 1470 # NOTE(marcink): we fetch "ALL" reviewer objects using .all().
1465 1471 # This is an edge case that handles a previous state of having the same reviewer twice.
1466 1472 # This CAN happen due to the lack of DB checks
1467 1473 reviewers = PullRequestReviewers.query()\
1468 1474 .filter(PullRequestReviewers.user_id == uid,
1469 1475 PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER,
1470 1476 PullRequestReviewers.pull_request == pull_request)\
1471 1477 .all()
1472 1478
1473 1479 for obj in reviewers:
1474 1480 removed_audit_reviewers.append(obj.get_dict())
1475 1481 Session().delete(obj)
1476 1482
1477 1483 if changed:
1478 1484 Session().expire_all()
1479 1485 pull_request.updated_on = datetime.datetime.now()
1480 1486 Session().add(pull_request)
1481 1487
1482 1488 # finally store audit logs
1483 1489 for user_data in added_audit_reviewers:
1484 1490 self._log_audit_action(
1485 1491 'repo.pull_request.reviewer.add', {'data': user_data},
1486 1492 user, pull_request)
1487 1493 for user_data in removed_audit_reviewers:
1488 1494 self._log_audit_action(
1489 1495 'repo.pull_request.reviewer.delete', {'old_data': user_data},
1490 1496 user, pull_request)
1491 1497
1492 1498 self.notify_reviewers(pull_request, ids_to_add, user)
1493 1499 return ids_to_add, ids_to_remove
1494 1500
1495 1501 def update_observers(self, pull_request, observer_data, user):
1496 1502 """
1497 1503 Update the observers in the pull request
1498 1504
1499 1505 :param pull_request: the pr to update
1500 1506 :param observer_data: list of tuples
1501 1507 [(user, ['reason1', 'reason2'], mandatory_flag, role, [rules])]
1502 1508         :param user: current user who triggers this action
1503 1509 """
1504 1510 pull_request = self.__get_pull_request(pull_request)
1505 1511 if pull_request.is_closed():
1506 1512 raise ValueError('This pull request is closed')
1507 1513
1508 1514 observers = {}
1509 1515 for user_id, reasons, mandatory, role, rules in observer_data:
1510 1516 if isinstance(user_id, (int, str)):
1511 1517 user_id = self._get_user(user_id).user_id
1512 1518 observers[user_id] = {
1513 1519                 'reasons': reasons, 'mandatory': mandatory, 'role': role}
1514 1520
1515 1521 observers_ids = set(observers.keys())
1516 1522 current_observers = PullRequestReviewers.get_pull_request_reviewers(
1517 1523 pull_request.pull_request_id, role=PullRequestReviewers.ROLE_OBSERVER)
1518 1524
1519 1525 current_observers_ids = set([x.user.user_id for x in current_observers])
1520 1526
1521 1527 ids_to_add = observers_ids.difference(current_observers_ids)
1522 1528 ids_to_remove = current_observers_ids.difference(observers_ids)
1523 1529
1524 1530 log.debug("Adding %s observer", ids_to_add)
1525 1531 log.debug("Removing %s observer", ids_to_remove)
1526 1532 changed = False
1527 1533 added_audit_observers = []
1528 1534 removed_audit_observers = []
1529 1535
1530 1536 for uid in ids_to_add:
1531 1537 changed = True
1532 1538 _usr = self._get_user(uid)
1533 1539 observer = PullRequestReviewers()
1534 1540 observer.user = _usr
1535 1541 observer.pull_request = pull_request
1536 1542 observer.reasons = observers[uid]['reasons']
1537 1543 # NOTE(marcink): mandatory shouldn't be changed now
1538 1544             # observer.mandatory = observers[uid]['mandatory']
1539 1545
1540 1546 # NOTE(marcink): role should be hardcoded, so we won't edit it.
1541 1547 observer.role = PullRequestReviewers.ROLE_OBSERVER
1542 1548 Session().add(observer)
1543 1549 added_audit_observers.append(observer.get_dict())
1544 1550
1545 1551 for uid in ids_to_remove:
1546 1552 changed = True
1547 1553             # NOTE(marcink): we fetch "ALL" observer objects using .all().
1548 1554             # This is an edge case that handles a previous state of having the same observer twice.
1549 1555             # This CAN happen due to the lack of DB checks.
1550 1556 observers = PullRequestReviewers.query()\
1551 1557 .filter(PullRequestReviewers.user_id == uid,
1552 1558 PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER,
1553 1559 PullRequestReviewers.pull_request == pull_request)\
1554 1560 .all()
1555 1561
1556 1562 for obj in observers:
1557 1563                 removed_audit_observers.append(obj.get_dict())
1558 1564 Session().delete(obj)
1559 1565
1560 1566 if changed:
1561 1567 Session().expire_all()
1562 1568 pull_request.updated_on = datetime.datetime.now()
1563 1569 Session().add(pull_request)
1564 1570
1565 1571 # finally store audit logs
1566 1572 for user_data in added_audit_observers:
1567 1573 self._log_audit_action(
1568 1574 'repo.pull_request.observer.add', {'data': user_data},
1569 1575 user, pull_request)
1570 1576 for user_data in removed_audit_observers:
1571 1577 self._log_audit_action(
1572 1578 'repo.pull_request.observer.delete', {'old_data': user_data},
1573 1579 user, pull_request)
1574 1580
1575 1581 self.notify_observers(pull_request, ids_to_add, user)
1576 1582 return ids_to_add, ids_to_remove
1577 1583
1578 1584 def get_url(self, pull_request, request=None, permalink=False):
1579 1585 if not request:
1580 1586 request = get_current_request()
1581 1587
1582 1588 if permalink:
1583 1589 return request.route_url(
1584 1590 'pull_requests_global',
1585 1591 pull_request_id=pull_request.pull_request_id,)
1586 1592 else:
1587 1593 return request.route_url('pullrequest_show',
1588 1594 repo_name=safe_str(pull_request.target_repo.repo_name),
1589 1595 pull_request_id=pull_request.pull_request_id,)
1590 1596
1591 1597 def get_shadow_clone_url(self, pull_request, request=None):
1592 1598 """
1593 1599         Returns a qualified url pointing to the shadow repository. If this pull
1594 1600 request is closed there is no shadow repository and ``None`` will be
1595 1601 returned.
1596 1602 """
1597 1603 if pull_request.is_closed():
1598 1604 return None
1599 1605 else:
1600 1606 pr_url = urllib.parse.unquote(self.get_url(pull_request, request=request))
1601 return safe_unicode('{pr_url}/repository'.format(pr_url=pr_url))
1607 return safe_str('{pr_url}/repository'.format(pr_url=pr_url))
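            # For example (hypothetical host and id): a PR shown at
            # https://code.example.com/repo/pull-request/7 would yield the shadow
            # clone URL https://code.example.com/repo/pull-request/7/repository.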
1602 1608
1603 1609 def _notify_reviewers(self, pull_request, user_ids, role, user):
1604 1610 # notification to reviewers/observers
1605 1611 if not user_ids:
1606 1612 return
1607 1613
1608 1614 log.debug('Notify following %s users about pull-request %s', role, user_ids)
1609 1615
1610 1616 pull_request_obj = pull_request
1611 1617 # get the current participants of this pull request
1612 1618 recipients = user_ids
1613 1619 notification_type = EmailNotificationModel.TYPE_PULL_REQUEST
1614 1620
1615 1621 pr_source_repo = pull_request_obj.source_repo
1616 1622 pr_target_repo = pull_request_obj.target_repo
1617 1623
1618 1624 pr_url = h.route_url('pullrequest_show',
1619 1625 repo_name=pr_target_repo.repo_name,
1620 1626 pull_request_id=pull_request_obj.pull_request_id,)
1621 1627
1622 1628 # set some variables for email notification
1623 1629 pr_target_repo_url = h.route_url(
1624 1630 'repo_summary', repo_name=pr_target_repo.repo_name)
1625 1631
1626 1632 pr_source_repo_url = h.route_url(
1627 1633 'repo_summary', repo_name=pr_source_repo.repo_name)
1628 1634
1629 1635 # pull request specifics
1630 1636 pull_request_commits = [
1631 1637 (x.raw_id, x.message)
1632 1638 for x in map(pr_source_repo.get_commit, pull_request.revisions)]
1633 1639
1634 1640 current_rhodecode_user = user
1635 1641 kwargs = {
1636 1642 'user': current_rhodecode_user,
1637 1643 'pull_request_author': pull_request.author,
1638 1644 'pull_request': pull_request_obj,
1639 1645 'pull_request_commits': pull_request_commits,
1640 1646
1641 1647 'pull_request_target_repo': pr_target_repo,
1642 1648 'pull_request_target_repo_url': pr_target_repo_url,
1643 1649
1644 1650 'pull_request_source_repo': pr_source_repo,
1645 1651 'pull_request_source_repo_url': pr_source_repo_url,
1646 1652
1647 1653 'pull_request_url': pr_url,
1648 1654 'thread_ids': [pr_url],
1649 1655 'user_role': role
1650 1656 }
1651 1657
1652 1658 # create notification objects, and emails
1653 1659 NotificationModel().create(
1654 1660 created_by=current_rhodecode_user,
1655 1661 notification_subject='', # Filled in based on the notification_type
1656 1662 notification_body='', # Filled in based on the notification_type
1657 1663 notification_type=notification_type,
1658 1664 recipients=recipients,
1659 1665 email_kwargs=kwargs,
1660 1666 )
1661 1667
1662 1668 def notify_reviewers(self, pull_request, reviewers_ids, user):
1663 1669 return self._notify_reviewers(pull_request, reviewers_ids,
1664 1670 PullRequestReviewers.ROLE_REVIEWER, user)
1665 1671
1666 1672 def notify_observers(self, pull_request, observers_ids, user):
1667 1673 return self._notify_reviewers(pull_request, observers_ids,
1668 1674 PullRequestReviewers.ROLE_OBSERVER, user)
1669 1675
1670 1676 def notify_users(self, pull_request, updating_user, ancestor_commit_id,
1671 1677 commit_changes, file_changes):
1672 1678
1673 1679 updating_user_id = updating_user.user_id
1674 1680 reviewers = set([x.user.user_id for x in pull_request.get_pull_request_reviewers()])
1675 1681 # NOTE(marcink): send notification to all other users except to
1676 1682 # person who updated the PR
1677 1683 recipients = reviewers.difference(set([updating_user_id]))
1678 1684
1679 1685 log.debug('Notify following recipients about pull-request update %s', recipients)
1680 1686
1681 1687 pull_request_obj = pull_request
1682 1688
1683 1689 # send email about the update
1684 1690 changed_files = (
1685 1691 file_changes.added + file_changes.modified + file_changes.removed)
1686 1692
1687 1693 pr_source_repo = pull_request_obj.source_repo
1688 1694 pr_target_repo = pull_request_obj.target_repo
1689 1695
1690 1696 pr_url = h.route_url('pullrequest_show',
1691 1697 repo_name=pr_target_repo.repo_name,
1692 1698 pull_request_id=pull_request_obj.pull_request_id,)
1693 1699
1694 1700 # set some variables for email notification
1695 1701 pr_target_repo_url = h.route_url(
1696 1702 'repo_summary', repo_name=pr_target_repo.repo_name)
1697 1703
1698 1704 pr_source_repo_url = h.route_url(
1699 1705 'repo_summary', repo_name=pr_source_repo.repo_name)
1700 1706
1701 1707 email_kwargs = {
1702 1708 'date': datetime.datetime.now(),
1703 1709 'updating_user': updating_user,
1704 1710
1705 1711 'pull_request': pull_request_obj,
1706 1712
1707 1713 'pull_request_target_repo': pr_target_repo,
1708 1714 'pull_request_target_repo_url': pr_target_repo_url,
1709 1715
1710 1716 'pull_request_source_repo': pr_source_repo,
1711 1717 'pull_request_source_repo_url': pr_source_repo_url,
1712 1718
1713 1719 'pull_request_url': pr_url,
1714 1720
1715 1721 'ancestor_commit_id': ancestor_commit_id,
1716 1722 'added_commits': commit_changes.added,
1717 1723 'removed_commits': commit_changes.removed,
1718 1724 'changed_files': changed_files,
1719 1725 'added_files': file_changes.added,
1720 1726 'modified_files': file_changes.modified,
1721 1727 'removed_files': file_changes.removed,
1722 1728 'thread_ids': [pr_url],
1723 1729 }
1724 1730
1725 1731 # create notification objects, and emails
1726 1732 NotificationModel().create(
1727 1733 created_by=updating_user,
1728 1734 notification_subject='', # Filled in based on the notification_type
1729 1735 notification_body='', # Filled in based on the notification_type
1730 1736 notification_type=EmailNotificationModel.TYPE_PULL_REQUEST_UPDATE,
1731 1737 recipients=recipients,
1732 1738 email_kwargs=email_kwargs,
1733 1739 )
1734 1740
1735 1741 def delete(self, pull_request, user=None):
1736 1742 if not user:
1737 1743 user = getattr(get_current_rhodecode_user(), 'username', None)
1738 1744
1739 1745 pull_request = self.__get_pull_request(pull_request)
1740 1746 old_data = pull_request.get_api_data(with_merge_state=False)
1741 1747 self._cleanup_merge_workspace(pull_request)
1742 1748 self._log_audit_action(
1743 1749 'repo.pull_request.delete', {'old_data': old_data},
1744 1750 user, pull_request)
1745 1751 Session().delete(pull_request)
1746 1752
1747 1753 def close_pull_request(self, pull_request, user):
1748 1754 pull_request = self.__get_pull_request(pull_request)
1749 1755 self._cleanup_merge_workspace(pull_request)
1750 1756 pull_request.status = PullRequest.STATUS_CLOSED
1751 1757 pull_request.updated_on = datetime.datetime.now()
1752 1758 Session().add(pull_request)
1753 1759 self.trigger_pull_request_hook(pull_request, pull_request.author, 'close')
1754 1760
1755 1761 pr_data = pull_request.get_api_data(with_merge_state=False)
1756 1762 self._log_audit_action(
1757 1763 'repo.pull_request.close', {'data': pr_data}, user, pull_request)
1758 1764
1759 1765 def close_pull_request_with_comment(
1760 1766 self, pull_request, user, repo, message=None, auth_user=None):
1761 1767
1762 1768 pull_request_review_status = pull_request.calculated_review_status()
1763 1769
1764 1770 if pull_request_review_status == ChangesetStatus.STATUS_APPROVED:
1765 1771 # approved only if we have voting consent
1766 1772 status = ChangesetStatus.STATUS_APPROVED
1767 1773 else:
1768 1774 status = ChangesetStatus.STATUS_REJECTED
1769 1775 status_lbl = ChangesetStatus.get_status_lbl(status)
1770 1776
1771 1777 default_message = (
1772 1778 'Closing with status change {transition_icon} {status}.'
1773 1779 ).format(transition_icon='>', status=status_lbl)
1774 1780 text = message or default_message
1775 1781
1776 1782 # create a comment, and link it to new status
1777 1783 comment = CommentsModel().create(
1778 1784 text=text,
1779 1785 repo=repo.repo_id,
1780 1786 user=user.user_id,
1781 1787 pull_request=pull_request.pull_request_id,
1782 1788 status_change=status_lbl,
1783 1789 status_change_type=status,
1784 1790 closing_pr=True,
1785 1791 auth_user=auth_user,
1786 1792 )
1787 1793
1788 1794 # calculate old status before we change it
1789 1795 old_calculated_status = pull_request.calculated_review_status()
1790 1796 ChangesetStatusModel().set_status(
1791 1797 repo.repo_id,
1792 1798 status,
1793 1799 user.user_id,
1794 1800 comment=comment,
1795 1801 pull_request=pull_request.pull_request_id
1796 1802 )
1797 1803
1798 1804 Session().flush()
1799 1805
1800 1806 self.trigger_pull_request_hook(pull_request, user, 'comment',
1801 1807 data={'comment': comment})
1802 1808
1803 1809         # we now calculate the status of the pull request again, and based on
1804 1810         # that calculation trigger a status change. This matters when a
1805 1811         # non-reviewer admin closes a pr: their vote doesn't change the status,
1806 1812         # while a reviewer's vote might.
1807 1813 calculated_status = pull_request.calculated_review_status()
1808 1814 if old_calculated_status != calculated_status:
1809 1815 self.trigger_pull_request_hook(pull_request, user, 'review_status_change',
1810 1816 data={'status': calculated_status})
1811 1817
1812 1818 # finally close the PR
1813 1819 PullRequestModel().close_pull_request(pull_request.pull_request_id, user)
1814 1820
1815 1821 return comment, status
1816 1822
1817 1823 def merge_status(self, pull_request, translator=None, force_shadow_repo_refresh=False):
1818 1824 _ = translator or get_current_request().translate
1819 1825
1820 1826 if not self._is_merge_enabled(pull_request):
1821 1827 return None, False, _('Server-side pull request merging is disabled.')
1822 1828
1823 1829 if pull_request.is_closed():
1824 1830 return None, False, _('This pull request is closed.')
1825 1831
1826 1832 merge_possible, msg = self._check_repo_requirements(
1827 1833 target=pull_request.target_repo, source=pull_request.source_repo,
1828 1834 translator=_)
1829 1835 if not merge_possible:
1830 1836 return None, merge_possible, msg
1831 1837
1832 1838 try:
1833 1839 merge_response = self._try_merge(
1834 1840 pull_request, force_shadow_repo_refresh=force_shadow_repo_refresh)
1835 1841 log.debug("Merge response: %s", merge_response)
1836 1842 return merge_response, merge_response.possible, merge_response.merge_status_message
1837 1843 except NotImplementedError:
1838 1844 return None, False, _('Pull request merging is not supported.')
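    # Usage sketch (illustrative, not part of the original code): callers
    # unpack the returned triple, e.g.
    #   resp, possible, msg = PullRequestModel().merge_status(pull_request)
    #   if not possible:
    #       log.warning('cannot merge: %s', msg)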
1839 1845
1840 1846 def _check_repo_requirements(self, target, source, translator):
1841 1847 """
1842 1848 Check if `target` and `source` have compatible requirements.
1843 1849
1844 1850 Currently this is just checking for largefiles.
1845 1851 """
1846 1852 _ = translator
1847 1853 target_has_largefiles = self._has_largefiles(target)
1848 1854 source_has_largefiles = self._has_largefiles(source)
1849 1855 merge_possible = True
1850 1856 message = u''
1851 1857
1852 1858 if target_has_largefiles != source_has_largefiles:
1853 1859 merge_possible = False
1854 1860 if source_has_largefiles:
1855 1861 message = _(
1856 1862 'Target repository large files support is disabled.')
1857 1863 else:
1858 1864 message = _(
1859 1865 'Source repository large files support is disabled.')
1860 1866
1861 1867 return merge_possible, message
1862 1868
1863 1869 def _has_largefiles(self, repo):
1864 1870 largefiles_ui = VcsSettingsModel(repo=repo).get_ui_settings(
1865 1871 'extensions', 'largefiles')
1866 1872 return largefiles_ui and largefiles_ui[0].active
1867 1873
1868 1874 def _try_merge(self, pull_request, force_shadow_repo_refresh=False):
1869 1875 """
1870 1876 Try to merge the pull request and return the merge status.
1871 1877 """
1872 1878 log.debug(
1873 1879 "Trying out if the pull request %s can be merged. Force_refresh=%s",
1874 1880 pull_request.pull_request_id, force_shadow_repo_refresh)
1875 1881 target_vcs = pull_request.target_repo.scm_instance()
1876 1882 # Refresh the target reference.
1877 1883 try:
1878 1884 target_ref = self._refresh_reference(
1879 1885 pull_request.target_ref_parts, target_vcs)
1880 1886 except CommitDoesNotExistError:
1881 1887 merge_state = MergeResponse(
1882 1888 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
1883 1889 metadata={'target_ref': pull_request.target_ref_parts})
1884 1890 return merge_state
1885 1891
1886 1892 target_locked = pull_request.target_repo.locked
1887 1893 if target_locked and target_locked[0]:
1888 1894 locked_by = 'user:{}'.format(target_locked[0])
1889 1895 log.debug("The target repository is locked by %s.", locked_by)
1890 1896 merge_state = MergeResponse(
1891 1897 False, False, None, MergeFailureReason.TARGET_IS_LOCKED,
1892 1898 metadata={'locked_by': locked_by})
1893 1899 elif force_shadow_repo_refresh or self._needs_merge_state_refresh(
1894 1900 pull_request, target_ref):
1895 1901 log.debug("Refreshing the merge status of the repository.")
1896 1902 merge_state = self._refresh_merge_state(
1897 1903 pull_request, target_vcs, target_ref)
1898 1904 else:
1899 1905 possible = pull_request.last_merge_status == MergeFailureReason.NONE
1900 1906 metadata = {
1901 1907 'unresolved_files': '',
1902 1908 'target_ref': pull_request.target_ref_parts,
1903 1909 'source_ref': pull_request.source_ref_parts,
1904 1910 }
1905 1911 if pull_request.last_merge_metadata:
1906 1912 metadata.update(pull_request.last_merge_metadata_parsed)
1907 1913
1908 1914 if not possible and target_ref.type == 'branch':
1909 1915 # NOTE(marcink): case for mercurial multiple heads on branch
1910 1916 heads = target_vcs._heads(target_ref.name)
1911 1917 if len(heads) != 1:
1912 1918                     heads = ',\n'.join(target_vcs._heads(target_ref.name))
1913 1919 metadata.update({
1914 1920 'heads': heads
1915 1921 })
1916 1922
1917 1923 merge_state = MergeResponse(
1918 1924 possible, False, None, pull_request.last_merge_status, metadata=metadata)
1919 1925
1920 1926 return merge_state
1921 1927
1922 1928 def _refresh_reference(self, reference, vcs_repository):
1923 1929 if reference.type in self.UPDATABLE_REF_TYPES:
1924 1930 name_or_id = reference.name
1925 1931 else:
1926 1932 name_or_id = reference.commit_id
1927 1933
1928 1934 refreshed_commit = vcs_repository.get_commit(name_or_id)
1929 1935 refreshed_reference = Reference(
1930 1936 reference.type, reference.name, refreshed_commit.raw_id)
1931 1937 return refreshed_reference
1932 1938
1933 1939 def _needs_merge_state_refresh(self, pull_request, target_reference):
1934 1940         return not (
1935 1941 pull_request.revisions and
1936 1942 pull_request.revisions[0] == pull_request._last_merge_source_rev and
1937 1943 target_reference.commit_id == pull_request._last_merge_target_rev)
1938 1944
1939 1945 def _refresh_merge_state(self, pull_request, target_vcs, target_reference):
1940 1946 workspace_id = self._workspace_id(pull_request)
1941 1947 source_vcs = pull_request.source_repo.scm_instance()
1942 1948 repo_id = pull_request.target_repo.repo_id
1943 1949 use_rebase = self._use_rebase_for_merging(pull_request)
1944 1950 close_branch = self._close_branch_before_merging(pull_request)
1945 1951 merge_state = target_vcs.merge(
1946 1952 repo_id, workspace_id,
1947 1953 target_reference, source_vcs, pull_request.source_ref_parts,
1948 1954 dry_run=True, use_rebase=use_rebase,
1949 1955 close_branch=close_branch)
1950 1956
1951 1957 # Do not store the response if there was an unknown error.
1952 1958 if merge_state.failure_reason != MergeFailureReason.UNKNOWN:
1953 1959 pull_request._last_merge_source_rev = \
1954 1960 pull_request.source_ref_parts.commit_id
1955 1961 pull_request._last_merge_target_rev = target_reference.commit_id
1956 1962 pull_request.last_merge_status = merge_state.failure_reason
1957 1963 pull_request.last_merge_metadata = merge_state.metadata
1958 1964
1959 1965 pull_request.shadow_merge_ref = merge_state.merge_ref
1960 1966 Session().add(pull_request)
1961 1967 Session().commit()
1962 1968
1963 1969 return merge_state
1964 1970
1965 1971 def _workspace_id(self, pull_request):
1966 1972 workspace_id = 'pr-%s' % pull_request.pull_request_id
1967 1973 return workspace_id
1968 1974
1969 1975 def generate_repo_data(self, repo, commit_id=None, branch=None,
1970 1976 bookmark=None, translator=None):
1971 1977 from rhodecode.model.repo import RepoModel
1972 1978
1973 1979 all_refs, selected_ref = \
1974 1980 self._get_repo_pullrequest_sources(
1975 1981 repo.scm_instance(), commit_id=commit_id,
1976 1982 branch=branch, bookmark=bookmark, translator=translator)
1977 1983
1978 1984 refs_select2 = []
1979 1985 for element in all_refs:
1980 1986 children = [{'id': x[0], 'text': x[1]} for x in element[0]]
1981 1987 refs_select2.append({'text': element[1], 'children': children})
1982 1988
1983 1989 return {
1984 1990 'user': {
1985 1991 'user_id': repo.user.user_id,
1986 1992 'username': repo.user.username,
1987 1993 'firstname': repo.user.first_name,
1988 1994 'lastname': repo.user.last_name,
1989 1995 'gravatar_link': h.gravatar_url(repo.user.email, 14),
1990 1996 },
1991 1997 'name': repo.repo_name,
1992 1998 'link': RepoModel().get_url(repo),
1993 1999 'description': h.chop_at_smart(repo.description_safe, '\n'),
1994 2000 'refs': {
1995 2001 'all_refs': all_refs,
1996 2002 'selected_ref': selected_ref,
1997 2003 'select2_refs': refs_select2
1998 2004 }
1999 2005 }
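    # The 'select2_refs' entries follow the '{type}:{name}:{commit_id}' key
    # convention built by _get_repo_pullrequest_sources, e.g. (hypothetical
    # commit id):
    #   {'text': 'Branches',
    #    'children': [{'id': 'branch:default:deadbeefcafe', 'text': 'default'}]}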
2000 2006
2001 2007 def generate_pullrequest_title(self, source, source_ref, target):
2002 2008 return u'{source}#{at_ref} to {target}'.format(
2003 2009 source=source,
2004 2010 at_ref=source_ref,
2005 2011 target=target,
2006 2012 )
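    # e.g. (hypothetical names):
    #   generate_pullrequest_title('repo-a', 'feature-x', 'repo-b')
    #   -> 'repo-a#feature-x to repo-b'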
2007 2013
2008 2014 def _cleanup_merge_workspace(self, pull_request):
2009 2015 # Merging related cleanup
2010 2016 repo_id = pull_request.target_repo.repo_id
2011 2017 target_scm = pull_request.target_repo.scm_instance()
2012 2018 workspace_id = self._workspace_id(pull_request)
2013 2019
2014 2020 try:
2015 2021 target_scm.cleanup_merge_workspace(repo_id, workspace_id)
2016 2022 except NotImplementedError:
2017 2023 pass
2018 2024
2019 2025 def _get_repo_pullrequest_sources(
2020 2026 self, repo, commit_id=None, branch=None, bookmark=None,
2021 2027 translator=None):
2022 2028 """
2023 2029 Return a structure with repo's interesting commits, suitable for
2024 2030 the selectors in pullrequest controller
2025 2031
2026 2032 :param commit_id: a commit that must be in the list somehow
2027 2033 and selected by default
2028 2034 :param branch: a branch that must be in the list and selected
2029 2035 by default - even if closed
2030 2036         :param bookmark: a bookmark that must be in the list and selected by default
2031 2037 """
2032 2038 _ = translator or get_current_request().translate
2033 2039
2034 2040 commit_id = safe_str(commit_id) if commit_id else None
2035 branch = safe_unicode(branch) if branch else None
2036 bookmark = safe_unicode(bookmark) if bookmark else None
2041 branch = safe_str(branch) if branch else None
2042 bookmark = safe_str(bookmark) if bookmark else None
2037 2043
2038 2044 selected = None
2039 2045
2040 2046 # order matters: first source that has commit_id in it will be selected
2041 2047 sources = []
2042 2048 sources.append(('book', repo.bookmarks.items(), _('Bookmarks'), bookmark))
2043 2049 sources.append(('branch', repo.branches.items(), _('Branches'), branch))
2044 2050
2045 2051 if commit_id:
2046 2052 ref_commit = (h.short_id(commit_id), commit_id)
2047 2053 sources.append(('rev', [ref_commit], _('Commit IDs'), commit_id))
2048 2054
2049 2055 sources.append(
2050 2056 ('branch', repo.branches_closed.items(), _('Closed Branches'), branch),
2051 2057 )
2052 2058
2053 2059 groups = []
2054 2060
2055 2061 for group_key, ref_list, group_name, match in sources:
2056 2062 group_refs = []
2057 2063 for ref_name, ref_id in ref_list:
2058 2064 ref_key = u'{}:{}:{}'.format(group_key, ref_name, ref_id)
2059 2065 group_refs.append((ref_key, ref_name))
2060 2066
2061 2067 if not selected:
2062 2068 if set([commit_id, match]) & set([ref_id, ref_name]):
2063 2069 selected = ref_key
2064 2070
2065 2071 if group_refs:
2066 2072 groups.append((group_refs, group_name))
2067 2073
2068 2074 if not selected:
2069 2075 ref = commit_id or branch or bookmark
2070 2076 if ref:
2071 2077 raise CommitDoesNotExistError(
2072 2078 u'No commit refs could be found matching: {}'.format(ref))
2073 2079 elif repo.DEFAULT_BRANCH_NAME in repo.branches:
2074 2080 selected = u'branch:{}:{}'.format(
2075 safe_unicode(repo.DEFAULT_BRANCH_NAME),
2076 safe_unicode(repo.branches[repo.DEFAULT_BRANCH_NAME])
2081 safe_str(repo.DEFAULT_BRANCH_NAME),
2082 safe_str(repo.branches[repo.DEFAULT_BRANCH_NAME])
2077 2083 )
2078 2084 elif repo.commit_ids:
2079 2085 # make the user select in this case
2080 2086 selected = None
2081 2087 else:
2082 2088 raise EmptyRepositoryError()
2083 2089 return groups, selected
2084 2090
2085 2091 def get_diff(self, source_repo, source_ref_id, target_ref_id,
2086 2092 hide_whitespace_changes, diff_context):
2087 2093
2088 2094 return self._get_diff_from_pr_or_version(
2089 2095 source_repo, source_ref_id, target_ref_id,
2090 2096 hide_whitespace_changes=hide_whitespace_changes, diff_context=diff_context)
2091 2097
2092 2098 def _get_diff_from_pr_or_version(
2093 2099 self, source_repo, source_ref_id, target_ref_id,
2094 2100 hide_whitespace_changes, diff_context):
2095 2101
2096 2102 target_commit = source_repo.get_commit(
2097 2103 commit_id=safe_str(target_ref_id))
2098 2104 source_commit = source_repo.get_commit(
2099 2105 commit_id=safe_str(source_ref_id), maybe_unreachable=True)
2100 2106 if isinstance(source_repo, Repository):
2101 2107 vcs_repo = source_repo.scm_instance()
2102 2108 else:
2103 2109 vcs_repo = source_repo
2104 2110
2105 2111 # TODO: johbo: In the context of an update, we cannot reach
2106 2112 # the old commit anymore with our normal mechanisms. It needs
2107 2113 # some sort of special support in the vcs layer to avoid this
2108 2114 # workaround.
2109 2115 if (source_commit.raw_id == vcs_repo.EMPTY_COMMIT_ID and
2110 2116 vcs_repo.alias == 'git'):
2111 2117 source_commit.raw_id = safe_str(source_ref_id)
2112 2118
2113 2119 log.debug('calculating diff between '
2114 2120 'source_ref:%s and target_ref:%s for repo `%s`',
2115 2121 target_ref_id, source_ref_id,
2116 safe_unicode(vcs_repo.path))
2122 safe_str(vcs_repo.path))
2117 2123
2118 2124 vcs_diff = vcs_repo.get_diff(
2119 2125 commit1=target_commit, commit2=source_commit,
2120 2126 ignore_whitespace=hide_whitespace_changes, context=diff_context)
2121 2127 return vcs_diff
2122 2128
2123 2129 def _is_merge_enabled(self, pull_request):
2124 2130 return self._get_general_setting(
2125 2131 pull_request, 'rhodecode_pr_merge_enabled')
2126 2132
2127 2133 def _use_rebase_for_merging(self, pull_request):
2128 2134 repo_type = pull_request.target_repo.repo_type
2129 2135 if repo_type == 'hg':
2130 2136 return self._get_general_setting(
2131 2137 pull_request, 'rhodecode_hg_use_rebase_for_merging')
2132 2138 elif repo_type == 'git':
2133 2139 return self._get_general_setting(
2134 2140 pull_request, 'rhodecode_git_use_rebase_for_merging')
2135 2141
2136 2142 return False
2137 2143
2138 2144 def _user_name_for_merging(self, pull_request, user):
2139 2145 env_user_name_attr = os.environ.get('RC_MERGE_USER_NAME_ATTR', '')
2140 2146 if env_user_name_attr and hasattr(user, env_user_name_attr):
2141 2147 user_name_attr = env_user_name_attr
2142 2148 else:
2143 2149 user_name_attr = 'short_contact'
2144 2150
2145 2151 user_name = getattr(user, user_name_attr)
2146 2152 return user_name
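    # Sketch: exporting RC_MERGE_USER_NAME_ATTR=username would attribute
    # merge commits via user.username instead of the default
    # user.short_contact (assuming the attribute exists on the User object).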
2147 2153
2148 2154 def _close_branch_before_merging(self, pull_request):
2149 2155 repo_type = pull_request.target_repo.repo_type
2150 2156 if repo_type == 'hg':
2151 2157 return self._get_general_setting(
2152 2158 pull_request, 'rhodecode_hg_close_branch_before_merging')
2153 2159 elif repo_type == 'git':
2154 2160 return self._get_general_setting(
2155 2161 pull_request, 'rhodecode_git_close_branch_before_merging')
2156 2162
2157 2163 return False
2158 2164
2159 2165 def _get_general_setting(self, pull_request, settings_key, default=False):
2160 2166 settings_model = VcsSettingsModel(repo=pull_request.target_repo)
2161 2167 settings = settings_model.get_general_settings()
2162 2168 return settings.get(settings_key, default)
2163 2169
2164 2170 def _log_audit_action(self, action, action_data, user, pull_request):
2165 2171 audit_logger.store(
2166 2172 action=action,
2167 2173 action_data=action_data,
2168 2174 user=user,
2169 2175 repo=pull_request.target_repo)
2170 2176
2171 2177 def get_reviewer_functions(self):
2172 2178 """
2173 2179 Fetches functions for validation and fetching default reviewers.
2174 2180         If available we use the EE package, else we fall back to the CE
2175 2181         package functions
2176 2182 """
2177 2183 try:
2178 2184 from rc_reviewers.utils import get_default_reviewers_data
2179 2185 from rc_reviewers.utils import validate_default_reviewers
2180 2186 from rc_reviewers.utils import validate_observers
2181 2187 except ImportError:
2182 2188 from rhodecode.apps.repository.utils import get_default_reviewers_data
2183 2189 from rhodecode.apps.repository.utils import validate_default_reviewers
2184 2190 from rhodecode.apps.repository.utils import validate_observers
2185 2191
2186 2192 return get_default_reviewers_data, validate_default_reviewers, validate_observers
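    # Usage sketch (illustrative):
    #   (get_default_reviewers_data, validate_default_reviewers,
    #    validate_observers) = PullRequestModel().get_reviewer_functions()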
2187 2193
2188 2194
2189 2195 class MergeCheck(object):
2190 2196 """
2191 2197     Performs merge checks and returns a check object which stores information
2192 2198     about merge errors and merge conditions
2193 2199 """
2194 2200 TODO_CHECK = 'todo'
2195 2201 PERM_CHECK = 'perm'
2196 2202 REVIEW_CHECK = 'review'
2197 2203 MERGE_CHECK = 'merge'
2198 2204 WIP_CHECK = 'wip'
2199 2205
2200 2206 def __init__(self):
2201 2207 self.review_status = None
2202 2208 self.merge_possible = None
2203 2209 self.merge_msg = ''
2204 2210 self.merge_response = None
2205 2211 self.failed = None
2206 2212 self.errors = []
2207 2213 self.error_details = OrderedDict()
2208 2214 self.source_commit = AttributeDict()
2209 2215 self.target_commit = AttributeDict()
2210 2216 self.reviewers_count = 0
2211 2217 self.observers_count = 0
2212 2218
2213 2219 def __repr__(self):
2214 2220 return '<MergeCheck(possible:{}, failed:{}, errors:{})>'.format(
2215 2221 self.merge_possible, self.failed, self.errors)
2216 2222
2217 2223 def push_error(self, error_type, message, error_key, details):
2218 2224 self.failed = True
2219 2225 self.errors.append([error_type, message])
2220 2226 self.error_details[error_key] = dict(
2221 2227 details=details,
2222 2228 error_type=error_type,
2223 2229 message=message
2224 2230 )
2225 2231
2226 2232 @classmethod
2227 2233 def validate(cls, pull_request, auth_user, translator, fail_early=False,
2228 2234 force_shadow_repo_refresh=False):
2229 2235 _ = translator
2230 2236 merge_check = cls()
2231 2237
2232 2238 # title has WIP:
2233 2239 if pull_request.work_in_progress:
2234 2240 log.debug("MergeCheck: cannot merge, title has wip: marker.")
2235 2241
2236 2242             msg = _('WIP marker in title prevents accidental merges.')
2237 2243 merge_check.push_error('error', msg, cls.WIP_CHECK, pull_request.title)
2238 2244 if fail_early:
2239 2245 return merge_check
2240 2246
2241 2247 # permissions to merge
2242 2248 user_allowed_to_merge = PullRequestModel().check_user_merge(pull_request, auth_user)
2243 2249 if not user_allowed_to_merge:
2244 2250 log.debug("MergeCheck: cannot merge, approval is pending.")
2245 2251
2246 2252 msg = _('User `{}` not allowed to perform merge.').format(auth_user.username)
2247 2253 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2248 2254 if fail_early:
2249 2255 return merge_check
2250 2256
2251 2257 # permission to merge into the target branch
2252 2258 target_commit_id = pull_request.target_ref_parts.commit_id
2253 2259 if pull_request.target_ref_parts.type == 'branch':
2254 2260 branch_name = pull_request.target_ref_parts.name
2255 2261 else:
2256 2262 # for mercurial we can always figure out the branch from the commit
2257 2263 # in case of bookmark
2258 2264 target_commit = pull_request.target_repo.get_commit(target_commit_id)
2259 2265 branch_name = target_commit.branch
2260 2266
2261 2267 rule, branch_perm = auth_user.get_rule_and_branch_permission(
2262 2268 pull_request.target_repo.repo_name, branch_name)
2263 2269 if branch_perm and branch_perm == 'branch.none':
2264 2270 msg = _('Target branch `{}` changes rejected by rule {}.').format(
2265 2271 branch_name, rule)
2266 2272 merge_check.push_error('error', msg, cls.PERM_CHECK, auth_user.username)
2267 2273 if fail_early:
2268 2274 return merge_check
2269 2275
2270 2276 # review status, must be always present
2271 2277 review_status = pull_request.calculated_review_status()
2272 2278 merge_check.review_status = review_status
2273 2279 merge_check.reviewers_count = pull_request.reviewers_count
2274 2280 merge_check.observers_count = pull_request.observers_count
2275 2281
2276 2282 status_approved = review_status == ChangesetStatus.STATUS_APPROVED
2277 2283 if not status_approved and merge_check.reviewers_count:
2278 2284 log.debug("MergeCheck: cannot merge, approval is pending.")
2279 2285 msg = _('Pull request reviewer approval is pending.')
2280 2286
2281 2287 merge_check.push_error('warning', msg, cls.REVIEW_CHECK, review_status)
2282 2288
2283 2289 if fail_early:
2284 2290 return merge_check
2285 2291
2286 2292 # left over TODOs
2287 2293 todos = CommentsModel().get_pull_request_unresolved_todos(pull_request)
2288 2294 if todos:
2289 2295 log.debug("MergeCheck: cannot merge, {} "
2290 2296 "unresolved TODOs left.".format(len(todos)))
2291 2297
2292 2298 if len(todos) == 1:
2293 2299 msg = _('Cannot merge, {} TODO still not resolved.').format(
2294 2300 len(todos))
2295 2301 else:
2296 2302 msg = _('Cannot merge, {} TODOs still not resolved.').format(
2297 2303 len(todos))
2298 2304
2299 2305 merge_check.push_error('warning', msg, cls.TODO_CHECK, todos)
2300 2306
2301 2307 if fail_early:
2302 2308 return merge_check
2303 2309
2304 2310 # merge possible, here is the filesystem simulation + shadow repo
2305 2311 merge_response, merge_status, msg = PullRequestModel().merge_status(
2306 2312 pull_request, translator=translator,
2307 2313 force_shadow_repo_refresh=force_shadow_repo_refresh)
2308 2314
2309 2315 merge_check.merge_possible = merge_status
2310 2316 merge_check.merge_msg = msg
2311 2317 merge_check.merge_response = merge_response
2312 2318
2313 2319 source_ref_id = pull_request.source_ref_parts.commit_id
2314 2320 target_ref_id = pull_request.target_ref_parts.commit_id
2315 2321
2316 2322 try:
2317 2323 source_commit, target_commit = PullRequestModel().get_flow_commits(pull_request)
2318 2324 merge_check.source_commit.changed = source_ref_id != source_commit.raw_id
2319 2325 merge_check.source_commit.ref_spec = pull_request.source_ref_parts
2320 2326 merge_check.source_commit.current_raw_id = source_commit.raw_id
2321 2327 merge_check.source_commit.previous_raw_id = source_ref_id
2322 2328
2323 2329 merge_check.target_commit.changed = target_ref_id != target_commit.raw_id
2324 2330 merge_check.target_commit.ref_spec = pull_request.target_ref_parts
2325 2331 merge_check.target_commit.current_raw_id = target_commit.raw_id
2326 2332 merge_check.target_commit.previous_raw_id = target_ref_id
2327 2333 except (SourceRefMissing, TargetRefMissing):
2328 2334 pass
2329 2335
2330 2336 if not merge_status:
2331 2337 log.debug("MergeCheck: cannot merge, pull request merge not possible.")
2332 2338 merge_check.push_error('warning', msg, cls.MERGE_CHECK, None)
2333 2339
2334 2340 if fail_early:
2335 2341 return merge_check
2336 2342
2337 2343 log.debug('MergeCheck: is failed: %s', merge_check.failed)
2338 2344 return merge_check
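    # Usage sketch (illustrative): run all checks and inspect the result:
    #   check = MergeCheck.validate(pr, auth_user, translator=_)
    #   if check.failed:
    #       for error_type, message in check.errors:
    #           ...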
2339 2345
2340 2346 @classmethod
2341 2347 def get_merge_conditions(cls, pull_request, translator):
2342 2348 _ = translator
2343 2349 merge_details = {}
2344 2350
2345 2351 model = PullRequestModel()
2346 2352 use_rebase = model._use_rebase_for_merging(pull_request)
2347 2353
2348 2354 if use_rebase:
2349 2355 merge_details['merge_strategy'] = dict(
2350 2356 details={},
2351 2357 message=_('Merge strategy: rebase')
2352 2358 )
2353 2359 else:
2354 2360 merge_details['merge_strategy'] = dict(
2355 2361 details={},
2356 2362 message=_('Merge strategy: explicit merge commit')
2357 2363 )
2358 2364
2359 2365 close_branch = model._close_branch_before_merging(pull_request)
2360 2366 if close_branch:
2361 2367 repo_type = pull_request.target_repo.repo_type
2362 2368 close_msg = ''
2363 2369 if repo_type == 'hg':
2364 2370 close_msg = _('Source branch will be closed before the merge.')
2365 2371 elif repo_type == 'git':
2366 2372 close_msg = _('Source branch will be deleted after the merge.')
2367 2373
2368 2374 merge_details['close_branch'] = dict(
2369 2375 details={},
2370 2376 message=close_msg
2371 2377 )
2372 2378
2373 2379 return merge_details
2374 2380
2375 2381
2376 ChangeTuple = collections.namedtuple(
2377 'ChangeTuple', ['added', 'common', 'removed', 'total'])
2382 @dataclasses.dataclass
2383 class ChangeTuple:
2384 added: list
2385 common: list
2386 removed: list
2387 total: list
2378 2388
2379 FileChangeTuple = collections.namedtuple(
2380 'FileChangeTuple', ['added', 'modified', 'removed'])
2389
2390 @dataclasses.dataclass
2391 class FileChangeTuple:
2392 added: list
2393 modified: list
2394 removed: list
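# Sketch (hypothetical values): unlike the namedtuples they replace, the
# dataclasses are typically built with keyword arguments but keep the same
# attribute access, e.g.:
#   changes = ChangeTuple(added=['abc1'], common=[], removed=[], total=['abc1'])
#   changes.added  # -> ['abc1']
# note that tuple-style unpacking of the old namedtuples no longer applies.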
@@ -1,1196 +1,1199 b''
1 1
2 2 # Copyright (C) 2010-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import os
21 21 import re
22 22 import shutil
23 23 import time
24 24 import logging
25 25 import traceback
26 26 import datetime
27 27
28 28 from pyramid.threadlocal import get_current_request
29 from sqlalchemy.orm import aliased
29 30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 31
31 32 from rhodecode import events
32 33 from rhodecode.lib.auth import HasUserGroupPermissionAny
33 34 from rhodecode.lib.caching_query import FromCache
34 35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
35 36 from rhodecode.lib import hooks_base
36 37 from rhodecode.lib.user_log_filter import user_log_filter
37 38 from rhodecode.lib.utils import make_db_config
38 39 from rhodecode.lib.utils2 import (
39 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
40 safe_str, remove_prefix, obfuscate_url_pw,
40 41 get_current_rhodecode_user, safe_int, action_logger_generic)
41 42 from rhodecode.lib.vcs.backends import get_backend
42 43 from rhodecode.model import BaseModel
43 44 from rhodecode.model.db import (
44 45 _hash_key, func, case, joinedload, or_, in_filter_generator,
45 46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
46 47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
47 48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
48 49 from rhodecode.model.permission import PermissionModel
49 50 from rhodecode.model.settings import VcsSettingsModel
50 51
51 52 log = logging.getLogger(__name__)
52 53
53 54
54 55 class RepoModel(BaseModel):
55 56
56 57 cls = Repository
57 58
58 59 def _get_user_group(self, users_group):
59 60 return self._get_instance(UserGroup, users_group,
60 61 callback=UserGroup.get_by_group_name)
61 62
62 63 def _get_repo_group(self, repo_group):
63 64 return self._get_instance(RepoGroup, repo_group,
64 65 callback=RepoGroup.get_by_group_name)
65 66
66 67 def _create_default_perms(self, repository, private):
67 68 # create default permission
68 69 default = 'repository.read'
69 70 def_user = User.get_default_user()
70 71 for p in def_user.user_perms:
71 72 if p.permission.permission_name.startswith('repository.'):
72 73 default = p.permission.permission_name
73 74 break
74 75
75 76 default_perm = 'repository.none' if private else default
76 77
77 78 repo_to_perm = UserRepoToPerm()
78 79 repo_to_perm.permission = Permission.get_by_key(default_perm)
79 80
80 81 repo_to_perm.repository = repository
81 repo_to_perm.user_id = def_user.user_id
82 repo_to_perm.user = def_user
82 83
83 84 return repo_to_perm
84 85
85 86 @LazyProperty
86 87 def repos_path(self):
87 88 """
88 89 Gets the repositories root path from database
89 90 """
90 91 settings_model = VcsSettingsModel(sa=self.sa)
91 92 return settings_model.get_repos_location()
92 93
93 94 def get(self, repo_id):
94 95 repo = self.sa.query(Repository) \
95 96 .filter(Repository.repo_id == repo_id)
96 97
97 98 return repo.scalar()
98 99
99 100 def get_repo(self, repository):
100 101 return self._get_repo(repository)
101 102
102 103 def get_by_repo_name(self, repo_name, cache=False):
103 104 repo = self.sa.query(Repository) \
104 105 .filter(Repository.repo_name == repo_name)
105 106
106 107 if cache:
107 108 name_key = _hash_key(repo_name)
108 109 repo = repo.options(
109 110 FromCache("sql_cache_short", f"get_repo_{name_key}"))
110 111 return repo.scalar()
111 112
112 113 def _extract_id_from_repo_name(self, repo_name):
113 114 if repo_name.startswith('/'):
114 115 repo_name = repo_name.lstrip('/')
115 by_id_match = re.match(r'^_(\d{1,})', repo_name)
116 by_id_match = re.match(r'^_(\d+)', repo_name)
116 117 if by_id_match:
117 118 return by_id_match.groups()[0]
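            # e.g. _extract_id_from_repo_name('_11/repo_name') -> '11' (sketch;
            # the id is returned as a string, consumed by get_repo_by_id below)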
118 119
119 120 def get_repo_by_id(self, repo_name):
120 121 """
121 122 Extracts repo_name by id from special urls.
122 123 Example url is _11/repo_name
123 124
124 125 :param repo_name:
125 126 :return: repo object if matched else None
126 127 """
127 128 _repo_id = None
128 129 try:
129 130 _repo_id = self._extract_id_from_repo_name(repo_name)
130 131 if _repo_id:
131 132 return self.get(_repo_id)
132 133 except Exception:
133 134 log.exception('Failed to extract repo_name from URL')
134 135 if _repo_id:
135 136 Session().rollback()
136 137
137 138 return None
138 139
139 140 def get_repos_for_root(self, root, traverse=False):
140 141 if traverse:
141 like_expression = u'{}%'.format(safe_unicode(root))
142 like_expression = u'{}%'.format(safe_str(root))
142 143 repos = Repository.query().filter(
143 144 Repository.repo_name.like(like_expression)).all()
144 145 else:
145 146 if root and not isinstance(root, RepoGroup):
146 147 raise ValueError(
147 148 'Root must be an instance '
148 149                     'of RepoGroup, got: {} instead'.format(type(root)))
149 150 repos = Repository.query().filter(Repository.group == root).all()
150 151 return repos
151 152
152 153 def get_url(self, repo, request=None, permalink=False):
153 154 if not request:
154 155 request = get_current_request()
155 156
156 157 if not request:
157 158 return
158 159
159 160 if permalink:
160 161 return request.route_url(
161 162 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
162 163 else:
163 164 return request.route_url(
164 165 'repo_summary', repo_name=safe_str(repo.repo_name))
165 166
166 167 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
167 168 if not request:
168 169 request = get_current_request()
169 170
170 171 if not request:
171 172 return
172 173
173 174 if permalink:
174 175 return request.route_url(
175 176 'repo_commit', repo_name=safe_str(repo.repo_id),
176 177 commit_id=commit_id)
177 178
178 179 else:
179 180 return request.route_url(
180 181 'repo_commit', repo_name=safe_str(repo.repo_name),
181 182 commit_id=commit_id)
182 183
183 184 def get_repo_log(self, repo, filter_term):
184 185 repo_log = UserLog.query()\
185 186 .filter(or_(UserLog.repository_id == repo.repo_id,
186 187 UserLog.repository_name == repo.repo_name))\
187 188 .options(joinedload(UserLog.user))\
188 189 .options(joinedload(UserLog.repository))\
189 190 .order_by(UserLog.action_date.desc())
190 191
191 192 repo_log = user_log_filter(repo_log, filter_term)
192 193 return repo_log
193 194
194 195 @classmethod
195 196 def update_commit_cache(cls, repositories=None):
196 197 if not repositories:
197 198 repositories = Repository.getAll()
198 199 for repo in repositories:
199 200 repo.update_commit_cache()
200 201
201 202 def get_repos_as_dict(self, repo_list=None, admin=False,
202 203 super_user_actions=False, short_name=None):
203 204
204 205 _render = get_current_request().get_partial_renderer(
205 206 'rhodecode:templates/data_table/_dt_elements.mako')
206 207 c = _render.get_call_context()
207 208 h = _render.get_helpers()
208 209
209 210 def quick_menu(repo_name):
210 211 return _render('quick_menu', repo_name)
211 212
212 def repo_lnk(name, rtype, rstate, private, archived, fork_of):
213 def repo_lnk(name, rtype, rstate, private, archived, fork_repo_name):
213 214 if short_name is not None:
214 215 short_name_var = short_name
215 216 else:
216 217 short_name_var = not admin
217 return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
218 return _render('repo_name', name, rtype, rstate, private, archived, fork_repo_name,
218 219 short_name=short_name_var, admin=False)
219 220
220 221 def last_change(last_change):
221 222 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
222 223 ts = time.time()
223 224 utc_offset = (datetime.datetime.fromtimestamp(ts)
224 225 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
225 226 last_change = last_change + datetime.timedelta(seconds=utc_offset)
226 227
227 228 return _render("last_change", last_change)
228 229
229 230 def rss_lnk(repo_name):
230 231 return _render("rss", repo_name)
231 232
232 233 def atom_lnk(repo_name):
233 234 return _render("atom", repo_name)
234 235
235 236 def last_rev(repo_name, cs_cache):
236 237 return _render('revision', repo_name, cs_cache.get('revision'),
237 238 cs_cache.get('raw_id'), cs_cache.get('author'),
238 239 cs_cache.get('message'), cs_cache.get('date'))
239 240
240 241 def desc(desc):
241 242 return _render('repo_desc', desc, c.visual.stylify_metatags)
242 243
243 244 def state(repo_state):
244 245 return _render("repo_state", repo_state)
245 246
246 247 def repo_actions(repo_name):
247 248 return _render('repo_actions', repo_name, super_user_actions)
248 249
249 250 def user_profile(username):
250 251 return _render('user_profile', username)
251 252
252 253 repos_data = []
253 254 for repo in repo_list:
254 255             # NOTE(marcink): because we use only the raw column we need to load it like that
255 256 changeset_cache = Repository._load_changeset_cache(
256 257 repo.repo_id, repo._changeset_cache)
257 258
258 259 row = {
259 260 "menu": quick_menu(repo.repo_name),
260 261
261 262 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
262 repo.private, repo.archived, repo.fork),
263 repo.private, repo.archived, repo.fork_repo_name),
263 264
264 265 "desc": desc(h.escape(repo.description)),
265 266
266 267 "last_change": last_change(repo.updated_on),
267 268
268 269 "last_changeset": last_rev(repo.repo_name, changeset_cache),
269 270 "last_changeset_raw": changeset_cache.get('revision'),
270 271
271 "owner": user_profile(repo.User.username),
272 "owner": user_profile(repo.owner_username),
272 273
273 274 "state": state(repo.repo_state),
274 275 "rss": rss_lnk(repo.repo_name),
275 276 "atom": atom_lnk(repo.repo_name),
276 277 }
277 278 if admin:
278 279 row.update({
279 280 "action": repo_actions(repo.repo_name),
280 281 })
281 282 repos_data.append(row)
282 283
283 284 return repos_data
284 285
285 286 def get_repos_data_table(
286 287 self, draw, start, limit,
287 288 search_q, order_by, order_dir,
288 289 auth_user, repo_group_id):
289 290 from rhodecode.model.scm import RepoList
290 291
291 292 _perms = ['repository.read', 'repository.write', 'repository.admin']
292 293
293 294 repos = Repository.query() \
294 295 .filter(Repository.group_id == repo_group_id) \
295 296 .all()
296 297 auth_repo_list = RepoList(
297 298 repos, perm_set=_perms,
298 299 extra_kwargs=dict(user=auth_user))
299 300
300 301 allowed_ids = [-1]
301 302 for repo in auth_repo_list:
302 303 allowed_ids.append(repo.repo_id)
303 304
304 305 repos_data_total_count = Repository.query() \
305 306 .filter(Repository.group_id == repo_group_id) \
306 307 .filter(or_(
307 308 # generate multiple IN to fix limitation problems
308 309 *in_filter_generator(Repository.repo_id, allowed_ids))
309 310 ) \
310 311 .count()
311 312
313 RepoFork = aliased(Repository)
314 OwnerUser = aliased(User)
312 315 base_q = Session.query(
313 316 Repository.repo_id,
314 317 Repository.repo_name,
315 318 Repository.description,
316 319 Repository.repo_type,
317 320 Repository.repo_state,
318 321 Repository.private,
319 322 Repository.archived,
320 Repository.fork,
321 323 Repository.updated_on,
322 324 Repository._changeset_cache,
323 User,
325 RepoFork.repo_name.label('fork_repo_name'),
326 OwnerUser.username.label('owner_username'),
324 327 ) \
325 328 .filter(Repository.group_id == repo_group_id) \
326 329 .filter(or_(
327 330 # generate multiple IN to fix limitation problems
328 331 *in_filter_generator(Repository.repo_id, allowed_ids))
329 332 ) \
330 .join(User, User.user_id == Repository.user_id) \
331 .group_by(Repository, User)
333 .outerjoin(RepoFork, Repository.fork_id == RepoFork.repo_id) \
334 .join(OwnerUser, Repository.user_id == OwnerUser.user_id)
332 335
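        # Sketch of the aliased self-join above: RepoFork is a second alias of
        # Repository, so each repo row can be outer-joined to the repo it was
        # forked from, yielding plain 'fork_repo_name' / 'owner_username'
        # columns instead of loading full ORM entities per row.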
333 336 repos_data_total_filtered_count = base_q.count()
334 337
335 338 sort_defined = False
336 339 if order_by == 'repo_name':
337 340 sort_col = func.lower(Repository.repo_name)
338 341 sort_defined = True
339 342 elif order_by == 'user_username':
340 343 sort_col = User.username
341 344 else:
342 345 sort_col = getattr(Repository, order_by, None)
343 346
344 347 if sort_defined or sort_col:
345 348 if order_dir == 'asc':
346 349 sort_col = sort_col.asc()
347 350 else:
348 351 sort_col = sort_col.desc()
349 352
350 353 base_q = base_q.order_by(sort_col)
351 354 base_q = base_q.offset(start).limit(limit)
352 355
353 356 repos_list = base_q.all()
354 357
355 358 repos_data = RepoModel().get_repos_as_dict(
356 359 repo_list=repos_list, admin=False)
357 360
358 361 data = ({
359 362 'draw': draw,
360 363 'data': repos_data,
361 364 'recordsTotal': repos_data_total_count,
362 365 'recordsFiltered': repos_data_total_filtered_count,
363 366 })
364 367 return data
365 368
366 369 def _get_defaults(self, repo_name):
367 370 """
368 371         Gets information about a repository, and returns a dict for
369 372 usage in forms
370 373
371 374 :param repo_name:
372 375 """
373 376
374 377 repo_info = Repository.get_by_repo_name(repo_name)
375 378
376 379 if repo_info is None:
377 380 return None
378 381
379 382 defaults = repo_info.get_dict()
380 383 defaults['repo_name'] = repo_info.just_name
381 384
382 385 groups = repo_info.groups_with_parents
383 386 parent_group = groups[-1] if groups else None
384 387
385 388 # we use -1 as this is how in HTML, we mark an empty group
386 389 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
387 390
388 391 keys_to_process = (
389 392 {'k': 'repo_type', 'strip': False},
390 393 {'k': 'repo_enable_downloads', 'strip': True},
391 394 {'k': 'repo_description', 'strip': True},
392 395 {'k': 'repo_enable_locking', 'strip': True},
393 396 {'k': 'repo_landing_rev', 'strip': True},
394 397 {'k': 'clone_uri', 'strip': False},
395 398 {'k': 'push_uri', 'strip': False},
396 399 {'k': 'repo_private', 'strip': True},
397 400 {'k': 'repo_enable_statistics', 'strip': True}
398 401 )
399 402
400 403 for item in keys_to_process:
401 404 attr = item['k']
402 405 if item['strip']:
403 406 attr = remove_prefix(item['k'], 'repo_')
404 407
405 408 val = defaults[attr]
406 409 if item['k'] == 'repo_landing_rev':
407 410 val = ':'.join(defaults[attr])
408 411 defaults[item['k']] = val
409 412 if item['k'] == 'clone_uri':
410 413 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
411 414 if item['k'] == 'push_uri':
412 415 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
413 416
414 417 # fill owner
415 418 if repo_info.user:
416 419 defaults.update({'user': repo_info.user.username})
417 420 else:
418 421 replacement_user = User.get_first_super_admin().username
419 422 defaults.update({'user': replacement_user})
420 423
421 424 return defaults
422 425
423 426 def update(self, repo, **kwargs):
424 427 try:
425 428 cur_repo = self._get_repo(repo)
426 429 source_repo_name = cur_repo.repo_name
427 430
428 431 affected_user_ids = []
429 432 if 'user' in kwargs:
430 433 old_owner_id = cur_repo.user.user_id
431 434 new_owner = User.get_by_username(kwargs['user'])
432 435 cur_repo.user = new_owner
433 436
434 437 if old_owner_id != new_owner.user_id:
435 438 affected_user_ids = [new_owner.user_id, old_owner_id]
436 439
437 440 if 'repo_group' in kwargs:
438 441 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
439 442 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
440 443
441 444 update_keys = [
442 445 (1, 'repo_description'),
443 446 (1, 'repo_landing_rev'),
444 447 (1, 'repo_private'),
445 448 (1, 'repo_enable_downloads'),
446 449 (1, 'repo_enable_locking'),
447 450 (1, 'repo_enable_statistics'),
448 451 (0, 'clone_uri'),
449 452 (0, 'push_uri'),
450 453 (0, 'fork_id')
451 454 ]
452 455 for strip, k in update_keys:
453 456 if k in kwargs:
454 457 val = kwargs[k]
455 458 if strip:
456 459 k = remove_prefix(k, 'repo_')
457 460
458 461 setattr(cur_repo, k, val)
459 462
460 463 new_name = cur_repo.get_new_name(kwargs['repo_name'])
461 464 cur_repo.repo_name = new_name
462 465
463 466 # if private flag is set, reset default permission to NONE
464 467 if kwargs.get('repo_private'):
465 468 EMPTY_PERM = 'repository.none'
466 469 RepoModel().grant_user_permission(
467 470 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
468 471 )
469 472 if kwargs.get('repo_landing_rev'):
470 473 landing_rev_val = kwargs['repo_landing_rev']
471 474 RepoModel().set_landing_rev(cur_repo, landing_rev_val)
472 475
473 476 # handle extra fields
474 477 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
475 478 k = RepositoryField.un_prefix_key(field)
476 479 ex_field = RepositoryField.get_by_key_name(
477 480 key=k, repo=cur_repo)
478 481 if ex_field:
479 482 ex_field.field_value = kwargs[field]
480 483 self.sa.add(ex_field)
481 484
482 485 self.sa.add(cur_repo)
483 486
484 487 if source_repo_name != new_name:
485 488 # rename repository
486 489 self._rename_filesystem_repo(
487 490 old=source_repo_name, new=new_name)
488 491
489 492 if affected_user_ids:
490 493 PermissionModel().trigger_permission_flush(affected_user_ids)
491 494
492 495 return cur_repo
493 496 except Exception:
494 497 log.error(traceback.format_exc())
495 498 raise
496 499
497 500 def _create_repo(self, repo_name, repo_type, description, owner,
498 501 private=False, clone_uri=None, repo_group=None,
499 502 landing_rev=None, fork_of=None,
500 503 copy_fork_permissions=False, enable_statistics=False,
501 504 enable_locking=False, enable_downloads=False,
502 505 copy_group_permissions=False,
503 506 state=Repository.STATE_PENDING):
504 507 """
505 508         Create a repository inside the database with PENDING state. This should
506 509         only be executed by create(), with the exception of importing existing
507 510         repos.
508 511 """
509 512 from rhodecode.model.scm import ScmModel
510 513
511 514 owner = self._get_user(owner)
512 515 fork_of = self._get_repo(fork_of)
513 516 repo_group = self._get_repo_group(safe_int(repo_group))
514 517 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
515 518 landing_rev = landing_rev or default_landing_ref
516 519
517 520 try:
518 repo_name = safe_unicode(repo_name)
519 description = safe_unicode(description)
521 repo_name = safe_str(repo_name)
522 description = safe_str(description)
520 523 # repo name is just the name of the repository,
521 524 # while repo_name_full is a fully qualified name combining
522 525 # the name with the path of its group
523 526 repo_name_full = repo_name
524 527 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
525 528
526 529 new_repo = Repository()
527 530 new_repo.repo_state = state
528 531 new_repo.enable_statistics = False
529 532 new_repo.repo_name = repo_name_full
530 533 new_repo.repo_type = repo_type
531 534 new_repo.user = owner
532 535 new_repo.group = repo_group
533 536 new_repo.description = description or repo_name
534 537 new_repo.private = private
535 538 new_repo.archived = False
536 539 new_repo.clone_uri = clone_uri
537 540 new_repo.landing_rev = landing_rev
538 541
539 542 new_repo.enable_statistics = enable_statistics
540 543 new_repo.enable_locking = enable_locking
541 544 new_repo.enable_downloads = enable_downloads
542 545
543 546 if repo_group:
544 547 new_repo.enable_locking = repo_group.enable_locking
545 548
546 549 if fork_of:
547 550 parent_repo = fork_of
548 551 new_repo.fork = parent_repo
549 552
550 553 events.trigger(events.RepoPreCreateEvent(new_repo))
551 554
552 555 self.sa.add(new_repo)
553 556
554 557 EMPTY_PERM = 'repository.none'
555 558 if fork_of and copy_fork_permissions:
556 559 repo = fork_of
557 560 user_perms = UserRepoToPerm.query() \
558 561 .filter(UserRepoToPerm.repository == repo).all()
559 562 group_perms = UserGroupRepoToPerm.query() \
560 563 .filter(UserGroupRepoToPerm.repository == repo).all()
561 564
562 565 for perm in user_perms:
563 566 UserRepoToPerm.create(
564 567 perm.user, new_repo, perm.permission)
565 568
566 569 for perm in group_perms:
567 570 UserGroupRepoToPerm.create(
568 571 perm.users_group, new_repo, perm.permission)
569 572 # in case we copy permissions and also set this repo to private
570 573 # override the default user permission to make it a private repo
571 574 if private:
572 575 RepoModel(self.sa).grant_user_permission(
573 576 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
574 577
575 578 elif repo_group and copy_group_permissions:
576 579 user_perms = UserRepoGroupToPerm.query() \
577 580 .filter(UserRepoGroupToPerm.group == repo_group).all()
578 581
579 582 group_perms = UserGroupRepoGroupToPerm.query() \
580 583 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
581 584
582 585 for perm in user_perms:
583 586 perm_name = perm.permission.permission_name.replace(
584 587 'group.', 'repository.')
585 588 perm_obj = Permission.get_by_key(perm_name)
586 589 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
587 590
588 591 for perm in group_perms:
589 592 perm_name = perm.permission.permission_name.replace(
590 593 'group.', 'repository.')
591 594 perm_obj = Permission.get_by_key(perm_name)
592 595 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
593 596
594 597 if private:
595 598 RepoModel(self.sa).grant_user_permission(
596 599 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
597 600
598 601 else:
599 602 perm_obj = self._create_default_perms(new_repo, private)
600 603 self.sa.add(perm_obj)
601 604
602 605 # now automatically start following this repository as owner
603 606 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
604 607
605 608 # we need to flush here in order to check that the database won't
606 609 # throw any exceptions; create filesystem dirs at the very end
607 610 self.sa.flush()
608 611 events.trigger(events.RepoCreateEvent(new_repo))
609 612 return new_repo
610 613
611 614 except Exception:
612 615 log.error(traceback.format_exc())
613 616 raise
614 617
615 618 def create(self, form_data, cur_user):
616 619 """
617 620 Create repository using celery tasks
618 621
619 622 :param form_data:
620 623 :param cur_user:
621 624 """
622 625 from rhodecode.lib.celerylib import tasks, run_task
623 626 return run_task(tasks.create_repo, form_data, cur_user)
624 627
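# A hedged sketch of calling create(); the exact form_data keys are an
# assumption inferred from _create_repo() above, and the call only
# schedules a celery task rather than creating the repo synchronously.
form_data = {
    'repo_name': 'projects/my-repo',  # hypothetical, may include a group path
    'repo_type': 'git',
    'repo_description': '',
    'repo_private': False,
}
task = RepoModel().create(form_data, cur_user='admin')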
625 628 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
626 629 perm_deletions=None, check_perms=True,
627 630 cur_user=None):
628 631 if not perm_additions:
629 632 perm_additions = []
630 633 if not perm_updates:
631 634 perm_updates = []
632 635 if not perm_deletions:
633 636 perm_deletions = []
634 637
635 638 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
636 639
637 640 changes = {
638 641 'added': [],
639 642 'updated': [],
640 643 'deleted': [],
641 644 'default_user_changed': None
642 645 }
643 646
644 647 repo = self._get_repo(repo)
645 648
646 649 # update permissions
647 650 for member_id, perm, member_type in perm_updates:
648 651 member_id = int(member_id)
649 652 if member_type == 'user':
650 653 member_name = User.get(member_id).username
651 654 if member_name == User.DEFAULT_USER:
652 655 # NOTE(dan): detect if we changed permissions for default user
653 656 perm_obj = self.sa.query(UserRepoToPerm) \
654 657 .filter(UserRepoToPerm.user_id == member_id) \
655 658 .filter(UserRepoToPerm.repository == repo) \
656 659 .scalar()
657 660 if perm_obj and perm_obj.permission.permission_name != perm:
658 661 changes['default_user_changed'] = True
659 662
660 663 # this also updates the current one if found
661 664 self.grant_user_permission(
662 665 repo=repo, user=member_id, perm=perm)
663 666 elif member_type == 'user_group':
664 667 # check if we have permissions to alter this usergroup
665 668 member_name = UserGroup.get(member_id).users_group_name
666 669 if not check_perms or HasUserGroupPermissionAny(
667 670 *req_perms)(member_name, user=cur_user):
668 671 self.grant_user_group_permission(
669 672 repo=repo, group_name=member_id, perm=perm)
670 673 else:
671 674 raise ValueError("member_type must be 'user' or 'user_group' "
672 675 "got {} instead".format(member_type))
673 676 changes['updated'].append({'type': member_type, 'id': member_id,
674 677 'name': member_name, 'new_perm': perm})
675 678
676 679 # set new permissions
677 680 for member_id, perm, member_type in perm_additions:
678 681 member_id = int(member_id)
679 682 if member_type == 'user':
680 683 member_name = User.get(member_id).username
681 684 self.grant_user_permission(
682 685 repo=repo, user=member_id, perm=perm)
683 686 elif member_type == 'user_group':
684 687 # check if we have permissions to alter this usergroup
685 688 member_name = UserGroup.get(member_id).users_group_name
686 689 if not check_perms or HasUserGroupPermissionAny(
687 690 *req_perms)(member_name, user=cur_user):
688 691 self.grant_user_group_permission(
689 692 repo=repo, group_name=member_id, perm=perm)
690 693 else:
691 694 raise ValueError("member_type must be 'user' or 'user_group' "
692 695 "got {} instead".format(member_type))
693 696
694 697 changes['added'].append({'type': member_type, 'id': member_id,
695 698 'name': member_name, 'new_perm': perm})
696 699 # delete permissions
697 700 for member_id, perm, member_type in perm_deletions:
698 701 member_id = int(member_id)
699 702 if member_type == 'user':
700 703 member_name = User.get(member_id).username
701 704 self.revoke_user_permission(repo=repo, user=member_id)
702 705 elif member_type == 'user_group':
703 706 # check if we have permissions to alter this usergroup
704 707 member_name = UserGroup.get(member_id).users_group_name
705 708 if not check_perms or HasUserGroupPermissionAny(
706 709 *req_perms)(member_name, user=cur_user):
707 710 self.revoke_user_group_permission(
708 711 repo=repo, group_name=member_id)
709 712 else:
710 713 raise ValueError("member_type must be 'user' or 'user_group' "
711 714 "got {} instead".format(member_type))
712 715
713 716 changes['deleted'].append({'type': member_type, 'id': member_id,
714 717 'name': member_name, 'new_perm': perm})
715 718 return changes
716 719
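# A sketch of the (member_id, permission, member_type) triples consumed by
# update_permissions() above; the ids and the repo name are hypothetical.
changes = RepoModel().update_permissions(
    'my-repo',
    perm_additions=[(5, 'repository.read', 'user_group')],
    perm_updates=[(2, 'repository.write', 'user')],
    perm_deletions=[(7, None, 'user')],  # perm is only echoed back on delete
    cur_user='admin',
)
# changes -> {'added': [...], 'updated': [...], 'deleted': [...],
#             'default_user_changed': None or True}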
717 720 def create_fork(self, form_data, cur_user):
718 721 """
719 722 Simple wrapper for executing the celery task that creates a fork
720 723
721 724 :param form_data:
722 725 :param cur_user:
723 726 """
724 727 from rhodecode.lib.celerylib import tasks, run_task
725 728 return run_task(tasks.create_repo_fork, form_data, cur_user)
726 729
727 730 def archive(self, repo):
728 731 """
729 732 Archive given repository. Set archive flag.
730 733
731 734 :param repo:
732 735 """
733 736 repo = self._get_repo(repo)
734 737 if repo:
735 738
736 739 try:
737 740 repo.archived = True
738 741 self.sa.add(repo)
739 742 self.sa.commit()
740 743 except Exception:
741 744 log.error(traceback.format_exc())
742 745 raise
743 746
744 747 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
745 748 """
746 749 Delete given repository; the forks parameter defines what to do with
747 750 attached forks. Throws AttachedForksError if the deleted repo has
748 751 attached forks
749 752
750 753 :param repo:
751 754 :param forks: str 'delete' or 'detach'
752 755 :param pull_requests: str 'delete' or None
753 756 :param fs_remove: remove (archive) the repo from the filesystem
754 757 """
755 758 if not cur_user:
756 759 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
757 760 repo = self._get_repo(repo)
758 761 if repo:
759 762 if forks == 'detach':
760 763 for r in repo.forks:
761 764 r.fork = None
762 765 self.sa.add(r)
763 766 elif forks == 'delete':
764 767 for r in repo.forks:
765 768 self.delete(r, forks='delete')
766 769 elif [f for f in repo.forks]:
767 770 raise AttachedForksError()
768 771
769 772 # check for pull requests
770 773 pr_sources = repo.pull_requests_source
771 774 pr_targets = repo.pull_requests_target
772 775 if pull_requests != 'delete' and (pr_sources or pr_targets):
773 776 raise AttachedPullRequestsError()
774 777
775 778 old_repo_dict = repo.get_dict()
776 779 events.trigger(events.RepoPreDeleteEvent(repo))
777 780 try:
778 781 self.sa.delete(repo)
779 782 if fs_remove:
780 783 self._delete_filesystem_repo(repo)
781 784 else:
782 785 log.debug('skipping removal from filesystem')
783 786 old_repo_dict.update({
784 787 'deleted_by': cur_user,
785 788 'deleted_on': time.time(),
786 789 })
787 790 hooks_base.delete_repository(**old_repo_dict)
788 791 events.trigger(events.RepoDeleteEvent(repo))
789 792 except Exception:
790 793 log.error(traceback.format_exc())
791 794 raise
792 795
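# A hedged sketch of deleting a repository that has forks; without
# forks='detach' (or 'delete') the call raises AttachedForksError, and open
# pull requests raise AttachedPullRequestsError unless pull_requests='delete'.
RepoModel().delete('my-repo', forks='detach', fs_remove=True)
Session().commit()   # assuming the caller commits the session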
793 796 def grant_user_permission(self, repo, user, perm):
794 797 """
795 798 Grant permission for user on given repository, or update existing one
796 799 if found
797 800
798 801 :param repo: Instance of Repository, repository_id, or repository name
799 802 :param user: Instance of User, user_id or username
800 803 :param perm: Instance of Permission, or permission_name
801 804 """
802 805 user = self._get_user(user)
803 806 repo = self._get_repo(repo)
804 807 permission = self._get_perm(perm)
805 808
806 809 # check if we have that permission already
807 810 obj = self.sa.query(UserRepoToPerm) \
808 811 .filter(UserRepoToPerm.user == user) \
809 812 .filter(UserRepoToPerm.repository == repo) \
810 813 .scalar()
811 814 if obj is None:
812 815 # create new !
813 816 obj = UserRepoToPerm()
814 817 obj.repository = repo
815 818 obj.user = user
816 819 obj.permission = permission
817 820 self.sa.add(obj)
818 821 log.debug('Granted perm %s to %s on %s', perm, user, repo)
819 822 action_logger_generic(
820 823 'granted permission: {} to user: {} on repo: {}'.format(
821 824 perm, user, repo), namespace='security.repo')
822 825 return obj
823 826
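# Grant/revoke round-trip, assuming a user 'jdoe' and repo 'my-repo'
# (both hypothetical); grant_user_permission() updates an existing row
# when one is found, so repeating it is safe.
model = RepoModel()
model.grant_user_permission(repo='my-repo', user='jdoe', perm='repository.write')
model.revoke_user_permission(repo='my-repo', user='jdoe')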
824 827 def revoke_user_permission(self, repo, user):
825 828 """
826 829 Revoke permission for user on given repository
827 830
828 831 :param repo: Instance of Repository, repository_id, or repository name
829 832 :param user: Instance of User, user_id or username
830 833 """
831 834
832 835 user = self._get_user(user)
833 836 repo = self._get_repo(repo)
834 837
835 838 obj = self.sa.query(UserRepoToPerm) \
836 839 .filter(UserRepoToPerm.repository == repo) \
837 840 .filter(UserRepoToPerm.user == user) \
838 841 .scalar()
839 842 if obj:
840 843 self.sa.delete(obj)
841 844 log.debug('Revoked perm on %s on %s', repo, user)
842 845 action_logger_generic(
843 846 'revoked permission from user: {} on repo: {}'.format(
844 847 user, repo), namespace='security.repo')
845 848
846 849 def grant_user_group_permission(self, repo, group_name, perm):
847 850 """
848 851 Grant permission for user group on given repository, or update
849 852 existing one if found
850 853
851 854 :param repo: Instance of Repository, repository_id, or repository name
852 855 :param group_name: Instance of UserGroup, users_group_id,
853 856 or user group name
854 857 :param perm: Instance of Permission, or permission_name
855 858 """
856 859 repo = self._get_repo(repo)
857 860 group_name = self._get_user_group(group_name)
858 861 permission = self._get_perm(perm)
859 862
860 863 # check if we have that permission already
861 864 obj = self.sa.query(UserGroupRepoToPerm) \
862 865 .filter(UserGroupRepoToPerm.users_group == group_name) \
863 866 .filter(UserGroupRepoToPerm.repository == repo) \
864 867 .scalar()
865 868
866 869 if obj is None:
867 870 # create new
868 871 obj = UserGroupRepoToPerm()
869 872
870 873 obj.repository = repo
871 874 obj.users_group = group_name
872 875 obj.permission = permission
873 876 self.sa.add(obj)
874 877 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
875 878 action_logger_generic(
876 879 'granted permission: {} to usergroup: {} on repo: {}'.format(
877 880 perm, group_name, repo), namespace='security.repo')
878 881
879 882 return obj
880 883
881 884 def revoke_user_group_permission(self, repo, group_name):
882 885 """
883 886 Revoke permission for user group on given repository
884 887
885 888 :param repo: Instance of Repository, repository_id, or repository name
886 889 :param group_name: Instance of UserGroup, users_group_id,
887 890 or user group name
888 891 """
889 892 repo = self._get_repo(repo)
890 893 group_name = self._get_user_group(group_name)
891 894
892 895 obj = self.sa.query(UserGroupRepoToPerm) \
893 896 .filter(UserGroupRepoToPerm.repository == repo) \
894 897 .filter(UserGroupRepoToPerm.users_group == group_name) \
895 898 .scalar()
896 899 if obj:
897 900 self.sa.delete(obj)
898 901 log.debug('Revoked perm to %s on %s', repo, group_name)
899 902 action_logger_generic(
900 903 'revoked permission from usergroup: {} on repo: {}'.format(
901 904 group_name, repo), namespace='security.repo')
902 905
903 906 def delete_stats(self, repo_name):
904 907 """
905 908 removes stats for given repo
906 909
907 910 :param repo_name:
908 911 """
909 912 repo = self._get_repo(repo_name)
910 913 try:
911 914 obj = self.sa.query(Statistics) \
912 915 .filter(Statistics.repository == repo).scalar()
913 916 if obj:
914 917 self.sa.delete(obj)
915 918 except Exception:
916 919 log.error(traceback.format_exc())
917 920 raise
918 921
919 922 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
920 923 field_type='str', field_desc=''):
921 924
922 925 repo = self._get_repo(repo_name)
923 926
924 927 new_field = RepositoryField()
925 928 new_field.repository = repo
926 929 new_field.field_key = field_key
927 930 new_field.field_type = field_type # python type
928 931 new_field.field_value = field_value
929 932 new_field.field_desc = field_desc
930 933 new_field.field_label = field_label
931 934 self.sa.add(new_field)
932 935 return new_field
933 936
934 937 def delete_repo_field(self, repo_name, field_key):
935 938 repo = self._get_repo(repo_name)
936 939 field = RepositoryField.get_by_key_name(field_key, repo)
937 940 if field:
938 941 self.sa.delete(field)
939 942
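# Extra-fields round-trip, assuming a repo 'my-repo' exists; update() later
# receives such values under keys carrying RepositoryField.PREFIX (see the
# filter in update() above). Key and values here are hypothetical.
model = RepoModel()
model.add_repo_field(
    'my-repo', field_key='ticket_system', field_label='Ticket system',
    field_value='JIRA', field_type='str', field_desc='external tracker')
model.delete_repo_field('my-repo', field_key='ticket_system')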
940 943 def set_landing_rev(self, repo, landing_rev_name):
941 944 if landing_rev_name.startswith('branch:'):
942 945 landing_rev_name = landing_rev_name.split('branch:')[-1]
943 946 scm_instance = repo.scm_instance()
944 947 if scm_instance:
945 948 return scm_instance._remote.set_head_ref(landing_rev_name)
946 949
947 950 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
948 951 clone_uri=None, repo_store_location=None,
949 952 use_global_config=False, install_hooks=True):
950 953 """
951 954 makes a repository on the filesystem. It is group aware, meaning it
952 955 creates the repository within a group and alters the paths according
953 956 to the group location
954 957
955 958 :param repo_name:
956 959 :param alias:
957 960 :param parent:
958 961 :param clone_uri:
959 962 :param repo_store_location:
960 963 """
961 964 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
962 965 from rhodecode.model.scm import ScmModel
963 966
964 967 if Repository.NAME_SEP in repo_name:
965 968 raise ValueError(
966 969 'repo_name must not contain groups, got `%s`' % repo_name)
967 970
968 971 if isinstance(repo_group, RepoGroup):
969 972 new_parent_path = os.sep.join(repo_group.full_path_splitted)
970 973 else:
971 974 new_parent_path = repo_group or ''
972 975
973 976 if repo_store_location:
974 977 _paths = [repo_store_location]
975 978 else:
976 979 _paths = [self.repos_path, new_parent_path, repo_name]
977 980 # we need to make it str for mercurial
978 981 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
979 982
980 983 # check that this path is not already a repository
981 984 if is_valid_repo(repo_path, self.repos_path):
982 raise Exception('This path %s is a valid repository' % repo_path)
985 raise Exception(f'This path {repo_path} is a valid repository')
983 986
984 987 # check that this path is not already a group
985 988 if is_valid_repo_group(repo_path, self.repos_path):
986 raise Exception('This path %s is a valid group' % repo_path)
989 raise Exception(f'This path {repo_path} is a valid group')
987 990
988 991 log.info('creating repo %s in %s from url: `%s`',
989 repo_name, safe_unicode(repo_path),
992 repo_name, safe_str(repo_path),
990 993 obfuscate_url_pw(clone_uri))
991 994
992 995 backend = get_backend(repo_type)
993 996
994 997 config_repo = None if use_global_config else repo_name
995 998 if config_repo and new_parent_path:
996 999 config_repo = Repository.NAME_SEP.join(
997 1000 (new_parent_path, config_repo))
998 1001 config = make_db_config(clear_session=False, repo=config_repo)
999 1002 config.set('extensions', 'largefiles', '')
1000 1003
1001 1004 # patch and reset hooks section of UI config to not run any
1002 1005 # hooks on creating remote repo
1003 1006 config.clear_section('hooks')
1004 1007
1005 1008 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
1006 1009 if repo_type == 'git':
1007 1010 repo = backend(
1008 1011 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
1009 1012 with_wire={"cache": False})
1010 1013 else:
1011 1014 repo = backend(
1012 1015 repo_path, config=config, create=True, src_url=clone_uri,
1013 1016 with_wire={"cache": False})
1014 1017
1015 1018 if install_hooks:
1016 1019 repo.install_hooks()
1017 1020
1018 1021 log.debug('Created repo %s with %s backend',
1019 safe_unicode(repo_name), safe_unicode(repo_type))
1022 safe_str(repo_name), safe_str(repo_type))
1020 1023 return repo
1021 1024
1022 1025 def _rename_filesystem_repo(self, old, new):
1023 1026 """
1024 1027 renames repository on filesystem
1025 1028
1026 1029 :param old: old name
1027 1030 :param new: new name
1028 1031 """
1029 1032 log.info('renaming repo from %s to %s', old, new)
1030 1033
1031 1034 old_path = os.path.join(self.repos_path, old)
1032 1035 new_path = os.path.join(self.repos_path, new)
1033 1036 if os.path.isdir(new_path):
1034 1037 raise Exception(
1035 1038 'Was trying to rename to already existing dir %s' % new_path
1036 1039 )
1037 1040 shutil.move(old_path, new_path)
1038 1041
1039 1042 def _delete_filesystem_repo(self, repo):
1040 1043 """
1041 removes repo from filesystem, the removal is acctually made by
1042 added rm__ prefix into dir, and rename internat .hg/.git dirs so this
1044 removes repo from filesystem, the removal is actually made by
1045 added rm__ prefix into dir, and rename internal .hg/.git dirs so this
1043 1046 repository is no longer valid for rhodecode, can be undeleted later on
1044 1047 by reverting the renames on this repository
1045 1048
1046 1049 :param repo: repo object
1047 1050 """
1048 1051 rm_path = os.path.join(self.repos_path, repo.repo_name)
1049 1052 repo_group = repo.group
1050 log.info("Removing repository %s", rm_path)
1053 log.info("delete_filesystem_repo: removing repository %s", rm_path)
1051 1054 # disable hg/git internals so it doesn't get detected as a repo
1052 1055 alias = repo.repo_type
1053 1056
1054 1057 config = make_db_config(clear_session=False)
1055 1058 config.set('extensions', 'largefiles', '')
1056 1059 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1057 1060
1058 1061 # skip this for bare git repos
1059 1062 if not bare:
1060 1063 # disable VCS repo
1061 1064 vcs_path = os.path.join(rm_path, '.%s' % alias)
1062 1065 if os.path.exists(vcs_path):
1063 1066 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1064 1067
1065 1068 _now = datetime.datetime.now()
1066 1069 _ms = str(_now.microsecond).rjust(6, '0')
1067 1070 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1068 1071 repo.just_name)
1069 1072 if repo_group:
1070 1073 # if repository is in group, prefix the removal path with the group
1071 1074 args = repo_group.full_path_splitted + [_d]
1072 1075 _d = os.path.join(*args)
1073 1076
1074 1077 if os.path.isdir(rm_path):
1075 1078 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1076 1079
1077 1080 # finally cleanup diff-cache if it exists
1078 1081 cached_diffs_dir = repo.cached_diffs_dir
1079 1082 if os.path.isdir(cached_diffs_dir):
1080 1083 shutil.rmtree(cached_diffs_dir)
1081 1084
1082 1085
1083 1086 class ReadmeFinder:
1084 1087 """
1085 1088 Utility which knows how to find a readme for a specific commit.
1086 1089
1087 1090 The main idea is that this is a configurable algorithm. When creating an
1088 1091 instance you can define parameters, currently only the `default_renderer`.
1089 1092 Based on this configuration the method :meth:`search` behaves slightly
1090 1093 differently.
1091 1094 """
1092 1095
1093 1096 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
1094 1097 path_re = re.compile(r'^docs?', re.IGNORECASE)
1095 1098
1096 1099 default_priorities = {
1097 None: 0,
1098 '.text': 2,
1099 '.txt': 3,
1100 '.rst': 1,
1101 '.rest': 2,
1102 '.md': 1,
1103 '.mkdn': 2,
1104 '.mdown': 3,
1100 None: 0,
1101 '.rst': 1,
1102 '.md': 1,
1103 '.rest': 2,
1104 '.mkdn': 2,
1105 '.text': 2,
1106 '.txt': 3,
1107 '.mdown': 3,
1105 1108 '.markdown': 4,
1106 1109 }
1107 1110
1108 1111 path_priority = {
1109 'doc': 0,
1112 'doc': 0,
1110 1113 'docs': 1,
1111 1114 }
1112 1115
1113 1116 FALLBACK_PRIORITY = 99
1114 1117
1115 1118 RENDERER_TO_EXTENSION = {
1116 1119 'rst': ['.rst', '.rest'],
1117 1120 'markdown': ['.md', 'mkdn', '.mdown', '.markdown'],
1118 1121 }
1119 1122
1120 1123 def __init__(self, default_renderer=None):
1121 1124 self._default_renderer = default_renderer
1122 1125 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
1123 1126 default_renderer, [])
1124 1127
1125 def search(self, commit, path=u'/'):
1128 def search(self, commit, path='/'):
1126 1129 """
1127 1130 Find a readme in the given `commit`.
1128 1131 """
1129 1132 nodes = commit.get_nodes(path)
1130 1133 matches = self._match_readmes(nodes)
1131 1134 matches = self._sort_according_to_priority(matches)
1132 1135 if matches:
1133 1136 return matches[0].node
1134 1137
1135 1138 paths = self._match_paths(nodes)
1136 1139 paths = self._sort_paths_according_to_priority(paths)
1137 1140 for path in paths:
1138 1141 match = self.search(commit, path=path)
1139 1142 if match:
1140 1143 return match
1141 1144
1142 1145 return None
1143 1146
1144 1147 def _match_readmes(self, nodes):
1145 1148 for node in nodes:
1146 1149 if not node.is_file():
1147 1150 continue
1148 1151 path = node.path.rsplit('/', 1)[-1]
1149 1152 match = self.readme_re.match(path)
1150 1153 if match:
1151 1154 extension = match.group(1)
1152 1155 yield ReadmeMatch(node, match, self._priority(extension))
1153 1156
1154 1157 def _match_paths(self, nodes):
1155 1158 for node in nodes:
1156 1159 if not node.is_dir():
1157 1160 continue
1158 1161 match = self.path_re.match(node.path)
1159 1162 if match:
1160 1163 yield node.path
1161 1164
1162 1165 def _priority(self, extension):
1163 1166 renderer_priority = (
1164 1167 0 if extension in self._renderer_extensions else 1)
1165 1168 extension_priority = self.default_priorities.get(
1166 1169 extension, self.FALLBACK_PRIORITY)
1167 1170 return (renderer_priority, extension_priority)
1168 1171
1169 1172 def _sort_according_to_priority(self, matches):
1170 1173
1171 1174 def priority_and_path(match):
1172 1175 return (match.priority, match.path)
1173 1176
1174 1177 return sorted(matches, key=priority_and_path)
1175 1178
1176 1179 def _sort_paths_according_to_priority(self, paths):
1177 1180
1178 1181 def priority_and_path(path):
1179 1182 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1180 1183
1181 1184 return sorted(paths, key=priority_and_path)
1182 1185
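# A sketch of the two-level priority used by ReadmeFinder: the renderer
# match (0 beats 1) is compared before the extension priority, so with
# default_renderer='markdown' a README.md outranks a README.rst.
finder = ReadmeFinder(default_renderer='markdown')
assert finder._priority('.md') == (0, 1)    # markdown extension, priority 1
assert finder._priority('.rst') == (1, 1)   # not a markdown extension
assert finder._priority('.xyz') == (1, finder.FALLBACK_PRIORITY)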
1183 1186
1184 1187 class ReadmeMatch:
1185 1188
1186 1189 def __init__(self, node, match, priority):
1187 1190 self.node = node
1188 1191 self._match = match
1189 1192 self.priority = priority
1190 1193
1191 1194 @property
1192 1195 def path(self):
1193 1196 return self.node.path
1194 1197
1195 1198 def __repr__(self):
1196 1199 return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
@@ -1,897 +1,897 b''
1 1
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21
22 22 """
23 23 repo group model for RhodeCode
24 24 """
25 25
26 26 import os
27 27 import datetime
28 28 import itertools
29 29 import logging
30 30 import shutil
31 31 import time
32 32 import traceback
33 33 import string
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from rhodecode import events
38 38 from rhodecode.model import BaseModel
39 39 from rhodecode.model.db import (_hash_key, func, or_, in_filter_generator,
40 40 Session, RepoGroup, UserRepoGroupToPerm, User, Permission, UserGroupRepoGroupToPerm,
41 41 UserGroup, Repository)
42 42 from rhodecode.model.permission import PermissionModel
43 43 from rhodecode.model.settings import VcsSettingsModel, SettingsModel
44 44 from rhodecode.lib.caching_query import FromCache
45 45 from rhodecode.lib.utils2 import action_logger_generic
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 class RepoGroupModel(BaseModel):
51 51
52 52 cls = RepoGroup
53 53 PERSONAL_GROUP_DESC = 'personal repo group of user `%(username)s`'
54 54 PERSONAL_GROUP_PATTERN = '${username}' # default
55 55
56 56 def _get_user_group(self, users_group):
57 57 return self._get_instance(UserGroup, users_group,
58 58 callback=UserGroup.get_by_group_name)
59 59
60 60 def _get_repo_group(self, repo_group):
61 61 return self._get_instance(RepoGroup, repo_group,
62 62 callback=RepoGroup.get_by_group_name)
63 63
64 64 def get_repo_group(self, repo_group):
65 65 return self._get_repo_group(repo_group)
66 66
67 67 @LazyProperty
68 68 def repos_path(self):
69 69 """
70 70 Gets the repositories root path from database
71 71 """
72 72
73 73 settings_model = VcsSettingsModel(sa=self.sa)
74 74 return settings_model.get_repos_location()
75 75
76 76 def get_by_group_name(self, repo_group_name, cache=None):
77 77 repo = self.sa.query(RepoGroup) \
78 78 .filter(RepoGroup.group_name == repo_group_name)
79 79
80 80 if cache:
81 81 name_key = _hash_key(repo_group_name)
82 82 repo = repo.options(
83 83 FromCache("sql_cache_short", f"get_repo_group_{name_key}"))
84 84 return repo.scalar()
85 85
86 86 def get_default_create_personal_repo_group(self):
87 87 value = SettingsModel().get_setting_by_name(
88 88 'create_personal_repo_group')
89 89 return value.app_settings_value if value else False
90 90
91 91 def get_personal_group_name_pattern(self):
92 92 value = SettingsModel().get_setting_by_name(
93 93 'personal_repo_group_pattern')
94 94 val = value.app_settings_value if value else None
95 95 group_template = val or self.PERSONAL_GROUP_PATTERN
96 96
97 97 group_template = group_template.lstrip('/')
98 98 return group_template
99 99
100 100 def get_personal_group_name(self, user):
101 101 template = self.get_personal_group_name_pattern()
102 102 return string.Template(template).safe_substitute(
103 103 username=user.username,
104 104 user_id=user.user_id,
105 105 first_name=user.first_name,
106 106 last_name=user.last_name,
107 107 )
108 108
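# A sketch of the template substitution above; the pattern shown is a
# hypothetical admin-configured value, the default being '${username}'.
import string
template = 'u/${user_id}/${username}'
print(string.Template(template).safe_substitute(username='jdoe', user_id=42))
# -> u/42/jdoe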
109 109 def create_personal_repo_group(self, user, commit_early=True):
110 110 desc = self.PERSONAL_GROUP_DESC % {'username': user.username}
111 111 personal_repo_group_name = self.get_personal_group_name(user)
112 112
113 113 # create a new one
114 114 RepoGroupModel().create(
115 115 group_name=personal_repo_group_name,
116 116 group_description=desc,
117 117 owner=user.username,
118 118 personal=True,
119 119 commit_early=commit_early)
120 120
121 121 def _create_default_perms(self, new_group):
122 122 # create default permission
123 123 default_perm = 'group.read'
124 124 def_user = User.get_default_user()
125 125 for p in def_user.user_perms:
126 126 if p.permission.permission_name.startswith('group.'):
127 127 default_perm = p.permission.permission_name
128 128 break
129 129
130 130 repo_group_to_perm = UserRepoGroupToPerm()
131 131 repo_group_to_perm.permission = Permission.get_by_key(default_perm)
132 132
133 133 repo_group_to_perm.group = new_group
134 repo_group_to_perm.user_id = def_user.user_id
134 repo_group_to_perm.user = def_user
135 135 return repo_group_to_perm
136 136
137 137 def _get_group_name_and_parent(self, group_name_full, repo_in_path=False,
138 138 get_object=False):
139 139 """
140 140 Gets the group name and a parent group name from the given group name.
141 141 If repo_in_path is set to true, we assume the full path also includes
142 142 a repo name; in such a case we clean the last element.
143 143
144 144 :param group_name_full:
145 145 """
146 146 split_paths = 1
147 147 if repo_in_path:
148 148 split_paths = 2
149 149 _parts = group_name_full.rsplit(RepoGroup.url_sep(), split_paths)
150 150
151 151 if repo_in_path and len(_parts) > 1:
152 152 # such case last element is the repo_name
153 153 _parts.pop(-1)
154 154 group_name_cleaned = _parts[-1] # just the group name
155 155 parent_repo_group_name = None
156 156
157 157 if len(_parts) > 1:
158 158 parent_repo_group_name = _parts[0]
159 159
160 160 parent_group = None
161 161 if parent_repo_group_name:
162 162 parent_group = RepoGroup.get_by_group_name(parent_repo_group_name)
163 163
164 164 if get_object:
165 165 return group_name_cleaned, parent_repo_group_name, parent_group
166 166
167 167 return group_name_cleaned, parent_repo_group_name
168 168
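# A standalone restatement of the rsplit logic above, for illustration only.
def _split(group_name_full, repo_in_path=False):
    parts = group_name_full.rsplit('/', 2 if repo_in_path else 1)
    if repo_in_path and len(parts) > 1:
        parts.pop(-1)   # last element is the repo name, drop it
    return parts[-1], (parts[0] if len(parts) > 1 else None)

assert _split('a/b/c') == ('c', 'a/b')
assert _split('a/b/c/repo', repo_in_path=True) == ('c', 'a/b')
assert _split('top') == ('top', None)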
169 169 def check_exist_filesystem(self, group_name, exc_on_failure=True):
170 170 create_path = os.path.join(self.repos_path, group_name)
171 171 log.debug('creating new group in %s', create_path)
172 172
173 173 if os.path.isdir(create_path):
174 174 if exc_on_failure:
175 175 abs_create_path = os.path.abspath(create_path)
176 176 raise Exception('Directory `{}` already exists !'.format(abs_create_path))
177 177 return False
178 178 return True
179 179
180 180 def _create_group(self, group_name):
181 181 """
182 182 makes repository group on filesystem
183 183
184 184 :param repo_name:
185 185 :param parent_id:
186 186 """
187 187
188 188 self.check_exist_filesystem(group_name)
189 189 create_path = os.path.join(self.repos_path, group_name)
190 190 log.debug('creating new group in %s', create_path)
191 191 os.makedirs(create_path, mode=0o755)
192 192 log.debug('created group in %s', create_path)
193 193
194 194 def _rename_group(self, old, new):
195 195 """
196 196 Renames a group on filesystem
197 197
198 198 :param group_name:
199 199 """
200 200
201 201 if old == new:
202 202 log.debug('skipping group rename')
203 203 return
204 204
205 205 log.debug('renaming repository group from %s to %s', old, new)
206 206
207 207 old_path = os.path.join(self.repos_path, old)
208 208 new_path = os.path.join(self.repos_path, new)
209 209
210 210 log.debug('renaming repos paths from %s to %s', old_path, new_path)
211 211
212 212 if os.path.isdir(new_path):
213 213 raise Exception('Was trying to rename to already '
214 214 'existing dir %s' % new_path)
215 215 shutil.move(old_path, new_path)
216 216
217 217 def _delete_filesystem_group(self, group, force_delete=False):
218 218 """
219 219 Deletes a group from a filesystem
220 220
221 221 :param group: instance of group from database
222 222 :param force_delete: use shutil rmtree to remove all objects
223 223 """
224 224 paths = group.full_path.split(RepoGroup.url_sep())
225 225 paths = os.sep.join(paths)
226 226
227 227 rm_path = os.path.join(self.repos_path, paths)
228 228 log.info("Removing group %s", rm_path)
229 229 # delete only if that path really exists
230 230 if os.path.isdir(rm_path):
231 231 if force_delete:
232 232 shutil.rmtree(rm_path)
233 233 else:
234 234 # archive that group
235 235 _now = datetime.datetime.now()
236 236 _ms = str(_now.microsecond).rjust(6, '0')
237 237 _d = 'rm__%s_GROUP_%s' % (
238 238 _now.strftime('%Y%m%d_%H%M%S_' + _ms), group.name)
239 239 shutil.move(rm_path, os.path.join(self.repos_path, _d))
240 240
241 241 def create(self, group_name, group_description, owner, just_db=False,
242 242 copy_permissions=False, personal=None, commit_early=True):
243 243
244 244 (group_name_cleaned,
245 245 parent_group_name) = RepoGroupModel()._get_group_name_and_parent(group_name)
246 246
247 247 parent_group = None
248 248 if parent_group_name:
249 249 parent_group = self._get_repo_group(parent_group_name)
250 250 if not parent_group:
251 251 # we tried to create a nested group, but the parent is not
252 252 # existing
253 253 raise ValueError(
254 254 'Parent group `%s` given in `%s` group name '
254 254 'does not exist yet.' % (parent_group_name, group_name))
256 256
257 257 # because we are doing a cleanup, we need to check if such a directory
258 258 # already exists. If we don't, we can accidentally delete an existing
259 259 # directory via cleanup, which can cause data issues: delete renames the
260 260 # folder to a special syntax that later cleanup functions may remove
261 261 # entirely
262 262 cleanup_group = self.check_exist_filesystem(group_name,
263 263 exc_on_failure=False)
264 264 user = self._get_user(owner)
265 265 if not user:
266 266 raise ValueError('Owner %s not found as rhodecode user' % owner)
267 267
268 268 try:
269 269 new_repo_group = RepoGroup()
270 270 new_repo_group.user = user
271 271 new_repo_group.group_description = group_description or group_name
272 272 new_repo_group.parent_group = parent_group
273 273 new_repo_group.group_name = group_name
274 274 new_repo_group.personal = personal
275 275
276 276 self.sa.add(new_repo_group)
277 277
278 278 # create an ADMIN permission for the owner, except if we're a super admin;
279 279 # later the owner should go into the owner field of groups
280 280 if not user.is_admin:
281 281 self.grant_user_permission(repo_group=new_repo_group,
282 282 user=owner, perm='group.admin')
283 283
284 284 if parent_group and copy_permissions:
285 285 # copy permissions from parent
286 286 user_perms = UserRepoGroupToPerm.query() \
287 287 .filter(UserRepoGroupToPerm.group == parent_group).all()
288 288
289 289 group_perms = UserGroupRepoGroupToPerm.query() \
290 290 .filter(UserGroupRepoGroupToPerm.group == parent_group).all()
291 291
292 292 for perm in user_perms:
293 293 # don't copy over the permission for the user who is creating
294 294 # this group; if they are not a super admin, they get the admin
295 295 # permission set above
296 296 if perm.user != user or user.is_admin:
297 297 UserRepoGroupToPerm.create(
298 298 perm.user, new_repo_group, perm.permission)
299 299
300 300 for perm in group_perms:
301 301 UserGroupRepoGroupToPerm.create(
302 302 perm.users_group, new_repo_group, perm.permission)
303 303 else:
304 304 perm_obj = self._create_default_perms(new_repo_group)
305 305 self.sa.add(perm_obj)
306 306
307 307 # now commit the changes early, so we are sure everything is in
308 308 # the database.
309 309 if commit_early:
310 310 self.sa.commit()
311 311 if not just_db:
312 312 self._create_group(new_repo_group.group_name)
313 313
314 314 # trigger the post hook
315 315 from rhodecode.lib import hooks_base
316 316 repo_group = RepoGroup.get_by_group_name(group_name)
317 317
318 318 # update repo group commit caches initially
319 319 repo_group.update_commit_cache()
320 320
321 321 hooks_base.create_repository_group(
322 322 created_by=user.username, **repo_group.get_dict())
323 323
324 324 # Trigger create event.
325 325 events.trigger(events.RepoGroupCreateEvent(repo_group))
326 326
327 327 return new_repo_group
328 328 except Exception:
329 329 self.sa.rollback()
330 330 log.exception('Exception occurred when creating repository group, '
331 331 'doing cleanup...')
332 332 # rollback things manually !
333 333 repo_group = RepoGroup.get_by_group_name(group_name)
334 334 if repo_group:
335 335 RepoGroup.delete(repo_group.group_id)
336 336 self.sa.commit()
337 337 if cleanup_group:
338 338 RepoGroupModel()._delete_filesystem_group(repo_group)
339 339 raise
340 340
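# A hedged sketch of creating a nested group; the parent 'projects' must
# already exist or create() raises ValueError. Names are hypothetical.
group = RepoGroupModel().create(
    group_name='projects/backend',
    group_description='backend team repos',
    owner='admin',
    copy_permissions=True)   # inherit perms from the parent group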
341 341 def update_permissions(
342 342 self, repo_group, perm_additions=None, perm_updates=None,
343 343 perm_deletions=None, recursive=None, check_perms=True,
344 344 cur_user=None):
345 345 from rhodecode.model.repo import RepoModel
346 346 from rhodecode.lib.auth import HasUserGroupPermissionAny
347 347
348 348 if not perm_additions:
349 349 perm_additions = []
350 350 if not perm_updates:
351 351 perm_updates = []
352 352 if not perm_deletions:
353 353 perm_deletions = []
354 354
355 355 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
356 356
357 357 changes = {
358 358 'added': [],
359 359 'updated': [],
360 360 'deleted': [],
361 361 'default_user_changed': None
362 362 }
363 363
364 364 def _set_perm_user(obj, user, perm):
365 365 if isinstance(obj, RepoGroup):
366 366 self.grant_user_permission(
367 367 repo_group=obj, user=user, perm=perm)
368 368 elif isinstance(obj, Repository):
369 369 # private repos do not allow changing the default
370 370 # permissions using recursive mode
371 371 if obj.private and user == User.DEFAULT_USER:
372 372 return
373 373
374 374 # we set group permission but we have to switch to repo
375 375 # permission
376 376 perm = perm.replace('group.', 'repository.')
377 377 RepoModel().grant_user_permission(
378 378 repo=obj, user=user, perm=perm)
379 379
380 380 def _set_perm_group(obj, users_group, perm):
381 381 if isinstance(obj, RepoGroup):
382 382 self.grant_user_group_permission(
383 383 repo_group=obj, group_name=users_group, perm=perm)
384 384 elif isinstance(obj, Repository):
385 385 # we set group permission but we have to switch to repo
386 386 # permission
387 387 perm = perm.replace('group.', 'repository.')
388 388 RepoModel().grant_user_group_permission(
389 389 repo=obj, group_name=users_group, perm=perm)
390 390
391 391 def _revoke_perm_user(obj, user):
392 392 if isinstance(obj, RepoGroup):
393 393 self.revoke_user_permission(repo_group=obj, user=user)
394 394 elif isinstance(obj, Repository):
395 395 RepoModel().revoke_user_permission(repo=obj, user=user)
396 396
397 397 def _revoke_perm_group(obj, user_group):
398 398 if isinstance(obj, RepoGroup):
399 399 self.revoke_user_group_permission(
400 400 repo_group=obj, group_name=user_group)
401 401 elif isinstance(obj, Repository):
402 402 RepoModel().revoke_user_group_permission(
403 403 repo=obj, group_name=user_group)
404 404
405 405 # start updates
406 406 log.debug('Now updating permissions for %s in recursive mode:%s',
407 407 repo_group, recursive)
408 408
409 409 # initialize check function, we'll call that multiple times
410 410 has_group_perm = HasUserGroupPermissionAny(*req_perms)
411 411
412 412 for obj in repo_group.recursive_groups_and_repos():
413 413 # iterated obj is an instance of a repo group or repository in
414 414 # that group; the recursive option can be: none, repos, groups, all
415 415 if recursive == 'all':
416 416 obj = obj
417 417 elif recursive == 'repos':
418 418 # skip groups, other than this one
419 419 if isinstance(obj, RepoGroup) and not obj == repo_group:
420 420 continue
421 421 elif recursive == 'groups':
422 422 # skip repos
423 423 if isinstance(obj, Repository):
424 424 continue
425 425 else: # recursive == 'none':
426 426 # DEFAULT option - don't apply to iterated objects
427 427 # also, we break at the end of this loop if we are not
428 428 # in recursive mode
429 429 obj = repo_group
430 430
431 431 change_obj = obj.get_api_data()
432 432
433 433 # update permissions
434 434 for member_id, perm, member_type in perm_updates:
435 435 member_id = int(member_id)
436 436 if member_type == 'user':
437 437 member_name = User.get(member_id).username
438 438 if isinstance(obj, RepoGroup) and obj == repo_group and member_name == User.DEFAULT_USER:
439 439 # NOTE(dan): detect if we changed permissions for default user
440 440 perm_obj = self.sa.query(UserRepoGroupToPerm) \
441 441 .filter(UserRepoGroupToPerm.user_id == member_id) \
442 442 .filter(UserRepoGroupToPerm.group == repo_group) \
443 443 .scalar()
444 444 if perm_obj and perm_obj.permission.permission_name != perm:
445 445 changes['default_user_changed'] = True
446 446
447 447 # this also updates the current one if found
448 448 _set_perm_user(obj, user=member_id, perm=perm)
449 449 elif member_type == 'user_group':
450 450 member_name = UserGroup.get(member_id).users_group_name
451 451 if not check_perms or has_group_perm(member_name,
452 452 user=cur_user):
453 453 _set_perm_group(obj, users_group=member_id, perm=perm)
454 454 else:
455 455 raise ValueError("member_type must be 'user' or 'user_group' "
456 456 "got {} instead".format(member_type))
457 457
458 458 changes['updated'].append(
459 459 {'change_obj': change_obj, 'type': member_type,
460 460 'id': member_id, 'name': member_name, 'new_perm': perm})
461 461
462 462 # set new permissions
463 463 for member_id, perm, member_type in perm_additions:
464 464 member_id = int(member_id)
465 465 if member_type == 'user':
466 466 member_name = User.get(member_id).username
467 467 _set_perm_user(obj, user=member_id, perm=perm)
468 468 elif member_type == 'user_group':
469 469 # check if we have permissions to alter this usergroup
470 470 member_name = UserGroup.get(member_id).users_group_name
471 471 if not check_perms or has_group_perm(member_name,
472 472 user=cur_user):
473 473 _set_perm_group(obj, users_group=member_id, perm=perm)
474 474 else:
475 475 raise ValueError("member_type must be 'user' or 'user_group' "
476 476 "got {} instead".format(member_type))
477 477
478 478 changes['added'].append(
479 479 {'change_obj': change_obj, 'type': member_type,
480 480 'id': member_id, 'name': member_name, 'new_perm': perm})
481 481
482 482 # delete permissions
483 483 for member_id, perm, member_type in perm_deletions:
484 484 member_id = int(member_id)
485 485 if member_type == 'user':
486 486 member_name = User.get(member_id).username
487 487 _revoke_perm_user(obj, user=member_id)
488 488 elif member_type == 'user_group':
489 489 # check if we have permissions to alter this usergroup
490 490 member_name = UserGroup.get(member_id).users_group_name
491 491 if not check_perms or has_group_perm(member_name,
492 492 user=cur_user):
493 493 _revoke_perm_group(obj, user_group=member_id)
494 494 else:
495 495 raise ValueError("member_type must be 'user' or 'user_group' "
496 496 "got {} instead".format(member_type))
497 497
498 498 changes['deleted'].append(
499 499 {'change_obj': change_obj, 'type': member_type,
500 500 'id': member_id, 'name': member_name, 'new_perm': perm})
501 501
502 502 # if it's not a recursive call for all/repos/groups,
503 503 # break the loop and don't proceed with other changes
504 504 if recursive not in ['all', 'repos', 'groups']:
505 505 break
506 506
507 507 return changes
508 508
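# The `recursive` flag above selects how far a permission change cascades;
# a sketch with hypothetical ids and names:
#   recursive='all'    -> this group, child groups and child repos
#   recursive='repos'  -> this group plus child repositories only
#   recursive='groups' -> this group plus child groups only
#   recursive=None     -> this group only (the loop breaks after one object)
RepoGroupModel().update_permissions(
    repo_group='my-group',
    perm_updates=[(2, 'group.write', 'user')],
    recursive='all', cur_user='admin')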
509 509 def update(self, repo_group, form_data):
510 510 try:
511 511 repo_group = self._get_repo_group(repo_group)
512 512 old_path = repo_group.full_path
513 513
514 514 # change properties
515 515 if 'group_description' in form_data:
516 516 repo_group.group_description = form_data['group_description']
517 517
518 518 if 'enable_locking' in form_data:
519 519 repo_group.enable_locking = form_data['enable_locking']
520 520
521 521 if 'group_parent_id' in form_data:
522 522 parent_group = (
523 523 self._get_repo_group(form_data['group_parent_id']))
524 524 repo_group.group_parent_id = (
525 525 parent_group.group_id if parent_group else None)
526 526 repo_group.parent_group = parent_group
527 527
528 528 # mikhail: to update the full_path, we have to explicitly
529 529 # update group_name
530 530 group_name = form_data.get('group_name', repo_group.name)
531 531 repo_group.group_name = repo_group.get_new_name(group_name)
532 532
533 533 new_path = repo_group.full_path
534 534
535 535 affected_user_ids = []
536 536 if 'user' in form_data:
537 537 old_owner_id = repo_group.user.user_id
538 538 new_owner = User.get_by_username(form_data['user'])
539 539 repo_group.user = new_owner
540 540
541 541 if old_owner_id != new_owner.user_id:
542 542 affected_user_ids = [new_owner.user_id, old_owner_id]
543 543
544 544 self.sa.add(repo_group)
545 545
546 546 # iterate over all members of this group and do fixes:
547 547 # set locking if given;
548 548 # if obj is a RepoGroup, also fix the name of the group according
549 549 # to the parent;
550 550 # if obj is a Repo, fix its name.
551 551 # this can be a potentially heavy operation
552 552 for obj in repo_group.recursive_groups_and_repos():
553 553 # set the value from its parent
554 554 obj.enable_locking = repo_group.enable_locking
555 555 if isinstance(obj, RepoGroup):
556 556 new_name = obj.get_new_name(obj.name)
557 557 log.debug('Fixing group %s to new name %s',
558 558 obj.group_name, new_name)
559 559 obj.group_name = new_name
560 560
561 561 elif isinstance(obj, Repository):
562 562 # we need to get all repositories from this new group and
563 563 # rename them according to the new group path
564 564 new_name = obj.get_new_name(obj.just_name)
565 565 log.debug('Fixing repo %s to new name %s',
566 566 obj.repo_name, new_name)
567 567 obj.repo_name = new_name
568 568
569 569 self.sa.add(obj)
570 570
571 571 self._rename_group(old_path, new_path)
572 572
573 573 # Trigger update event.
574 574 events.trigger(events.RepoGroupUpdateEvent(repo_group))
575 575
576 576 if affected_user_ids:
577 577 PermissionModel().trigger_permission_flush(affected_user_ids)
578 578
579 579 return repo_group
580 580 except Exception:
581 581 log.error(traceback.format_exc())
582 582 raise
583 583
584 584 def delete(self, repo_group, force_delete=False, fs_remove=True):
585 585 repo_group = self._get_repo_group(repo_group)
586 586 if not repo_group:
587 587 return False
588 588 try:
589 589 self.sa.delete(repo_group)
590 590 if fs_remove:
591 591 self._delete_filesystem_group(repo_group, force_delete)
592 592 else:
593 593 log.debug('skipping removal from filesystem')
594 594
595 595 # Trigger delete event.
596 596 events.trigger(events.RepoGroupDeleteEvent(repo_group))
597 597 return True
598 598
599 599 except Exception:
600 600 log.error('Error removing repo_group %s', repo_group)
601 601 raise
602 602
603 603 def grant_user_permission(self, repo_group, user, perm):
604 604 """
605 605 Grant permission for user on given repository group, or update
606 606 existing one if found
607 607
608 608 :param repo_group: Instance of RepoGroup, repositories_group_id,
609 609 or repositories_group name
610 610 :param user: Instance of User, user_id or username
611 611 :param perm: Instance of Permission, or permission_name
612 612 """
613 613
614 614 repo_group = self._get_repo_group(repo_group)
615 615 user = self._get_user(user)
616 616 permission = self._get_perm(perm)
617 617
618 618 # check if we have that permission already
619 619 obj = self.sa.query(UserRepoGroupToPerm)\
620 620 .filter(UserRepoGroupToPerm.user == user)\
621 621 .filter(UserRepoGroupToPerm.group == repo_group)\
622 622 .scalar()
623 623 if obj is None:
624 624 # create new !
625 625 obj = UserRepoGroupToPerm()
626 626 obj.group = repo_group
627 627 obj.user = user
628 628 obj.permission = permission
629 629 self.sa.add(obj)
630 630 log.debug('Granted perm %s to %s on %s', perm, user, repo_group)
631 631 action_logger_generic(
632 632 'granted permission: {} to user: {} on repogroup: {}'.format(
633 633 perm, user, repo_group), namespace='security.repogroup')
634 634 return obj
635 635
636 636 def revoke_user_permission(self, repo_group, user):
637 637 """
638 638 Revoke permission for user on given repository group
639 639
640 640 :param repo_group: Instance of RepoGroup, repositories_group_id,
641 641 or repositories_group name
642 642 :param user: Instance of User, user_id or username
643 643 """
644 644
645 645 repo_group = self._get_repo_group(repo_group)
646 646 user = self._get_user(user)
647 647
648 648 obj = self.sa.query(UserRepoGroupToPerm)\
649 649 .filter(UserRepoGroupToPerm.user == user)\
650 650 .filter(UserRepoGroupToPerm.group == repo_group)\
651 651 .scalar()
652 652 if obj:
653 653 self.sa.delete(obj)
654 654 log.debug('Revoked perm on %s on %s', repo_group, user)
655 655 action_logger_generic(
656 656 'revoked permission from user: {} on repogroup: {}'.format(
657 657 user, repo_group), namespace='security.repogroup')
658 658
659 659 def grant_user_group_permission(self, repo_group, group_name, perm):
660 660 """
661 661 Grant permission for user group on given repository group, or update
662 662 existing one if found
663 663
664 664 :param repo_group: Instance of RepoGroup, repositories_group_id,
665 665 or repositories_group name
666 666 :param group_name: Instance of UserGroup, users_group_id,
667 667 or user group name
668 668 :param perm: Instance of Permission, or permission_name
669 669 """
670 670 repo_group = self._get_repo_group(repo_group)
671 671 group_name = self._get_user_group(group_name)
672 672 permission = self._get_perm(perm)
673 673
674 674 # check if we have that permission already
675 675 obj = self.sa.query(UserGroupRepoGroupToPerm)\
676 676 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
677 677 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
678 678 .scalar()
679 679
680 680 if obj is None:
681 681 # create new
682 682 obj = UserGroupRepoGroupToPerm()
683 683
684 684 obj.group = repo_group
685 685 obj.users_group = group_name
686 686 obj.permission = permission
687 687 self.sa.add(obj)
688 688 log.debug('Granted perm %s to %s on %s', perm, group_name, repo_group)
689 689 action_logger_generic(
690 690 'granted permission: {} to usergroup: {} on repogroup: {}'.format(
691 691 perm, group_name, repo_group), namespace='security.repogroup')
692 692 return obj
693 693
694 694 def revoke_user_group_permission(self, repo_group, group_name):
695 695 """
696 696 Revoke permission for user group on given repository group
697 697
698 698 :param repo_group: Instance of RepoGroup, repositories_group_id,
699 699 or repositories_group name
700 700 :param group_name: Instance of UserGroup, users_group_id,
701 701 or user group name
702 702 """
703 703 repo_group = self._get_repo_group(repo_group)
704 704 group_name = self._get_user_group(group_name)
705 705
706 706 obj = self.sa.query(UserGroupRepoGroupToPerm)\
707 707 .filter(UserGroupRepoGroupToPerm.group == repo_group)\
708 708 .filter(UserGroupRepoGroupToPerm.users_group == group_name)\
709 709 .scalar()
710 710 if obj:
711 711 self.sa.delete(obj)
712 712 log.debug('Revoked perm to %s on %s', repo_group, group_name)
713 713 action_logger_generic(
714 714 'revoked permission from usergroup: {} on repogroup: {}'.format(
715 715 group_name, repo_group), namespace='security.repogroup')
716 716
717 717 @classmethod
718 718 def update_commit_cache(cls, repo_groups=None):
719 719 if not repo_groups:
720 720 repo_groups = RepoGroup.getAll()
721 721 for repo_group in repo_groups:
722 722 repo_group.update_commit_cache()
723 723
724 724 def get_repo_groups_as_dict(self, repo_group_list=None, admin=False,
725 725 super_user_actions=False):
726 726
727 727 from pyramid.threadlocal import get_current_request
728 728 _render = get_current_request().get_partial_renderer(
729 729 'rhodecode:templates/data_table/_dt_elements.mako')
730 730 c = _render.get_call_context()
731 731 h = _render.get_helpers()
732 732
733 733 def quick_menu(repo_group_name):
734 734 return _render('quick_repo_group_menu', repo_group_name)
735 735
736 736 def repo_group_lnk(repo_group_name):
737 737 return _render('repo_group_name', repo_group_name)
738 738
739 739 def last_change(last_change):
740 740 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
741 741 ts = time.time()
742 742 utc_offset = (datetime.datetime.fromtimestamp(ts)
743 743 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
744 744 last_change = last_change + datetime.timedelta(seconds=utc_offset)
745 745 return _render("last_change", last_change)
746 746
747 747 def desc(desc, personal):
748 748 return _render(
749 749 'repo_group_desc', desc, personal, c.visual.stylify_metatags)
750 750
751 751 def repo_group_actions(repo_group_id, repo_group_name, gr_count):
752 752 return _render(
753 753 'repo_group_actions', repo_group_id, repo_group_name, gr_count)
754 754
755 755 def repo_group_name(repo_group_name, children_groups):
756 756 return _render("repo_group_name", repo_group_name, children_groups)
757 757
758 758 def user_profile(username):
759 759 return _render('user_profile', username)
760 760
761 761 repo_group_data = []
762 762 for group in repo_group_list:
763 763 # NOTE(marcink): because we use only raw column we need to load it like that
764 764 changeset_cache = RepoGroup._load_changeset_cache(
765 765 '', group._changeset_cache)
766 766 last_commit_change = RepoGroup._load_commit_change(changeset_cache)
767 767 row = {
768 768 "menu": quick_menu(group.group_name),
769 769 "name": repo_group_lnk(group.group_name),
770 770 "name_raw": group.group_name,
771 771
772 772 "last_change": last_change(last_commit_change),
773 773
774 774 "last_changeset": "",
775 775 "last_changeset_raw": "",
776 776
777 777 "desc": desc(h.escape(group.group_description), group.personal),
778 778 "top_level_repos": 0,
779 779 "owner": user_profile(group.User.username)
780 780 }
781 781 if admin:
782 782 repo_count = group.repositories.count()
783 children_groups = map(
784 h.safe_unicode,
783 children_groups = list(map(
784 h.safe_str,
785 785 itertools.chain((g.name for g in group.parents),
786 (x.name for x in [group])))
786 (x.name for x in [group]))))
787 787 row.update({
788 788 "action": repo_group_actions(
789 789 group.group_id, group.group_name, repo_count),
790 790 "top_level_repos": repo_count,
791 791 "name": repo_group_name(group.group_name, children_groups),
792 792
793 793 })
794 794 repo_group_data.append(row)
795 795
796 796 return repo_group_data
797 797
798 798 def get_repo_groups_data_table(
799 799 self, draw, start, limit,
800 800 search_q, order_by, order_dir,
801 801 auth_user, repo_group_id):
802 802 from rhodecode.model.scm import RepoGroupList
803 803
804 804 _perms = ['group.read', 'group.write', 'group.admin']
805 805 repo_groups = RepoGroup.query() \
806 806 .filter(RepoGroup.group_parent_id == repo_group_id) \
807 807 .all()
808 808 auth_repo_group_list = RepoGroupList(
809 809 repo_groups, perm_set=_perms,
810 810 extra_kwargs=dict(user=auth_user))
811 811
812 812 allowed_ids = [-1]
813 813 for repo_group in auth_repo_group_list:
814 814 allowed_ids.append(repo_group.group_id)
815 815
816 816 repo_groups_data_total_count = RepoGroup.query() \
817 817 .filter(RepoGroup.group_parent_id == repo_group_id) \
818 818 .filter(or_(
819 819 # generate multiple IN clauses to work around database limits on large IN lists
820 820 *in_filter_generator(RepoGroup.group_id, allowed_ids))
821 821 ) \
822 822 .count()
823 823
824 824 base_q = Session.query(
825 825 RepoGroup.group_name,
826 826 RepoGroup.group_name_hash,
827 827 RepoGroup.group_description,
828 828 RepoGroup.group_id,
829 829 RepoGroup.personal,
830 830 RepoGroup.updated_on,
831 831 RepoGroup._changeset_cache,
832 832 User,
833 833 ) \
834 834 .filter(RepoGroup.group_parent_id == repo_group_id) \
835 835 .filter(or_(
836 836 # generate multiple IN clauses to work around database limits on large IN lists
837 837 *in_filter_generator(RepoGroup.group_id, allowed_ids))
838 838 ) \
839 839 .join(User, User.user_id == RepoGroup.user_id) \
840 840 .group_by(RepoGroup, User)
841 841
842 842 repo_groups_data_total_filtered_count = base_q.count()
843 843
844 844 sort_defined = False
845 845
846 846 if order_by == 'group_name':
847 847 sort_col = func.lower(RepoGroup.group_name)
848 848 sort_defined = True
849 849 elif order_by == 'user_username':
850 850 sort_col = User.username
851 851 else:
852 852 sort_col = getattr(RepoGroup, order_by, None)
853 853
854 854 if sort_defined or sort_col:
855 855 if order_dir == 'asc':
856 856 sort_col = sort_col.asc()
857 857 else:
858 858 sort_col = sort_col.desc()
859 859
860 860 base_q = base_q.order_by(sort_col)
861 861 base_q = base_q.offset(start).limit(limit)
862 862
863 863 repo_group_list = base_q.all()
864 864
865 865 repo_groups_data = RepoGroupModel().get_repo_groups_as_dict(
866 866 repo_group_list=repo_group_list, admin=False)
867 867
868 868 data = ({
869 869 'draw': draw,
870 870 'data': repo_groups_data,
871 871 'recordsTotal': repo_groups_data_total_count,
872 872 'recordsFiltered': repo_groups_data_total_filtered_count,
873 873 })
874 874 return data
875 875
876 876 def _get_defaults(self, repo_group_name):
877 877 repo_group = RepoGroup.get_by_group_name(repo_group_name)
878 878
879 879 if repo_group is None:
880 880 return None
881 881
882 882 defaults = repo_group.get_dict()
883 883 defaults['repo_group_name'] = repo_group.name
884 884 defaults['repo_group_description'] = repo_group.group_description
885 885 defaults['repo_group_enable_locking'] = repo_group.enable_locking
886 886
887 887 # we use -1 because that is how we mark an empty group in HTML
888 888 defaults['repo_group'] = defaults['group_parent_id'] or -1
889 889
890 890 # fill owner
891 891 if repo_group.user:
892 892 defaults.update({'user': repo_group.user.username})
893 893 else:
894 894 replacement_user = User.get_first_super_admin().username
895 895 defaults.update({'user': replacement_user})
896 896
897 897 return defaults
@@ -1,1027 +1,1042 b''
1 1
2 2 # Copyright (C) 2010-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 """
21 21 Scm model for RhodeCode
22 22 """
23 23
24 24 import os.path
25 25 import traceback
26 26 import logging
27 27 import io
28 28
29 29 from sqlalchemy import func
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 import rhodecode
33 from rhodecode.lib.str_utils import safe_bytes
33 34 from rhodecode.lib.vcs import get_backend
34 35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 36 from rhodecode.lib.vcs.nodes import FileNode
36 37 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 38 from rhodecode.lib import helpers as h, rc_cache
38 39 from rhodecode.lib.auth import (
39 40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 41 HasUserGroupPermissionAny)
41 42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 43 from rhodecode.lib import hooks_utils
43 44 from rhodecode.lib.utils import (
44 45 get_filesystem_repos, make_db_config)
45 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
46 from rhodecode.lib.str_utils import safe_str
46 47 from rhodecode.lib.system_info import get_system_info
47 48 from rhodecode.model import BaseModel
48 49 from rhodecode.model.db import (
49 50 or_, false,
50 51 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 52 PullRequest, FileStore)
52 53 from rhodecode.model.settings import VcsSettingsModel
53 54 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54 55
55 56 log = logging.getLogger(__name__)
56 57
57 58
58 59 class UserTemp(object):
59 60 def __init__(self, user_id):
60 61 self.user_id = user_id
61 62
62 63 def __repr__(self):
63 64 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
64 65
65 66
66 67 class RepoTemp(object):
67 68 def __init__(self, repo_id):
68 69 self.repo_id = repo_id
69 70
70 71 def __repr__(self):
71 72 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
72 73
73 74
74 75 class SimpleCachedRepoList(object):
75 76 """
76 77 Lighter version of repo iteration without the scm initialisation,
77 78 and with cache usage
78 79 """
79 80 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 81 self.db_repo_list = db_repo_list
81 82 self.repos_path = repos_path
82 83 self.order_by = order_by
83 84 self.reversed = (order_by or '').startswith('-')
84 85 if not perm_set:
85 86 perm_set = ['repository.read', 'repository.write',
86 87 'repository.admin']
87 88 self.perm_set = perm_set
88 89
89 90 def __len__(self):
90 91 return len(self.db_repo_list)
91 92
92 93 def __repr__(self):
93 94 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
94 95
95 96 def __iter__(self):
96 97 for dbr in self.db_repo_list:
97 98 # check permission at this level
98 99 has_perm = HasRepoPermissionAny(*self.perm_set)(
99 100 dbr.repo_name, 'SimpleCachedRepoList check')
100 101 if not has_perm:
101 102 continue
102 103
103 104 tmp_d = {
104 105 'name': dbr.repo_name,
105 106 'dbrepo': dbr.get_dict(),
106 107 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
107 108 }
108 109 yield tmp_d
109 110
110 111
111 112 class _PermCheckIterator(object):
112 113
113 114 def __init__(
114 115 self, obj_list, obj_attr, perm_set, perm_checker,
115 116 extra_kwargs=None):
116 117 """
117 118 Creates iterator from given list of objects, additionally
118 119 checking permission for them from perm_set var
119 120
120 121 :param obj_list: list of db objects
121 122 :param obj_attr: attribute of object to pass into perm_checker
122 123 :param perm_set: list of permissions to check
123 124 :param perm_checker: callable to check permissions against
124 125 """
125 126 self.obj_list = obj_list
126 127 self.obj_attr = obj_attr
127 128 self.perm_set = perm_set
128 129 self.perm_checker = perm_checker(*self.perm_set)
129 130 self.extra_kwargs = extra_kwargs or {}
130 131
131 132 def __len__(self):
132 133 return len(self.obj_list)
133 134
134 135 def __repr__(self):
135 136 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
136 137
137 138 def __iter__(self):
138 139 for db_obj in self.obj_list:
139 140 # check permission at this level
140 141 # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
141 142 name = db_obj.__dict__.get(self.obj_attr, None)
142 143 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
143 144 continue
144 145
145 146 yield db_obj
146 147
147 148
148 149 class RepoList(_PermCheckIterator):
149 150
150 151 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
151 152 if not perm_set:
152 153 perm_set = ['repository.read', 'repository.write', 'repository.admin']
153 154
154 155 super(RepoList, self).__init__(
155 156 obj_list=db_repo_list,
156 157 obj_attr='_repo_name', perm_set=perm_set,
157 158 perm_checker=HasRepoPermissionAny,
158 159 extra_kwargs=extra_kwargs)
159 160
160 161
161 162 class RepoGroupList(_PermCheckIterator):
162 163
163 164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
164 165 if not perm_set:
165 166 perm_set = ['group.read', 'group.write', 'group.admin']
166 167
167 168 super(RepoGroupList, self).__init__(
168 169 obj_list=db_repo_group_list,
169 170 obj_attr='_group_name', perm_set=perm_set,
170 171 perm_checker=HasRepoGroupPermissionAny,
171 172 extra_kwargs=extra_kwargs)
172 173
173 174
174 175 class UserGroupList(_PermCheckIterator):
175 176
176 177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
177 178 if not perm_set:
178 179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
179 180
180 181 super(UserGroupList, self).__init__(
181 182 obj_list=db_user_group_list,
182 183 obj_attr='users_group_name', perm_set=perm_set,
183 184 perm_checker=HasUserGroupPermissionAny,
184 185 extra_kwargs=extra_kwargs)
185 186
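
A short usage sketch of the permission-checking iterators defined above; it assumes an initialized RhodeCode session, and `auth_user` is an illustrative authenticated-user object:

    # keep only the repo groups the given user may at least read;
    # the permission check runs lazily, per yielded item
    groups = RepoGroup.query().all()
    readable = RepoGroupList(
        groups,
        perm_set=['group.read', 'group.write', 'group.admin'],
        extra_kwargs=dict(user=auth_user))
    allowed_ids = [g.group_id for g in readable]
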
186 187
187 188 class ScmModel(BaseModel):
188 189 """
189 190 Generic Scm Model
190 191 """
191 192
192 193 @LazyProperty
193 194 def repos_path(self):
194 195 """
195 196 Gets the repositories root path from database
196 197 """
197 198
198 199 settings_model = VcsSettingsModel(sa=self.sa)
199 200 return settings_model.get_repos_location()
200 201
201 202 def repo_scan(self, repos_path=None):
202 203 """
203 204 Listing of repositories in given path. This path should not be a
204 205 repository itself. Return a dictionary of repository objects
205 206
206 207 :param repos_path: path to directory containing repositories
207 208 """
208 209
209 210 if repos_path is None:
210 211 repos_path = self.repos_path
211 212
212 213 log.info('scanning for repositories in %s', repos_path)
213 214
214 215 config = make_db_config()
215 216 config.set('extensions', 'largefiles', '')
216 217 repos = {}
217 218
218 219 for name, path in get_filesystem_repos(repos_path, recursive=True):
219 220 # name needs to be decomposed and put back together using the /
220 221 # since this is the internal storage separator for rhodecode
221 222 name = Repository.normalize_repo_name(name)
222 223
223 224 try:
224 225 if name in repos:
225 226 raise RepositoryError('Duplicate repository name %s '
226 227 'found in %s' % (name, path))
227 228 elif path[0] in rhodecode.BACKENDS:
228 229 backend = get_backend(path[0])
229 230 repos[name] = backend(path[1], config=config,
230 231 with_wire={"cache": False})
231 232 except OSError:
232 233 continue
233 234 except RepositoryError:
234 235 log.exception('Failed to create a repo')
235 236 continue
236 237
237 238 log.debug('found %s paths with repositories', len(repos))
238 239 return repos
239 240
240 241 def get_repos(self, all_repos=None, sort_key=None):
241 242 """
242 243 Get all repositories from db and for each repo create its
243 244 backend instance and fill that backend with information from database
244 245
245 246 :param all_repos: list of repository names as strings
246 247 give specific repositories list, good for filtering
247 248
248 249 :param sort_key: initial sorting of repositories
249 250 """
250 251 if all_repos is None:
251 252 all_repos = self.sa.query(Repository)\
252 253 .filter(Repository.group_id == None)\
253 254 .order_by(func.lower(Repository.repo_name)).all()
254 255 repo_iter = SimpleCachedRepoList(
255 256 all_repos, repos_path=self.repos_path, order_by=sort_key)
256 257 return repo_iter
257 258
258 259 def get_repo_groups(self, all_groups=None):
259 260 if all_groups is None:
260 261 all_groups = RepoGroup.query()\
261 262 .filter(RepoGroup.group_parent_id == None).all()
262 263 return [x for x in RepoGroupList(all_groups)]
263 264
264 265 def mark_for_invalidation(self, repo_name, delete=False):
265 266 """
266 267 Mark caches of this repo invalid in the database. `delete` flag
267 268 removes the cache entries
268 269
269 270 :param repo_name: the repo_name for which caches should be marked
270 271 invalid, or deleted
271 272 :param delete: delete the cache entry keys instead of setting a bool
272 273 flag on them, and also purge the caches used by dogpile
273 274 """
274 275 repo = Repository.get_by_repo_name(repo_name)
275 276
276 277 if repo:
277 278 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
278 279 repo_id=repo.repo_id)
279 280 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
280 281
281 282 repo_id = repo.repo_id
282 283 config = repo._config
283 284 config.set('extensions', 'largefiles', '')
284 285 repo.update_commit_cache(config=config, cs_cache=None)
285 286 if delete:
286 287 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
287 rc_cache.clear_cache_namespace(
288 'cache_repo', cache_namespace_uid, invalidate=True)
288 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
289 289
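
A minimal sketch of cache invalidation as used throughout this module; the repository name is hypothetical:

    scm = ScmModel()
    # flag cached data for `myrepo` as stale
    scm.mark_for_invalidation('myrepo')
    # or delete the cache keys and purge the dogpile namespace entirely
    scm.mark_for_invalidation('myrepo', delete=True)
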
290 290 def toggle_following_repo(self, follow_repo_id, user_id):
291 291
292 292 f = self.sa.query(UserFollowing)\
293 293 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
294 294 .filter(UserFollowing.user_id == user_id).scalar()
295 295
296 296 if f is not None:
297 297 try:
298 298 self.sa.delete(f)
299 299 return
300 300 except Exception:
301 301 log.error(traceback.format_exc())
302 302 raise
303 303
304 304 try:
305 305 f = UserFollowing()
306 306 f.user_id = user_id
307 307 f.follows_repo_id = follow_repo_id
308 308 self.sa.add(f)
309 309 except Exception:
310 310 log.error(traceback.format_exc())
311 311 raise
312 312
313 313 def toggle_following_user(self, follow_user_id, user_id):
314 314 f = self.sa.query(UserFollowing)\
315 315 .filter(UserFollowing.follows_user_id == follow_user_id)\
316 316 .filter(UserFollowing.user_id == user_id).scalar()
317 317
318 318 if f is not None:
319 319 try:
320 320 self.sa.delete(f)
321 321 return
322 322 except Exception:
323 323 log.error(traceback.format_exc())
324 324 raise
325 325
326 326 try:
327 327 f = UserFollowing()
328 328 f.user_id = user_id
329 329 f.follows_user_id = follow_user_id
330 330 self.sa.add(f)
331 331 except Exception:
332 332 log.error(traceback.format_exc())
333 333 raise
334 334
335 335 def is_following_repo(self, repo_name, user_id, cache=False):
336 336 r = self.sa.query(Repository)\
337 337 .filter(Repository.repo_name == repo_name).scalar()
338 338
339 339 f = self.sa.query(UserFollowing)\
340 340 .filter(UserFollowing.follows_repository == r)\
341 341 .filter(UserFollowing.user_id == user_id).scalar()
342 342
343 343 return f is not None
344 344
345 345 def is_following_user(self, username, user_id, cache=False):
346 346 u = User.get_by_username(username)
347 347
348 348 f = self.sa.query(UserFollowing)\
349 349 .filter(UserFollowing.follows_user == u)\
350 350 .filter(UserFollowing.user_id == user_id).scalar()
351 351
352 352 return f is not None
353 353
354 354 def get_followers(self, repo):
355 355 repo = self._get_repo(repo)
356 356
357 357 return self.sa.query(UserFollowing)\
358 358 .filter(UserFollowing.follows_repository == repo).count()
359 359
360 360 def get_forks(self, repo):
361 361 repo = self._get_repo(repo)
362 362 return self.sa.query(Repository)\
363 363 .filter(Repository.fork == repo).count()
364 364
365 365 def get_pull_requests(self, repo):
366 366 repo = self._get_repo(repo)
367 367 return self.sa.query(PullRequest)\
368 368 .filter(PullRequest.target_repo == repo)\
369 369 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
370 370
371 371 def get_artifacts(self, repo):
372 372 repo = self._get_repo(repo)
373 373 return self.sa.query(FileStore)\
374 374 .filter(FileStore.repo == repo)\
375 375 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
376 376
377 377 def mark_as_fork(self, repo, fork, user):
378 378 repo = self._get_repo(repo)
379 379 fork = self._get_repo(fork)
380 380 if fork and repo.repo_id == fork.repo_id:
381 381 raise Exception("Cannot set repository as fork of itself")
382 382
383 383 if fork and repo.repo_type != fork.repo_type:
384 384 raise RepositoryError(
385 385 "Cannot set repository as fork of repository with other type")
386 386
387 387 repo.fork = fork
388 388 self.sa.add(repo)
389 389 return repo
390 390
391 391 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
392 392 dbrepo = self._get_repo(repo)
393 393 remote_uri = remote_uri or dbrepo.clone_uri
394 394 if not remote_uri:
395 395 raise Exception("This repository doesn't have a clone uri")
396 396
397 397 repo = dbrepo.scm_instance(cache=False)
398 398 repo.config.clear_section('hooks')
399 399
400 400 try:
401 401 # NOTE(marcink): add extra validation so we skip invalid urls;
402 402 # this is because these tasks can be executed via the scheduler without
403 403 # proper validation of remote_uri
404 404 if validate_uri:
405 405 config = make_db_config(clear_session=False)
406 406 url_validator(remote_uri, dbrepo.repo_type, config)
407 407 except InvalidCloneUrl:
408 408 raise
409 409
410 410 repo_name = dbrepo.repo_name
411 411 try:
412 412 # TODO: we need to make sure those operations call proper hooks !
413 413 repo.fetch(remote_uri)
414 414
415 415 self.mark_for_invalidation(repo_name)
416 416 except Exception:
417 417 log.error(traceback.format_exc())
418 418 raise
419 419
420 420 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
421 421 dbrepo = self._get_repo(repo)
422 422 remote_uri = remote_uri or dbrepo.push_uri
423 423 if not remote_uri:
424 424 raise Exception("This repository doesn't have a clone uri")
425 425
426 426 repo = dbrepo.scm_instance(cache=False)
427 427 repo.config.clear_section('hooks')
428 428
429 429 try:
430 430 # NOTE(marcink): add extra validation so we skip invalid urls;
431 431 # this is because these tasks can be executed via the scheduler without
432 432 # proper validation of remote_uri
433 433 if validate_uri:
434 434 config = make_db_config(clear_session=False)
435 435 url_validator(remote_uri, dbrepo.repo_type, config)
436 436 except InvalidCloneUrl:
437 437 raise
438 438
439 439 try:
440 440 repo.push(remote_uri)
441 441 except Exception:
442 442 log.error(traceback.format_exc())
443 443 raise
444 444
445 445 def commit_change(self, repo, repo_name, commit, user, author, message,
446 content, f_path):
446 content: bytes, f_path: bytes):
447 447 """
448 448 Commits changes
449
450 :param repo: SCM instance
451
452 449 """
453 450 user = self._get_user(user)
454 451
455 # decoding here will force that we have proper encoded values
456 # in any other case this will throw exceptions and deny commit
457 content = safe_str(content)
458 path = safe_str(f_path)
459 452 # message and author need to be str
460 453 # the proper backend should then translate that into the required type
461 message = safe_unicode(message)
462 author = safe_unicode(author)
454 message = safe_str(message)
455 author = safe_str(author)
463 456 imc = repo.in_memory_commit
464 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
457 imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
465 458 try:
466 459 # TODO: handle pre-push action !
467 460 tip = imc.commit(
468 461 message=message, author=author, parents=[commit],
469 462 branch=commit.branch)
470 463 except Exception as e:
471 464 log.error(traceback.format_exc())
472 465 raise IMCCommitError(str(e))
473 466 finally:
474 467 # always clear caches, if commit fails we want fresh object also
475 468 self.mark_for_invalidation(repo_name)
476 469
477 470 # We trigger the post-push action
478 471 hooks_utils.trigger_post_push_hook(
479 472 username=user.username, action='push_local', hook_type='post_push',
480 473 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
481 474 return tip
482 475
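
A sketch of a single-file edit through `commit_change` under the new python3 signature, where `content` and `f_path` are bytes; `db_repo`, `user_id` and the author string are illustrative:

    scm_repo = db_repo.scm_instance(cache=False)
    head = scm_repo.get_commit()  # commit to base the change on
    tip = ScmModel().commit_change(
        repo=scm_repo, repo_name=db_repo.repo_name, commit=head,
        user=user_id, author='Jane Doe <jane@example.com>',
        message='update readme', content=b'new content\n',
        f_path=b'README.rst')
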
483 def _sanitize_path(self, f_path):
484 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
485 raise NonRelativePathError('%s is not an relative path' % f_path)
476 def _sanitize_path(self, f_path: bytes):
477 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
478 raise NonRelativePathError(b'%b is not a relative path' % f_path)
486 479 if f_path:
487 480 f_path = os.path.normpath(f_path)
488 481 return f_path
489 482
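
The sanitizer now operates on bytes; a couple of hedged examples of the expected behaviour:

    sanitize = ScmModel()._sanitize_path
    sanitize(b'docs/index.rst')   # returns the normalized relative path
    sanitize(b'../etc/passwd')    # raises NonRelativePathError
    sanitize(b'/absolute/path')   # raises NonRelativePathError
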
490 483 def get_dirnode_metadata(self, request, commit, dir_node):
491 484 if not dir_node.is_dir():
492 485 return []
493 486
494 487 data = []
495 488 for node in dir_node:
496 489 if not node.is_file():
497 490 # we skip file-nodes
498 491 continue
499 492
500 493 last_commit = node.last_commit
501 494 last_commit_date = last_commit.date
502 495 data.append({
503 496 'name': node.name,
504 497 'size': h.format_byte_size_binary(node.size),
505 498 'modified_at': h.format_date(last_commit_date),
506 499 'modified_ts': last_commit_date.isoformat(),
507 500 'revision': last_commit.revision,
508 501 'short_id': last_commit.short_id,
509 502 'message': h.escape(last_commit.message),
510 503 'author': h.escape(last_commit.author),
511 504 'user_profile': h.gravatar_with_user(
512 505 request, last_commit.author),
513 506 })
514 507
515 508 return data
516 509
517 510 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
518 511 extended_info=False, content=False, max_file_bytes=None):
519 512 """
520 513 recursive walk in root dir and return a set of all paths in that dir
521 514 based on repository walk function
522 515
523 516 :param repo_name: name of repository
524 517 :param commit_id: commit id for which to list nodes
525 518 :param root_path: root path to list
526 519 :param flat: return as a list, if False returns a dict with description
527 520 :param extended_info: show additional info such as md5, binary, size etc
528 521 :param content: add nodes content to the return data
529 522 :param max_file_bytes: will not return file contents over this limit
530 523
531 524 """
532 525 _files = list()
533 526 _dirs = list()
527
534 528 try:
535 529 _repo = self._get_repo(repo_name)
536 530 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
537 531 root_path = root_path.lstrip('/')
538 for __, dirs, files in commit.walk(root_path):
532
533 # get RootNode, inject pre-load options before walking
534 top_node = commit.get_node(root_path)
535 extended_info_pre_load = []
536 if extended_info:
537 extended_info_pre_load += ['md5']
538 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
539
540 for __, dirs, files in commit.walk(top_node):
539 541
540 542 for f in files:
541 543 _content = None
542 _data = f_name = f.unicode_path
544 _data = f_name = f.str_path
543 545
544 546 if not flat:
545 547 _data = {
546 548 "name": h.escape(f_name),
547 549 "type": "file",
548 550 }
549 551 if extended_info:
550 552 _data.update({
551 553 "md5": f.md5,
552 554 "binary": f.is_binary,
553 555 "size": f.size,
554 556 "extension": f.extension,
555 557 "mimetype": f.mimetype,
556 558 "lines": f.lines()[0]
557 559 })
558 560
559 561 if content:
560 562 over_size_limit = (max_file_bytes is not None
561 563 and f.size > max_file_bytes)
562 564 full_content = None
563 565 if not f.is_binary and not over_size_limit:
564 full_content = safe_str(f.content)
566 full_content = f.str_content
565 567
566 568 _data.update({
567 569 "content": full_content,
568 570 })
569 571 _files.append(_data)
570 572
571 573 for d in dirs:
572 _data = d_name = d.unicode_path
574 _data = d_name = d.str_path
573 575 if not flat:
574 576 _data = {
575 577 "name": h.escape(d_name),
576 578 "type": "dir",
577 579 }
578 580 if extended_info:
579 581 _data.update({
580 "md5": None,
581 "binary": None,
582 "size": None,
583 "extension": None,
582 "md5": "",
583 "binary": False,
584 "size": 0,
585 "extension": "",
584 586 })
585 587 if content:
586 588 _data.update({
587 589 "content": None
588 590 })
589 591 _dirs.append(_data)
590 592 except RepositoryError:
591 593 log.exception("Exception in get_nodes")
592 594 raise
593 595
594 596 return _dirs, _files
595 597
596 598 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
597 599 """
598 600 Generate files for quick filter in files view
599 601 """
600 602
601 603 _files = list()
602 604 _dirs = list()
603 605 try:
604 606 _repo = self._get_repo(repo_name)
605 607 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
606 608 root_path = root_path.lstrip('/')
607 609 for __, dirs, files in commit.walk(root_path):
608 610
609 611 for f in files:
610 612
611 613 _data = {
612 "name": h.escape(f.unicode_path),
614 "name": h.escape(f.str_path),
613 615 "type": "file",
614 616 }
615 617
616 618 _files.append(_data)
617 619
618 620 for d in dirs:
619 621
620 622 _data = {
621 "name": h.escape(d.unicode_path),
623 "name": h.escape(d.str_path),
622 624 "type": "dir",
623 625 }
624 626
625 627 _dirs.append(_data)
626 628 except RepositoryError:
627 629 log.exception("Exception in get_quick_filter_nodes")
628 630 raise
629 631
630 632 return _dirs, _files
631 633
632 634 def get_node(self, repo_name, commit_id, file_path,
633 635 extended_info=False, content=False, max_file_bytes=None, cache=True):
634 636 """
635 637 retrieve single node from commit
636 638 """
639
637 640 try:
638 641
639 642 _repo = self._get_repo(repo_name)
640 643 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
641 644
642 645 file_node = commit.get_node(file_path)
643 646 if file_node.is_dir():
644 647 raise RepositoryError('The given path is a directory')
645 648
646 649 _content = None
647 f_name = file_node.unicode_path
650 f_name = file_node.str_path
648 651
649 652 file_data = {
650 653 "name": h.escape(f_name),
651 654 "type": "file",
652 655 }
653 656
654 657 if extended_info:
655 658 file_data.update({
656 659 "extension": file_node.extension,
657 660 "mimetype": file_node.mimetype,
658 661 })
659 662
660 663 if cache:
661 664 md5 = file_node.md5
662 665 is_binary = file_node.is_binary
663 666 size = file_node.size
664 667 else:
665 668 is_binary, md5, size, _content = file_node.metadata_uncached()
666 669
667 670 file_data.update({
668 671 "md5": md5,
669 672 "binary": is_binary,
670 673 "size": size,
671 674 })
672 675
673 676 if content and cache:
674 677 # get content + cache
675 678 size = file_node.size
676 679 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
677 680 full_content = None
678 681 all_lines = 0
679 682 if not file_node.is_binary and not over_size_limit:
680 full_content = safe_unicode(file_node.content)
683 full_content = safe_str(file_node.content)
681 684 all_lines, empty_lines = file_node.count_lines(full_content)
682 685
683 686 file_data.update({
684 687 "content": full_content,
685 688 "lines": all_lines
686 689 })
687 690 elif content:
688 691 # get content *without* cache
689 692 if _content is None:
690 693 is_binary, md5, size, _content = file_node.metadata_uncached()
691 694
692 695 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
693 696 full_content = None
694 697 all_lines = 0
695 698 if not is_binary and not over_size_limit:
696 full_content = safe_unicode(_content)
699 full_content = safe_str(_content)
697 700 all_lines, empty_lines = file_node.count_lines(full_content)
698 701
699 702 file_data.update({
700 703 "content": full_content,
701 704 "lines": all_lines
702 705 })
703 706
704 707 except RepositoryError:
705 708 log.exception("Exception in get_node")
706 709 raise
707 710
708 711 return file_data
709 712
710 713 def get_fts_data(self, repo_name, commit_id, root_path='/'):
711 714 """
712 715 Fetch node tree for usage in full text search
713 716 """
714 717
715 718 tree_info = list()
716 719
717 720 try:
718 721 _repo = self._get_repo(repo_name)
719 722 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
720 723 root_path = root_path.lstrip('/')
721 for __, dirs, files in commit.walk(root_path):
724 top_node = commit.get_node(root_path)
725 top_node.default_pre_load = []
726
727 for __, dirs, files in commit.walk(top_node):
722 728
723 729 for f in files:
724 730 is_binary, md5, size, _content = f.metadata_uncached()
725 731 _data = {
726 "name": f.unicode_path,
732 "name": f.str_path,
727 733 "md5": md5,
728 734 "extension": f.extension,
729 735 "binary": is_binary,
730 736 "size": size
731 737 }
732 738
733 739 tree_info.append(_data)
734 740
735 741 except RepositoryError:
736 742 log.exception("Exception in get_nodes")
737 743 raise
738 744
739 745 return tree_info
740 746
741 747 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
742 748 author=None, trigger_push_hook=True):
743 749 """
744 750 Commits given multiple nodes into repo
745 751
746 752 :param user: RhodeCode User object or user_id, the committer
747 753 :param repo: RhodeCode Repository object
748 754 :param message: commit message
749 755 :param nodes: mapping {filename:{'content':content},...}
750 756 :param parent_commit: parent commit; can be empty, then it's the
751 757 initial commit
752 758 :param author: author of commit, can be different than the committer,
753 759 only for git
754 760 :param trigger_push_hook: trigger push hooks
755 761
756 762 :returns: new committed commit
757 763 """
758 764
759 765 user = self._get_user(user)
760 766 scm_instance = repo.scm_instance(cache=False)
761 767
762 processed_nodes = []
763 for f_path in nodes:
764 f_path = self._sanitize_path(f_path)
765 content = nodes[f_path]['content']
766 f_path = safe_str(f_path)
767 # decoding here will force that we have proper encoded values
768 # in any other case this will throw exceptions and deny commit
769 if isinstance(content, (str,)):
770 content = safe_str(content)
771 elif isinstance(content, (file, cStringIO.OutputType,)):
772 content = content.read()
773 else:
774 raise Exception('Content is of unrecognized type %s' % (
775 type(content)
776 ))
777 processed_nodes.append((f_path, content))
778
779 message = safe_unicode(message)
768 message = safe_str(message)
780 769 commiter = user.full_contact
781 author = safe_unicode(author) if author else commiter
770 author = safe_str(author) if author else commiter
782 771
783 772 imc = scm_instance.in_memory_commit
784 773
785 774 if not parent_commit:
786 775 parent_commit = EmptyCommit(alias=scm_instance.alias)
787 776
788 777 if isinstance(parent_commit, EmptyCommit):
789 # EmptyCommit means we we're editing empty repository
778 # EmptyCommit means we're editing an empty repository
790 779 parents = None
791 780 else:
792 781 parents = [parent_commit]
782
783 upload_file_types = (io.BytesIO, io.BufferedRandom)
784 processed_nodes = []
785 for filename, content_dict in nodes.items():
786 if not isinstance(filename, bytes):
787 raise ValueError('filename key in nodes needs to be bytes')
788 content = content_dict['content']
789 if not isinstance(content, upload_file_types + (bytes,)):
790 raise ValueError(f'content key value in nodes needs to be bytes or one of {upload_file_types}')
791
792 for f_path in nodes:
793 f_path = self._sanitize_path(f_path)
794 content = nodes[f_path]['content']
795
796 # the type dispatch below ensures we only commit properly encoded byte values;
797 # in any other case this raises and denies the commit
798
799 if isinstance(content, bytes):
800 pass
801 elif isinstance(content, upload_file_types):
802 content = content.read()
803 else:
804 raise Exception(f'Content is of unrecognized type {type(content)}, expected bytes or {upload_file_types}')
805 processed_nodes.append((f_path, content))
806
793 807 # add multiple nodes
794 808 for path, content in processed_nodes:
795 809 imc.add(FileNode(path, content=content))
810
796 811 # TODO: handle pre push scenario
797 812 tip = imc.commit(message=message,
798 813 author=author,
799 814 parents=parents,
800 815 branch=parent_commit.branch)
801 816
802 817 self.mark_for_invalidation(repo.repo_name)
803 818 if trigger_push_hook:
804 819 hooks_utils.trigger_post_push_hook(
805 820 username=user.username, action='push_local',
806 821 repo_name=repo.repo_name, repo_type=scm_instance.alias,
807 822 hook_type='post_push',
808 823 commit_ids=[tip.raw_id])
809 824 return tip
810 825
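
A sketch of the `nodes` mapping `create_nodes` now expects: byte-string paths as keys, and content given either as bytes or as one of the file-like upload types checked above; `db_repo` and `user_id` are illustrative:

    nodes = {
        b'docs/intro.rst': {'content': b'Intro\n'},
        b'data/blob.bin': {'content': io.BytesIO(b'\x00\x01')},
    }
    tip = ScmModel().create_nodes(
        user=user_id, repo=db_repo, message='add docs', nodes=nodes)
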
811 826 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
812 827 author=None, trigger_push_hook=True):
813 828 user = self._get_user(user)
814 829 scm_instance = repo.scm_instance(cache=False)
815 830
816 message = safe_unicode(message)
831 message = safe_str(message)
817 832 commiter = user.full_contact
818 author = safe_unicode(author) if author else commiter
833 author = safe_str(author) if author else commiter
819 834
820 835 imc = scm_instance.in_memory_commit
821 836
822 837 if not parent_commit:
823 838 parent_commit = EmptyCommit(alias=scm_instance.alias)
824 839
825 840 if isinstance(parent_commit, EmptyCommit):
826 841 # EmptyCommit means we're editing an empty repository
827 842 parents = None
828 843 else:
829 844 parents = [parent_commit]
830 845
831 846 # add multiple nodes
832 847 for _filename, data in nodes.items():
833 848 # new filename, can be renamed from the old one; also sanitize
834 849 # the path against any hack around relative paths like ../../ etc.
835 850 filename = self._sanitize_path(data['filename'])
836 851 old_filename = self._sanitize_path(_filename)
837 852 content = data['content']
838 853 file_mode = data.get('mode')
839 854 filenode = FileNode(old_filename, content=content, mode=file_mode)
840 855 op = data['op']
841 856 if op == 'add':
842 857 imc.add(filenode)
843 858 elif op == 'del':
844 859 imc.remove(filenode)
845 860 elif op == 'mod':
846 861 if filename != old_filename:
847 862 # TODO: handle renames more efficiently, needs vcs lib changes
848 863 imc.remove(filenode)
849 864 imc.add(FileNode(filename, content=content, mode=file_mode))
850 865 else:
851 866 imc.change(filenode)
852 867
853 868 try:
854 869 # TODO: handle pre push scenario commit changes
855 870 tip = imc.commit(message=message,
856 871 author=author,
857 872 parents=parents,
858 873 branch=parent_commit.branch)
859 874 except NodeNotChangedError:
860 875 raise
861 876 except Exception as e:
862 877 log.exception("Unexpected exception during call to imc.commit")
863 878 raise IMCCommitError(str(e))
864 879 finally:
865 880 # always clear caches, if commit fails we want fresh object also
866 881 self.mark_for_invalidation(repo.repo_name)
867 882
868 883 if trigger_push_hook:
869 884 hooks_utils.trigger_post_push_hook(
870 885 username=user.username, action='push_local', hook_type='post_push',
871 886 repo_name=repo.repo_name, repo_type=scm_instance.alias,
872 887 commit_ids=[tip.raw_id])
873 888
874 889 return tip
875 890
876 891 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
877 892 author=None, trigger_push_hook=True):
878 893 """
879 894 Deletes given multiple nodes from `repo`
880 895
881 896 :param user: RhodeCode User object or user_id, the committer
882 897 :param repo: RhodeCode Repository object
883 898 :param message: commit message
884 899 :param nodes: mapping {filename:{'content':content},...}
885 900 :param parent_commit: parent commit; can be empty, then it's the initial
886 901 commit
887 902 :param author: author of commit, can be different than the committer, only
888 903 for git
889 904 :param trigger_push_hook: trigger push hooks
890 905
891 906 :returns: new commit after deletion
892 907 """
893 908
894 909 user = self._get_user(user)
895 910 scm_instance = repo.scm_instance(cache=False)
896 911
897 912 processed_nodes = []
898 913 for f_path in nodes:
899 914 f_path = self._sanitize_path(f_path)
900 # content can be empty but for compatabilty it allows same dicts
915 # content can be empty, but for compatibility it allows the same dict
901 916 # structure as add_nodes
902 917 content = nodes[f_path].get('content')
903 processed_nodes.append((f_path, content))
918 processed_nodes.append((safe_bytes(f_path), content))
904 919
905 message = safe_unicode(message)
920 message = safe_str(message)
906 921 commiter = user.full_contact
907 author = safe_unicode(author) if author else commiter
922 author = safe_str(author) if author else commiter
908 923
909 924 imc = scm_instance.in_memory_commit
910 925
911 926 if not parent_commit:
912 927 parent_commit = EmptyCommit(alias=scm_instance.alias)
913 928
914 929 if isinstance(parent_commit, EmptyCommit):
914 929 # EmptyCommit means we're editing an empty repository
916 931 parents = None
917 932 else:
918 933 parents = [parent_commit]
919 934 # add multiple nodes
920 935 for path, content in processed_nodes:
921 936 imc.remove(FileNode(path, content=content))
922 937
923 938 # TODO: handle pre push scenario
924 939 tip = imc.commit(message=message,
925 940 author=author,
926 941 parents=parents,
927 942 branch=parent_commit.branch)
928 943
929 944 self.mark_for_invalidation(repo.repo_name)
930 945 if trigger_push_hook:
931 946 hooks_utils.trigger_post_push_hook(
932 947 username=user.username, action='push_local', hook_type='post_push',
933 948 repo_name=repo.repo_name, repo_type=scm_instance.alias,
934 949 commit_ids=[tip.raw_id])
935 950 return tip
936 951
937 952 def strip(self, repo, commit_id, branch):
938 953 scm_instance = repo.scm_instance(cache=False)
939 954 scm_instance.config.clear_section('hooks')
940 955 scm_instance.strip(commit_id, branch)
941 956 self.mark_for_invalidation(repo.repo_name)
942 957
943 958 def get_unread_journal(self):
944 959 return self.sa.query(UserLog).count()
945 960
946 961 @classmethod
947 962 def backend_landing_ref(cls, repo_type):
948 963 """
949 964 Return a default landing ref based on a repository type.
950 965 """
951 966
952 967 landing_ref = {
953 968 'hg': ('branch:default', 'default'),
954 969 'git': ('branch:master', 'master'),
955 970 'svn': ('rev:tip', 'latest tip'),
956 971 'default': ('rev:tip', 'latest tip'),
957 972 }
958 973
959 974 return landing_ref.get(repo_type) or landing_ref['default']
960 975
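
The mapping above resolves directly; for instance:

    ScmModel.backend_landing_ref('hg')       # ('branch:default', 'default')
    ScmModel.backend_landing_ref('git')      # ('branch:master', 'master')
    ScmModel.backend_landing_ref('unknown')  # ('rev:tip', 'latest tip') fallback
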
961 976 def get_repo_landing_revs(self, translator, repo=None):
962 977 """
963 978 Generates select options with tags, branches and bookmarks (for hg only),
964 979 grouped by type
965 980
966 981 :param repo:
967 982 """
968 983 from rhodecode.lib.vcs.backends.git import GitRepository
969 984
970 985 _ = translator
971 986 repo = self._get_repo(repo)
972 987
973 988 if repo:
974 989 repo_type = repo.repo_type
975 990 else:
976 991 repo_type = 'default'
977 992
978 993 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
979 994
980 995 default_ref_options = [
981 996 [default_landing_ref, landing_ref_lbl]
982 997 ]
983 998 default_choices = [
984 999 default_landing_ref
985 1000 ]
986 1001
987 1002 if not repo:
988 1003 # presented at NEW repo creation
989 1004 return default_choices, default_ref_options
990 1005
991 1006 repo = repo.scm_instance()
992 1007
993 1008 ref_options = [(default_landing_ref, landing_ref_lbl)]
994 1009 choices = [default_landing_ref]
995 1010
996 1011 # branches
997 branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
1012 branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
998 1013 if not branch_group:
999 1014 # new repo, or without maybe a branch?
1000 1015 branch_group = default_ref_options
1001 1016
1002 1017 branches_group = (branch_group, _("Branches"))
1003 1018 ref_options.append(branches_group)
1004 1019 choices.extend([x[0] for x in branches_group[0]])
1005 1020
1006 1021 # bookmarks for HG
1007 1022 if repo.alias == 'hg':
1008 1023 bookmarks_group = (
1009 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
1024 [(f'book:{safe_str(b)}', safe_str(b))
1010 1025 for b in repo.bookmarks],
1011 1026 _("Bookmarks"))
1012 1027 ref_options.append(bookmarks_group)
1013 1028 choices.extend([x[0] for x in bookmarks_group[0]])
1014 1029
1015 1030 # tags
1016 1031 tags_group = (
1017 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
1032 [(f'tag:{safe_str(t)}', safe_str(t))
1018 1033 for t in repo.tags],
1019 1034 _("Tags"))
1020 1035 ref_options.append(tags_group)
1021 1036 choices.extend([x[0] for x in tags_group[0]])
1022 1037
1023 1038 return choices, ref_options
1024 1039
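
The shape of the data returned by `get_repo_landing_revs`, with illustrative values for an hg repository:

    choices, ref_options = ScmModel().get_repo_landing_revs(_, repo=db_repo)
    # choices -> ['branch:default', 'branch:default', 'branch:stable',
    #             'book:feature-x', 'tag:v1.0', ...]
    # ref_options -> [('branch:default', 'default'),
    #                 ([('branch:default', 'default'), ...], 'Branches'),
    #                 ([('book:feature-x', 'feature-x'), ...], 'Bookmarks'),
    #                 ([('tag:v1.0', 'v1.0'), ...], 'Tags')]
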
1025 1040 def get_server_info(self, environ=None):
1026 1041 server_info = get_system_info(environ)
1027 1042 return server_info
@@ -1,918 +1,924 b''
1 1
2 2 # Copyright (C) 2010-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 import os
21 21 import re
22 import hashlib
23 22 import logging
24 23 import time
25 24 import functools
26 25 import bleach
27 26 from collections import namedtuple
28 27
29 from pyramid.threadlocal import get_current_request, get_current_registry
28 from pyramid.threadlocal import get_current_request
30 29
31 30 from rhodecode.lib import rc_cache
31 from rhodecode.lib.hash_utils import sha1_safe
32 32 from rhodecode.lib.utils2 import (
33 33 Optional, AttributeDict, safe_str, remove_prefix, str2bool)
34 34 from rhodecode.lib.vcs.backends import base
35 35 from rhodecode.lib.statsd_client import StatsdClient
36 36 from rhodecode.model import BaseModel
37 37 from rhodecode.model.db import (
38 38 RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi, RhodeCodeSetting)
39 39 from rhodecode.model.meta import Session
40 40
41 41
42 42 log = logging.getLogger(__name__)
43 43
44 44
45 45 UiSetting = namedtuple(
46 46 'UiSetting', ['section', 'key', 'value', 'active'])
47 47
48 48 SOCIAL_PLUGINS_LIST = ['github', 'bitbucket', 'twitter', 'google']
49 49
50 50
51 51 class SettingNotFound(Exception):
52 52 def __init__(self, setting_id):
53 53 msg = 'Setting `{}` is not found'.format(setting_id)
54 54 super(SettingNotFound, self).__init__(msg)
55 55
56 56
57 57 class SettingsModel(BaseModel):
58 58 BUILTIN_HOOKS = (
59 59 RhodeCodeUi.HOOK_REPO_SIZE, RhodeCodeUi.HOOK_PUSH,
60 60 RhodeCodeUi.HOOK_PRE_PUSH, RhodeCodeUi.HOOK_PRETX_PUSH,
61 61 RhodeCodeUi.HOOK_PULL, RhodeCodeUi.HOOK_PRE_PULL,
62 62 RhodeCodeUi.HOOK_PUSH_KEY,)
63 63 HOOKS_SECTION = 'hooks'
64 64
65 65 def __init__(self, sa=None, repo=None):
66 66 self.repo = repo
67 67 self.UiDbModel = RepoRhodeCodeUi if repo else RhodeCodeUi
68 68 self.SettingsDbModel = (
69 69 RepoRhodeCodeSetting if repo else RhodeCodeSetting)
70 70 super(SettingsModel, self).__init__(sa)
71 71
72 72 def get_ui_by_key(self, key):
73 73 q = self.UiDbModel.query()
74 74 q = q.filter(self.UiDbModel.ui_key == key)
75 75 q = self._filter_by_repo(RepoRhodeCodeUi, q)
76 76 return q.scalar()
77 77
78 78 def get_ui_by_section(self, section):
79 79 q = self.UiDbModel.query()
80 80 q = q.filter(self.UiDbModel.ui_section == section)
81 81 q = self._filter_by_repo(RepoRhodeCodeUi, q)
82 82 return q.all()
83 83
84 84 def get_ui_by_section_and_key(self, section, key):
85 85 q = self.UiDbModel.query()
86 86 q = q.filter(self.UiDbModel.ui_section == section)
87 87 q = q.filter(self.UiDbModel.ui_key == key)
88 88 q = self._filter_by_repo(RepoRhodeCodeUi, q)
89 89 return q.scalar()
90 90
91 91 def get_ui(self, section=None, key=None):
92 92 q = self.UiDbModel.query()
93 93 q = self._filter_by_repo(RepoRhodeCodeUi, q)
94 94
95 95 if section:
96 96 q = q.filter(self.UiDbModel.ui_section == section)
97 97 if key:
98 98 q = q.filter(self.UiDbModel.ui_key == key)
99 99
100 100 # TODO: mikhail: add caching
101 101 result = [
102 102 UiSetting(
103 103 section=safe_str(r.ui_section), key=safe_str(r.ui_key),
104 104 value=safe_str(r.ui_value), active=r.ui_active
105 105 )
106 106 for r in q.all()
107 107 ]
108 108 return result
109 109
110 110 def get_builtin_hooks(self):
111 111 q = self.UiDbModel.query()
112 112 q = q.filter(self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
113 113 return self._get_hooks(q)
114 114
115 115 def get_custom_hooks(self):
116 116 q = self.UiDbModel.query()
117 117 q = q.filter(~self.UiDbModel.ui_key.in_(self.BUILTIN_HOOKS))
118 118 return self._get_hooks(q)
119 119
120 120 def create_ui_section_value(self, section, val, key=None, active=True):
121 121 new_ui = self.UiDbModel()
122 122 new_ui.ui_section = section
123 123 new_ui.ui_value = val
124 124 new_ui.ui_active = active
125 125
126 126 repository_id = ''
127 127 if self.repo:
128 128 repo = self._get_repo(self.repo)
129 129 repository_id = repo.repo_id
130 130 new_ui.repository_id = repository_id
131 131
132 132 if not key:
133 133 # keys are unique, so they need extra info appended
134 134 if self.repo:
135 key = hashlib.sha1(
136 '{}{}{}'.format(section, val, repository_id)).hexdigest()
135 key = sha1_safe(f'{section}{val}{repository_id}')
137 136 else:
138 key = hashlib.sha1('{}{}'.format(section, val)).hexdigest()
137 key = sha1_safe(f'{section}{val}')
139 138
140 139 new_ui.ui_key = key
141 140
142 141 Session().add(new_ui)
143 142 return new_ui
144 143
145 144 def create_or_update_hook(self, key, value):
146 145 ui = (
147 146 self.get_ui_by_section_and_key(self.HOOKS_SECTION, key) or
148 147 self.UiDbModel())
149 148 ui.ui_section = self.HOOKS_SECTION
150 149 ui.ui_active = True
151 150 ui.ui_key = key
152 151 ui.ui_value = value
153 152
154 153 if self.repo:
155 154 repo = self._get_repo(self.repo)
156 155 repository_id = repo.repo_id
157 156 ui.repository_id = repository_id
158 157
159 158 Session().add(ui)
160 159 return ui
161 160
162 161 def delete_ui(self, id_):
163 162 ui = self.UiDbModel.get(id_)
164 163 if not ui:
165 164 raise SettingNotFound(id_)
166 165 Session().delete(ui)
167 166
168 167 def get_setting_by_name(self, name):
169 168 q = self._get_settings_query()
170 169 q = q.filter(self.SettingsDbModel.app_settings_name == name)
171 170 return q.scalar()
172 171
173 172 def create_or_update_setting(
174 173 self, name, val=Optional(''), type_=Optional('unicode')):
175 174 """
176 175 Creates or updates a RhodeCode setting. If an update is triggered it will
177 176 only update parameters that are explicitly set; Optional instances will
178 177 be skipped
179 178
180 179 :param name:
181 180 :param val:
182 181 :param type_:
183 182 :return:
184 183 """
185 184
186 185 res = self.get_setting_by_name(name)
187 186 repo = self._get_repo(self.repo) if self.repo else None
188 187
189 188 if not res:
190 189 val = Optional.extract(val)
191 190 type_ = Optional.extract(type_)
192 191
193 192 args = (
194 193 (repo.repo_id, name, val, type_)
195 194 if repo else (name, val, type_))
196 195 res = self.SettingsDbModel(*args)
197 196
198 197 else:
199 198 if self.repo:
200 199 res.repository_id = repo.repo_id
201 200
202 201 res.app_settings_name = name
203 202 if not isinstance(type_, Optional):
204 203 # update if set
205 204 res.app_settings_type = type_
206 205 if not isinstance(val, Optional):
207 206 # update if set
208 207 res.app_settings_value = val
209 208
210 209 Session().add(res)
211 210 return res
212 211
213 212 def get_cache_region(self):
214 213 repo = self._get_repo(self.repo) if self.repo else None
215 cache_key = "repo.{}".format(repo.repo_id) if repo else "general_settings"
216 cache_namespace_uid = 'cache_settings.{}'.format(cache_key)
214 cache_key = f"repo.{repo.repo_id}" if repo else "repo.ALL"
215 cache_namespace_uid = f'cache_settings.{cache_key}'
217 216 region = rc_cache.get_or_create_region('cache_general', cache_namespace_uid)
218 return region, cache_key
219
220 def invalidate_settings_cache(self):
221 region, cache_key = self.get_cache_region()
222 log.debug('Invalidation cache region %s for cache_key: %s', region, cache_key)
223 region.invalidate()
217 return region, cache_namespace_uid
224 218
225 def get_all_settings(self, cache=False, from_request=True):
226 # defines if we use GLOBAL, or PER_REPO
227 repo = self._get_repo(self.repo) if self.repo else None
219 def invalidate_settings_cache(self, hard=False):
220 region, namespace_key = self.get_cache_region()
221 log.debug('Invalidation cache [%s] region %s for cache_key: %s',
222 'invalidate_settings_cache', region, namespace_key)
228 223
229 # initially try the requests context, this is the fastest
230 # we only fetch global config
231 if from_request:
232 request = get_current_request()
224 # we use hard cleanup if invalidation is sent
225 rc_cache.clear_cache_namespace(region, namespace_key, method=rc_cache.CLEAR_DELETE)
233 226
234 if request and not repo and hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
235 rc_config = request.call_context.rc_config
236 if rc_config:
237 return rc_config
238
227 def get_cache_call_method(self, cache=True):
239 228 region, cache_key = self.get_cache_region()
240 229
241 230 @region.conditional_cache_on_arguments(condition=cache)
242 231 def _get_all_settings(name, key):
243 232 q = self._get_settings_query()
244 233 if not q:
245 234 raise Exception('Could not get application settings !')
246 235
247 236 settings = {
248 'rhodecode_' + res.app_settings_name: res.app_settings_value
237 f'rhodecode_{res.app_settings_name}': res.app_settings_value
249 238 for res in q
250 239 }
251 240 return settings
241 return _get_all_settings
242
243 def get_all_settings(self, cache=False, from_request=True):
244 # defines if we use GLOBAL, or PER_REPO
245 repo = self._get_repo(self.repo) if self.repo else None
246
247 248 # initially try the request's context, this is the fastest;
248 249 # we only fetch the global config here, NOT the repo-specific one
249 if from_request and not repo:
250 request = get_current_request()
251
252 if request and hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
253 rc_config = request.call_context.rc_config
254 if rc_config:
255 return rc_config
256
257 _region, cache_key = self.get_cache_region()
258 _get_all_settings = self.get_cache_call_method(cache=cache)
252 259
253 260 start = time.time()
254 261 result = _get_all_settings('rhodecode_settings', cache_key)
255 262 compute_time = time.time() - start
256 263 log.debug('cached method:%s took %.4fs', _get_all_settings.__name__, compute_time)
257 264
258 265 statsd = StatsdClient.statsd
259 266 if statsd:
260 267 elapsed_time_ms = round(1000.0 * compute_time) # use ms only
261 268 statsd.timing("rhodecode_settings_timing.histogram", elapsed_time_ms,
262 269 use_decimals=False)
263 270
264 271 log.debug('Fetching app settings for key: %s took: %.4fs: cache: %s', cache_key, compute_time, cache)
265 272
266 273 return result
267 274
268 275 def get_auth_settings(self):
269 276 q = self._get_settings_query()
270 277 q = q.filter(
271 278 self.SettingsDbModel.app_settings_name.startswith('auth_'))
272 279 rows = q.all()
273 280 auth_settings = {
274 281 row.app_settings_name: row.app_settings_value for row in rows}
275 282 return auth_settings
276 283
277 284 def get_auth_plugins(self):
278 285 auth_plugins = self.get_setting_by_name("auth_plugins")
279 286 return auth_plugins.app_settings_value
280 287
281 288 def get_default_repo_settings(self, strip_prefix=False):
282 289 q = self._get_settings_query()
283 290 q = q.filter(
284 291 self.SettingsDbModel.app_settings_name.startswith('default_'))
285 292 rows = q.all()
286 293
287 294 result = {}
288 295 for row in rows:
289 296 key = row.app_settings_name
290 297 if strip_prefix:
291 298 key = remove_prefix(key, prefix='default_')
292 299 result.update({key: row.app_settings_value})
293 300 return result
294 301
295 302 def get_repo(self):
296 303 repo = self._get_repo(self.repo)
297 304 if not repo:
298 305 raise Exception(
299 306 'Repository `{}` cannot be found inside the database'.format(
300 307 self.repo))
301 308 return repo
302 309
303 310 def _filter_by_repo(self, model, query):
304 311 if self.repo:
305 312 repo = self.get_repo()
306 313 query = query.filter(model.repository_id == repo.repo_id)
307 314 return query
308 315
309 316 def _get_hooks(self, query):
310 317 query = query.filter(self.UiDbModel.ui_section == self.HOOKS_SECTION)
311 318 query = self._filter_by_repo(RepoRhodeCodeUi, query)
312 319 return query.all()
313 320
314 321 def _get_settings_query(self):
315 322 q = self.SettingsDbModel.query()
316 323 return self._filter_by_repo(RepoRhodeCodeSetting, q)
317 324
318 325 def list_enabled_social_plugins(self, settings):
319 326 enabled = []
320 327 for plug in SOCIAL_PLUGINS_LIST:
321 if str2bool(settings.get('rhodecode_auth_{}_enabled'.format(plug)
322 )):
328 if str2bool(settings.get(f'rhodecode_auth_{plug}_enabled')):
323 329 enabled.append(plug)
324 330 return enabled
325 331
326 332
327 333 def assert_repo_settings(func):
328 334 @functools.wraps(func)
329 335 def _wrapper(self, *args, **kwargs):
330 336 if not self.repo_settings:
331 337 raise Exception('Repository is not specified')
332 338 return func(self, *args, **kwargs)
333 339 return _wrapper
334 340
335 341
336 342 class IssueTrackerSettingsModel(object):
337 343 INHERIT_SETTINGS = 'inherit_issue_tracker_settings'
338 344 SETTINGS_PREFIX = 'issuetracker_'
339 345
340 346 def __init__(self, sa=None, repo=None):
341 347 self.global_settings = SettingsModel(sa=sa)
342 348 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
343 349
344 350 @property
345 351 def inherit_global_settings(self):
346 352 if not self.repo_settings:
347 353 return True
348 354 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
349 355 return setting.app_settings_value if setting else True
350 356
351 357 @inherit_global_settings.setter
352 358 def inherit_global_settings(self, value):
353 359 if self.repo_settings:
354 360 settings = self.repo_settings.create_or_update_setting(
355 361 self.INHERIT_SETTINGS, value, type_='bool')
356 362 Session().add(settings)
357 363
358 364 def _get_keyname(self, key, uid, prefix=''):
359 365 return '{0}{1}{2}_{3}'.format(
360 366 prefix, self.SETTINGS_PREFIX, key, uid)
361 367
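
Illustrative keys produced by `_get_keyname` for an entry with uid `'123'`:

    self._get_keyname('pat', '123')                # 'issuetracker_pat_123'
    self._get_keyname('pat', '123', 'rhodecode_')  # 'rhodecode_issuetracker_pat_123'
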
362 368 def _make_dict_for_settings(self, qs):
363 369 prefix_match = self._get_keyname('pat', '', 'rhodecode_')
364 370
365 371 issuetracker_entries = {}
366 372 # create keys
367 373 for k, v in qs.items():
368 374 if k.startswith(prefix_match):
369 375 uid = k[len(prefix_match):]
370 376 issuetracker_entries[uid] = None
371 377
372 378 def url_cleaner(input_str):
373 379 input_str = input_str.replace('"', '').replace("'", '')
374 380 input_str = bleach.clean(input_str, strip=True)
375 381 return input_str
376 382
377 383 # populate
378 384 for uid in issuetracker_entries:
379 385 url_data = qs.get(self._get_keyname('url', uid, 'rhodecode_'))
380 386
381 387 pat = qs.get(self._get_keyname('pat', uid, 'rhodecode_'))
382 388 try:
383 389 pat_compiled = re.compile(r'%s' % pat)
384 390 except re.error:
385 391 pat_compiled = None
386 392
387 393 issuetracker_entries[uid] = AttributeDict({
388 394 'pat': pat,
389 395 'pat_compiled': pat_compiled,
390 396 'url': url_cleaner(
391 397 qs.get(self._get_keyname('url', uid, 'rhodecode_')) or ''),
392 398 'pref': bleach.clean(
393 399 qs.get(self._get_keyname('pref', uid, 'rhodecode_')) or ''),
394 400 'desc': qs.get(
395 401 self._get_keyname('desc', uid, 'rhodecode_')),
396 402 })
397 403
398 404 return issuetracker_entries
399 405
400 406 def get_global_settings(self, cache=False):
401 407 """
402 408 Returns list of global issue tracker settings
403 409 """
404 410 defaults = self.global_settings.get_all_settings(cache=cache)
405 411 settings = self._make_dict_for_settings(defaults)
406 412 return settings
407 413
408 414 def get_repo_settings(self, cache=False):
409 415 """
410 416 Returns list of issue tracker settings per repository
411 417 """
412 418 if not self.repo_settings:
413 419 raise Exception('Repository is not specified')
414 420 all_settings = self.repo_settings.get_all_settings(cache=cache)
415 421 settings = self._make_dict_for_settings(all_settings)
416 422 return settings
417 423
418 424 def get_settings(self, cache=False):
419 425 if self.inherit_global_settings:
420 426 return self.get_global_settings(cache=cache)
421 427 else:
422 428 return self.get_repo_settings(cache=cache)
423 429
424 430 def delete_entries(self, uid):
425 431 if self.repo_settings:
426 432 all_patterns = self.get_repo_settings()
427 433 settings_model = self.repo_settings
428 434 else:
429 435 all_patterns = self.get_global_settings()
430 436 settings_model = self.global_settings
431 437 entries = all_patterns.get(uid, [])
432 438
433 439 for del_key in entries:
434 440 setting_name = self._get_keyname(del_key, uid)
435 441 entry = settings_model.get_setting_by_name(setting_name)
436 442 if entry:
437 443 Session().delete(entry)
438 444
439 445 Session().commit()
440 446
441 447 def create_or_update_setting(
442 448 self, name, val=Optional(''), type_=Optional('unicode')):
443 449 if self.repo_settings:
444 450 setting = self.repo_settings.create_or_update_setting(
445 451 name, val, type_)
446 452 else:
447 453 setting = self.global_settings.create_or_update_setting(
448 454 name, val, type_)
449 455 return setting
450 456
451 457
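A hedged usage sketch of the issue tracker model above; the repository name is hypothetical and a configured database session is assumed:

    model = IssueTrackerSettingsModel(repo='some-repo')
    # get_settings() picks repo or global entries based on the inherit flag
    entries = model.get_settings()
    for uid, entry in entries.items():
        # each entry carries pat / pat_compiled / url / pref / desc
        print(uid, entry.pat, entry.url)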
452 458 class VcsSettingsModel(object):
453 459
454 460 INHERIT_SETTINGS = 'inherit_vcs_settings'
455 461 GENERAL_SETTINGS = (
456 462 'use_outdated_comments',
457 463 'pr_merge_enabled',
458 464 'hg_use_rebase_for_merging',
459 465 'hg_close_branch_before_merging',
460 466 'git_use_rebase_for_merging',
461 467 'git_close_branch_before_merging',
462 468 'diff_cache',
463 469 )
464 470
465 471 HOOKS_SETTINGS = (
466 472 ('hooks', 'changegroup.repo_size'),
467 473 ('hooks', 'changegroup.push_logger'),
468 474 ('hooks', 'outgoing.pull_logger'),
469 475 )
470 476 HG_SETTINGS = (
471 477 ('extensions', 'largefiles'),
472 478 ('phases', 'publish'),
473 479 ('extensions', 'evolve'),
474 480 ('extensions', 'topic'),
475 481 ('experimental', 'evolution'),
476 482 ('experimental', 'evolution.exchange'),
477 483 )
478 484 GIT_SETTINGS = (
479 485 ('vcs_git_lfs', 'enabled'),
480 486 )
481 487 GLOBAL_HG_SETTINGS = (
482 488 ('extensions', 'largefiles'),
483 489 ('largefiles', 'usercache'),
484 490 ('phases', 'publish'),
485 491 ('extensions', 'hgsubversion'),
486 492 ('extensions', 'evolve'),
487 493 ('extensions', 'topic'),
488 494 ('experimental', 'evolution'),
489 495 ('experimental', 'evolution.exchange'),
490 496 )
491 497
492 498 GLOBAL_GIT_SETTINGS = (
493 499 ('vcs_git_lfs', 'enabled'),
494 500 ('vcs_git_lfs', 'store_location')
495 501 )
496 502
497 503 GLOBAL_SVN_SETTINGS = (
498 504 ('vcs_svn_proxy', 'http_requests_enabled'),
499 505 ('vcs_svn_proxy', 'http_server_url')
500 506 )
501 507
502 508 SVN_BRANCH_SECTION = 'vcs_svn_branch'
503 509 SVN_TAG_SECTION = 'vcs_svn_tag'
504 510 SSL_SETTING = ('web', 'push_ssl')
505 511 PATH_SETTING = ('paths', '/')
506 512
507 513 def __init__(self, sa=None, repo=None):
508 514 self.global_settings = SettingsModel(sa=sa)
509 515 self.repo_settings = SettingsModel(sa=sa, repo=repo) if repo else None
510 516 self._ui_settings = (
511 517 self.HG_SETTINGS + self.GIT_SETTINGS + self.HOOKS_SETTINGS)
512 518 self._svn_sections = (self.SVN_BRANCH_SECTION, self.SVN_TAG_SECTION)
513 519
514 520 @property
515 521 @assert_repo_settings
516 522 def inherit_global_settings(self):
517 523 setting = self.repo_settings.get_setting_by_name(self.INHERIT_SETTINGS)
518 524 return setting.app_settings_value if setting else True
519 525
520 526 @inherit_global_settings.setter
521 527 @assert_repo_settings
522 528 def inherit_global_settings(self, value):
523 529 self.repo_settings.create_or_update_setting(
524 530 self.INHERIT_SETTINGS, value, type_='bool')
525 531
526 532 def get_global_svn_branch_patterns(self):
527 533 return self.global_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
528 534
529 535 @assert_repo_settings
530 536 def get_repo_svn_branch_patterns(self):
531 537 return self.repo_settings.get_ui_by_section(self.SVN_BRANCH_SECTION)
532 538
533 539 def get_global_svn_tag_patterns(self):
534 540 return self.global_settings.get_ui_by_section(self.SVN_TAG_SECTION)
535 541
536 542 @assert_repo_settings
537 543 def get_repo_svn_tag_patterns(self):
538 544 return self.repo_settings.get_ui_by_section(self.SVN_TAG_SECTION)
539 545
540 546 def get_global_settings(self):
541 547 return self._collect_all_settings(global_=True)
542 548
543 549 @assert_repo_settings
544 550 def get_repo_settings(self):
545 551 return self._collect_all_settings(global_=False)
546 552
547 553 @assert_repo_settings
548 554 def get_repo_settings_inherited(self):
549 555 global_settings = self.get_global_settings()
550 556 global_settings.update(self.get_repo_settings())
551 557 return global_settings
552 558
553 559 @assert_repo_settings
554 560 def create_or_update_repo_settings(
555 561 self, data, inherit_global_settings=False):
556 562 from rhodecode.model.scm import ScmModel
557 563
558 564 self.inherit_global_settings = inherit_global_settings
559 565
560 566 repo = self.repo_settings.get_repo()
561 567 if not inherit_global_settings:
562 568 if repo.repo_type == 'svn':
563 569 self.create_repo_svn_settings(data)
564 570 else:
565 571 self.create_or_update_repo_hook_settings(data)
566 572 self.create_or_update_repo_pr_settings(data)
567 573
568 574 if repo.repo_type == 'hg':
569 575 self.create_or_update_repo_hg_settings(data)
570 576
571 577 if repo.repo_type == 'git':
572 578 self.create_or_update_repo_git_settings(data)
573 579
574 580 ScmModel().mark_for_invalidation(repo.repo_name, delete=True)
575 581
576 582 @assert_repo_settings
577 583 def create_or_update_repo_hook_settings(self, data):
578 584 for section, key in self.HOOKS_SETTINGS:
579 585 data_key = self._get_form_ui_key(section, key)
580 586 if data_key not in data:
581 587 raise ValueError(
582 588 'The given data does not contain {} key'.format(data_key))
583 589
584 590 active = data.get(data_key)
585 591 repo_setting = self.repo_settings.get_ui_by_section_and_key(
586 592 section, key)
587 593 if not repo_setting:
588 594 global_setting = self.global_settings.\
589 595 get_ui_by_section_and_key(section, key)
590 596 self.repo_settings.create_ui_section_value(
591 597 section, global_setting.ui_value, key=key, active=active)
592 598 else:
593 599 repo_setting.ui_active = active
594 600 Session().add(repo_setting)
595 601
596 602 def update_global_hook_settings(self, data):
597 603 for section, key in self.HOOKS_SETTINGS:
598 604 data_key = self._get_form_ui_key(section, key)
599 605 if data_key not in data:
600 606 raise ValueError(
601 607 'The given data does not contain {} key'.format(data_key))
602 608 active = data.get(data_key)
603 609 repo_setting = self.global_settings.get_ui_by_section_and_key(
604 610 section, key)
605 611 repo_setting.ui_active = active
606 612 Session().add(repo_setting)
607 613
608 614 @assert_repo_settings
609 615 def create_or_update_repo_pr_settings(self, data):
610 616 return self._create_or_update_general_settings(
611 617 self.repo_settings, data)
612 618
613 619 def create_or_update_global_pr_settings(self, data):
614 620 return self._create_or_update_general_settings(
615 621 self.global_settings, data)
616 622
617 623 @assert_repo_settings
618 624 def create_repo_svn_settings(self, data):
619 625 return self._create_svn_settings(self.repo_settings, data)
620 626
621 627 def _set_evolution(self, settings, is_enabled):
622 628 if is_enabled:
623 629 # if evolve is active, set evolution=all
624 630
625 631 self._create_or_update_ui(
626 632 settings, *('experimental', 'evolution'), value='all',
627 633 active=True)
628 634 self._create_or_update_ui(
629 635 settings, *('experimental', 'evolution.exchange'), value='yes',
630 636 active=True)
631 637 # if evolve is active, enable server-side topic support
632 638 self._create_or_update_ui(
633 639 settings, *('extensions', 'topic'), value='',
634 640 active=True)
635 641
636 642 else:
637 643 self._create_or_update_ui(
638 644 settings, *('experimental', 'evolution'), value='',
639 645 active=False)
640 646 self._create_or_update_ui(
641 647 settings, *('experimental', 'evolution.exchange'), value='no',
642 648 active=False)
643 649 self._create_or_update_ui(
644 650 settings, *('extensions', 'topic'), value='',
645 651 active=False)
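For reference, the ui rows that `_set_evolution` writes, expressed as (section, key, value, active) tuples derived directly from the calls above:

    EVOLUTION_ON = [
        ('experimental', 'evolution', 'all', True),
        ('experimental', 'evolution.exchange', 'yes', True),
        ('extensions', 'topic', '', True),
    ]
    EVOLUTION_OFF = [
        ('experimental', 'evolution', '', False),
        ('experimental', 'evolution.exchange', 'no', False),
        ('extensions', 'topic', '', False),
    ]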
646 652
647 653 @assert_repo_settings
648 654 def create_or_update_repo_hg_settings(self, data):
649 655 largefiles, phases, evolve = \
650 656 self.HG_SETTINGS[:3]
651 657 largefiles_key, phases_key, evolve_key = \
652 658 self._get_settings_keys(self.HG_SETTINGS[:3], data)
653 659
654 660 self._create_or_update_ui(
655 661 self.repo_settings, *largefiles, value='',
656 662 active=data[largefiles_key])
657 663 self._create_or_update_ui(
658 664 self.repo_settings, *evolve, value='',
659 665 active=data[evolve_key])
660 666 self._set_evolution(self.repo_settings, is_enabled=data[evolve_key])
661 667
662 668 self._create_or_update_ui(
663 669 self.repo_settings, *phases, value=safe_str(data[phases_key]))
664 670
665 671 def create_or_update_global_hg_settings(self, data):
666 672 largefiles, largefiles_store, phases, hgsubversion, evolve \
667 673 = self.GLOBAL_HG_SETTINGS[:5]
668 674 largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \
669 675 = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:5], data)
670 676
671 677 self._create_or_update_ui(
672 678 self.global_settings, *largefiles, value='',
673 679 active=data[largefiles_key])
674 680 self._create_or_update_ui(
675 681 self.global_settings, *largefiles_store, value=data[largefiles_store_key])
676 682 self._create_or_update_ui(
677 683 self.global_settings, *phases, value=safe_str(data[phases_key]))
678 684 self._create_or_update_ui(
679 685 self.global_settings, *hgsubversion, active=data[subversion_key])
680 686 self._create_or_update_ui(
681 687 self.global_settings, *evolve, value='',
682 688 active=data[evolve_key])
683 689 self._set_evolution(self.global_settings, is_enabled=data[evolve_key])
684 690
685 691 def create_or_update_repo_git_settings(self, data):
686 692 # NOTE(marcink): the trailing comma makes the unpack work properly
687 693 lfs_enabled, \
688 694 = self.GIT_SETTINGS
689 695
690 696 lfs_enabled_key, \
691 697 = self._get_settings_keys(self.GIT_SETTINGS, data)
692 698
693 699 self._create_or_update_ui(
694 700 self.repo_settings, *lfs_enabled, value=data[lfs_enabled_key],
695 701 active=data[lfs_enabled_key])
696 702
697 703 def create_or_update_global_git_settings(self, data):
698 704 lfs_enabled, lfs_store_location \
699 705 = self.GLOBAL_GIT_SETTINGS
700 706 lfs_enabled_key, lfs_store_location_key \
701 707 = self._get_settings_keys(self.GLOBAL_GIT_SETTINGS, data)
702 708
703 709 self._create_or_update_ui(
704 710 self.global_settings, *lfs_enabled, value=data[lfs_enabled_key],
705 711 active=data[lfs_enabled_key])
706 712 self._create_or_update_ui(
707 713 self.global_settings, *lfs_store_location,
708 714 value=data[lfs_store_location_key])
709 715
710 716 def create_or_update_global_svn_settings(self, data):
711 717 # branch/tags patterns
712 718 self._create_svn_settings(self.global_settings, data)
713 719
714 720 http_requests_enabled, http_server_url = self.GLOBAL_SVN_SETTINGS
715 721 http_requests_enabled_key, http_server_url_key = self._get_settings_keys(
716 722 self.GLOBAL_SVN_SETTINGS, data)
717 723
718 724 self._create_or_update_ui(
719 725 self.global_settings, *http_requests_enabled,
720 726 value=safe_str(data[http_requests_enabled_key]))
721 727 self._create_or_update_ui(
722 728 self.global_settings, *http_server_url,
723 729 value=data[http_server_url_key])
724 730
725 731 def update_global_ssl_setting(self, value):
726 732 self._create_or_update_ui(
727 733 self.global_settings, *self.SSL_SETTING, value=value)
728 734
729 735 def update_global_path_setting(self, value):
730 736 self._create_or_update_ui(
731 737 self.global_settings, *self.PATH_SETTING, value=value)
732 738
733 739 @assert_repo_settings
734 740 def delete_repo_svn_pattern(self, id_):
735 741 ui = self.repo_settings.UiDbModel.get(id_)
736 742 if ui and ui.repository.repo_name == self.repo_settings.repo:
737 743 # only delete if it's the same repo as initialized settings
738 744 self.repo_settings.delete_ui(id_)
739 745 else:
740 746 # raise an error as if we couldn't find this option
741 747 self.repo_settings.delete_ui(-1)
742 748
743 749 def delete_global_svn_pattern(self, id_):
744 750 self.global_settings.delete_ui(id_)
745 751
746 752 @assert_repo_settings
747 753 def get_repo_ui_settings(self, section=None, key=None):
748 754 global_uis = self.global_settings.get_ui(section, key)
749 755 repo_uis = self.repo_settings.get_ui(section, key)
750 756
751 757 filtered_repo_uis = self._filter_ui_settings(repo_uis)
752 758 filtered_repo_uis_keys = [
753 759 (s.section, s.key) for s in filtered_repo_uis]
754 760
755 761 def _is_global_ui_filtered(ui):
756 762 return (
757 763 (ui.section, ui.key) in filtered_repo_uis_keys
758 764 or ui.section in self._svn_sections)
759 765
760 766 filtered_global_uis = [
761 767 ui for ui in global_uis if not _is_global_ui_filtered(ui)]
762 768
763 769 return filtered_global_uis + filtered_repo_uis
764 770
765 771 def get_global_ui_settings(self, section=None, key=None):
766 772 return self.global_settings.get_ui(section, key)
767 773
768 774 def get_ui_settings_as_config_obj(self, section=None, key=None):
769 775 config = base.Config()
770 776
771 777 ui_settings = self.get_ui_settings(section=section, key=key)
772 778
773 779 for entry in ui_settings:
774 780 config.set(entry.section, entry.key, entry.value)
775 781
776 782 return config
777 783
778 784 def get_ui_settings(self, section=None, key=None):
779 785 if not self.repo_settings or self.inherit_global_settings:
780 786 return self.get_global_ui_settings(section, key)
781 787 else:
782 788 return self.get_repo_ui_settings(section, key)
783 789
784 790 def get_svn_patterns(self, section=None):
785 791 if not self.repo_settings:
786 792 return self.get_global_ui_settings(section)
787 793 else:
788 794 return self.get_repo_ui_settings(section)
789 795
790 796 @assert_repo_settings
791 797 def get_repo_general_settings(self):
792 798 global_settings = self.global_settings.get_all_settings()
793 799 repo_settings = self.repo_settings.get_all_settings()
794 800 filtered_repo_settings = self._filter_general_settings(repo_settings)
795 801 global_settings.update(filtered_repo_settings)
796 802 return global_settings
797 803
798 804 def get_global_general_settings(self):
799 805 return self.global_settings.get_all_settings()
800 806
801 807 def get_general_settings(self):
802 808 if not self.repo_settings or self.inherit_global_settings:
803 809 return self.get_global_general_settings()
804 810 else:
805 811 return self.get_repo_general_settings()
806 812
807 813 def get_repos_location(self):
808 814 return self.global_settings.get_ui_by_key('/').ui_value
809 815
810 816 def _filter_ui_settings(self, settings):
811 817 filtered_settings = [
812 818 s for s in settings if self._should_keep_setting(s)]
813 819 return filtered_settings
814 820
815 821 def _should_keep_setting(self, setting):
816 822 keep = (
817 823 (setting.section, setting.key) in self._ui_settings or
818 824 setting.section in self._svn_sections)
819 825 return keep
820 826
821 827 def _filter_general_settings(self, settings):
822 828 keys = ['rhodecode_{}'.format(key) for key in self.GENERAL_SETTINGS]
823 829 return {
824 830 k: settings[k]
825 831 for k in settings if k in keys}
826 832
827 833 def _collect_all_settings(self, global_=False):
828 834 settings = self.global_settings if global_ else self.repo_settings
829 835 result = {}
830 836
831 837 for section, key in self._ui_settings:
832 838 ui = settings.get_ui_by_section_and_key(section, key)
833 839 result_key = self._get_form_ui_key(section, key)
834 840
835 841 if ui:
836 842 if section in ('hooks', 'extensions'):
837 843 result[result_key] = ui.ui_active
838 844 elif result_key in ['vcs_git_lfs_enabled']:
839 845 result[result_key] = ui.ui_active
840 846 else:
841 847 result[result_key] = ui.ui_value
842 848
843 849 for name in self.GENERAL_SETTINGS:
844 850 setting = settings.get_setting_by_name(name)
845 851 if setting:
846 852 result_key = 'rhodecode_{}'.format(name)
847 853 result[result_key] = setting.app_settings_value
848 854
849 855 return result
850 856
851 857 def _get_form_ui_key(self, section, key):
852 858 return '{section}_{key}'.format(
853 859 section=section, key=key.replace('.', '_'))
854 860
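Two examples of the mapping `_get_form_ui_key` implements, derived from its own logic:

    # ('hooks', 'changegroup.repo_size') -> 'hooks_changegroup_repo_size'
    # ('vcs_git_lfs', 'enabled')         -> 'vcs_git_lfs_enabled'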
855 861 def _create_or_update_ui(
856 862 self, settings, section, key, value=None, active=None):
857 863 ui = settings.get_ui_by_section_and_key(section, key)
858 864 if not ui:
859 865 active = True if active is None else active
860 866 settings.create_ui_section_value(
861 867 section, value, key=key, active=active)
862 868 else:
863 869 if active is not None:
864 870 ui.ui_active = active
865 871 if value is not None:
866 872 ui.ui_value = value
867 873 Session().add(ui)
868 874
869 875 def _create_svn_settings(self, settings, data):
870 876 svn_settings = {
871 877 'new_svn_branch': self.SVN_BRANCH_SECTION,
872 878 'new_svn_tag': self.SVN_TAG_SECTION
873 879 }
874 880 for key in svn_settings:
875 881 if data.get(key):
876 882 settings.create_ui_section_value(svn_settings[key], data[key])
877 883
878 884 def _create_or_update_general_settings(self, settings, data):
879 885 for name in self.GENERAL_SETTINGS:
880 886 data_key = 'rhodecode_{}'.format(name)
881 887 if data_key not in data:
882 888 raise ValueError(
883 889 'The given data does not contain {} key'.format(data_key))
884 890 setting = settings.create_or_update_setting(
885 891 name, data[data_key], 'bool')
886 892 Session().add(setting)
887 893
888 894 def _get_settings_keys(self, settings, data):
889 895 data_keys = [self._get_form_ui_key(*s) for s in settings]
890 896 for data_key in data_keys:
891 897 if data_key not in data:
892 898 raise ValueError(
893 899 'The given data does not contain {} key'.format(data_key))
894 900 return data_keys
895 901
896 902 def create_largeobjects_dirs_if_needed(self, repo_store_path):
897 903 """
898 904 This is subscribed to the `pyramid.events.ApplicationCreated` event. It
899 905 creates the largefiles/LFS store directories if they do not exist yet.
900 906 """
901 907
902 908 from rhodecode.lib.vcs.backends.hg import largefiles_store
903 909 from rhodecode.lib.vcs.backends.git import lfs_store
904 910
905 911 paths = [
906 912 largefiles_store(repo_store_path),
907 913 lfs_store(repo_store_path)]
908 914
909 915 for path in paths:
910 916 if os.path.isdir(path):
911 917 continue
912 918 if os.path.isfile(path):
913 919 continue
914 920 # neither a file nor a dir; try to create it
915 921 try:
916 922 os.makedirs(path)
917 923 except Exception:
918 924 log.warning('Failed to create largefiles dir: %s', path)
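
A hedged usage sketch of the settings resolution this class implements; the repository name is hypothetical:

    model = VcsSettingsModel(repo='some-repo')
    # inherit_global_settings decides whether repo or global values win
    ui = model.get_ui_settings(section='hooks')
    general = model.get_general_settings()
    config = model.get_ui_settings_as_config_obj()  # as a Config object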
@@ -1,144 +1,148 b''
1 1
2 2
3 3 # Copyright (C) 2013-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import traceback
23 23
24 24 import sshpubkeys
25 25 import sshpubkeys.exceptions
26 26
27 27 from cryptography.hazmat.primitives.asymmetric import rsa
28 28 from cryptography.hazmat.primitives import serialization as crypto_serialization
29 29 from cryptography.hazmat.backends import default_backend as crypto_default_backend
30 30
31 from rhodecode.lib.str_utils import safe_bytes, safe_str
31 32 from rhodecode.model import BaseModel
32 33 from rhodecode.model.db import UserSshKeys
33 34 from rhodecode.model.meta import Session
34 35
35 36
36 37 log = logging.getLogger(__name__)
37 38
38 39
39 40 class SshKeyModel(BaseModel):
40 41 cls = UserSshKeys
41 42 DEFAULT_PRIVATE_KEY_FORMAT = 'pkcs8'
42 43
43 44 def parse_key(self, key_data):
44 45 """
45 46 print(ssh.bits) # 768
46 47 print(ssh.hash_md5()) # 56:84:1e:90:08:3b:60:c7:29:70:5f:5e:25:a6:3b:86
47 48 print(ssh.hash_sha256()) # SHA256:xk3IEJIdIoR9MmSRXTP98rjDdZocmXJje/28ohMQEwM
48 49 print(ssh.hash_sha512()) # SHA512:1C3lNBhjpDVQe39hnyy+xvlZYU3IPwzqK1rVneGavy6O3/ebjEQSFvmeWoyMTplIanmUK1hmr9nA8Skmj516HA
49 50 print(ssh.comment) # ojar@ojar-laptop
50 51 print(ssh.options_raw) # None (string of optional options at the beginning of public key)
51 52 print(ssh.options) # None (options as a dictionary, parsed and validated)
52 53
53 54 :param key_data: public key material as text
54 55 :return: a parsed sshpubkeys.SSHKey instance
55 56 """
56 57 ssh = sshpubkeys.SSHKey(strict_mode=True)
57 58 try:
58 59 ssh.parse(key_data)
59 60 return ssh
60 61 except sshpubkeys.exceptions.InvalidKeyException as err:
61 62 log.error("Invalid key: %s", err)
62 63 raise
63 64 except NotImplementedError as err:
64 65 log.error("Invalid key type: %s", err)
65 66 raise
66 67 except Exception as err:
67 68 log.error("Key Parse error: %s", err)
68 69 raise
69 70
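A minimal sketch of parsing a submitted public key; the key material is hypothetical:

    key_data = 'ssh-rsa AAAAB3... user@host'  # hypothetical public key
    ssh = SshKeyModel().parse_key(key_data)   # raises on invalid keys
    fingerprint = ssh.hash_sha256()           # e.g. 'SHA256:...'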
70 71 def generate_keypair(self, comment=None, private_format=DEFAULT_PRIVATE_KEY_FORMAT):
71 72
72 73 key = rsa.generate_private_key(
73 74 backend=crypto_default_backend(),
74 75 public_exponent=65537,
75 76 key_size=2048
76 77 )
77 78 if private_format == self.DEFAULT_PRIVATE_KEY_FORMAT:
78 79 private_format = crypto_serialization.PrivateFormat.PKCS8
79 80 else:
80 81 # legacy format that can be used by older systems; use it if pkcs8
81 82 # causes problems
82 83 private_format = crypto_serialization.PrivateFormat.TraditionalOpenSSL
83 84
84 85 private_key = key.private_bytes(
85 86 crypto_serialization.Encoding.PEM,
86 87 private_format,
87 88 crypto_serialization.NoEncryption())
89 private_key = safe_str(private_key)
90
88 91 public_key = key.public_key().public_bytes(
89 92 crypto_serialization.Encoding.OpenSSH,
90 93 crypto_serialization.PublicFormat.OpenSSH
91 94 )
95 public_key = safe_str(public_key)
92 96
93 97 if comment:
94 98 public_key = public_key + " " + comment
95 99 return private_key, public_key
96 100
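A usage sketch for the keypair generation above; the comment string and file paths are illustrative:

    model = SshKeyModel()
    private_key, public_key = model.generate_keypair(comment='ci@builder')
    # private_key is a PEM string (PKCS#8 by default),
    # public_key is a single OpenSSH-format line
    with open('/tmp/id_rsa', 'w') as f:
        f.write(private_key)
    with open('/tmp/id_rsa.pub', 'w') as f:
        f.write(public_key)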
97 101 def create(self, user, fingerprint, key_data, description):
98 102 """
99 103 """
100 104 user = self._get_user(user)
101 105
102 106 new_ssh_key = UserSshKeys()
103 107 new_ssh_key.ssh_key_fingerprint = fingerprint
104 108 new_ssh_key.ssh_key_data = key_data
105 109 new_ssh_key.user_id = user.user_id
106 110 new_ssh_key.description = description
107 111
108 112 Session().add(new_ssh_key)
109 113
110 114 return new_ssh_key
111 115
112 116 def delete(self, ssh_key_id, user=None):
113 117 """
114 118 Deletes the given SSH key; if user is set it also filters the object
115 119 for deletion by the given user.
116 120 """
117 121 ssh_key = UserSshKeys.query().filter(
118 122 UserSshKeys.ssh_key_id == ssh_key_id)
119 123
120 124 if user:
121 125 user = self._get_user(user)
122 126 ssh_key = ssh_key.filter(UserSshKeys.user_id == user.user_id)
123 127 ssh_key = ssh_key.scalar()
124 128
125 129 if ssh_key:
126 130 try:
127 131 Session().delete(ssh_key)
128 132 except Exception:
129 133 log.error(traceback.format_exc())
130 134 raise
131 135
132 136 def get_ssh_keys(self, user):
133 137 user = self._get_user(user)
134 138 user_ssh_keys = UserSshKeys.query()\
135 139 .filter(UserSshKeys.user_id == user.user_id)
136 140 user_ssh_keys = user_ssh_keys.order_by(UserSshKeys.ssh_key_id)
137 141 return user_ssh_keys
138 142
139 143 def get_ssh_key_by_fingerprint(self, ssh_key_fingerprint):
140 144 user_ssh_key = UserSshKeys.query()\
141 145 .filter(UserSshKeys.ssh_key_fingerprint == ssh_key_fingerprint)\
142 146 .first()
143 147
144 148 return user_ssh_key
@@ -1,83 +1,85 b''
1 1
2 2
3 3 # Copyright (C) 2013-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 import urllib.request, urllib.error, urllib.parse
22 import urllib.request
23 import urllib.error
24 import urllib.parse
23 25 from packaging.version import Version
24 26
25 27 import rhodecode
26 28 from rhodecode.lib.ext_json import json
27 29 from rhodecode.model import BaseModel
28 30 from rhodecode.model.meta import Session
29 31 from rhodecode.model.settings import SettingsModel
30 32
31 33
32 34 log = logging.getLogger(__name__)
33 35
34 36
35 37 class UpdateModel(BaseModel):
36 38 UPDATE_SETTINGS_KEY = 'update_version'
37 39 UPDATE_URL_SETTINGS_KEY = 'rhodecode_update_url'
38 40
39 41 @staticmethod
40 42 def get_update_data(update_url):
41 43 """Return the JSON update data."""
42 44 ver = rhodecode.__version__
43 45 log.debug('Checking for upgrade on `%s` server', update_url)
44 46 opener = urllib.request.build_opener()
45 47 opener.addheaders = [('User-agent', 'RhodeCode-SCM/%s' % ver)]
46 48 response = opener.open(update_url)
47 49 response_data = response.read()
48 50 data = json.loads(response_data)
49 51 log.debug('update server returned data')
50 52 return data
51 53
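A sketch of fetching update info; the URL comes from settings and is assumed to serve the expected JSON:

    update_url = UpdateModel().get_update_url()
    data = UpdateModel.get_update_data(update_url)  # parsed JSON dict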
52 54 def get_update_url(self):
53 55 settings = SettingsModel().get_all_settings()
54 56 return settings.get(self.UPDATE_URL_SETTINGS_KEY)
55 57
56 58 def store_version(self, version):
57 59 log.debug('Storing version %s into settings', version)
58 60 setting = SettingsModel().create_or_update_setting(
59 61 self.UPDATE_SETTINGS_KEY, version)
60 62 Session().add(setting)
61 63 Session().commit()
62 64
63 65 def get_stored_version(self, fallback=None):
64 66 obj = SettingsModel().get_setting_by_name(self.UPDATE_SETTINGS_KEY)
65 67 if obj:
66 68 return obj.app_settings_value
67 69 return fallback or '0.0.0'
68 70
69 71 def _sanitize_version(self, version):
70 72 """
71 73 Clean up our custom version string,
72 74 e.g. 4.11.0_20171204_204825_CE_default_EE_default to 4.11.0
73 75 """
74 76 return version.split('_')[0]
75 77
76 78 def is_outdated(self, cur_version, latest_version=None):
77 79 latest_version = latest_version or self.get_stored_version()
78 80 try:
79 81 cur_version = self._sanitize_version(cur_version)
80 82 return Version(latest_version) > Version(cur_version)
81 83 except Exception:
82 84 # could be invalid version, etc
83 85 return False
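
A hedged sketch of the version comparison above; the version strings are illustrative:

    model = UpdateModel()
    latest = model.get_stored_version(fallback='0.0.0')
    cur = '4.11.0_20171204_204825_CE_default_EE_default'
    if model.is_outdated(cur, latest):  # sanitizes cur, then compares
        print('update available:', latest)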
@@ -1,1046 +1,1047 b''
1 1
2 2 # Copyright (C) 2010-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software: you can redistribute it and/or modify
5 5 # it under the terms of the GNU Affero General Public License, version 3
6 6 # (only), as published by the Free Software Foundation.
7 7 #
8 8 # This program is distributed in the hope that it will be useful,
9 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 11 # GNU General Public License for more details.
12 12 #
13 13 # You should have received a copy of the GNU Affero General Public License
14 14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 15 #
16 16 # This program is dual-licensed. If you wish to learn more about the
17 17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 19
20 20 """
21 21 users model for RhodeCode
22 22 """
23 23
24 24 import logging
25 25 import traceback
26 26 import datetime
27 27 import ipaddress
28 28
29 29 from pyramid.threadlocal import get_current_request
30 30 from sqlalchemy.exc import DatabaseError
31 31
32 32 from rhodecode import events
33 33 from rhodecode.lib.user_log_filter import user_log_filter
34 34 from rhodecode.lib.utils2 import (
35 safe_unicode, get_current_rhodecode_user, action_logger_generic,
35 get_current_rhodecode_user, action_logger_generic,
36 36 AttributeDict, str2bool)
37 from rhodecode.lib.str_utils import safe_str
37 38 from rhodecode.lib.exceptions import (
38 39 DefaultUserException, UserOwnsReposException, UserOwnsRepoGroupsException,
39 40 UserOwnsUserGroupsException, NotAllowedToCreateUserError,
40 41 UserOwnsPullRequestsException, UserOwnsArtifactsException)
41 42 from rhodecode.lib.caching_query import FromCache
42 43 from rhodecode.model import BaseModel
43 44 from rhodecode.model.db import (
44 45 _hash_key, func, true, false, or_, joinedload, User, UserToPerm,
45 46 UserEmailMap, UserIpMap, UserLog)
46 47 from rhodecode.model.meta import Session
47 48 from rhodecode.model.auth_token import AuthTokenModel
48 49 from rhodecode.model.repo_group import RepoGroupModel
49 50
50 51 log = logging.getLogger(__name__)
51 52
52 53
53 54 class UserModel(BaseModel):
54 55 cls = User
55 56
56 57 def get(self, user_id, cache=False):
57 58 user = self.sa.query(User)
58 59 if cache:
59 60 user = user.options(
60 61 FromCache("sql_cache_short", f"get_user_{user_id}"))
61 62 return user.get(user_id)
62 63
63 64 def get_user(self, user):
64 65 return self._get_user(user)
65 66
66 67 def _serialize_user(self, user):
67 68 import rhodecode.lib.helpers as h
68 69
69 70 return {
70 71 'id': user.user_id,
71 72 'first_name': user.first_name,
72 73 'last_name': user.last_name,
73 74 'username': user.username,
74 75 'email': user.email,
75 76 'icon_link': h.gravatar_url(user.email, 30),
76 77 'profile_link': h.link_to_user(user),
77 78 'value_display': h.escape(h.person(user)),
78 79 'value': user.username,
79 80 'value_type': 'user',
80 81 'active': user.active,
81 82 }
82 83
83 84 def get_users(self, name_contains=None, limit=20, only_active=True):
84 85
85 86 query = self.sa.query(User)
86 87 if only_active:
87 88 query = query.filter(User.active == true())
88 89
89 90 if name_contains:
90 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
91 ilike_expression = u'%{}%'.format(safe_str(name_contains))
91 92 query = query.filter(
92 93 or_(
93 94 User.name.ilike(ilike_expression),
94 95 User.lastname.ilike(ilike_expression),
95 96 User.username.ilike(ilike_expression)
96 97 )
97 98 )
98 99 # sort by length to have the closest matches first
99 100 query = query.order_by(func.length(User.username))\
100 101 .order_by(User.username)
101 102 query = query.limit(limit)
102 103
103 104 users = query.all()
104 105
105 106 _users = [
106 107 self._serialize_user(user) for user in users
107 108 ]
108 109 return _users
109 110
110 111 def get_by_username(self, username, cache=False, case_insensitive=False):
111 112
112 113 if case_insensitive:
113 114 user = self.sa.query(User).filter(User.username.ilike(username))
114 115 else:
115 116 user = self.sa.query(User)\
116 117 .filter(User.username == username)
117 118 if cache:
118 119 name_key = _hash_key(username)
119 120 user = user.options(
120 121 FromCache("sql_cache_short", f"get_user_{name_key}"))
121 122 return user.scalar()
122 123
123 124 def get_by_email(self, email, cache=False, case_insensitive=False):
124 125 return User.get_by_email(email, case_insensitive, cache)
125 126
126 127 def get_by_auth_token(self, auth_token, cache=False):
127 128 return User.get_by_auth_token(auth_token, cache)
128 129
129 130 def get_active_user_count(self, cache=False):
130 131 qry = User.query().filter(
131 132 User.active == true()).filter(
132 133 User.username != User.DEFAULT_USER)
133 134 if cache:
134 135 qry = qry.options(
135 136 FromCache("sql_cache_short", "get_active_users"))
136 137 return qry.count()
137 138
138 139 def create(self, form_data, cur_user=None):
139 140 if not cur_user:
140 141 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
141 142
142 143 user_data = {
143 144 'username': form_data['username'],
144 145 'password': form_data['password'],
145 146 'email': form_data['email'],
146 147 'firstname': form_data['firstname'],
147 148 'lastname': form_data['lastname'],
148 149 'active': form_data['active'],
149 150 'extern_type': form_data['extern_type'],
150 151 'extern_name': form_data['extern_name'],
151 152 'admin': False,
152 153 'cur_user': cur_user
153 154 }
154 155
155 156 if 'create_repo_group' in form_data:
156 157 user_data['create_repo_group'] = str2bool(
157 158 form_data.get('create_repo_group'))
158 159
159 160 try:
160 161 if form_data.get('password_change'):
161 162 user_data['force_password_change'] = True
162 163 return UserModel().create_or_update(**user_data)
163 164 except Exception:
164 165 log.error(traceback.format_exc())
165 166 raise
166 167
167 168 def update_user(self, user, skip_attrs=None, **kwargs):
168 169 from rhodecode.lib.auth import get_crypt_password
169 170
170 171 user = self._get_user(user)
171 172 if user.username == User.DEFAULT_USER:
172 173 raise DefaultUserException(
173 174 "You can't edit this user (`%(username)s`) since it's "
174 175 "crucial for entire application" % {
175 176 'username': user.username})
176 177
177 178 # first store only defaults
178 179 user_attrs = {
179 180 'updating_user_id': user.user_id,
180 181 'username': user.username,
181 182 'password': user.password,
182 183 'email': user.email,
183 184 'firstname': user.name,
184 185 'lastname': user.lastname,
185 186 'description': user.description,
186 187 'active': user.active,
187 188 'admin': user.admin,
188 189 'extern_name': user.extern_name,
189 190 'extern_type': user.extern_type,
190 191 'language': user.user_data.get('language')
191 192 }
192 193
193 194 # in case there's new_password, that comes from form, use it to
194 195 # store password
195 196 if kwargs.get('new_password'):
196 197 kwargs['password'] = kwargs['new_password']
197 198
198 199 # cleanups, my_account password change form
199 200 kwargs.pop('current_password', None)
200 201 kwargs.pop('new_password', None)
201 202
202 203 # cleanups, user edit password change form
203 204 kwargs.pop('password_confirmation', None)
204 205 kwargs.pop('password_change', None)
205 206
206 207 # create repo group on user creation
207 208 kwargs.pop('create_repo_group', None)
208 209
209 210 # legacy forms send name, which is the firstname
210 211 firstname = kwargs.pop('name', None)
211 212 if firstname:
212 213 kwargs['firstname'] = firstname
213 214
214 215 for k, v in kwargs.items():
215 216 # skip if we don't want to update this
216 217 if skip_attrs and k in skip_attrs:
217 218 continue
218 219
219 220 user_attrs[k] = v
220 221
221 222 try:
222 223 return self.create_or_update(**user_attrs)
223 224 except Exception:
224 225 log.error(traceback.format_exc())
225 226 raise
226 227
227 228 def create_or_update(
228 229 self, username, password, email, firstname='', lastname='',
229 230 active=True, admin=False, extern_type=None, extern_name=None,
230 231 cur_user=None, plugin=None, force_password_change=False,
231 232 allow_to_create_user=True, create_repo_group=None,
232 233 updating_user_id=None, language=None, description='',
233 234 strict_creation_check=True):
234 235 """
235 236 Creates a new instance if not found, or updates current one
236 237
237 238 :param username:
238 239 :param password:
239 240 :param email:
240 241 :param firstname:
241 242 :param lastname:
242 243 :param active:
243 244 :param admin:
244 245 :param extern_type:
245 246 :param extern_name:
246 247 :param cur_user:
247 248 :param plugin: optional plugin this method was called from
248 249 :param force_password_change: toggles new or existing user flag
249 250 for password change
250 251 :param allow_to_create_user: Defines if the method can actually create
251 252 new users
252 253 :param create_repo_group: Defines if the method should also
253 254 create a repo group named after the user, with the user as owner
254 255 :param updating_user_id: if set, this is the user we want to
255 256 update; this allows editing the username.
256 257 :param language: language of user from interface.
257 258 :param description: user description
258 259 :param strict_creation_check: checks whether creation is allowed (license-wise etc.)
259 260
260 261 :returns: new User object with injected `is_new_user` attribute.
261 262 """
262 263
263 264 if not cur_user:
264 265 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
265 266
266 267 from rhodecode.lib.auth import (
267 268 get_crypt_password, check_password)
268 269 from rhodecode.lib import hooks_base
269 270
270 271 def _password_change(new_user, password):
271 272 old_password = new_user.password or ''
272 273 # empty password
273 274 if not old_password:
274 275 return False
275 276
276 277 # password check is only needed for RhodeCode internal auth calls
277 278 # in case it's a plugin we don't care
278 279 if not plugin:
279 280
280 281 # first check if we got the crypted password back; if it
281 282 # matches, it's not a password change
282 283 if new_user.password == password:
283 284 return False
284 285
285 286 password_match = check_password(password, old_password)
286 287 if not password_match:
287 288 return True
288 289
289 290 return False
290 291
291 292 # read settings on default personal repo group creation
292 293 if create_repo_group is None:
293 294 default_create_repo_group = RepoGroupModel()\
294 295 .get_default_create_personal_repo_group()
295 296 create_repo_group = default_create_repo_group
296 297
297 298 user_data = {
298 299 'username': username,
299 300 'password': password,
300 301 'email': email,
301 302 'firstname': firstname,
302 303 'lastname': lastname,
303 304 'active': active,
304 305 'admin': admin
305 306 }
306 307
307 308 if updating_user_id:
308 309 log.debug('Checking for existing account in RhodeCode '
309 310 'database with user_id `%s` ', updating_user_id)
310 311 user = User.get(updating_user_id)
311 312 else:
312 313 log.debug('Checking for existing account in RhodeCode '
313 314 'database with username `%s` ', username)
314 315 user = User.get_by_username(username, case_insensitive=True)
315 316
316 317 if user is None:
317 318 # we check internal flag if this method is actually allowed to
318 319 # create new user
319 320 if not allow_to_create_user:
320 321 msg = ('Method wants to create new user, but it is not '
321 322 'allowed to do so')
322 323 log.warning(msg)
323 324 raise NotAllowedToCreateUserError(msg)
324 325
325 326 log.debug('Creating new user %s', username)
326 327
327 328 # only if we create user that is active
328 329 new_active_user = active
329 330 if new_active_user and strict_creation_check:
330 331 # raises UserCreationError if it's not allowed for any reason to
331 332 # create new active user, this also executes pre-create hooks
332 333 hooks_base.check_allowed_create_user(user_data, cur_user, strict_check=True)
333 334 events.trigger(events.UserPreCreate(user_data))
334 335 new_user = User()
335 336 edit = False
336 337 else:
337 338 log.debug('updating user `%s`', username)
338 339 events.trigger(events.UserPreUpdate(user, user_data))
339 340 new_user = user
340 341 edit = True
341 342
342 343 # we're not allowed to edit default user
343 344 if user.username == User.DEFAULT_USER:
344 345 raise DefaultUserException(
345 346 "You can't edit this user (`%(username)s`) since it's "
346 347 "crucial for entire application"
347 348 % {'username': user.username})
348 349
349 350 # inject special attribute that will tell us if User is new or old
350 351 new_user.is_new_user = not edit
351 352 # for users that didn't specify an auth type, we use the RhodeCode built-in
352 353 from rhodecode.authentication.plugins import auth_rhodecode
353 354 extern_name = extern_name or auth_rhodecode.RhodeCodeAuthPlugin.uid
354 355 extern_type = extern_type or auth_rhodecode.RhodeCodeAuthPlugin.uid
355 356
356 357 try:
357 358 new_user.username = username
358 359 new_user.admin = admin
359 360 new_user.email = email
360 361 new_user.active = active
361 new_user.extern_name = safe_unicode(extern_name)
362 new_user.extern_type = safe_unicode(extern_type)
362 new_user.extern_name = safe_str(extern_name)
363 new_user.extern_type = safe_str(extern_type)
363 364 new_user.name = firstname
364 365 new_user.lastname = lastname
365 366 new_user.description = description
366 367
367 368 # set password only if creating an user or password is changed
368 369 if not edit or _password_change(new_user, password):
369 370 reason = 'new password' if edit else 'new user'
370 371 log.debug('Updating password reason=>%s', reason)
371 372 new_user.password = get_crypt_password(password) if password else None
372 373
373 374 if force_password_change:
374 375 new_user.update_userdata(force_password_change=True)
375 376 if language:
376 377 new_user.update_userdata(language=language)
377 378 new_user.update_userdata(notification_status=True)
378 379
379 380 self.sa.add(new_user)
380 381
381 382 if not edit and create_repo_group:
382 383 RepoGroupModel().create_personal_repo_group(
383 384 new_user, commit_early=False)
384 385
385 386 if not edit:
386 387 # add the RSS token
387 388 self.add_auth_token(
388 389 user=username, lifetime_minutes=-1,
389 390 role=self.auth_token_role.ROLE_FEED,
390 391 description=u'Generated feed token')
391 392
392 393 kwargs = new_user.get_dict()
393 394 # backward compat, require api_keys present
394 395 kwargs['api_keys'] = kwargs['auth_tokens']
395 396 hooks_base.create_user(created_by=cur_user, **kwargs)
396 397 events.trigger(events.UserPostCreate(user_data))
397 398 return new_user
398 399 except (DatabaseError,):
399 400 log.error(traceback.format_exc())
400 401 raise
401 402
402 403 def create_registration(self, form_data,
403 404 extern_name='rhodecode', extern_type='rhodecode'):
404 405 from rhodecode.model.notification import NotificationModel
405 406 from rhodecode.model.notification import EmailNotificationModel
406 407
407 408 try:
408 409 form_data['admin'] = False
409 410 form_data['extern_name'] = extern_name
410 411 form_data['extern_type'] = extern_type
411 412 new_user = self.create(form_data)
412 413
413 414 self.sa.add(new_user)
414 415 self.sa.flush()
415 416
416 417 user_data = new_user.get_dict()
417 418 user_data.update({
418 419 'first_name': user_data.get('firstname'),
419 420 'last_name': user_data.get('lastname'),
420 421 })
421 422 kwargs = {
422 423 # use SQLALCHEMY safe dump of user data
423 424 'user': AttributeDict(user_data),
424 425 'date': datetime.datetime.now()
425 426 }
426 427 notification_type = EmailNotificationModel.TYPE_REGISTRATION
427 428
428 429 # create notification objects, and emails
429 430 NotificationModel().create(
430 431 created_by=new_user,
431 432 notification_subject='', # Filled in based on the notification_type
432 433 notification_body='', # Filled in based on the notification_type
433 434 notification_type=notification_type,
434 435 recipients=None, # all admins
435 436 email_kwargs=kwargs,
436 437 )
437 438
438 439 return new_user
439 440 except Exception:
440 441 log.error(traceback.format_exc())
441 442 raise
442 443
443 444 def _handle_user_repos(self, username, repositories, handle_user,
444 445 handle_mode=None):
445 446
446 447 left_overs = True
447 448
448 449 from rhodecode.model.repo import RepoModel
449 450
450 451 if handle_mode == 'detach':
451 452 for obj in repositories:
452 453 obj.user = handle_user
453 454 # set a description so we know why the super admin now owns
454 455 # additional repositories that were orphaned
455 456 obj.description += ' \n::detached repository from deleted user: %s' % (username,)
456 457 self.sa.add(obj)
457 458 left_overs = False
458 459 elif handle_mode == 'delete':
459 460 for obj in repositories:
460 461 RepoModel().delete(obj, forks='detach')
461 462 left_overs = False
462 463
463 464 # if nothing is done we still have leftovers
464 465 return left_overs
465 466
466 467 def _handle_user_repo_groups(self, username, repository_groups, handle_user,
467 468 handle_mode=None):
468 469
469 470 left_overs = True
470 471
471 472 from rhodecode.model.repo_group import RepoGroupModel
472 473
473 474 if handle_mode == 'detach':
474 475 for r in repository_groups:
475 476 r.user = handle_user
476 477 # set a description so we know why the super admin now owns
477 478 # additional repository groups that were orphaned
478 479 r.group_description += ' \n::detached repository group from deleted user: %s' % (username,)
479 480 r.personal = False
480 481 self.sa.add(r)
481 482 left_overs = False
482 483 elif handle_mode == 'delete':
483 484 for r in repository_groups:
484 485 RepoGroupModel().delete(r)
485 486 left_overs = False
486 487
487 488 # if nothing is done we still have leftovers
488 489 return left_overs
489 490
490 491 def _handle_user_user_groups(self, username, user_groups, handle_user,
491 492 handle_mode=None):
492 493
493 494 left_overs = True
494 495
495 496 from rhodecode.model.user_group import UserGroupModel
496 497
497 498 if handle_mode == 'detach':
498 499 for r in user_groups:
499 500 for user_user_group_to_perm in r.user_user_group_to_perm:
500 501 if user_user_group_to_perm.user.username == username:
501 502 user_user_group_to_perm.user = handle_user
502 503 r.user = handle_user
503 504 # set description we know why we super admin now owns
504 505 # additional repositories that were orphaned !
505 506 r.user_group_description += ' \n::detached user group from deleted user: %s' % (username,)
506 507 self.sa.add(r)
507 508 left_overs = False
508 509 elif handle_mode == 'delete':
509 510 for r in user_groups:
510 511 UserGroupModel().delete(r)
511 512 left_overs = False
512 513
513 514 # if nothing is done we still have leftovers
514 515 return left_overs
515 516
516 517 def _handle_user_pull_requests(self, username, pull_requests, handle_user,
517 518 handle_mode=None):
518 519 left_overs = True
519 520
520 521 from rhodecode.model.pull_request import PullRequestModel
521 522
522 523 if handle_mode == 'detach':
523 524 for pr in pull_requests:
524 525 pr.user_id = handle_user.user_id
525 526 # set a description so we know why the super admin now owns
526 527 # additional pull requests that were orphaned
527 528 pr.description += ' \n::detached pull requests from deleted user: %s' % (username,)
528 529 self.sa.add(pr)
529 530 left_overs = False
530 531 elif handle_mode == 'delete':
531 532 for pr in pull_requests:
532 533 PullRequestModel().delete(pr)
533 534
534 535 left_overs = False
535 536
536 # if nothing is done we have left overs left
537 # if nothing is done we still have leftovers
537 538 return left_overs
538 539
539 540 def _handle_user_artifacts(self, username, artifacts, handle_user,
540 541 handle_mode=None):
541 542
542 543 left_overs = True
543 544
544 545 if handle_mode == 'detach':
545 546 for a in artifacts:
546 547 a.upload_user = handle_user
547 548 # set a description so we know why the super admin now owns
548 549 # additional artifacts that were orphaned
549 550 a.file_description += ' \n::detached artifact from deleted user: %s' % (username,)
550 551 self.sa.add(a)
551 552 left_overs = False
552 553 elif handle_mode == 'delete':
553 554 from rhodecode.apps.file_store import utils as store_utils
554 555 request = get_current_request()
555 556 storage = store_utils.get_file_storage(request.registry.settings)
556 557 for a in artifacts:
557 558 file_uid = a.file_uid
558 559 storage.delete(file_uid)
559 560 self.sa.delete(a)
560 561
561 562 left_overs = False
562 563
563 564 # if nothing is done we still have leftovers
564 565 return left_overs
565 566
566 567 def delete(self, user, cur_user=None, handle_repos=None,
567 568 handle_repo_groups=None, handle_user_groups=None,
568 569 handle_pull_requests=None, handle_artifacts=None, handle_new_owner=None):
569 570 from rhodecode.lib import hooks_base
570 571
571 572 if not cur_user:
572 573 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
573 574
574 575 user = self._get_user(user)
575 576
576 577 try:
577 578 if user.username == User.DEFAULT_USER:
578 579 raise DefaultUserException(
579 580 u"You can't remove this user since it's"
580 581 u" crucial for entire application")
581 582 handle_user = handle_new_owner or self.cls.get_first_super_admin()
582 583 log.debug('New detached objects owner %s', handle_user)
583 584
584 585 left_overs = self._handle_user_repos(
585 586 user.username, user.repositories, handle_user, handle_repos)
586 587 if left_overs and user.repositories:
587 588 repos = [x.repo_name for x in user.repositories]
588 589 raise UserOwnsReposException(
589 590 u'user "%(username)s" still owns %(len_repos)s repositories and cannot be '
590 591 u'removed. Switch owners or remove those repositories:%(list_repos)s'
591 592 % {'username': user.username, 'len_repos': len(repos),
592 593 'list_repos': ', '.join(repos)})
593 594
594 595 left_overs = self._handle_user_repo_groups(
595 596 user.username, user.repository_groups, handle_user, handle_repo_groups)
596 597 if left_overs and user.repository_groups:
597 598 repo_groups = [x.group_name for x in user.repository_groups]
598 599 raise UserOwnsRepoGroupsException(
599 600 u'user "%(username)s" still owns %(len_repo_groups)s repository groups and cannot be '
600 601 u'removed. Switch owners or remove those repository groups:%(list_repo_groups)s'
601 602 % {'username': user.username, 'len_repo_groups': len(repo_groups),
602 603 'list_repo_groups': ', '.join(repo_groups)})
603 604
604 605 left_overs = self._handle_user_user_groups(
605 606 user.username, user.user_groups, handle_user, handle_user_groups)
606 607 if left_overs and user.user_groups:
607 608 user_groups = [x.users_group_name for x in user.user_groups]
608 609 raise UserOwnsUserGroupsException(
609 610 u'user "%s" still owns %s user groups and cannot be '
610 611 u'removed. Switch owners or remove those user groups:%s'
611 612 % (user.username, len(user_groups), ', '.join(user_groups)))
612 613
613 614 left_overs = self._handle_user_pull_requests(
614 615 user.username, user.user_pull_requests, handle_user, handle_pull_requests)
615 616 if left_overs and user.user_pull_requests:
616 617 pull_requests = ['!{}'.format(x.pull_request_id) for x in user.user_pull_requests]
617 618 raise UserOwnsPullRequestsException(
618 619 u'user "%s" still owns %s pull requests and cannot be '
619 620 u'removed. Switch owners or remove those pull requests:%s'
620 621 % (user.username, len(pull_requests), ', '.join(pull_requests)))
621 622
622 623 left_overs = self._handle_user_artifacts(
623 624 user.username, user.artifacts, handle_user, handle_artifacts)
624 625 if left_overs and user.artifacts:
625 626 artifacts = [x.file_uid for x in user.artifacts]
626 627 raise UserOwnsArtifactsException(
627 628 u'user "%s" still owns %s artifacts and cannot be '
628 629 u'removed. Switch owners or remove those artifacts:%s'
629 630 % (user.username, len(artifacts), ', '.join(artifacts)))
630 631
631 632 user_data = user.get_dict() # fetch user data before expire
632 633
633 634 # we might change the user data with detach/delete, make sure
634 635 # the object is marked as expired before actually deleting !
635 636 self.sa.expire(user)
636 637 self.sa.delete(user)
637 638
638 639 hooks_base.delete_user(deleted_by=cur_user, **user_data)
639 640 except Exception:
640 641 log.error(traceback.format_exc())
641 642 raise
642 643
643 644 def reset_password_link(self, data, pwd_reset_url):
644 645 from rhodecode.lib.celerylib import tasks, run_task
645 646 from rhodecode.model.notification import EmailNotificationModel
646 647 user_email = data['email']
647 648 try:
648 649 user = User.get_by_email(user_email)
649 650 if user:
650 651 log.debug('password reset user found %s', user)
651 652
652 653 email_kwargs = {
653 654 'password_reset_url': pwd_reset_url,
654 655 'user': user,
655 656 'email': user_email,
656 657 'date': datetime.datetime.now(),
657 658 'first_admin_email': User.get_first_super_admin().email
658 659 }
659 660
660 661 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
661 662 EmailNotificationModel.TYPE_PASSWORD_RESET, **email_kwargs)
662 663
663 664 recipients = [user_email]
664 665
665 666 action_logger_generic(
666 667 'sending password reset email to user: {}'.format(
667 668 user), namespace='security.password_reset')
668 669
669 670 run_task(tasks.send_email, recipients, subject,
670 671 email_body_plaintext, email_body)
671 672
672 673 else:
673 674 log.debug("password reset email %s not found", user_email)
674 675 except Exception:
675 676 log.error(traceback.format_exc())
676 677 return False
677 678
678 679 return True
679 680
680 681 def reset_password(self, data):
681 682 from rhodecode.lib.celerylib import tasks, run_task
682 683 from rhodecode.model.notification import EmailNotificationModel
683 684 from rhodecode.lib import auth
684 685 user_email = data['email']
685 686 pre_db = True
686 687 try:
687 688 user = User.get_by_email(user_email)
688 689 new_passwd = auth.PasswordGenerator().gen_password(
689 690 12, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
690 691 if user:
691 692 user.password = auth.get_crypt_password(new_passwd)
692 693 # also force this user to reset their password
693 694 user.update_userdata(force_password_change=True)
694 695
695 696 Session().add(user)
696 697
697 698 # now delete the token in question
698 699 UserApiKeys = AuthTokenModel.cls
699 700 UserApiKeys().query().filter(
700 701 UserApiKeys.api_key == data['token']).delete()
701 702
702 703 Session().commit()
703 704 log.info('successfully reset password for `%s`', user_email)
704 705
705 706 if new_passwd is None:
706 707 raise Exception('unable to generate new password')
707 708
708 709 pre_db = False
709 710
710 711 email_kwargs = {
711 712 'new_password': new_passwd,
712 713 'user': user,
713 714 'email': user_email,
714 715 'date': datetime.datetime.now(),
715 716 'first_admin_email': User.get_first_super_admin().email
716 717 }
717 718
718 719 (subject, email_body, email_body_plaintext) = EmailNotificationModel().render_email(
719 720 EmailNotificationModel.TYPE_PASSWORD_RESET_CONFIRMATION,
720 721 **email_kwargs)
721 722
722 723 recipients = [user_email]
723 724
724 725 action_logger_generic(
725 726 'sent new password to user: {} with email: {}'.format(
726 727 user, user_email), namespace='security.password_reset')
727 728
728 729 run_task(tasks.send_email, recipients, subject,
729 730 email_body_plaintext, email_body)
730 731
731 732 except Exception:
732 733 log.error('Failed to update user password')
733 734 log.error(traceback.format_exc())
734 735 if pre_db:
735 736 # we roll back only if local db stuff fails. If it gets into
736 737 # run_task, we're past the rollback state and this wouldn't work then
737 738 Session().rollback()
738 739
739 740 return True
740 741
741 742 def fill_data(self, auth_user, user_id=None, api_key=None, username=None):
742 743 """
743 744 Fetches auth_user by user_id, or api_key if present.
744 745 Fills auth_user attributes with those taken from the database.
745 746 Additionally sets is_authenticated to False if the lookup
746 747 fails or the user is not present in the database.
747 748
748 749 :param auth_user: instance of user to set attributes
749 750 :param user_id: user id to fetch by
750 751 :param api_key: api key to fetch by
751 752 :param username: username to fetch by
752 753 """
753 754 def token_obfuscate(token):
754 755 if token:
755 756 return token[:4] + "****"
756 757
757 758 if user_id is None and api_key is None and username is None:
758 759 raise Exception('You need to pass user_id, api_key or username')
759 760
760 761 log.debug(
761 762 'AuthUser: fill data execution based on: '
762 763 'user_id:%s api_key:%s username:%s', user_id, api_key, username)
763 764 try:
764 765 dbuser = None
765 766 if user_id:
766 767 dbuser = self.get(user_id)
767 768 elif api_key:
768 769 dbuser = self.get_by_auth_token(api_key)
769 770 elif username:
770 771 dbuser = self.get_by_username(username)
771 772
772 773 if not dbuser:
773 774 log.warning(
774 775 'Unable to lookup user by id:%s api_key:%s username:%s',
775 776 user_id, token_obfuscate(api_key), username)
776 777 return False
777 778 if not dbuser.active:
778 779 log.debug('User `%s:%s` is inactive, skipping fill data',
779 780 username, user_id)
780 781 return False
781 782
782 783 log.debug('AuthUser: filling found user:%s data', dbuser)
783 784
784 785 attrs = {
785 786 'user_id': dbuser.user_id,
786 787 'username': dbuser.username,
787 788 'name': dbuser.name,
788 789 'first_name': dbuser.first_name,
789 790 'firstname': dbuser.firstname,
790 791 'last_name': dbuser.last_name,
791 792 'lastname': dbuser.lastname,
792 793 'admin': dbuser.admin,
793 794 'active': dbuser.active,
794 795
795 796 'email': dbuser.email,
796 797 'emails': dbuser.emails_cached(),
797 798 'short_contact': dbuser.short_contact,
798 799 'full_contact': dbuser.full_contact,
799 800 'full_name': dbuser.full_name,
800 801 'full_name_or_username': dbuser.full_name_or_username,
801 802
802 803 '_api_key': dbuser._api_key,
803 804 '_user_data': dbuser._user_data,
804 805
805 806 'created_on': dbuser.created_on,
806 807 'extern_name': dbuser.extern_name,
807 808 'extern_type': dbuser.extern_type,
808 809
809 810 'inherit_default_permissions': dbuser.inherit_default_permissions,
810 811
811 812 'language': dbuser.language,
812 813 'last_activity': dbuser.last_activity,
813 814 'last_login': dbuser.last_login,
814 815 'password': dbuser.password,
815 816 }
816 817 auth_user.__dict__.update(attrs)
817 818 except Exception:
818 819 log.error(traceback.format_exc())
819 820 auth_user.is_authenticated = False
820 821 return False
821 822
822 823 return True
823 824
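A minimal calling sketch, assuming `auth_user` is an AuthUser-like object with writable attributes (the id is a placeholder); exactly one of the three lookup keys should be given:

    if not UserModel().fill_data(auth_user, user_id=42):
        # user not found or inactive; if an exception occurred, the method
        # has already set auth_user.is_authenticated = False
        pass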
824 825 def has_perm(self, user, perm):
825 826 perm = self._get_perm(perm)
826 827 user = self._get_user(user)
827 828
828 829 return UserToPerm.query().filter(UserToPerm.user == user)\
829 830 .filter(UserToPerm.permission == perm).scalar() is not None
830 831
831 832 def grant_perm(self, user, perm):
832 833 """
833 834 Grant user global permissions
834 835
835 836 :param user:
836 837 :param perm:
837 838 """
838 839 user = self._get_user(user)
839 840 perm = self._get_perm(perm)
840 841 # if this permission is already granted skip it
841 842 _perm = UserToPerm.query()\
842 843 .filter(UserToPerm.user == user)\
843 844 .filter(UserToPerm.permission == perm)\
844 845 .scalar()
845 846 if _perm:
846 847 return
847 848 new = UserToPerm()
848 849 new.user = user
849 850 new.permission = perm
850 851 self.sa.add(new)
851 852 return new
852 853
853 854 def revoke_perm(self, user, perm):
854 855 """
855 856 Revoke user's global permissions
856 857
857 858 :param user:
858 859 :param perm:
859 860 """
860 861 user = self._get_user(user)
861 862 perm = self._get_perm(perm)
862 863
863 864 obj = UserToPerm.query()\
864 865 .filter(UserToPerm.user == user)\
865 866 .filter(UserToPerm.permission == perm)\
866 867 .scalar()
867 868 if obj:
868 869 self.sa.delete(obj)
869 870
870 871 def add_extra_email(self, user, email):
871 872 """
872 873 Adds email address to UserEmailMap
873 874
874 875 :param user:
875 876 :param email:
876 877 """
877 878
878 879 user = self._get_user(user)
879 880
880 881 obj = UserEmailMap()
881 882 obj.user = user
882 883 obj.email = email
883 884 self.sa.add(obj)
884 885 return obj
885 886
886 887 def delete_extra_email(self, user, email_id):
887 888 """
888 889 Removes email address from UserEmailMap
889 890
890 891 :param user:
891 892 :param email_id:
892 893 """
893 894 user = self._get_user(user)
894 895 obj = UserEmailMap.query().get(email_id)
895 896 if obj and obj.user_id == user.user_id:
896 897 self.sa.delete(obj)
897 898
898 899 def parse_ip_range(self, ip_range):
899 900 ip_list = []
900 901
901 902 def make_unique(value):
902 903 seen = []
903 904 return [c for c in value if not (c in seen or seen.append(c))]
904 905
905 906 # first split on commas
906 907 for ip_range in ip_range.split(','):
907 908 if not ip_range:
908 909 continue
909 910 ip_range = ip_range.strip()
910 911 if '-' in ip_range:
911 912 start_ip, end_ip = ip_range.split('-', 1)
912 start_ip = ipaddress.ip_address(safe_unicode(start_ip.strip()))
913 end_ip = ipaddress.ip_address(safe_unicode(end_ip.strip()))
913 start_ip = ipaddress.ip_address(safe_str(start_ip.strip()))
914 end_ip = ipaddress.ip_address(safe_str(end_ip.strip()))
914 915 parsed_ip_range = []
915 916
916 917 for index in range(int(start_ip), int(end_ip) + 1):
917 918 new_ip = ipaddress.ip_address(index)
918 919 parsed_ip_range.append(str(new_ip))
919 920 ip_list.extend(parsed_ip_range)
920 921 else:
921 922 ip_list.append(ip_range)
922 923
923 924 return make_unique(ip_list)
924 925
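For illustration, the same range expansion can be reproduced standalone; this is a runnable sketch that mirrors parse_ip_range() above, with dict.fromkeys() standing in for make_unique():

    import ipaddress

    def expand_ip_ranges(spec):
        # split on commas, expand 'start-end' ranges, then de-duplicate
        # while preserving order
        ips = []
        for part in (p.strip() for p in spec.split(',') if p.strip()):
            if '-' in part:
                start, end = (ipaddress.ip_address(p.strip())
                              for p in part.split('-', 1))
                ips.extend(str(ipaddress.ip_address(i))
                           for i in range(int(start), int(end) + 1))
            else:
                ips.append(part)
        return list(dict.fromkeys(ips))

    assert expand_ip_ranges('127.0.0.1-127.0.0.3, 10.0.0.5') == \
        ['127.0.0.1', '127.0.0.2', '127.0.0.3', '10.0.0.5']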
925 926 def add_extra_ip(self, user, ip, description=None):
926 927 """
927 928 Adds ip address to UserIpMap
928 929
929 930 :param user:
930 931 :param ip:
931 932 """
932 933
933 934 user = self._get_user(user)
934 935 obj = UserIpMap()
935 936 obj.user = user
936 937 obj.ip_addr = ip
937 938 obj.description = description
938 939 self.sa.add(obj)
939 940 return obj
940 941
941 942 auth_token_role = AuthTokenModel.cls
942 943
943 944 def add_auth_token(self, user, lifetime_minutes, role, description=u'',
944 945 scope_callback=None):
945 946 """
946 947 Add AuthToken for user.
947 948
948 949 :param user: username/user_id
949 950 :param lifetime_minutes: in minutes the lifetime for token, -1 equals no limit
950 951 :param role: one of AuthTokenModel.cls.ROLE_*
951 952 :param description: optional string description
952 953 """
953 954
954 955 token = AuthTokenModel().create(
955 956 user, description, lifetime_minutes, role)
956 957 if scope_callback and callable(scope_callback):
957 958 # call the callback if provided; used to attach scope for the EE edition
958 959 scope_callback(token)
959 960 return token
960 961
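A hedged usage sketch; ROLE_API is assumed to be one of the AuthTokenModel.cls ROLE_* constants referenced in the docstring, and all argument values are illustrative:

    token = UserModel().add_auth_token(
        user='admin',                     # username, user_id or User instance
        lifetime_minutes=60,              # -1 would mean no expiry
        role=UserModel.auth_token_role.ROLE_API,  # assumed ROLE_* constant
        description='ci access token')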
961 962 def delete_extra_ip(self, user, ip_id):
962 963 """
963 964 Removes ip address from UserIpMap
964 965
965 966 :param user:
966 967 :param ip_id:
967 968 """
968 969 user = self._get_user(user)
969 970 obj = UserIpMap.query().get(ip_id)
970 971 if obj and obj.user_id == user.user_id:
971 972 self.sa.delete(obj)
972 973
973 974 def get_accounts_in_creation_order(self, current_user=None):
974 975 """
975 976 Get accounts in order of creation for deactivation for license limits
976 977
977 978 pick the currently logged in user, and append it to the list at position 0
978 979 pick all super-admins in order of creation date and add them to the list
979 980 pick all other accounts in order of creation and add them to the list.
980 981
981 982 Based on that list, the trailing accounts can be disabled, as they
982 983 were created last; the tail never includes the super-admins or
983 984 the current user.
984 985
985 986 :param current_user: optionally current user running this operation
986 987 """
987 988
988 989 if not current_user:
989 990 current_user = get_current_rhodecode_user()
990 991 active_super_admins = [
991 992 x.user_id for x in User.query()
992 993 .filter(User.user_id != current_user.user_id)
993 994 .filter(User.active == true())
994 995 .filter(User.admin == true())
995 996 .order_by(User.created_on.asc())]
996 997
997 998 active_regular_users = [
998 999 x.user_id for x in User.query()
999 1000 .filter(User.user_id != current_user.user_id)
1000 1001 .filter(User.active == true())
1001 1002 .filter(User.admin == false())
1002 1003 .order_by(User.created_on.asc())]
1003 1004
1004 1005 list_of_accounts = [current_user.user_id]
1005 1006 list_of_accounts += active_super_admins
1006 1007 list_of_accounts += active_regular_users
1007 1008
1008 1009 return list_of_accounts
1009 1010
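A worked example of the resulting ordering, with made-up user ids:

    # current user id=7; super-admins created in order [2, 5];
    # regular active users created in order [3, 9, 11]
    list_of_accounts = [7] + [2, 5] + [3, 9, 11]
    # -> [7, 2, 5, 3, 9, 11]; the trailing entries are the first
    #    candidates for deactivation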
1010 1011 def deactivate_last_users(self, expected_users, current_user=None):
1011 1012 """
1012 1013 Deactivate accounts that are over the license limits.
1013 1014 The algorithm for choosing which accounts to disable is as follows:
1014 1015
1015 1016 Get current user, then super admins in creation order, then regular
1016 1017 active users in creation order.
1017 1018
1018 1019 Using that list we mark all accounts from the end of it as inactive.
1019 1020 This way we deactivate only the most recently created accounts.
1020 1021
1021 1022 :param expected_users: number of accounts expected to stay active;
1022 1023 accounts past that position in the ordered list are deactivated
1023 1024 """
1024 1025
1025 1026 list_of_accounts = self.get_accounts_in_creation_order(
1026 1027 current_user=current_user)
1027 1028
1028 1029 for acc_id in list_of_accounts[expected_users + 1:]:
1029 1030 user = User.get(acc_id)
1030 1031 log.info('Deactivating account %s for license unlock', user)
1031 1032 user.active = False
1032 1033 Session().add(user)
1033 1034 Session().commit()
1034 1035
1035 1036 return
1036 1037
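Continuing the made-up ordering above, the slice below shows what a call with expected_users=3 would deactivate; note the +1 offset in the code keeps expected_users accounts besides the current user:

    list_of_accounts = [7, 2, 5, 3, 9, 11]
    expected_users = 3
    list_of_accounts[expected_users + 1:]   # -> [9, 11] get deactivated
    # the current user, both super-admins and the oldest regular
    # account stay active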
1037 1038 def get_user_log(self, user, filter_term):
1038 1039 user_log = UserLog.query()\
1039 1040 .filter(or_(UserLog.user_id == user.user_id,
1040 1041 UserLog.username == user.username))\
1041 1042 .options(joinedload(UserLog.user))\
1042 1043 .options(joinedload(UserLog.repository))\
1043 1044 .order_by(UserLog.action_date.desc())
1044 1045
1045 1046 user_log = user_log_filter(user_log, filter_term)
1046 1047 return user_log
@@ -1,754 +1,754 b''
1 1
2 2
3 3 # Copyright (C) 2011-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22 import traceback
23 23
24 from rhodecode.lib.utils2 import safe_str, safe_unicode
24 from rhodecode.lib.utils2 import safe_str
25 25 from rhodecode.lib.exceptions import (
26 26 UserGroupAssignedException, RepoGroupAssignmentError)
27 27 from rhodecode.lib.utils2 import (
28 28 get_current_rhodecode_user, action_logger_generic)
29 29 from rhodecode.model import BaseModel
30 30 from rhodecode.model.scm import UserGroupList
31 31 from rhodecode.model.db import (
32 32 joinedload, true, func, User, UserGroupMember, UserGroup,
33 33 UserGroupRepoToPerm, Permission, UserGroupToPerm, UserUserGroupToPerm,
34 34 UserGroupUserGroupToPerm, UserGroupRepoGroupToPerm)
35 35
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 class UserGroupModel(BaseModel):
41 41
42 42 cls = UserGroup
43 43
44 44 def _get_user_group(self, user_group):
45 45 return self._get_instance(UserGroup, user_group,
46 46 callback=UserGroup.get_by_group_name)
47 47
48 48 def _create_default_perms(self, user_group):
49 49 # create default permission
50 50 default_perm = 'usergroup.read'
51 51 def_user = User.get_default_user()
52 52 for p in def_user.user_perms:
53 53 if p.permission.permission_name.startswith('usergroup.'):
54 54 default_perm = p.permission.permission_name
55 55 break
56 56
57 57 user_group_to_perm = UserUserGroupToPerm()
58 58 user_group_to_perm.permission = Permission.get_by_key(default_perm)
59 59
60 60 user_group_to_perm.user_group = user_group
61 user_group_to_perm.user_id = def_user.user_id
61 user_group_to_perm.user = def_user
62 62 return user_group_to_perm
63 63
64 64 def update_permissions(
65 65 self, user_group, perm_additions=None, perm_updates=None,
66 66 perm_deletions=None, check_perms=True, cur_user=None):
67 67
68 68 from rhodecode.lib.auth import HasUserGroupPermissionAny
69 69 if not perm_additions:
70 70 perm_additions = []
71 71 if not perm_updates:
72 72 perm_updates = []
73 73 if not perm_deletions:
74 74 perm_deletions = []
75 75
76 76 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
77 77
78 78 changes = {
79 79 'added': [],
80 80 'updated': [],
81 81 'deleted': []
82 82 }
83 83 change_obj = user_group.get_api_data()
84 84 # update permissions
85 85 for member_id, perm, member_type in perm_updates:
86 86 member_id = int(member_id)
87 87 if member_type == 'user':
88 88 member_name = User.get(member_id).username
89 89 # this updates existing one
90 90 self.grant_user_permission(
91 91 user_group=user_group, user=member_id, perm=perm
92 92 )
93 93 elif member_type == 'user_group':
94 94 # check if we have permissions to alter this usergroup
95 95 member_name = UserGroup.get(member_id).users_group_name
96 96 if not check_perms or HasUserGroupPermissionAny(
97 97 *req_perms)(member_name, user=cur_user):
98 98 self.grant_user_group_permission(
99 99 target_user_group=user_group, user_group=member_id, perm=perm)
100 100 else:
101 101 raise ValueError("member_type must be 'user' or 'user_group' "
102 102 "got {} instead".format(member_type))
103 103
104 104 changes['updated'].append({
105 105 'change_obj': change_obj,
106 106 'type': member_type, 'id': member_id,
107 107 'name': member_name, 'new_perm': perm})
108 108
109 109 # set new permissions
110 110 for member_id, perm, member_type in perm_additions:
111 111 member_id = int(member_id)
112 112 if member_type == 'user':
113 113 member_name = User.get(member_id).username
114 114 self.grant_user_permission(
115 115 user_group=user_group, user=member_id, perm=perm)
116 116 elif member_type == 'user_group':
117 117 # check if we have permissions to alter this usergroup
118 118 member_name = UserGroup.get(member_id).users_group_name
119 119 if not check_perms or HasUserGroupPermissionAny(
120 120 *req_perms)(member_name, user=cur_user):
121 121 self.grant_user_group_permission(
122 122 target_user_group=user_group, user_group=member_id, perm=perm)
123 123 else:
124 124 raise ValueError("member_type must be 'user' or 'user_group' "
125 125 "got {} instead".format(member_type))
126 126
127 127 changes['added'].append({
128 128 'change_obj': change_obj,
129 129 'type': member_type, 'id': member_id,
130 130 'name': member_name, 'new_perm': perm})
131 131
132 132 # delete permissions
133 133 for member_id, perm, member_type in perm_deletions:
134 134 member_id = int(member_id)
135 135 if member_type == 'user':
136 136 member_name = User.get(member_id).username
137 137 self.revoke_user_permission(user_group=user_group, user=member_id)
138 138 elif member_type == 'user_group':
139 139 # check if we have permissions to alter this usergroup
140 140 member_name = UserGroup.get(member_id).users_group_name
141 141 if not check_perms or HasUserGroupPermissionAny(
142 142 *req_perms)(member_name, user=cur_user):
143 143 self.revoke_user_group_permission(
144 144 target_user_group=user_group, user_group=member_id)
145 145 else:
146 146 raise ValueError("member_type must be 'user' or 'user_group' "
147 147 "got {} instead".format(member_type))
148 148
149 149 changes['deleted'].append({
150 150 'change_obj': change_obj,
151 151 'type': member_type, 'id': member_id,
152 152 'name': member_name, 'new_perm': perm})
153 153
154 154 return changes
155 155
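A sketch of the expected input shape, derived from the tuple unpacking in the loops above; the ids, permission names and the `user_group`/`cur_user` bindings are placeholders:

    # each entry is (member_id, permission_name, member_type)
    perm_additions = [(4, 'usergroup.write', 'user')]
    perm_updates = [(15, 'usergroup.read', 'user_group')]
    perm_deletions = [(4, 'usergroup.none', 'user')]

    changes = UserGroupModel().update_permissions(
        user_group, perm_additions, perm_updates, perm_deletions,
        check_perms=True, cur_user=cur_user)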
156 156 def get(self, user_group_id, cache=False):
157 157 return UserGroup.get(user_group_id)
158 158
159 159 def get_group(self, user_group):
160 160 return self._get_user_group(user_group)
161 161
162 162 def get_by_name(self, name, cache=False, case_insensitive=False):
163 163 return UserGroup.get_by_group_name(name, cache, case_insensitive)
164 164
165 165 def create(self, name, description, owner, active=True, group_data=None):
166 166 try:
167 167 new_user_group = UserGroup()
168 168 new_user_group.user = self._get_user(owner)
169 169 new_user_group.users_group_name = name
170 170 new_user_group.user_group_description = description
171 171 new_user_group.users_group_active = active
172 172 if group_data:
173 173 new_user_group.group_data = group_data
174 174 self.sa.add(new_user_group)
175 175 perm_obj = self._create_default_perms(new_user_group)
176 176 self.sa.add(perm_obj)
177 177
178 178 self.grant_user_permission(user_group=new_user_group,
179 179 user=owner, perm='usergroup.admin')
180 180
181 181 return new_user_group
182 182 except Exception:
183 183 log.error(traceback.format_exc())
184 184 raise
185 185
186 186 def _get_memberships_for_user_ids(self, user_group, user_id_list):
187 187 members = []
188 188 for user_id in user_id_list:
189 189 member = self._get_membership(user_group.users_group_id, user_id)
190 190 members.append(member)
191 191 return members
192 192
193 193 def _get_added_and_removed_user_ids(self, user_group, user_id_list):
194 194 current_members = user_group.members or []
195 195 current_members_ids = [m.user.user_id for m in current_members]
196 196
197 197 added_members = [
198 198 user_id for user_id in user_id_list
199 199 if user_id not in current_members_ids]
200 200 if user_id_list == []:
201 201 # all members were deleted
202 202 deleted_members = current_members_ids
203 203 else:
204 204 deleted_members = [
205 205 user_id for user_id in current_members_ids
206 206 if user_id not in user_id_list]
207 207
208 208 return added_members, deleted_members
209 209
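The membership diff above reduces to two list comprehensions; a standalone illustration:

    current_members_ids = [1, 2, 3]
    user_id_list = [2, 3, 4]

    added = [uid for uid in user_id_list if uid not in current_members_ids]
    removed = [uid for uid in current_members_ids if uid not in user_id_list]
    assert (added, removed) == ([4], [1])
    # an empty user_id_list is special-cased above: everyone gets removed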
210 210 def _set_users_as_members(self, user_group, user_ids):
211 211 user_group.members = []
212 212 self.sa.flush()
213 213 members = self._get_memberships_for_user_ids(
214 214 user_group, user_ids)
215 215 user_group.members = members
216 216 self.sa.add(user_group)
217 217
218 218 def _update_members_from_user_ids(self, user_group, user_ids):
219 219 added, removed = self._get_added_and_removed_user_ids(
220 220 user_group, user_ids)
221 221 self._set_users_as_members(user_group, user_ids)
222 222 self._log_user_changes('added to', user_group, added)
223 223 self._log_user_changes('removed from', user_group, removed)
224 224 return added, removed
225 225
226 226 def _clean_members_data(self, members_data):
227 227 if not members_data:
228 228 members_data = []
229 229
230 230 members = []
231 231 for user in members_data:
232 232 uid = int(user['member_user_id'])
233 233 if uid not in members and user['type'] in ['new', 'existing']:
234 234 members.append(uid)
235 235 return members
236 236
237 237 def update(self, user_group, form_data, group_data=None):
238 238 user_group = self._get_user_group(user_group)
239 239 if 'users_group_name' in form_data:
240 240 user_group.users_group_name = form_data['users_group_name']
241 241 if 'users_group_active' in form_data:
242 242 user_group.users_group_active = form_data['users_group_active']
243 243 if 'user_group_description' in form_data:
244 244 user_group.user_group_description = form_data[
245 245 'user_group_description']
246 246
247 247 # handle owner change
248 248 if 'user' in form_data:
249 249 owner = form_data['user']
250 250 if isinstance(owner, str):
251 251 owner = User.get_by_username(form_data['user'])
252 252
253 253 if not isinstance(owner, User):
254 254 raise ValueError(
255 255 'invalid owner for user group: %s' % form_data['user'])
256 256
257 257 user_group.user = owner
258 258
259 259 added_user_ids = []
260 260 removed_user_ids = []
261 261 if 'users_group_members' in form_data:
262 262 members_id_list = self._clean_members_data(
263 263 form_data['users_group_members'])
264 264 added_user_ids, removed_user_ids = \
265 265 self._update_members_from_user_ids(user_group, members_id_list)
266 266
267 267 if group_data:
268 268 new_group_data = {}
269 269 new_group_data.update(group_data)
270 270 user_group.group_data = new_group_data
271 271
272 272 self.sa.add(user_group)
273 273 return user_group, added_user_ids, removed_user_ids
274 274
275 275 def delete(self, user_group, force=False):
276 276 """
277 277 Deletes a user group, unless the force flag is used.
278 278 Raises an exception if the group is still assigned to any repository
279 279 or repository group
280 280
281 281 :param user_group:
282 282 :param force:
283 283 """
284 284 user_group = self._get_user_group(user_group)
285 285 if not user_group:
286 286 return
287 287
288 288 try:
289 289 # check if this group is not assigned to repo
290 290 assigned_to_repo = [x.repository for x in UserGroupRepoToPerm.query()\
291 291 .filter(UserGroupRepoToPerm.users_group == user_group).all()]
292 292 # check if this group is not assigned to a repo group
293 293 assigned_to_repo_group = [x.group for x in UserGroupRepoGroupToPerm.query()\
294 294 .filter(UserGroupRepoGroupToPerm.users_group == user_group).all()]
295 295
296 296 if (assigned_to_repo or assigned_to_repo_group) and not force:
297 297 assigned = ','.join(map(safe_str,
298 298 assigned_to_repo+assigned_to_repo_group))
299 299
300 300 raise UserGroupAssignedException(
301 301 'UserGroup assigned to %s' % (assigned,))
302 302 self.sa.delete(user_group)
303 303 except Exception:
304 304 log.error(traceback.format_exc())
305 305 raise
306 306
307 307 def _log_user_changes(self, action, user_group, user_or_users):
308 308 users = user_or_users
309 309 if not isinstance(users, (list, tuple)):
310 310 users = [users]
311 311
312 312 group_name = user_group.users_group_name
313 313
314 314 for user_or_user_id in users:
315 315 user = self._get_user(user_or_user_id)
316 316 log_text = 'User {user} {action} {group}'.format(
317 317 action=action, user=user.username, group=group_name)
318 318 action_logger_generic(log_text)
319 319
320 320 def _find_user_in_group(self, user, user_group):
321 321 user_group_member = None
322 322 for m in user_group.members:
323 323 if m.user_id == user.user_id:
324 324 # Found this user's membership row
325 325 user_group_member = m
326 326 break
327 327
328 328 return user_group_member
329 329
330 330 def _get_membership(self, user_group_id, user_id):
331 331 user_group_member = UserGroupMember(user_group_id, user_id)
332 332 return user_group_member
333 333
334 334 def add_user_to_group(self, user_group, user):
335 335 user_group = self._get_user_group(user_group)
336 336 user = self._get_user(user)
337 337 user_member = self._find_user_in_group(user, user_group)
338 338 if user_member:
339 339 # user already in the group, skip
340 340 return True
341 341
342 342 member = self._get_membership(
343 343 user_group.users_group_id, user.user_id)
344 344 user_group.members.append(member)
345 345
346 346 try:
347 347 self.sa.add(member)
348 348 except Exception:
349 349 # what could go wrong here?
350 350 log.error(traceback.format_exc())
351 351 raise
352 352
353 353 self._log_user_changes('added to', user_group, user)
354 354 return member
355 355
356 356 def remove_user_from_group(self, user_group, user):
357 357 user_group = self._get_user_group(user_group)
358 358 user = self._get_user(user)
359 359 user_group_member = self._find_user_in_group(user, user_group)
360 360
361 361 if not user_group_member:
362 362 # User isn't in that group
363 363 return False
364 364
365 365 try:
366 366 self.sa.delete(user_group_member)
367 367 except Exception:
368 368 log.error(traceback.format_exc())
369 369 raise
370 370
371 371 self._log_user_changes('removed from', user_group, user)
372 372 return True
373 373
374 374 def has_perm(self, user_group, perm):
375 375 user_group = self._get_user_group(user_group)
376 376 perm = self._get_perm(perm)
377 377
378 378 return UserGroupToPerm.query()\
379 379 .filter(UserGroupToPerm.users_group == user_group)\
380 380 .filter(UserGroupToPerm.permission == perm).scalar() is not None
381 381
382 382 def grant_perm(self, user_group, perm):
383 383 user_group = self._get_user_group(user_group)
384 384 perm = self._get_perm(perm)
385 385
386 386 # if this permission is already granted skip it
387 387 _perm = UserGroupToPerm.query()\
388 388 .filter(UserGroupToPerm.users_group == user_group)\
389 389 .filter(UserGroupToPerm.permission == perm)\
390 390 .scalar()
391 391 if _perm:
392 392 return
393 393
394 394 new = UserGroupToPerm()
395 395 new.users_group = user_group
396 396 new.permission = perm
397 397 self.sa.add(new)
398 398 return new
399 399
400 400 def revoke_perm(self, user_group, perm):
401 401 user_group = self._get_user_group(user_group)
402 402 perm = self._get_perm(perm)
403 403
404 404 obj = UserGroupToPerm.query()\
405 405 .filter(UserGroupToPerm.users_group == user_group)\
406 406 .filter(UserGroupToPerm.permission == perm).scalar()
407 407 if obj:
408 408 self.sa.delete(obj)
409 409
410 410 def grant_user_permission(self, user_group, user, perm):
411 411 """
412 412 Grant permission for user on given user group, or update
413 413 existing one if found
414 414
415 415 :param user_group: Instance of UserGroup, users_group_id,
416 416 or users_group_name
417 417 :param user: Instance of User, user_id or username
418 418 :param perm: Instance of Permission, or permission_name
419 419 """
420 420 changes = {
421 421 'added': [],
422 422 'updated': [],
423 423 'deleted': []
424 424 }
425 425
426 426 user_group = self._get_user_group(user_group)
427 427 user = self._get_user(user)
428 428 permission = self._get_perm(perm)
429 429 perm_name = permission.permission_name
430 430 member_id = user.user_id
431 431 member_name = user.username
432 432
433 433 # check if we have that permission already
434 434 obj = self.sa.query(UserUserGroupToPerm)\
435 435 .filter(UserUserGroupToPerm.user == user)\
436 436 .filter(UserUserGroupToPerm.user_group == user_group)\
437 437 .scalar()
438 438 if obj is None:
439 439 # create new !
440 440 obj = UserUserGroupToPerm()
441 441 obj.user_group = user_group
442 442 obj.user = user
443 443 obj.permission = permission
444 444 self.sa.add(obj)
445 445 log.debug('Granted perm %s to %s on %s', perm, user, user_group)
446 446 action_logger_generic(
447 447 'granted permission: {} to user: {} on usergroup: {}'.format(
448 448 perm, user, user_group), namespace='security.usergroup')
449 449
450 450 changes['added'].append({
451 451 'change_obj': user_group.get_api_data(),
452 452 'type': 'user', 'id': member_id,
453 453 'name': member_name, 'new_perm': perm_name})
454 454
455 455 return changes
456 456
457 457 def revoke_user_permission(self, user_group, user):
458 458 """
459 459 Revoke permission for user on given user group
460 460
461 461 :param user_group: Instance of UserGroup, users_group_id,
462 462 or users_group_name
463 463 :param user: Instance of User, user_id or username
464 464 """
465 465 changes = {
466 466 'added': [],
467 467 'updated': [],
468 468 'deleted': []
469 469 }
470 470
471 471 user_group = self._get_user_group(user_group)
472 472 user = self._get_user(user)
473 473 perm_name = 'usergroup.none'
474 474 member_id = user.user_id
475 475 member_name = user.username
476 476
477 477 obj = self.sa.query(UserUserGroupToPerm)\
478 478 .filter(UserUserGroupToPerm.user == user)\
479 479 .filter(UserUserGroupToPerm.user_group == user_group)\
480 480 .scalar()
481 481 if obj:
482 482 self.sa.delete(obj)
483 483 log.debug('Revoked perm on %s on %s', user_group, user)
484 484 action_logger_generic(
485 485 'revoked permission from user: {} on usergroup: {}'.format(
486 486 user, user_group), namespace='security.usergroup')
487 487
488 488 changes['deleted'].append({
489 489 'change_obj': user_group.get_api_data(),
490 490 'type': 'user', 'id': member_id,
491 491 'name': member_name, 'new_perm': perm_name})
492 492
493 493 return changes
494 494
495 495 def grant_user_group_permission(self, target_user_group, user_group, perm):
496 496 """
497 497 Grant user group permission for given target_user_group
498 498
499 499 :param target_user_group:
500 500 :param user_group:
501 501 :param perm:
502 502 """
503 503 changes = {
504 504 'added': [],
505 505 'updated': [],
506 506 'deleted': []
507 507 }
508 508
509 509 target_user_group = self._get_user_group(target_user_group)
510 510 user_group = self._get_user_group(user_group)
511 511 permission = self._get_perm(perm)
512 512 perm_name = permission.permission_name
513 513 member_id = user_group.users_group_id
514 514 member_name = user_group.users_group_name
515 515
516 516 # forbid assigning same user group to itself
517 517 if target_user_group == user_group:
518 518 raise RepoGroupAssignmentError('target user group:%s cannot be '
519 519 'assigned to itself' % target_user_group)
520 520
521 521 # check if we have that permission already
522 522 obj = self.sa.query(UserGroupUserGroupToPerm)\
523 523 .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group)\
524 524 .filter(UserGroupUserGroupToPerm.user_group == user_group)\
525 525 .scalar()
526 526 if obj is None:
527 527 # create new !
528 528 obj = UserGroupUserGroupToPerm()
529 529 obj.user_group = user_group
530 530 obj.target_user_group = target_user_group
531 531 obj.permission = permission
532 532 self.sa.add(obj)
533 533 log.debug(
534 534 'Granted perm %s to %s on %s', perm, target_user_group, user_group)
535 535 action_logger_generic(
536 536 'granted permission: {} to usergroup: {} on usergroup: {}'.format(
537 537 perm, user_group, target_user_group),
538 538 namespace='security.usergroup')
539 539
540 540 changes['added'].append({
541 541 'change_obj': target_user_group.get_api_data(),
542 542 'type': 'user_group', 'id': member_id,
543 543 'name': member_name, 'new_perm': perm_name})
544 544
545 545 return changes
546 546
547 547 def revoke_user_group_permission(self, target_user_group, user_group):
548 548 """
549 549 Revoke user group permission for given target_user_group
550 550
551 551 :param target_user_group:
552 552 :param user_group:
553 553 """
554 554 changes = {
555 555 'added': [],
556 556 'updated': [],
557 557 'deleted': []
558 558 }
559 559
560 560 target_user_group = self._get_user_group(target_user_group)
561 561 user_group = self._get_user_group(user_group)
562 562 perm_name = 'usergroup.none'
563 563 member_id = user_group.users_group_id
564 564 member_name = user_group.users_group_name
565 565
566 566 obj = self.sa.query(UserGroupUserGroupToPerm)\
567 567 .filter(UserGroupUserGroupToPerm.target_user_group == target_user_group)\
568 568 .filter(UserGroupUserGroupToPerm.user_group == user_group)\
569 569 .scalar()
570 570 if obj:
571 571 self.sa.delete(obj)
572 572 log.debug(
573 573 'Revoked perm on %s on %s', target_user_group, user_group)
574 574 action_logger_generic(
575 575 'revoked permission from usergroup: {} on usergroup: {}'.format(
576 576 user_group, target_user_group),
577 577 namespace='security.usergroup')
578 578
579 579 changes['deleted'].append({
580 580 'change_obj': target_user_group.get_api_data(),
581 581 'type': 'user_group', 'id': member_id,
582 582 'name': member_name, 'new_perm': perm_name})
583 583
584 584 return changes
585 585
586 586 def get_perms_summary(self, user_group_id):
587 587 permissions = {
588 588 'repositories': {},
589 589 'repositories_groups': {},
590 590 }
591 591 ugroup_repo_perms = UserGroupRepoToPerm.query()\
592 592 .options(joinedload(UserGroupRepoToPerm.permission))\
593 593 .options(joinedload(UserGroupRepoToPerm.repository))\
594 594 .filter(UserGroupRepoToPerm.users_group_id == user_group_id)\
595 595 .all()
596 596
597 597 for gr in ugroup_repo_perms:
598 598 permissions['repositories'][gr.repository.repo_name] \
599 599 = gr.permission.permission_name
600 600
601 601 ugroup_group_perms = UserGroupRepoGroupToPerm.query()\
602 602 .options(joinedload(UserGroupRepoGroupToPerm.permission))\
603 603 .options(joinedload(UserGroupRepoGroupToPerm.group))\
604 604 .filter(UserGroupRepoGroupToPerm.users_group_id == user_group_id)\
605 605 .all()
606 606
607 607 for gr in ugroup_group_perms:
608 608 permissions['repositories_groups'][gr.group.group_name] \
609 609 = gr.permission.permission_name
610 610 return permissions
611 611
612 612 def enforce_groups(self, user, groups, extern_type=None):
613 613 user = self._get_user(user)
614 614 current_groups = user.group_member
615 615
616 616 # find the externally created groups, i.e. automatically created ones
617 617 log.debug('Enforcing user group set `%s` on user %s', groups, user)
618 618 # calculate from which groups the user should be removed:
619 619 # external groups that are not in the given groups
620 620 for gr in [x.users_group for x in current_groups]:
621 621 managed = gr.group_data.get('extern_type')
622 622 if managed:
623 623 if gr.users_group_name not in groups:
624 624 log.debug('Removing user %s from user group %s. '
625 625 'Group sync managed by: %s', user, gr, managed)
626 626 self.remove_user_from_group(gr, user)
627 627 else:
628 628 log.debug('Skipping removal from group %s since it is '
629 629 'not set to be automatically synchronized', gr)
630 630
631 631 # now we calculate in which groups user should be == groups params
632 632 owner = User.get_first_super_admin().username
633 633 for gr in set(groups):
634 634 existing_group = UserGroup.get_by_group_name(gr)
635 635 if not existing_group:
636 636 desc = 'Automatically created from plugin:%s' % extern_type
637 637 # we use first admin account to set the owner of the group
638 638 existing_group = UserGroupModel().create(
639 639 gr, desc, owner, group_data={'extern_type': extern_type})
640 640
641 641 # we can only add users to groups which have the sync flag set via the
642 642 # extern_type attribute.
643 643 # This is either set at creation time via plugins, or manually
644 644 managed = existing_group.group_data.get('extern_type')
645 645 if managed:
646 646 log.debug('Adding user %s to user group %s', user, gr)
647 647 UserGroupModel().add_user_to_group(existing_group, user)
648 648 else:
649 649 log.debug('Skipping addition to group %s since it is '
650 650 'not set to be automatically synchronized', gr)
651 651
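A usage sketch for the synchronization above; the plugin type and group names are placeholders:

    # mirror a user's externally managed (e.g. LDAP) groups
    UserGroupModel().enforce_groups(
        user='jdoe',
        groups=['ldap-developers', 'ldap-ops'],
        extern_type='ldap')
    # only groups whose group_data carries 'extern_type' are touched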
652 652 def change_groups(self, user, groups):
653 653 """
654 654 This method changes user group assignment
655 655 :param user: User
656 656 :param groups: list of UserGroup instances
657 657 """
658 658 user = self._get_user(user)
659 659 log.debug('Changing user(%s) assignment to groups(%s)', user, groups)
660 660 current_groups = user.group_member
661 661 current_groups = [x.users_group for x in current_groups]
662 662
663 663 # calculate from which groups the user should be removed/added
664 664 groups = set(groups)
665 665 current_groups = set(current_groups)
666 666
667 667 groups_to_remove = current_groups - groups
668 668 groups_to_add = groups - current_groups
669 669
670 670 removed_from_groups = []
671 671 added_to_groups = []
672 672 for gr in groups_to_remove:
673 673 log.debug('Removing user %s from user group %s',
674 674 user.username, gr.users_group_name)
675 675 removed_from_groups.append(gr.users_group_id)
676 676 self.remove_user_from_group(gr.users_group_name, user.username)
677 677 for gr in groups_to_add:
678 678 log.debug('Adding user %s to user group %s',
679 679 user.username, gr.users_group_name)
680 680 added_to_groups.append(gr.users_group_id)
681 681 UserGroupModel().add_user_to_group(
682 682 gr.users_group_name, user.username)
683 683
684 684 return added_to_groups, removed_from_groups
685 685
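A usage sketch; the group names are placeholders and are resolved to UserGroup instances first:

    target_groups = [UserGroup.get_by_group_name(name)
                     for name in ('devs', 'qa')]
    added_ids, removed_ids = UserGroupModel().change_groups(
        'jdoe', target_groups)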
686 686 def _serialize_user_group(self, user_group):
687 687 import rhodecode.lib.helpers as h
688 688 return {
689 689 'id': user_group.users_group_id,
690 690 # TODO: marcink figure out a way to generate the url for the
691 691 # icon
692 692 'icon_link': '',
693 693 'value_display': 'Group: %s (%d members)' % (
694 694 user_group.users_group_name, len(user_group.members),),
695 695 'value': user_group.users_group_name,
696 696 'description': user_group.user_group_description,
697 697 'owner': user_group.user.username,
698 698
699 699 'owner_icon': h.gravatar_url(user_group.user.email, 30),
700 700 'value_display_owner': h.person(user_group.user.email),
701 701
702 702 'value_type': 'user_group',
703 703 'active': user_group.users_group_active,
704 704 }
705 705
706 706 def get_user_groups(self, name_contains=None, limit=20, only_active=True,
707 707 expand_groups=False):
708 708 query = self.sa.query(UserGroup)
709 709 if only_active:
710 710 query = query.filter(UserGroup.users_group_active == true())
711 711
712 712 if name_contains:
713 ilike_expression = u'%{}%'.format(safe_unicode(name_contains))
713 ilike_expression = u'%{}%'.format(safe_str(name_contains))
714 714 query = query.filter(
715 715 UserGroup.users_group_name.ilike(ilike_expression))\
716 716 .order_by(func.length(UserGroup.users_group_name))\
717 717 .order_by(UserGroup.users_group_name)
718 718
719 719 query = query.limit(limit)
720 720 user_groups = query.all()
721 721 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
722 722 user_groups = UserGroupList(user_groups, perm_set=perm_set)
723 723
724 724 # store same serialize method to extract data from User
725 725 from rhodecode.model.user import UserModel
726 726 serialize_user = UserModel()._serialize_user
727 727
728 728 _groups = []
729 729 for group in user_groups:
730 730 entry = self._serialize_user_group(group)
731 731 if expand_groups:
732 732 expanded_members = []
733 733 for member in group.members:
734 734 expanded_members.append(serialize_user(member.user))
735 735 entry['members'] = expanded_members
736 736 _groups.append(entry)
737 737 return _groups
738 738
739 739 @staticmethod
740 740 def get_user_groups_as_dict(user_group):
741 741 import rhodecode.lib.helpers as h
742 742
743 743 data = {
744 744 'users_group_id': user_group.users_group_id,
745 745 'group_name': h.link_to_group(user_group.users_group_name),
746 746 'group_description': user_group.user_group_description,
747 747 'active': user_group.users_group_active,
748 748 "owner": user_group.user.username,
749 749 'owner_icon': h.gravatar_url(user_group.user.email, 30),
750 750 "owner_data": {
751 751 'owner': user_group.user.username,
752 752 'owner_icon': h.gravatar_url(user_group.user.email, 30)}
753 753 }
754 754 return data